@@ -7,8 +7,7 @@ Use the [`obstore.list`][] method.
 ``` py
 import obstore as obs
 
-# Create a Store
-store = get_object_store()
+store = ...  # store of your choice
 
 # Recursively list all files below the 'data' path.
 # 1. On AWS S3 this would be the 'data/' prefix
@@ -35,8 +34,7 @@ This Arrow integration requires the [`arro3-core` dependency](https://kylebarron
 ``` py
 import obstore as obs
 
-# Create a Store
-store = get_object_store()
+store = ...  # store of your choice
 
 # Get a stream of Arrow RecordBatches of metadata
 list_stream = obs.list(store, prefix="data", return_arrow=True)
@@ -80,8 +78,7 @@ Use the [`obstore.get`][] function to fetch data bytes from remote storage or fi
 ``` py
 import obstore as obs
 
-# Create a Store
-store = get_object_store()
+store = ...  # store of your choice
 
 # Retrieve a specific file
 path = "data/file01.parquet"
@@ -131,7 +128,7 @@ Use the [`obstore.put`][] function to atomically write data. `obstore.put` will
 ``` py
 import obstore as obs
 
-store = get_object_store()
+store = ...  # store of your choice
 path = "data/file1"
 content = b"hello"
 obs.put(store, path, content)
@@ -143,7 +140,7 @@ You can also upload local files:
 from pathlib import Path
 import obstore as obs
 
-store = get_object_store()
+store = ...  # store of your choice
 path = "data/file1"
 content = Path("path/to/local/file")
 obs.put(store, path, content)
@@ -154,7 +151,7 @@ Or file-like objects:
 ``` py
 import obstore as obs
 
-store = get_object_store()
+store = ...  # store of your choice
 path = "data/file1"
 with open("path/to/local/file", "rb") as content:
     obs.put(store, path, content)
@@ -169,7 +166,7 @@ def bytes_iter():
     for i in range(5):
         yield b"foo"
 
-store = get_object_store()
+store = ...  # store of your choice
 path = "data/file1"
 content = bytes_iter()
 obs.put(store, path, content)
@@ -184,7 +181,7 @@ async def bytes_stream():
     for i in range(5):
         yield b"foo"
 
-store = get_object_store()
+store = ...  # store of your choice
 path = "data/file1"
 content = bytes_stream()
 obs.put(store, path, content)
@@ -201,8 +198,8 @@ Download the file, collect its bytes in memory, then upload it. Note that this w
 ``` py
 import obstore as obs
 
-store1 = get_object_store()
-store2 = get_object_store()
+store1 = ...  # store of your choice
+store2 = ...  # store of your choice
 
 path1 = "data/file1"
 path2 = "data/file2"
@@ -219,8 +216,8 @@ First download the file to disk, then upload it.
 from pathlib import Path
 import obstore as obs
 
-store1 = get_object_store()
-store2 = get_object_store()
+store1 = ...  # store of your choice
+store2 = ...  # store of your choice
 
 path1 = "data/file1"
 path2 = "data/file2"
@@ -245,8 +242,8 @@ Using the async API is currently required to use streaming copies.
 ``` py
 import obstore as obs
 
-store1 = get_object_store()
-store2 = get_object_store()
+store1 = ...  # store of your choice
+store2 = ...  # store of your choice
 
 path1 = "data/file1"
 path2 = "data/file2"
0 commit comments