dbg
johanneskoester committed Dec 7, 2023
1 parent 32241e0 commit f7ff963
Showing 2 changed files with 37 additions and 0 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/ci.yml
@@ -88,6 +88,10 @@ jobs:
run: |
docker run -d -p 5050:4443 -v storage_data:/storage fsouza/fake-gcs-server -scheme http
- name: Test fake GCS
run: |
python tests/test_fake_gcs.py

- name: Run pytest
run: poetry run coverage run -m pytest tests/tests.py

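Note: the fake GCS container above is published with -p 5050:4443, so the emulator's port 4443 is reachable on host port 5050. To run tests/test_fake_gcs.py against that container, the endpoint would need to be overridden via the mechanism the test itself documents; a minimal sketch (the port value is taken from the mapping above):

import os
# Host side of the -p 5050:4443 mapping; only needed when not using the default 4443.
os.environ["STORAGE_EMULATOR_HOST"] = "http://localhost:5050"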
33 changes: 33 additions & 0 deletions tests/test_fake_gcs.py
@@ -0,0 +1,33 @@
import tempfile
import os

from google.auth.credentials import AnonymousCredentials
from google.cloud import storage

# This endpoint assumes that the fake-gcs-server container exposes its default port 4443.
# If you map it to a different host port, set the STORAGE_EMULATOR_HOST environment variable accordingly.
os.environ.setdefault("STORAGE_EMULATOR_HOST", "http://localhost:4443")


client = storage.Client(
credentials=AnonymousCredentials(),
project="test",
# Alternatively, instead of relying on the global STORAGE_EMULATOR_HOST environment variable,
# you can point this client at the local fake GCS server directly:
# client_options={"api_endpoint": "http://localhost:4443"},
)

# List the buckets
for bucket in client.list_buckets():
    print(f"Bucket: {bucket.name}\n")

    # List the blobs in each bucket
    for blob in bucket.list_blobs():
        print(f"Blob: {blob.name}")

        # Print the content of the blob
        b = bucket.get_blob(blob.name)
        with tempfile.NamedTemporaryFile() as temp_file:
            b.download_to_filename(temp_file.name)
            temp_file.seek(0, 0)
            print(temp_file.read(), "\n")
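For local debugging it can help to seed the emulator with a bucket and a test object before running the listing above, so it actually prints something. A minimal sketch using the same anonymous client, assuming the fake-gcs-server emulator accepts bucket creation (the bucket and object names are only examples):

# Create a bucket on the emulator and upload a small text blob.
bucket = client.create_bucket("test-bucket")
blob = bucket.blob("hello.txt")
blob.upload_from_string("hello from fake GCS")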
