Commit
Merge branch 'main' of github.com:snakemake/snakemake-storage-plugin-gs
johanneskoester committed Aug 19, 2024
2 parents bb92fdb + 27c80dc commit f284a1e
Showing 5 changed files with 95 additions and 191 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -94,7 +94,7 @@ jobs:
poetry run python tests/test_fake_gcs.py
- name: Run pytest
run: poetry run coverage run -m pytest -v tests/tests.py
run: poetry run coverage run -m pytest -vv -s tests/tests.py

- name: Run Coverage
run: poetry run coverage report -m
161 changes: 8 additions & 153 deletions .gitignore
@@ -1,163 +1,18 @@
poetry.lock
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# coverage

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site
# .pytest_cache

# mypy
.mypy_cache/
.dmypy.json
dmypy.json
pytest_cache

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/
# __pycache__ and any .pyc files

# Cython debug symbols
cython_debug/
__pycache__
*.pyc

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
**/__pycache__/
**/*.pyc

poetry.lock
82 changes: 60 additions & 22 deletions snakemake_storage_plugin_gcs/__init__.py
@@ -21,10 +21,12 @@
get_constant_prefix,
Mtime,
)
from snakemake_interface_common.logging import get_logger

from urllib.parse import urlparse
import base64
import os

from pathlib import Path
import google.cloud.exceptions
from google.cloud import storage
from google.api_core import retry
@@ -266,6 +268,7 @@ def __post_init__(self):
self.key = parsed.path.lstrip("/")
self._local_suffix = self._local_suffix_from_key(self.key)
self._is_dir = None
self.logger = get_logger()

def cleanup(self):
# Close any open connections, unmount stuff, etc.
@@ -288,8 +291,8 @@ async def inventory(self, cache: IOCacheStorageInterface):
- cache.size
"""
if self.get_inventory_parent() in cache.exists_in_storage:
# bucket has been inventorized before, stop here
return
# bucket has been inventorized before, stop here
return

# check if bucket exists
if not self.bucket.exists():
@@ -381,29 +384,20 @@ def store_object(self):
TODO: note from vsoch - I'm not sure I read this function name right,
but I didn't find an equivalent "upload" function so I thought this might
be it. The original function comment is below.
be it. The original function comment is below.
"""
# Ensure that the object is stored at the location specified by
# self.local_path().
try:
if not self.bucket.exists():
self.client.create_bucket(self.bucket)
self.ensure_bucket_exists()

# Distinguish between single file, and folder
f = self.local_path()
if os.path.isdir(f):
# Ensure the "directory" exists
self.blob.upload_from_string(
"", content_type="application/x-www-form-urlencoded;charset=UTF-8"
)
for root, _, files in os.walk(f):
for filename in files:
filename = os.path.join(root, filename)
bucket_path = filename.lstrip(self.bucket.name).lstrip("/")
blob = self.bucket.blob(bucket_path)
blob.upload_from_filename(filename)
local_object = self.local_path()
if os.path.isdir(local_object):
self.upload_directory(local_directory_path=local_object)
else:
self.blob.upload_from_filename(f)
self.blob.upload_from_filename(local_object)

except google.cloud.exceptions.Forbidden as e:
raise WorkflowError(
e,
@@ -413,13 +407,57 @@ def store_object(self):
"--scopes (see Snakemake documentation).",
)

def ensure_bucket_exists(self) -> None:
"""
Check that the bucket exists, if not create it.
"""
if not self.bucket.exists():
self.client.create_bucket(self.bucket)
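
Bucket creation can race when several jobs store into the same not-yet-existing bucket at once; the fake-GCS test further down already treats a Conflict from create_bucket as benign. A minimal sketch of a race-tolerant variant, assuming google.cloud.exceptions.Conflict is raised when the bucket already exists (an illustration, not the plugin's actual code):

# Sketch: tolerate a concurrent create_bucket by catching Conflict.
from google.cloud.exceptions import Conflict

def ensure_bucket_exists(self) -> None:
    if not self.bucket.exists():
        try:
            self.client.create_bucket(self.bucket)
        except Conflict:
            pass  # another worker created the bucket first; nothing to do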

def upload_directory(self, local_directory_path: Path):
"""
Upload a directory to the storage.
"""
self.ensure_bucket_exists()

# if the local directory is empty, we need to create a blob
# with no content to represent the directory
if not os.listdir(local_directory_path):
self.blob.upload_from_string(
"", content_type="application/x-www-form-urlencoded;charset=UTF-8"
)

for root, _, files in os.walk(local_directory_path):
for filename in files:
relative_filepath = os.path.join(root, filename)
local_prefix = self.provider.local_prefix.as_posix()

# remove the prefix (".snakemake/storage/gcs/{bucket_name}/");
# this gives us the path to the file relative to the bucket
bucket_file_path = (
relative_filepath.removeprefix(local_prefix)
.lstrip("/")
.removeprefix(self.bucket_name)
.lstrip("/")
)

blob = self.bucket.blob(bucket_file_path)
blob.upload_from_filename(relative_filepath)
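
To make the prefix stripping above concrete, here is a small self-contained example; the local prefix, bucket name, and file path are hypothetical, chosen only to mimic the ".snakemake/storage/gcs/{bucket_name}/..." staging layout mentioned in the comment (str.removeprefix requires Python 3.9+):

# Hypothetical values showing how a locally staged path becomes a
# bucket-relative object key via the stripping logic above.
local_prefix = ".snakemake/storage/gcs"
bucket_name = "my-bucket"
relative_filepath = ".snakemake/storage/gcs/my-bucket/results/sample1/calls.vcf"

bucket_file_path = (
    relative_filepath.removeprefix(local_prefix)
    .lstrip("/")
    .removeprefix(bucket_name)
    .lstrip("/")
)
assert bucket_file_path == "results/sample1/calls.vcf"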

@retry.Retry(predicate=google_cloud_retry_predicate)
def remove(self):
def remove(self) -> None:
"""
Remove the object from the storage.
"""
# This was a total guess lol
self.blob.delete()
if self.is_directory():
prefix = self.key
if not prefix.endswith("/"):
prefix += "/"
blobs = self.client.list_blobs(self.bucket_name, prefix=prefix)
for blob in blobs:
blob.delete()
else:
self.blob.delete()
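
The directory branch above deletes a "directory" by listing every object whose key starts with the directory's key plus a trailing slash and removing them one by one. The same pattern as a standalone sketch with the google-cloud-storage client (bucket and prefix names are made up for illustration):

# Delete everything under a directory-like key by prefix listing; the
# trailing slash keeps a prefix like "run1" from also matching "run10/...".
from google.cloud import storage

client = storage.Client()
prefix = "results/run1"
if not prefix.endswith("/"):
    prefix += "/"
for blob in client.list_blobs("my-bucket", prefix=prefix):
    blob.delete()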

# The following two methods are only required if the class inherits from
# StorageObjectGlob.
2 changes: 1 addition & 1 deletion tests/test_fake_gcs.py
@@ -38,6 +38,7 @@

# Create a new Bucket
bucket = client.bucket("snakemake-test-bucket")

try:
client.create_bucket(bucket)
except Conflict:
@@ -54,6 +55,5 @@
blob = bucket.blob(file_name)
blob.upload_from_string(contents)


assert not bucket.blob("foo").exists()
print(list(bucket.list_blobs()))
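
For context, this test is meant to run against an emulated Cloud Storage endpoint rather than the real service. One common way to wire a client to such an emulator is the STORAGE_EMULATOR_HOST environment variable honored by google-cloud-storage; the host, port, and anonymous-client choice below are assumptions for illustration, not this repository's actual test setup:

# Hedged sketch: point the client at a locally running fake GCS server.
import os
from google.cloud import storage

os.environ["STORAGE_EMULATOR_HOST"] = "http://localhost:4443"  # assumed port
client = storage.Client.create_anonymous_client()
bucket = client.bucket("snakemake-test-bucket")
print(bucket.exists())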
(The diff of the fifth changed file was not loaded on this page.)