From 1a31dfc4b435e3b8490398955989ec5c6b5f929b Mon Sep 17 00:00:00 2001
From: Johannes Koester
Date: Thu, 7 Dec 2023 12:25:55 +0100
Subject: [PATCH] fixes

---
 snakemake_storage_plugin_gcs/__init__.py | 46 ++++++++----------------
 tests/tests.py                           |  4 +--
 2 files changed, 17 insertions(+), 33 deletions(-)

diff --git a/snakemake_storage_plugin_gcs/__init__.py b/snakemake_storage_plugin_gcs/__init__.py
index aed40d9..93972a8 100644
--- a/snakemake_storage_plugin_gcs/__init__.py
+++ b/snakemake_storage_plugin_gcs/__init__.py
@@ -338,47 +338,35 @@ def mtime(self) -> float:
         """
         Return the modification time
         """
-        if self.exists():
-            if self.is_directory():
-                return max(
-                    blob.updated.timestamp() for blob in self.directory_entries()
-                )
-            else:
-                self.update_blob()
-                return self.blob.updated.timestamp()
-        else:
-            raise WorkflowError(
-                "The file does not seem to exist remotely: %s" % self.local_file()
+        if self.is_directory():
+            return max(
+                blob.updated.timestamp() for blob in self.directory_entries()
             )
+        else:
+            self.update_blob()
+            return self.blob.updated.timestamp()
 
     @retry.Retry(predicate=google_cloud_retry_predicate)
     def size(self) -> int:
         """
         Return the size in bytes
         """
-        if self.exists():
-            if self.is_directory():
-                return 0
-            else:
-                self.update_blob()
-                return self.blob.size // 1024
+        if self.is_directory():
+            return 0
         else:
-            return self._iofile.size_local
+            self.update_blob()
+            return self.blob.size // 1024
 
     @retry.Retry(predicate=google_cloud_retry_predicate, deadline=600)
     def retrieve_object(self):
         """
         Ensure that the object is accessible locally under self.local_path()
-
-        In the previous GLS.py this was _download. We retry with 10 minutes.
         """
-        if not self.exists():
-            return None
-
         # Create just a directory, or a file itself
         if self.is_directory():
-            return self._download_directory()
-        return download_blob(self.blob, self.local_file())
+            self._download_directory()
+        else:
+            download_blob(self.blob, self.local_path())
 
     # The following to methods are only required if the class inherits from
     # StorageObjectReadWrite.
@@ -400,7 +388,7 @@ def store_object(self):
         self.update_blob()
 
         # Distinguish between single file, and folder
-        f = self.local_file()
+        f = self.local_path()
         if os.path.isdir(f):
             # Ensure the "directory" exists
             self.blob.upload_from_string(
@@ -470,8 +458,7 @@ def _download_directory(self):
         Handle download of a storage folder (assists retrieve_blob)
         """
         # Create the directory locally
-        # TODO check that local_file is still valid
-        os.makedirs(self.local_file(), exist_ok=True)
+        self.local_path().mkdir(exist_ok=True)
 
         for blob in self.directory_entries():
             local_name = f"{blob.bucket.name}/{blob.name}"
@@ -482,9 +469,6 @@
 
             download_blob(blob, local_name)
 
-        # Return the root directory
-        return self.local_file()
-
     @retry.Retry(predicate=google_cloud_retry_predicate)
     def update_blob(self):
         """
diff --git a/tests/tests.py b/tests/tests.py
index 80bbd11..e1a4ae0 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -7,6 +7,8 @@
 from snakemake_storage_plugin_gcs import StorageProvider, StorageProviderSettings
 
 
+# Use local fake server as outlined here:
+# https://github.com/fsouza/fake-gcs-server
 os.environ["STORAGE_EMULATOR_HOST"] = "http://localhost:5050"
 
 
@@ -27,8 +29,6 @@ def get_storage_provider_cls(self) -> Type[StorageProviderBase]:
 
     def get_storage_provider_settings(self) -> Optional[StorageProviderSettingsBase]:
         # instantiate StorageProviderSettings of this plugin as appropriate
-        # Use local fake server as outlined here:
-        # https://www.claritician.com/how-to-mock-google-cloud-storage-during-development
         return StorageProviderSettings()
 
     def get_example_args(self) -> List[str]:
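Notes (not part of the patch):

With the exists() guards removed, mtime(), size(), and retrieve_object() now assume the
remote object is present; the caller is apparently expected to consult exists() before
invoking them. A minimal consumer-side sketch of that contract, using only method names
that appear in the patch (the fetch_if_present helper itself is hypothetical):

    def fetch_if_present(obj):
        # The check below replaces the guards this patch removes from the storage
        # object itself: exists() is consulted before mtime(), size(), or
        # retrieve_object() are called.
        if not obj.exists():
            raise FileNotFoundError(f"remote object not found: {obj.local_path()}")
        print(f"mtime: {obj.mtime()}, size: {obj.size()}")
        obj.retrieve_object()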
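One behavioural detail in _download_directory(): os.makedirs() creates missing parent
directories, whereas Path.mkdir(exist_ok=True) raises FileNotFoundError when a parent
is missing unless parents=True is passed. If the parents of local_path() are not
guaranteed to exist at this point, the closer equivalent would be:

    # Sketch only: matches the old os.makedirs(...) behaviour, including creation of
    # intermediate directories; whether that is needed depends on how local_path()
    # is laid out.
    self.local_path().mkdir(parents=True, exist_ok=True)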
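The test suite points the google-cloud-storage client at a local fake-gcs-server
instance through the STORAGE_EMULATOR_HOST environment variable, which the client
library honours. A minimal sketch of talking to such an emulator directly, assuming a
fake-gcs-server is already listening on http://localhost:5050 as configured in
tests.py (the project id and bucket name are placeholders):

    import os

    # Must be set before the client issues any request.
    os.environ["STORAGE_EMULATOR_HOST"] = "http://localhost:5050"

    from google.auth.credentials import AnonymousCredentials
    from google.cloud import storage

    # Anonymous credentials suffice for the emulator; no real GCP project is needed.
    client = storage.Client(project="test-project", credentials=AnonymousCredentials())
    bucket = client.create_bucket("snakemake-test-bucket")
    bucket.blob("example.txt").upload_from_string("hello")
    print(bucket.blob("example.txt").download_as_text())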