diff --git a/cloudpathlib/azure/azblobclient.py b/cloudpathlib/azure/azblobclient.py index 480bc348..98189378 100644 --- a/cloudpathlib/azure/azblobclient.py +++ b/cloudpathlib/azure/azblobclient.py @@ -170,7 +170,7 @@ def __init__( self._hns_enabled = None def _check_hns(self) -> Optional[bool]: - if not self._hns_enabled: + if self._hns_enabled is None: account_info = self.service_client.get_account_information() # type: ignore self._hns_enabled = account_info.get("is_hns_enabled", False) # type: ignore @@ -327,7 +327,7 @@ def _move_file( metadata=dict(last_modified=str(datetime.utcnow().timestamp())) ) - # we can use rename API same account and container on adls gen2 + # we can use the rename API when src and dst are in the same account on adls gen2 elif remove_src and (src.client is dst.client) and self._check_hns(): fsc = self.data_lake_client.get_file_system_client(src.container) # type: ignore diff --git a/docs/docs/authentication.md b/docs/docs/authentication.md index 0557c196..36018532 100644 --- a/docs/docs/authentication.md +++ b/docs/docs/authentication.md @@ -215,7 +215,7 @@ cp3 = CloudPath("s3://cloudpathlib-test-bucket/") Some Azure storage accounts are configured with "hierarchical namespace" enabled. This means that the storage account is backed by the Azure DataLake Storage Gen2 product rather than Azure Blob Storage. For many operations, the two are the same and one can use the Azure Blob Storage API. However, for some operations, a developer will need to use the Azure DataLake Storage API. The `AzureBlobClient` class implemented in cloudpathlib is designed to detect if hierarchical namespace is enabled and use the Azure DataLake Storage API in the places where it is necessary or it provides a performance improvement. Usually, a user of cloudpathlib will not need to know if hierarchical namespace is enabled and the storage account is backed by Azure DataLake Storage Gen2 or Azure Blob Storage. 
-If needed, the Azure SDK provided `DataLakeServiceClient` object can be accessed via the `AzureBlobClient.data_lake_client`. The Azure SDK provided `BlobServiceClient` object can be accessed via `AzureBlobClient.blob_client`. +If needed, the Azure SDK provided `DataLakeServiceClient` object can be accessed via the `AzureBlobClient.data_lake_client`. The Azure SDK provided `BlobServiceClient` object can be accessed via `AzureBlobClient.service_client`. ## Pickling `CloudPath` objects diff --git a/tests/mock_clients/mock_azureblob.py b/tests/mock_clients/mock_azureblob.py index 1afb8a39..f99e0d4a 100644 --- a/tests/mock_clients/mock_azureblob.py +++ b/tests/mock_clients/mock_azureblob.py @@ -18,6 +18,10 @@ class _JsonCache: + """Used to mock file metadata store on cloud storage; reads/writes to disk so + different clients can access the same metadata store. + """ + def __init__(self, path: Path): self.path = path @@ -49,7 +53,7 @@ def __init__(self, test_dir, adls): # copy test assets for reference in tests without affecting assets shutil.copytree(TEST_ASSETS, test_dir, dirs_exist_ok=True) - # root is parent of the test specific directort + # root is parent of the test specific directory self.root = test_dir.parent self.test_dir = test_dir