fix linting errors
agnesgaroux committed Dec 17, 2024
1 parent a5e1614 commit a8e7982
Showing 6 changed files with 9 additions and 9 deletions.
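
Every change in this commit is the same fix: an f-string that contains no placeholder expressions is rewritten as a plain string literal. The commit message doesn't name the linter, but this is the pattern that pyflakes/flake8 typically report as F541 ("f-string is missing placeholders"); the short sketch below, built from a line in this diff, is illustrative rather than part of the commit.

    # Flagged: the f prefix does nothing here, there is nothing to interpolate.
    print(f"🎉 Test bag successful!")

    # Fixed: drop the prefix; the output is identical.
    print("🎉 Test bag successful!")

    # f-strings remain appropriate when a value is actually interpolated:
    print(f"status = {status.ljust(9)} (t = {pprint_time(elapsed_time)})")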
4 changes: 2 additions & 2 deletions .buildkite/scripts/send_and_wait_for_test_bag.py
@@ -77,10 +77,10 @@ def pprint_time(seconds):
     print(f"status = {status.ljust(9)} (t = {pprint_time(elapsed_time)})")
 
     if status == "succeeded":
-        print(f"🎉 Test bag successful!")
+        print("🎉 Test bag successful!")
         break
     elif status == "failed":
-        print(f"😱 Test bag failed!")
+        print("😱 Test bag failed!")
         sys.exit(1)
     else:
         time.sleep(5)
@@ -155,7 +155,7 @@ def run_query(*, es_host, es_user, es_pass, query_type, query):
     es_url = f"https://{es_host}:9243/storage_files/_{query_type}"
 
     auth_string = f"{es_user}:{es_pass}".encode("ascii")
-    auth_header = f"Basic " + base64.b64encode(auth_string).decode("ascii")
+    auth_header = "Basic " + base64.b64encode(auth_string).decode("ascii")
 
     print(f"Making query {json.dumps(query)}")
     req = urllib.request.Request(
2 changes: 1 addition & 1 deletion scripts/_azure.py
@@ -27,7 +27,7 @@ def _create_sas_uris(connection_string, *, expiry, ip):
         container_name = container["name"]
 
         if expiry is None:
-            raise TypeError(f"expiry cannot be None!")
+            raise TypeError("expiry cannot be None!")
 
         for mode, allow_write in (("read_only", False), ("read_write", True)):
             permission = ContainerSasPermissions(
@@ -71,7 +71,7 @@ def get_backfill_item(id, version):
             Key={"id": id, "version": version}
         )["Item"]
     except KeyError:
-        record_error(id, version, f"Cannot find backfill storage manifest!!!!")
+        record_error(id, version, "Cannot find backfill storage manifest!!!!")
 
 
 def is_expected_diff(id, version, diff):
6 changes: 3 additions & 3 deletions scripts/migrations/2020-10-archivematica_bags_migration.py
@@ -241,7 +241,7 @@ def migrate(self, version, space, external_identifier):
         self._write_fetch_file(
             working_folder=working_folder, bucket=bucket, path=path, files=payload_files
         )
-        logger.log(f"Wrote fetch.txt")
+        logger.log("Wrote fetch.txt")
 
         # Get required files from bag
         self._get_bagit_files_from_s3(
@@ -251,7 +251,7 @@ def migrate(self, version, space, external_identifier):
             version=version,
             tagmanifest_files=tagmanifest_files,
         )
-        logger.log(f"Got BagIt files from S3")
+        logger.log("Got BagIt files from S3")
 
         # Update bag-info.txt
         archivematica_uuid = self._get_archivematica_uuid(files=payload_files)
@@ -357,7 +357,7 @@ def migrate(self, version, space, external_identifier):
     os.makedirs(target_folder, exist_ok=True)
 
     logger = SimpleLog(
-        log_location=os.path.join(target_folder, f"error.log"),
+        log_location=os.path.join(target_folder, "error.log"),
         init_msg=f"Starting migration of {documents_to_process} bags",
     )
 
2 changes: 1 addition & 1 deletion scripts/migrations/2020-10-miro-migration/s3.py
@@ -42,7 +42,7 @@ def get_s3_object(*, bucket, key):
     """
     Retrieves the contents of an object from S3.
     """
-    out_path = os.path.join(f"_cache", bucket, key)
+    out_path = os.path.join("_cache", bucket, key)
     os.makedirs(os.path.dirname(out_path), exist_ok=True)
 
     try:
