diff --git a/.buildkite/scripts/send_and_wait_for_test_bag.py b/.buildkite/scripts/send_and_wait_for_test_bag.py
index 0964d0f3a8..3d12efdc2c 100755
--- a/.buildkite/scripts/send_and_wait_for_test_bag.py
+++ b/.buildkite/scripts/send_and_wait_for_test_bag.py
@@ -77,10 +77,10 @@ def pprint_time(seconds):
     print(f"status = {status.ljust(9)} (t = {pprint_time(elapsed_time)})")
 
     if status == "succeeded":
-        print(f"🎉 Test bag successful!")
+        print("🎉 Test bag successful!")
         break
     elif status == "failed":
-        print(f"😱 Test bag failed!")
+        print("😱 Test bag failed!")
         sys.exit(1)
     else:
         time.sleep(5)
diff --git a/monitoring/digital_production_report/digital_production_report.py b/monitoring/digital_production_report/digital_production_report.py
index 83ab919b0a..bb1ac1ba1c 100755
--- a/monitoring/digital_production_report/digital_production_report.py
+++ b/monitoring/digital_production_report/digital_production_report.py
@@ -155,7 +155,7 @@ def run_query(*, es_host, es_user, es_pass, query_type, query):
     es_url = f"https://{es_host}:9243/storage_files/_{query_type}"
 
     auth_string = f"{es_user}:{es_pass}".encode("ascii")
-    auth_header = f"Basic " + base64.b64encode(auth_string).decode("ascii")
+    auth_header = "Basic " + base64.b64encode(auth_string).decode("ascii")
 
     print(f"Making query {json.dumps(query)}")
     req = urllib.request.Request(
diff --git a/scripts/_azure.py b/scripts/_azure.py
index 78e22ab811..ec53ded7fd 100644
--- a/scripts/_azure.py
+++ b/scripts/_azure.py
@@ -27,7 +27,7 @@ def _create_sas_uris(connection_string, *, expiry, ip):
         container_name = container["name"]
 
         if expiry is None:
-            raise TypeError(f"expiry cannot be None!")
+            raise TypeError("expiry cannot be None!")
 
         for mode, allow_write in (("read_only", False), ("read_write", True)):
             permission = ContainerSasPermissions(
diff --git a/scripts/migrations/2020-08-update_migration_created_date.py b/scripts/migrations/2020-08-update_migration_created_date.py
index b449940d62..f5a6cea129 100755
--- a/scripts/migrations/2020-08-update_migration_created_date.py
+++ b/scripts/migrations/2020-08-update_migration_created_date.py
@@ -71,7 +71,7 @@ def get_backfill_item(id, version):
             Key={"id": id, "version": version}
         )["Item"]
     except KeyError:
-        record_error(id, version, f"Cannot find backfill storage manifest!!!!")
+        record_error(id, version, "Cannot find backfill storage manifest!!!!")
 
 
 def is_expected_diff(id, version, diff):
diff --git a/scripts/migrations/2020-10-archivematica_bags_migration.py b/scripts/migrations/2020-10-archivematica_bags_migration.py
index 4424d1cf21..42519ba5e1 100755
--- a/scripts/migrations/2020-10-archivematica_bags_migration.py
+++ b/scripts/migrations/2020-10-archivematica_bags_migration.py
@@ -241,7 +241,7 @@ def migrate(self, version, space, external_identifier):
         self._write_fetch_file(
             working_folder=working_folder, bucket=bucket, path=path, files=payload_files
         )
-        logger.log(f"Wrote fetch.txt")
+        logger.log("Wrote fetch.txt")
 
         # Get required files from bag
         self._get_bagit_files_from_s3(
@@ -251,7 +251,7 @@ def migrate(self, version, space, external_identifier):
             version=version,
             tagmanifest_files=tagmanifest_files,
         )
-        logger.log(f"Got BagIt files from S3")
+        logger.log("Got BagIt files from S3")
 
         # Update bag-info.txt
         archivematica_uuid = self._get_archivematica_uuid(files=payload_files)
@@ -357,7 +357,7 @@ def migrate(self, version, space, external_identifier):
     os.makedirs(target_folder, exist_ok=True)
 
     logger = SimpleLog(
-        log_location=os.path.join(target_folder, f"error.log"),
+        log_location=os.path.join(target_folder, "error.log"),
        init_msg=f"Starting migration of {documents_to_process} bags",
    )
 
diff --git a/scripts/migrations/2020-10-miro-migration/s3.py b/scripts/migrations/2020-10-miro-migration/s3.py
index c84ea88126..d0586df3e5 100644
--- a/scripts/migrations/2020-10-miro-migration/s3.py
+++ b/scripts/migrations/2020-10-miro-migration/s3.py
@@ -42,7 +42,7 @@ def get_s3_object(*, bucket, key):
     """
     Retrieves the contents of an object from S3.
     """
-    out_path = os.path.join(f"_cache", bucket, key)
+    out_path = os.path.join("_cache", bucket, key)
     os.makedirs(os.path.dirname(out_path), exist_ok=True)
 
     try: