diff --git a/lib/galaxy/util/__init__.py b/lib/galaxy/util/__init__.py
index d2c8617c08bd..ed39a98f9e54 100644
--- a/lib/galaxy/util/__init__.py
+++ b/lib/galaxy/util/__init__.py
@@ -525,9 +525,7 @@ def shrink_stream_by_size(
rval = value.read(size)
value.seek(start)
return rval
- raise ValueError(
- "With the provided join_by value (%s), the minimum size value is %i." % (join_by, min_size)
- )
+ raise ValueError(f"With the provided join_by value ({join_by}), the minimum size value is {min_size}.")
left_index = right_index = int((size - len_join_by) / 2)
if left_index + right_index + len_join_by < size:
if left_larger:
@@ -566,9 +564,7 @@ def shrink_string_by_size(
return value[:size]
elif end_on_size_error:
return value[-size:]
- raise ValueError(
- "With the provided join_by value (%s), the minimum size value is %i." % (join_by, min_size)
- )
+ raise ValueError(f"With the provided join_by value ({join_by}), the minimum size value is {min_size}.")
left_index = right_index = int((size - len_join_by) / 2)
if left_index + right_index + len_join_by < size:
if left_larger:
@@ -1540,7 +1536,7 @@ def nice_size(size: Union[float, int, str, Decimal]) -> str:
return "??? bytes"
size, prefix = metric_prefix(size, 1024)
if prefix == "":
- return "%d bytes" % size
+ return f"{int(size)} bytes"
else:
return f"{size:.1f} {prefix}B"
@@ -1841,7 +1837,7 @@ def build_url(base_url, port=80, scheme="http", pathspec=None, params=None, dose
parsed_url.scheme = scheme
assert parsed_url.scheme in ("http", "https", "ftp"), f"Invalid URL scheme: {parsed_url.scheme}"
if port != 80:
- url = "%s://%s:%d/%s" % (parsed_url.scheme, parsed_url.netloc.rstrip("/"), int(port), parsed_url.path)
+ url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}:{int(port)}/{parsed_url.path}"
else:
url = f"{parsed_url.scheme}://{parsed_url.netloc.rstrip('/')}/{parsed_url.path.lstrip('/')}"
if len(pathspec) > 0:
diff --git a/lib/galaxy/util/heartbeat.py b/lib/galaxy/util/heartbeat.py
index 901f87291e8b..520ddde12348 100644
--- a/lib/galaxy/util/heartbeat.py
+++ b/lib/galaxy/util/heartbeat.py
@@ -3,6 +3,7 @@
import threading
import time
import traceback
+from typing import Dict
def get_current_thread_object_dict():
@@ -42,7 +43,7 @@ def __init__(self, config, name="Heartbeat Thread", period=20, fname="heartbeat.
self.fname_nonsleeping = None
self.file_nonsleeping = None
self.pid = None
- self.nonsleeping_heartbeats = {}
+ self.nonsleeping_heartbeats: Dict[int, int] = {}
# Event to wait on when sleeping, allows us to interrupt for shutdown
self.wait_event = threading.Event()
@@ -63,16 +64,16 @@ def open_logs(self):
if self.file is None or self.file.closed:
self.file = open(self.fname, "a")
self.file_nonsleeping = open(self.fname_nonsleeping, "a")
- self.file.write("Heartbeat for pid %d thread started at %s\n\n" % (self.pid, time.asctime()))
+ self.file.write(f"Heartbeat for pid {self.pid} thread started at {time.asctime()}\n\n")
self.file_nonsleeping.write(
- "Non-Sleeping-threads for pid %d thread started at %s\n\n" % (self.pid, time.asctime())
+ f"Non-Sleeping-threads for pid {self.pid} thread started at {time.asctime()}\n\n"
)
def close_logs(self):
if self.file is not None and not self.file.closed:
- self.file.write("Heartbeat for pid %d thread stopped at %s\n\n" % (self.pid, time.asctime()))
+ self.file.write(f"Heartbeat for pid {self.pid} thread stopped at {time.asctime()}\n\n")
self.file_nonsleeping.write(
- "Non-Sleeping-threads for pid %d thread stopped at %s\n\n" % (self.pid, time.asctime())
+ f"Non-Sleeping-threads for pid {self.pid} thread stopped at {time.asctime()}\n\n"
)
self.file.close()
self.file_nonsleeping.close()
@@ -183,16 +184,7 @@ def print_nonsleeping(self, threads_object_dict):
good_frame = self.get_interesting_stack_frame(tb)
self.file_nonsleeping.write(
- 'Thread %s\t%s\tnon-sleeping for %d heartbeat(s)\n File %s:%d\n Function "%s"\n %s\n'
- % (
- thread_id,
- object,
- self.nonsleeping_heartbeats[thread_id],
- good_frame[0],
- good_frame[1],
- good_frame[2],
- good_frame[3],
- )
+ f'Thread {thread_id}\t{object}\tnon-sleeping for {self.nonsleeping_heartbeats[thread_id]} heartbeat(s)\n File {good_frame[0]}:{good_frame[1]}\n Function "{good_frame[2]}"\n {good_frame[3]}\n'
)
all_threads_are_sleeping = False
diff --git a/lib/galaxy/util/permutations.py b/lib/galaxy/util/permutations.py
index 92573d8c6dbc..023778ce9332 100644
--- a/lib/galaxy/util/permutations.py
+++ b/lib/galaxy/util/permutations.py
@@ -60,8 +60,7 @@ def __extend_with_matched_combos(single_inputs, multi_inputs, nested):
continue
if len(multi_input_values) != len(first_multi_value):
raise InputMatchedException(
- "Received %d inputs for '%s' and %d inputs for '%s', these should be of equal length"
- % (len(multi_input_values), multi_input_key, len(first_multi_value), first_multi_input_key)
+ f"Received {len(multi_input_values)} inputs for '{multi_input_key}' and {len(first_multi_value)} inputs for '{first_multi_input_key}', these should be of equal length"
)
for index, value in enumerate(multi_input_values):
diff --git a/lib/galaxy/util/rules_dsl.py b/lib/galaxy/util/rules_dsl.py
index 3ce75495e7a3..ebea78be82a1 100644
--- a/lib/galaxy/util/rules_dsl.py
+++ b/lib/galaxy/util/rules_dsl.py
@@ -187,7 +187,7 @@ def apply(self, rule, data, sources):
new_rows = []
for index, row in enumerate(data):
- new_rows.append(row + ["%d" % (index + start)])
+ new_rows.append(row + [f"{index + start}"])
return new_rows, sources
diff --git a/lib/galaxy/visualization/data_providers/basic.py b/lib/galaxy/visualization/data_providers/basic.py
index 686b5e870c24..8629f30d14de 100644
--- a/lib/galaxy/visualization/data_providers/basic.py
+++ b/lib/galaxy/visualization/data_providers/basic.py
@@ -77,10 +77,9 @@ def get_data(self, columns=None, start_val=0, max_vals=None, skip_comments=True,
# columns is an array of ints for now (should handle column names later)
columns = loads(columns)
for column in columns:
- assert (column < self.original_dataset.metadata.columns) and (
- column >= 0
- ), "column index (%d) must be positive and less" % (column) + " than the number of columns: %d" % (
- self.original_dataset.metadata.columns
+ assert column < self.original_dataset.metadata.columns and column >= 0, (
+ f"column index ({column}) must be positive and less"
+ f" than the number of columns: {self.original_dataset.metadata.columns}"
)
# set up the response, column lists
diff --git a/lib/galaxy/visualization/data_providers/cigar.py b/lib/galaxy/visualization/data_providers/cigar.py
index 41d5e9435960..5a2fd934688d 100644
--- a/lib/galaxy/visualization/data_providers/cigar.py
+++ b/lib/galaxy/visualization/data_providers/cigar.py
@@ -43,34 +43,34 @@ def get_ref_based_read_seq_and_cigar(read_seq, read_start, ref_seq, ref_seq_star
new_op = "X"
# Include mismatched bases in new read sequence.
new_read_seq += read_seq[read_pos : read_pos + count]
- new_cigar += "%i%s" % (count, new_op)
+ new_cigar += f"{count}{new_op}"
total_count += count
read_pos += count
ref_seq_pos += count
# If end of read falls outside of ref_seq data, leave as M.
if total_count < op_len:
- new_cigar += "%iM" % (op_len - total_count)
+ new_cigar += f"{op_len - total_count}M"
elif op == 1: # Insertion
- new_cigar += "%i%s" % (op_len, cigar_ops[op])
+ new_cigar += f"{op_len}{cigar_ops[op]}"
# Include insertion bases in new read sequence.
new_read_seq += read_seq[read_pos : read_pos + op_len]
read_pos += op_len
elif op in [2, 3, 6]: # Deletion, Skip, or Padding
ref_seq_pos += op_len
- new_cigar += "%i%s" % (op_len, cigar_ops[op])
+ new_cigar += f"{op_len}{cigar_ops[op]}"
elif op == 4: # Soft clipping
read_pos += op_len
- new_cigar += "%i%s" % (op_len, cigar_ops[op])
+ new_cigar += f"{op_len}{cigar_ops[op]}"
elif op == 5: # Hard clipping
- new_cigar += "%i%s" % (op_len, cigar_ops[op])
+ new_cigar += f"{op_len}{cigar_ops[op]}"
elif op in [7, 8]: # Match or mismatch
if op == 8:
# Include mismatched bases in new read sequence.
new_read_seq += read_seq[read_pos : read_pos + op_len]
read_pos += op_len
ref_seq_pos += op_len
- new_cigar += "%i%s" % (op_len, cigar_ops[op])
+ new_cigar += f"{op_len}{cigar_ops[op]}"
return (new_read_seq, new_cigar)
diff --git a/lib/galaxy/visualization/data_providers/genome.py b/lib/galaxy/visualization/data_providers/genome.py
index f241ceebf286..907615712f10 100644
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -251,7 +251,7 @@ def get_genome_data(self, chroms_info, **kwargs):
# create a dummy dict if necessary.
if not chrom_data:
chrom_data = {"data": None}
- chrom_data["region"] = "%s:%i-%i" % (chrom, 0, chrom_len)
+ chrom_data["region"] = f"{chrom}:0-{chrom_len}"
genome_data.append(chrom_data)
return {"data": genome_data, "dataset_type": self.dataset_type}
@@ -970,7 +970,7 @@ def _nth_read_iterator(read_iterator, threshold):
pair = paired_pending[qname]
results.append(
[
- hash("%i_%s" % (pair["start"], qname)),
+ hash(f"{pair['start']}_{qname}"),
pair["start"],
read.pos + read_len,
qname,
@@ -997,7 +997,7 @@ def _nth_read_iterator(read_iterator, threshold):
else:
results.append(
[
- hash("%i_%s" % (read.pos, qname)),
+ hash(f"{read.pos}_{qname}"),
read.pos,
read.pos + read_len,
qname,
@@ -1028,9 +1028,7 @@ def _nth_read_iterator(read_iterator, threshold):
r1 = [read["start"], read["end"], read["cigar"], read["strand"], read["seq"]]
r2 = [read["mate_start"], read["mate_start"]]
- results.append(
- [hash("%i_%s" % (read_start, qname)), read_start, read_end, qname, r1, r2, [read["mapq"], 125]]
- )
+ results.append([hash(f"{read_start}_{qname}"), read_start, read_end, qname, r1, r2, [read["mapq"], 125]])
# Clean up. TODO: is this needed? If so, we'll need a cleanup function after processing the data.
# bamfile.close()
@@ -1052,7 +1050,7 @@ def convert_cigar(read, start_field, cigar_field, seq_field):
cigar_ops = "MIDNSHP=X"
read_cigar = ""
for op_tuple in read[cigar_field]:
- read_cigar += "%i%s" % (op_tuple[1], cigar_ops[op_tuple[0]])
+ read_cigar += f"{op_tuple[1]}{cigar_ops[op_tuple[0]]}"
read[cigar_field] = read_cigar
# Choose method for processing reads. Use reference-based compression
diff --git a/lib/galaxy/web/framework/base.py b/lib/galaxy/web/framework/base.py
index b0c72256d411..875494d3a378 100644
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -530,7 +530,7 @@ def wsgi_status(self):
"""
if isinstance(self.status, int):
exception = webob.exc.status_map.get(self.status)
- return "%d %s" % (exception.code, exception.title)
+ return f"{exception.code} {exception.title}"
else:
return self.status
diff --git a/lib/galaxy/web/framework/middleware/profile.py b/lib/galaxy/web/framework/middleware/profile.py
index 649ec2933596..672c90d4154e 100644
--- a/lib/galaxy/web/framework/middleware/profile.py
+++ b/lib/galaxy/web/framework/middleware/profile.py
@@ -88,8 +88,7 @@ def pstats_as_html(stats, *sel_list):
rval = []
# Number of function calls, primitive calls, total time
rval.append(
- "<div>%d function calls (%d primitive) in %0.3f CPU seconds</div>"
- % (stats.total_calls, stats.prim_calls, stats.total_tt)
+ f"<div>{stats.total_calls} function calls ({stats.prim_calls} primitive) in {stats.total_tt:0.3f} CPU seconds</div>"
)
# Extract functions that match 'sel_list'
funcs, order_message, select_message = get_func_list(stats, sel_list)
@@ -175,4 +174,4 @@ def func_std_string(func_name):
else:
return name
else:
- return "%s:%d(%s)" % func_name
+ return "{}:{}({})".format(*func_name)
diff --git a/lib/galaxy/web/framework/middleware/translogger.py b/lib/galaxy/web/framework/middleware/translogger.py
index aedfb83c6227..cd988621dfac 100644
--- a/lib/galaxy/web/framework/middleware/translogger.py
+++ b/lib/galaxy/web/framework/middleware/translogger.py
@@ -80,17 +80,13 @@ def write_log(self, environ, method, req_uri, start, status, bytes):
offset = time.altzone / 60 / 60 * -100
else:
offset = time.timezone / 60 / 60 * -100
- if offset >= 0:
- offset = "+%0.4d" % (offset)
- elif offset < 0:
- offset = "%0.4d" % (offset)
d = {
"REMOTE_ADDR": environ.get("REMOTE_ADDR") or "-",
"REMOTE_USER": environ.get("REMOTE_USER") or "-",
"REQUEST_METHOD": method,
"REQUEST_URI": req_uri,
"HTTP_VERSION": environ.get("SERVER_PROTOCOL"),
- "time": time.strftime("%d/%b/%Y:%H:%M:%S ", start) + offset,
+ "time": time.strftime("%d/%b/%Y:%H:%M:%S ", start) + f"{int(offset):+05}",
"status": status.split(None, 1)[0],
"bytes": bytes,
"HTTP_REFERER": environ.get("HTTP_REFERER", "-"),
diff --git a/lib/galaxy/web/proxy/__init__.py b/lib/galaxy/web/proxy/__init__.py
index d0212756fa4f..68ef06046513 100644
--- a/lib/galaxy/web/proxy/__init__.py
+++ b/lib/galaxy/web/proxy/__init__.py
@@ -96,7 +96,7 @@ def setup_proxy(
host = host[0 : host.index(":")]
scheme = trans.request.scheme
if not self.dynamic_proxy_external_proxy:
- proxy_url = "%s://%s:%d" % (scheme, host, self.dynamic_proxy_bind_port)
+ proxy_url = f"{scheme}://{host}:{self.dynamic_proxy_bind_port}"
else:
proxy_url = f"{scheme}://{host}{proxy_prefix}"
return {
@@ -160,11 +160,7 @@ def launch_proxy_command(self, config):
args = [
"gxproxy", # Must be on path. TODO: wheel?
"--listenAddr",
- "%s:%d"
- % (
- config.dynamic_proxy_bind_ip,
- config.dynamic_proxy_bind_port,
- ),
+ f"{config.dynamic_proxy_bind_ip}:{config.dynamic_proxy_bind_port}",
"--listenPath",
"/".join(((config.cookie_path or url_for("/")), config.dynamic_proxy_prefix)),
"--cookieName",
@@ -197,7 +193,7 @@ def __init__(self, host=None, port=None):
host = DEFAULT_PROXY_TO_HOST
if port is None:
port = sockets.unused_port()
- log.info("Obtained unused port %d" % port)
+ log.info(f"Obtained unused port {port}")
self.host = host
self.port = port
diff --git a/lib/galaxy/webapps/base/controller.py b/lib/galaxy/webapps/base/controller.py
index 9ee829bfae28..b74eae017646 100644
--- a/lib/galaxy/webapps/base/controller.py
+++ b/lib/galaxy/webapps/base/controller.py
@@ -1297,7 +1297,7 @@ def _scan_json_block(self, meta, prefix=""):
yield from self._scan_json_block(meta[a], f"{prefix}/{a}")
elif isinstance(meta, list):
for i, a in enumerate(meta):
- yield from self._scan_json_block(a, prefix + "[%d]" % (i))
+ yield from self._scan_json_block(a, prefix + f"[{i}]")
else:
# BUG: Everything is cast to string, which can lead to false positives
# for cross type comparisions, ie "True" == True
diff --git a/lib/galaxy/webapps/galaxy/controllers/admin.py b/lib/galaxy/webapps/galaxy/controllers/admin.py
index 634c668b1f01..2ea9e1a339c1 100644
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -539,8 +539,7 @@ def manage_users_and_groups_for_quota(self, trans, payload=None, **kwd):
all_groups.append((group.name, trans.security.encode_id(group.id)))
return {
"title": f"Quota '{quota.name}'",
- "message": "Quota '%s' is currently associated with %d user(s) and %d group(s)."
- % (quota.name, len(in_users), len(in_groups)),
+ "message": f"Quota '{quota.name}' is currently associated with {len(in_users)} user(s) and {len(in_groups)} group(s).",
"status": "info",
"inputs": [
build_select_input("in_groups", "Groups", all_groups, in_groups),
@@ -802,8 +801,7 @@ def manage_users_and_groups_for_role(self, trans, payload=None, **kwd):
all_groups.append((group.name, trans.security.encode_id(group.id)))
return {
"title": f"Role '{role.name}'",
- "message": "Role '%s' is currently associated with %d user(s) and %d group(s)."
- % (role.name, len(in_users), len(in_groups)),
+ "message": f"Role '{role.name}' is currently associated with {len(in_users)} user(s) and {len(in_groups)} group(s).",
"status": "info",
"inputs": [
build_select_input("in_groups", "Groups", all_groups, in_groups),
@@ -889,8 +887,7 @@ def manage_users_and_roles_for_group(self, trans, payload=None, **kwd):
all_roles.append((role.name, trans.security.encode_id(role.id)))
return {
"title": f"Group '{group.name}'",
- "message": "Group '%s' is currently associated with %d user(s) and %d role(s)."
- % (group.name, len(in_users), len(in_roles)),
+ "message": f"Group '{group.name}' is currently associated with {len(in_users)} user(s) and {len(in_roles)} role(s).",
"status": "info",
"inputs": [
build_select_input("in_roles", "Roles", all_roles, in_roles),
@@ -993,11 +990,7 @@ def create_group(self, trans, payload=None, **kwd):
num_in_roles = len(in_roles)
with transaction(trans.sa_session):
trans.sa_session.commit()
- message = "Group '%s' has been created with %d associated users and %d associated roles." % (
- group.name,
- len(in_users),
- num_in_roles,
- )
+ message = f"Group '{group.name}' has been created with {len(in_users)} associated users and {num_in_roles} associated roles."
if auto_create_checked:
message += (
"One of the roles associated with this group is the newly created role with the same name."
@@ -1034,7 +1027,7 @@ def reset_user_password(self, trans, payload=None, **kwd):
trans.sa_session.add(user)
with transaction(trans.sa_session):
trans.sa_session.commit()
- return {"message": "Passwords reset for %d user(s)." % len(users)}
+ return {"message": f"Passwords reset for {len(users)} user(s)."}
else:
return self.message_exception(trans, "Please specify user ids.")
diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py
index 7a8c45a7badf..8ed26bf95603 100644
--- a/lib/galaxy/webapps/galaxy/controllers/async.py
+++ b/lib/galaxy/webapps/galaxy/controllers/async.py
@@ -95,7 +95,7 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd):
STATUS = params.get("STATUS")
if STATUS == "OK":
- key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
+ key = hmac_new(trans.app.config.tool_secret, f"{data.id}:{data.history_id}")
if key != data_secret:
return f"You do not have permission to alter data {data_id}."
if not params.get("GALAXY_URL"):
@@ -203,10 +203,10 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd):
trans.sa_session.commit()
# Need to explicitly create the file
data.dataset.object_store.create(data.dataset)
- trans.log_event("Added dataset %d to history %d" % (data.id, trans.history.id), tool_id=tool_id)
+ trans.log_event(f"Added dataset {data.id} to history {trans.history.id}", tool_id=tool_id)
try:
- key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
+ key = hmac_new(trans.app.config.tool_secret, f"{data.id}:{data.history_id}")
galaxy_url = f"{trans.request.url_path}/async/{tool_id}/{data.id}/{key}"
params.update({"GALAXY_URL": galaxy_url})
params.update({"data_id": data.id})
diff --git a/lib/galaxy/webapps/galaxy/controllers/dataset.py b/lib/galaxy/webapps/galaxy/controllers/dataset.py
index 6d98060692da..4be64e58b4e0 100644
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -724,11 +724,7 @@ def copy_datasets(
else:
target_histories = [history]
if len(target_histories) != len(target_history_ids):
- error_msg = (
- error_msg
- + "You do not have permission to add datasets to %i requested histories. "
- % (len(target_history_ids) - len(target_histories))
- )
+ error_msg += f"You do not have permission to add datasets to {len(target_history_ids) - len(target_histories)} requested histories. "
source_contents = list(
map(trans.sa_session.query(trans.app.model.HistoryDatasetAssociation).get, decoded_dataset_ids)
)
@@ -741,10 +737,10 @@ def copy_datasets(
source_contents.sort(key=lambda content: content.hid)
for content in source_contents:
if content is None:
- error_msg = f"{error_msg}You tried to copy a dataset that does not exist. "
+ error_msg += "You tried to copy a dataset that does not exist. "
invalid_contents += 1
elif content.history != history:
- error_msg = f"{error_msg}You tried to copy a dataset which is not in your current history. "
+ error_msg += "You tried to copy a dataset which is not in your current history. "
invalid_contents += 1
else:
for hist in target_histories:
@@ -772,7 +768,7 @@ def copy_datasets(
)
num_source = len(source_content_ids) - invalid_contents
num_target = len(target_histories)
- done_msg = "%i %s copied to %i %s: %s." % (
+ done_msg = "{} {} copied to {} {}: {}.".format(
num_source,
inflector.cond_plural(num_source, "dataset"),
num_target,
diff --git a/lib/galaxy/webapps/galaxy/controllers/forms.py b/lib/galaxy/webapps/galaxy/controllers/forms.py
index 112f8fa998cc..251b8efee5a4 100644
--- a/lib/galaxy/webapps/galaxy/controllers/forms.py
+++ b/lib/galaxy/webapps/galaxy/controllers/forms.py
@@ -140,8 +140,8 @@ def create_form(self, trans, payload=None, **kwd):
if len(row) >= 6:
for column in range(len(row)):
row[column] = str(row[column]).strip('"')
- prefix = "fields_%i|" % index
- payload[f"{prefix}name"] = "%i_imported_field" % (index + 1)
+ prefix = f"fields_{index}|"
+ payload[f"{prefix}name"] = f"{index + 1}_imported_field"
payload[f"{prefix}label"] = row[0]
payload[f"{prefix}helptext"] = row[1]
payload[f"{prefix}type"] = row[2]
@@ -152,7 +152,7 @@ def create_form(self, trans, payload=None, **kwd):
new_form, message = self.save_form_definition(trans, None, payload)
if new_form is None:
return self.message_exception(trans, message)
- imported = (" with %i imported fields" % index) if index > 0 else ""
+ imported = (f" with {index} imported fields") if index > 0 else ""
message = f"The form '{payload.get('name')}' has been created{imported}."
return {"message": message}
@@ -235,7 +235,7 @@ def get_current_form(self, trans, payload=None, **kwd):
fields = []
index = 0
while True:
- prefix = "fields_%i|" % index
+ prefix = f"fields_{index}|"
if f"{prefix}label" in payload:
field_attributes = ["name", "label", "helptext", "required", "type", "selectlist", "default"]
field_dict = {attr: payload.get(f"{prefix}{attr}") for attr in field_attributes}
diff --git a/lib/galaxy/webapps/galaxy/controllers/history.py b/lib/galaxy/webapps/galaxy/controllers/history.py
index 03739a9e4016..56afb50cda4e 100644
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -231,9 +231,7 @@ def purge_deleted_datasets(self, trans):
except Exception:
log.exception(f"Unable to purge dataset ({hda.dataset.id}) on purge of hda ({hda.id}):")
count += 1
- return trans.show_ok_message(
- "%d datasets have been deleted permanently" % count, refresh_frames=["history"]
- )
+ return trans.show_ok_message(f"{count} datasets have been deleted permanently", refresh_frames=["history"])
return trans.show_error_message("Cannot purge deleted datasets from this session.")
@web.expose_api_anonymous
@@ -267,7 +265,7 @@ def rename(self, trans, payload=None, **kwd):
return {
"title": "Change history name(s)",
"inputs": [
- {"name": "name_%i" % i, "label": f"Current: {h.name}", "value": h.name}
+ {"name": f"name_{i}", "label": f"Current: {h.name}", "value": h.name}
for i, h in enumerate(histories)
],
}
@@ -275,7 +273,7 @@ def rename(self, trans, payload=None, **kwd):
messages = []
for i, h in enumerate(histories):
cur_name = h.get_display_name()
- new_name = payload.get("name_%i" % i)
+ new_name = payload.get(f"name_{i}")
# validate name is empty
if not isinstance(new_name, str) or not new_name.strip():
messages.append(f"You must specify a valid name for History '{cur_name}'.")
diff --git a/lib/galaxy/webapps/galaxy/controllers/root.py b/lib/galaxy/webapps/galaxy/controllers/root.py
index 1d03e3b37647..b08ba4431d8d 100644
--- a/lib/galaxy/webapps/galaxy/controllers/root.py
+++ b/lib/galaxy/webapps/galaxy/controllers/root.py
@@ -122,7 +122,7 @@ def display_as(self, trans: GalaxyWebTransaction, id=None, display_app=None, **k
return data.as_display_type(display_app, **kwd)
else:
trans.response.status = "400"
- return "No data with id=%d" % id
+ return f"No data with id={id}"
@web.expose
def welcome(self, trans: GalaxyWebTransaction, **kwargs):
diff --git a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
index d3337bb8e602..e6bf157400b1 100644
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -145,7 +145,7 @@ def rerun(self, trans, id=None, job_id=None, **kwd):
if job:
job_id = trans.security.encode_id(job.id)
else:
- raise Exception("Failed to get job information for dataset hid %d" % data.hid)
+ raise Exception(f"Failed to get job information for dataset hid {data.hid}")
return trans.response.send_redirect(url_for(controller="root", job_id=job_id))
@web.expose
diff --git a/lib/galaxy/webapps/reports/controllers/history.py b/lib/galaxy/webapps/reports/controllers/history.py
index cb6f184b55ab..f27debf3aba6 100644
--- a/lib/galaxy/webapps/reports/controllers/history.py
+++ b/lib/galaxy/webapps/reports/controllers/history.py
@@ -137,7 +137,7 @@ def history_and_dataset_per_user(self, trans, **kwd):
for user in users:
dataset = datasets.get(user, [0, 0])
history = histories.get(user, 0)
- data[user] = ("%d (%s)" % (history, int_to_octet(dataset[1])), dataset[0])
+ data[user] = (f"{history} ({int_to_octet(dataset[1])})", dataset[0])
return trans.fill_template(
"/webapps/reports/history_and_dataset_per_user.mako",
diff --git a/lib/galaxy/webapps/reports/controllers/system.py b/lib/galaxy/webapps/reports/controllers/system.py
index 0b18f18f2d5d..589d44e79dab 100644
--- a/lib/galaxy/webapps/reports/controllers/system.py
+++ b/lib/galaxy/webapps/reports/controllers/system.py
@@ -87,10 +87,7 @@ def userless_histories(self, trans, **kwd):
if not dataset.deleted:
dataset_count += 1
history_count += 1
- message = (
- "%d userless histories ( including a total of %d datasets ) have not been updated for at least %d days."
- % (history_count, dataset_count, userless_histories_days)
- )
+ message = f"{history_count} userless histories ( including a total of {dataset_count} datasets ) have not been updated for at least {userless_histories_days} days."
else:
message = "Enter the number of days."
return str(userless_histories_days), message
@@ -133,8 +130,8 @@ def deleted_histories(self, trans, **kwd):
pass
history_count += 1
message = (
- "%d histories ( including a total of %d datasets ) were deleted more than %d days ago, but have not yet been purged, "
- "disk space: %s." % (history_count, dataset_count, deleted_histories_days, nice_size(disk_space, True))
+ f"{history_count} histories ( including a total of {dataset_count} datasets ) were deleted more than {deleted_histories_days} days ago, but have not yet been purged, "
+ f"disk space: {nice_size(disk_space, True)}."
)
else:
message = "Enter the number of days."
@@ -163,8 +160,8 @@ def deleted_datasets(self, trans, **kwd):
except Exception:
pass
message = (
- "%d datasets were deleted more than %d days ago, but have not yet been purged,"
- " disk space: %s." % (dataset_count, deleted_datasets_days, nice_size(disk_space, True))
+ f"{dataset_count} datasets were deleted more than {deleted_datasets_days} days ago, but have not yet been purged,"
+ f" disk space: {nice_size(disk_space, True)}."
)
else:
message = "Enter the number of days."
diff --git a/lib/galaxy/webapps/reports/controllers/tools.py b/lib/galaxy/webapps/reports/controllers/tools.py
index 0979f522b3e7..570d67beeba0 100644
--- a/lib/galaxy/webapps/reports/controllers/tools.py
+++ b/lib/galaxy/webapps/reports/controllers/tools.py
@@ -49,19 +49,19 @@ class Tools(BaseUIController):
def formatted(self, date, colored=False):
splited = str(date).split(",")
if len(splited) == 2:
- returned = "%s %dH" % (splited[0], int(splited[1].split(":")[0]))
+ returned = f"{splited[0]} {int(splited[1].split(':')[0])}H"
if colored:
return f'{returned}'
return returned
else:
splited = tuple(float(_) for _ in str(date).split(":"))
if splited[0]:
- returned = "%d h. %d min." % splited[:2]
+ returned = f"{int(splited[0])} h. {int(splited[1])} min."
if colored:
return f'{returned}'
return returned
if splited[1]:
- return "%d min. %d sec." % splited[1:3]
+ return f"{int(splited[1])} min. {int(splited[2])} sec."
return f"{splited[2]:.1f} sec."
@web.expose
@@ -365,7 +365,7 @@ def tool_error_messages(self, trans, **kwd):
sentence += f"{word}"
count += 1
if sentence in new_key:
- new_key = new_key.replace(sentence, f"{word}{' [this line in %d times]' % count}")
+ new_key = new_key.replace(sentence, f"{word} [this line in {count} times]")
data[new_key] = counter[key]
return trans.fill_template(
diff --git a/lib/galaxy/workflow/extract.py b/lib/galaxy/workflow/extract.py
index d96be13b35cc..76b5c6212c86 100644
--- a/lib/galaxy/workflow/extract.py
+++ b/lib/galaxy/workflow/extract.py
@@ -442,7 +442,7 @@ def cleanup(prefix, inputs, values):
group_values = values[key]
for i, rep_values in enumerate(group_values):
rep_index = rep_values["__index__"]
- cleanup("%s%s_%d|" % (prefix, key, rep_index), input.inputs, group_values[i])
+ cleanup(f"{prefix}{key}_{rep_index}|", input.inputs, group_values[i])
elif isinstance(input, Conditional):
# Scrub dynamic resource related parameters from workflows,
# they cause problems and the workflow probably should include
diff --git a/lib/galaxy/workflow/modules.py b/lib/galaxy/workflow/modules.py
index bf7fffae9d90..abb72160652a 100644
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -168,7 +168,7 @@ def to_cwl(value, hda_references, step):
hda_references.append(value)
properties = {
"class": "File",
- "location": "step_input://%d" % len(hda_references),
+ "location": f"step_input://{len(hda_references)}",
}
set_basename_and_derived_properties(
properties, value.dataset.created_from_basename or element_identifier or value.name
diff --git a/lib/galaxy/workflow/refactor/execute.py b/lib/galaxy/workflow/refactor/execute.py
index 264fa843d830..7f050e67eb0a 100644
--- a/lib/galaxy/workflow/refactor/execute.py
+++ b/lib/galaxy/workflow/refactor/execute.py
@@ -116,7 +116,7 @@ def _apply_add_step(self, action: AddStepAction, execution: RefactorActionExecut
order_index = len(steps)
step_dict = {
"order_index": order_index,
- "id": "new_%d" % order_index,
+ "id": f"new_{order_index}",
"type": action.type,
}
if action.tool_state: