Commit

Change legacy string formatting to f-strings (#1374)
DimitriPapadopoulos authored Oct 4, 2023
1 parent 7036b5a commit edf9b73
Showing 33 changed files with 124 additions and 135 deletions.
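
The pattern repeats throughout: printf-style "%" interpolation and str.format() calls, plus a handful of plain string concatenations, are rewritten as f-strings with no change in behaviour. As a minimal sketch of the three conversions (illustrative values, not code from the diff):

    name, size = "archive.zip", 1024  # illustrative values only

    # printf-style "%" interpolation -> f-string
    assert "file %s has %d bytes" % (name, size) == f"file {name} has {size} bytes"

    # str.format() -> f-string
    assert "file {} has {} bytes".format(name, size) == f"file {name} has {size} bytes"

    # plain concatenation -> f-string (each value is converted with str())
    assert "path: " + name + "/" + str(size) == f"path: {name}/{size}"
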
2 changes: 1 addition & 1 deletion fsspec/archive.py
@@ -13,7 +13,7 @@ class AbstractArchiveFileSystem(AbstractFileSystem):
"""

def __str__(self):
return "<Archive-like object %s at %s>" % (type(self).__name__, id(self))
return f"<Archive-like object {type(self).__name__} at {id(self)}>"

__repr__ = __str__

2 changes: 1 addition & 1 deletion fsspec/asyn.py
@@ -426,7 +426,7 @@ async def _process_limits(self, url, start, end):
end = ""
if isinstance(end, numbers.Integral):
end -= 1 # bytes range is inclusive
return "bytes=%s-%s" % (start, end)
return f"bytes={start}-{end}"

async def _cat_file(self, path, start=None, end=None, **kwargs):
raise NotImplementedError
20 changes: 10 additions & 10 deletions fsspec/caching.py
@@ -222,8 +222,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block)

def __repr__(self):
return "<BlockCache blocksize={}, size={}, nblocks={}>".format(
self.blocksize, self.size, self.nblocks
return (
f"<BlockCache blocksize={self.blocksize}, "
f"size={self.size}, nblocks={self.nblocks}>"
)

def cache_info(self):
@@ -277,9 +278,8 @@ def _fetch_block(self, block_number):
"""
if block_number > self.nblocks:
raise ValueError(
"'block_number={}' is greater than the number of blocks ({})".format(
block_number, self.nblocks
)
f"'block_number={block_number}' is greater than "
f"the number of blocks ({self.nblocks})"
)

start = block_number * self.blocksize
@@ -606,8 +606,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
self._fetch_future_lock = threading.Lock()

def __repr__(self):
return "<BackgroundBlockCache blocksize={}, size={}, nblocks={}>".format(
self.blocksize, self.size, self.nblocks
return (
f"<BackgroundBlockCache blocksize={self.blocksize}, "
f"size={self.size}, nblocks={self.nblocks}>"
)

def cache_info(self):
@@ -719,9 +720,8 @@ def _fetch_block(self, block_number, log_info="sync"):
"""
if block_number > self.nblocks:
raise ValueError(
"'block_number={}' is greater than the number of blocks ({})".format(
block_number, self.nblocks
)
f"'block_number={block_number}' is greater than "
f"the number of blocks ({self.nblocks})"
)

start = block_number * self.blocksize
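
Where a single .format() call used to span several lines, the replacement above relies on implicit concatenation of adjacent string literals inside parentheses; each fragment needs its own f prefix, or its braces are treated as literal text. A short sketch with made-up values:

    blocksize, size, nblocks = 5, 100, 20  # made-up values

    # adjacent literals are joined at compile time into one string
    r = (
        f"<BlockCache blocksize={blocksize}, "
        f"size={size}, nblocks={nblocks}>"
    )
    assert r == "<BlockCache blocksize=5, size=100, nblocks=20>"
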
6 changes: 2 additions & 4 deletions fsspec/compression.py
@@ -39,13 +39,11 @@ def register_compression(name, callback, extensions, force=False):

# Validate registration
if name in compr and not force:
raise ValueError("Duplicate compression registration: %s" % name)
raise ValueError(f"Duplicate compression registration: {name}")

for ext in extensions:
if ext in fsspec.utils.compressions and not force:
-raise ValueError(
-    "Duplicate compression file extension: %s (%s)" % (ext, name)
-)
+raise ValueError(f"Duplicate compression file extension: {ext} ({name})")

compr[name] = callback

6 changes: 3 additions & 3 deletions fsspec/core.py
@@ -92,7 +92,7 @@ def __reduce__(self):
)

def __repr__(self):
return "<OpenFile '{}'>".format(self.path)
return f"<OpenFile '{self.path}'>"

def __enter__(self):
mode = self.mode.replace("t", "").replace("b", "") + "b"
@@ -195,7 +195,7 @@ def __getitem__(self, item):
return out

def __repr__(self):
return "<List of %s OpenFile instances>" % len(self)
return f"<List of {len(self)} OpenFile instances>"


def open_files(
@@ -498,7 +498,7 @@ def get_compression(urlpath, compression):
if compression == "infer":
compression = infer_compression(urlpath)
if compression is not None and compression not in compr:
raise ValueError("Compression type %s not supported" % compression)
raise ValueError(f"Compression type {compression} not supported")
return compression


2 changes: 1 addition & 1 deletion fsspec/fuse.py
@@ -275,7 +275,7 @@ def format_help(self):
for item in args.option or []:
key, sep, value = item.partition("=")
if not sep:
parser.error(message="Wrong option: {!r}".format(item))
parser.error(message=f"Wrong option: {item!r}")
val = value.lower()
if val.endswith("[int]"):
value = int(value[: -len("[int]")])
6 changes: 3 additions & 3 deletions fsspec/gui.py
@@ -70,7 +70,7 @@ class which owns it.
same name.
"""
if name not in self.signals:
raise ValueError("Attempt to assign an undeclared signal: %s" % name)
raise ValueError(f"Attempt to assign an undeclared signal: {name}")
self._sigs[name] = {
"widget": widget,
"callbacks": [],
@@ -141,7 +141,7 @@ def _emit(self, sig, value=None):
Calling of callbacks will halt whenever one returns False.
"""
-logger.log(self._sigs[sig]["log"], "{}: {}".format(sig, value))
+logger.log(self._sigs[sig]["log"], f"{sig}: {value}")
for callback in self._sigs[sig]["callbacks"]:
if isinstance(callback, str):
self._emit(callback)
@@ -319,7 +319,7 @@ def fs(self):
def urlpath(self):
"""URL of currently selected item"""
return (
-(self.protocol.value + "://" + self.main.value[0])
+(f"{self.protocol.value}://{self.main.value[0]}")
if self.main.value
else None
)
22 changes: 11 additions & 11 deletions fsspec/implementations/cached.py
@@ -304,10 +304,10 @@ def _open(
hash, blocks = detail["fn"], detail["blocks"]
if blocks is True:
# stored file is complete
logger.debug("Opening local copy of %s" % path)
logger.debug("Opening local copy of %s", path)
return open(fn, mode)
# TODO: action where partial file exists in read-only cache
logger.debug("Opening partially cached copy of %s" % path)
logger.debug("Opening partially cached copy of %s", path)
else:
hash = self._mapper(path)
fn = os.path.join(self.storage[-1], hash)
@@ -320,7 +320,7 @@
"uid": self.fs.ukey(path),
}
self._metadata.update_file(path, detail)
logger.debug("Creating local sparse file for %s" % path)
logger.debug("Creating local sparse file for %s", path)

# call target filesystems open
self._mkcache()
@@ -343,9 +343,9 @@ def _open(
if "blocksize" in detail:
if detail["blocksize"] != f.blocksize:
raise BlocksizeMismatchError(
"Cached file must be reopened with same block"
"size as original (old: %i, new %i)"
"" % (detail["blocksize"], f.blocksize)
f"Cached file must be reopened with same block"
f" size as original (old: {detail['blocksize']},"
f" new {f.blocksize})"
)
else:
detail["blocksize"] = f.blocksize
@@ -570,7 +570,7 @@ def _make_local_details(self, path):
"uid": self.fs.ukey(path),
}
self._metadata.update_file(path, detail)
logger.debug("Copying %s to local cache" % path)
logger.debug("Copying %s to local cache", path)
return fn

def cat(
@@ -627,7 +627,7 @@ def _open(self, path, mode="rb", **kwargs):
detail, fn = detail
_, blocks = detail["fn"], detail["blocks"]
if blocks is True:
logger.debug("Opening local copy of %s" % path)
logger.debug("Opening local copy of %s", path)

# In order to support downstream filesystems to be able to
# infer the compression from the original filename, like
@@ -639,8 +639,8 @@ def _open(self, path, mode="rb", **kwargs):
return f
else:
raise ValueError(
"Attempt to open partially cached file %s"
"as a wholly cached file" % path
f"Attempt to open partially cached file {path}"
f" as a wholly cached file"
)
else:
fn = self._make_local_details(path)
@@ -723,7 +723,7 @@ def _open(self, path, mode="rb", **kwargs):

sha = self._mapper(path)
fn = os.path.join(self.storage[-1], sha)
logger.debug("Copying %s to local cache" % path)
logger.debug("Copying %s to local cache", path)
kwargs["mode"] = mode

self._mkcache()
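
Note that the logger.debug() calls in this file are deliberately not converted to f-strings: the eager "%" interpolation is dropped and the value is passed as an argument instead, so the message is only formatted if the record is actually emitted. A sketch of the difference, with an illustrative logger and path:

    import logging

    logger = logging.getLogger("example")  # illustrative logger and path
    path = "/tmp/data.bin"

    logger.debug("Opening local copy of %s" % path)  # old: formats eagerly, even with DEBUG off
    logger.debug(f"Opening local copy of {path}")    # f-string: still formats eagerly
    logger.debug("Opening local copy of %s", path)   # new: formats only when the record is emitted
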
8 changes: 4 additions & 4 deletions fsspec/implementations/ftp.py
@@ -156,7 +156,7 @@ def cb(x):
outfile.write(x)

self.ftp.retrbinary(
"RETR %s" % rpath,
f"RETR {rpath}",
blocksize=self.blocksize,
callback=cb,
)
@@ -172,7 +172,7 @@ def cb(x):
out.append(x)

self.ftp.retrbinary(
"RETR %s" % path,
f"RETR {path}",
blocksize=self.blocksize,
rest=start,
callback=cb,
@@ -321,7 +321,7 @@ def callback(x):

try:
self.fs.ftp.retrbinary(
"RETR %s" % self.path,
f"RETR {self.path}",
blocksize=self.blocksize,
rest=start,
callback=callback,
@@ -339,7 +339,7 @@ def callback(x):
def _upload_chunk(self, final=False):
self.buffer.seek(0)
self.fs.ftp.storbinary(
"STOR " + self.path, self.buffer, blocksize=self.blocksize, rest=self.offset
f"STOR {self.path}", self.buffer, blocksize=self.blocksize, rest=self.offset
)
return True

6 changes: 3 additions & 3 deletions fsspec/implementations/git.py
@@ -81,7 +81,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
"type": "directory",
"name": "/".join([path, obj.name]).lstrip("/"),
"hex": obj.hex,
"mode": "%o" % obj.filemode,
"mode": f"{obj.filemode:o}",
"size": 0,
}
)
@@ -91,7 +91,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
"type": "file",
"name": "/".join([path, obj.name]).lstrip("/"),
"hex": obj.hex,
"mode": "%o" % obj.filemode,
"mode": f"{obj.filemode:o}",
"size": obj.size,
}
)
@@ -102,7 +102,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
"type": "file",
"name": obj.name,
"hex": obj.hex,
"mode": "%o" % obj.filemode,
"mode": f"{obj.filemode:o}",
"size": obj.size,
}
]
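
The "%o" changes above show how printf conversion types carry over to f-strings: the type character moves after the colon of the format spec inside the braces. A few equivalences, using an illustrative file mode:

    filemode = 0o100644  # illustrative git file mode

    assert "%o" % filemode == f"{filemode:o}" == "100644"  # octal
    assert "%x" % 255 == f"{255:x}" == "ff"                # hex
    assert "%05d" % 42 == f"{42:05d}" == "00042"           # zero-padded decimal
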
10 changes: 3 additions & 7 deletions fsspec/implementations/github.py
@@ -79,9 +79,7 @@ def repos(cls, org_or_user, is_org=True):
List of string
"""
r = requests.get(
"https://api.github.com/{part}/{org}/repos".format(
part=["users", "orgs"][is_org], org=org_or_user
)
f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos"
)
r.raise_for_status()
return [repo["name"] for repo in r.json()]
@@ -90,8 +88,7 @@ def repos(cls, org_or_user, is_org=True):
def tags(self):
"""Names of tags in the repo"""
r = requests.get(
"https://api.github.com/repos/{org}/{repo}/tags"
"".format(org=self.org, repo=self.repo),
f"https://api.github.com/repos/{self.org}/{self.repo}/tags",
**self.kw,
)
r.raise_for_status()
@@ -101,8 +98,7 @@ def tags(self):
def branches(self):
"""Names of branches in the repo"""
r = requests.get(
"https://api.github.com/repos/{org}/{repo}/branches"
"".format(org=self.org, repo=self.repo),
f"https://api.github.com/repos/{self.org}/{self.repo}/branches",
**self.kw,
)
r.raise_for_status()
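
The rewritten repos URL also shows that a replacement field may hold an arbitrary expression, in this case a list indexed by a bool (valid because bool is a subclass of int). One caveat: before Python 3.12, quotes inside the braces must differ from the quotes delimiting the f-string itself. A sketch with illustrative values:

    org_or_user, is_org = "fsspec", True  # illustrative values

    url = f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos"
    assert url == "https://api.github.com/orgs/fsspec/repos"
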
12 changes: 6 additions & 6 deletions fsspec/implementations/http.py
@@ -165,7 +165,7 @@ async def _ls_real(self, url, detail=True, **kwargs):
l = l[1]
if l.startswith("/") and len(l) > 1:
# absolute URL on this server
-l = parts.scheme + "://" + parts.netloc + l
+l = f"{parts.scheme}://{parts.netloc}{l}"
if l.startswith("http"):
if self.same_schema and l.startswith(url.rstrip("/") + "/"):
out.add(l)
@@ -655,8 +655,8 @@ async def async_fetch_range(self, start, end):
logger.debug(f"Fetch range for {self}: {start}-{end}")
kwargs = self.kwargs.copy()
headers = kwargs.pop("headers", {}).copy()
headers["Range"] = "bytes=%i-%i" % (start, end - 1)
logger.debug(str(self.url) + " : " + headers["Range"])
headers["Range"] = f"bytes={start}-{end - 1}"
logger.debug(f"{self.url} : {headers['Range']}")
r = await self.session.get(
self.fs.encode_url(self.url), headers=headers, **kwargs
)
@@ -812,7 +812,7 @@ async def get_range(session, url, start, end, file=None, **kwargs):
# explicit get a range when we know it must be safe
kwargs = kwargs.copy()
headers = kwargs.pop("headers", {}).copy()
headers["Range"] = "bytes=%i-%i" % (start, end - 1)
headers["Range"] = f"bytes={start}-{end - 1}"
r = await session.get(url, headers=headers, **kwargs)
r.raise_for_status()
async with r:
@@ -831,7 +831,7 @@ async def _file_info(url, session, size_policy="head", **kwargs):
Default operation is to explicitly allow redirects and use encoding
'identity' (no compression) to get the true size of the target.
"""
logger.debug("Retrieve file size for %s" % url)
logger.debug("Retrieve file size for %s", url)
kwargs = kwargs.copy()
ar = kwargs.pop("allow_redirects", True)
head = kwargs.get("headers", {}).copy()
@@ -844,7 +844,7 @@
elif size_policy == "get":
r = await session.get(url, allow_redirects=ar, **kwargs)
else:
-raise TypeError('size_policy must be "head" or "get", got %s' "" % size_policy)
+raise TypeError(f'size_policy must be "head" or "get", got {size_policy}')
async with r:
r.raise_for_status()

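
The Range-header changes rely on expressions being evaluated inside the braces, so the inclusive end byte can be computed inline; for integers the old %i conversion and the f-string's default str() conversion produce identical text. A sketch with an illustrative byte range:

    start, end = 0, 1024  # illustrative range, end exclusive

    header = f"bytes={start}-{end - 1}"  # HTTP Range is inclusive of the end byte
    assert header == "bytes=%i-%i" % (start, end - 1) == "bytes=0-1023"
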
10 changes: 5 additions & 5 deletions fsspec/implementations/jupyter.py
@@ -40,7 +40,7 @@ def __init__(self, url, tok=None, **kwargs):

def ls(self, path, detail=True, **kwargs):
path = self._strip_protocol(path)
-r = self.session.get(self.url + "/" + path)
+r = self.session.get(f"{self.url}/{path}")
if r.status_code == 404:
return FileNotFoundError(path)
r.raise_for_status()
@@ -61,7 +61,7 @@ def ls(self, path, detail=True, **kwargs):

def cat_file(self, path, start=None, end=None, **kwargs):
path = self._strip_protocol(path)
-r = self.session.get(self.url + "/" + path)
+r = self.session.get(f"{self.url}/{path}")
if r.status_code == 404:
return FileNotFoundError(path)
r.raise_for_status()
@@ -83,7 +83,7 @@ def pipe_file(self, path, value, **_):
"format": "base64",
"type": "file",
}
-self.session.put(self.url + "/" + path, json=json)
+self.session.put(f"{self.url}/{path}", json=json)

def mkdir(self, path, create_parents=True, **kwargs):
path = self._strip_protocol(path)
@@ -96,11 +96,11 @@ def mkdir(self, path, create_parents=True, **kwargs):
"content": None,
"type": "directory",
}
-self.session.put(self.url + "/" + path, json=json)
+self.session.put(f"{self.url}/{path}", json=json)

def _rm(self, path):
path = self._strip_protocol(path)
-self.session.delete(self.url + "/" + path)
+self.session.delete(f"{self.url}/{path}")

def _open(self, path, mode="rb", **kwargs):
path = self._strip_protocol(path)
(Diff truncated: the remaining changed files are not shown.)
