Change legacy string formatting to f-strings
DimitriPapadopoulos committed Sep 21, 2023
1 parent b8aeb13 commit 75f407c
Showing 27 changed files with 90 additions and 102 deletions.
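Every hunk below applies the same mechanical rewrite, so a standalone sketch of the three formatting styles involved may help while reading. This is an illustrative example, not code from the repository; the names `name` and `count` are hypothetical:

    name, count = "cache", 3

    # Legacy printf-style interpolation, the main pattern removed here
    msg1 = "Loaded %s in %d blocks" % (name, count)

    # str.format(), the other legacy style this commit removes
    msg2 = "Loaded {} in {} blocks".format(name, count)

    # f-string equivalent (Python 3.6+): expressions interpolate inline
    msg3 = f"Loaded {name} in {count} blocks"

    assert msg1 == msg2 == msg3 == "Loaded cache in 3 blocks"

All three render the same string; the f-string is evaluated eagerly where it appears and needs no separate argument tuple.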
2 changes: 1 addition & 1 deletion fsspec/archive.py
@@ -13,7 +13,7 @@ class AbstractArchiveFileSystem(AbstractFileSystem):
     """
 
     def __str__(self):
-        return "<Archive-like object %s at %s>" % (type(self).__name__, id(self))
+        return f"<Archive-like object {type(self).__name__} at {id(self)}>"
 
     __repr__ = __str__
2 changes: 1 addition & 1 deletion fsspec/asyn.py
@@ -426,7 +426,7 @@ async def _process_limits(self, url, start, end):
                 end = ""
             if isinstance(end, numbers.Integral):
                 end -= 1  # bytes range is inclusive
-        return "bytes=%s-%s" % (start, end)
+        return f"bytes={start}-{end}"
 
     async def _cat_file(self, path, start=None, end=None, **kwargs):
         raise NotImplementedError
24 changes: 12 additions & 12 deletions fsspec/caching.py
@@ -222,8 +222,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
         self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block)
 
     def __repr__(self):
-        return "<BlockCache blocksize={}, size={}, nblocks={}>".format(
-            self.blocksize, self.size, self.nblocks
+        return (
+            f"<BlockCache blocksize={self.blocksize}, "
+            f"size={self.size}, nblocks={self.nblocks}>"
         )
 
     def cache_info(self):

@@ -277,14 +278,13 @@ def _fetch_block(self, block_number):
         """
         if block_number > self.nblocks:
             raise ValueError(
-                "'block_number={}' is greater than the number of blocks ({})".format(
-                    block_number, self.nblocks
-                )
+                f"'block_number={block_number}' is greater than "
+                f"the number of blocks ({self.nblocks})"
             )
 
         start = block_number * self.blocksize
         end = start + self.blocksize
-        logger.info("BlockCache fetching block %d", block_number)
+        logger.info(f"BlockCache fetching block {block_number}")
         block_contents = super()._fetch(start, end)
         return block_contents

@@ -606,8 +606,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
         self._fetch_future_lock = threading.Lock()
 
     def __repr__(self):
-        return "<BackgroundBlockCache blocksize={}, size={}, nblocks={}>".format(
-            self.blocksize, self.size, self.nblocks
+        return (
+            f"<BackgroundBlockCache blocksize={self.blocksize}, "
+            f"size={self.size}, nblocks={self.nblocks}>"
         )
 
     def cache_info(self):

@@ -719,14 +720,13 @@ def _fetch_block(self, block_number, log_info="sync"):
         """
         if block_number > self.nblocks:
             raise ValueError(
-                "'block_number={}' is greater than the number of blocks ({})".format(
-                    block_number, self.nblocks
-                )
+                f"'block_number={block_number}' is greater than "
+                f"the number of blocks ({self.nblocks})"
             )
 
         start = block_number * self.blocksize
         end = start + self.blocksize
-        logger.info("BlockCache fetching block (%s) %d", log_info, block_number)
+        logger.info(f"BlockCache fetching block ({log_info}) {block_number}")
         block_contents = super()._fetch(start, end)
         return block_contents
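One nuance in the two logger.info hunks above: the old calls passed block_number as a deferred argument, so the %-substitution only ran when the record was actually emitted, while an f-string is built before logging even sees it. A minimal sketch of the difference, illustrative rather than repository code:

    import logging

    logger = logging.getLogger("fsspec")
    block_number = 42

    # Lazy: formatting happens inside logging, only if INFO is enabled
    logger.info("BlockCache fetching block %d", block_number)

    # Eager: the string is fully built before the call, enabled or not
    logger.info(f"BlockCache fetching block {block_number}")

For an integer interpolation the cost is negligible, which is presumably why the commit accepts the trade for consistency.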
6 changes: 2 additions & 4 deletions fsspec/compression.py
@@ -39,13 +39,11 @@ def register_compression(name, callback, extensions, force=False):
 
     # Validate registration
     if name in compr and not force:
-        raise ValueError("Duplicate compression registration: %s" % name)
+        raise ValueError(f"Duplicate compression registration: {name}")
 
     for ext in extensions:
         if ext in fsspec.utils.compressions and not force:
-            raise ValueError(
-                "Duplicate compression file extension: %s (%s)" % (ext, name)
-            )
+            raise ValueError(f"Duplicate compression file extension: {ext} ({name})")
 
     compr[name] = callback
6 changes: 3 additions & 3 deletions fsspec/core.py
@@ -92,7 +92,7 @@ def __reduce__(self):
         )
 
     def __repr__(self):
-        return "<OpenFile '{}'>".format(self.path)
+        return f"<OpenFile '{self.path}'>"
 
     def __enter__(self):
         mode = self.mode.replace("t", "").replace("b", "") + "b"

@@ -195,7 +195,7 @@ def __getitem__(self, item):
         return out
 
     def __repr__(self):
-        return "<List of %s OpenFile instances>" % len(self)
+        return f"<List of {len(self)} OpenFile instances>"
 
 
 def open_files(

@@ -498,7 +498,7 @@ def get_compression(urlpath, compression):
     if compression == "infer":
         compression = infer_compression(urlpath)
     if compression is not None and compression not in compr:
-        raise ValueError("Compression type %s not supported" % compression)
+        raise ValueError(f"Compression type {compression} not supported")
     return compression
2 changes: 1 addition & 1 deletion fsspec/fuse.py
@@ -275,7 +275,7 @@ def format_help(self):
     for item in args.option or []:
         key, sep, value = item.partition("=")
         if not sep:
-            parser.error(message="Wrong option: {!r}".format(item))
+            parser.error(message=f"Wrong option: {item!r}")
         val = value.lower()
         if val.endswith("[int]"):
             value = int(value[: -len("[int]")])
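The fuse.py hunk carries the !r conversion over unchanged: both "{!r}".format(item) and f"{item!r}" run repr() on the value before interpolating. A quick illustrative check with a hypothetical option string:

    item = "ro=true"
    assert "Wrong option: {!r}".format(item) == f"Wrong option: {item!r}"
    assert f"{item!r}" == "'ro=true'"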
7 changes: 3 additions & 4 deletions fsspec/gui.py
@@ -70,7 +70,7 @@ class which owns it.
         same name.
         """
         if name not in self.signals:
-            raise ValueError("Attempt to assign an undeclared signal: %s" % name)
+            raise ValueError(f"Attempt to assign an undeclared signal: {name}")
         self._sigs[name] = {
             "widget": widget,
             "callbacks": [],

@@ -141,7 +141,7 @@ def _emit(self, sig, value=None):
 
         Calling of callbacks will halt whenever one returns False.
         """
-        logger.log(self._sigs[sig]["log"], "{}: {}".format(sig, value))
+        logger.log(self._sigs[sig]["log"], f"{sig}: {value}")
         for callback in self._sigs[sig]["callbacks"]:
             if isinstance(callback, str):
                 self._emit(callback)

@@ -153,8 +153,7 @@ def _emit(self, sig, value=None):
                         break
             except Exception as e:
                 logger.exception(
-                    "Exception (%s) while executing callback for signal: %s"
-                    "" % (e, sig)
+                    f"Exception ({e}) while executing callback for signal: {sig}"
                 )
 
     def show(self, threads=False):
22 changes: 11 additions & 11 deletions fsspec/implementations/cached.py
@@ -283,10 +283,10 @@ def _open(
             hash, blocks = detail["fn"], detail["blocks"]
             if blocks is True:
                 # stored file is complete
-                logger.debug("Opening local copy of %s" % path)
+                logger.debug(f"Opening local copy of {path}")
                 return open(fn, mode)
             # TODO: action where partial file exists in read-only cache
-            logger.debug("Opening partially cached copy of %s" % path)
+            logger.debug(f"Opening partially cached copy of {path}")
         else:
             hash = self._mapper(path)
             fn = os.path.join(self.storage[-1], hash)

@@ -299,7 +299,7 @@ def _open(
                 "uid": self.fs.ukey(path),
             }
             self._metadata.update_file(path, detail)
-            logger.debug("Creating local sparse file for %s" % path)
+            logger.debug(f"Creating local sparse file for {path}")
 
         # call target filesystems open
         self._mkcache()

@@ -322,9 +322,9 @@ def _open(
         if "blocksize" in detail:
             if detail["blocksize"] != f.blocksize:
                 raise BlocksizeMismatchError(
-                    "Cached file must be reopened with same block"
-                    "size as original (old: %i, new %i)"
-                    "" % (detail["blocksize"], f.blocksize)
+                    f"Cached file must be reopened with same block"
+                    f" size as original (old: {detail['blocksize']},"
+                    f" new {f.blocksize})"
                 )
         else:
             detail["blocksize"] = f.blocksize

@@ -547,7 +547,7 @@ def _make_local_details(self, path):
             "uid": self.fs.ukey(path),
         }
         self._metadata.update_file(path, detail)
-        logger.debug("Copying %s to local cache" % path)
+        logger.debug(f"Copying {path} to local cache")
         return fn
 
     def cat(

@@ -604,7 +604,7 @@ def _open(self, path, mode="rb", **kwargs):
             detail, fn = detail
             _, blocks = detail["fn"], detail["blocks"]
             if blocks is True:
-                logger.debug("Opening local copy of %s" % path)
+                logger.debug(f"Opening local copy of {path}")
 
                 # In order to support downstream filesystems to be able to
                 # infer the compression from the original filename, like

@@ -616,8 +616,8 @@ def _open(self, path, mode="rb", **kwargs):
                 return f
             else:
                 raise ValueError(
-                    "Attempt to open partially cached file %s"
-                    "as a wholly cached file" % path
+                    f"Attempt to open partially cached file {path}"
+                    f" as a wholly cached file"
                 )
         else:
             fn = self._make_local_details(path)

@@ -700,7 +700,7 @@ def _open(self, path, mode="rb", **kwargs):
 
         sha = self._mapper(path)
         fn = os.path.join(self.storage[-1], sha)
-        logger.debug("Copying %s to local cache" % path)
+        logger.debug(f"Copying {path} to local cache")
         kwargs["mode"] = mode
 
         self._mkcache()
8 changes: 4 additions & 4 deletions fsspec/implementations/ftp.py
@@ -156,7 +156,7 @@ def cb(x):
             outfile.write(x)
 
         self.ftp.retrbinary(
-            "RETR %s" % rpath,
+            f"RETR {rpath}",
             blocksize=self.blocksize,
             callback=cb,
         )

@@ -172,7 +172,7 @@ def cb(x):
             out.append(x)
 
         self.ftp.retrbinary(
-            "RETR %s" % path,
+            f"RETR {path}",
             blocksize=self.blocksize,
             rest=start,
             callback=cb,

@@ -321,7 +321,7 @@ def callback(x):
 
         try:
             self.fs.ftp.retrbinary(
-                "RETR %s" % self.path,
+                f"RETR {self.path}",
                 blocksize=self.blocksize,
                 rest=start,
                 callback=callback,

@@ -339,7 +339,7 @@ def callback(x):
     def _upload_chunk(self, final=False):
         self.buffer.seek(0)
         self.fs.ftp.storbinary(
-            "STOR " + self.path, self.buffer, blocksize=self.blocksize, rest=self.offset
+            f"STOR {self.path}", self.buffer, blocksize=self.blocksize, rest=self.offset
         )
         return True
6 changes: 3 additions & 3 deletions fsspec/implementations/git.py
@@ -81,7 +81,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
                             "type": "directory",
                             "name": "/".join([path, obj.name]).lstrip("/"),
                             "hex": obj.hex,
-                            "mode": "%o" % obj.filemode,
+                            "mode": f"{obj.filemode:o}",
                             "size": 0,
                         }
                     )

@@ -91,7 +91,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
                             "type": "file",
                             "name": "/".join([path, obj.name]).lstrip("/"),
                             "hex": obj.hex,
-                            "mode": "%o" % obj.filemode,
+                            "mode": f"{obj.filemode:o}",
                             "size": obj.size,
                         }
                     )

@@ -102,7 +102,7 @@ def ls(self, path, detail=True, ref=None, **kwargs):
                     "type": "file",
                     "name": obj.name,
                     "hex": obj.hex,
-                    "mode": "%o" % obj.filemode,
+                    "mode": f"{obj.filemode:o}",
                     "size": obj.size,
                 }
             ]
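The git.py hunks show how printf conversion types translate: the %o octal conversion becomes a :o format spec after the colon inside the braces. An illustrative check with a typical value, not repository code:

    mode = 0o100644  # common git filemode for a regular file

    # Legacy octal conversion
    assert "%o" % mode == "100644"

    # f-string: everything after ':' is a standard format spec
    assert f"{mode:o}" == "100644"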
10 changes: 3 additions & 7 deletions fsspec/implementations/github.py
@@ -79,9 +79,7 @@ def repos(cls, org_or_user, is_org=True):
         List of string
         """
         r = requests.get(
-            "https://api.github.com/{part}/{org}/repos".format(
-                part=["users", "orgs"][is_org], org=org_or_user
-            )
+            f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos"
         )
         r.raise_for_status()
         return [repo["name"] for repo in r.json()]

@@ -90,8 +88,7 @@ def repos(cls, org_or_user, is_org=True):
     def tags(self):
         """Names of tags in the repo"""
         r = requests.get(
-            "https://api.github.com/repos/{org}/{repo}/tags"
-            "".format(org=self.org, repo=self.repo),
+            f"https://api.github.com/repos/{self.org}/{self.repo}/tags",
             **self.kw,
         )
         r.raise_for_status()

@@ -101,8 +98,7 @@ def tags(self):
     def branches(self):
         """Names of branches in the repo"""
         r = requests.get(
-            "https://api.github.com/repos/{org}/{repo}/branches"
-            "".format(org=self.org, repo=self.repo),
+            f"https://api.github.com/repos/{self.org}/{self.repo}/branches",
             **self.kw,
         )
         r.raise_for_status()
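The repos() hunk is the densest conversion in the commit because the interpolated expression contains its own string literals; before Python 3.12, those inner quotes must differ from the quotes delimiting the f-string itself. An illustrative walk-through with hypothetical values:

    is_org = True
    org_or_user = "fsspec"

    # bool subclasses int, so it can index the two-element list
    part = ["users", "orgs"][is_org]  # -> "orgs"

    # single quotes inside, double quotes outside
    url = f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos"
    assert url == "https://api.github.com/orgs/fsspec/repos"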
8 changes: 4 additions & 4 deletions fsspec/implementations/http.py
@@ -655,7 +655,7 @@ async def async_fetch_range(self, start, end):
         logger.debug(f"Fetch range for {self}: {start}-{end}")
         kwargs = self.kwargs.copy()
         headers = kwargs.pop("headers", {}).copy()
-        headers["Range"] = "bytes=%i-%i" % (start, end - 1)
+        headers["Range"] = f"bytes={start}-{end - 1}"
         logger.debug(str(self.url) + " : " + headers["Range"])
         r = await self.session.get(
             self.fs.encode_url(self.url), headers=headers, **kwargs

@@ -812,7 +812,7 @@ async def get_range(session, url, start, end, file=None, **kwargs):
     # explicit get a range when we know it must be safe
     kwargs = kwargs.copy()
     headers = kwargs.pop("headers", {}).copy()
-    headers["Range"] = "bytes=%i-%i" % (start, end - 1)
+    headers["Range"] = f"bytes={start}-{end - 1}"
     r = await session.get(url, headers=headers, **kwargs)
     r.raise_for_status()
     async with r:

@@ -831,7 +831,7 @@ async def _file_info(url, session, size_policy="head", **kwargs):
     Default operation is to explicitly allow redirects and use encoding
     'identity' (no compression) to get the true size of the target.
     """
-    logger.debug("Retrieve file size for %s" % url)
+    logger.debug(f"Retrieve file size for {url}")
    kwargs = kwargs.copy()
    ar = kwargs.pop("allow_redirects", True)
    head = kwargs.get("headers", {}).copy()

@@ -844,7 +844,7 @@ async def _file_info(url, session, size_policy="head", **kwargs):
     elif size_policy == "get":
         r = await session.get(url, allow_redirects=ar, **kwargs)
     else:
-        raise TypeError('size_policy must be "head" or "get", got %s' "" % size_policy)
+        raise TypeError(f'size_policy must be "head" or "get", got {size_policy}')
     async with r:
         r.raise_for_status()
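The http.py hunks replace %i conversions, and the arithmetic moves inside the braces: an f-string placeholder takes an arbitrary expression, not just a name. Illustrative only:

    start, end = 0, 1024

    # Legacy: the subtraction happens out in the argument tuple
    assert "bytes=%i-%i" % (start, end - 1) == "bytes=0-1023"

    # f-string: the expression is evaluated inline
    assert f"bytes={start}-{end - 1}" == "bytes=0-1023"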
3 changes: 1 addition & 2 deletions fsspec/implementations/libarchive.py
@@ -122,8 +122,7 @@ def __init__(
         files = open_files(fo, protocol=target_protocol, **(target_options or {}))
         if len(files) != 1:
             raise ValueError(
-                'Path "{}" did not resolve to exactly'
-                'one file: "{}"'.format(fo, files)
+                f'Path "{fo}" did not resolve to exactly one file: "{files}"'
             )
         fo = files[0]
         self.of = fo
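Beyond style, this conversion fixes a real defect visible in the old lines: the adjacent string literals concatenated without a space, yielding "...resolve to exactlyone file...". A minimal reproduction of the pitfall, with hypothetical values:

    fo = "archive.tar"
    files = ["a", "b"]

    # Adjacent literals join with no separator, then .format() applies
    old = 'Path "{}" did not resolve to exactly' 'one file: "{}"'.format(fo, files)
    assert "exactlyone" in old  # the bug

    # The single f-string restores the intended space
    new = f'Path "{fo}" did not resolve to exactly one file: "{files}"'
    assert "exactly one" in new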