From 75f407c0789454e4650ebb11815efba9d2608526 Mon Sep 17 00:00:00 2001 From: Dimitri Papadopoulos <3234522+DimitriPapadopoulos@users.noreply.github.com> Date: Thu, 21 Sep 2023 20:28:46 +0200 Subject: [PATCH] Change legacy string formatting to f-strings --- fsspec/archive.py | 2 +- fsspec/asyn.py | 2 +- fsspec/caching.py | 24 ++++++++++----------- fsspec/compression.py | 6 ++---- fsspec/core.py | 6 +++--- fsspec/fuse.py | 2 +- fsspec/gui.py | 7 +++--- fsspec/implementations/cached.py | 22 +++++++++---------- fsspec/implementations/ftp.py | 8 +++---- fsspec/implementations/git.py | 6 +++--- fsspec/implementations/github.py | 10 +++------ fsspec/implementations/http.py | 8 +++---- fsspec/implementations/libarchive.py | 3 +-- fsspec/implementations/sftp.py | 18 ++++++++-------- fsspec/implementations/smb.py | 4 ++-- fsspec/implementations/tests/test_cached.py | 4 ++-- fsspec/implementations/tests/test_ftp.py | 4 ++-- fsspec/implementations/tests/test_local.py | 8 +++---- fsspec/implementations/tests/test_sftp.py | 4 ++-- fsspec/implementations/webhdfs.py | 4 +--- fsspec/mapping.py | 4 ++-- fsspec/registry.py | 10 ++++----- fsspec/spec.py | 10 ++++----- fsspec/tests/conftest.py | 8 +++---- fsspec/tests/test_spec.py | 2 +- fsspec/tests/test_utils.py | 2 +- fsspec/utils.py | 4 ++-- 27 files changed, 90 insertions(+), 102 deletions(-) diff --git a/fsspec/archive.py b/fsspec/archive.py index dc5c1490b..9bdd8efce 100644 --- a/fsspec/archive.py +++ b/fsspec/archive.py @@ -13,7 +13,7 @@ class AbstractArchiveFileSystem(AbstractFileSystem): """ def __str__(self): - return "<Archive-like object %s at %s>" % (type(self).__name__, id(self)) + return f"<Archive-like object {type(self).__name__} at {id(self)}>" __repr__ = __str__ diff --git a/fsspec/asyn.py b/fsspec/asyn.py index 347e262ad..4ac1419f0 100644 --- a/fsspec/asyn.py +++ b/fsspec/asyn.py @@ -426,7 +426,7 @@ async def _process_limits(self, url, start, end): end = "" if isinstance(end, numbers.Integral): end -= 1 # bytes range is inclusive - return "bytes=%s-%s" % (start, end) + return f"bytes={start}-{end}" async def _cat_file(self, path, start=None, end=None, **kwargs): raise NotImplementedError diff --git a/fsspec/caching.py b/fsspec/caching.py index 41b661ba2..b64240215 100644 --- a/fsspec/caching.py +++ b/fsspec/caching.py @@ -222,8 +222,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32): self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block) def __repr__(self): - return "<BlockCache blocksize={}, size={}, nblocks={}>".format( - self.blocksize, self.size, self.nblocks + return ( + f"<BlockCache blocksize={self.blocksize}, " + f"size={self.size}, nblocks={self.nblocks}>" ) def cache_info(self): @@ -277,14 +278,13 @@ def _fetch_block(self, block_number): """ if block_number > self.nblocks: raise ValueError( - "'block_number={}' is greater than the number of blocks ({})".format( - block_number, self.nblocks - ) + f"'block_number={block_number}' is greater than " + f"the number of blocks ({self.nblocks})" ) start = block_number * self.blocksize end = start + self.blocksize - logger.info("BlockCache fetching block %d", block_number) + logger.info(f"BlockCache fetching block {block_number}") block_contents = super()._fetch(start, end) return block_contents @@ -606,8 +606,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32): self._fetch_future_lock = threading.Lock() def __repr__(self): - return "<BackgroundBlockCache blocksize={}, size={}, nblocks={}>".format( - self.blocksize, self.size, self.nblocks + return ( + f"<BackgroundBlockCache blocksize={self.blocksize}, " + f"size={self.size}, nblocks={self.nblocks}>" ) def cache_info(self): @@ -719,14 +720,13 @@ def _fetch_block(self, block_number, log_info="sync"): """ if block_number > self.nblocks: raise ValueError( - "'block_number={}' is greater than the number of blocks ({})".format( - block_number, self.nblocks - ) +
f"'block_number={block_number}' is greater than " + f"the number of blocks ({self.nblocks})" ) start = block_number * self.blocksize end = start + self.blocksize - logger.info("BlockCache fetching block (%s) %d", log_info, block_number) + logger.info(f"BlockCache fetching block ({log_info}) {block_number}") block_contents = super()._fetch(start, end) return block_contents diff --git a/fsspec/compression.py b/fsspec/compression.py index 38b1d6dd6..53b7426e2 100644 --- a/fsspec/compression.py +++ b/fsspec/compression.py @@ -39,13 +39,11 @@ def register_compression(name, callback, extensions, force=False): # Validate registration if name in compr and not force: - raise ValueError("Duplicate compression registration: %s" % name) + raise ValueError(f"Duplicate compression registration: {name}") for ext in extensions: if ext in fsspec.utils.compressions and not force: - raise ValueError( - "Duplicate compression file extension: %s (%s)" % (ext, name) - ) + raise ValueError(f"Duplicate compression file extension: {ext} ({name})") compr[name] = callback diff --git a/fsspec/core.py b/fsspec/core.py index 6e5a831ae..23c0db535 100644 --- a/fsspec/core.py +++ b/fsspec/core.py @@ -92,7 +92,7 @@ def __reduce__(self): ) def __repr__(self): - return "".format(self.path) + return f"" def __enter__(self): mode = self.mode.replace("t", "").replace("b", "") + "b" @@ -195,7 +195,7 @@ def __getitem__(self, item): return out def __repr__(self): - return "" % len(self) + return f"" def open_files( @@ -498,7 +498,7 @@ def get_compression(urlpath, compression): if compression == "infer": compression = infer_compression(urlpath) if compression is not None and compression not in compr: - raise ValueError("Compression type %s not supported" % compression) + raise ValueError(f"Compression type {compression} not supported") return compression diff --git a/fsspec/fuse.py b/fsspec/fuse.py index cdf742a52..8b04626fa 100644 --- a/fsspec/fuse.py +++ b/fsspec/fuse.py @@ -275,7 +275,7 @@ def format_help(self): for item in args.option or []: key, sep, value = item.partition("=") if not sep: - parser.error(message="Wrong option: {!r}".format(item)) + parser.error(message=f"Wrong option: {item!r}") val = value.lower() if val.endswith("[int]"): value = int(value[: -len("[int]")]) diff --git a/fsspec/gui.py b/fsspec/gui.py index 80ccac21f..3c49003e6 100644 --- a/fsspec/gui.py +++ b/fsspec/gui.py @@ -70,7 +70,7 @@ class which owns it. same name. """ if name not in self.signals: - raise ValueError("Attempt to assign an undeclared signal: %s" % name) + raise ValueError(f"Attempt to assign an undeclared signal: {name}") self._sigs[name] = { "widget": widget, "callbacks": [], @@ -141,7 +141,7 @@ def _emit(self, sig, value=None): Calling of callbacks will halt whenever one returns False. 
""" - logger.log(self._sigs[sig]["log"], "{}: {}".format(sig, value)) + logger.log(self._sigs[sig]["log"], f"{sig}: {value}") for callback in self._sigs[sig]["callbacks"]: if isinstance(callback, str): self._emit(callback) @@ -153,8 +153,7 @@ def _emit(self, sig, value=None): break except Exception as e: logger.exception( - "Exception (%s) while executing callback for signal: %s" - "" % (e, sig) + f"Exception ({e}) while executing callback for signal: {sig}" ) def show(self, threads=False): diff --git a/fsspec/implementations/cached.py b/fsspec/implementations/cached.py index b679cce51..d60f870c5 100644 --- a/fsspec/implementations/cached.py +++ b/fsspec/implementations/cached.py @@ -283,10 +283,10 @@ def _open( hash, blocks = detail["fn"], detail["blocks"] if blocks is True: # stored file is complete - logger.debug("Opening local copy of %s" % path) + logger.debug(f"Opening local copy of {path}") return open(fn, mode) # TODO: action where partial file exists in read-only cache - logger.debug("Opening partially cached copy of %s" % path) + logger.debug(f"Opening partially cached copy of {path}") else: hash = self._mapper(path) fn = os.path.join(self.storage[-1], hash) @@ -299,7 +299,7 @@ def _open( "uid": self.fs.ukey(path), } self._metadata.update_file(path, detail) - logger.debug("Creating local sparse file for %s" % path) + logger.debug(f"Creating local sparse file for {path}") # call target filesystems open self._mkcache() @@ -322,9 +322,9 @@ def _open( if "blocksize" in detail: if detail["blocksize"] != f.blocksize: raise BlocksizeMismatchError( - "Cached file must be reopened with same block" - "size as original (old: %i, new %i)" - "" % (detail["blocksize"], f.blocksize) + f"Cached file must be reopened with same block" + f" size as original (old: {detail['blocksize']}," + f" new {f.blocksize})" ) else: detail["blocksize"] = f.blocksize @@ -547,7 +547,7 @@ def _make_local_details(self, path): "uid": self.fs.ukey(path), } self._metadata.update_file(path, detail) - logger.debug("Copying %s to local cache" % path) + logger.debug(f"Copying {path} to local cache") return fn def cat( @@ -604,7 +604,7 @@ def _open(self, path, mode="rb", **kwargs): detail, fn = detail _, blocks = detail["fn"], detail["blocks"] if blocks is True: - logger.debug("Opening local copy of %s" % path) + logger.debug(f"Opening local copy of {path}") # In order to support downstream filesystems to be able to # infer the compression from the original filename, like @@ -616,8 +616,8 @@ def _open(self, path, mode="rb", **kwargs): return f else: raise ValueError( - "Attempt to open partially cached file %s" - "as a wholly cached file" % path + f"Attempt to open partially cached file {path}" + f" as a wholly cached file" ) else: fn = self._make_local_details(path) @@ -700,7 +700,7 @@ def _open(self, path, mode="rb", **kwargs): sha = self._mapper(path) fn = os.path.join(self.storage[-1], sha) - logger.debug("Copying %s to local cache" % path) + logger.debug(f"Copying {path} to local cache") kwargs["mode"] = mode self._mkcache() diff --git a/fsspec/implementations/ftp.py b/fsspec/implementations/ftp.py index 7e79877eb..c8e3cf2d2 100644 --- a/fsspec/implementations/ftp.py +++ b/fsspec/implementations/ftp.py @@ -156,7 +156,7 @@ def cb(x): outfile.write(x) self.ftp.retrbinary( - "RETR %s" % rpath, + f"RETR {rpath}", blocksize=self.blocksize, callback=cb, ) @@ -172,7 +172,7 @@ def cb(x): out.append(x) self.ftp.retrbinary( - "RETR %s" % path, + f"RETR {path}", blocksize=self.blocksize, rest=start, callback=cb, @@ -321,7 +321,7 
@@ def callback(x): try: self.fs.ftp.retrbinary( - "RETR %s" % self.path, + f"RETR {self.path}", blocksize=self.blocksize, rest=start, callback=callback, @@ -339,7 +339,7 @@ def callback(x): def _upload_chunk(self, final=False): self.buffer.seek(0) self.fs.ftp.storbinary( - "STOR " + self.path, self.buffer, blocksize=self.blocksize, rest=self.offset + f"STOR {self.path}", self.buffer, blocksize=self.blocksize, rest=self.offset ) return True diff --git a/fsspec/implementations/git.py b/fsspec/implementations/git.py index 80c73e066..7c34d93e0 100644 --- a/fsspec/implementations/git.py +++ b/fsspec/implementations/git.py @@ -81,7 +81,7 @@ def ls(self, path, detail=True, ref=None, **kwargs): "type": "directory", "name": "/".join([path, obj.name]).lstrip("/"), "hex": obj.hex, - "mode": "%o" % obj.filemode, + "mode": f"{obj.filemode:o}", "size": 0, } ) @@ -91,7 +91,7 @@ def ls(self, path, detail=True, ref=None, **kwargs): "type": "file", "name": "/".join([path, obj.name]).lstrip("/"), "hex": obj.hex, - "mode": "%o" % obj.filemode, + "mode": f"{obj.filemode:o}", "size": obj.size, } ) @@ -102,7 +102,7 @@ def ls(self, path, detail=True, ref=None, **kwargs): "type": "file", "name": obj.name, "hex": obj.hex, - "mode": "%o" % obj.filemode, + "mode": f"{obj.filemode:o}", "size": obj.size, } ] diff --git a/fsspec/implementations/github.py b/fsspec/implementations/github.py index b148124d7..b5df3f7ab 100644 --- a/fsspec/implementations/github.py +++ b/fsspec/implementations/github.py @@ -79,9 +79,7 @@ def repos(cls, org_or_user, is_org=True): List of string """ r = requests.get( - "https://api.github.com/{part}/{org}/repos".format( - part=["users", "orgs"][is_org], org=org_or_user - ) + f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos" ) r.raise_for_status() return [repo["name"] for repo in r.json()] @@ -90,8 +88,7 @@ def repos(cls, org_or_user, is_org=True): def tags(self): """Names of tags in the repo""" r = requests.get( - "https://api.github.com/repos/{org}/{repo}/tags" - "".format(org=self.org, repo=self.repo), + f"https://api.github.com/repos/{self.org}/{self.repo}/tags", **self.kw, ) r.raise_for_status() @@ -101,8 +98,7 @@ def tags(self): def branches(self): """Names of branches in the repo""" r = requests.get( - "https://api.github.com/repos/{org}/{repo}/branches" - "".format(org=self.org, repo=self.repo), + f"https://api.github.com/repos/{self.org}/{self.repo}/branches", **self.kw, ) r.raise_for_status() diff --git a/fsspec/implementations/http.py b/fsspec/implementations/http.py index 06551017e..25ecb4450 100644 --- a/fsspec/implementations/http.py +++ b/fsspec/implementations/http.py @@ -655,7 +655,7 @@ async def async_fetch_range(self, start, end): logger.debug(f"Fetch range for {self}: {start}-{end}") kwargs = self.kwargs.copy() headers = kwargs.pop("headers", {}).copy() - headers["Range"] = "bytes=%i-%i" % (start, end - 1) + headers["Range"] = f"bytes={start}-{end - 1}" logger.debug(str(self.url) + " : " + headers["Range"]) r = await self.session.get( self.fs.encode_url(self.url), headers=headers, **kwargs @@ -812,7 +812,7 @@ async def get_range(session, url, start, end, file=None, **kwargs): # explicit get a range when we know it must be safe kwargs = kwargs.copy() headers = kwargs.pop("headers", {}).copy() - headers["Range"] = "bytes=%i-%i" % (start, end - 1) + headers["Range"] = f"bytes={start}-{end - 1}" r = await session.get(url, headers=headers, **kwargs) r.raise_for_status() async with r: @@ -831,7 +831,7 @@ async def _file_info(url, session, 
size_policy="head", **kwargs): Default operation is to explicitly allow redirects and use encoding 'identity' (no compression) to get the true size of the target. """ - logger.debug("Retrieve file size for %s" % url) + logger.debug(f"Retrieve file size for {url}") kwargs = kwargs.copy() ar = kwargs.pop("allow_redirects", True) head = kwargs.get("headers", {}).copy() @@ -844,7 +844,7 @@ async def _file_info(url, session, size_policy="head", **kwargs): elif size_policy == "get": r = await session.get(url, allow_redirects=ar, **kwargs) else: - raise TypeError('size_policy must be "head" or "get", got %s' "" % size_policy) + raise TypeError(f'size_policy must be "head" or "get", got {size_policy}') async with r: r.raise_for_status() diff --git a/fsspec/implementations/libarchive.py b/fsspec/implementations/libarchive.py index 849040423..592e8979d 100644 --- a/fsspec/implementations/libarchive.py +++ b/fsspec/implementations/libarchive.py @@ -122,8 +122,7 @@ def __init__( files = open_files(fo, protocol=target_protocol, **(target_options or {})) if len(files) != 1: raise ValueError( - 'Path "{}" did not resolve to exactly' - 'one file: "{}"'.format(fo, files) + f'Path "{fo}" did not resolve to exactly one file: "{files}"' ) fo = files[0] self.of = fo diff --git a/fsspec/implementations/sftp.py b/fsspec/implementations/sftp.py index 1153e7c6a..aa7ac899a 100644 --- a/fsspec/implementations/sftp.py +++ b/fsspec/implementations/sftp.py @@ -48,7 +48,7 @@ def __init__(self, host, **ssh_kwargs): self._connect() def _connect(self): - logger.debug("Connecting to SFTP server %s" % self.host) + logger.debug(f"Connecting to SFTP server {self.host}") self.client = paramiko.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect(self.host, **self.ssh_kwargs) @@ -66,9 +66,9 @@ def _get_kwargs_from_urls(urlpath): return out def mkdir(self, path, create_parents=False, mode=511): - logger.debug("Creating folder %s" % path) + logger.debug(f"Creating folder {path}") if self.exists(path): - raise FileExistsError("File exists: {}".format(path)) + raise FileExistsError(f"File exists: {path}") if create_parents: self.makedirs(path) @@ -77,7 +77,7 @@ def mkdir(self, path, create_parents=False, mode=511): def makedirs(self, path, exist_ok=False, mode=511): if self.exists(path) and not exist_ok: - raise FileExistsError("File exists: {}".format(path)) + raise FileExistsError(f"File exists: {path}") parts = path.split("/") path = "" @@ -88,7 +88,7 @@ def makedirs(self, path, exist_ok=False, mode=511): self.ftp.mkdir(path, mode) def rmdir(self, path): - logger.debug("Removing folder %s" % path) + logger.debug(f"Removing folder {path}") self.ftp.rmdir(path) def info(self, path): @@ -122,7 +122,7 @@ def _decode_stat(stat, parent_path=None): return out def ls(self, path, detail=False): - logger.debug("Listing folder %s" % path) + logger.debug(f"Listing folder {path}") stats = [self._decode_stat(stat, path) for stat in self.ftp.listdir_iter(path)] if detail: return stats @@ -131,7 +131,7 @@ def ls(self, path, detail=False): return sorted(paths) def put(self, lpath, rpath, callback=None, **kwargs): - logger.debug("Put file %s into %s" % (lpath, rpath)) + logger.debug(f"Put file {lpath} into {rpath}") self.ftp.put(lpath, rpath) def get_file(self, rpath, lpath, **kwargs): @@ -146,7 +146,7 @@ def _open(self, path, mode="rb", block_size=None, **kwargs): If 0, no buffering, if 1, line buffering, if >1, buffer that many bytes, if None use default from paramiko. 
""" - logger.debug("Opening file %s" % path) + logger.debug(f"Opening file {path}") if kwargs.get("autocommit", True) is False: # writes to temporary file, move on commit path2 = "/".join([self.temppath, str(uuid.uuid4())]) @@ -167,7 +167,7 @@ def _rm(self, path): self.ftp.remove(path) def mv(self, old, new): - logger.debug("Renaming %s into %s" % (old, new)) + logger.debug(f"Renaming {old} into {new}") self.ftp.posix_rename(old, new) diff --git a/fsspec/implementations/smb.py b/fsspec/implementations/smb.py index a3816773c..5804a6bf5 100644 --- a/fsspec/implementations/smb.py +++ b/fsspec/implementations/smb.py @@ -256,13 +256,13 @@ def mv(self, path1, path2, **kwargs): def _as_unc_path(host, path): rpath = path.replace("/", "\\") - unc = "\\\\{}{}".format(host, rpath) + unc = f"\\\\{host}{rpath}" return unc def _as_temp_path(host, path, temppath): share = path.split("/")[1] - temp_file = "/{}{}/{}".format(share, temppath, uuid.uuid4()) + temp_file = f"/{share}{temppath}/{uuid.uuid4()}" unc = _as_unc_path(host, temp_file) return unc diff --git a/fsspec/implementations/tests/test_cached.py b/fsspec/implementations/tests/test_cached.py index d4e05bdcf..ab92d5cea 100644 --- a/fsspec/implementations/tests/test_cached.py +++ b/fsspec/implementations/tests/test_cached.py @@ -926,7 +926,7 @@ def test_with_compression(impl, compression): f.close() with fsspec.open( - "%s::%s" % (impl, fn), + f"{impl}::{fn}", "rb", compression=compression, **{impl: {"same_names": True, "cache_storage": cachedir}}, @@ -939,7 +939,7 @@ def test_with_compression(impl, compression): cachedir = tempfile.mkdtemp() with fsspec.open( - "%s::%s" % (impl, fn), + f"{impl}::{fn}", "rb", **{ impl: { diff --git a/fsspec/implementations/tests/test_ftp.py b/fsspec/implementations/tests/test_ftp.py index af5e6cda1..718b8d2f3 100644 --- a/fsspec/implementations/tests/test_ftp.py +++ b/fsspec/implementations/tests/test_ftp.py @@ -77,10 +77,10 @@ def test_write_small(ftp_writable): def test_with_url(ftp_writable): host, port, user, pw = ftp_writable - fo = fsspec.open("ftp://{}:{}@{}:{}/out".format(user, pw, host, port), "wb") + fo = fsspec.open(f"ftp://{user}:{pw}@{host}:{port}/out", "wb") with fo as f: f.write(b"hello") - fo = fsspec.open("ftp://{}:{}@{}:{}/out".format(user, pw, host, port), "rb") + fo = fsspec.open(f"ftp://{user}:{pw}@{host}:{port}/out", "rb") with fo as f: assert f.read() == b"hello" diff --git a/fsspec/implementations/tests/test_local.py b/fsspec/implementations/tests/test_local.py index 961537520..268cab422 100644 --- a/fsspec/implementations/tests/test_local.py +++ b/fsspec/implementations/tests/test_local.py @@ -79,7 +79,7 @@ def filetexts(d, open=open, mode="t"): def test_urlpath_inference_strips_protocol(tmpdir): tmpdir = make_path_posix(str(tmpdir)) - paths = ["/".join([tmpdir, "test.%02d.csv" % i]) for i in range(20)] + paths = ["/".join([tmpdir, f"test.{i:02d}.csv"]) for i in range(20)] for path in paths: with open(path, "wb") as f: @@ -425,11 +425,11 @@ def test_recursive_get_put(tmpdir): fs.touch(tmpdir + "/a1/a2/a3/afile") fs.touch(tmpdir + "/a1/afile") - fs.get("file://{0}/a1".format(tmpdir), tmpdir + "/b1", recursive=True) + fs.get(f"file://{tmpdir}/a1", tmpdir + "/b1", recursive=True) assert fs.isfile(tmpdir + "/b1/afile") assert fs.isfile(tmpdir + "/b1/a2/a3/afile") - fs.put(tmpdir + "/b1", "file://{0}/c1".format(tmpdir), recursive=True) + fs.put(tmpdir + "/b1", f"file://{tmpdir}/c1", recursive=True) assert fs.isfile(tmpdir + "/c1/afile") assert fs.isfile(tmpdir + "/c1/a2/a3/afile") @@ -652,7 
+652,7 @@ def test_iterable(tmpdir): fn = os.path.join(tmpdir, "test") with open(fn, "wb") as f: f.write(data) - of = fsspec.open("file://%s" % fn, "rb") + of = fsspec.open(f"file://{fn}", "rb") with of as f: out = list(f) assert b"".join(out) == data diff --git a/fsspec/implementations/tests/test_sftp.py b/fsspec/implementations/tests/test_sftp.py index 6fe1ea88a..3ddc524bd 100644 --- a/fsspec/implementations/tests/test_sftp.py +++ b/fsspec/implementations/tests/test_sftp.py @@ -12,7 +12,7 @@ def stop_docker(name): - cmd = shlex.split('docker ps -a -q --filter "name=%s"' % name) + cmd = shlex.split(f'docker ps -a -q --filter "name={name}"') cid = subprocess.check_output(cmd).strip().decode() if cid: subprocess.call(["docker", "rm", "-f", cid]) @@ -45,7 +45,7 @@ def ssh(): ] name = "fsspec_sftp" stop_docker(name) - cmd = "docker run -d -p 9200:22 --name {} ubuntu:16.04 sleep 9000".format(name) + cmd = f"docker run -d -p 9200:22 --name {name} ubuntu:16.04 sleep 9000" cid = subprocess.check_output(shlex.split(cmd)).strip().decode() for cmd in cmds: subprocess.call(["docker", "exec", cid] + shlex.split(cmd)) diff --git a/fsspec/implementations/webhdfs.py b/fsspec/implementations/webhdfs.py index cc595934f..fd3b075c7 100644 --- a/fsspec/implementations/webhdfs.py +++ b/fsspec/implementations/webhdfs.py @@ -89,9 +89,7 @@ def __init__( if self._cached: return super().__init__(**kwargs) - self.url = "{protocol}://{host}:{port}/webhdfs/v1".format( - protocol="https" if use_https else "http", host=host, port=port - ) + self.url = f"{'https' if use_https else 'http'}://{host}:{port}/webhdfs/v1" self.kerb = kerberos self.kerb_kwargs = kerb_kwargs or {} self.pars = {} diff --git a/fsspec/mapping.py b/fsspec/mapping.py index 2b75c2e41..b9822ae17 100644 --- a/fsspec/mapping.py +++ b/fsspec/mapping.py @@ -54,8 +54,8 @@ def __init__(self, root, fs, check=False, create=False, missing_exceptions=None) if check: if not self.fs.exists(root): raise ValueError( - "Path %s does not exist. Create " - " with the ``create=True`` keyword" % root + f"Path {root} does not exist. 
Create " + f" with the ``create=True`` keyword" ) self.fs.touch(root + "/a") self.fs.rm(root + "/a") diff --git a/fsspec/registry.py b/fsspec/registry.py index 851bc65bc..126d9a522 100644 --- a/fsspec/registry.py +++ b/fsspec/registry.py @@ -38,20 +38,20 @@ def register_implementation(name, cls, clobber=False, errtxt=None): if name in known_implementations and clobber is False: if cls != known_implementations[name]["class"]: raise ValueError( - "Name (%s) already in the known_implementations and clobber " - "is False" % name + f"Name ({name}) already in the known_implementations and clobber " + f"is False" ) else: known_implementations[name] = { "class": cls, - "err": errtxt or "%s import failed for protocol %s" % (cls, name), + "err": errtxt or f"{cls} import failed for protocol {name}", } else: if name in registry and clobber is False: if _registry[name] is not cls: raise ValueError( - "Name (%s) already in the registry and clobber is False" % name + f"Name ({name}) already in the registry and clobber is False" ) else: _registry[name] = cls @@ -228,7 +228,7 @@ def get_filesystem_class(protocol): if protocol not in registry: if protocol not in known_implementations: - raise ValueError("Protocol not known: %s" % protocol) + raise ValueError(f"Protocol not known: {protocol}") bit = known_implementations[protocol] try: register_implementation(protocol, _import_class(bit["class"])) diff --git a/fsspec/spec.py b/fsspec/spec.py index a205cb2b6..f491098a8 100644 --- a/fsspec/spec.py +++ b/fsspec/spec.py @@ -1187,9 +1187,7 @@ def expand_path(self, path, recursive=False, maxdepth=None, **kwargs): def mv(self, path1, path2, recursive=False, maxdepth=None, **kwargs): """Move file(s) from one location to another""" if path1 == path2: - logger.debug( - "%s mv: The paths are the same, so no files were moved." 
% (self) - ) + logger.debug(f"{self} mv: The paths are the same, so no files were moved.") else: self.copy(path1, path2, recursive=recursive, maxdepth=maxdepth) self.rm(path1, recursive=recursive) @@ -1744,7 +1742,7 @@ def seek(self, loc, whence=0): elif whence == 2: nloc = self.size + loc else: - raise ValueError("invalid whence (%s, should be 0, 1 or 2)" % whence) + raise ValueError(f"invalid whence ({whence}, should be 0, 1 or 2)") if nloc < 0: raise ValueError("Seek before start of file") self.loc = nloc @@ -1851,7 +1849,7 @@ def read(self, length=-1): length = self.size - self.loc if self.closed: raise ValueError("I/O operation on closed file.") - logger.debug("%s read: %i - %i" % (self, self.loc, self.loc + length)) + logger.debug(f"{self} read: {self.loc} - {self.loc + length}") if length == 0: # don't even bother calling fetch return b"" @@ -1966,7 +1964,7 @@ def __del__(self): self.close() def __str__(self): - return "<File-like object %s, %s>" % (type(self.fs).__name__, self.path) + return f"<File-like object {type(self.fs).__name__}, {self.path}>" __repr__ = __str__ diff --git a/fsspec/tests/conftest.py b/fsspec/tests/conftest.py index 2c7a1742b..f5056335e 100644 --- a/fsspec/tests/conftest.py +++ b/fsspec/tests/conftest.py @@ -11,7 +11,7 @@ requests = pytest.importorskip("requests") port = 9898 data = b"\n".join([b"some test data"] * 1000) -realfile = "http://127.0.0.1:%i/index/realfile" % port +realfile = f"http://127.0.0.1:{port}/index/realfile" index = b'<a href="%s">Link</a>' % realfile.encode() listing = open( os.path.join(os.path.dirname(__file__), "data", "listing.html"), "rb" ).read() win = os.name == "nt" def _make_realfile(baseurl): @@ -61,7 +61,7 @@ def do_GET(self): return self._respond(404) status = 200 - content_range = "bytes 0-%i/%i" % (len(file_data) - 1, len(file_data)) + content_range = f"bytes 0-{len(file_data) - 1}/{len(file_data)}" if ("Range" in self.headers) and ("ignore_range" not in self.headers): ran = self.headers["Range"] b, ran = ran.split("=") @@ -139,7 +139,7 @@ def do_HEAD(self): self._respond(200, response_headers) elif "give_range" in self.headers: self._respond( - 200, {"Content-Range": "0-%i/%i" % (len(file_data) - 1, len(file_data))} + 200, {"Content-Range": f"0-{len(file_data) - 1}/{len(file_data)}"} ) elif "give_etag" in self.headers: self._respond(200, {"ETag": "xxx"}) @@ -155,7 +155,7 @@ def serve(): th.daemon = True th.start() try: - yield "http://127.0.0.1:%i" % port + yield f"http://127.0.0.1:{port}" finally: httpd.socket.close() httpd.shutdown() diff --git a/fsspec/tests/test_spec.py b/fsspec/tests/test_spec.py index 38e6dabb1..f76bbef76 100644 --- a/fsspec/tests/test_spec.py +++ b/fsspec/tests/test_spec.py @@ -420,7 +420,7 @@ def __getitem__(self, name): for item in self._fs_contents: if item["name"] == name: return item - raise IndexError("{name} not found!".format(name=name)) + raise IndexError(f"{name} not found!") def ls(self, path, detail=True, refresh=True, **kwargs): if kwargs.pop("strip_proto", True): diff --git a/fsspec/tests/test_utils.py b/fsspec/tests/test_utils.py index c83eeea0c..b5732e9f1 100644 --- a/fsspec/tests/test_utils.py +++ b/fsspec/tests/test_utils.py @@ -206,7 +206,7 @@ def test_infer_options(): # - Parsing doesn't lowercase the bucket # - The bucket is included in path for protocol in ["s3", "s3a", "gcs", "gs"]: - options = infer_storage_options("%s://Bucket-name.com/test.csv" % protocol) + options = infer_storage_options(f"{protocol}://Bucket-name.com/test.csv") assert options["path"] == "Bucket-name.com/test.csv" with pytest.raises(KeyError): diff --git a/fsspec/utils.py b/fsspec/utils.py index 593a40d2a..9e52ffe51 100644 --- a/fsspec/utils.py +++
b/fsspec/utils.py @@ -106,8 +106,8 @@ def update_storage_options(options, inherited=None): for collision in collisions: if options.get(collision) != inherited.get(collision): raise KeyError( - "Collision between inferred and specified storage " - "option:\n%s" % collision + f"Collision between inferred and specified storage " + f"option:\n{collision}" ) options.update(inherited)
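As background on the conversion this patch performs, here is a minimal, self-contained sketch of the formatting styles involved (%-interpolation, str.format(), and f-strings). The variable names below are illustrative placeholders, not fsspec identifiers. It also notes one behavioural nuance of converting logging calls: %-style logging arguments are formatted lazily by the logging module only when a record is actually emitted, whereas an f-string argument is evaluated eagerly at call time.

# Illustrative comparison of the formatting styles touched by this patch.
# Names (path, block_number) are placeholders, not fsspec identifiers.
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

path = "/tmp/example"
block_number = 7

# Legacy %-interpolation and str.format()
msg_percent = "Opening local copy of %s" % path
msg_format = "Opening local copy of {}".format(path)

# Equivalent f-string (Python 3.6+): the expression is evaluated inline
msg_fstring = f"Opening local copy of {path}"
assert msg_percent == msg_format == msg_fstring

# Nuance: %-style logging defers formatting until the record is emitted,
# while an f-string argument is always formatted, even if the level is disabled.
logger.info("BlockCache fetching block %d", block_number)   # lazy formatting
logger.info(f"BlockCache fetching block {block_number}")    # eager formatting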