From edf9b732275fc52092197a728766b1c5a592adeb Mon Sep 17 00:00:00 2001
From: Dimitri Papadopoulos Orfanos <3234522+DimitriPapadopoulos@users.noreply.github.com>
Date: Wed, 4 Oct 2023 16:24:39 +0200
Subject: [PATCH] Change legacy string formatting to f-strings (#1374)

---
 fsspec/archive.py | 2 +-
 fsspec/asyn.py | 2 +-
 fsspec/caching.py | 20 ++++++++++----------
 fsspec/compression.py | 6 ++----
 fsspec/core.py | 6 +++---
 fsspec/fuse.py | 2 +-
 fsspec/gui.py | 6 +++---
 fsspec/implementations/cached.py | 22 +++++++++++-----------
 fsspec/implementations/ftp.py | 8 ++++----
 fsspec/implementations/git.py | 6 +++---
 fsspec/implementations/github.py | 10 +++-------
 fsspec/implementations/http.py | 12 ++++++------
 fsspec/implementations/jupyter.py | 10 +++++-----
 fsspec/implementations/libarchive.py | 3 +--
 fsspec/implementations/local.py | 6 +++---
 fsspec/implementations/sftp.py | 20 ++++++++++----------
 fsspec/implementations/smb.py | 4 ++--
 fsspec/implementations/tests/test_cached.py | 14 +++++++-------
 fsspec/implementations/tests/test_ftp.py | 6 +++---
 fsspec/implementations/tests/test_local.py | 14 +++++++-------
 .../implementations/tests/test_reference.py | 8 ++++----
 fsspec/implementations/tests/test_sftp.py | 14 +++++++-------
 fsspec/implementations/tests/test_webhdfs.py | 2 +-
 fsspec/implementations/webhdfs.py | 6 ++----
 fsspec/mapping.py | 4 ++--
 fsspec/registry.py | 10 +++++-----
 fsspec/spec.py | 10 ++++------
 fsspec/tests/conftest.py | 8 ++++----
 fsspec/tests/test_core.py | 2 +-
 fsspec/tests/test_mapping.py | 8 ++++----
 fsspec/tests/test_spec.py | 2 +-
 fsspec/tests/test_utils.py | 2 +-
 fsspec/utils.py | 4 ++--
 33 files changed, 124 insertions(+), 135 deletions(-)

diff --git a/fsspec/archive.py b/fsspec/archive.py
index dc5c1490b..9bdd8efce 100644
--- a/fsspec/archive.py
+++ b/fsspec/archive.py
@@ -13,7 +13,7 @@ class AbstractArchiveFileSystem(AbstractFileSystem):
     """
 
     def __str__(self):
-        return "<Archive-like object %s at %s>" % (type(self).__name__, id(self))
+        return f"<Archive-like object {type(self).__name__} at {id(self)}>"
 
     __repr__ = __str__
 
diff --git a/fsspec/asyn.py b/fsspec/asyn.py
index 347e262ad..4ac1419f0 100644
--- a/fsspec/asyn.py
+++ b/fsspec/asyn.py
@@ -426,7 +426,7 @@ async def _process_limits(self, url, start, end):
             end = ""
         if isinstance(end, numbers.Integral):
             end -= 1  # bytes range is inclusive
-        return "bytes=%s-%s" % (start, end)
+        return f"bytes={start}-{end}"
 
     async def _cat_file(self, path, start=None, end=None, **kwargs):
         raise NotImplementedError
diff --git a/fsspec/caching.py b/fsspec/caching.py
index 41b661ba2..42c116cb3 100644
--- a/fsspec/caching.py
+++ b/fsspec/caching.py
@@ -222,8 +222,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
         self._fetch_block_cached = functools.lru_cache(maxblocks)(self._fetch_block)
 
     def __repr__(self):
-        return "<BlockCache blocksize={}, size={}, nblocks={}>".format(
-            self.blocksize, self.size, self.nblocks
+        return (
+            f"<BlockCache blocksize={self.blocksize}, "
+            f"size={self.size}, nblocks={self.nblocks}>"
         )
 
     def cache_info(self):
@@ -277,9 +278,8 @@ def _fetch_block(self, block_number):
         """
         if block_number > self.nblocks:
             raise ValueError(
-                "'block_number={}' is greater than the number of blocks ({})".format(
-                    block_number, self.nblocks
-                )
+                f"'block_number={block_number}' is greater than "
+                f"the number of blocks ({self.nblocks})"
             )
 
         start = block_number * self.blocksize
@@ -606,8 +606,9 @@ def __init__(self, blocksize, fetcher, size, maxblocks=32):
         self._fetch_future_lock = threading.Lock()
 
     def __repr__(self):
-        return "<BackgroundBlockCache blocksize={}, size={}, nblocks={}>".format(
-            self.blocksize, self.size, self.nblocks
+        return (
+            f"<BackgroundBlockCache blocksize={self.blocksize}, "
+            f"size={self.size}, nblocks={self.nblocks}>"
         )
 
     def cache_info(self):
@@ -719,9 +720,8 @@ def _fetch_block(self, block_number, log_info="sync"):
         """
         if block_number > self.nblocks:
             raise ValueError(
"'block_number={}' is greater than the number of blocks ({})".format( - block_number, self.nblocks - ) + f"'block_number={block_number}' is greater than " + f"the number of blocks ({self.nblocks})" ) start = block_number * self.blocksize diff --git a/fsspec/compression.py b/fsspec/compression.py index 38b1d6dd6..53b7426e2 100644 --- a/fsspec/compression.py +++ b/fsspec/compression.py @@ -39,13 +39,11 @@ def register_compression(name, callback, extensions, force=False): # Validate registration if name in compr and not force: - raise ValueError("Duplicate compression registration: %s" % name) + raise ValueError(f"Duplicate compression registration: {name}") for ext in extensions: if ext in fsspec.utils.compressions and not force: - raise ValueError( - "Duplicate compression file extension: %s (%s)" % (ext, name) - ) + raise ValueError(f"Duplicate compression file extension: {ext} ({name})") compr[name] = callback diff --git a/fsspec/core.py b/fsspec/core.py index 6e5a831ae..23c0db535 100644 --- a/fsspec/core.py +++ b/fsspec/core.py @@ -92,7 +92,7 @@ def __reduce__(self): ) def __repr__(self): - return "".format(self.path) + return f"" def __enter__(self): mode = self.mode.replace("t", "").replace("b", "") + "b" @@ -195,7 +195,7 @@ def __getitem__(self, item): return out def __repr__(self): - return "" % len(self) + return f"" def open_files( @@ -498,7 +498,7 @@ def get_compression(urlpath, compression): if compression == "infer": compression = infer_compression(urlpath) if compression is not None and compression not in compr: - raise ValueError("Compression type %s not supported" % compression) + raise ValueError(f"Compression type {compression} not supported") return compression diff --git a/fsspec/fuse.py b/fsspec/fuse.py index cdf742a52..8b04626fa 100644 --- a/fsspec/fuse.py +++ b/fsspec/fuse.py @@ -275,7 +275,7 @@ def format_help(self): for item in args.option or []: key, sep, value = item.partition("=") if not sep: - parser.error(message="Wrong option: {!r}".format(item)) + parser.error(message=f"Wrong option: {item!r}") val = value.lower() if val.endswith("[int]"): value = int(value[: -len("[int]")]) diff --git a/fsspec/gui.py b/fsspec/gui.py index 80ccac21f..c6e9eb9df 100644 --- a/fsspec/gui.py +++ b/fsspec/gui.py @@ -70,7 +70,7 @@ class which owns it. same name. """ if name not in self.signals: - raise ValueError("Attempt to assign an undeclared signal: %s" % name) + raise ValueError(f"Attempt to assign an undeclared signal: {name}") self._sigs[name] = { "widget": widget, "callbacks": [], @@ -141,7 +141,7 @@ def _emit(self, sig, value=None): Calling of callbacks will halt whenever one returns False. 
""" - logger.log(self._sigs[sig]["log"], "{}: {}".format(sig, value)) + logger.log(self._sigs[sig]["log"], f"{sig}: {value}") for callback in self._sigs[sig]["callbacks"]: if isinstance(callback, str): self._emit(callback) @@ -319,7 +319,7 @@ def fs(self): def urlpath(self): """URL of currently selected item""" return ( - (self.protocol.value + "://" + self.main.value[0]) + (f"{self.protocol.value}://{self.main.value[0]}") if self.main.value else None ) diff --git a/fsspec/implementations/cached.py b/fsspec/implementations/cached.py index 496bcb07a..5c495e354 100644 --- a/fsspec/implementations/cached.py +++ b/fsspec/implementations/cached.py @@ -304,10 +304,10 @@ def _open( hash, blocks = detail["fn"], detail["blocks"] if blocks is True: # stored file is complete - logger.debug("Opening local copy of %s" % path) + logger.debug("Opening local copy of %s", path) return open(fn, mode) # TODO: action where partial file exists in read-only cache - logger.debug("Opening partially cached copy of %s" % path) + logger.debug("Opening partially cached copy of %s", path) else: hash = self._mapper(path) fn = os.path.join(self.storage[-1], hash) @@ -320,7 +320,7 @@ def _open( "uid": self.fs.ukey(path), } self._metadata.update_file(path, detail) - logger.debug("Creating local sparse file for %s" % path) + logger.debug("Creating local sparse file for %s", path) # call target filesystems open self._mkcache() @@ -343,9 +343,9 @@ def _open( if "blocksize" in detail: if detail["blocksize"] != f.blocksize: raise BlocksizeMismatchError( - "Cached file must be reopened with same block" - "size as original (old: %i, new %i)" - "" % (detail["blocksize"], f.blocksize) + f"Cached file must be reopened with same block" + f" size as original (old: {detail['blocksize']}," + f" new {f.blocksize})" ) else: detail["blocksize"] = f.blocksize @@ -570,7 +570,7 @@ def _make_local_details(self, path): "uid": self.fs.ukey(path), } self._metadata.update_file(path, detail) - logger.debug("Copying %s to local cache" % path) + logger.debug("Copying %s to local cache", path) return fn def cat( @@ -627,7 +627,7 @@ def _open(self, path, mode="rb", **kwargs): detail, fn = detail _, blocks = detail["fn"], detail["blocks"] if blocks is True: - logger.debug("Opening local copy of %s" % path) + logger.debug("Opening local copy of %s", path) # In order to support downstream filesystems to be able to # infer the compression from the original filename, like @@ -639,8 +639,8 @@ def _open(self, path, mode="rb", **kwargs): return f else: raise ValueError( - "Attempt to open partially cached file %s" - "as a wholly cached file" % path + f"Attempt to open partially cached file {path}" + f" as a wholly cached file" ) else: fn = self._make_local_details(path) @@ -723,7 +723,7 @@ def _open(self, path, mode="rb", **kwargs): sha = self._mapper(path) fn = os.path.join(self.storage[-1], sha) - logger.debug("Copying %s to local cache" % path) + logger.debug("Copying %s to local cache", path) kwargs["mode"] = mode self._mkcache() diff --git a/fsspec/implementations/ftp.py b/fsspec/implementations/ftp.py index 7e79877eb..c8e3cf2d2 100644 --- a/fsspec/implementations/ftp.py +++ b/fsspec/implementations/ftp.py @@ -156,7 +156,7 @@ def cb(x): outfile.write(x) self.ftp.retrbinary( - "RETR %s" % rpath, + f"RETR {rpath}", blocksize=self.blocksize, callback=cb, ) @@ -172,7 +172,7 @@ def cb(x): out.append(x) self.ftp.retrbinary( - "RETR %s" % path, + f"RETR {path}", blocksize=self.blocksize, rest=start, callback=cb, @@ -321,7 +321,7 @@ def callback(x): try: 
self.fs.ftp.retrbinary( - "RETR %s" % self.path, + f"RETR {self.path}", blocksize=self.blocksize, rest=start, callback=callback, @@ -339,7 +339,7 @@ def callback(x): def _upload_chunk(self, final=False): self.buffer.seek(0) self.fs.ftp.storbinary( - "STOR " + self.path, self.buffer, blocksize=self.blocksize, rest=self.offset + f"STOR {self.path}", self.buffer, blocksize=self.blocksize, rest=self.offset ) return True diff --git a/fsspec/implementations/git.py b/fsspec/implementations/git.py index 80c73e066..7c34d93e0 100644 --- a/fsspec/implementations/git.py +++ b/fsspec/implementations/git.py @@ -81,7 +81,7 @@ def ls(self, path, detail=True, ref=None, **kwargs): "type": "directory", "name": "/".join([path, obj.name]).lstrip("/"), "hex": obj.hex, - "mode": "%o" % obj.filemode, + "mode": f"{obj.filemode:o}", "size": 0, } ) @@ -91,7 +91,7 @@ def ls(self, path, detail=True, ref=None, **kwargs): "type": "file", "name": "/".join([path, obj.name]).lstrip("/"), "hex": obj.hex, - "mode": "%o" % obj.filemode, + "mode": f"{obj.filemode:o}", "size": obj.size, } ) @@ -102,7 +102,7 @@ def ls(self, path, detail=True, ref=None, **kwargs): "type": "file", "name": obj.name, "hex": obj.hex, - "mode": "%o" % obj.filemode, + "mode": f"{obj.filemode:o}", "size": obj.size, } ] diff --git a/fsspec/implementations/github.py b/fsspec/implementations/github.py index b148124d7..b5df3f7ab 100644 --- a/fsspec/implementations/github.py +++ b/fsspec/implementations/github.py @@ -79,9 +79,7 @@ def repos(cls, org_or_user, is_org=True): List of string """ r = requests.get( - "https://api.github.com/{part}/{org}/repos".format( - part=["users", "orgs"][is_org], org=org_or_user - ) + f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos" ) r.raise_for_status() return [repo["name"] for repo in r.json()] @@ -90,8 +88,7 @@ def repos(cls, org_or_user, is_org=True): def tags(self): """Names of tags in the repo""" r = requests.get( - "https://api.github.com/repos/{org}/{repo}/tags" - "".format(org=self.org, repo=self.repo), + f"https://api.github.com/repos/{self.org}/{self.repo}/tags", **self.kw, ) r.raise_for_status() @@ -101,8 +98,7 @@ def tags(self): def branches(self): """Names of branches in the repo""" r = requests.get( - "https://api.github.com/repos/{org}/{repo}/branches" - "".format(org=self.org, repo=self.repo), + f"https://api.github.com/repos/{self.org}/{self.repo}/branches", **self.kw, ) r.raise_for_status() diff --git a/fsspec/implementations/http.py b/fsspec/implementations/http.py index 06551017e..5b21f2605 100644 --- a/fsspec/implementations/http.py +++ b/fsspec/implementations/http.py @@ -165,7 +165,7 @@ async def _ls_real(self, url, detail=True, **kwargs): l = l[1] if l.startswith("/") and len(l) > 1: # absolute URL on this server - l = parts.scheme + "://" + parts.netloc + l + l = f"{parts.scheme}://{parts.netloc}{l}" if l.startswith("http"): if self.same_schema and l.startswith(url.rstrip("/") + "/"): out.add(l) @@ -655,8 +655,8 @@ async def async_fetch_range(self, start, end): logger.debug(f"Fetch range for {self}: {start}-{end}") kwargs = self.kwargs.copy() headers = kwargs.pop("headers", {}).copy() - headers["Range"] = "bytes=%i-%i" % (start, end - 1) - logger.debug(str(self.url) + " : " + headers["Range"]) + headers["Range"] = f"bytes={start}-{end - 1}" + logger.debug(f"{self.url} : {headers['Range']}") r = await self.session.get( self.fs.encode_url(self.url), headers=headers, **kwargs ) @@ -812,7 +812,7 @@ async def get_range(session, url, start, end, file=None, **kwargs): # explicit get 
a range when we know it must be safe kwargs = kwargs.copy() headers = kwargs.pop("headers", {}).copy() - headers["Range"] = "bytes=%i-%i" % (start, end - 1) + headers["Range"] = f"bytes={start}-{end - 1}" r = await session.get(url, headers=headers, **kwargs) r.raise_for_status() async with r: @@ -831,7 +831,7 @@ async def _file_info(url, session, size_policy="head", **kwargs): Default operation is to explicitly allow redirects and use encoding 'identity' (no compression) to get the true size of the target. """ - logger.debug("Retrieve file size for %s" % url) + logger.debug("Retrieve file size for %s", url) kwargs = kwargs.copy() ar = kwargs.pop("allow_redirects", True) head = kwargs.get("headers", {}).copy() @@ -844,7 +844,7 @@ async def _file_info(url, session, size_policy="head", **kwargs): elif size_policy == "get": r = await session.get(url, allow_redirects=ar, **kwargs) else: - raise TypeError('size_policy must be "head" or "get", got %s' "" % size_policy) + raise TypeError(f'size_policy must be "head" or "get", got {size_policy}') async with r: r.raise_for_status() diff --git a/fsspec/implementations/jupyter.py b/fsspec/implementations/jupyter.py index 782fa8639..2839f4c1f 100644 --- a/fsspec/implementations/jupyter.py +++ b/fsspec/implementations/jupyter.py @@ -40,7 +40,7 @@ def __init__(self, url, tok=None, **kwargs): def ls(self, path, detail=True, **kwargs): path = self._strip_protocol(path) - r = self.session.get(self.url + "/" + path) + r = self.session.get(f"{self.url}/{path}") if r.status_code == 404: return FileNotFoundError(path) r.raise_for_status() @@ -61,7 +61,7 @@ def ls(self, path, detail=True, **kwargs): def cat_file(self, path, start=None, end=None, **kwargs): path = self._strip_protocol(path) - r = self.session.get(self.url + "/" + path) + r = self.session.get(f"{self.url}/{path}") if r.status_code == 404: return FileNotFoundError(path) r.raise_for_status() @@ -83,7 +83,7 @@ def pipe_file(self, path, value, **_): "format": "base64", "type": "file", } - self.session.put(self.url + "/" + path, json=json) + self.session.put(f"{self.url}/{path}", json=json) def mkdir(self, path, create_parents=True, **kwargs): path = self._strip_protocol(path) @@ -96,11 +96,11 @@ def mkdir(self, path, create_parents=True, **kwargs): "content": None, "type": "directory", } - self.session.put(self.url + "/" + path, json=json) + self.session.put(f"{self.url}/{path}", json=json) def _rm(self, path): path = self._strip_protocol(path) - self.session.delete(self.url + "/" + path) + self.session.delete(f"{self.url}/{path}") def _open(self, path, mode="rb", **kwargs): path = self._strip_protocol(path) diff --git a/fsspec/implementations/libarchive.py b/fsspec/implementations/libarchive.py index 849040423..592e8979d 100644 --- a/fsspec/implementations/libarchive.py +++ b/fsspec/implementations/libarchive.py @@ -122,8 +122,7 @@ def __init__( files = open_files(fo, protocol=target_protocol, **(target_options or {})) if len(files) != 1: raise ValueError( - 'Path "{}" did not resolve to exactly' - 'one file: "{}"'.format(fo, files) + f'Path "{fo}" did not resolve to exactly one file: "{files}"' ) fo = files[0] self.of = fo diff --git a/fsspec/implementations/local.py b/fsspec/implementations/local.py index 117fd95a8..1d8e21fd2 100644 --- a/fsspec/implementations/local.py +++ b/fsspec/implementations/local.py @@ -98,7 +98,7 @@ def info(self, path, **kwargs): "islink": link, } for field in ["mode", "uid", "gid", "mtime", "ino", "nlink"]: - result[field] = getattr(out, "st_" + field) + result[field] = 
getattr(out, f"st_{field}") if result["islink"]: result["destination"] = os.readlink(path) try: @@ -244,7 +244,7 @@ def make_path_posix(path, sep=os.sep): return path if path.startswith("./"): path = path[2:] - return os.getcwd() + "/" + path + return f"{os.getcwd()}/{path}" if ( (sep not in path and "/" not in path) or (sep == "/" and not path.startswith("/")) @@ -255,7 +255,7 @@ def make_path_posix(path, sep=os.sep): # abspath made some more '\\' separators return make_path_posix(osp.abspath(path)) else: - return os.getcwd() + "/" + path + return f"{os.getcwd()}/{path}" if path.startswith("file://"): path = path[7:] if re.match("/[A-Za-z]:", path): diff --git a/fsspec/implementations/sftp.py b/fsspec/implementations/sftp.py index 1153e7c6a..14f171dde 100644 --- a/fsspec/implementations/sftp.py +++ b/fsspec/implementations/sftp.py @@ -48,7 +48,7 @@ def __init__(self, host, **ssh_kwargs): self._connect() def _connect(self): - logger.debug("Connecting to SFTP server %s" % self.host) + logger.debug("Connecting to SFTP server %s", self.host) self.client = paramiko.SSHClient() self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.client.connect(self.host, **self.ssh_kwargs) @@ -66,9 +66,9 @@ def _get_kwargs_from_urls(urlpath): return out def mkdir(self, path, create_parents=False, mode=511): - logger.debug("Creating folder %s" % path) + logger.debug("Creating folder %s", path) if self.exists(path): - raise FileExistsError("File exists: {}".format(path)) + raise FileExistsError(f"File exists: {path}") if create_parents: self.makedirs(path) @@ -77,18 +77,18 @@ def mkdir(self, path, create_parents=False, mode=511): def makedirs(self, path, exist_ok=False, mode=511): if self.exists(path) and not exist_ok: - raise FileExistsError("File exists: {}".format(path)) + raise FileExistsError(f"File exists: {path}") parts = path.split("/") path = "" for part in parts: - path += "/" + part + path += f"/{part}" if not self.exists(path): self.ftp.mkdir(path, mode) def rmdir(self, path): - logger.debug("Removing folder %s" % path) + logger.debug("Removing folder %s", path) self.ftp.rmdir(path) def info(self, path): @@ -122,7 +122,7 @@ def _decode_stat(stat, parent_path=None): return out def ls(self, path, detail=False): - logger.debug("Listing folder %s" % path) + logger.debug("Listing folder %s", path) stats = [self._decode_stat(stat, path) for stat in self.ftp.listdir_iter(path)] if detail: return stats @@ -131,7 +131,7 @@ def ls(self, path, detail=False): return sorted(paths) def put(self, lpath, rpath, callback=None, **kwargs): - logger.debug("Put file %s into %s" % (lpath, rpath)) + logger.debug("Put file %s into %s", lpath, rpath) self.ftp.put(lpath, rpath) def get_file(self, rpath, lpath, **kwargs): @@ -146,7 +146,7 @@ def _open(self, path, mode="rb", block_size=None, **kwargs): If 0, no buffering, if 1, line buffering, if >1, buffer that many bytes, if None use default from paramiko. 
""" - logger.debug("Opening file %s" % path) + logger.debug("Opening file %s", path) if kwargs.get("autocommit", True) is False: # writes to temporary file, move on commit path2 = "/".join([self.temppath, str(uuid.uuid4())]) @@ -167,7 +167,7 @@ def _rm(self, path): self.ftp.remove(path) def mv(self, old, new): - logger.debug("Renaming %s into %s" % (old, new)) + logger.debug("Renaming %s into %s", old, new) self.ftp.posix_rename(old, new) diff --git a/fsspec/implementations/smb.py b/fsspec/implementations/smb.py index a3816773c..5804a6bf5 100644 --- a/fsspec/implementations/smb.py +++ b/fsspec/implementations/smb.py @@ -256,13 +256,13 @@ def mv(self, path1, path2, **kwargs): def _as_unc_path(host, path): rpath = path.replace("/", "\\") - unc = "\\\\{}{}".format(host, rpath) + unc = f"\\\\{host}{rpath}" return unc def _as_temp_path(host, path, temppath): share = path.split("/")[1] - temp_file = "/{}{}/{}".format(share, temppath, uuid.uuid4()) + temp_file = f"/{share}{temppath}/{uuid.uuid4()}" unc = _as_unc_path(host, temp_file) return unc diff --git a/fsspec/implementations/tests/test_cached.py b/fsspec/implementations/tests/test_cached.py index 2f7b35861..ae5e2f0cb 100644 --- a/fsspec/implementations/tests/test_cached.py +++ b/fsspec/implementations/tests/test_cached.py @@ -307,7 +307,7 @@ def test_glob(ftp_writable, impl): def test_write(): tmp = str(tempfile.mkdtemp()) fn = tmp + "afile" - url = "simplecache::file://" + fn + url = f"simplecache::file://{fn}" with fsspec.open(url, "wb") as f: f.write(b"hello") assert fn not in f.name @@ -512,7 +512,7 @@ def test_pop(): def test_write_pickle_context(): tmp = str(tempfile.mkdtemp()) fn = tmp + "afile" - url = "simplecache::file://" + fn + url = f"simplecache::file://{fn}" with fsspec.open(url, "wb") as f: pickle.loads(pickle.dumps(f)) f.write(b"hello ") @@ -927,7 +927,7 @@ def test_with_compression(impl, compression): f.close() with fsspec.open( - "%s::%s" % (impl, fn), + f"{impl}::{fn}", "rb", compression=compression, **{impl: {"same_names": True, "cache_storage": cachedir}}, @@ -940,7 +940,7 @@ def test_with_compression(impl, compression): cachedir = tempfile.mkdtemp() with fsspec.open( - "%s::%s" % (impl, fn), + f"{impl}::{fn}", "rb", **{ impl: { @@ -1157,11 +1157,11 @@ def test_getitems_errors(tmpdir): os.makedirs(os.path.join(tmpdir, "afolder")) open(os.path.join(tmpdir, "afile"), "w").write("test") open(os.path.join(tmpdir, "afolder", "anotherfile"), "w").write("test2") - m = fsspec.get_mapper("file://" + tmpdir) + m = fsspec.get_mapper(f"file://{tmpdir}") assert m.getitems(["afile", "bfile"], on_error="omit") == {"afile": b"test"} # my code - m2 = fsspec.get_mapper("simplecache::file://" + tmpdir) + m2 = fsspec.get_mapper(f"simplecache::file://{tmpdir}") assert m2.getitems(["afile"], on_error="omit") == {"afile": b"test"} # works assert m2.getitems(["afile", "bfile"], on_error="omit") == { "afile": b"test" @@ -1171,7 +1171,7 @@ def test_getitems_errors(tmpdir): m.getitems(["afile", "bfile"]) out = m.getitems(["afile", "bfile"], on_error="return") assert isinstance(out["bfile"], KeyError) - m = fsspec.get_mapper("file://" + tmpdir, missing_exceptions=()) + m = fsspec.get_mapper(f"file://{tmpdir}", missing_exceptions=()) assert m.getitems(["afile", "bfile"], on_error="omit") == {"afile": b"test"} with pytest.raises(FileNotFoundError): m.getitems(["afile", "bfile"]) diff --git a/fsspec/implementations/tests/test_ftp.py b/fsspec/implementations/tests/test_ftp.py index af5e6cda1..d443d865b 100644 --- 
a/fsspec/implementations/tests/test_ftp.py +++ b/fsspec/implementations/tests/test_ftp.py @@ -33,7 +33,7 @@ def test_basic(ftp): host, port = ftp fs = FTPFileSystem(host, port) assert fs.ls("/", detail=False) == sorted(os.listdir(here)) - out = fs.cat("/" + os.path.basename(__file__)) + out = fs.cat(f"/{os.path.basename(__file__)}") assert out == open(__file__, "rb").read() @@ -77,10 +77,10 @@ def test_write_small(ftp_writable): def test_with_url(ftp_writable): host, port, user, pw = ftp_writable - fo = fsspec.open("ftp://{}:{}@{}:{}/out".format(user, pw, host, port), "wb") + fo = fsspec.open(f"ftp://{user}:{pw}@{host}:{port}/out", "wb") with fo as f: f.write(b"hello") - fo = fsspec.open("ftp://{}:{}@{}:{}/out".format(user, pw, host, port), "rb") + fo = fsspec.open(f"ftp://{user}:{pw}@{host}:{port}/out", "rb") with fo as f: assert f.read() == b"hello" diff --git a/fsspec/implementations/tests/test_local.py b/fsspec/implementations/tests/test_local.py index 961537520..82eba0f02 100644 --- a/fsspec/implementations/tests/test_local.py +++ b/fsspec/implementations/tests/test_local.py @@ -56,7 +56,7 @@ def filetexts(d, open=open, mode="t"): try: os.chdir(dirname) for filename, text in d.items(): - f = open(filename, "w" + mode) + f = open(filename, f"w{mode}") try: f.write(text) finally: @@ -79,7 +79,7 @@ def filetexts(d, open=open, mode="t"): def test_urlpath_inference_strips_protocol(tmpdir): tmpdir = make_path_posix(str(tmpdir)) - paths = ["/".join([tmpdir, "test.%02d.csv" % i]) for i in range(20)] + paths = ["/".join([tmpdir, f"test.{i:02d}.csv"]) for i in range(20)] for path in paths: with open(path, "wb") as f: @@ -221,7 +221,7 @@ def test_isfile(): with filetexts(files, mode="b"): for f in files.keys(): assert fs.isfile(f) - assert fs.isfile("file://" + f) + assert fs.isfile(f"file://{f}") assert not fs.isfile("not-a-file") assert not fs.isfile("file://not-a-file") @@ -293,7 +293,7 @@ def test_abs_paths(tmpdir): def test_glob_weird_characters(tmpdir, sep, chars): tmpdir = make_path_posix(str(tmpdir)) - subdir = tmpdir + sep + "test" + chars + "x" + subdir = f"{tmpdir}{sep}test{chars}x" try: os.makedirs(subdir, exist_ok=True) except OSError as e: @@ -425,11 +425,11 @@ def test_recursive_get_put(tmpdir): fs.touch(tmpdir + "/a1/a2/a3/afile") fs.touch(tmpdir + "/a1/afile") - fs.get("file://{0}/a1".format(tmpdir), tmpdir + "/b1", recursive=True) + fs.get(f"file://{tmpdir}/a1", tmpdir + "/b1", recursive=True) assert fs.isfile(tmpdir + "/b1/afile") assert fs.isfile(tmpdir + "/b1/a2/a3/afile") - fs.put(tmpdir + "/b1", "file://{0}/c1".format(tmpdir), recursive=True) + fs.put(tmpdir + "/b1", f"file://{tmpdir}/c1", recursive=True) assert fs.isfile(tmpdir + "/c1/afile") assert fs.isfile(tmpdir + "/c1/a2/a3/afile") @@ -652,7 +652,7 @@ def test_iterable(tmpdir): fn = os.path.join(tmpdir, "test") with open(fn, "wb") as f: f.write(data) - of = fsspec.open("file://%s" % fn, "rb") + of = fsspec.open(f"file://{fn}", "rb") with of as f: out = list(f) assert b"".join(out) == data diff --git a/fsspec/implementations/tests/test_reference.py b/fsspec/implementations/tests/test_reference.py index 0487459f4..2486d7ca7 100644 --- a/fsspec/implementations/tests/test_reference.py +++ b/fsspec/implementations/tests/test_reference.py @@ -338,8 +338,8 @@ def test_multi_fs_provided(m, tmpdir): # local URLs are file:// by default refs = { "a": b"data", - "b": ("file://" + str(real), 0, 5), - "c/d": ("file://" + str(real), 1, 6), + "b": (f"file://{real}", 0, 5), + "c/d": (f"file://{real}", 1, 6), "c/e": 
["memory://afile"], } @@ -361,8 +361,8 @@ def test_multi_fs_created(m, tmpdir): # local URLs are file:// by default refs = { "a": b"data", - "b": ("file://" + str(real), 0, 5), - "c/d": ("file://" + str(real), 1, 6), + "b": (f"file://{real}", 0, 5), + "c/d": (f"file://{real}", 1, 6), "c/e": ["memory://afile"], } diff --git a/fsspec/implementations/tests/test_sftp.py b/fsspec/implementations/tests/test_sftp.py index 6fe1ea88a..a38104953 100644 --- a/fsspec/implementations/tests/test_sftp.py +++ b/fsspec/implementations/tests/test_sftp.py @@ -12,7 +12,7 @@ def stop_docker(name): - cmd = shlex.split('docker ps -a -q --filter "name=%s"' % name) + cmd = shlex.split(f'docker ps -a -q --filter "name={name}"') cid = subprocess.check_output(cmd).strip().decode() if cid: subprocess.call(["docker", "rm", "-f", cid]) @@ -45,7 +45,7 @@ def ssh(): ] name = "fsspec_sftp" stop_docker(name) - cmd = "docker run -d -p 9200:22 --name {} ubuntu:16.04 sleep 9000".format(name) + cmd = f"docker run -d -p 9200:22 --name {name} ubuntu:16.04 sleep 9000" cid = subprocess.check_output(shlex.split(cmd)).strip().decode() for cmd in cmds: subprocess.call(["docker", "exec", cid] + shlex.split(cmd)) @@ -128,11 +128,11 @@ def netloc(ssh): host = ssh.get("host") port = ssh.get("port") userpass = ( - username + ((":" + password) if password is not None else "") + "@" + f"{username}:{password if password is not None else ''}@" if username is not None else "" ) - netloc = host + ((":" + str(port)) if port is not None else "") + netloc = f"{host}:{port if port is not None else ''}" return userpass + netloc @@ -153,13 +153,13 @@ def test_simple_with_tar(ssh, netloc, tmp_path, root_path): tar_filename = make_tarfile(files_to_pack, tmp_path) f = fsspec.get_filesystem_class("sftp")(**ssh) - f.mkdirs(root_path + "deeper", exist_ok=True) + f.mkdirs(f"{root_path}deeper", exist_ok=True) try: - remote_tar_filename = root_path + "deeper/somefile.tar" + remote_tar_filename = f"{root_path}deeper/somefile.tar" with f.open(remote_tar_filename, mode="wb") as wfd: with open(tar_filename, mode="rb") as rfd: wfd.write(rfd.read()) - fs = fsspec.open("tar::ssh://" + netloc + remote_tar_filename).fs + fs = fsspec.open(f"tar::ssh://{netloc}{remote_tar_filename}").fs files = fs.find("/") assert files == files_to_pack finally: diff --git a/fsspec/implementations/tests/test_webhdfs.py b/fsspec/implementations/tests/test_webhdfs.py index 0d919731e..f55a6d6dc 100644 --- a/fsspec/implementations/tests/test_webhdfs.py +++ b/fsspec/implementations/tests/test_webhdfs.py @@ -20,7 +20,7 @@ def hdfs_cluster(): except FileNotFoundError: pytest.skip("htcluster not found") except subprocess.CalledProcessError as ex: - pytest.skip("htcluster failed: " + ex.output.decode()) + pytest.skip(f"htcluster failed: {ex.output.decode()}") cmd1 = shlex.split("htcluster startup --image base") subprocess.check_output(cmd1) try: diff --git a/fsspec/implementations/webhdfs.py b/fsspec/implementations/webhdfs.py index cc595934f..2a57170ea 100644 --- a/fsspec/implementations/webhdfs.py +++ b/fsspec/implementations/webhdfs.py @@ -89,9 +89,7 @@ def __init__( if self._cached: return super().__init__(**kwargs) - self.url = "{protocol}://{host}:{port}/webhdfs/v1".format( - protocol="https" if use_https else "http", host=host, port=port - ) + self.url = f"{'https' if use_https else 'http'}://{host}:{port}/webhdfs/v1" self.kerb = kerberos self.kerb_kwargs = kerb_kwargs or {} self.pars = {} @@ -115,7 +113,7 @@ def __init__( ) self._connect() - self._fsid = "webhdfs_" + tokenize(host, port) 
+ self._fsid = f"webhdfs_{tokenize(host, port)}" @property def fsid(self): diff --git a/fsspec/mapping.py b/fsspec/mapping.py index 2b75c2e41..b9822ae17 100644 --- a/fsspec/mapping.py +++ b/fsspec/mapping.py @@ -54,8 +54,8 @@ def __init__(self, root, fs, check=False, create=False, missing_exceptions=None) if check: if not self.fs.exists(root): raise ValueError( - "Path %s does not exist. Create " - " with the ``create=True`` keyword" % root + f"Path {root} does not exist. Create " + f" with the ``create=True`` keyword" ) self.fs.touch(root + "/a") self.fs.rm(root + "/a") diff --git a/fsspec/registry.py b/fsspec/registry.py index cf1deb95a..d1614f130 100644 --- a/fsspec/registry.py +++ b/fsspec/registry.py @@ -38,20 +38,20 @@ def register_implementation(name, cls, clobber=False, errtxt=None): if name in known_implementations and clobber is False: if cls != known_implementations[name]["class"]: raise ValueError( - "Name (%s) already in the known_implementations and clobber " - "is False" % name + f"Name ({name}) already in the known_implementations and clobber " + f"is False" ) else: known_implementations[name] = { "class": cls, - "err": errtxt or "%s import failed for protocol %s" % (cls, name), + "err": errtxt or f"{cls} import failed for protocol {name}", } else: if name in registry and clobber is False: if _registry[name] is not cls: raise ValueError( - "Name (%s) already in the registry and clobber is False" % name + f"Name ({name}) already in the registry and clobber is False" ) else: _registry[name] = cls @@ -229,7 +229,7 @@ def get_filesystem_class(protocol): if protocol not in registry: if protocol not in known_implementations: - raise ValueError("Protocol not known: %s" % protocol) + raise ValueError(f"Protocol not known: {protocol}") bit = known_implementations[protocol] try: register_implementation(protocol, _import_class(bit["class"])) diff --git a/fsspec/spec.py b/fsspec/spec.py index 417adc33f..2af44f780 100644 --- a/fsspec/spec.py +++ b/fsspec/spec.py @@ -1187,9 +1187,7 @@ def expand_path(self, path, recursive=False, maxdepth=None, **kwargs): def mv(self, path1, path2, recursive=False, maxdepth=None, **kwargs): """Move file(s) from one location to another""" if path1 == path2: - logger.debug( - "%s mv: The paths are the same, so no files were moved." 
% (self)
-            )
+            logger.debug("%s mv: The paths are the same, so no files were moved.", self)
         else:
             self.copy(path1, path2, recursive=recursive, maxdepth=maxdepth)
             self.rm(path1, recursive=recursive)
@@ -1744,7 +1742,7 @@ def seek(self, loc, whence=0):
         elif whence == 2:
             nloc = self.size + loc
         else:
-            raise ValueError("invalid whence (%s, should be 0, 1 or 2)" % whence)
+            raise ValueError(f"invalid whence ({whence}, should be 0, 1 or 2)")
         if nloc < 0:
             raise ValueError("Seek before start of file")
         self.loc = nloc
@@ -1851,7 +1849,7 @@ def read(self, length=-1):
             length = self.size - self.loc
         if self.closed:
             raise ValueError("I/O operation on closed file.")
-        logger.debug("%s read: %i - %i" % (self, self.loc, self.loc + length))
+        logger.debug("%s read: %i - %i", self, self.loc, self.loc + length)
         if length == 0:
             # don't even bother calling fetch
             return b""
@@ -1966,7 +1964,7 @@ def __del__(self):
             self.close()
 
     def __str__(self):
-        return "<File-like object %s, %s>" % (type(self.fs).__name__, self.path)
+        return f"<File-like object {type(self.fs).__name__}, {self.path}>"
 
     __repr__ = __str__
 
diff --git a/fsspec/tests/conftest.py b/fsspec/tests/conftest.py
index 2c7a1742b..178898efe 100644
--- a/fsspec/tests/conftest.py
+++ b/fsspec/tests/conftest.py
@@ -11,7 +11,7 @@
 requests = pytest.importorskip("requests")
 port = 9898
 data = b"\n".join([b"some test data"] * 1000)
-realfile = "http://127.0.0.1:%i/index/realfile" % port
+realfile = f"http://127.0.0.1:{port}/index/realfile"
 index = b'<a href="%s">Link</a>' % realfile.encode()
 listing = open(
     os.path.join(os.path.dirname(__file__), "data", "listing.html"), "rb"
@@ -61,7 +61,7 @@ def do_GET(self):
             return self._respond(404)
 
         status = 200
-        content_range = "bytes 0-%i/%i" % (len(file_data) - 1, len(file_data))
+        content_range = f"bytes 0-{len(file_data) - 1}/{len(file_data)}"
         if ("Range" in self.headers) and ("ignore_range" not in self.headers):
             ran = self.headers["Range"]
             b, ran = ran.split("=")
@@ -139,7 +139,7 @@ def do_HEAD(self):
             self._respond(200, response_headers)
         elif "give_range" in self.headers:
             self._respond(
-                200, {"Content-Range": "0-%i/%i" % (len(file_data) - 1, len(file_data))}
+                200, {"Content-Range": f"0-{len(file_data) - 1}/{len(file_data)}"}
             )
         elif "give_etag" in self.headers:
             self._respond(200, {"ETag": "xxx"})
@@ -155,7 +155,7 @@ def serve():
     th.daemon = True
     th.start()
    try:
-        yield "http://127.0.0.1:%i" % port
+        yield f"http://127.0.0.1:{port}"
     finally:
         httpd.socket.close()
         httpd.shutdown()
diff --git a/fsspec/tests/test_core.py b/fsspec/tests/test_core.py
index e2d4fb995..6e8e1c751 100644
--- a/fsspec/tests/test_core.py
+++ b/fsspec/tests/test_core.py
@@ -106,7 +106,7 @@ def test_open_local():
     f1 = os.path.join(d1, "f1")
     open(f1, "w").write("test1")
     d2 = str(tempfile.mkdtemp())
-    fn = open_local("simplecache://" + f1, cache_storage=d2, target_protocol="file")
+    fn = open_local(f"simplecache://{f1}", cache_storage=d2, target_protocol="file")
     assert isinstance(fn, str)
     assert open(fn).read() == "test1"
     assert d2 in fn
diff --git a/fsspec/tests/test_mapping.py b/fsspec/tests/test_mapping.py
index 581425039..fb6212a1e 100644
--- a/fsspec/tests/test_mapping.py
+++ b/fsspec/tests/test_mapping.py
@@ -17,13 +17,13 @@ def test_mapping_prefix(tmpdir):
     open(os.path.join(tmpdir, "afile"), "w").write("test")
     open(os.path.join(tmpdir, "afolder", "anotherfile"), "w").write("test2")
 
-    m = fsspec.get_mapper("file://" + tmpdir)
+    m = fsspec.get_mapper(f"file://{tmpdir}")
     assert "afile" in m
     assert m["afolder/anotherfile"] == b"test2"
 
     fs = fsspec.filesystem("file")
     m2 = fs.get_mapper(tmpdir)
-    m3 = fs.get_mapper("file://" + tmpdir)
+    m3 =
fs.get_mapper(f"file://{tmpdir}") assert m == m2 == m3 @@ -33,13 +33,13 @@ def test_getitems_errors(tmpdir): os.makedirs(os.path.join(tmpdir, "afolder")) open(os.path.join(tmpdir, "afile"), "w").write("test") open(os.path.join(tmpdir, "afolder", "anotherfile"), "w").write("test2") - m = fsspec.get_mapper("file://" + tmpdir) + m = fsspec.get_mapper(f"file://{tmpdir}") assert m.getitems(["afile", "bfile"], on_error="omit") == {"afile": b"test"} with pytest.raises(KeyError): m.getitems(["afile", "bfile"]) out = m.getitems(["afile", "bfile"], on_error="return") assert isinstance(out["bfile"], KeyError) - m = fsspec.get_mapper("file://" + tmpdir, missing_exceptions=()) + m = fsspec.get_mapper(f"file://{tmpdir}", missing_exceptions=()) assert m.getitems(["afile", "bfile"], on_error="omit") == {"afile": b"test"} with pytest.raises(FileNotFoundError): m.getitems(["afile", "bfile"]) diff --git a/fsspec/tests/test_spec.py b/fsspec/tests/test_spec.py index 38e6dabb1..f76bbef76 100644 --- a/fsspec/tests/test_spec.py +++ b/fsspec/tests/test_spec.py @@ -420,7 +420,7 @@ def __getitem__(self, name): for item in self._fs_contents: if item["name"] == name: return item - raise IndexError("{name} not found!".format(name=name)) + raise IndexError(f"{name} not found!") def ls(self, path, detail=True, refresh=True, **kwargs): if kwargs.pop("strip_proto", True): diff --git a/fsspec/tests/test_utils.py b/fsspec/tests/test_utils.py index c83eeea0c..b5732e9f1 100644 --- a/fsspec/tests/test_utils.py +++ b/fsspec/tests/test_utils.py @@ -206,7 +206,7 @@ def test_infer_options(): # - Parsing doesn't lowercase the bucket # - The bucket is included in path for protocol in ["s3", "s3a", "gcs", "gs"]: - options = infer_storage_options("%s://Bucket-name.com/test.csv" % protocol) + options = infer_storage_options(f"{protocol}://Bucket-name.com/test.csv") assert options["path"] == "Bucket-name.com/test.csv" with pytest.raises(KeyError): diff --git a/fsspec/utils.py b/fsspec/utils.py index 593a40d2a..9e52ffe51 100644 --- a/fsspec/utils.py +++ b/fsspec/utils.py @@ -106,8 +106,8 @@ def update_storage_options(options, inherited=None): for collision in collisions: if options.get(collision) != inherited.get(collision): raise KeyError( - "Collision between inferred and specified storage " - "option:\n%s" % collision + f"Collision between inferred and specified storage " + f"option:\n{collision}" ) options.update(inherited)