Revert f-strings with logging.Logger.debug()
DimitriPapadopoulos committed Sep 28, 2023
1 parent 247332b commit f420f9d
Showing 6 changed files with 19 additions and 18 deletions.
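
For context, the pattern applied throughout this commit is the switch from eager f-string interpolation to the standard library's lazy %-style formatting, where logging only builds the message if the record is actually going to be emitted. A minimal sketch of both forms (illustrative only; the logger name and path value below are not taken from the repository):

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("example")  # illustrative logger, not fsspec's
path = "memory://data.csv"  # placeholder value

# Eager: the f-string is interpolated before debug() is even called,
# although DEBUG is disabled here and the message is discarded.
logger.debug(f"Opening local copy of {path}")

# Lazy: the format string and its arguments are passed separately, so the
# interpolation is skipped when DEBUG is disabled and otherwise deferred
# until the record is handled.
logger.debug("Opening local copy of %s", path)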
2 changes: 1 addition & 1 deletion fsspec/caching.py
@@ -726,7 +726,7 @@ def _fetch_block(self, block_number, log_info="sync"):

start = block_number * self.blocksize
end = start + self.blocksize
- logger.info(f"BlockCache fetching block ({log_info}) {block_number}")
+ logger.info("BlockCache fetching block (%s) %d", log_info, block_number)
block_contents = super()._fetch(start, end)
return block_contents

3 changes: 2 additions & 1 deletion fsspec/gui.py
@@ -153,7 +153,8 @@ def _emit(self, sig, value=None):
break
except Exception as e:
logger.exception(
f"Exception ({e}) while executing callback for signal: {sig}"
"Exception (%s) while executing callback for signal: %s"
"" % (e, sig)
)

def show(self, threads=False):
12 changes: 6 additions & 6 deletions fsspec/implementations/cached.py
@@ -283,10 +283,10 @@ def _open(
hash, blocks = detail["fn"], detail["blocks"]
if blocks is True:
# stored file is complete
- logger.debug(f"Opening local copy of {path}")
+ logger.debug("Opening local copy of %s", path)
return open(fn, mode)
# TODO: action where partial file exists in read-only cache
- logger.debug(f"Opening partially cached copy of {path}")
+ logger.debug("Opening partially cached copy of %s", path)
else:
hash = self._mapper(path)
fn = os.path.join(self.storage[-1], hash)
@@ -299,7 +299,7 @@ def _open(
"uid": self.fs.ukey(path),
}
self._metadata.update_file(path, detail)
- logger.debug(f"Creating local sparse file for {path}")
+ logger.debug("Creating local sparse file for %s", path)

# call target filesystems open
self._mkcache()
@@ -547,7 +547,7 @@ def _make_local_details(self, path):
"uid": self.fs.ukey(path),
}
self._metadata.update_file(path, detail)
- logger.debug(f"Copying {path} to local cache")
+ logger.debug("Copying %s to local cache", path)
return fn

def cat(
@@ -604,7 +604,7 @@ def _open(self, path, mode="rb", **kwargs):
detail, fn = detail
_, blocks = detail["fn"], detail["blocks"]
if blocks is True:
- logger.debug(f"Opening local copy of {path}")
+ logger.debug("Opening local copy of %s", path)

# In order to support downstream filesystems to be able to
# infer the compression from the original filename, like
@@ -700,7 +700,7 @@ def _open(self, path, mode="rb", **kwargs):

sha = self._mapper(path)
fn = os.path.join(self.storage[-1], sha)
- logger.debug(f"Copying {path} to local cache")
+ logger.debug("Copying %s to local cache", path)
kwargs["mode"] = mode

self._mkcache()
2 changes: 1 addition & 1 deletion fsspec/implementations/http.py
@@ -831,7 +831,7 @@ async def _file_info(url, session, size_policy="head", **kwargs):
Default operation is to explicitly allow redirects and use encoding
'identity' (no compression) to get the true size of the target.
"""
- logger.debug(f"Retrieve file size for {url}")
+ logger.debug("Retrieve file size for %s", url)
kwargs = kwargs.copy()
ar = kwargs.pop("allow_redirects", True)
head = kwargs.get("headers", {}).copy()
14 changes: 7 additions & 7 deletions fsspec/implementations/sftp.py
@@ -48,7 +48,7 @@ def __init__(self, host, **ssh_kwargs):
self._connect()

def _connect(self):
- logger.debug(f"Connecting to SFTP server {self.host}")
+ logger.debug("Connecting to SFTP server %s", self.host)
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(self.host, **self.ssh_kwargs)
@@ -66,7 +66,7 @@ def _get_kwargs_from_urls(urlpath):
return out

def mkdir(self, path, create_parents=False, mode=511):
- logger.debug(f"Creating folder {path}")
+ logger.debug("Creating folder %s", path)
if self.exists(path):
raise FileExistsError(f"File exists: {path}")

@@ -88,7 +88,7 @@ def makedirs(self, path, exist_ok=False, mode=511):
self.ftp.mkdir(path, mode)

def rmdir(self, path):
- logger.debug(f"Removing folder %s", path)
+ logger.debug("Removing folder %s", path)
self.ftp.rmdir(path)

def info(self, path):
@@ -122,7 +122,7 @@ def _decode_stat(stat, parent_path=None):
return out

def ls(self, path, detail=False):
- logger.debug(f"Listing folder {path}")
+ logger.debug("Listing folder %s", path)
stats = [self._decode_stat(stat, path) for stat in self.ftp.listdir_iter(path)]
if detail:
return stats
@@ -131,7 +131,7 @@ def ls(self, path, detail=False):
return sorted(paths)

def put(self, lpath, rpath, callback=None, **kwargs):
- logger.debug(f"Put file {lpath} into {rpath}")
+ logger.debug("Put file %s into %s", lpath, rpath)
self.ftp.put(lpath, rpath)

def get_file(self, rpath, lpath, **kwargs):
@@ -146,7 +146,7 @@ def _open(self, path, mode="rb", block_size=None, **kwargs):
If 0, no buffering, if 1, line buffering, if >1, buffer that many
bytes, if None use default from paramiko.
"""
- logger.debug(f"Opening file {path}")
+ logger.debug("Opening file %s", path)
if kwargs.get("autocommit", True) is False:
# writes to temporary file, move on commit
path2 = "/".join([self.temppath, str(uuid.uuid4())])
@@ -167,7 +167,7 @@ def _rm(self, path):
self.ftp.remove(path)

def mv(self, old, new):
- logger.debug(f"Renaming {old} into {new}")
+ logger.debug("Renaming %s into %s", old, new)
self.ftp.posix_rename(old, new)


4 changes: 2 additions & 2 deletions fsspec/spec.py
@@ -1187,7 +1187,7 @@ def expand_path(self, path, recursive=False, maxdepth=None, **kwargs):
def mv(self, path1, path2, recursive=False, maxdepth=None, **kwargs):
"""Move file(s) from one location to another"""
if path1 == path2:
- logger.debug(f"{self} mv: The paths are the same, so no files were moved.")
+ logger.debug("%s mv: The paths are the same, so no files were moved.", self)
else:
self.copy(path1, path2, recursive=recursive, maxdepth=maxdepth)
self.rm(path1, recursive=recursive)
@@ -1849,7 +1849,7 @@ def read(self, length=-1):
length = self.size - self.loc
if self.closed:
raise ValueError("I/O operation on closed file.")
- logger.debug(f"{self} read: {self.loc} - {self.loc + length}")
+ logger.debug("%s read: %i - %i", self, self.loc, self.loc + length)
if length == 0:
# don't even bother calling fetch
return b""
