Skip to content

Commit

Permalink
Backport PR pandas-dev#56617: TYP: some return types from ruff
Browse files Browse the repository at this point in the history
  • Loading branch information
twoertwein authored and meeseeksmachine committed Dec 26, 2023
1 parent 8f8b514 commit 06c0081
Show file tree
Hide file tree
Showing 16 changed files with 38 additions and 35 deletions.
2 changes: 2 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ repos:
# TODO: remove autofix-only rules when they are checked by ruff
name: ruff-selected-autofixes
alias: ruff-selected-autofixes
files: ^pandas
exclude: ^pandas/tests
args: [--select, "ANN001,ANN2", --fix-only, --exit-non-zero-on-fix]
- repo: https://github.com/jendrikseipp/vulture
rev: 'v2.10'
Expand Down
2 changes: 1 addition & 1 deletion doc/source/whatsnew/v2.2.0.rst
Original file line number Diff line number Diff line change
Expand Up @@ -431,7 +431,7 @@ Optional libraries below the lowest tested version may still work, but are not c
+-----------------+-----------------+---------+
| Package | Minimum Version | Changed |
+=================+=================+=========+
| mypy (dev) | 1.7.1 | X |
| mypy (dev) | 1.8.0 | X |
+-----------------+-----------------+---------+
| | | X |
+-----------------+-----------------+---------+
Expand Down
2 changes: 1 addition & 1 deletion environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ dependencies:

# code checks
- flake8=6.1.0 # run in subprocess over docstring examples
- mypy=1.7.1 # pre-commit uses locally installed mypy
- mypy=1.8.0 # pre-commit uses locally installed mypy
- tokenize-rt # scripts/check_for_inconsistent_pandas_namespace.py
- pre-commit>=3.6.0

Expand Down
7 changes: 4 additions & 3 deletions pandas/_testing/asserters.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from typing import (
TYPE_CHECKING,
Literal,
NoReturn,
cast,
)

Expand Down Expand Up @@ -143,7 +144,7 @@ def assert_almost_equal(
)


def _check_isinstance(left, right, cls):
def _check_isinstance(left, right, cls) -> None:
"""
Helper method for our assert_* methods that ensures that
the two objects being compared have the right type before
Expand Down Expand Up @@ -576,7 +577,7 @@ def assert_timedelta_array_equal(

def raise_assert_detail(
obj, message, left, right, diff=None, first_diff=None, index_values=None
):
) -> NoReturn:
__tracebackhide__ = True

msg = f"""{obj} are different
Expand Down Expand Up @@ -664,7 +665,7 @@ def _get_base(obj):
if left_base is right_base:
raise AssertionError(f"{repr(left_base)} is {repr(right_base)}")

def _raise(left, right, err_msg):
def _raise(left, right, err_msg) -> NoReturn:
if err_msg is None:
if left.shape != right.shape:
raise_assert_detail(
Expand Down
2 changes: 1 addition & 1 deletion pandas/_version.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,7 +386,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command):
return pieces


def plus_or_dot(pieces):
def plus_or_dot(pieces) -> str:
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
Expand Down
4 changes: 2 additions & 2 deletions pandas/core/computation/expr.py
Original file line number Diff line number Diff line change
Expand Up @@ -695,8 +695,8 @@ def visit_Call(self, node, side=None, **kwargs):
if not isinstance(key, ast.keyword):
# error: "expr" has no attribute "id"
raise ValueError(
"keyword error in function call " # type: ignore[attr-defined]
f"'{node.func.id}'"
"keyword error in function call "
f"'{node.func.id}'" # type: ignore[attr-defined]
)

if key.arg:
Expand Down
8 changes: 4 additions & 4 deletions pandas/io/html.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,7 @@ def _attr_getter(self, obj, attr):
# Both lxml and BeautifulSoup have the same implementation:
return obj.get(attr)

def _href_getter(self, obj):
def _href_getter(self, obj) -> str | None:
"""
Return a href if the DOM node contains a child <a> or None.
Expand Down Expand Up @@ -392,7 +392,7 @@ def _parse_tables(self, document, match, attrs):
"""
raise AbstractMethodError(self)

def _equals_tag(self, obj, tag):
def _equals_tag(self, obj, tag) -> bool:
"""
Return whether an individual DOM node matches a tag
Expand Down Expand Up @@ -629,7 +629,7 @@ def _href_getter(self, obj) -> str | None:
def _text_getter(self, obj):
return obj.text

def _equals_tag(self, obj, tag):
def _equals_tag(self, obj, tag) -> bool:
return obj.name == tag

def _parse_td(self, row):
Expand Down Expand Up @@ -758,7 +758,7 @@ def _parse_tables(self, document, match, kwargs):
raise ValueError(f"No tables found matching regex {repr(pattern)}")
return tables

def _equals_tag(self, obj, tag):
def _equals_tag(self, obj, tag) -> bool:
return obj.tag == tag

def _build_doc(self):
Expand Down
10 changes: 5 additions & 5 deletions pandas/io/json/_json.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,7 +255,7 @@ def __init__(
self.is_copy = None
self._format_axes()

def _format_axes(self):
def _format_axes(self) -> None:
raise AbstractMethodError(self)

def write(self) -> str:
Expand Down Expand Up @@ -287,7 +287,7 @@ def obj_to_write(self) -> NDFrame | Mapping[IndexLabel, Any]:
else:
return self.obj

def _format_axes(self):
def _format_axes(self) -> None:
if not self.obj.index.is_unique and self.orient == "index":
raise ValueError(f"Series index must be unique for orient='{self.orient}'")

Expand All @@ -304,7 +304,7 @@ def obj_to_write(self) -> NDFrame | Mapping[IndexLabel, Any]:
obj_to_write = self.obj
return obj_to_write

def _format_axes(self):
def _format_axes(self) -> None:
"""
Try to format axes if they are datelike.
"""
Expand Down Expand Up @@ -1193,7 +1193,7 @@ def parse(self):
self._try_convert_types()
return self.obj

def _parse(self):
def _parse(self) -> None:
raise AbstractMethodError(self)

@final
Expand All @@ -1217,7 +1217,7 @@ def _convert_axes(self) -> None:
new_axis = Index(new_ser, dtype=new_ser.dtype, copy=False)
setattr(self.obj, axis_name, new_axis)

def _try_convert_types(self):
def _try_convert_types(self) -> None:
raise AbstractMethodError(self)

@final
Expand Down
6 changes: 3 additions & 3 deletions pandas/io/parsers/arrow_parser_wrapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def __init__(self, src: ReadBuffer[bytes], **kwds) -> None:

self._parse_kwds()

def _parse_kwds(self):
def _parse_kwds(self) -> None:
"""
Validates keywords before passing to pyarrow.
"""
Expand Down Expand Up @@ -104,7 +104,7 @@ def _get_pyarrow_options(self) -> None:
] = None # PyArrow raises an exception by default
elif on_bad_lines == ParserBase.BadLineHandleMethod.WARN:

def handle_warning(invalid_row):
def handle_warning(invalid_row) -> str:
warnings.warn(
f"Expected {invalid_row.expected_columns} columns, but found "
f"{invalid_row.actual_columns}: {invalid_row.text}",
Expand Down Expand Up @@ -219,7 +219,7 @@ def _finalize_pandas_output(self, frame: DataFrame) -> DataFrame:
raise ValueError(e)
return frame

def _validate_usecols(self, usecols):
def _validate_usecols(self, usecols) -> None:
if lib.is_list_like(usecols) and not all(isinstance(x, str) for x in usecols):
raise ValueError(
"The pyarrow engine does not allow 'usecols' to be integer "
Expand Down
2 changes: 1 addition & 1 deletion pandas/io/pytables.py
Original file line number Diff line number Diff line change
Expand Up @@ -1707,7 +1707,7 @@ def info(self) -> str:
# ------------------------------------------------------------------------
# private methods

def _check_if_open(self):
def _check_if_open(self) -> None:
if not self.is_open:
raise ClosedFileError(f"{self._path} file is not open!")

Expand Down
2 changes: 1 addition & 1 deletion pandas/io/sas/sas_xport.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,7 +288,7 @@ def close(self) -> None:
def _get_row(self):
return self.filepath_or_buffer.read(80).decode()

def _read_header(self):
def _read_header(self) -> None:
self.filepath_or_buffer.seek(0)

# read file header
Expand Down
8 changes: 4 additions & 4 deletions pandas/io/sql.py
Original file line number Diff line number Diff line change
Expand Up @@ -1514,7 +1514,7 @@ def _create_sql_schema(
keys: list[str] | None = None,
dtype: DtypeArg | None = None,
schema: str | None = None,
):
) -> str:
pass


Expand Down Expand Up @@ -2073,7 +2073,7 @@ def _create_sql_schema(
keys: list[str] | None = None,
dtype: DtypeArg | None = None,
schema: str | None = None,
):
) -> str:
table = SQLTable(
table_name,
self,
Expand Down Expand Up @@ -2433,7 +2433,7 @@ def _create_sql_schema(
keys: list[str] | None = None,
dtype: DtypeArg | None = None,
schema: str | None = None,
):
) -> str:
raise NotImplementedError("not implemented for adbc")


Expand Down Expand Up @@ -2879,7 +2879,7 @@ def _create_sql_schema(
keys=None,
dtype: DtypeArg | None = None,
schema: str | None = None,
):
) -> str:
table = SQLiteTable(
table_name,
self,
Expand Down
2 changes: 1 addition & 1 deletion pandas/io/stata.py
Original file line number Diff line number Diff line change
Expand Up @@ -687,7 +687,7 @@ def __init__(

self._prepare_value_labels()

def _prepare_value_labels(self):
def _prepare_value_labels(self) -> None:
"""Encode value labels."""

self.text_len = 0
Expand Down
8 changes: 4 additions & 4 deletions pandas/plotting/_matplotlib/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -662,7 +662,7 @@ def _ensure_frame(self, data) -> DataFrame:
return data

@final
def _compute_plot_data(self):
def _compute_plot_data(self) -> None:
data = self.data

# GH15079 reconstruct data if by is defined
Expand Down Expand Up @@ -699,7 +699,7 @@ def _compute_plot_data(self):

self.data = numeric_data.apply(type(self)._convert_to_ndarray)

def _make_plot(self, fig: Figure):
def _make_plot(self, fig: Figure) -> None:
raise AbstractMethodError(self)

@final
Expand Down Expand Up @@ -745,7 +745,7 @@ def _post_plot_logic(self, ax: Axes, data) -> None:
"""Post process for each axes. Overridden in child classes"""

@final
def _adorn_subplots(self, fig: Figure):
def _adorn_subplots(self, fig: Figure) -> None:
"""Common post process unrelated to data"""
if len(self.axes) > 0:
all_axes = self._get_subplots(fig)
Expand Down Expand Up @@ -1323,7 +1323,7 @@ def __init__(
c = self.data.columns[c]
self.c = c

def _make_plot(self, fig: Figure):
def _make_plot(self, fig: Figure) -> None:
x, y, c, data = self.x, self.y, self.c, self.data
ax = self.axes[0]

Expand Down
6 changes: 3 additions & 3 deletions pandas/util/_validators.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
BoolishNoneT = TypeVar("BoolishNoneT", bool, int, None)


def _check_arg_length(fname, args, max_fname_arg_count, compat_args):
def _check_arg_length(fname, args, max_fname_arg_count, compat_args) -> None:
"""
Checks whether 'args' has length of at most 'compat_args'. Raises
a TypeError if that is not the case, similar to in Python when a
Expand All @@ -46,7 +46,7 @@ def _check_arg_length(fname, args, max_fname_arg_count, compat_args):
)


def _check_for_default_values(fname, arg_val_dict, compat_args):
def _check_for_default_values(fname, arg_val_dict, compat_args) -> None:
"""
Check that the keys in `arg_val_dict` are mapped to their
default values as specified in `compat_args`.
Expand Down Expand Up @@ -125,7 +125,7 @@ def validate_args(fname, args, max_fname_arg_count, compat_args) -> None:
_check_for_default_values(fname, kwargs, compat_args)


def _check_for_invalid_keys(fname, kwargs, compat_args):
def _check_for_invalid_keys(fname, kwargs, compat_args) -> None:
"""
Checks whether 'kwargs' contains any keys that are not
in 'compat_args' and raises a TypeError if there is one.
Expand Down
2 changes: 1 addition & 1 deletion requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ moto
flask
asv>=0.6.1
flake8==6.1.0
mypy==1.7.1
mypy==1.8.0
tokenize-rt
pre-commit>=3.6.0
gitpython
Expand Down

0 comments on commit 06c0081

Please sign in to comment.