Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

CLN: changes for spelling mistakes #56474

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pandas/core/dtypes/astype.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ def astype_array(values: ArrayLike, dtype: DtypeObj, copy: bool = False) -> Arra
else:
values = _astype_nansafe(values, dtype, copy=copy)

# in pandas we don't store numpy str dtypes, so convert to object
# in pandas, we don't store numpy str dtypes, so convert to object
if isinstance(dtype, np.dtype) and issubclass(values.dtype.type, str):
values = np.array(values, dtype=object)

Expand Down
4 changes: 2 additions & 2 deletions pandas/core/dtypes/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ def __eq__(self, other: object) -> bool:
By default, 'other' is considered equal if either

* it's a string matching 'self.name'.
* it's an instance of this type and all of the attributes
* it's an instance of this type and all the attributes
in ``self._metadata`` are equal between `self` and `other`.

Parameters
Expand Down Expand Up @@ -339,7 +339,7 @@ def _is_numeric(self) -> bool:
"""
Whether columns with this dtype should be considered numeric.

By default ExtensionDtypes are assumed to be non-numeric.
By default, ExtensionDtypes are assumed to be non-numeric.
They'll be excluded from operations that exclude non-numeric
columns, like (groupby) reductions, plotting, etc.
"""
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/dtypes/cast.py
Original file line number Diff line number Diff line change
Expand Up @@ -454,7 +454,7 @@ def maybe_cast_pointwise_result(
dtype : np.dtype or ExtensionDtype
Input Series from which result was calculated.
numeric_only : bool, default False
Whether to cast only numerics or datetimes as well.
Whether to cast only numerics or datetimes as well.
same_dtype : bool, default True
Specify dtype when calling _from_sequence

Expand Down
2 changes: 1 addition & 1 deletion pandas/core/dtypes/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -1120,7 +1120,7 @@ def is_any_real_numeric_dtype(arr_or_dtype) -> bool:
Returns
-------
boolean
Whether or not the array or dtype is of a real number dtype.
Whether the array or dtype is of a real number dtype.

Examples
--------
Expand Down
4 changes: 2 additions & 2 deletions pandas/core/dtypes/concat.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ def concat_compat(
# i.e. isinstance(to_concat[0], ExtensionArray)
to_concat_eas = cast("Sequence[ExtensionArray]", to_concat)
cls = type(to_concat[0])
# GH#53640: eg. for datetime array, axis=1 but 0 is default
# GH#53640: e.g. for datetime array, axis=1 but 0 is default
# However, class method `_concat_same_type()` for some classes
# may not support the `axis` keyword
if ea_compat_axis or axis == 0:
Expand Down Expand Up @@ -337,7 +337,7 @@ def _maybe_unwrap(x):
else:
# ordered - to show a proper error message
if all(c.ordered for c in to_union):
msg = "to union ordered Categoricals, all categories must be the same"
msg = "to union ordered Categoricals, all categories must be the same"
raise TypeError(msg)
raise TypeError("Categorical.ordered must be the same")

Expand Down
2 changes: 1 addition & 1 deletion pandas/core/groupby/categorical.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ def recode_for_groupby(
c: Categorical, sort: bool, observed: bool
) -> tuple[Categorical, Categorical | None]:
"""
Code the categories to ensure we can groupby for categoricals.
Code the categories to ensure we can groupby for categoricals.

If observed=True, we return a new Categorical with the observed
categories only.
Expand Down
2 changes: 1 addition & 1 deletion pandas/core/groupby/grouper.py
Original file line number Diff line number Diff line change
Expand Up @@ -1051,7 +1051,7 @@ def is_in_obj(gpr) -> bool:
in_axis = False

# create the Grouping
# allow us to passing the actual Grouping as the gpr
# allow us to pass the actual Grouping as the gpr
ping = (
Grouping(
group_axis,
Expand Down
2 changes: 1 addition & 1 deletion pandas/plotting/_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -992,7 +992,7 @@ def __call__(self, *args, **kwargs):
if y is not None:
if is_integer(y) and not data.columns._holds_integer():
y = data.columns[y]
# converted to series actually. copy to not modify
# converted to series, actually. copy to not modify
data = data[y].copy()
data.index.name = y
elif isinstance(data, ABCDataFrame):
Expand Down
4 changes: 2 additions & 2 deletions scripts/validate_unwanted_patterns.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,7 +202,7 @@ def private_function_across_module(file_obj: IO[str]) -> Iterable[tuple[int, str
continue

if module_name in imported_modules and function_name.startswith("_"):
yield (node.lineno, f"Private function '{module_name}.{function_name}'")
yield node.lineno, f"Private function '{module_name}.{function_name}'"


def private_import_across_module(file_obj: IO[str]) -> Iterable[tuple[int, str]]:
Expand Down Expand Up @@ -232,7 +232,7 @@ def private_import_across_module(file_obj: IO[str]) -> Iterable[tuple[int, str]]
continue

if module_name.startswith("_"):
yield (node.lineno, f"Import of internal function {repr(module_name)}")
yield node.lineno, f"Import of internal function {repr(module_name)}"


def strings_with_wrong_placed_whitespace(
Expand Down
Loading