MNT: Enable Sphinx nitpicky mode
Also clean up a bunch of issues with references found by nitpicky mode.
dopplershift committed Nov 12, 2024
1 parent fa4f169 commit 74b01ef
Showing 11 changed files with 45 additions and 29 deletions.
7 changes: 7 additions & 0 deletions docs/conf.py
@@ -58,8 +58,15 @@
'numpy': ('https://numpy.org/doc/stable/', None),
'matplotlib': ('https://matplotlib.org/stable/', None),
'requests': ('https://requests.kennethreitz.org/en/latest/', None),
'pandas': ('https://pandas.pydata.org/docs/', None),
}

nitpicky = True
nitpick_ignore = [
('py:class', 'optional'), ('py:class', 'file-like object'), ('py:class', 'iterator')
]
nitpick_ignore_regex = [('py:class', r'.*[cC]allable'),]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

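For context, nitpicky mode makes Sphinx warn about every cross-reference it cannot resolve, which is why the docstring changes below replace vague type descriptions with targets that intersphinx can find. A minimal sketch of the docstring style this commit moves toward (the function name is hypothetical, not part of the diff):

def fetch_observations(time, site_id):
    """Retrieve observations for a station.

    Parameters
    ----------
    time : datetime.datetime
        Date and time of the desired observation; the qualified name resolves
        through the Python intersphinx inventory, unlike a bare ``datetime``.
    site_id : str
        Station identifier.

    Returns
    -------
    `pandas.DataFrame`
        Parsed data; resolves via the newly added pandas intersphinx mapping.
    """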
2 changes: 1 addition & 1 deletion examples/ncss/NCSS_Cartopy_Example.py
@@ -55,7 +55,7 @@

###########################################
# We now request data from the server using this query. The `NCSS` class handles parsing
# this NetCDF data (using the `netCDF4` module). If we print out the variable names, we see
# this NetCDF data (using the ``netCDF4`` module). If we print out the variable names, we see
# our requested variable, as well as the coordinate variables (needed to properly reference
# the data).
data = ncss.get_data(query)
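The diff above only touches the comment before the final get_data call; for readers following along, here is a minimal sketch of how such an NCSS query is typically assembled with siphon. The catalog URL, dataset choice, bounding box, and variable name are illustrative assumptions, not taken from this commit.

from datetime import datetime, timezone

from siphon.catalog import TDSCatalog

# Illustrative GFS catalog on the Unidata THREDDS server.
cat = TDSCatalog('https://thredds.ucar.edu/thredds/catalog/grib/NCEP/GFS/'
                 'Global_0p5deg/catalog.xml')
ncss = cat.datasets[0].subset()  # NCSS client for the first listed dataset

query = ncss.query()
query.lonlat_box(north=50, south=20, east=-60, west=-130)  # assumed region
query.time(datetime.now(timezone.utc))
query.variables('Temperature_isobaric')  # assumed variable name
query.accept('netcdf4')

data = ncss.get_data(query)  # netCDF4 Dataset parsed by the NCSS class
print(list(data.variables))  # requested variable plus coordinate variables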
2 changes: 1 addition & 1 deletion examples/ncss/NCSS_Example.py
@@ -44,7 +44,7 @@

###########################################
# We now request data from the server using this query. The `NCSS` class handles parsing
# this NetCDF data (using the `netCDF4` module). If we print out the variable names,
# this NetCDF data (using the ``netCDF4`` module). If we print out the variable names,
# we see our requested variables, as well as a few others (more metadata information)
data = ncss.get_data(query)
list(data.variables)
4 changes: 2 additions & 2 deletions examples/ncss/NCSS_Timeseries_Examples.py
@@ -45,7 +45,7 @@

###########################################
# We now request data from the server using this query. The `NCSS` class handles parsing
# this NetCDF data (using the `netCDF4` module). If we print out the variable names, we
# this NetCDF data (using the ``netCDF4`` module). If we print out the variable names, we
# see our requested variables, as well as a few others (more metadata information)
data = ncss.get_data(query)
list(data.variables)
@@ -57,7 +57,7 @@

###########################################
# The time values are in hours relative to the start of the entire model collection.
# Fortunately, the `netCDF4` module has a helper function to convert these numbers into
# Fortunately, the ``netCDF4`` module has a helper function to convert these numbers into
# Python `datetime` objects. We can see the first 5 element output by the function look
# reasonable.
time_vals = num2date(time[:].squeeze(), time.units, only_use_cftime_datetimes=False)
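To make the time-handling comment above concrete, here is a hedged sketch of a point time-series query followed by the num2date conversion; the point location, variable name, and time variable name are assumptions for illustration.

from datetime import datetime, timedelta, timezone

from netCDF4 import num2date

# Continuing from an NCSS client ('ncss') as sketched earlier.
query = ncss.query()
query.lonlat_point(-105.0, 40.0)         # assumed location
now = datetime.now(timezone.utc)
query.time_range(now, now + timedelta(days=3))
query.variables('Temperature_isobaric')  # assumed variable name
query.accept('netcdf4')

data = ncss.get_data(query)
time = data.variables['time']            # name assumed; check list(data.variables)

# Hours relative to the start of the model collection, converted to datetimes.
time_vals = num2date(time[:].squeeze(), time.units,
                     only_use_cftime_datetimes=False)
print(time_vals[:5])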
7 changes: 5 additions & 2 deletions src/siphon/catalog.py
@@ -101,6 +101,7 @@ def filter_time_nearest(self, time, regex=None, strptime=None):
Returns
-------
Dataset
The value with a time closest to that desired
"""
@@ -137,6 +138,7 @@ def filter_time_range(self, start, end, regex=None, strptime=None):
Returns
-------
List[Dataset]
All values corresponding to times within the specified range
"""
@@ -611,7 +613,7 @@ def remote_open(self, mode='b', encoding='ascii', errors='ignore'):
Parameters
----------
mode : 'b' or 't', optional
mode : `'b'` or `'t'`, optional
Mode with which to open the remote data; 'b' for binary, 't' for text. Defaults
to 'b'.
@@ -625,7 +627,8 @@ def remote_open(self, mode='b', encoding='ascii', errors='ignore'):
Returns
-------
A random access, file-like object
fobj : file-like object
A random access, file-like object for reading data
"""
fobj = self.access_with_service('HTTPServer')
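A hedged usage sketch tying the catalog.py docstring fixes together: filter_time_nearest returns the Dataset whose time is closest to the one requested, and remote_open returns a random-access, file-like object. The catalog URL is an assumption; any TDS catalog whose dataset names embed a timestamp should behave similarly.

from datetime import datetime, timezone

from siphon.catalog import TDSCatalog

cat = TDSCatalog('https://thredds.ucar.edu/thredds/catalog/grib/NCEP/GFS/'
                 'Global_0p5deg/catalog.xml')  # illustrative catalog

# Returns a Dataset: the value with a time closest to that desired.
ds = cat.datasets.filter_time_nearest(datetime.now(timezone.utc))

# remote_open() defaults to binary mode and yields a file-like object.
fobj = ds.remote_open()
print(ds, fobj.read(16))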
14 changes: 7 additions & 7 deletions src/siphon/http_util.py
@@ -96,7 +96,7 @@ def urlopen(self, url, decompress=False, **kwargs):
url : str
The URL to request
kwargs : arbitrary keyword arguments
kwargs
Additional keyword arguments to pass to :meth:`requests.Session.get`.
Returns
@@ -140,14 +140,14 @@ class DataQuery:
This object provides a clear API to formulate a query for data, including
a spatial query, a time query, and possibly some variables or other parameters.
These objects provide a dictionary-like interface, (:meth:`items` and :meth:`__iter__`)
These objects provide a dictionary-like interface, (``items`` and ``__iter__``)
sufficient to be passed to functions expecting a dictionary representing a URL query.
Instances of this object can also be turned into a string, which will yield a
properly escaped string for a URL.
"""

def __init__(self):
"""Construct an empty :class:`DataQuery`."""
"""Construct an empty class representing a query for data."""
self.var = set()
self.time_query = OrderedDict()
self.spatial_query = OrderedDict()
@@ -163,7 +163,7 @@ def variables(self, *var_names):
Parameters
----------
var_names : one or more strings
var_names : str
One or more names of variables to request. Use 'all' to request all.
Returns
@@ -183,7 +183,7 @@ def add_query_parameter(self, **kwargs):
Parameters
----------
kwargs : one or more strings passed as keyword arguments
kwargs
Names and values of parameters to add to the query
Returns
@@ -471,7 +471,7 @@ def get(self, path, params=None):
Raises
------
HTTPError
`~requests.HTTPError`
If the server returns anything other than a 200 (OK) code
See Also
@@ -506,7 +506,7 @@ def validate_query(self, query):
Parameters
----------
query : DataQuery (or subclass)
query : DataQuery
Returns
-------
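Since the DataQuery docstring now spells out the dict-like interface in plain terms, a small sketch of what that means in practice; the variable and parameter names are illustrative.

from siphon.http_util import DataQuery

query = DataQuery()
query.variables('temperature', 'dewpoint')  # one or more variable names (str)
query.add_query_parameter(accept='csv')     # arbitrary extra query parameters

# Dict-like: items()/__iter__ let the query be used like a mapping.
print(dict(query.items()))
# String form: a properly escaped URL query string.
print(str(query))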
4 changes: 2 additions & 2 deletions src/siphon/radarserver.py
@@ -30,7 +30,7 @@ def stations(self, *stns):
Parameters
----------
stns : one or more strings
stns : str
One or more names of variables to request
Returns
@@ -192,7 +192,7 @@ def get_radarserver_datasets(server):
Parameters
----------
server : string
server : str
The base URL to the THREDDS server
Returns
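A hedged sketch of the two pieces touched in radarserver.py: get_radarserver_datasets takes the base THREDDS URL as a str, and the query's stations method takes one or more station names as str. The server URL, dataset key, and station ID are assumptions.

from datetime import datetime, timezone

from siphon.radarserver import RadarServer, get_radarserver_datasets

datasets = get_radarserver_datasets('https://thredds.ucar.edu/thredds/')
print(list(datasets))

# Assumed dataset key; pick whichever Level II service the listing shows.
url = datasets['NEXRAD Level II Radar from IDD'].follow().catalog_url
rs = RadarServer(url)

query = rs.query()
query.stations('KTLX').time(datetime.now(timezone.utc))
cat = rs.get_catalog(query)
print(list(cat.datasets))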
5 changes: 3 additions & 2 deletions src/siphon/simplewebservice/acis.py
@@ -37,11 +37,12 @@ def acis_request(method, params):
Returns
-------
A dictionary of data based on the JSON parameters
dict[str, Any]
A dictionary of data based on the JSON parameters
Raises
------
:class: `ACIS_API_Exception`
`AcisApiException`
When the API is unable to establish a connection or returns
unparsable data.
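A short, hedged example of the corrected acis.py docstring in action: acis_request returns a dict and raises AcisApiException when the connection fails or the response cannot be parsed. The station, dates, and element list follow the public ACIS API but are assumptions here.

from siphon.simplewebservice.acis import acis_request

# 'StnData' is a standard ACIS method; parameters below are illustrative.
params = {'sid': 'KDEN', 'sdate': '2024-11-01', 'edate': '2024-11-07',
          'elems': 'maxt,mint'}

data = acis_request('StnData', params)  # dict built from the JSON reply
print(data.keys())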
8 changes: 4 additions & 4 deletions src/siphon/simplewebservice/iastate.py
@@ -28,7 +28,7 @@ def request_data(cls, time, site_id, interp_nans=False, **kwargs):
Parameters
----------
time : datetime
time : datetime.datetime
The date and time of the desired observation.
site_id : str
@@ -61,7 +61,7 @@ def request_all_data(cls, time, pressure=None, **kwargs):
Parameters
----------
time : datetime
time : datetime.datetime
The date and time of the desired observation.
pressure : float, optional
@@ -85,7 +85,7 @@ def _get_data(self, time, site_id, pressure=None):
Parameters
----------
time : datetime
time : datetime.datetime
Date and time for which data should be downloaded
site_id : str
Site id for which data should be downloaded
@@ -153,7 +153,7 @@ def _get_data_raw(self, time, site_id, pressure=None):
Parameters
----------
time : datetime
time : datetime.datetime
Date and time for which data should be downloaded
site_id : str
Site id for which data should be downloaded
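For reference, the datetime.datetime type now named in the iastate.py docstrings corresponds to calls like this hedged sketch; the observation time, site, and pressure level are assumptions.

from datetime import datetime

from siphon.simplewebservice.iastate import IAStateUpperAir

# Single-site sounding at a synoptic hour; returns a pandas.DataFrame.
df = IAStateUpperAir.request_data(datetime(2024, 11, 1, 12), 'OAX')
print(df.columns)

# All sites at one time, optionally restricted to a single pressure level.
df_all = IAStateUpperAir.request_all_data(datetime(2024, 11, 1, 12), pressure=500.0)
print(len(df_all))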
7 changes: 5 additions & 2 deletions src/siphon/simplewebservice/ndbc.py
@@ -43,7 +43,8 @@ def realtime_observations(cls, buoy, data_type='txt'):
Returns
-------
Raw data string
`pandas.DataFrame`
Parsed data
"""
endpoint = cls()
@@ -524,6 +525,7 @@ def buoy_data_types(cls, buoy):
Returns
-------
dict[str, str]
dict of valid file extensions and their descriptions
"""
@@ -583,7 +585,8 @@ def raw_buoy_data(cls, buoy, data_type='txt'):
Returns
-------
Raw data string
str
Raw data string
"""
endpoint = cls()
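A hedged sketch exercising the three ndbc.py docstrings corrected above; the buoy ID is an assumption.

from siphon.simplewebservice.ndbc import NDBC

buoy = '41002'  # illustrative station ID

df = NDBC.realtime_observations(buoy)   # parsed data as a pandas.DataFrame
print(df.head())

print(NDBC.buoy_data_types(buoy))       # dict of file extensions and descriptions

text = NDBC.raw_buoy_data(buoy, data_type='txt')  # raw data string (str)
print(text[:200])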
14 changes: 8 additions & 6 deletions src/siphon/simplewebservice/wyoming.py
@@ -28,7 +28,7 @@ def request_data(cls, time, site_id, **kwargs):
Parameters
----------
time : datetime
time : datetime.datetime
The date and time of the desired observation.
site_id : str
@@ -40,7 +40,8 @@ def request_data(cls, time, site_id, **kwargs):
Returns
-------
:class:`pandas.DataFrame` containing the data
`pandas.DataFrame`
Parsed data
"""
endpoint = cls()
@@ -52,7 +53,7 @@ def _get_data(self, time, site_id):
Parameters
----------
time : datetime
time : datetime.datetime
The date and time of the desired observation.
site_id : str
@@ -61,7 +62,7 @@ def _get_data(self, time, site_id):
Returns
-------
:class:`pandas.DataFrame` containing the data
`pandas.DataFrame`
"""
raw_data = self._get_data_raw(time, site_id)
@@ -130,14 +131,15 @@ def _get_data_raw(self, time, site_id):
Parameters
----------
time : datetime
time : datetime.datetime
Date and time for which data should be downloaded
site_id : str
Site id for which data should be downloaded
Returns
-------
text of the server response
str
text of the server response
"""
path = ('?region=naconf&TYPE=TEXT%3ALIST'
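Finally, a hedged sketch of the wyoming.py interface whose docstrings are cleaned up above; the sounding time and station are assumptions.

from datetime import datetime

from siphon.simplewebservice.wyoming import WyomingUpperAir

# request_data takes a datetime.datetime and a site ID and returns a
# pandas.DataFrame of the parsed sounding.
df = WyomingUpperAir.request_data(datetime(2024, 11, 1, 12), 'OUN')
print(df.head())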
