Skip to content

Commit

Permalink
Merge pull request #799 from kr-2003/eliminate_astropy_logging
Browse files Browse the repository at this point in the history
Eliminated the use of astropy logging
  • Loading branch information
matteobachetti authored Feb 12, 2024
2 parents 5285c23 + 004f1ae commit d47c95b
Show file tree
Hide file tree
Showing 16 changed files with 134 additions and 76 deletions.
1 change: 1 addition & 0 deletions docs/changes/799.removal.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Eliminated the use of astropy logging
70 changes: 29 additions & 41 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,16 +34,14 @@
try:
from sphinx_astropy.conf.v1 import * # noqa
except ImportError:
print(
'ERROR: the documentation requires the sphinx-astropy package to be installed'
)
print("ERROR: the documentation requires the sphinx-astropy package to be installed")
sys.exit(1)

# Get configuration information from setup.cfg
conf = ConfigParser()

conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
setup_cfg = dict(conf.items('metadata'))
conf.read([os.path.join(os.path.dirname(__file__), "..", "setup.cfg")])
setup_cfg = dict(conf.items("metadata"))

# -- General configuration ----------------------------------------------------

Expand All @@ -59,10 +57,7 @@

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns += ['_templates',
'notebooks/README.rst',
'notebooks/Debug',
'changes']
exclude_patterns += ["_templates", "notebooks/README.rst", "notebooks/Debug", "changes"]

# This is added to the end of RST files - a good place to put substitutions to
# be used globally.
Expand All @@ -72,20 +67,19 @@
# -- Project information ------------------------------------------------------

# This does not *have* to match the package name, but typically does
project = setup_cfg['name']
author = setup_cfg['author']
copyright = '{0}, {1}'.format(datetime.datetime.now().year,
setup_cfg['author'])
project = setup_cfg["name"]
author = setup_cfg["author"]
copyright = "{0}, {1}".format(datetime.datetime.now().year, setup_cfg["author"])

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.

import_module(setup_cfg['name'])
package = sys.modules[setup_cfg['name']]
import_module(setup_cfg["name"])
package = sys.modules[setup_cfg["name"]]

# The short X.Y version.
version = package.__version__.split('-', 1)[0]
version = package.__version__.split("-", 1)[0]
# The full version, including alpha/beta/rc tags.
release = package.__version__

Expand All @@ -110,15 +104,17 @@
html_css_files = ["css/custom.css"]

html_theme_options = {
'logotext1': 'Sting', # white, semi-bold
'logotext2': 'ray', # orange, light
'logotext3': ':docs' # white, light
"logotext1": "Sting", # white, semi-bold
"logotext2": "ray", # orange, light
"logotext3": ":docs", # white, light
}

extensions += [
'matplotlib.sphinxext.plot_directive', 'sphinx.ext.autodoc',
'sphinx.ext.napoleon', 'nbsphinx',
'IPython.sphinxext.ipython_console_highlighting'
"matplotlib.sphinxext.plot_directive",
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
"nbsphinx",
"IPython.sphinxext.ipython_console_highlighting",
]

# Custom sidebar templates, maps document names to template names.
Expand All @@ -139,47 +135,43 @@

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = '{0} v{1}'.format(project, release)
html_title = "{0} v{1}".format(project, release)

# Output file base name for HTML help builder.
htmlhelp_basename = project + 'doc'
htmlhelp_basename = project + "doc"

# -- Options for LaTeX output -------------------------------------------------

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [('index', project + '.tex', project + u' Documentation',
author, 'manual')]
latex_documents = [("index", project + ".tex", project + " Documentation", author, "manual")]

# -- Options for manual page output -------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [('index', project.lower(), project + u' Documentation', [author],
1)]
man_pages = [("index", project.lower(), project + " Documentation", [author], 1)]

# Trust the links from doi.org, even if they might have Client errors or other minor issues
linkcheck_ignore = [r"https://doi.org/"]

# -- Options for the edit_on_github extension ---------------------------------

if setup_cfg.get('edit_on_github').lower() == 'true':
if setup_cfg.get("edit_on_github").lower() == "true":
extensions += ["sphinx_astropy.ext.edit_on_github"]

extensions += ['sphinx_astropy.ext.edit_on_github']

edit_on_github_project = setup_cfg['github_project']
edit_on_github_project = setup_cfg["github_project"]
edit_on_github_branch = "master"

edit_on_github_source_root = ""
edit_on_github_doc_root = "docs"

# -- Resolving issue number to links in changelog -----------------------------
github_issues_url = 'https://github.com/{0}/issues/'.format(
setup_cfg['github_project'])
github_issues_url = "https://github.com/{0}/issues/".format(setup_cfg["github_project"])

# -- Configuration for nbsphinx -----------------------------------------------
# disable notebook execution
nbsphinx_execute = 'never'
nbsphinx_execute = "never"

# -- Generate DOI listing from Zenodo -----------------------------------------
import json
Expand Down Expand Up @@ -207,18 +199,14 @@ def zenodo_url(self):

@property
def github_url(self):
return (
f"https://github.com/StingraySoftware/stingray/releases/tag/{self.version}"
)
return f"https://github.com/StingraySoftware/stingray/releases/tag/{self.version}"

@property
def bibtex_url(self):
return self.zenodo_url + "/export/hx"


params = urllib.parse.urlencode(
{"q": f'conceptdoi: "{CONCEPT_DOI}"', "all_versions": 1}
)
params = urllib.parse.urlencode({"q": f'conceptdoi: "{CONCEPT_DOI}"', "all_versions": 1})
try:
with urllib.request.urlopen(ZENODO_API_ENDPOINT + "?" + params) as url:
data = json.loads(url.read().decode("utf-8"))
Expand Down
11 changes: 6 additions & 5 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
http://docs.astropy.org/en/latest/development/testguide.html#running-tests
"""

if 'test' in sys.argv:
if "test" in sys.argv:
print(TEST_HELP)
sys.exit(1)

Expand All @@ -58,7 +58,7 @@
http://docs.astropy.org/en/latest/install.html#builddocs
"""

if 'build_docs' in sys.argv or 'build_sphinx' in sys.argv:
if "build_docs" in sys.argv or "build_sphinx" in sys.argv:
print(DOCS_HELP)
sys.exit(1)

Expand All @@ -75,6 +75,7 @@

setup(
use_scm_version={
'write_to': os.path.join('stingray', 'version.py'),
'write_to_template': VERSION_TEMPLATE
})
"write_to": os.path.join("stingray", "version.py"),
"write_to_template": VERSION_TEMPLATE,
}
)
1 change: 1 addition & 0 deletions stingray/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,3 +25,4 @@
from stingray.bispectrum import *
from stingray.varenergyspectrum import *
from stingray.lombscargle import *
from stingray.loggingconfig import *
15 changes: 9 additions & 6 deletions stingray/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
from astropy.table import Table
from astropy.time import Time, TimeDelta
from astropy.units import Quantity
from stingray.loggingconfig import setup_logger

from .io import _can_save_longdouble, _can_serialize_meta
from .utils import (
Expand Down Expand Up @@ -58,6 +59,8 @@
"StingrayTimeseries",
]

logger = setup_logger()


def convert_table_attrs_to_lowercase(table: Table) -> Table:
"""Convert the column names of an Astropy Table to lowercase."""
Expand Down Expand Up @@ -1971,7 +1974,7 @@ def _get_all_array_attrs(objs):
all_meta_attrs.remove(attr)

for attr in ignore_meta:
logging.info(f"The {attr} attribute will be removed from the output ")
logger.info(f"The {attr} attribute will be removed from the output ")
if attr in all_meta_attrs:
all_meta_attrs.remove(attr)

Expand Down Expand Up @@ -2242,7 +2245,7 @@ def fill_bad_time_intervals(

btis = get_btis(self.gti, self.time[0], self.time[-1])
if len(btis) == 0:
logging.info("No bad time intervals to fill")
logger.info("No bad time intervals to fill")
return copy.deepcopy(self)
filtered_times = self.time[self.mask]

Expand All @@ -2255,7 +2258,7 @@ def fill_bad_time_intervals(
even_sampling = False
if self.dt > 0 and np.isclose(mean_data_separation, self.dt, rtol=0.01):
even_sampling = True
logging.info(f"Data are {'not' if not even_sampling else ''} evenly sampled")
logger.info(f"Data are {'not' if not even_sampling else ''} evenly sampled")

if even_sampling:
est_samples_in_gap = int(max_length / self.dt)
Expand All @@ -2272,7 +2275,7 @@ def fill_bad_time_intervals(
length = bti[1] - bti[0]
if length > max_length:
continue
logging.info(f"Filling bad time interval {bti} ({length:.4f} s)")
logger.info(f"Filling bad time interval {bti} ({length:.4f} s)")
epsilon = 1e-5 * length
added_gtis.append([bti[0] - epsilon, bti[1] + epsilon])
filt_low_t, filt_low_idx = find_nearest(filtered_times, bti[0])
Expand Down Expand Up @@ -2307,7 +2310,7 @@ def fill_bad_time_intervals(
new_attrs[attr].append(np.zeros(nevents) + np.nan)
total_filled_time += length

logging.info(f"A total of {total_filled_time} s of data were simulated")
logger.info(f"A total of {total_filled_time} s of data were simulated")

new_gtis = join_gtis(self.gti, added_gtis)
new_times = np.concatenate(new_times)
Expand Down Expand Up @@ -2497,7 +2500,7 @@ def estimate_segment_size(self, min_counts=None, min_samples=None, even_sampling
and np.isclose(mean_data_separation, self.dt, rtol=0.01)
):
even_sampling = True
logging.info(f"Data are {'not' if not even_sampling else ''} evenly sampled")
logger.info(f"Data are {'not' if not even_sampling else ''} evenly sampled")

if min_counts is None:
if even_sampling and hasattr(self, "counts"):
Expand Down
6 changes: 4 additions & 2 deletions stingray/deadtime/fad.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@

from scipy.ndimage import gaussian_filter1d
from scipy.interpolate import UnivariateSpline
from astropy import log
from astropy.table import Table

from stingray.lightcurve import Lightcurve
from stingray.loggingconfig import setup_logger
from ..crossspectrum import AveragedCrossspectrum, get_flux_generator
from ..powerspectrum import AveragedPowerspectrum
from ..fourier import normalize_periodograms, fft, fftfreq, positive_fft_bins
Expand All @@ -18,6 +18,8 @@

__all__ = ["calculate_FAD_correction", "get_periodograms_from_FAD_results", "FAD"]

logger = setup_logger()


def FAD(
data1,
Expand Down Expand Up @@ -249,7 +251,7 @@ def FAD(
)

if verbose and is_compliant:
log.info(verbose_string)
logger.info(verbose_string)
elif not is_compliant:
warnings.warn(verbose_string)

Expand Down
6 changes: 4 additions & 2 deletions stingray/deadtime/model.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
from stingray.utils import njit, prange
from stingray.loggingconfig import setup_logger
import numpy as np
import matplotlib.pyplot as plt
from astropy import log
from scipy.special import factorial


__FACTORIALS = factorial(np.arange(160))

logger = setup_logger()


def r_in(td, r_0):
"""Calculate incident countrate given dead time and detected countrate."""
Expand Down Expand Up @@ -188,7 +190,7 @@ def pds_model_zhang(N, rate, td, tb, limit_k=60):
tau = 1 / rate
r0 = r_det(td, rate)
# Nph = N / tau
log.info("Calculating PDS model (update)")
logger.info("Calculating PDS model (update)")
P = _inner_loop_pds_zhang(N, tau, r0, td, tb, limit_k=limit_k)

maxf = 0.5 / tb
Expand Down
5 changes: 4 additions & 1 deletion stingray/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import numpy as np

from stingray.utils import _int_sum_non_zero
from stingray.loggingconfig import setup_logger

from .base import StingrayTimeseries
from .filters import get_deadtime_mask
Expand All @@ -21,6 +22,8 @@

__all__ = ["EventList"]

logger = setup_logger()


@njit
def _from_lc_numba(times, counts, empty_times):
Expand Down Expand Up @@ -309,7 +312,7 @@ def to_binned_timeseries(self, dt, array_attrs=None):

for attr in array_attrs:
if getattr(self, attr, None) is not None:
logging.info(f"Creating the {attr} array")
logger.info(f"Creating the {attr} array")

attr_dict[attr] = histogram(
self.time, bins=nbins, weights=getattr(self, attr), range=ranges
Expand Down
7 changes: 4 additions & 3 deletions stingray/filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,14 @@

import numpy as np
import numpy.random as ra
from astropy import log
from astropy.logger import AstropyUserWarning

from .utils import njit
from .loggingconfig import setup_logger

__all__ = ["Window1D", "Optimal1D"]

logger = setup_logger()


class Window1D(object):
"""
Expand Down Expand Up @@ -259,7 +260,7 @@ def get_deadtime_mask(
deadtime_values = deadtime_values[saved_mask]
final_len = tot_ev_list_filt.size
if verbose:
log.info(
logger.info(
"filter_for_deadtime: "
"{0}/{1} events rejected".format(initial_len - final_len, initial_len)
)
Expand Down
Loading

0 comments on commit d47c95b

Please sign in to comment.