diff --git a/docs/changes/799.removal.rst b/docs/changes/799.removal.rst
new file mode 100644
index 000000000..00a946650
--- /dev/null
+++ b/docs/changes/799.removal.rst
@@ -0,0 +1 @@
+Eliminated the usage of astropy logging
\ No newline at end of file
diff --git a/docs/conf.py b/docs/conf.py
index 1879ac3b0..dc0ced1b4 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -34,16 +34,14 @@
 try:
     from sphinx_astropy.conf.v1 import *  # noqa
 except ImportError:
-    print(
-        'ERROR: the documentation requires the sphinx-astropy package to be installed'
-    )
+    print("ERROR: the documentation requires the sphinx-astropy package to be installed")
     sys.exit(1)

 # Get configuration information from setup.cfg
 conf = ConfigParser()
-conf.read([os.path.join(os.path.dirname(__file__), '..', 'setup.cfg')])
-setup_cfg = dict(conf.items('metadata'))
+conf.read([os.path.join(os.path.dirname(__file__), "..", "setup.cfg")])
+setup_cfg = dict(conf.items("metadata"))

 # -- General configuration ----------------------------------------------------

@@ -59,10 +57,7 @@
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
-exclude_patterns += ['_templates',
-                     'notebooks/README.rst',
-                     'notebooks/Debug',
-                     'changes']
+exclude_patterns += ["_templates", "notebooks/README.rst", "notebooks/Debug", "changes"]

 # This is added to the end of RST files - a good place to put substitutions to
 # be used globally.

@@ -72,20 +67,19 @@
 # -- Project information ------------------------------------------------------

 # This does not *have* to match the package name, but typically does
-project = setup_cfg['name']
-author = setup_cfg['author']
-copyright = '{0}, {1}'.format(datetime.datetime.now().year,
-                              setup_cfg['author'])
+project = setup_cfg["name"]
+author = setup_cfg["author"]
+copyright = "{0}, {1}".format(datetime.datetime.now().year, setup_cfg["author"])

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
-import_module(setup_cfg['name'])
-package = sys.modules[setup_cfg['name']]
+import_module(setup_cfg["name"])
+package = sys.modules[setup_cfg["name"]]

 # The short X.Y version.
-version = package.__version__.split('-', 1)[0]
+version = package.__version__.split("-", 1)[0]
 # The full version, including alpha/beta/rc tags.
 release = package.__version__

@@ -110,15 +104,17 @@
 html_css_files = ["css/custom.css"]

 html_theme_options = {
-    'logotext1': 'Sting',  # white, semi-bold
-    'logotext2': 'ray',  # orange, light
-    'logotext3': ':docs'  # white, light
+    "logotext1": "Sting",  # white, semi-bold
+    "logotext2": "ray",  # orange, light
+    "logotext3": ":docs",  # white, light
 }

 extensions += [
-    'matplotlib.sphinxext.plot_directive', 'sphinx.ext.autodoc',
-    'sphinx.ext.napoleon', 'nbsphinx',
-    'IPython.sphinxext.ipython_console_highlighting'
+    "matplotlib.sphinxext.plot_directive",
+    "sphinx.ext.autodoc",
+    "sphinx.ext.napoleon",
+    "nbsphinx",
+    "IPython.sphinxext.ipython_console_highlighting",
 ]

 # Custom sidebar templates, maps document names to template names.

@@ -139,47 +135,43 @@
 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-html_title = '{0} v{1}'.format(project, release)
+html_title = "{0} v{1}".format(project, release)

 # Output file base name for HTML help builder.
-htmlhelp_basename = project + 'doc'
+htmlhelp_basename = project + "doc"

 # -- Options for LaTeX output -------------------------------------------------

 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [('index', project + '.tex', project + u' Documentation',
-                    author, 'manual')]
+latex_documents = [("index", project + ".tex", project + " Documentation", author, "manual")]

 # -- Options for manual page output -------------------------------------------

 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [('index', project.lower(), project + u' Documentation', [author],
-              1)]
+man_pages = [("index", project.lower(), project + " Documentation", [author], 1)]

 # Trust the links from doi.org, even if they might have Client errors or other minor issues
 linkcheck_ignore = [r"https://doi.org/"]

 # -- Options for the edit_on_github extension ---------------------------------

-if setup_cfg.get('edit_on_github').lower() == 'true':
+if setup_cfg.get("edit_on_github").lower() == "true":
+    extensions += ["sphinx_astropy.ext.edit_on_github"]

-    extensions += ['sphinx_astropy.ext.edit_on_github']
-
-    edit_on_github_project = setup_cfg['github_project']
+    edit_on_github_project = setup_cfg["github_project"]
     edit_on_github_branch = "master"
     edit_on_github_source_root = ""
     edit_on_github_doc_root = "docs"

 # -- Resolving issue number to links in changelog -----------------------------
-github_issues_url = 'https://github.com/{0}/issues/'.format(
-    setup_cfg['github_project'])
+github_issues_url = "https://github.com/{0}/issues/".format(setup_cfg["github_project"])

 # -- Configuration for nbsphinx -----------------------------------------------

 # disable notebook execution
-nbsphinx_execute = 'never'
+nbsphinx_execute = "never"

 # -- Generate DOI listing from Zenodo -----------------------------------------
 import json
@@ -207,18 +199,14 @@ def zenodo_url(self):
     @property
     def github_url(self):
-        return (
-            f"https://github.com/StingraySoftware/stingray/releases/tag/{self.version}"
-        )
+        return f"https://github.com/StingraySoftware/stingray/releases/tag/{self.version}"

     @property
     def bibtex_url(self):
         return self.zenodo_url + "/export/hx"


-params = urllib.parse.urlencode(
-    {"q": f'conceptdoi: "{CONCEPT_DOI}"', "all_versions": 1}
-)
+params = urllib.parse.urlencode({"q": f'conceptdoi: "{CONCEPT_DOI}"', "all_versions": 1})

 try:
     with urllib.request.urlopen(ZENODO_API_ENDPOINT + "?" + params) as url:
         data = json.loads(url.read().decode("utf-8"))
diff --git a/setup.py b/setup.py
index 22db4c3a4..c25a07202 100755
--- a/setup.py
+++ b/setup.py
@@ -33,7 +33,7 @@
 http://docs.astropy.org/en/latest/development/testguide.html#running-tests
 """

-if 'test' in sys.argv:
+if "test" in sys.argv:
     print(TEST_HELP)
     sys.exit(1)

@@ -58,7 +58,7 @@
 http://docs.astropy.org/en/latest/install.html#builddocs
 """

-if 'build_docs' in sys.argv or 'build_sphinx' in sys.argv:
+if "build_docs" in sys.argv or "build_sphinx" in sys.argv:
     print(DOCS_HELP)
     sys.exit(1)

@@ -75,6 +75,7 @@
 setup(
     use_scm_version={
-        'write_to': os.path.join('stingray', 'version.py'),
-        'write_to_template': VERSION_TEMPLATE
-    })
+        "write_to": os.path.join("stingray", "version.py"),
+        "write_to_template": VERSION_TEMPLATE,
+    }
+)
diff --git a/stingray/__init__.py b/stingray/__init__.py
index 84c6eeca7..d27947bdc 100644
--- a/stingray/__init__.py
+++ b/stingray/__init__.py
@@ -25,3 +25,4 @@
 from stingray.bispectrum import *
 from stingray.varenergyspectrum import *
 from stingray.lombscargle import *
+from stingray.loggingconfig import *
diff --git a/stingray/base.py b/stingray/base.py
index 954f5ff0c..f9734725a 100644
--- a/stingray/base.py
+++ b/stingray/base.py
@@ -13,6 +13,7 @@
 from astropy.table import Table
 from astropy.time import Time, TimeDelta
 from astropy.units import Quantity
+from stingray.loggingconfig import setup_logger

 from .io import _can_save_longdouble, _can_serialize_meta
 from .utils import (
@@ -58,6 +59,8 @@
     "StingrayTimeseries",
 ]

+logger = setup_logger()
+

 def convert_table_attrs_to_lowercase(table: Table) -> Table:
     """Convert the column names of an Astropy Table to lowercase."""
@@ -1971,7 +1974,7 @@ def _get_all_array_attrs(objs):
             all_meta_attrs.remove(attr)

     for attr in ignore_meta:
-        logging.info(f"The {attr} attribute will be removed from the output ")
+        logger.info(f"The {attr} attribute will be removed from the output ")
         if attr in all_meta_attrs:
             all_meta_attrs.remove(attr)
@@ -2242,7 +2245,7 @@ def fill_bad_time_intervals(
         btis = get_btis(self.gti, self.time[0], self.time[-1])

         if len(btis) == 0:
-            logging.info("No bad time intervals to fill")
+            logger.info("No bad time intervals to fill")
             return copy.deepcopy(self)

         filtered_times = self.time[self.mask]
@@ -2255,7 +2258,7 @@
         even_sampling = False
         if self.dt > 0 and np.isclose(mean_data_separation, self.dt, rtol=0.01):
             even_sampling = True
-        logging.info(f"Data are {'not' if not even_sampling else ''} evenly sampled")
+        logger.info(f"Data are {'not' if not even_sampling else ''} evenly sampled")

         if even_sampling:
             est_samples_in_gap = int(max_length / self.dt)
@@ -2272,7 +2275,7 @@
             length = bti[1] - bti[0]
             if length > max_length:
                 continue
-            logging.info(f"Filling bad time interval {bti} ({length:.4f} s)")
+            logger.info(f"Filling bad time interval {bti} ({length:.4f} s)")
             epsilon = 1e-5 * length
             added_gtis.append([bti[0] - epsilon, bti[1] + epsilon])
             filt_low_t, filt_low_idx = find_nearest(filtered_times, bti[0])
@@ -2307,7 +2310,7 @@
                     new_attrs[attr].append(np.zeros(nevents) + np.nan)
             total_filled_time += length

-        logging.info(f"A total of {total_filled_time} s of data were simulated")
+        logger.info(f"A total of {total_filled_time} s of data were simulated")

         new_gtis = join_gtis(self.gti, added_gtis)
         new_times = np.concatenate(new_times)
@@ -2497,7 +2500,7 @@ def estimate_segment_size(self, min_counts=None, min_samples=None, even_sampling
             and np.isclose(mean_data_separation, self.dt, rtol=0.01)
         ):
             even_sampling = True
-        logging.info(f"Data are {'not' if not even_sampling else ''} evenly sampled")
+        logger.info(f"Data are {'not' if not even_sampling else ''} evenly sampled")

         if min_counts is None:
             if even_sampling and hasattr(self, "counts"):
diff --git a/stingray/deadtime/fad.py b/stingray/deadtime/fad.py
index 01a453985..3d95c324a 100644
--- a/stingray/deadtime/fad.py
+++ b/stingray/deadtime/fad.py
@@ -5,10 +5,10 @@
 from scipy.ndimage import gaussian_filter1d
 from scipy.interpolate import UnivariateSpline

-from astropy import log
 from astropy.table import Table

 from stingray.lightcurve import Lightcurve
+from stingray.loggingconfig import setup_logger
 from ..crossspectrum import AveragedCrossspectrum, get_flux_generator
 from ..powerspectrum import AveragedPowerspectrum
 from ..fourier import normalize_periodograms, fft, fftfreq, positive_fft_bins
@@ -18,6 +18,8 @@

 __all__ = ["calculate_FAD_correction", "get_periodograms_from_FAD_results", "FAD"]

+logger = setup_logger()
+

 def FAD(
     data1,
@@ -249,7 +251,7 @@ def FAD(
     )

     if verbose and is_compliant:
-        log.info(verbose_string)
+        logger.info(verbose_string)
     elif not is_compliant:
         warnings.warn(verbose_string)
diff --git a/stingray/deadtime/model.py b/stingray/deadtime/model.py
index d96cb6a66..8a1529477 100644
--- a/stingray/deadtime/model.py
+++ b/stingray/deadtime/model.py
@@ -1,12 +1,14 @@
 from stingray.utils import njit, prange
+from stingray.loggingconfig import setup_logger

 import numpy as np
 import matplotlib.pyplot as plt
-from astropy import log

 from scipy.special import factorial

 __FACTORIALS = factorial(np.arange(160))

+logger = setup_logger()
+

 def r_in(td, r_0):
     """Calculate incident countrate given dead time and detected countrate."""
@@ -188,7 +190,7 @@ def pds_model_zhang(N, rate, td, tb, limit_k=60):
     tau = 1 / rate
     r0 = r_det(td, rate)
     # Nph = N / tau
-    log.info("Calculating PDS model (update)")
+    logger.info("Calculating PDS model (update)")
     P = _inner_loop_pds_zhang(N, tau, r0, td, tb, limit_k=limit_k)

     maxf = 0.5 / tb
diff --git a/stingray/events.py b/stingray/events.py
index a45d0bde3..343bbb255 100644
--- a/stingray/events.py
+++ b/stingray/events.py
@@ -10,6 +10,7 @@
 import numpy as np

 from stingray.utils import _int_sum_non_zero
+from stingray.loggingconfig import setup_logger

 from .base import StingrayTimeseries
 from .filters import get_deadtime_mask
@@ -21,6 +22,8 @@

 __all__ = ["EventList"]

+logger = setup_logger()
+

 @njit
 def _from_lc_numba(times, counts, empty_times):
@@ -309,7 +312,7 @@ def to_binned_timeseries(self, dt, array_attrs=None):

         for attr in array_attrs:
             if getattr(self, attr, None) is not None:
-                logging.info(f"Creating the {attr} array")
+                logger.info(f"Creating the {attr} array")
                 attr_dict[attr] = histogram(
                     self.time, bins=nbins, weights=getattr(self, attr), range=ranges
diff --git a/stingray/filters.py b/stingray/filters.py
index aaf2d21be..ffdaa2763 100644
--- a/stingray/filters.py
+++ b/stingray/filters.py
@@ -4,13 +4,14 @@
 import numpy as np
 import numpy.random as ra

-from astropy import log
-from astropy.logger import AstropyUserWarning

 from .utils import njit
+from .loggingconfig import setup_logger

 __all__ = ["Window1D", "Optimal1D"]

+logger = setup_logger()
+

 class Window1D(object):
     """
@@ -259,7 +260,7 @@ def get_deadtime_mask(
         deadtime_values = deadtime_values[saved_mask]
     final_len = tot_ev_list_filt.size
     if verbose:
-        log.info(
+        logger.info(
             "filter_for_deadtime: "
             "{0}/{1} events rejected".format(initial_len - final_len, initial_len)
         )
diff --git a/stingray/gti.py b/stingray/gti.py
index 22d5d1ec0..99d7689c4 100644
--- a/stingray/gti.py
+++ b/stingray/gti.py
@@ -10,6 +10,7 @@
 from .utils import assign_value_if_none, apply_function_if_none
 from .utils import check_iterables_close, is_sorted
 from stingray.exceptions import StingrayError
+from stingray.loggingconfig import setup_logger


 __all__ = [
@@ -38,6 +39,8 @@
     "generate_indices_of_segment_boundaries_binned",
 ]

+logger = setup_logger()
+

 def gti_len(gti):
     """Deprecated, will be removed in version 2.0. Use get_total_gti_length."""
@@ -116,7 +119,7 @@ def load_gtis(fits_file, gtistring=None):
     """
     gtistring = assign_value_if_none(gtistring, "GTI")
-    logging.info("Loading GTIS from file %s" % fits_file)
+    logger.info("Loading GTIS from file %s" % fits_file)
     lchdulist = fits.open(fits_file, checksum=True, ignore_missing_end=True)
     lchdulist.verify("warn")
@@ -624,7 +627,7 @@ def create_gti_from_condition(time, condition, safe_interval=0, dt=None):
     gtis = []
     for idx in idxs:
-        logging.debug(idx)
+        logger.debug(idx)
         startidx = idx[0]
         stopidx = idx[1] - 1
diff --git a/stingray/io.py b/stingray/io.py
index 465394a72..32a6b21d2 100644
--- a/stingray/io.py
+++ b/stingray/io.py
@@ -14,6 +14,7 @@
 import matplotlib.pyplot as plt

 import stingray.utils as utils
+from stingray.loggingconfig import setup_logger

 from .utils import assign_value_if_none, is_string, order_list_of_arrays, is_sorted
 from .gti import get_gti_from_all_extensions, load_gtis
@@ -35,6 +36,8 @@
 except AttributeError:  # pragma: no cover
     HAS_128 = False

+logger = setup_logger()
+

 def rough_calibration(pis, mission):
     """Make a rough conversion between PI channel and energy.
@@ -799,7 +802,7 @@ def ref_mjd(fits_file, hdu=1):
     if isinstance(fits_file, Iterable) and not is_string(fits_file):  # pragma: no cover
         fits_file = fits_file[0]

-    logging.info("opening %s" % fits_file)
+    logger.info("opening %s" % fits_file)

     hdulist = fits.open(fits_file, ignore_missing_end=True)
@@ -843,7 +846,7 @@ def common_name(str1, str2, default="common"):
     common_str = common_str.lstrip("_").lstrip("-")
     if common_str == "":
         common_str = default
-    logging.debug("common_name: %s %s -> %s" % (str1, str2, common_str))
+    logger.debug("common_name: %s %s -> %s" % (str1, str2, common_str))
     return common_str
diff --git a/stingray/lightcurve.py b/stingray/lightcurve.py
index 86be3734d..ca597f189 100644
--- a/stingray/lightcurve.py
+++ b/stingray/lightcurve.py
@@ -34,11 +34,14 @@
 from stingray.io import lcurve_from_fits
 from stingray import bexvar
 from stingray.base import interpret_times
+from stingray.loggingconfig import setup_logger

 __all__ = ["Lightcurve"]

 valid_statistics = ["poisson", "gauss", None]

+logger = setup_logger()
+

 class Lightcurve(StingrayTimeseries):
     """
@@ -288,7 +291,7 @@ def __init__(
         self._time = time
         if dt is None and time.size > 1:
-            logging.info(
+            logger.info(
                 "Computing the bin time ``dt``. This can take "
                 "time. If you know the bin time, please specify it"
                 " at light curve creation"
             )
@@ -480,7 +483,7 @@ def bin_hi(self):
         return self._bin_hi

     def initial_optional_checks(self, time, counts, err, gti=None):
-        logging.info(
+        logger.info(
            "Checking if light curve is well behaved. This "
            "can take time, so if you are sure it is already "
            "sorted, specify skip_checks=True at light curve "
            "creation"
         )
@@ -515,11 +518,11 @@
         if nonfinite_flag:
             warnings.warn("There are non-finite points in the data, but they are outside GTIs. ")
") - logging.info("Checking if light curve is sorted.") + logger.info("Checking if light curve is sorted.") unsorted = not is_sorted(time) if unsorted: - logging.warning("The light curve is unsorted.") + logger.warning("The light curve is unsorted.") return time, counts, err def check_lightcurve(self): @@ -875,14 +878,14 @@ def make_lightcurve(toa, dt, tseg=None, tstart=None, gti=None, mjdref=0, use_his if gti is not None: tseg = np.max(gti) - tstart - logging.info("make_lightcurve: tseg: " + str(tseg)) + logger.info("make_lightcurve: tseg: " + str(tseg)) timebin = int(tseg / dt) # If we are missing the next bin by just 1%, let's round up: if tseg / dt - timebin >= 0.99: timebin += 1 - logging.info("make_lightcurve: timebin: " + str(timebin)) + logger.info("make_lightcurve: timebin: " + str(timebin)) tend = tstart + timebin * dt good = (tstart <= toa) & (toa < tend) diff --git a/stingray/loggingconfig.py b/stingray/loggingconfig.py new file mode 100644 index 000000000..72a93e710 --- /dev/null +++ b/stingray/loggingconfig.py @@ -0,0 +1,39 @@ +import logging + +logger = None + + +class CustomFormatter(logging.Formatter): + grey = "\x1b[38;20m" + yellow = "\x1b[33;20m" + red = "\x1b[31;20m" + bold_red = "\x1b[31;1m" + reset = "\x1b[0m" + format = "%(asctime)s - %(name)s - %(levelname)s - %(message)s (%(filename)s:%(lineno)d)" + + FORMATS = { + logging.DEBUG: grey + format + reset, + logging.INFO: grey + format + reset, + logging.WARNING: yellow + format + reset, + logging.ERROR: red + format + reset, + logging.CRITICAL: bold_red + format + reset, + } + + def format(self, record): + log_fmt = self.FORMATS.get(record.levelno) + formatter = logging.Formatter(log_fmt) + return formatter.format(record) + + +def setup_logger(): + global logger + + if not logger: + logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + formatter = CustomFormatter() + handler.setFormatter(formatter) + logger.addHandler(handler) + logger.setLevel(logging.DEBUG) + + return logger diff --git a/stingray/modeling/parameterestimation.py b/stingray/modeling/parameterestimation.py index e5c61d4e4..4113c6cd1 100644 --- a/stingray/modeling/parameterestimation.py +++ b/stingray/modeling/parameterestimation.py @@ -46,6 +46,7 @@ logmin, fitter_to_model_params, ) +from stingray.loggingconfig import CustomFormatter class OptimizationResults(object): @@ -140,6 +141,8 @@ def __init__(self, lpost, res, neg=True, log=None): self.log.setLevel(logging.DEBUG) if not self.log.handlers: ch = logging.StreamHandler() + formatter = CustomFormatter() + ch.setFormatter(formatter) ch.setLevel(logging.DEBUG) self.log.addHandler(ch) diff --git a/stingray/modeling/tests/test_parameterestimation.py b/stingray/modeling/tests/test_parameterestimation.py index 786356079..4e1f44ba7 100644 --- a/stingray/modeling/tests/test_parameterestimation.py +++ b/stingray/modeling/tests/test_parameterestimation.py @@ -11,6 +11,7 @@ from stingray.modeling import ParameterEstimation, PSDParEst, OptimizationResults, SamplingResults from stingray.modeling import PSDPosterior, set_logprior, PSDLogLikelihood, LogLikelihood from stingray.modeling.posterior import fitter_to_model_params +from stingray.loggingconfig import CustomFormatter try: from statsmodels.tools.numdiff import approx_hess @@ -46,6 +47,8 @@ def __init__(self, lpost, res, neg, log=None): self.log.setLevel(logging.DEBUG) if not self.log.handlers: ch = logging.StreamHandler() + formatter = CustomFormatter() + ch.setFormatter(formatter) ch.setLevel(logging.DEBUG) self.log.addHandler(ch) 
diff --git a/stingray/pulse/accelsearch.py b/stingray/pulse/accelsearch.py
index 2d647b6c2..1b15755ab 100644
--- a/stingray/pulse/accelsearch.py
+++ b/stingray/pulse/accelsearch.py
@@ -7,9 +7,11 @@
 import scipy
 from scipy import special
 import scipy.signal
-from astropy import log
 from astropy.table import Table
 import matplotlib.pyplot as plt
+from stingray.loggingconfig import setup_logger
+
+logger = setup_logger()

 try:
     from tqdm import tqdm as show_progress
@@ -93,7 +95,7 @@ def _create_responses(range_z):
         List of arrays describing the shape of the response function
         corresponding to each value of ``range_z``.
     """
-    log.info("Creating responses")
+    logger.info("Creating responses")
     responses = []
     for j, z in enumerate(show_progress(range_z)):
         # fdot = z / T**2
@@ -237,7 +239,7 @@ def _calculate_all_convolutions(
     candidate_powers: array of float
         Power of candidates
     """
-    log.info("Convolving FFT with responses...")
+    logger.info("Convolving FFT with responses...")

     candidate_powers = [0.0]
     candidate_rs = [1]
@@ -378,7 +380,7 @@ def accelsearch(
         plt.loglog()

     if fft_rescale is not None:
-        log.info("Applying initial filters...")
+        logger.info("Applying initial filters...")
         spectr = fft_rescale(spectr)

         if debug:
@@ -396,11 +398,11 @@ def accelsearch(
     T = times[-1] - times[0] + dt
     freq_intv_to_search = (freq >= fmin) & (freq < fmax)

-    log.info("Starting search over full plane...")
+    logger.info("Starting search over full plane...")
     start_z = -zmax
     end_z = zmax
     range_z = np.arange(start_z, end_z, delta_z)
-    log.info(
+    logger.info(
         "min and max possible r_dot: {}--{}".format(delta_z / T**2, np.max(range_z) / T**2)
     )
     freqs_to_search = freq[freq_intv_to_search]