Replace print calls with logging
... using loguru
mfisher87 committed Jul 23, 2024
1 parent 5e52b30 commit 32264c7
Showing 15 changed files with 98 additions and 120 deletions.
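
The pattern repeated across the changed files: print() calls and verbose= keyword plumbing are replaced by loguru's shared logger. A minimal sketch of the before/after (illustrative messages, not lines from this diff):

    # before: ad-hoc prints, gated by verbose flags threaded through every call
    # if verbose:
    #     print("Downloading 3 files...")

    # after: one global logger; verbosity is set once, at the sink
    from loguru import logger

    logger.info("Downloading {} files...", 3)   # loguru's deferred {}-formatting
    logger.warning("Input file missing; skipping date.")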
9 changes: 9 additions & 0 deletions antarctica_today/__init__.py
@@ -1,4 +1,5 @@
 import os
+import sys
 
 # IMPORTANT: If we don't specify this setting, then the projection we want to use will
 # be replaced with another (and this warning will be printed)!
@@ -10,3 +11,11 @@
 # used instead. To use the original CRS, set the OSR_USE_NON_DEPRECATED configuration
 # option to NO.
 os.environ["OSR_USE_NON_DEPRECATED"] = "NO"
+
+
+# Ignore warnings by default, while still allowing users to change the behavior, e.g. by
+# upgrading them to exceptions.
+if not sys.warnoptions:
+    import warnings
+
+    warnings.simplefilter("ignore")
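
For context on the new sys.warnoptions guard: passing any -W option (or setting PYTHONWARNINGS) populates sys.warnoptions, so the simplefilter("ignore") above is skipped and the user's filter wins. A sketch of the two invocations (the script name is hypothetical):

    # default run -- warnings silenced by the package:
    #     python make_maps.py
    # user override -- -W fills sys.warnoptions, the guard below is False,
    # and warnings escalate to exceptions as requested:
    #     python -W error make_maps.py
    import sys
    import warnings

    if not sys.warnoptions:  # empty unless -W / PYTHONWARNINGS was given
        warnings.simplefilter("ignore")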
23 changes: 6 additions & 17 deletions antarctica_today/generate_antarctica_today_map.py
@@ -54,7 +54,8 @@ def main():
     """Do stuff I want to do here."""
 
     m = AT_map_generator(
-        fill_pole_hole=False, filter_out_error_swaths=True, verbose=True
+        fill_pole_hole=False,
+        filter_out_error_swaths=True,
     )
 
     for region in [
@@ -208,13 +209,6 @@ def read_and_parse_args():
         default=False,
         help="Omit the legend. Default if not set: include a legend.",
     )
-    parser.add_argument(
-        "--verbose",
-        "-v",
-        action="store_true",
-        default=False,
-        help="Increase output verbosity.",
-    )
 
     return parser.parse_args()
 
@@ -231,7 +225,6 @@ def __init__(
         melt_array_picklefile=model_results_picklefile,
         fill_pole_hole=True,
         filter_out_error_swaths=True,
-        verbose=True,
     ):
         """Initialize the class."""
         self.melt_array_picklefile = melt_array_picklefile
@@ -893,7 +886,7 @@ def _strip_empty_image_border(self, filename):
             return
             # svgclip.py isn't working... can't seem to resolve the Rsvg namespace.
             # svgclip.clip(filename, filename, margin=0)
-            # logger.debug("Trimmed {filename}.")
+            # logger.debug(f"Trimmed {filename}")
 
         else:
             bg = PIL.Image.new(im.mode, im.size, im.getpixel((0, 0)))
@@ -903,7 +896,7 @@ def _strip_empty_image_border(self, filename):
             if bbox:
                 im2 = im.crop(bbox)
                 im2.save(filename)
-                logger.debug("Trimmed {filename}.")
+                logger.debug(f"Trimmed {filename}")
 
         return
 
@@ -1790,7 +1783,6 @@ def generate_anomaly_melt_map(
         keep_year_label_wrapped=True,
         reset_picklefile=False,
         message_below_year="relative to 1990-2020",
-        verbose=True,
     ):
         """Generate a cumulative annual anomaly melt map compared to the baseline climatology period.
@@ -1864,7 +1856,7 @@ def generate_anomaly_melt_map(
 
         if mmdd_of_year is None:
             # Just get the annual anomlay map for that year.
-            anomaly_data = read_annual_melt_anomaly_tif(year=year, verbose=verbose)
+            anomaly_data = read_annual_melt_anomaly_tif(year=year)
         else:
             datetime_this_year = datetime.datetime(
                 year=year
@@ -1875,7 +1867,6 @@ def generate_anomaly_melt_map(
             anomaly_data = create_partial_year_melt_anomaly_tif(
                 current_datetime=datetime_this_year,
                 gap_filled=False,
-                verbose=verbose,
             )
 
         if anomaly_data is None:
@@ -1966,7 +1957,6 @@ def generate_latest_partial_anomaly_melt_map(
         keep_year_label_wrapped=True,
         reset_picklefile=False,
         message_below_year=None,
-        verbose=True,
     ):
         """Same as generate_anomaly_melt_map, but do it for only a partial year,
         up until the last day of data that we have in the melt array.
@@ -1998,7 +1988,6 @@ def generate_latest_partial_anomaly_melt_map(
         keep_year_label_wrapped=keep_year_label_wrapped,
         reset_picklefile=reset_picklefile,
         message_below_year=message_below_year,
-        verbose=verbose,
     )
 
 
@@ -2014,7 +2003,7 @@ def SPECIAL_make_map_with_borders(year=2020):
        DATA_QGIS_DIR / "basins " / "Antarctic_Regions_v2_interior_borders.shp"
    )
 
-    at = AT_map_generator(fill_pole_hole=False, verbose=True)
+    at = AT_map_generator(fill_pole_hole=False)
    for fmt in ("png", "svg"):
        # for fmt in ("png",):
        fname = os.path.join(
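
With the --verbose flag and verbose= parameters gone, per-run verbosity would instead be set once on the loguru sink. A minimal sketch of that wiring (assumed usage, not part of this commit):

    import sys
    from loguru import logger

    logger.remove()                        # drop loguru's default stderr sink
    logger.add(sys.stderr, level="DEBUG")  # or level="INFO" for quieter runs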
12 changes: 8 additions & 4 deletions antarctica_today/generate_daily_melt_file.py
@@ -21,6 +21,7 @@
 
 import numpy
 import xarray
+from loguru import logger
 
 from antarctica_today import tb_file_data, write_NSIDC_bin_to_gtif
 from antarctica_today.melt_array_picklefile import get_ice_mask_array
@@ -91,11 +92,13 @@ def generate_new_daily_melt_files(
         # Make sure there's at least one of each file (i.e. exactly one). If not, just skip & continue
         if len(nsidc_fps) == 0:
             if warn_if_missing_files:
-                warnings.warn(
+                msg = (
                     "Warning: At least one NSIDC Tb file on date '"
                     + dt.strftime("%Y%m%d")
                     + "' is missing. Skipping that date."
                 )
+                logger.warning(msg)
+                warnings.warn(msg)
             continue
 
         threshold_file = get_correct_threshold_file(dt)
@@ -119,7 +122,6 @@ def create_daily_melt_file(
     output_bin_filename,
     output_gtif_filename=None,
     Tb_nodata_value=-999,
-    verbose=True,
 ) -> numpy.ndarray:
     """Read input files and generate a daily melt file. Primary function."""
     output_array = read_files_and_generate_melt_array(
@@ -131,7 +133,10 @@ def create_daily_melt_file(
     # Write the output .bin file
     # write_flat_binary.write_array_to_binary(
     write_array_to_binary(
-        output_array, output_bin_filename, numbytes=2, signed=True, verbose=verbose
+        output_array,
+        output_bin_filename,
+        numbytes=2,
+        signed=True,
     )
 
     # Write the output.tif file, if called for
@@ -142,7 +147,6 @@ def create_daily_melt_file(
         resolution=25,
         hemisphere="S",
         nodata=None,
-        verbose=verbose,
     )
 
     return output_array
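
Note the double reporting above: the missing-file message now goes to both the log and the stdlib warnings machinery, so it lands in any configured loguru sinks while staying filterable (or escalatable) via warning filters. A self-contained sketch of the pattern:

    import warnings
    from loguru import logger

    def report_missing(date_str: str) -> None:
        # one message, two channels: persistent log record + filterable warning
        msg = f"At least one NSIDC Tb file on date '{date_str}' is missing."
        logger.warning(msg)
        warnings.warn(msg)

    report_missing("20240723")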
1 change: 0 additions & 1 deletion antarctica_today/generate_plots_for_given_day.py
@@ -106,7 +106,6 @@ def generate_maps_and_plots_for_a_date(
         gap_filled=True,
         dpi=dpi,
         outfile=lineplot_outfile,
-        verbose=True,
     )
 
     # Close the current plots open in matplotlib. (Keeps them from accumulating.)
4 changes: 2 additions & 2 deletions antarctica_today/progress_bar.py
@@ -34,10 +34,10 @@ def ProgressBar(
     )
     filledLength = int((length * iteration) // total)
     bar = fill * filledLength + "-" * (length - filledLength)
-    print(f"\r{prefix} |{bar}| {percent}% {suffix}", end=printEnd)
+    print(f"\r{prefix} |{bar}| {percent}% {suffix}", end=printEnd)  # noqa: T201
     # Print New Line on Complete
     if iteration == total:
-        print()
+        print()  # noqa: T201
 
 
 # Sample Usage
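
The # noqa: T201 markers silence rule T201 ("print found") from flake8-print/Ruff: these two prints are deliberate, since the bar redraws a single terminal line with a carriage return, which a line-oriented logger would split onto separate records. A standalone sketch of the behavior being preserved:

    import time

    for i in range(1, 101):
        bar = "#" * (i // 5)
        print(f"\rProgress |{bar:<20}| {i}%", end="", flush=True)  # noqa: T201
        time.sleep(0.01)
    print()  # noqa: T201  -- final newline once the bar completes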
26 changes: 14 additions & 12 deletions antarctica_today/src_baseline/download_NSIDC_Tb_files.py
@@ -36,6 +36,8 @@
 from getpass import getpass
 from typing import List
 
+from loguru import logger
+
 try:
     from urllib.error import HTTPError, URLError
     from urllib.parse import urlparse
@@ -107,7 +109,7 @@ def get_credentials(url):
         errprefix = "netrc error: "
     except Exception as e:
         if not ("No such file" in str(e)):
-            print("netrc error: {0}".format(str(e)))
+            logger.error("netrc error: {0}".format(str(e)))
         username = None
         password = None
 
@@ -125,7 +127,7 @@ def get_credentials(url):
             opener = build_opener(HTTPCookieProcessor())
             opener.open(req)
         except HTTPError:
-            print(errprefix + "Incorrect username or password")
+            logger.error(errprefix + "Incorrect username or password")
             errprefix = ""
             credentials = None
             username = None
@@ -137,7 +139,7 @@
 def build_version_query_params(version):
     desired_pad_length = 3
     if len(version) > desired_pad_length:
-        print('Version string too long: "{0}"'.format(version))
+        logger.error('Version string too long: "{0}"'.format(version))
         quit()
 
     version = str(int(version))  # Strip off any leading zeros
@@ -178,7 +180,7 @@ def cmr_download(urls, output_dir=None, credentials=None):
         return
 
     url_count = len(urls)
-    print("Downloading {0} files...".format(url_count))
+    logger.info("Downloading {0} files...".format(url_count))
     # credentials = None
 
     for index, url in enumerate(urls, start=1):
@@ -188,7 +190,7 @@ def cmr_download(urls, output_dir=None, credentials=None):
         filename = url.split("/")[-1]
         if output_dir != None:
             filename = os.path.join(output_dir, filename)
-        print(
+        logger.info(
             "{0}/{1}: {2}".format(
                 str(index).zfill(len(str(url_count))), url_count, filename
             )
@@ -205,9 +207,9 @@ def cmr_download(urls, output_dir=None, credentials=None):
             data = opener.open(req).read()
             open(filename, "wb").write(data)
         except HTTPError as e:
-            print("HTTP error {0}, {1}".format(e.code, e.reason))
+            logger.info("HTTP error {0}, {1}".format(e.code, e.reason))
         except URLError as e:
-            print("URL error: {0}".format(e.reason))
+            logger.info("URL error: {0}".format(e.reason))
         except IOError:
             raise
         except KeyboardInterrupt:
@@ -272,7 +274,7 @@ def cmr_search(
         polygon=polygon,
         filename_filter=filename_filter,
     )
-    print("Querying for data:\n\t{0}\n".format(cmr_query_url))
+    logger.info("Querying for data:\n\t{0}\n".format(cmr_query_url))
 
     cmr_scroll_id = None
     ctx = ssl.create_default_context()
@@ -292,21 +294,21 @@ def cmr_search(
             cmr_scroll_id = headers["cmr-scroll-id"]
             hits = int(headers["cmr-hits"])
             if hits > 0:
-                print("Found {0} matches.".format(hits))
+                logger.info("Found {0} matches.".format(hits))
             else:
-                print("Found no matches.")
+                logger.info("Found no matches.")
             search_page = response.read()
             search_page = json.loads(search_page.decode("utf-8"))
             url_scroll_results = cmr_filter_urls(search_page)
             if not url_scroll_results:
                 break
            if hits > CMR_PAGE_SIZE:
-                print(".", end="")
+                print(".", end="")  # noqa: T201
                sys.stdout.flush()
            urls += url_scroll_results
 
        if hits > CMR_PAGE_SIZE:
-            print()
+            print()  # noqa: T201
        return urls
    except KeyboardInterrupt:
        quit()
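
By default loguru writes these logger.info/logger.error records to stderr; a file sink could be added at startup to keep a persistent record of downloads. A sketch under assumed names (the log path and rotation policy are illustrative, not from this commit):

    from loguru import logger

    # hypothetical sink: also write INFO-and-above records to a rotating file
    logger.add("antarctica_today.log", level="INFO", rotation="10 MB")

    logger.info("Downloading {0} files...".format(3))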
@@ -14,6 +14,7 @@
 import os
 
 import numpy
+from loguru import logger
 from osgeo import gdal
 
 ice_mask_tif = "F:/Research/DATA/Antarctica_Today/baseline_datasets/ice_mask.tif"
@@ -38,7 +39,7 @@
 out_ndv = 0.0
 out_array[out_array == tm_ndv] = out_ndv
 
-print(numpy.where(numpy.logical_and((im_array == 1), (tm_array == tm_ndv))))
+logger.info(numpy.where(numpy.logical_and((im_array == 1), (tm_array == tm_ndv))))
 
 # These are the hand-selected pixel values, eight lines total.
 # Five going vertically along Queen Maud Land, extrapolating 1-2 pixels
@@ -85,9 +86,9 @@ def f(x, a, b, c):
     p = numpy.polyfit(known_x, tm_array[known_i, known_j], 2)
     extrapolated_values = f(numpy.array(interp_x), *p)
 
-    print("\n", known_i, interp_i, known_j, interp_j)
-    print(tm_array[known_i, known_j], extrapolated_values)
-    print(known_x, interp_x)
+    logger.info(f"\n {known_i} {interp_i} {known_j} {interp_j}")
+    logger.info(f"{tm_array[known_i, known_j]} {extrapolated_values}")
+    logger.info(f"{known_x} {interp_x}")
 
     # Fill in missing values with extrapolated values
     out_array[interp_i, interp_j] = extrapolated_values
@@ -119,4 +120,4 @@ def f(x, a, b, c):
 ds_out.FlushCache()
 band_out = None
 ds_out = None
-print("\n", thermap_tif_out, "written.")
+logger.info(f"\n {thermap_tif_out} written.")
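
The hunk above sits inside a gap-filling loop: a degree-2 polynomial is fit to the known Thermap pixels along each line and evaluated at the missing ones. A self-contained sketch with synthetic values (assuming f is the quadratic a*x**2 + b*x + c, consistent with polyfit returning three coefficients):

    import numpy

    def f(x, a, b, c):
        return a * x**2 + b * x + c

    known_x = numpy.array([0.0, 1.0, 2.0, 3.0])          # pixel positions with data
    known_t = numpy.array([-12.0, -12.6, -13.4, -14.5])  # synthetic temps, degC
    p = numpy.polyfit(known_x, known_t, 2)               # -> (a, b, c)
    extrapolated_values = f(numpy.array([4.0, 5.0]), *p) # fill 1-2 missing pixels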
9 changes: 5 additions & 4 deletions antarctica_today/src_baseline/plot_thermap_lapse_rate.py
@@ -3,6 +3,7 @@
 import numpy
 import pandas as pd
 import statsmodels.api as sm
+from loguru import logger
 from matplotlib import pyplot as plt
 from matplotlib.axes import Axes
 from matplotlib.figure import Figure
@@ -30,7 +31,7 @@
 
 X = sm.add_constant(elevs)
 model_elev_only = sm.OLS(thermap_df[["Temp"]], X).fit()
-print(model_elev_only.summary())
+logger.info(model_elev_only.summary())
 coefs = model_elev_only.params
 
 
@@ -75,19 +76,19 @@ def plus_minus_op(x):
 X = thermap_df[["REMA_or_Thermap_Elev", "Lat(S)"]]
 Y = thermap_df[["Temp"]]
 
-print("\n=== Statsmodels ===")
+logger.info("=== Statsmodels ===")
 X = sm.add_constant(X)
 model = sm.OLS(Y, X).fit()
 
-print(model.summary())
+logger.info(model.summary())
 coefs = model.params
 
 temps_lat_corrected_75 = temps - coefs["Lat(S)"] * (75 + lats)
 axes[2].scatter(elevs, temps_lat_corrected_75, color="purple")
 
 # # Compute a quadratic curve through this line.
 # poly_coefs = numpy.polyfit(elevs, temps_lat_corrected_75, deg=2)
-# print(poly_coefs)
+# logger.info(poly_coefs)
 # # Quadratic trend-line
 # trend_x = numpy.linspace(*min_max_elev, 100)
 # trend_y = poly_coefs[0]*(trend_x**2) + poly_coefs[1]*trend_x + poly_coefs[2]
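
The regression being logged above is an ordinary least squares fit with an intercept (sm.add_constant) of temperature against elevation and latitude; logger.info(model.summary()) relies on loguru stringifying the statsmodels Summary object. A runnable sketch on synthetic data (not the Thermap measurements):

    import numpy
    import statsmodels.api as sm
    from loguru import logger

    rng = numpy.random.default_rng(0)
    elevs = numpy.linspace(0.0, 3000.0, 50)                   # m, synthetic
    temps = -20.0 - 0.009 * elevs + rng.normal(0.0, 0.5, 50)  # degC, lapse-rate-like

    X = sm.add_constant(elevs)       # adds the intercept column
    model = sm.OLS(temps, X).fit()
    logger.info(model.summary())     # Summary object, stringified by loguru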
3 changes: 2 additions & 1 deletion antarctica_today/src_baseline/resize_25m_grid_by_1000.py
@@ -9,6 +9,7 @@
 
 import os
 
+from loguru import logger
 from osgeo import gdal
 
 infile = "C:/Users/mmacferrin/Dropbox/Research/Antarctica_Today/Dan Dixon/derived/polar_grid_10m_temps_25m_OFF_BY_1000.tif"
@@ -45,4 +46,4 @@
 ds_out.FlushCache()
 band_out = None
 ds_out = None
-print(outfile, "written.")
+logger.info(f"Wrote {outfile}")