From b9070f2daf61bcc5429b3c24d4454d03b422b7a1 Mon Sep 17 00:00:00 2001 From: giumas Date: Sun, 27 Oct 2024 08:12:06 +0100 Subject: [PATCH] switched RTOFS from OpenDAP to local files --- .../soundspeed/atlas/ex_atlases_auto_query.py | 4 +- examples/soundspeed/profile/ex_oc_p_d.py | 2 +- hyo2/ssm2/lib/atlas/regofsonline.py | 2 +- hyo2/ssm2/lib/atlas/rtofs.py | 519 +++++++----------- hyo2/ssm2/lib/profile/oceanography.py | 22 +- 5 files changed, 227 insertions(+), 322 deletions(-) diff --git a/examples/soundspeed/atlas/ex_atlases_auto_query.py b/examples/soundspeed/atlas/ex_atlases_auto_query.py index c5c3eaf1..19ec0d33 100644 --- a/examples/soundspeed/atlas/ex_atlases_auto_query.py +++ b/examples/soundspeed/atlas/ex_atlases_auto_query.py @@ -47,7 +47,7 @@ class ModelOptions(IntEnum): # Choose Model -switch = ModelOptions.WCOFS # Choose a ModelOptions Value to test +switch = ModelOptions.RTOFS # Choose a ModelOptions Value to test app = QtWidgets.QApplication([]) # PySide stuff (start) mw = QtWidgets.QMainWindow() @@ -58,7 +58,7 @@ class ModelOptions(IntEnum): # Choose test location tests = [ - # (-19.1, 74.16, dt.utcnow()), # Indian Ocean + # (-19.1, 74.17, dt.utcnow()), # Indian Ocean # (72.852028, -67.315431, dt.utcnow()) # Baffin Bay # (18.2648113, 16.1761115, dt.utcnow()), # in land -> middle of Africa # (39.725989, -104.967745, dt.utcnow()) # in land -> Denver, CO diff --git a/examples/soundspeed/profile/ex_oc_p_d.py b/examples/soundspeed/profile/ex_oc_p_d.py index c680a432..68439dbc 100644 --- a/examples/soundspeed/profile/ex_oc_p_d.py +++ b/examples/soundspeed/profile/ex_oc_p_d.py @@ -23,7 +23,7 @@ calc_d = Oc.p2d_gsw(p=trusted_gsw_p, lat=trusted_gsw_lat, dyn_height=None) logger.info("GSW: Depth: %.3f <> %.3f" % (calc_d, trusted_gsw_d)) -calc_p = Oc.d2p_backup(d=calc_d, lat=trusted_fof_lat) +calc_p = Oc.d2p_backup(d=trusted_fof_d, lat=trusted_fof_lat) logger.info("Backup: Pressure: %.3f <> %.3f" % (calc_p, trusted_fof_p)) calc_p = Oc.d2p_gsw(d=trusted_gsw_d, lat=trusted_gsw_lat, dyn_height=None) diff --git a/hyo2/ssm2/lib/atlas/regofsonline.py b/hyo2/ssm2/lib/atlas/regofsonline.py index 16dd2299..e391ee28 100644 --- a/hyo2/ssm2/lib/atlas/regofsonline.py +++ b/hyo2/ssm2/lib/atlas/regofsonline.py @@ -157,7 +157,7 @@ def query(self, lat: Optional[float], lon: Optional[float], dtstamp: Union[dt, N try: lat_idx, lon_idx = self.grid_coords(lat, lon, dtstamp=dtstamp, server_mode=server_mode) if lat_idx is None: - logger.info("location outside of %s coverage" % self.name) + logger.info("troubles with data source or location outside of %s coverage" % self.name) return None except TypeError as e: diff --git a/hyo2/ssm2/lib/atlas/rtofs.py b/hyo2/ssm2/lib/atlas/rtofs.py index b4649b15..9d5a3809 100644 --- a/hyo2/ssm2/lib/atlas/rtofs.py +++ b/hyo2/ssm2/lib/atlas/rtofs.py @@ -1,6 +1,9 @@ +import math +import os.path from datetime import datetime as dt, date, timedelta import logging -from typing import Optional, Union, TYPE_CHECKING +import shutil +from typing import TYPE_CHECKING from netCDF4 import Dataset import numpy as np @@ -13,6 +16,7 @@ from hyo2.ssm2.lib.profile.profilelist import ProfileList from hyo2.ssm2.lib.profile.dicts import Dicts from hyo2.ssm2.lib.profile.oceanography import Oceanography as Oc + if TYPE_CHECKING: from hyo2.ssm2.lib.soundspeed import SoundSpeedLibrary @@ -27,13 +31,10 @@ def __init__(self, data_folder: str, prj: 'SoundSpeedLibrary') -> None: self.name = self.__class__.__name__ self.desc = "Global Real-Time Ocean Forecast System" - # How far are we willing to look 
for solutions? size in grid nodes - self._search_window = 5 - self._search_half_window = self._search_window // 2 # 2000 dBar is the ref depth associated with the potential temperatures in the grid (sigma-2) self._ref_p = 2000 - self._has_data_loaded = False # grids are "loaded" ? (netCDF files are opened) + self._has_data_loaded = False self._last_loaded_day = dt(1900, 1, 1) # some silly day in the past self._file_temp = None self._file_sal = None @@ -41,32 +42,172 @@ def __init__(self, data_folder: str, prj: 'SoundSpeedLibrary') -> None: self._d = None self._lat = None self._lon = None - self._lat_step = None - self._lat_0 = None - self._lon_step = None - self._lon_0 = None - - # ### public API ### + def clear_data(self) -> None: + """Delete the data and reset the last loaded day""" + logger.debug("clearing data") + self._has_data_loaded = False + self._last_loaded_day = dt(1900, 1, 1) # some silly day in the past + if self._file_temp: + self._file_temp.close() + self._file_temp = None + if self._file_sal: + self._file_sal.close() + self._file_sal = None + self._day_idx = None + self._d = None + self._lat = None + self._lon = None + + @staticmethod + def _check_url(url: str) -> bool: + try: + with requests.get(url, allow_redirects=True, stream=True) as resp: + logger.debug("checked url: %s -> %s" % (url, resp.status_code)) + if resp.status_code == 200: + return True + else: + return False + + except Exception as e: + logger.warning("while checking %s, %s" % (url, e)) + return False + + @staticmethod + def _build_check_urls(input_date: date) -> tuple: + """Build the URLs used to check and download the daily salinity and temperature files""" + # Primary server: http://nomads.ncep.noaa.gov/pub/data/nccf/com/rtofs/prod/rtofs.20160410/ + url_temp = 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/rtofs/prod/rtofs.%s/' \ 'rtofs_glo_3dz_n024_daily_3ztio.nc' % input_date.strftime("%Y%m%d") + logger.debug("target RTOFS temp: %s" % url_temp) + url_sal = 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/rtofs/prod/rtofs.%s/' \ 'rtofs_glo_3dz_n024_daily_3zsio.nc' % input_date.strftime("%Y%m%d") + logger.debug("target RTOFS sal: %s" % url_sal) + return url_temp, url_sal + + @staticmethod + def _build_opendap_urls(input_date: date) -> tuple: + """Build the OpenDAP URLs for the daily salinity and temperature data sets""" + # TODO: currently unused!
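+        # kept for reference: these OpenDAP endpoints were the data source before the switch to downloading the files locally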
+ + # Primary server: http://nomads.ncep.noaa.gov/dods/rtofs + url_temp = 'https://nomads.ncep.noaa.gov/dods/rtofs/rtofs_global%s/rtofs_glo_3dz_nowcast_daily_temp' \ + % input_date.strftime("%Y%m%d") + logger.debug("OpenDAP temp: %s" % url_temp) + url_sal = 'https://nomads.ncep.noaa.gov/dods/rtofs/rtofs_global%s/rtofs_glo_3dz_nowcast_daily_salt' \ + % input_date.strftime("%Y%m%d") + logger.debug("OpenDAP sal: %s" % url_sal) + return url_temp, url_sal def is_present(self) -> bool: """check the availability""" return self._has_data_loaded - def download_db(self, dtstamp: Union[dt, None] = None, server_mode: bool = False) -> bool: + def _clean_rtofs_folder(self, skip_folder: str | None = None) -> None: + for item in os.listdir(self.data_folder): + full_path = os.path.join(self.data_folder, item) + + if os.path.isfile(full_path): + os.remove(full_path) + continue + + if skip_folder: + if item == skip_folder: + continue + shutil.rmtree(full_path) + + def _download_files(self, datestamp: dt, server_mode: bool = False) -> bool: + progress = CliProgress() + + # check if the files are loaded and that the date matches + if self._has_data_loaded: + # logger.info("%s" % self.last_loaded_day) + if self._last_loaded_day == datestamp: + return True + # the data are old + logger.info("cleaning data: %s %s" % (self._last_loaded_day, datestamp)) + self.clear_data() + + progress.start(text="Check RTOFS urls", is_disabled=server_mode) + + # check if the data are available on the RTOFS server + url_ck_temp, url_ck_sal = self._build_check_urls(datestamp) + if not self._check_url(url_ck_temp) or not self._check_url(url_ck_sal): + + logger.info('issue with %s -> trying with the previous day' % datestamp) + datestamp -= timedelta(days=1) + url_ck_temp, url_ck_sal = self._build_check_urls(datestamp) + + if not self._check_url(url_ck_temp) or not self._check_url(url_ck_sal): + logger.warning('unable to locate data on RTOFS server for date: %s and next day' % datestamp) + self.clear_data() + progress.end() + return False + + try: + progress.update(text="Delete old RTOFS files", value=30) + + # remove all the RTOFS folder content, except the current date folder + datestamp_name = datestamp.strftime("%Y%m%d") + self._clean_rtofs_folder(skip_folder=datestamp_name) + + progress.update(text="Download RTOFS temperature", value=40) + + datestamp_folder = os.path.join(self.data_folder, datestamp_name) + if not os.path.exists(datestamp_folder): + os.makedirs(datestamp_folder) + + loc_file_temp = os.path.basename(url_ck_temp) + loc_path_temp = os.path.join(datestamp_folder, loc_file_temp) + logger.info('local temp: %s' % loc_path_temp) + if not os.path.exists(loc_path_temp): + progress.update(value=50) + with requests.get(url_ck_temp, stream=True) as r: + with open(loc_path_temp, 'wb') as f: + shutil.copyfileobj(r.raw, f) + self._file_temp = Dataset(loc_path_temp) + + progress.update(text="Download RTOFS salinity", value=60) + + loc_file_sal = os.path.basename(url_ck_sal) + loc_path_sal = os.path.join(datestamp_folder, loc_file_sal) + logger.info('local sal: %s' % loc_path_sal) + if not os.path.exists(loc_path_sal): + progress.update(value=75) + with requests.get(url_ck_sal, stream=True) as r: + with open(loc_path_sal, 'wb') as f: + shutil.copyfileobj(r.raw, f) + self._file_sal = Dataset(loc_path_sal) + + self._day_idx = 0 + + except (RuntimeError, IOError) as e: + logger.warning("unable to download RTOFS data: %s -> %s" % (datestamp.strftime("%Y%m%d"), e), exc_info=True) + self.clear_data() + self._clean_rtofs_folder() + 
progress.end() + return False + + # success! + self._has_data_loaded = True + self._last_loaded_day = datestamp + # logger.info("loaded data for %s" % datestamp) + progress.end() + return True + + def download_db(self, dtstamp: dt | None = None, server_mode: bool = False) -> bool: """try to connect and load info from the data set""" if dtstamp is None: dtstamp = dt.utcnow() - if not isinstance(dtstamp, dt): - raise RuntimeError("invalid datetime passed: %s" % type(dtstamp)) if not self._download_files(datestamp=dtstamp, server_mode=server_mode): return False try: - # Now get latitudes, longitudes and depths for x,y,z referencing - self._d = self._file_temp.variables['lev'][:] - self._lat = self._file_temp.variables['lat'][:] - self._lon = self._file_temp.variables['lon'][:] + self._d = self._file_temp.variables['Depth'][:] + self._lat = self._file_temp.variables['Latitude'][:] + self._lon = self._file_temp.variables['Longitude'][:] + # logger.debug('d:(%s)\n%s' % (self._d.shape, self._d)) # logger.debug('lat:(%s)\n%s' % (self._lat.shape, self._lat)) # logger.debug('lon:(%s)\n%s' % (self._lon.shape, self._lon)) @@ -74,144 +215,64 @@ def download_db(self, dtstamp: Union[dt, None] = None, server_mode: bool = False except Exception as e: logger.error("troubles in variable lookup for lat/long grid and/or depth: %s" % e) self.clear_data() + self._clean_rtofs_folder() return False - self._lat_0 = self._lat[0] - self._lat_step = self._lat[1] - self._lat_0 - self._lon_0 = self._lon[0] - self._lon_step = self._lon[1] - self._lon_0 - # logger.debug("0(%.3f, %.3f); step(%.3f, %.3f)" % (self._lat_0, self._lon_0, self._lat_step, self._lon_step)) - return True - def query(self, lat: Optional[float], lon: Optional[float], dtstamp: Optional[dt] = None, - server_mode: bool = False): + def grid_coords(self, lat: float, lon: float, dtstamp: dt, server_mode: bool = False) -> tuple: + """Convert the passed position into RTOFS grid coords""" + + # check if we need to update the data set (new day!)
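+        # download_db() returns immediately when the requested day is already loaded; otherwise it downloads and opens the daily netCDF files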
+ if not self.download_db(dtstamp, server_mode=server_mode): + logger.error("troubles in downloading RTOFS data for timestamp: %s" % dtstamp.strftime("%Y%m%d")) + return None, None + + # make longitude "safe" since RTOFS grid starts at east longitude 70-ish degrees + if lon < self._lon.min(): + lon += 360.0 + + # logger.debug("min/max lon: %s %s" % (self._lon.min(), self._lon.max())) + # logger.debug("min/max lat: %s %s" % (self._lat.min(), self._lat.max())) + + delta_lat = self._lat - lat + delta_lon = self._lon - lon + # logger.debug("delta lat:(%s)\n%s" % (delta_lat.shape, delta_lat)) + # logger.debug("delta lon:(%s)\n%s" % (delta_lon.shape, delta_lon)) + dist_square = delta_lon * delta_lon + delta_lat * delta_lat + # logger.debug("dist_square:(%s)\n%s" % (dist_square.shape, dist_square)) + lat_idx, lon_idx = np.unravel_index(np.nanargmin(dist_square), dist_square.shape) + d2 = dist_square[lat_idx, lon_idx] + if d2 > 0.04: + logger.info("Located RTOFS point is too far: %s deg" % math.sqrt(d2)) + return None, None + logger.debug("Valid RTOFS idx: (%s, %s) d: %s > lat: %s, lon: %s" + % (lat_idx, lon_idx, d2, self._lat[lat_idx, lon_idx], self._lon[lat_idx, lon_idx])) + + return lat_idx, lon_idx + + def query(self, lat: float | None, lon: float | None, dtstamp: dt | None = None, server_mode: bool = False): """Query RTOFS for passed location and timestamp""" if dtstamp is None: dtstamp = dt.utcnow() - if not isinstance(dtstamp, dt): - raise RuntimeError("invalid datetime passed: %s" % type(dtstamp)) - logger.debug("query: %s @ (%.6f, %.6f)" % (dtstamp, lon, lat)) # check the inputs if (lat is None) or (lon is None): logger.error("invalid query: %s @ (%s, %s)" % (dtstamp.strftime("%Y/%m/%d %H:%M:%S"), lon, lat)) return None + logger.debug("query: %s @ (%.6f, %.6f)" % (dtstamp.strftime("%Y/%m/%d %H:%M:%S"), lon, lat)) try: lat_idx, lon_idx = self.grid_coords(lat, lon, dtstamp=dtstamp, server_mode=server_mode) + if lat_idx is None: + logger.info("troubles with data source or location outside of %s coverage" % self.name) + return None + except TypeError as e: - logger.critical("while converting location to grid coords, %s" % e) + logger.critical("while converting location to grid coords, %s" % e, exc_info=True) return None - # logger.debug("idx > lat: %s, lon: %s" % (lat_idx, lon_idx)) - - lat_s_idx = lat_idx - self._search_half_window - lat_n_idx = lat_idx + self._search_half_window - lon_w_idx = lon_idx - self._search_half_window - lon_e_idx = lon_idx + self._search_half_window - # logger.info("indices -> %s %s %s %s" % (lat_s_idx, lat_n_idx, lon_w_idx, lon_e_idx)) - if lon < self._lon_0: # Make all longitudes safe - lon += 360.0 - longitudes = np.zeros((self._search_window, self._search_window)) - if (lon_e_idx < self._lon.size) and (lon_w_idx >= 0): - # logger.info("safe case") - - # Need +1 on the north and east indices since it is the "stop" value in these slices - t = self._file_temp.variables['temperature'][self._day_idx, :, lat_s_idx:lat_n_idx + 1, - lon_w_idx:lon_e_idx + 1] - s = self._file_sal.variables['salinity'][self._day_idx, :, lat_s_idx:lat_n_idx + 1, lon_w_idx:lon_e_idx + 1] - # Set 'unfilled' elements to NANs (BUT when the entire array has valid data, it returns numpy.ndarray) - if isinstance(t, np.ma.core.MaskedArray): - t_mask = t.mask - t._sharedmask = False - t[t_mask] = np.nan - if isinstance(s, np.ma.core.MaskedArray): - s_mask = s.mask - s._sharedmask = False - s[s_mask] = np.nan - - lons = self._lon[lon_w_idx:lon_e_idx + 1] - for i in range(self._search_window): - 
longitudes[i, :] = lons - else: - logger.info("split case") - - # --- Do the left portion of the array first, this will run into the wrap longitude - lon_e_idx = self._lon.size - 1 - # lon_west_index can be negative if lon_index is on the westernmost end of the array - if lon_w_idx < 0: - lon_w_idx = lon_w_idx + self._lon.size - # logger.info("using lon west/east indices -> %s %s" % (lon_w_idx, lon_e_idx)) - - # Need +1 on the north and east indices since it is the "stop" value in these slices - t_left = self._file_temp.variables['temperature'][self._day_idx, :, lat_s_idx:lat_n_idx + 1, - lon_w_idx:lon_e_idx + 1] - s_left = self._file_sal.variables['salinity'][self._day_idx, :, lat_s_idx:lat_n_idx + 1, - lon_w_idx:lon_e_idx + 1] - # Set 'unfilled' elements to NANs (BUT when the entire array has valid data, it returns numpy.ndarray) - if isinstance(t_left, np.ma.core.MaskedArray): - t_mask = t_left.mask - t_left[t_mask] = np.nan - if isinstance(s_left, np.ma.core.MaskedArray): - s_mask = s_left.mask - s_left[s_mask] = np.nan - - lons_left = self._lon[lon_w_idx:lon_e_idx + 1] - for i in range(self._search_window): - longitudes[i, 0:lons_left.size] = lons_left - # logger.info("longitudes are now: %s" % longitudes) - - # --- Do the right portion of the array first, this will run into the wrap - # longitude so limit it accordingly - lon_w_idx = 0 - lon_e_idx = self._search_window - lons_left.size - 1 - - # Need +1 on the north and east indices since it is the "stop" value in these slices - t_right = self._file_temp.variables['temperature'][self._day_idx, :, lat_s_idx:lat_n_idx + 1, - lon_w_idx:lon_e_idx + 1] - s_right = self._file_sal.variables['salinity'][self._day_idx, :, lat_s_idx:lat_n_idx + 1, - lon_w_idx:lon_e_idx + 1] - # Set 'unfilled' elements to NANs (BUT when the entire array has valid data, it returns numpy.ndarray) - if isinstance(t_right, np.ma.core.MaskedArray): - t_mask = t_right.mask - t_right[t_mask] = np.nan - if isinstance(s_right, np.ma.core.MaskedArray): - s_mask = s_right.mask - s_right[s_mask] = np.nan - - lons_right = self._lon[lon_w_idx:lon_e_idx + 1] - for i in range(self._search_window): - longitudes[i, lons_left.size:self._search_window] = lons_right - - # merge data - t = np.zeros((self._file_temp.variables['lev'].size, self._search_window, self._search_window)) - t[:, :, 0:lons_left.size] = t_left - t[:, :, lons_left.size:self._search_window] = t_right - s = np.zeros((self._file_temp.variables['lev'].size, self._search_window, self._search_window)) - s[:, :, 0:lons_left.size] = s_left - s[:, :, lons_left.size:self._search_window] = s_right - - # Calculate distances from requested position to each of the grid node locations - distances = np.zeros((self._d.size, self._search_window, self._search_window)) - latitudes = np.zeros((self._search_window, self._search_window)) - lats = self._lat[lat_s_idx:lat_n_idx + 1] - for i in range(self._search_window): - latitudes[:, i] = lats - - for i in range(self._search_window): - - for j in range(self._search_window): - dist = self.g.distance(longitudes[i, j], latitudes[i, j], lon, lat) - distances[:, i, j] = dist - # logger.info("node %s, pos: %3.1f, %3.1f, dist: %3.1f" - # % (i, latitudes[i, j], longitudes[i, j], distances[0, i, j])) - # logger.info("distance array:\n%s" % distances[0]) - # Get mask of "no data" elements and replace these with NaNs in distance array - t_mask = np.isnan(t) - distances[t_mask] = np.nan - s_mask = np.isnan(s) - distances[s_mask] = np.nan + # logger.debug("RTOFS idx: (%s, %s)" % (lat_idx, 
lon_idx)) # Spin through all the depth levels temp_pot = np.zeros(self._d.size) @@ -221,35 +282,24 @@ def query(self, lat: Optional[float], lon: Optional[float], dtstamp: Optional[dt num_values = 0 for i in range(self._d.size): - t_level = t[i] - s_level = s[i] - d_level = distances[i] - - try: - ind = np.nanargmin(d_level) - except ValueError: - # logger.info("%s: all-NaN slices" % i) - continue + t_p = self._file_temp.variables['temperature'][self._day_idx, i, lat_idx, lon_idx] + if isinstance(t_p, np.ma.core.MaskedArray): + t_p = t_p.filled(np.nan) + temp_pot[i] = t_p + s = self._file_sal.variables['salinity'][self._day_idx, i, lat_idx, lon_idx] + if isinstance(s, np.ma.core.MaskedArray): + s = s.filled(np.nan) + sal[i] = s + d[i] = self._d[i] - if np.isnan(ind): - logger.info("%s: bottom of valid data" % i) + if np.isnan(temp_pot[i]) or np.isnan(sal[i]): break - ind2 = np.unravel_index(ind, t_level.shape) - - t_closest = t_level[ind2] - s_closest = s_level[ind2] - # d_closest = d_level[ind2] - - temp_pot[i] = t_closest - sal[i] = s_closest - d[i] = self._d[i] - # Calculate in-situ temperature p = Oc.d2p(d[i], lat) - temp_in_situ[i] = Oc.in_situ_temp(s=sal[i], t=t_closest, p=p, pr=self._ref_p) - # logger.info("%02d: %6.1f %6.1f > T/S/Dist: %3.1f %3.1f %3.1f [pot.temp. %3.1f]" - # % (i, d[i], p, temp_in_situ[i], s_closest, d_closest, t_closest)) + temp_in_situ[i] = Oc.in_situ_temp(s=sal[i], t=temp_pot[i], p=p, pr=self._ref_p) + # logger.info("%02d: %6.1f (%6.1f) > Tp/Ts/Sal: %3.2f %3.2f %3.2f" + # % (i, d[i], p, temp_pot[i], temp_in_situ[i], sal[i])) num_values += 1 @@ -257,14 +307,6 @@ def query(self, lat: Optional[float], lon: Optional[float], dtstamp: Optional[dt logger.info("no data from lookup!") return None - # ind = np.nanargmin(distances[0]) - # ind2 = np.unravel_index(ind, distances[0].shape) - # switching to the query location - # lat_out = latitudes[ind2] - # lon_out = longitudes[ind2] - # while lon_out > 180.0: - # lon_out -= 360.0 - # Make a new SV object to return our query in ssp = Profile() ssp.meta.sensor_type = Dicts.sensor_types['Synthetic'] @@ -289,147 +331,8 @@ def query(self, lat: Optional[float], lon: Optional[float], dtstamp: Optional[dt return profiles - def clear_data(self) -> None: - """Delete the data and reset the last loaded day""" - logger.debug("clearing data") - if self._has_data_loaded: - if self._file_temp: - self._file_temp.close() - self._file_temp = None - if self._file_sal: - self._file_sal.close() - self._file_sal = None - self._lat = None - self._lon = None - self._lat_step = None - self._lat_0 = None - self._lon_step = None - self._lon_0 = None - self._has_data_loaded = False # grids are "loaded" ? 
(netCDF files are opened) - self._last_loaded_day = dt(1900, 1, 1) # some silly day in the past - self._day_idx = None - def __repr__(self) -> str: msg = "%s" % super(Rtofs, self).__repr__() msg += " <has data loaded: %s>\n" % (self._has_data_loaded,) msg += " <loaded day: %s>\n" % (self._last_loaded_day.strftime(r"%d\%m\%Y"),) return msg - - # ### private methods ### - - @staticmethod - def _check_url(url: str) -> bool: - try: - with requests.get(url, allow_redirects=True, stream=True) as resp: - logger.debug("passed url: %s -> %s" % (url, resp.status_code)) - if resp.status_code == 200: - return True - else: - return False - - except Exception as e: - logger.warning("while checking %s, %s" % (url, e)) - return False - - @staticmethod - def _build_check_urls(input_date: date) -> tuple: - """make up the url to use for salinity and temperature""" - # Primary server: http://nomads.ncep.noaa.gov/pub/data/nccf/com/rtofs/prod/rtofs.20160410/ - url_temp = 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/rtofs/prod/rtofs.%s/' \ - 'rtofs_glo_3dz_n024_daily_3ztio.nc' \ - % input_date.strftime("%Y%m%d") - logger.debug("check temp: %s" % url_temp) - url_sal = 'https://nomads.ncep.noaa.gov/pub/data/nccf/com/rtofs/prod/rtofs.%s/' \ - 'rtofs_glo_3dz_n024_daily_3zsio.nc' \ - % input_date.strftime("%Y%m%d") - logger.debug("check sal: %s" % url_sal) - return url_temp, url_sal - - @staticmethod - def _build_opendap_urls(input_date: date) -> tuple: - """make up the url to use for salinity and temperature""" - # Primary server: http://nomads.ncep.noaa.gov/dods/rtofs - url_temp = 'https://nomads.ncep.noaa.gov/dods/rtofs/rtofs_global%s/' \ - 'rtofs_glo_3dz_nowcast_daily_temp' \ - % input_date.strftime("%Y%m%d") - logger.debug("opendap temp: %s" % url_temp) - url_sal = 'https://nomads.ncep.noaa.gov/dods/rtofs/rtofs_global%s/' \ - 'rtofs_glo_3dz_nowcast_daily_salt' \ - % input_date.strftime("%Y%m%d") - logger.debug("opendap sal: %s" % url_sal) - return url_temp, url_sal - - def _download_files(self, datestamp: dt, server_mode: bool = False): - """Actually, just try to connect with the remote files - For a given queried date, we may have to use the forecast from the previous - day since the current nowcast doesn't hold data for today (solved?)
- """ - progress = CliProgress() - - # check if the files are loaded and that the date matches - if self._has_data_loaded: - # logger.info("%s" % self.last_loaded_day) - if self._last_loaded_day == datestamp: - return True - # the data are old - logger.info("cleaning data: %s %s" % (self._last_loaded_day, datestamp)) - self.clear_data() - - progress.start(text="Download RTOFS", is_disabled=server_mode) - - # check if the data are available on the RTOFS server - url_ck_temp, url_ck_sal = self._build_check_urls(datestamp) - if not self._check_url(url_ck_temp) or not self._check_url(url_ck_sal): - - logger.info('issue with %s -> trying with the previous day' % datestamp) - datestamp -= timedelta(days=1) - url_ck_temp, url_ck_sal = self._build_check_urls(datestamp) - - if not self._check_url(url_ck_temp) or not self._check_url(url_ck_sal): - logger.warning('unable to retrieve data from RTOFS server for date: %s and next day' % datestamp) - self.clear_data() - progress.end() - return False - - progress.update(30) - - # Try to download the data grid grids - url_temp, url_sal = self._build_opendap_urls(datestamp) - # logger.debug('downloading RTOFS data for %s' % datestamp) - try: - self._file_temp = Dataset(url_temp) - progress.update(60) - self._file_sal = Dataset(url_sal) - progress.update(80) - self._day_idx = 1 # it was 3 1-day steps, now only 2 steps - - except (RuntimeError, IOError) as e: - logger.warning("unable to access data: %s -> %s" % (datestamp.strftime("%Y%m%d"), e)) - self.clear_data() - progress.end() - return False - - # success! - self._has_data_loaded = True - self._last_loaded_day = datestamp - # logger.info("loaded data for %s" % datestamp) - progress.end() - return True - - def grid_coords(self, lat: float, lon: float, dtstamp: dt, server_mode: Optional[bool] = False) -> tuple: - """Convert the passed position in RTOFS grid coords""" - - # check if we need to update the data set (new day!) 
- if not self.download_db(dtstamp, server_mode=server_mode): - logger.error("troubles in updating data set for timestamp: %s" % dtstamp.strftime("%Y%m%d")) - return None, None - - # make longitude "safe" since RTOFS grid starts at east longitude 70-ish degrees - if lon < self._lon_0: - lon += 360.0 - - # This does a nearest neighbour lookup - lat_idx = int(round((lat - self._lat_0) / self._lat_step, 0)) - lon_idx = int(round((lon - self._lon_0) / self._lon_step, 0)) - - return lat_idx, lon_idx diff --git a/hyo2/ssm2/lib/profile/oceanography.py b/hyo2/ssm2/lib/profile/oceanography.py index 246d9c3b..eb302f05 100644 --- a/hyo2/ssm2/lib/profile/oceanography.py +++ b/hyo2/ssm2/lib/profile/oceanography.py @@ -28,7 +28,7 @@ class Oceanography: # ### PRESSURE/DEPTH METHODS ### @classmethod - def p2d(cls, p, lat=30.0, dyn_height=None, debug=False): + def p2d(cls, p, lat: float = 30.0, dyn_height: float | None = None, debug: bool = False) -> float: """Convert pressure to depth""" try: return cls.p2d_gsw(p=p, lat=lat, dyn_height=dyn_height) @@ -39,13 +39,14 @@ def p2d(cls, p, lat=30.0, dyn_height=None, debug=False): return cls.p2d_backup(p=p, lat=lat) @classmethod - def p2d_gsw(cls, p, lat, dyn_height): + def p2d_gsw(cls, p, lat: float, dyn_height: float | None) -> float: if not isinstance(p, np.ndarray): p = np.array(p, ndmin=1, copy=False) if dyn_height is None: - return -gsw.conversions.z_from_p(p=p, lat=lat) + depth = -gsw.conversions.z_from_p(p=p, lat=lat) + return depth[0] depth = -gsw.conversions.z_from_p(p=p, lat=lat, geo_strf_dyn_height=dyn_height) for val in depth: @@ -53,10 +54,10 @@ def p2d_gsw(cls, p, lat, dyn_height): logger.info("nan in gsw.conversions.z_from_p with dyn_height") return -gsw.conversions.z_from_p(p=p, lat=lat) - return depth + return depth[0] @classmethod - def p2d_backup(cls, p, lat): + def p2d_backup(cls, p, lat: float) -> float: """Convert pressure to depth If the latitude is not passed, a default value of 30.0 is used. @@ -83,7 +84,7 @@ return d / g @classmethod - def d2p(cls, d, lat=30.0, dyn_height=None, debug=False): + def d2p(cls, d, lat: float = 30.0, dyn_height: float | None = None, debug: bool = False) -> float: """Convert depth to pressure""" try: return cls.d2p_gsw(d=d, lat=lat, dyn_height=dyn_height) @@ -94,13 +95,14 @@ return cls.d2p_backup(d=d, lat=lat) @classmethod - def d2p_gsw(cls, d, lat, dyn_height): + def d2p_gsw(cls, d, lat: float, dyn_height: float | None) -> float: if not isinstance(d, np.ndarray): d = np.array(d, ndmin=1, copy=False) if dyn_height is None: - return gsw.conversions.p_from_z(z=-d, lat=lat) + pressure = gsw.conversions.p_from_z(z=-d, lat=lat) + return pressure[0] pressure = gsw.conversions.p_from_z(z=-d, lat=lat, geo_strf_dyn_height=dyn_height) for val in pressure: @@ -108,10 +110,10 @@ logger.info("nan in gsw.conversions.p_from_z with dyn_height") return gsw.conversions.p_from_z(z=-d, lat=lat) - return pressure + return pressure[0] @classmethod - def d2p_backup(cls, d, lat): + def d2p_backup(cls, d, lat: float) -> float: """Convert depth to pressure ref: Leroy and Parthiot(1998)