Run linting for Python files in tstester.
Jeffrey-Vervoort-KNMI committed Sep 22, 2023
1 parent 3a76325 commit 4a47910
Showing 13 changed files with 462 additions and 334 deletions.
1 change: 0 additions & 1 deletion tstester/README.md
@@ -120,4 +120,3 @@ CONTAINER ID IMAGE COMMAND CREATED STATUS
## Running TimescaleDB in docker container on local machine

TODO!

48 changes: 28 additions & 20 deletions tstester/common.py
@@ -1,23 +1,24 @@
import random
import time
import os
import random
import subprocess
import time


def select_weighted_value(x):
"""Select a random value based on probability weights.
x is of the form [(v_1, w_1), (v_2, w_2), ..., (v_n, w_n)].
Returns v_i with a probability of w_i / (w_1 + w_2 + ... + w_n).
x is of the form [(v_1, w_1), (v_2, w_2), ..., (v_n, w_n)].
Returns v_i with a probability of w_i / (w_1 + w_2 + ... + w_n).
"""

# check preconditions
if len(x) == 0:
raise Exception('can\'t select from empty list')
raise Exception("can't select from empty list")
for item in x:
if item[1] <= 0:
raise Exception('non-positive weight not allowed (value: {}, weight: {})'.format(
item[0], item[1]))
raise Exception(
"non-positive weight not allowed (value: {}, weight: {})".format(item[0], item[1])
)

w_sum_n = sum([z[1] for z in x]) # get total weight sum
r = random.random() * w_sum_n # get random value within total weight sum
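The rest of `select_weighted_value` is collapsed in this diff. For context, a minimal sketch of how a cumulative-weight pick of this kind typically completes (not necessarily the repository's exact implementation), using the `time_res` weights from `config.json` as example input:

```python
import random


def pick_weighted(x):
    """Return v_i from [(v_1, w_1), ...] with probability w_i / (w_1 + ... + w_n)."""
    if not x:
        raise ValueError("can't select from empty list")
    w_sum = sum(w for _, w in x)
    r = random.random() * w_sum  # uniform point inside the total weight
    acc = 0.0
    for value, weight in x:
        acc += weight
        if r < acc:
            return value
    return x[-1][0]  # guard against floating-point rounding at the upper edge


# Roughly a 20/30/50 % split between the three resolutions:
print(pick_weighted([(60, 0.2), (600, 0.3), (3600, 0.5)]))
```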
@@ -43,22 +44,23 @@ def elapsed_secs(start_secs):
return now_secs() - start_secs


def get_env_var(name, default_value='', fail_on_empty=True):
def get_env_var(name, default_value="", fail_on_empty=True):
"""Get environment variable."""
v = os.getenv(name, default_value)
if (v == '') and fail_on_empty:
raise Exception('environment variable {} empty or undefined'.format(name))
if (v == "") and fail_on_empty:
raise Exception("environment variable {} empty or undefined".format(name))
return v


def exec_command(cmd):
"""Execute a command, returning stdout on success, raising an error on failure.
"""
"""Execute a command, returning stdout on success, raising an error on failure."""
p = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if p.returncode != 0:
raise Exception(
'\n\'{}\' failed:\n EXIT CODE: {}\n STDOUT: \'{}\'\n STDERR: \'{}\'\n'.format(
cmd, p.returncode, p.stdout.strip(), p.stderr.strip()))
"\n'{}' failed:\n EXIT CODE: {}\n STDOUT: '{}'\n STDERR: '{}'\n".format(
cmd, p.returncode, p.stdout.strip(), p.stderr.strip()
)
)
return p.stdout
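A quick, hedged usage sketch for the two helpers above. It assumes the snippet runs inside the `tstester` directory so `common` is importable; the `TSTESTER_DUMMY` variable and the `ls` command are made up for illustration:

```python
import os

from common import exec_command, get_env_var

os.environ["TSTESTER_DUMMY"] = "some-value"  # pretend the variable is set
value = get_env_var("TSTESTER_DUMMY")        # raises if empty or undefined
listing = exec_command(["ls", "-l"])         # returns stdout as bytes, raises on non-zero exit
print(value, listing.decode().splitlines()[0])
```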


@@ -91,18 +93,24 @@ def validate_precondition():
v = (v1, v2)
for j in [0, 1]:
if len(t[j]) != len(v[j]):
raise Exception('precondition failed: len(t[{}]) ({}) != len(v[{}]) ({})'.format(
j, len(t[j]), j, len(v[j])))
raise Exception(
"precondition failed: len(t[{}]) ({}) != len(v[{}]) ({})".format(
j, len(t[j]), j, len(v[j])
)
)
if len(t[j]) > 0:
if t[j][-1] >= sentinel_obs_time:
raise Exception('precondition failed: t[{}][-1] >= {}'.format(
j, sentinel_obs_time))
raise Exception(
"precondition failed: t[{}][-1] >= {}".format(j, sentinel_obs_time)
)
if len(t[j]) > 1:
for i in range(1, len(t[j])):
if t[j][i - 1] >= t[j][i]:
raise Exception(
'precondition failed: t[{}][{}] ({}) >= t[{}][{}] ({})'.format(
j, i - 1, t[j][i - 1], j, i, t[j][i]))
"precondition failed: t[{}][{}] ({}) >= t[{}][{}] ({})".format(
j, i - 1, t[j][i - 1], j, i, t[j][i]
)
)

validate_precondition()
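To make the precondition concrete, a small hedged illustration of what the visible checks require of the two input series (the full `ts_merge` signature is collapsed in this diff, so the variable names below are assumptions):

```python
# Both series must pair one value per time, be strictly increasing in time,
# and lie entirely before the sentinel observation time.
t1, v1 = [100, 200, 300], [1.0, 2.0, 3.0]  # e.g. times/values already stored
t2, v2 = [250, 400], [2.5, 4.0]            # e.g. times/values to merge in
sentinel_obs_time = 10**9                  # hypothetical sentinel

for t, v in ((t1, v1), (t2, v2)):
    assert len(t) == len(v)
    assert all(a < b for a, b in zip(t, t[1:]))  # strictly increasing
    assert not t or t[-1] < sentinel_obs_time
```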

20 changes: 6 additions & 14 deletions tstester/config.json
@@ -1,39 +1,31 @@
{
"max_age": 86400,
"_comment": "max age in secs relative to current time (older observations are inaccessible)",

"_comment": "extra secs values to try with the AddNewObs test",
"nstations": 3,
"_comment": "number of stations to generate",

"bbox": {
"min_lat": -60.5,
"max_lat": 62.5,
"min_lon": -10.5,
"max_lon": 12.5
},
"_comment": "bounding box for randomly generated station locations (no two stations will have the same location)",

"params": {
"min": 1,
"max": 3
},
"_comment": "minimum and maximum number of randomly generated params for a station",

"time_res": {
"60": 0.2,
"600": 0.3,
"3600": 0.5
},
"_comment": "probability weights of time series resolutions (around 20% will have time res 60 secs, around 30% will have time res 600 secs, and so on)",

"extra_secs": [60, 600, 3600],
"_comment": "extra secs values to try with the AddNewObs test",

"extra_secs": [
60,
600,
3600
],
"ts_other_metadata": {
"sensor_location_quality": 9,
"sensor_performance_quality": 9
},

"obs_metadata": {
"quality": 9
}
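A hedged sketch of how these settings could be consumed (the actual wiring lives in `TsTester`, which is outside this diff): load the file and turn the `time_res` probability weights into the `(value, weight)` pairs that `common.select_weighted_value` expects.

```python
import json

with open("config.json") as f:
    config = json.load(f)

# e.g. [(60, 0.2), (600, 0.3), (3600, 0.5)]
time_res_weights = [(int(res), weight) for res, weight in config["time_res"].items()]
print(config["nstations"], config["bbox"]["min_lat"], time_res_weights)
```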
31 changes: 14 additions & 17 deletions tstester/main.py
@@ -1,48 +1,45 @@
#!/usr/bin/env python3

# tested with Python 3.9

# Usage: ./main

import sys
import argparse
import json
import pathlib
from traceback import format_exc
import random
import sys
from traceback import format_exc

from tstester import TsTester


def parse_args(args):
"""Parse and return command-line arguments."""
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description='Test different data storage solutions for time series of observations.',
exit_on_error=False)
parser.add_argument(
'-v', '--verbose', action='store_true', help='Enable logging to stdout.')
description="Test different data storage solutions for time series of observations.",
exit_on_error=False,
)
parser.add_argument("-v", "--verbose", action="store_true", help="Enable logging to stdout.")
parser.add_argument(
'-c', '--cfg_file', default='config.json', type=pathlib.Path, help='Config file.')
parser.add_argument(
'-s', '--random_seed', type=int, default=-1, help='Random seed.')
"-c", "--cfg_file", default="config.json", type=pathlib.Path, help="Config file."
)
parser.add_argument("-s", "--random_seed", type=int, default=-1, help="Random seed.")

pres = parser.parse_args(args)
return pres.verbose, pres.cfg_file, pres.random_seed


if __name__ == '__main__':

if __name__ == "__main__":
try:
verbose, cfg_file, random_seed = parse_args(sys.argv[1:])
if random_seed >= 0:
random.seed(random_seed)
config = json.load(open(cfg_file))
TsTester(verbose, config).execute()
except argparse.ArgumentError as e:
print('failed to parse command-line arguments: {}'.format(e), file=sys.stderr)
print("failed to parse command-line arguments: {}".format(e), file=sys.stderr)
sys.exit(1)
except SystemExit:
sys.exit(1) # don't print stack trace in this case (e.g. when --help option is given)
except:
sys.stderr.write('error: {}'.format(format_exc()))
except Exception:
sys.stderr.write("error: {}".format(format_exc()))
sys.exit(1)
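For reference, a hedged sketch of exercising `parse_args` directly. It assumes the snippet sits next to `main.py` and that the `tstester` module's dependencies are installed, since importing `main` pulls in `TsTester`; the `__main__` guard keeps the import itself side-effect free.

```python
from main import parse_args

# Equivalent of: ./main.py -v -c config.json -s 42
verbose, cfg_file, random_seed = parse_args(["-v", "-c", "config.json", "-s", "42"])
print(verbose, cfg_file, random_seed)  # True config.json 42
```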
94 changes: 48 additions & 46 deletions tstester/netcdf.py
@@ -1,7 +1,8 @@
import netCDF4 as nc
import datetime as dt
import numpy as np

import common
import netCDF4 as nc
import numpy as np


class NetCDF:
@@ -13,52 +14,53 @@ def __init__(self, verbose):
def create_initial_file(self, path, ts):
"""Create the initial file for a time series."""

with nc.Dataset(path, 'w') as dset:

dset.setncatts({
'station_id': ts.station_id(),
'param_id': ts.param_id(),
'spatial_representation': 'point',
'geospatial_lat_min': ts.lat(),
'geospatial_lat_max': ts.lat(),
'geospatial_lon_min': ts.lon(),
'geospatial_lon_max': ts.lon(),
})

vlat = dset.createVariable('latitude', 'f')
vlat.standard_name = 'latitude'
vlat.long_name = 'station latitude'
vlat.units = 'degrees_north'
with nc.Dataset(path, "w") as dset:
dset.setncatts(
{
"station_id": ts.station_id(),
"param_id": ts.param_id(),
"spatial_representation": "point",
"geospatial_lat_min": ts.lat(),
"geospatial_lat_max": ts.lat(),
"geospatial_lon_min": ts.lon(),
"geospatial_lon_max": ts.lon(),
}
)

vlat = dset.createVariable("latitude", "f")
vlat.standard_name = "latitude"
vlat.long_name = "station latitude"
vlat.units = "degrees_north"
vlat[:] = ts.lat()

vlon = dset.createVariable('longitude', 'f')
vlon.standard_name = 'longitude'
vlon.long_name = 'station longitude'
vlon.units = 'degrees_east'
vlon = dset.createVariable("longitude", "f")
vlon.standard_name = "longitude"
vlon.long_name = "station longitude"
vlon.units = "degrees_east"
vlon[:] = ts.lon()

dset.createDimension('time', 0) # create time as an unlimited dimension
dset.createDimension("time", 0) # create time as an unlimited dimension

v = dset.createVariable('time', 'i4', ('time',))
v.standard_name = 'time'
v.long_name = 'Time of measurement'
v.calendar = 'standard'
ref_dt = dt.datetime.strptime('1970-01-01', '%Y-%m-%d').replace(tzinfo=dt.timezone.utc)
v = dset.createVariable("time", "i4", ("time",))
v.standard_name = "time"
v.long_name = "Time of measurement"
v.calendar = "standard"
ref_dt = dt.datetime.strptime("1970-01-01", "%Y-%m-%d").replace(tzinfo=dt.timezone.utc)
v.units = f"seconds since {ref_dt.strftime('%Y-%m-%d %H:%M:%S')}"
v.axis = 'T'
v.axis = "T"

v = dset.createVariable('value', 'f4', ['time'])
v = dset.createVariable("value", "f4", ["time"])
v.standard_name = ts.param_id() # for now
v.long_name = '{} (long name)'.format(ts.param_id()) # for now
v.coordinates = 'time latitude longitude'
v.coverage_content_type = 'physicalMeasurement'
v.long_name = "{} (long name)".format(ts.param_id()) # for now
v.coordinates = "time latitude longitude"
v.coverage_content_type = "physicalMeasurement"

def replace_times_and_values(self, path, times, values):
"""Replace contents of 'time' and 'value' variables in file."""

with nc.Dataset(path, 'a') as dset:
dset['time'][:] = times
dset['value'][:] = values
with nc.Dataset(path, "a") as dset:
dset["time"][:] = times
dset["value"][:] = values

def add_times_and_values(self, path, times, values, oldest_time=None):
"""Add new or replace/remove observations in file.
@@ -73,19 +75,19 @@ def add_times_and_values(self, path, times, values, oldest_time=None):
# times_add = np.array(times)
# values_add = np.array(values)

with nc.Dataset(path, 'a') as dset:

with nc.Dataset(path, "a") as dset:
# retrieve file variables
ftimes = dset['time'][:]
fvalues = dset['value'][:]
ftimes = dset["time"][:]
fvalues = dset["value"][:]

# merge
mtimes, mvalues = common.ts_merge(
ftimes.tolist(), fvalues.tolist(), times, values, oldest_time)
ftimes.tolist(), fvalues.tolist(), times, values, oldest_time
)

# replace file variables with merged arrays
dset['time'][:] = mtimes
dset['value'][:] = mvalues
dset["time"][:] = mtimes
dset["value"][:] = mvalues

def get_times_and_values(self, path, from_time, to_time):
"""Retrieve contents of 'time' and 'value' variables from file within [from_time, to_time>.
@@ -94,7 +96,7 @@ def get_times_and_values(self, path, from_time, to_time):
StorageBackend.set_obs())
"""

with nc.Dataset(path, 'r') as dset:
time_var = dset.variables['time']
with nc.Dataset(path, "r") as dset:
time_var = dset.variables["time"]
indices = np.where((time_var[:] >= from_time) & (time_var[:] < to_time))
return list(time_var[indices]), list(dset.variables['value'][indices])
return list(time_var[indices]), list(dset.variables["value"][indices])
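Finally, a hedged end-to-end sketch of the `NetCDF` helper above (run from the `tstester` directory). `FakeTs` is a hypothetical stand-in exposing only the accessor methods that `create_initial_file` calls, and the file name is made up.

```python
from netcdf import NetCDF


class FakeTs:
    """Hypothetical time-series stub; mimics only what create_initial_file needs."""

    def station_id(self):
        return "st_0001"

    def param_id(self):
        return "air_temperature"

    def lat(self):
        return 60.1

    def lon(self):
        return 10.7


ncdf = NetCDF(verbose=False)
ncdf.create_initial_file("st_0001_air_temperature.nc", FakeTs())
ncdf.replace_times_and_values("st_0001_air_temperature.nc", [0, 60, 120], [1.0, 1.5, 2.0])
times, values = ncdf.get_times_and_values("st_0001_air_temperature.nc", 0, 100)
print(times, values)  # expect the observations at t=0 and t=60
```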