Skip to content

Commit

Permalink
Reordered for multiple endpoints, and add async
Browse files Browse the repository at this point in the history
  • Loading branch information
Teddy-1000 committed Jan 19, 2024
1 parent 9b6c250 commit ec8b581
Show file tree
Hide file tree
Showing 12 changed files with 185 additions and 315 deletions.
Empty file added api/__init__.py
Empty file.
32 changes: 32 additions & 0 deletions api/dependencies.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
from typing import Tuple
from datetime import datetime, timedelta
from pydantic import AwareDatetime
from pydantic import TypeAdapter
from google.protobuf.timestamp_pb2 import Timestamp


def get_datetime_range(datetime_string: str | None) -> Tuple[Timestamp, Timestamp] | None:
    """Parse an OGC EDR ``datetime`` query parameter into a protobuf time range.

    Accepts either a single instant ("2024-01-19T12:00:00Z") or an
    interval "start/end", where either bound may be the open-ended
    marker "..".

    Args:
        datetime_string: Raw query-parameter value, or None/empty.

    Returns:
        A (start, end) pair of protobuf Timestamps, or None when no
        datetime string was given.

    Raises:
        pydantic.ValidationError: if a bound is not a valid
        timezone-aware datetime string.
    """
    if not datetime_string:
        return None

    start_datetime, end_datetime = Timestamp(), Timestamp()
    aware_datetime_type_adapter = TypeAdapter(AwareDatetime)
    datetimes = tuple(value.strip() for value in datetime_string.split("/"))
    if len(datetimes) == 1:
        # Validate the instant once and reuse it for both bounds
        # (previously the same string was validated twice).
        instant = aware_datetime_type_adapter.validate_python(datetimes[0])
        start_datetime.FromDatetime(instant)
        end_datetime.FromDatetime(
            instant + timedelta(seconds=1)
        )  # HACK: Add one second so we get some data, as the store returns [start, end)
    else:
        if datetimes[0] != "..":
            start_datetime.FromDatetime(aware_datetime_type_adapter.validate_python(datetimes[0]))
        else:
            # Open start: datetime.min is naive; FromDatetime treats naive values as UTC.
            start_datetime.FromDatetime(datetime.min)
        if datetimes[1] != "..":
            # HACK add one second so that the end_datetime is included in the interval.
            end_datetime.FromDatetime(aware_datetime_type_adapter.validate_python(
                datetimes[1]) + timedelta(seconds=1))
        else:
            # Open end: latest representable datetime (also treated as UTC).
            end_datetime.FromDatetime(datetime.max)

    return start_datetime, end_datetime
27 changes: 0 additions & 27 deletions api/formatter/__init__.py

This file was deleted.

16 changes: 0 additions & 16 deletions api/formatter/base_formatter.py

This file was deleted.

96 changes: 0 additions & 96 deletions api/formatter/covjson.py

This file was deleted.

20 changes: 7 additions & 13 deletions api/formatters/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,12 @@
from pydantic import BaseModel, validator
import importlib
import pkgutil
import logging

import formatters

logger = logging.getLogger(__name__)


def get_EDR_formatters() -> dict:
"""
Expand All @@ -15,21 +18,12 @@ def get_EDR_formatters() -> dict:

formatter_plugins = [importlib.import_module("formatters."+i.name) for i in pkgutil.iter_modules(
formatters.__path__) if i.name != "base_formatter"]
print(formatter_plugins)
logger.info(f"Loaded plugins : {formatter_plugins}")
for formatter_module in formatter_plugins:
# Make instance of formatter and save
available_formatters[formatter_module.__name__.split(".")[-1]] = getattr(
formatter_module, formatter_module.formatter_name)

# Should also setup dict for alias discovery
class edr_formatters_model(BaseModel):
f: str
formatter_module, formatter_module.formatter_name)()

@validator("f")
def f_must_be_available_formatter(cls, f):
named_formatters = list(available_formatters.keys())
if f not in named_formatters:
raise ValueError(f"f must be a provided formatter, one of {named_formatters}")
return f
    # Should also set up dict for alias discovery

return available_formatters, edr_formatters_model
return available_formatters
6 changes: 4 additions & 2 deletions api/formatters/covjson.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,16 +82,18 @@ def convert(self, response):
)

coverages.append(Coverage(domain=domain, parameters=parameters, ranges=ranges))
if not coverages:
return {}
return CoverageCollection(coverages=coverages)

def _collect_data(ts_mdata, obs_mdata):
def _collect_data(self, ts_mdata, obs_mdata):
lat = obs_mdata[0].geo_point.lat # HACK: For now assume they all have the same position
lon = obs_mdata[0].geo_point.lon
tuples = (
(o.obstime_instant.ToDatetime(tzinfo=timezone.utc), float(o.value)) for o in obs_mdata
) # HACK: str -> float
(times, values) = zip(*tuples)
param_id = ts_mdata.instrument
param_id = ts_mdata.standard_name
unit = ts_mdata.unit

return (lat, lon, times), param_id, unit, values
6 changes: 4 additions & 2 deletions api/grpc_getter.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,11 @@
# These functions should be the only components that are
# dependent on external services.


async def getObsRequest(get_obs_request):
channel = grpc.aio.insecure_channel(
f"{os.getenv('DSHOST', 'localhost')}:{os.getenv('DSPORT', '50050')}")
channel = grpc.aio.secure_channel(
f"{os.getenv('DSHOST', 'localhost')}:{os.getenv('DSPORT', '50050')}",
grpc.ssl_channel_credentials())
grpc_stub = dstore_grpc.DatastoreStub(channel)
response = await grpc_stub.GetObservations(get_obs_request)

Expand Down
Loading

0 comments on commit ec8b581

Please sign in to comment.