Set up simple integration test that depends on the KNMI data load.
lukas-phaf committed Sep 11, 2023
1 parent 633d415 · commit 16b0427
Showing 7 changed files with 170 additions and 0 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/ci.yml
@@ -23,6 +23,9 @@ jobs:
- name: Test loader runs without errors
  run: docker compose run --rm loader

- name: Integration test
  run: docker compose run --rm integration

- name: Test client runs without errors
  run: docker compose run --rm client

12 changes: 12 additions & 0 deletions docker-compose.yml
@@ -66,5 +66,17 @@ services:
      store:
        condition: service_healthy

  integration:
    profiles: ["integration"]
    build:
      context: . # TODO: Needed to get proto file. Find a better solution
      dockerfile: integration-test/Dockerfile
    environment:
      - DSHOST=store
      - DSPORT=50050
    depends_on:
      store:
        condition: service_healthy

volumes:
  ts-data:
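Because the integration service sits behind the "integration" profile, a plain `docker compose up` will skip it. To run the test locally, mirroring the CI step above (this assumes the loader has already populated the store with the KNMI test data):

docker compose run --rm integration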
33 changes: 33 additions & 0 deletions integration-test/Dockerfile
@@ -0,0 +1,33 @@
FROM python:3.11-slim-bookworm

SHELL ["/bin/bash", "-eux", "-o", "pipefail", "-c"]

ENV PROJECT_DATASTORE_PATH="datastore"
ENV PROJECT_PYTHON_PATH="integration-test"
ENV DOCKER_PATH="/clients/python"

# hadolint ignore=DL3013
RUN apt-get update \
    && apt-get -y upgrade \
    # Cleanup
    && rm -rf /usr/tmp \
    && apt-get autoremove -y \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

COPY "${PROJECT_DATASTORE_PATH}/protobuf/datastore.proto" "/protobuf/datastore.proto"
COPY "${PROJECT_PYTHON_PATH}/requirements.txt" "${DOCKER_PATH}/requirements.txt"

RUN pip install --no-cache-dir --upgrade pip \
    && pip install --no-cache-dir --upgrade -r "${DOCKER_PATH}/requirements.txt"

# Compiling the protobuf file
RUN python -m grpc_tools.protoc \
    --proto_path="protobuf" "protobuf/datastore.proto" \
    --python_out="${DOCKER_PATH}" \
    --grpc_python_out="${DOCKER_PATH}"

COPY "${PROJECT_PYTHON_PATH}/test_knmi.py" "${DOCKER_PATH}/test_knmi.py"

WORKDIR "${DOCKER_PATH}"
CMD ["pytest"]
17 changes: 17 additions & 0 deletions integration-test/discover.py
@@ -0,0 +1,17 @@
# This code was used to double-check the values tested in test_knmi.py
from pathlib import Path


import pandas as pd
import xarray as xr

file_path = Path(__file__).parents[1] / "test-data" / "KNMI" / "20221231.nc"
with xr.open_dataset(file_path, engine="netcdf4", chunks=None) as ds:  # chunks=None to disable dask
    # print(ds)
    print(ds.sel(station='06260').isel(time=0).lat.values)
    print(ds.sel(station='06260').isel(time=0).lon.values)

    print(ds.dims)

    print(ds.sel(station='06260').rh.values)
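# With the KNMI test file above (20221231.nc), the first two prints should match
# the coordinates asserted in test_knmi.py: 52.098821802977 and 5.1797058644882.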

7 changes: 7 additions & 0 deletions integration-test/requirements.in
@@ -0,0 +1,7 @@
# Generate requirements.txt using:
# pip-compile --upgrade --no-emit-index-url
# Install using:
# pip-sync

grpcio-tools~=1.56
pytest~=7.4
23 changes: 23 additions & 0 deletions integration-test/requirements.txt
@@ -0,0 +1,23 @@
#
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
# pip-compile --no-emit-index-url
#
grpcio==1.58.0
    # via grpcio-tools
grpcio-tools==1.58.0
    # via -r requirements.in
iniconfig==2.0.0
    # via pytest
packaging==23.1
    # via pytest
pluggy==1.3.0
    # via pytest
protobuf==4.24.3
    # via grpcio-tools
pytest==7.4.2
    # via -r requirements.in

# The following packages are considered to be unsafe in a requirements file:
# setuptools
75 changes: 75 additions & 0 deletions integration-test/test_knmi.py
@@ -0,0 +1,75 @@
# Note that this assumes the KNMI test data has been loaded (using the loader container)
import os
from datetime import datetime


import pytest

from google.protobuf.timestamp_pb2 import Timestamp

import datastore_pb2 as dstore
import datastore_pb2_grpc as dstore_grpc
import grpc


@pytest.fixture
def grpc_stub():
    # DSHOST/DSPORT are set in docker-compose.yml; the defaults allow running
    # the tests against a locally started datastore.
    with grpc.insecure_channel(f"{os.getenv('DSHOST', 'localhost')}:{os.getenv('DSPORT', '50050')}") as channel:
        yield dstore_grpc.DatastoreStub(channel)


def test_find_series_single_station_single_parameter(grpc_stub):
    request = dstore.FindTSRequest(
        station_ids=["06260"],
        param_ids=["rh"]
    )
    response = grpc_stub.FindTimeSeries(request)

    assert len(response.tseries) == 1
    assert response.tseries[0].metadata.pos.lat == 52.098821802977
    assert response.tseries[0].metadata.pos.lon == 5.1797058644882


def test_find_series_all_stations_single_parameter(grpc_stub):
    request = dstore.FindTSRequest(
        param_ids=["rh"]
    )
    response = grpc_stub.FindTimeSeries(request)

    assert len(response.tseries) == 55


def test_find_series_single_station_all_parameters(grpc_stub):
    request = dstore.FindTSRequest(
        station_ids=["06260"],
    )
    response = grpc_stub.FindTimeSeries(request)

    assert len(response.tseries) == 44


def test_get_values_single_station_single_parameter(grpc_stub):
    ts_request = dstore.FindTSRequest(
        station_ids=["06260"],
        param_ids=["rh"]
    )
    ts_response = grpc_stub.FindTimeSeries(ts_request)
    assert len(ts_response.tseries) == 1
    ts_id = ts_response.tseries[0].id

    from_time = Timestamp()
    from_time.FromDatetime(datetime(2022, 12, 31))
    to_time = Timestamp()
    to_time.FromDatetime(datetime(2023, 11, 1))
    request = dstore.GetObsRequest(
        tsids=[ts_id],
        fromtime=from_time,
        totime=to_time,
    )
    response = grpc_stub.GetObservations(request)

    assert len(response.tsobs) == 1
    assert response.tsobs[0].tsid == ts_id
    assert len(response.tsobs[0].obs) == 144
    assert response.tsobs[0].obs[0].value == 95.0
    assert response.tsobs[0].obs[-1].value == 59.0
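The fixture above falls back to localhost:50050 when DSHOST/DSPORT are unset, so the same tests can also be pointed at a locally running datastore outside Docker (a sketch, assuming the KNMI test data has already been loaded):

DSHOST=localhost DSPORT=50050 pytest integration-test/test_knmi.py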
