diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml
index c86abbed..db3a9181 100644
--- a/.github/workflows/python-test.yml
+++ b/.github/workflows/python-test.yml
@@ -8,7 +8,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
- python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+ python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@main
diff --git a/README.md b/README.md
index e834ed67..bd88b2b8 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
# PAINT
-[![](https://img.shields.io/badge/Python-3.8+-blue.svg)](https://www.python.org/downloads/)
+[![](https://img.shields.io/badge/Python-3.9+-blue.svg)](https://www.python.org/downloads/)
[![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
[![](https://img.shields.io/badge/Contact-artist%40lists.kit.edu-orange?label=Contact)](artist@lists.kit.edu)
![](./coverage.svg)
@@ -14,7 +14,7 @@
PAINT is a FAIR database for Concentrating Solar Power plants (CSP).
## Installation
-We heavily recommend installing the `PAINT` package in a dedicated `Python3.8+` virtual environment. You can
+We heavily recommend installing the `PAINT` package in a dedicated `Python3.9+` virtual environment. You can
install ``PAINT`` directly from the GitHub repository via:
```bash
pip install git+https://github.com/ARTIST-Association/PAINT
diff --git a/coverage.svg b/coverage.svg
index 636889bb..6bfc8faf 100644
--- a/coverage.svg
+++ b/coverage.svg
@@ -9,13 +9,13 @@
-
+
coverage
coverage
- 44%
- 44%
+ 99%
+ 99%
diff --git a/paint/data/binary_extractor.py b/paint/data/binary_extractor.py
new file mode 100644
index 00000000..88d1b449
--- /dev/null
+++ b/paint/data/binary_extractor.py
@@ -0,0 +1,236 @@
+import json
+import struct
+from pathlib import Path
+from typing import Union
+
+import h5py
+import torch
+
+import paint.util.paint_mappings as mappings
+from paint.util.utils import to_utc_single
+
+
+class BinaryExtractor:
+ """
+ Implement an extractor that extracts data from a binary file and saves it to h5 and json.
+
+    This extractor considers data from a binary file that contains deflectometry data and heliostat properties. The data
+ is extracted and the deflectometry data saved in a h5 format and the heliostat properties as a json.
+
+ Attributes
+ ----------
+ input_path : Path
+ The file path to the binary data file that will be converted.
+ output_path : Path
+ The file path to save the converted h5 file.
+ file_name : str
+ The file name of the converted h5 file.
+ raw_data : bool
+ Whether the raw data or filled data is extracted.
+ heliostat_id : str
+ The heliostat ID of the heliostat considered in the binary file.
+ json_handle : str
+ The file path to save the json containing the heliostat properties data.
+ deflectometry_created_at : str
+ The time stamp for when the deflectometry data was created. Required for properties later.
+ surface_header_name : str
+ The name for the surface header in the binary file.
+ facet_header_name : str
+ The name for the facet header in the binary file.
+ points_on_facet_struct_name : str
+        The name of the points-on-facet structure in the binary file.
+
+ Methods
+ -------
+ nwu_to_enu()
+ Cast from an NWU to an ENU coordinate system.
+ convert_to_h5_and_extract_properties()
+ Convert binary data to h5 and extract heliostat properties not to be saved in the deflectometry file.
+ """
+
+ def __init__(
+ self,
+ input_path: Union[str, Path],
+ output_path: Union[str, Path],
+ surface_header_name: str,
+ facet_header_name: str,
+ points_on_facet_struct_name: str,
+ ) -> None:
+ """
+ Initialize the extractor.
+
+ Parameters
+ ----------
+ input_path : Union[str, Path]
+ The file path to the binary data file that will be converted.
+ output_path : Union[str, Path]
+ The file path to save the converted h5 deflectometry file.
+ surface_header_name : str
+ The name for the surface header in the binary file.
+ facet_header_name : str
+ The name for the facet header in the binary file.
+ points_on_facet_struct_name : str
+            The name of the points-on-facet structure in the binary file.
+ """
+ self.input_path = Path(input_path)
+ self.output_path = Path(output_path)
+ name_string = self.input_path.name.split("_")
+ if len(name_string) == 6:
+ file_name = (
+ name_string[1]
+ + "-"
+ + name_string[4]
+ + "-"
+ + str(to_utc_single(name_string[-1].split(".")[0]))
+ )
+ self.raw_data = False
+ else:
+ file_name = (
+ name_string[1] + "-" + str(to_utc_single(name_string[-1].split(".")[0]))
+ )
+ self.raw_data = True
+ self.heliostat_id = name_string[1]
+ self.file_name = file_name + mappings.DEFLECTOMETRY_SUFFIX
+ self.json_handle = name_string[1] + mappings.FACET_PROPERTIES_SUFFIX
+ self.deflectometry_created_at = to_utc_single(name_string[-1].split(".")[0])
+ self.surface_header_name = surface_header_name
+ self.facet_header_name = facet_header_name
+ self.points_on_facet_struct_name = points_on_facet_struct_name
+
+ @staticmethod
+ def nwu_to_enu(nwu_tensor: torch.Tensor) -> torch.Tensor:
+ """
+ Cast the coordinate system from NWU to ENU.
+
+ Parameters
+ ----------
+ nwu_tensor : torch.Tensor
+ The tensor in the NWU coordinate system.
+
+ Returns
+ -------
+ torch.Tensor
+ The converted tensor in the ENU coordinate system.
+ """
+ return torch.tensor(
+ [-nwu_tensor[1], nwu_tensor[0], nwu_tensor[2]], dtype=torch.float
+ )
+
+ def convert_to_h5_and_extract_properties(
+ self,
+ ) -> None:
+ """
+ Extract data from a binary file and save the deflectometry measurements and heliostat properties.
+
+        The binary files we consider contain both deflectometry measurements and certain heliostat properties, such as
+ the number of facets, the facet translation vectors, and the facet canting vectors. Therefore, the deflectometry
+ measurements are extracted and saved as a h5 file, whilst the heliostat properties are extracted and saved in a
+ json file.
+ """
+ # Create structures for reading binary file correctly.
+ surface_header_struct = struct.Struct(self.surface_header_name)
+ facet_header_struct = struct.Struct(self.facet_header_name)
+ points_on_facet_struct = struct.Struct(self.points_on_facet_struct_name)
+
+ with open(self.input_path, "rb") as file:
+ surface_header_data = surface_header_struct.unpack_from(
+ file.read(surface_header_struct.size)
+ )
+
+ # Calculate the number of facets.
+ n_xy = surface_header_data[5:7]
+ number_of_facets = int(n_xy[0] * n_xy[1])
+
+ # Create empty tensors for storing data.
+ facet_translation_vectors = torch.empty(number_of_facets, 3)
+ canting_e = torch.empty(number_of_facets, 3)
+ canting_n = torch.empty(number_of_facets, 3)
+ surface_points_with_facets = torch.empty(0)
+ surface_normals_with_facets = torch.empty(0)
+ for f in range(number_of_facets):
+ facet_header_data = facet_header_struct.unpack_from(
+ file.read(facet_header_struct.size)
+ )
+
+ facet_translation_vectors[f] = torch.tensor(
+ facet_header_data[1:4], dtype=torch.float
+ )
+ canting_n[f] = self.nwu_to_enu(
+ torch.tensor(
+ facet_header_data[4:7],
+ dtype=torch.float,
+ )
+ )
+ canting_e[f] = self.nwu_to_enu(
+ torch.tensor(
+ facet_header_data[7:10],
+ dtype=torch.float,
+ )
+ )
+ number_of_points = facet_header_data[10]
+ if f == 0:
+ surface_points_with_facets = torch.empty(
+ number_of_facets, number_of_points, 3
+ )
+ surface_normals_with_facets = torch.empty(
+ number_of_facets, number_of_points, 3
+ )
+
+ points_data = points_on_facet_struct.iter_unpack(
+ file.read(points_on_facet_struct.size * number_of_points)
+ )
+ for i, point_data in enumerate(points_data):
+ surface_points_with_facets[f, i, :] = torch.tensor(
+ point_data[:3], dtype=torch.float
+ )
+ surface_normals_with_facets[f, i, :] = torch.tensor(
+ point_data[3:6], dtype=torch.float
+ )
+
+ # to maintain consistency, we cast the west direction to east direction
+ canting_e[:, 0] = -canting_e[:, 0]
+
+ # extract deflectometry data and save
+ saved_deflectometry_path = (
+ Path(self.output_path)
+ / self.heliostat_id
+ / mappings.SAVE_DEFLECTOMETRY
+ / self.file_name
+ )
+ saved_deflectometry_path.parent.mkdir(parents=True, exist_ok=True)
+ with h5py.File(saved_deflectometry_path, "w") as file:
+ for i in range(number_of_facets):
+ facet = file.create_group(name=f"{mappings.FACET_KEY}{i+1}")
+ facet.create_dataset(
+ name=f"{mappings.SURFACE_NORMAL_KEY}",
+ data=surface_normals_with_facets[i, :, :],
+ )
+ facet.create_dataset(
+ name=f"{mappings.SURFACE_POINT_KEY}",
+ data=surface_points_with_facets[i, :, :],
+ )
+
+ # extract facet properties data and save
+ saved_facet_path = (
+ Path(self.output_path)
+ / self.heliostat_id
+ / mappings.SAVE_PROPERTIES
+ / self.json_handle
+ )
+ saved_facet_path.parent.mkdir(parents=True, exist_ok=True)
+ if self.raw_data:
+ with open(saved_facet_path, "w") as handle:
+ properties = {
+ mappings.NUM_FACETS: number_of_facets,
+ mappings.FACETS_LIST: [
+ {
+ mappings.TRANSLATION_VECTOR: facet_translation_vectors[
+ i, :
+ ].tolist(),
+ mappings.CANTING_E: canting_e[i, :].tolist(),
+ mappings.CANTING_N: canting_n[i, :].tolist(),
+ }
+ for i in range(number_of_facets)
+ ],
+ }
+ json.dump(properties, handle)
diff --git a/paint/data/calibration_stac.py b/paint/data/calibration_stac.py
new file mode 100755
index 00000000..ff3d5907
--- /dev/null
+++ b/paint/data/calibration_stac.py
@@ -0,0 +1,198 @@
+from typing import Any, Dict
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+
+
+def make_calibration_collection(
+ heliostat_id: str, data: pd.DataFrame
+) -> Dict[str, Any]:
+ """
+ Generate the STAC collection.
+
+ Parameters
+ ----------
+ heliostat_id: str
+ The heliostat id of the heliostat being considered.
+ data: pd.DataFrame
+ The dataframe containing all image data.
+
+ Returns
+ -------
+ dict[str, Any]
+ The STAC collection as dictionary.
+ """
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [mappings.ITEM_ASSETS_SCHEMA],
+ "id": mappings.CALIBRATION_COLLECTION_ID % heliostat_id,
+ "type": mappings.COLLECTION,
+ "title": f"Calibration images from heliostat {heliostat_id}",
+ "description": f"All calibration images from the heliostat {heliostat_id}",
+ "keywords": ["csp", "calibration", "tracking"],
+ "license": mappings.LICENSE,
+ "providers": [mappings.DLR, mappings.KIT],
+ "extent": {
+ "spatial": {
+ "bbox": [
+ mappings.POWER_PLANT_LAT,
+ mappings.POWER_PLANT_LON,
+ mappings.POWER_PLANT_LAT,
+ mappings.POWER_PLANT_LON,
+ ]
+ },
+ "temporal": {
+ "interval": [
+ data[mappings.CREATED_AT].min().strftime(mappings.TIME_FORMAT),
+ data[mappings.CREATED_AT].max().strftime(mappings.TIME_FORMAT),
+ ]
+ },
+ },
+ "summaries": {
+ "datetime": {
+ "minimum": data[mappings.CREATED_AT]
+ .min()
+ .strftime(mappings.TIME_FORMAT),
+ "maximum": data[mappings.CREATED_AT]
+ .max()
+ .strftime(mappings.TIME_FORMAT),
+ },
+ "view:sun_azimuth": {
+ "minimum": data[mappings.AZIMUTH].min(),
+ "maximum": data[mappings.AZIMUTH].max(),
+ },
+ "view:sun_elevation": {
+ "minimum": data[mappings.SUN_ELEVATION].min(),
+ "maximum": data[mappings.SUN_ELEVATION].max(),
+ },
+ "instruments": list(data[mappings.SYSTEM].unique()),
+ },
+ "links": [
+ mappings.LICENSE_LINK,
+ {
+ "rel": "self",
+ "href": mappings.CALIBRATION_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": mappings.CATALOGUE_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.CALIBRATION_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ ]
+ + [
+ {
+ "rel": "item",
+ "href": data_row[mappings.URL_KEY],
+ "type": mappings.MIME_GEOJSON,
+ "title": f"STAC item of {data_row[mappings.TITLE_KEY]}",
+ }
+ for _, data_row in data.iterrows()
+ ],
+ }
+
+
+def make_calibration_item(image: int, heliostat_data: pd.Series) -> Dict[str, Any]:
+ """
+ Generate a STAC item for an image.
+
+ Parameters
+ ----------
+ image: int
+ The image id.
+    heliostat_data: pd.Series
+ The data belonging to the heliostat.
+
+ Returns
+ -------
+ dict[str, Any]
+ The STAC item data as dictionary.
+ """
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [
+ "view",
+ ],
+ "id": f"{image}",
+ "type": "Feature",
+ "title": f"Calibration data from heliostat {heliostat_data[mappings.HELIOSTAT_ID]} for image {image}",
+ "description": f"Image of focused sunlight on the calibration target from heliostat "
+ f"{heliostat_data[mappings.HELIOSTAT_ID]} for image {image} with associated motor positions",
+ "collection": mappings.CALIBRATION_COLLECTION_ID
+ % heliostat_data[mappings.HELIOSTAT_ID],
+ "geometry": {
+ "type": "Point",
+ "coordinates": [mappings.POWER_PLANT_LON, mappings.POWER_PLANT_LAT],
+ },
+ "bbox": [
+ mappings.POWER_PLANT_LON,
+ mappings.POWER_PLANT_LAT,
+ mappings.POWER_PLANT_LON,
+ mappings.POWER_PLANT_LAT,
+ ],
+ "properties": {
+ "datetime": heliostat_data[mappings.CREATED_AT].strftime(
+ mappings.TIME_FORMAT
+ ),
+ "created": heliostat_data[mappings.CREATED_AT].strftime(
+ mappings.TIME_FORMAT
+ ),
+ "updated": heliostat_data[mappings.UPDATED_AT].strftime(
+ mappings.TIME_FORMAT
+ ),
+ "instruments": [heliostat_data[mappings.SYSTEM]],
+ },
+ "view:sun_azimuth": heliostat_data[mappings.AZIMUTH],
+ "view:sun_elevation": heliostat_data[mappings.SUN_ELEVATION],
+ "links": [
+ {
+ "rel": "self",
+ "href": f"./{image}-stac.json",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": f"./{mappings.CATALOGUE_URL}",
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "parent",
+ "href": mappings.CALIBRATION_COLLECTION_URL
+ % heliostat_data[mappings.HELIOSTAT_ID],
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.CALIBRATION_COLLECTION_URL
+ % heliostat_data[mappings.HELIOSTAT_ID],
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ mappings.CALIBRATION_TARGET_KEY: {
+ "href": f"./{image}.png",
+ "roles": ["data"],
+ "type": mappings.MIME_PNG,
+ "title": f"Calibration image with id {image}",
+ },
+ mappings.CALIBRATION_MOTOR_POS_KEY: {
+ "href": f"./{mappings.MOTOR_POS_NAME % (heliostat_data[mappings.HELIOSTAT_ID], image)}.json",
+ "roles": ["metadata"],
+ "type": mappings.MIME_PNG,
+ "title": f"Motor positions for the calibration image id {image}",
+ },
+ },
+ }
diff --git a/paint/data/catalog_stac.py b/paint/data/catalog_stac.py
new file mode 100644
index 00000000..fb768bfa
--- /dev/null
+++ b/paint/data/catalog_stac.py
@@ -0,0 +1,59 @@
+from typing import Any, Dict
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+
+
+def make_catalog(data: pd.DataFrame) -> Dict[str, Any]:
+ """
+ Generate the catalog STAC.
+
+ Parameters
+ ----------
+ data : pd.DataFrame
+ Data containing a list of heliostats.
+
+ Returns
+ -------
+ dict[str, Any]
+        The STAC catalog as dictionary.
+ """
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": mappings.CATALOG_ID,
+ "type": mappings.CATALOG,
+ "title": f"Operational data of concentrating solar power plant {mappings.POWER_PLANT_GPPD_ID}",
+ "description": "Calibration images, deflectometry measurements, heliostat properties, and weather data",
+ "links": [
+ {
+ "rel": "self",
+ "href": mappings.CATALOGUE_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": mappings.CATALOGUE_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "child",
+ "href": mappings.WEATHER_COLLECTION_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the STAC collection containing the weather data",
+ },
+ ]
+ + [
+ {
+ "rel": "child",
+ "href": mappings.HELIOSTAT_CATALOG_URL % helio_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the STAC catalog containing data for heliostat "
+ f"{helio_id}",
+ }
+ for helio_id, _ in data.iterrows()
+ ],
+ }
diff --git a/paint/data/deflectometry_stac.py b/paint/data/deflectometry_stac.py
new file mode 100644
index 00000000..c693d820
--- /dev/null
+++ b/paint/data/deflectometry_stac.py
@@ -0,0 +1,205 @@
+from typing import Any, Dict, Tuple
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint.util.utils import add_offset_to_lat_lon
+
+
+def make_deflectometry_collection(
+ heliostat_id: str, data: pd.DataFrame
+) -> Dict[str, Any]:
+ """
+ Generate a deflectometry STAC collection.
+
+ Parameters
+ ----------
+ heliostat_id: str
+ The heliostat ID of the heliostat containing the collection.
+ data: pd.DataFrame
+ The dataframe containing all deflectometry metadata.
+
+ Returns
+ -------
+ dict[str, Any]
+ The STAC collection as dictionary.
+ """
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": mappings.DEFLECTOMETRY_COLLECTION_ID % heliostat_id,
+ "type": mappings.COLLECTION,
+ "title": f"Deflectometry data for heliostat {heliostat_id}",
+ "description": f"All deflectometry data, including raw measurements, filled measurements and results summary "
+ f"for heliostat {heliostat_id}",
+ "keywords": ["csp", "deflectometry"],
+ "license": mappings.LICENSE,
+ "providers": [mappings.DLR, mappings.KIT],
+ "extent": {
+ "spatial": {
+ "bbox": [
+ [
+ data[mappings.LATITUDE_KEY].min(),
+ data[mappings.LONGITUDE_KEY].min(),
+ data[mappings.ELEVATION].min(),
+ data[mappings.LATITUDE_KEY].max(),
+ data[mappings.LONGITUDE_KEY].max(),
+ data[mappings.ELEVATION].max(),
+ ]
+ ]
+ },
+ "temporal": {
+ "interval": [
+ data[mappings.CREATED_AT].min(),
+ data[mappings.CREATED_AT].max(),
+ ]
+ },
+ },
+ "summaries": {
+ "datetime": {
+ "minimum": data[mappings.CREATED_AT].min(),
+ "maximum": data[mappings.CREATED_AT].max(),
+ },
+ "instruments": mappings.DEFLECTOMETRY_INSTRUMENTS,
+ },
+ "links": [
+ mappings.LICENSE_LINK,
+ {
+ "rel": "self",
+ "href": mappings.DEFLECTOMETRY_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": mappings.CATALOGUE_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.DEFLECTOMETRY_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ ]
+ + [
+ {
+ "rel": "item",
+ "href": data_row[mappings.URL_KEY],
+ "type": mappings.MIME_GEOJSON,
+ "title": f"STAC item of {data_row[mappings.TITLE_KEY]}",
+ }
+ for _, data_row in data.iterrows()
+ ],
+ }
+
+
+def make_deflectometry_item(
+ heliostat_key: str,
+ heliostat_data: pd.Series,
+) -> Tuple[Tuple[float, float], Dict[str, Any]]:
+ """
+ Generate a STAC item for a deflectometry measurement.
+
+ Parameters
+ ----------
+ heliostat_key: str
+ The ID of the heliostat which was measured.
+    heliostat_data: pd.Series
+ The metadata for the heliostat.
+
+ Returns
+ -------
+ Tuple[float, float]
+        The latitude and longitude coordinates of the heliostat being measured.
+ dict[str, Any]
+ The STAC item data as dictionary.
+ """
+ resource = (
+ heliostat_key + "-" + heliostat_data[mappings.CREATED_AT] + "-deflectometry"
+ )
+ lat_lon = add_offset_to_lat_lon(
+ east_offset_m=heliostat_data[mappings.EAST_KEY],
+ north_offset_m=heliostat_data[mappings.NORTH_KEY],
+ )
+ return lat_lon, {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": f"{resource}",
+ "type": "Feature",
+ "title": f"Deflectometry measurement of {heliostat_key}",
+ "description": f"Measured raw and filled deflectometry data containing point clouds and surface normals for "
+ f"heliosat {heliostat_key} and the deflectometry measurement results summary",
+ "collection": mappings.DEFLECTOMETRY_COLLECTION_ID % heliostat_key,
+ "geometry": {
+ "type": "Point",
+ "coordinates": [
+ lat_lon[0],
+ lat_lon[1],
+ heliostat_data[mappings.ALTITUDE_KEY],
+ ],
+ },
+ "bbox": [
+ lat_lon[0] - mappings.BBOX_LAT_LON_DEVIATION,
+ lat_lon[1] - mappings.BBOX_LAT_LON_DEVIATION,
+ heliostat_data[mappings.ALTITUDE_KEY] - mappings.BBOX_ALTITUDE_DEVIATION,
+ lat_lon[0] + mappings.BBOX_LAT_LON_DEVIATION,
+ lat_lon[1] + mappings.BBOX_LAT_LON_DEVIATION,
+ heliostat_data[mappings.ALTITUDE_KEY] + mappings.BBOX_ALTITUDE_DEVIATION,
+ ],
+ "properties": {
+ "datetime": heliostat_data[mappings.CREATED_AT],
+ "created": heliostat_data[mappings.CREATED_AT],
+ "instruments": f"{mappings.DEFLECTOMETRY_INSTRUMENTS}",
+ },
+ "links": [
+ {
+ "rel": "self",
+ "href": f"./{resource}-stac.json",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": f"./{mappings.CATALOGUE_URL}",
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "parent",
+ "href": mappings.DEFLECTOMETRY_COLLECTION_URL % heliostat_key,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.DEFLECTOMETRY_COLLECTION_URL % heliostat_key,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ mappings.DEFLECTOMETRY_RAW_KEY: {
+ "href": f"./{heliostat_key}-{heliostat_data[mappings.CREATED_AT]}-deflectometry.h5",
+ "roles": ["data"],
+ "type": mappings.MIME_HDF5,
+ "title": f"Raw deflectometry measurement of {heliostat_key} at "
+ f"{heliostat_data[mappings.CREATED_AT]}",
+ },
+ mappings.DEFLECTOMETRY_FILLED_KEY: {
+ "href": f"./{heliostat_key}-filled-{heliostat_data[mappings.CREATED_AT]}-deflectometry.h5",
+ "roles": ["data"],
+ "type": mappings.MIME_HDF5,
+ "title": f"Filled deflectometry measurement of {heliostat_key} at "
+ f"{heliostat_data[mappings.CREATED_AT]}",
+ },
+ mappings.DEFLECTOMETRY_RESULTS_KEY: {
+ "href": f"./{heliostat_key}-{heliostat_data[mappings.CREATED_AT]}-deflectometry-result.pdf",
+ "roles": ["metadata"],
+ "type": mappings.MIME_PDF,
+ "title": f"Summary of deflectometry measurement of {heliostat_key} at "
+ f"{heliostat_data[mappings.CREATED_AT]}",
+ },
+ },
+ }
diff --git a/paint/data/dwd_stac_item.py b/paint/data/dwd_stac_item.py
new file mode 100644
index 00000000..55a614c5
--- /dev/null
+++ b/paint/data/dwd_stac_item.py
@@ -0,0 +1,88 @@
+from typing import Any, Dict
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+
+
+def make_dwd_item(
+ data: pd.Series,
+) -> Dict[str, Any]:
+ """
+ Generate a STAC item for the DWD weather data.
+
+ Parameters
+ ----------
+ data : pd.Series
+ The metadata for the DWD weather data file.
+
+ Returns
+ -------
+ dict[str, Any]
+ The STAC item data as dictionary.
+ """
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": "dwd-weather",
+ "type": "Feature",
+ "title": "Weather data from the DWD",
+ "description": f"Weather data from the DWD station ID {data[mappings.DWD_STATION_ID]}, i.e. "
+ f"{data[mappings.DWD_STATION_NAME]}.",
+ "collection": mappings.WEATHER_COLLECTION_ID,
+ "geometry": {
+ "type": "Point",
+ "coordinates": [
+ data[mappings.LATITUDE_KEY],
+ data[mappings.LONGITUDE_KEY],
+ data[mappings.ELEVATION],
+ ],
+ },
+ "bbox": [
+ data[mappings.LATITUDE_KEY],
+ data[mappings.LONGITUDE_KEY],
+ data[mappings.ELEVATION],
+ data[mappings.LATITUDE_KEY],
+ data[mappings.LONGITUDE_KEY],
+ data[mappings.ELEVATION],
+ ],
+ "properties": {
+ "datetime": "null",
+ },
+ "start_datetime": data[mappings.DWD_START],
+ "end_datetime": data[mappings.DWD_END],
+ "links": [
+ {
+ "rel": "self",
+ "href": f"./{mappings.DWD_STAC_NAME}",
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": f"./{mappings.CATALOGUE_URL}",
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "parent",
+ "href": mappings.WEATHER_COLLECTION_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.WEATHER_COLLECTION_FILE,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ mappings.WEATHER_DATA_KEY: {
+ "href": "./dwd-weather.h5",
+ "roles": ["data"],
+ "type": mappings.MIME_HDF5,
+ "title": "Weather data from the DWD",
+ }
+ },
+ }
diff --git a/paint/data/dwd_weather.py b/paint/data/dwd_weather.py
index bb512258..cf2342d6 100755
--- a/paint/data/dwd_weather.py
+++ b/paint/data/dwd_weather.py
@@ -1,6 +1,5 @@
#!/usr/bin/env python
-import argparse
import pathlib
from typing import List, Tuple
@@ -9,7 +8,6 @@
from wetterdienst import Settings
from wetterdienst.provider.dwd.observation import DwdObservationRequest
-from paint import PAINT_ROOT
from paint.data.dwd_mappings import dwd_parameter_mapping
@@ -29,7 +27,7 @@ def __init__(
start_date: str,
end_date: str,
output_path: str,
- file_name: str = "dwd_weather.h5",
+ file_name: str = "dwd-weather.h5",
ts_shape: str = "long",
ts_humanize: bool = True,
ts_si_units: bool = False,
@@ -115,11 +113,20 @@ def _get_raw_data(
request_1h.values.all().df.to_pandas(),
)
- def download_and_save_data(self) -> None:
- """Download the desired DWD weather data and save it to an HDF5 file."""
+ def download_and_save_data(self) -> pd.DataFrame:
+ """
+ Download the desired DWD weather data and save it to an HDF5 file.
+
+ Returns
+ -------
+ pd.Dataframe
+ The metadata used for creating the STAC item.
+ """
# download the data
metadata_10min, metadata_1h, df_10min, df_1h = self._get_raw_data()
-
+ metadata_to_save = metadata_1h[
+ ["station_id", "latitude", "longitude", "height", "name"]
+ ]
assert metadata_10min.shape == metadata_1h.shape, (
"Data is not available for all stations at the given temporal resolutions. Please check the coverage of "
"different parameters and temporal resolutions here: "
@@ -170,47 +177,4 @@ def download_and_save_data(self) -> None:
file[
f"{station_id}/{dwd_parameter_mapping[parameter]}_1h/value"
] = group.value.to_numpy()
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "--parameters_10min",
- default=[
- "radiation_sky_short_wave_diffuse",
- "radiation_global",
- "sunshine_duration",
- "radiation_sky_long_wave",
- ],
- )
- parser.add_argument(
- "--parameters_1h",
- default=[
- "cloud_cover_total",
- "humidity",
- "pressure_vapor",
- "visibility_range",
- "weather",
- ],
- )
- parser.add_argument("--station_ids", default=["15000"])
- parser.add_argument("--start_date", type=str, default="2021-04-01")
- parser.add_argument("--end_date", type=str, default="2024-03-01")
- parser.add_argument("--output_path", type=str, default=f"{PAINT_ROOT}/DWD_data/")
- parser.add_argument("--file_name", type=str, default="dwd_weather.h5")
- parser.add_argument("--ts_shape", type=str, default="long")
- parser.add_argument("--ts_humanize", action="store_true", default=True)
- parser.add_argument("--ts_si_units", action="store_false", default=False)
- args = parser.parse_args()
- dwd_weather = DWDWeatherData(
- parameters_10min=args.parameters_10min,
- parameters_1h=args.parameters_1h,
- station_ids=args.station_ids,
- start_date=args.start_date,
- end_date=args.end_date,
- output_path=args.output_path,
- ts_shape=args.ts_shape,
- ts_humanize=args.ts_humanize,
- ts_si_units=args.ts_si_units,
- )
- dwd_weather.download_and_save_data()
+ return metadata_to_save
diff --git a/paint/data/facet_stac.py b/paint/data/facet_stac.py
new file mode 100644
index 00000000..69d9795b
--- /dev/null
+++ b/paint/data/facet_stac.py
@@ -0,0 +1,95 @@
+from typing import Any, Dict
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint.util.utils import add_offset_to_lat_lon
+
+
+def make_facet_item(
+ heliostat_key: str,
+ heliostat_data: pd.Series,
+) -> Dict[str, Any]:
+ """
+ Generate a STAC item for the heliostat facet properties.
+
+ Parameters
+ ----------
+ heliostat_key: str
+ The ID of the heliostat which was measured.
+    heliostat_data: pd.Series
+ The metadata for the heliostat.
+
+ Returns
+ -------
+ dict[str, Any]
+ The STAC item data as dictionary.
+ """
+ resource = heliostat_key + "-facet_properties"
+ lat_lon = add_offset_to_lat_lon(
+ east_offset_m=heliostat_data[mappings.EAST_KEY],
+ north_offset_m=heliostat_data[mappings.NORTH_KEY],
+ )
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": f"{resource}",
+ "type": "Feature",
+ "title": f"Facet properties of {heliostat_key}",
+ "description": f"The facet properties, including canting and translation vectors for heliosat {heliostat_key}",
+ "collection": mappings.HELIOSTAT_PROPERTIES_COLLECTION_ID % heliostat_key,
+ "geometry": {
+ "type": "Point",
+ "coordinates": [
+ lat_lon[0],
+ lat_lon[1],
+ heliostat_data[mappings.ALTITUDE_KEY],
+ ],
+ },
+ "bbox": [
+ lat_lon[0] - mappings.BBOX_LAT_LON_DEVIATION,
+ lat_lon[1] - mappings.BBOX_LAT_LON_DEVIATION,
+ heliostat_data[mappings.ALTITUDE_KEY] - mappings.BBOX_ALTITUDE_DEVIATION,
+ lat_lon[0] + mappings.BBOX_LAT_LON_DEVIATION,
+ lat_lon[1] + mappings.BBOX_LAT_LON_DEVIATION,
+ heliostat_data[mappings.ALTITUDE_KEY] + mappings.BBOX_ALTITUDE_DEVIATION,
+ ],
+ "properties": {
+ "datetime": heliostat_data[mappings.CREATED_AT],
+ "created": heliostat_data[mappings.CREATED_AT],
+ },
+ "links": [
+ {
+ "rel": "self",
+ "href": f"./{resource}-stac.json",
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": f"./{mappings.CATALOGUE_URL}",
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "parent",
+ "href": mappings.HELIOSTAT_PROPERTIES_COLLECTION_URL % heliostat_key,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.HELIOSTAT_PROPERTIES_COLLECTION_URL % heliostat_key,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ mappings.FACET_PROPERTIES_KEY: {
+ "href": f"./{resource}.json",
+ "roles": ["data"],
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Facet properties of {heliostat_key}",
+ }
+ },
+ }
diff --git a/paint/data/heliostat_catalog_stac.py b/paint/data/heliostat_catalog_stac.py
new file mode 100644
index 00000000..2129d33c
--- /dev/null
+++ b/paint/data/heliostat_catalog_stac.py
@@ -0,0 +1,60 @@
+from typing import Any, Dict
+
+import paint.util.paint_mappings as mappings
+
+
+# TODO: Fix, so that only the links are saved that are actually there!
+def make_heliostat_catalog(heliostat_id: str) -> Dict[str, Any]:
+ """
+ Generate a catalog for each heliostat STAC.
+
+ Parameters
+ ----------
+ heliostat_id : str
+ The heliostat ID for the considered heliostat.
+
+ Returns
+ -------
+ dict[str, Any]
+        The STAC catalog as dictionary.
+ """
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": mappings.HELIOSTAT_CATALOG_ID % heliostat_id,
+ "type": mappings.CATALOG,
+ "title": f"Operational data for the heliostat {heliostat_id}",
+ "description": "Calibration images, deflectometry measurements, heliostat properties, and weather data",
+ "links": [
+ {
+ "rel": "self",
+ "href": mappings.HELIOSTAT_CATALOG_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC catalog file",
+ },
+ {
+ "rel": "root",
+ "href": mappings.CATALOGUE_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the parent catalog",
+ },
+ {
+ "rel": "child",
+ "href": mappings.DEFLECTOMETRY_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the STAC collection containing the deflectometry data",
+ },
+ {
+ "rel": "child",
+ "href": mappings.CALIBRATION_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the STAC collection containing the calibration data",
+ },
+ {
+ "rel": "child",
+ "href": mappings.HELIOSTAT_PROPERTIES_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the STAC collection containing the heliostat properties",
+ },
+ ],
+ }
diff --git a/paint/data/kinematic_stac.py b/paint/data/kinematic_stac.py
new file mode 100644
index 00000000..f820f581
--- /dev/null
+++ b/paint/data/kinematic_stac.py
@@ -0,0 +1,97 @@
+from typing import Any, Dict, Tuple
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint.util.utils import add_offset_to_lat_lon, to_utc_single
+
+
+def make_kinematic_item(
+ heliostat_key: str,
+ heliostat_data: pd.Series,
+) -> Tuple[Tuple[float, float], Dict[str, Any]]:
+ """
+ Generate a STAC item for the heliostat kinematic properties.
+
+ Parameters
+ ----------
+ heliostat_key: str
+ The ID of the heliostat which was measured.
+ heliostat_data: pd.Series
+ The metadata for the heliostat.
+
+ Returns
+ -------
+ Tuple[float, float]
+ The latitude and longitude coordinates of the heliostat.
+ Dict[str, Any]
+ The STAC item data as dictionary.
+ """
+ resource = heliostat_key + "-kinematic_properties"
+ lat_lon = add_offset_to_lat_lon(
+ east_offset_m=heliostat_data[mappings.EAST_KEY],
+ north_offset_m=heliostat_data[mappings.NORTH_KEY],
+ )
+ return lat_lon, {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": f"{resource}",
+ "type": "Feature",
+ "title": f"Kinematic properties of {heliostat_key}",
+ "description": f"The kinematic properties that describe the kinematic applied in {heliostat_key}",
+ "collection": mappings.HELIOSTAT_PROPERTIES_COLLECTION_ID % heliostat_key,
+ "geometry": {
+ "type": "Point",
+ "coordinates": [
+ lat_lon[0],
+ lat_lon[1],
+ heliostat_data[mappings.ALTITUDE_KEY],
+ ],
+ },
+ "bbox": [
+ lat_lon[0] - mappings.BBOX_LAT_LON_DEVIATION,
+ lat_lon[1] - mappings.BBOX_LAT_LON_DEVIATION,
+ heliostat_data[mappings.ALTITUDE_KEY] - mappings.BBOX_ALTITUDE_DEVIATION,
+ lat_lon[0] + mappings.BBOX_LAT_LON_DEVIATION,
+ lat_lon[1] + mappings.BBOX_LAT_LON_DEVIATION,
+ heliostat_data[mappings.ALTITUDE_KEY] + mappings.BBOX_ALTITUDE_DEVIATION,
+ ],
+ "properties": {
+ "datetime": to_utc_single(heliostat_data[mappings.CREATED_AT]),
+ "created": to_utc_single(heliostat_data[mappings.CREATED_AT]),
+ },
+ "links": [
+ {
+ "rel": "self",
+ "href": f"./{resource}-stac.json",
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": mappings.CATALOGUE_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "parent",
+ "href": mappings.HELIOSTAT_PROPERTIES_COLLECTION_URL % heliostat_key,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.HELIOSTAT_PROPERTIES_COLLECTION_URL % heliostat_key,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ mappings.KINEMATIC_PROPERTIES_KEY: {
+ "href": f"./{resource}.json",
+ "roles": ["data"],
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Kinematic properties of {heliostat_key}",
+ }
+ },
+ }
diff --git a/paint/data/properties_collection_stac.py b/paint/data/properties_collection_stac.py
new file mode 100644
index 00000000..561eba73
--- /dev/null
+++ b/paint/data/properties_collection_stac.py
@@ -0,0 +1,91 @@
+from typing import Any, Dict
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+
+
+def make_properties_collection(heliostat_id: str, data: pd.DataFrame) -> Dict[str, Any]:
+ """
+ Generate a heliostat properties STAC collection.
+
+ Parameters
+ ----------
+ heliostat_id: str
+ The heliostat ID of the heliostat containing the collection.
+ data: pd.DataFrame
+ The dataframe containing all deflectometry metadata.
+
+ Returns
+ -------
+ dict[str, Any]
+ The STAC collection as dictionary.
+ """
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": mappings.HELIOSTAT_PROPERTIES_COLLECTION_ID % heliostat_id,
+ "type": mappings.COLLECTION,
+ "title": f"Heliostat properties data for {heliostat_id}",
+ "description": f"All heliostat properties, including the facet properties and kinematic properties for "
+ f"heliostat {heliostat_id}",
+ "keywords": ["csp", "facet", "kinematic", "properties"],
+ "license": mappings.LICENSE,
+ "providers": [mappings.DLR, mappings.KIT],
+ "extent": {
+ "spatial": {
+ "bbox": [
+ [
+ data[mappings.LATITUDE_KEY].min(),
+ data[mappings.LONGITUDE_KEY].min(),
+ data[mappings.ELEVATION].min(),
+ data[mappings.LATITUDE_KEY].max(),
+ data[mappings.LONGITUDE_KEY].max(),
+ data[mappings.ELEVATION].max(),
+ ]
+ ]
+ },
+ "temporal": {
+ "interval": [
+ data[mappings.CREATED_AT].min(),
+ data[mappings.CREATED_AT].max(),
+ ]
+ },
+ },
+ "summaries": {
+ "datetime": {
+ "minimum": data[mappings.CREATED_AT].min(),
+ "maximum": data[mappings.CREATED_AT].max(),
+ },
+ },
+ "links": [
+ mappings.LICENSE_LINK,
+ {
+ "rel": "self",
+ "href": mappings.HELIOSTAT_PROPERTIES_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": mappings.CATALOGUE_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.HELIOSTAT_PROPERTIES_COLLECTION_URL % heliostat_id,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ ]
+ + [
+ {
+ "rel": "item",
+ "href": data_row[mappings.URL_KEY],
+ "type": mappings.MIME_GEOJSON,
+ "title": f"STAC item of {data_row[mappings.TITLE_KEY]}",
+ }
+ for _, data_row in data.iterrows()
+ ],
+ }
diff --git a/paint/data/restructure_calib_data.py b/paint/data/restructure_calib_data.py
deleted file mode 100644
index 5c82f410..00000000
--- a/paint/data/restructure_calib_data.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# TODO: Find out why this code is here!
-
-# import pandas as pd
-#
-# from paint.util.utils import heliostat_id_to_heliostat_name
-#
-# measurement_path = "data/DatenHeliOS/calib_data.csv"
-# df_measurements = pd.read_csv(measurement_path)
-# df_measurements = df_measurements.set_index("id") # Set df id as index
-# # Get all existing heliostat IDs and their entry counts
-# heliostat_counts = df_measurements["HeliostatId"].value_counts()
-# # Replace HeliostatId with heliostat names in heliostat_counts dataframe
-# heliostat_counts.index = heliostat_counts.index.map(heliostat_id_to_heliostat_name)
-#
-# # remove unneeded columns
-# # Field ID always 1, Camera ID always 0 ,
-# # System always "HeliOS", Version always 1,
-# # LastScore and GeometryData are fitted,
-# # UpdatedAt same time as CreatedAt
-# df_measurements = df_measurements.drop(
-# columns=[
-# "FieldId",
-# "CameraId",
-# "System",
-# "Version",
-# "LastScore",
-# "GeometryData",
-# "UpdatedAt",
-# ]
-# )
-
-# TODO: Add column for Calibration Image and and Target
diff --git a/paint/data/weather_collection_stac.py b/paint/data/weather_collection_stac.py
new file mode 100644
index 00000000..4e5a9a91
--- /dev/null
+++ b/paint/data/weather_collection_stac.py
@@ -0,0 +1,88 @@
+from typing import Any, Dict
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+
+
+def make_weather_collection(data: pd.DataFrame) -> Dict[str, Any]:
+ """
+ Generate a weather STAC collection.
+
+ Parameters
+ ----------
+ data: pd.DataFrame
+ The dataframe containing all weather metadata.
+
+ Returns
+ -------
+ dict[str, Any]
+ The STAC collection as dictionary.
+ """
+ return {
+ "stac_version": mappings.STAC_VERSION,
+ "stac_extensions": [],
+ "id": mappings.WEATHER_COLLECTION_ID,
+ "type": mappings.COLLECTION,
+ "title": "All weather measurements",
+ "description": "All weather measurements",
+ "keywords": ["weather"],
+ "license": mappings.LICENSE,
+ "providers": [mappings.DLR, mappings.KIT],
+ "extent": {
+ "spatial": {
+ "bbox": [
+ [
+ data[mappings.LATITUDE_KEY].min(),
+ data[mappings.LONGITUDE_KEY].min(),
+ data[mappings.ELEVATION].min(),
+ data[mappings.LATITUDE_KEY].max(),
+ data[mappings.LONGITUDE_KEY].max(),
+ data[mappings.ELEVATION].max(),
+ ]
+ ]
+ },
+ "temporal": {
+ "interval": [
+ data[mappings.DWD_START].min(),
+ data[mappings.DWD_END].max(),
+ ]
+ },
+ },
+ "summaries": {
+ "datetime": {
+ "minimum": data[mappings.DWD_START].min(),
+ "maximum": data[mappings.DWD_END].max(),
+ },
+ },
+ "links": [
+ mappings.LICENSE_LINK,
+ {
+ "rel": "self",
+ "href": mappings.WEATHER_COLLECTION_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": mappings.CATALOGUE_URL,
+ "type": mappings.MIME_GEOJSON,
+ "title": f"Reference to the entire catalogue for {mappings.POWER_PLANT_GPPD_ID}",
+ },
+ {
+ "rel": "collection",
+ "href": mappings.WEATHER_COLLECTION_FILE,
+ "type": mappings.MIME_GEOJSON,
+ "title": "Reference to this STAC collection file",
+ },
+ ]
+ + [
+ {
+ "rel": "item",
+ "href": data_row[mappings.URL_KEY],
+ "type": mappings.MIME_GEOJSON,
+ "title": f"STAC item of {data_row[mappings.TITLE_KEY]}",
+ }
+ for _, data_row in data.iterrows()
+ ],
+ }
diff --git a/paint/util/paint_mappings.py b/paint/util/paint_mappings.py
index d0322b9a..ff6dd4ee 100644
--- a/paint/util/paint_mappings.py
+++ b/paint/util/paint_mappings.py
@@ -1,22 +1,175 @@
+# CSV columns
INTERNAL_NAME_INDEX = "InternalName"
ID_INDEX = "id"
HELIOSTAT_ID = "HeliostatId"
X_Y_Z_POSITIONS = ["x", "y", "z"]
DEFLECTOMETRY_AVAILABLE = "DeflectometryAvailable"
CREATED_AT = "CreatedAt"
+UPDATED_AT = "UpdatedAt"
YEAR = "Year"
MONTH = "Month"
HOUR = "Hour"
AZIMUTH = "Azimuth"
ELEVATION = "Elevation"
+SUN_ELEVATION = "Sun_elevation"
+SYSTEM = "System"
+CALIBRATION_TARGET = "CalibrationTargetId"
+AXIS1_MOTOR = "Axis1MotorPosition"
+AXIS2_MOTOR = "Axis2MotorPosition"
MEASURED_SURFACE = "MeasuredSurface"
SUN_POSITION_EAST = "SunPosE"
SUN_POSITION_NORTH = "SunPosN"
SUN_POSITION_UP = "SunPosU"
+DATA_SET_AZIMUTH = "DataSet_Azimuth"
+JUNE_DISTANCE = "Jun_Distance"
+DECEMBER_DISTANCE = "Dec_Distance"
+
+# dataset
TOTAL_INDEX = "Total"
TRAIN_INDEX = "train"
TEST_INDEX = "test"
VALIDATION_INDEX = "validation"
-DATA_SET_AZIMUTH = "DataSet_Azimuth"
-DECEMBER_DISTANCE = "Dec_Distance"
-JUNE_DISTANCE = "Jun_Distance"
+
+# STAC
+STAC_VERSION = "1.0.0"
+ITEM_ASSETS_SCHEMA = (
+ "https://stac-extensions.github.io/item-assets/v1.0.0/schema.json"
+)
+LICENSE = "CDLA-2.0"
+LICENSE_LINK = {
+ "rel": "license",
+ "href": "https://cdla.dev/permissive-2-0/",
+ "type": "text/html",
+ "title": "Community Data License Agreement – Permissive – Version 2.0",
+}
+PAINT_URL = "https://github.com/ARTIST-Association/PAINT/"
+DLR = {
+ "name": "German Aerospace Center (DLR)",
+ "description": "National center for aerospace, energy and transportation research of Germany",
+ "roles": ["licensor", "producer", "processor"],
+ "url": PAINT_URL,
+}
+KIT = {
+ "name": "Karlsruhe Institute of Technology (KIT)",
+ "description": "Public research center and university in Karlsruhe, Germany",
+ "roles": ["producer", "processor", "host"],
+ "url": PAINT_URL,
+}
+POWER_PLANT_GPPD_ID = "WRI1030197"
+POWER_PLANT_LAT = 50.913296351383806
+POWER_PLANT_LON = 6.387514846666862
+POWER_PLANT_ALT = 87
+
+CATALOG = "Catalog"
+COLLECTION = "Collection"
+FEATURE = "Feature"
+
+TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
+
+MIME_PNG = "image/png"
+MIME_GEOJSON = "application/geo+json"
+MIME_HDF5 = "application/x-hdf5"
+MIME_PDF = "application/pdf"
+
+CATALOG_ID = f"{POWER_PLANT_GPPD_ID}-catalog"
+CATALOG_FILE = f"{POWER_PLANT_GPPD_ID}-catalog-stac.json"
+
+HELIOSTAT_CATALOG_ID = "%s-heliostat-catalog"
+HELIOSTAT_CATALOG_FILE = "%s-heliostat-catalog-stac.json"
+HELIOSTAT_CATALOG_URL = f"INSERT/SOMETHING/HERE/{HELIOSTAT_CATALOG_FILE}?download=1"
+
+CATALOGUE_URL = "Insert/URL/Here"
+
+CALIBRATION_COLLECTION_ID = "%s-calibration-collection"
+CALIBRATION_COLLECTION_FILE = "%s-calibration-collection-stac.json"
+CALIBRATION_COLLECTION_URL = (
+ f"INSERT/SOMETHING/HERE/{CALIBRATION_COLLECTION_FILE}?download=1"
+)
+CALIBRATION_ITEM = "%s-%d-calibration-item-stac.json"
+CALIBRATION_ITEM_URL = f"INSERT/SOMETHING/HERE/{CALIBRATION_ITEM}?download=1"
+
+DEFLECTOMETRY_COLLECTION_ID = "%s-deflectometry-collection"
+DEFLECTOMETRY_COLLECTION_FILE = "%s-deflectometry-collection-stac.json"
+DEFLECTOMETRY_COLLECTION_URL = (
+ f"INSERT/SOMETHING/HERE/{DEFLECTOMETRY_COLLECTION_FILE}?download=1"
+)
+DEFLECTOMETRY_ITEM = "%s-%s-deflectometry-item-stac.json"
+DEFLECTOMETRY_ITEM_URL = f"INSERT/SOMETHING/HERE/{DEFLECTOMETRY_ITEM}?download=1"
+
+DEFLECTOMETRY_PDF_NAME = "%s-%s-deflectometry-result.pdf"
+DEFLECTOMETRY_INSTRUMENTS = "QDec_2014-101"
+
+HELIOSTAT_PROPERTIES_COLLECTION_ID = "%s-heliostat_properties-collection"
+HELIOSTAT_PROPERTIES_COLLECTION_FILE = "%s-heliostat_properties-collection-stac.json"
+HELIOSTAT_PROPERTIES_COLLECTION_URL = (
+ f"INSERT/SOMETHING/HERE/{HELIOSTAT_PROPERTIES_COLLECTION_FILE}?download=1"
+)
+FACET_PROPERTIES_ITEM = "%s-facet_properties-item-stac.json"
+FACET_PROPERTIES_ITEM_ITEM_URL = (
+ f"INSERT/SOMETHING/HERE/{FACET_PROPERTIES_ITEM}?download=1"
+)
+KINEMATIC_PROPERTIES_ITEM = "%s-kinematic_properties-item-stac.json"
+KINEMATIC_PROPERTIES_ITEM_URL = (
+ f"INSERT/SOMETHING/HERE/{KINEMATIC_PROPERTIES_ITEM}?download=1"
+)
+
+WEATHER_COLLECTION_ID = "weather-collection"
+WEATHER_COLLECTION_FILE = "weather-collection-stac.json"
+WEATHER_COLLECTION_URL = f"INSERT/SOMETHING/HERE/{WEATHER_COLLECTION_FILE}?download=1"
+
+
+URL_KEY = "url"
+TITLE_KEY = "title"
+LATITUDE_KEY = "latitude"
+LONGITUDE_KEY = "longitude"
+DEFLECTOMETRY_RAW_KEY = "raw_measurement"
+DEFLECTOMETRY_FILLED_KEY = "filled_measurement"
+DEFLECTOMETRY_RESULTS_KEY = "results_summary"
+CALIBRATION_TARGET_KEY = "target"
+CALIBRATION_MOTOR_POS_KEY = "motor_positions"
+FACET_PROPERTIES_KEY = "facet_properties"
+KINEMATIC_PROPERTIES_KEY = "kinematic_properties"
+MOTOR_POS_NAME = "%s-%d-motor-position"
+WEATHER_DATA_KEY = "weather_data"
+
+SAVE_DEFLECTOMETRY = "Deflectometry"
+SAVE_PROPERTIES = "Properties"
+SAVE_CALIBRATION = "Calibration"
+BBOX_LAT_LON_DEVIATION = 2e-05
+BBOX_ALTITUDE_DEVIATION = 2
+
+# Convert deflectometry
+FACET_KEY = "facet"
+SURFACE_NORMAL_KEY = "surface_normals"
+SURFACE_POINT_KEY = "surface_points"
+FACETS_LIST = "facets"
+NUM_FACETS = "number_of_facets"
+TRANSLATION_VECTOR = "translation_vector"
+CANTING_E = "canting_e"
+CANTING_N = "canting_n"
+DEFLECTOMETRY_SUFFIX = "-deflectometry.h5"
+FACET_PROPERTIES_SUFFIX = "-facet_properties.json"
+DEFLECTOMETRY_CREATED_AT = "deflectometry_created_at"
+KINEMATIC_PROPERTIES_SUFFIX = "-kinematic_properties.json"
+
+# Combine properties
+EAST_KEY = "East"
+NORTH_KEY = "North"
+ALTITUDE_KEY = "Altitude"
+FIELD_ID = "FieldId"
+HELIOSTAT_SIZE = "Heliostat Size"
+KINEMATIC_KEY = "kinematic"
+HEIGHT_ABOVE_GROUND = "HeightAboveGround"
+
+# DWD KEYS
+DWD_STATION_ID = "StationID"
+DWD_STATION_NAME = "StationName"
+DWD_START = "start"
+DWD_END = "end"
+DWD_STAC_NAME = "dwd-weather-item-stac"
+DWD_STAT_URL = f"INSERT/SOMETHING/HERE/{DWD_STAC_NAME}?download=1"
+
+# Constants for WGS84
+WGS84_A = 6378137.0 # Major axis in meters
+WGS84_B = 6356752.314245 # Minor axis in meters
+WGS84_E2 = (WGS84_A**2 - WGS84_B**2) / WGS84_A**2 # Eccentricity squared
diff --git a/paint/util/preprocessing.py b/paint/util/preprocessing.py
new file mode 100644
index 00000000..9f0ff211
--- /dev/null
+++ b/paint/util/preprocessing.py
@@ -0,0 +1,125 @@
+import argparse
+
+import pandas as pd
+
+from paint.util import paint_mappings as mappings
+from paint.util.utils import heliostat_id_to_name
+
+
+def load_and_format_heliostat_axis_data(arguments: argparse.Namespace) -> pd.DataFrame:
+ """
+ Prepare the axis csv for concatenation by changing certain column names and rearranging the order.
+
+ Parameters
+ ----------
+ arguments : argparse.Namespace
+ The arguments containing the input and output path.
+
+ Returns
+ -------
+ pd.DataFrame
+ The processed axis dataframe.
+ """
+ df_axis = pd.read_csv(
+ arguments.input_axis,
+ header=0,
+ decimal=",",
+ sep=";",
+ )
+ pivoted_df = df_axis.pivot(index=mappings.HELIOSTAT_ID, columns="Number")
+ # Flatten the multi-index columns
+ pivoted_df.columns = [
+ "_".join(map(str, col)).strip() for col in pivoted_df.columns.values
+ ]
+
+ # Reset index to bring 'HeliostatId' back as a column
+ pivoted_df = pivoted_df.reset_index()
+ # Rename columns that are always identical
+ pivoted_df = pivoted_df.rename(
+ columns={
+ "FieldId_1": mappings.FIELD_ID,
+ "CreatedAt_1": mappings.CREATED_AT,
+ "UpdatedAt_1": mappings.UPDATED_AT,
+ }
+ )
+ pivoted_df = pivoted_df.drop(columns=["FieldId_2", "CreatedAt_2", "UpdatedAt_2"])
+ pivoted_df.columns = [
+ col.replace("_1", "_axis_1").replace("_2", "_axis_2")
+ for col in pivoted_df.columns
+ ]
+ # Get list of columns ending with _axis_1 and _axis_2
+ axis_1_columns = [col for col in pivoted_df.columns if col.endswith("_axis_1")]
+ axis_2_columns = [col for col in pivoted_df.columns if col.endswith("_axis_2")]
+
+ # Sort the columns list to have _axis_1 columns first, followed by _axis_2 columns
+ sorted_columns = axis_1_columns + axis_2_columns
+
+ # Reorder columns in the dataframe
+ pivoted_df = pivoted_df[
+ [mappings.HELIOSTAT_ID, mappings.FIELD_ID, mappings.CREATED_AT] + sorted_columns
+ ]
+ pivoted_df.set_index(mappings.HELIOSTAT_ID, inplace=True)
+ pivoted_df.index = pivoted_df.index.map(heliostat_id_to_name)
+ return pivoted_df
+
+
+def load_and_format_heliostat_positions(
+ arguments: argparse.Namespace,
+) -> pd.DataFrame:
+ """
+ Prepare the heliostat positions csv for concatenation by changing certain column names and rearranging the order.
+
+ Parameters
+ ----------
+ arguments : argparse.Namespace
+ The arguments containing the input and output path.
+
+ Returns
+ -------
+ pd.DataFrame
+ The processed heliostat positions dataframe.
+ """
+ df_heliostat_positions = pd.read_excel(arguments.input_position, header=0)
+ df_heliostat_positions.set_index(mappings.INTERNAL_NAME_INDEX, inplace=True)
+ df_heliostat_positions.rename_axis(mappings.HELIOSTAT_ID, inplace=True)
+ # Drop the specified columns
+ df_heliostat_positions.drop(
+ columns=["RowName", "Number", "ColumnName"], inplace=True
+ )
+
+ # Rename the columns
+ df_heliostat_positions.rename(
+ columns={
+ "x": mappings.EAST_KEY,
+ "y": mappings.NORTH_KEY,
+ "z": mappings.ALTITUDE_KEY,
+ "Spalte1": mappings.HEIGHT_ABOVE_GROUND,
+ },
+ inplace=True,
+ )
+
+ return df_heliostat_positions
+
+
+def merge_and_sort_df(
+ df_heliostat_positions: pd.DataFrame, df_axis: pd.DataFrame
+) -> pd.DataFrame:
+ """
+ Concatenate the heliostat position and heliostat axis data and sort in the correct order.
+
+ Parameters
+ ----------
+ df_heliostat_positions : pd.DataFrame
+ The dataframe containing the heliostat positions.
+ df_axis : pd.DataFrame
+ The dataframe containing the heliostat axis data.
+
+ Returns
+ -------
+ pd.DataFrame
+ The concatenated and sorted data frame.
+ """
+ df_concatenated = pd.concat([df_heliostat_positions, df_axis], axis=1, join="inner")
+ created_at = df_concatenated.pop(mappings.CREATED_AT)
+ df_concatenated.insert(0, mappings.CREATED_AT, created_at)
+ return df_concatenated
diff --git a/paint/util/utils.py b/paint/util/utils.py
index 72b047a1..db475ca5 100644
--- a/paint/util/utils.py
+++ b/paint/util/utils.py
@@ -1,7 +1,11 @@
-from typing import Tuple, Union
+import math
+from datetime import datetime
+from typing import List, Tuple, Union
import numpy as np
import pandas as pd
+import pytz
+from dateutil import parser
import paint.util.paint_mappings as mappings
@@ -22,24 +26,25 @@ def calculate_azimuth_and_elevation(df: pd.DataFrame) -> Tuple[np.ndarray, np.nd
np.ndarray
The calculated elevation in degrees.
"""
- # Extract sun positions in each coordinate.
+ # extract sun positions in each coordinate
sun_position_east = np.array(df[mappings.SUN_POSITION_EAST])
sun_position_north = -np.array(df[mappings.SUN_POSITION_NORTH])
sun_position_up = np.array(df[mappings.SUN_POSITION_UP])
- # Calculate azimuth and evaluation and return.
+ # calculate azimuth and evaluation and return.
azimuth_degree = np.degrees(np.arctan2(sun_position_east, sun_position_north))
elevation_degree = np.degrees(
np.arctan2(
sun_position_up, np.sqrt(sun_position_east**2 + sun_position_north**2)
)
)
+
return azimuth_degree, elevation_degree
-def heliostat_id_to_heliostat_name(heliostat_id: Union[int, str]) -> str:
+def heliostat_id_to_name(heliostat_id: Union[int, str]) -> str:
"""
- Convert a heliostat id to a heliostat name.
+ Convert a heliostat id to its name.
Parameters
----------
@@ -52,10 +57,9 @@ def heliostat_id_to_heliostat_name(heliostat_id: Union[int, str]) -> str:
The heliostat name derived from the heliostat ID.
"""
str_ = str(heliostat_id)
- name = chr(ord("A") + int(str_[0]) - 1)
- name += chr(ord("A") + int(str_[1:3]) - 1)
- name += str_[3:]
- return name
+ return "".join(
+ [chr(ord("A") + int(str_[0]) - 1), chr(ord("A") + int(str_[1:3]) - 1), str_[3:]]
+ )
def to_utc(time_series: pd.Series) -> pd.Series:
@@ -77,3 +81,140 @@ def to_utc(time_series: pd.Series) -> pd.Series:
.dt.tz_localize("Europe/Berlin", ambiguous="infer")
.dt.tz_convert("UTC")
)
+
+
+def to_utc_single(datetime_str: str, local_tz: str = "Europe/Berlin") -> str:
+ """
+ Parse a single local datetime string and convert to UTC.
+
+ Parameters
+ ----------
+ datetime_str : str
+ The string containing the local datetime.
+ local_tz : str
+ The local timezone (Default: 'Europe/Berlin').
+
+ Returns
+ -------
+ str
+ The corresponding UTC datetime string.
+ """
+ try:
+ # Try parsing with dateutil.parser for general datetime strings
+ local_time = parser.parse(datetime_str)
+ except ValueError:
+ try:
+ # Fall back to manual parsing for specific format "%y%m%d%H%M%S"
+ local_time = datetime.strptime(datetime_str, "%y%m%d%H%M%S")
+ except ValueError as e:
+ raise ValueError(f"Unable to parse datetime string: {datetime_str}") from e
+
+ # Localize the datetime object to the specified local timezone
+ local_tz_obj = pytz.timezone(local_tz)
+ if local_time.tzinfo is None:
+ local_time = local_tz_obj.localize(local_time, is_dst=None)
+
+ # Convert the localized datetime to UTC
+ utc_time = local_time.astimezone(pytz.utc)
+
+ # Return the UTC datetime as a string
+ return utc_time.strftime(mappings.TIME_FORMAT)
+
+
+def add_offset_to_lat_lon(
+ north_offset_m: float, east_offset_m: float
+) -> Tuple[float, float]:
+ """
+ Add a north/east offset in meters to the power plant's latitude and longitude coordinates.
+
+ Parameters
+ ----------
+ north_offset_m : float
+ The distance in meters to add to the latitude.
+ east_offset_m : float
+ The distance in meters to add to the longitude.
+
+ Returns
+ -------
+ float
+ The new latitude in degrees.
+ float
+ The new longitude in degrees.
+ """
+ # Convert latitude and longitude to radians
+ lat_rad = math.radians(mappings.POWER_PLANT_LAT)
+ lon_rad = math.radians(mappings.POWER_PLANT_LON)
+
+ # Calculate transverse (prime vertical) radius of curvature
+ sin_lat = math.sin(lat_rad)
+ rn = mappings.WGS84_A / math.sqrt(1 - mappings.WGS84_E2 * sin_lat**2)
+
+ # Calculate meridional radius of curvature
+ rm = (mappings.WGS84_A * (1 - mappings.WGS84_E2)) / (
+ (1 - mappings.WGS84_E2 * sin_lat**2) ** 1.5
+ )
+
+ # Calculate new latitude
+ dlat = north_offset_m / rm
+ new_lat_rad = lat_rad + dlat
+
+ # Calculate new longitude using the prime vertical radius of curvature at the plant latitude
+ dlon = east_offset_m / (rn * math.cos(lat_rad))
+ new_lon_rad = lon_rad + dlon
+
+ # Convert back to degrees
+ new_lat = math.degrees(new_lat_rad)
+ new_lon = math.degrees(new_lon_rad)
+
+ return new_lat, new_lon
+
+
+def calculate_heliostat_position_in_m_from_lat_lon(
+ lat1: float, lon1: float, alt: float
+) -> List[float]:
+ """
+ Calculate the position of a heliostat in meters from given latitude, longitude, and altitude.
+
+ This function calculates the north and east offsets in meters of a heliostat from the power plant location.
+ It converts the latitude and longitude to radians, calculates the radius of curvature values,
+ and then computes the offsets based on the differences between the heliostat and power plant coordinates.
+ Finally, it returns a list containing these offsets along with the altitude difference.
+
+ Parameters
+ ----------
+ lat1 : float
+ The latitude of the heliostat in degrees.
+ lon1 : float
+ The longitude of the heliostat in degrees.
+ alt : float
+ The altitude of the heliostat.
+
+ Returns
+ -------
+ List[float]
+ The north offset in meters, east offset in meters, and the altitude difference from the power plant.
+ """
+ # Convert latitude and longitude to radians
+ lat_heliostat_rad = math.radians(lat1)
+ lon_heliostat_rad = math.radians(lon1)
+ alt_heliostat = alt - mappings.POWER_PLANT_ALT
+ lat_tower_rad = math.radians(mappings.POWER_PLANT_LAT)
+ lon_tower_rad = math.radians(mappings.POWER_PLANT_LON)
+
+ # Calculate transverse (prime vertical) radius of curvature at the heliostat latitude
+ sin_lat1 = math.sin(lat_heliostat_rad)
+ rn1 = mappings.WGS84_A / math.sqrt(1 - mappings.WGS84_E2 * sin_lat1**2)
+
+ # Calculate meridional radius of curvature at the heliostat latitude
+ rm1 = (mappings.WGS84_A * (1 - mappings.WGS84_E2)) / (
+ (1 - mappings.WGS84_E2 * sin_lat1**2) ** 1.5
+ )
+
+ # Calculate delta latitude and delta longitude in radians
+ dlat_rad = lat_tower_rad - lat_heliostat_rad
+ dlon_rad = lon_tower_rad - lon_heliostat_rad
+
+ # Calculate north and east offsets in meters
+ north_offset_m = dlat_rad * rm1
+ east_offset_m = dlon_rad * rn1 * math.cos(lat_heliostat_rad)
+ return [-north_offset_m, -east_offset_m, alt_heliostat]
diff --git a/plots/01_heliostat_position.py b/plots/01_heliostat_position.py
index 869c753e..d2c83496 100755
--- a/plots/01_heliostat_position.py
+++ b/plots/01_heliostat_position.py
@@ -11,7 +11,7 @@
import paint.util.paint_mappings as mappings
from paint import PAINT_ROOT
-from paint.util.utils import heliostat_id_to_heliostat_name
+from paint.util.utils import heliostat_id_to_name
class HeliostatPositionPlot:
@@ -124,9 +124,7 @@ def load_data(
# Get all existing heliostat IDs and their entry counts
heliostat_counts = df_measurements[mappings.HELIOSTAT_ID].value_counts()
# Replace HeliostatId with heliostat names in heliostat_counts dataframe
- heliostat_counts.index = heliostat_counts.index.map(
- heliostat_id_to_heliostat_name
- )
+ heliostat_counts.index = heliostat_counts.index.map(heliostat_id_to_name)
# Load deflectometry availability from file
df_deflectometry = pd.read_excel(path_to_deflectometry)
diff --git a/pyproject.toml b/pyproject.toml
index 5a0805a1..70806681 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -18,13 +18,15 @@ classifiers = [
]
dependencies = [
"numpy",
+ "pandas",
"torch",
"matplotlib",
"colorlog",
- "pandas",
"wetterdienst",
"h5py",
- "seaborn"
+ "seaborn",
+ "deepdiff",
+ "openpyxl",
]
[project.optional-dependencies]
@@ -37,7 +39,7 @@ dev = [
"sphinx-autoapi",
"sphinx-rtd-theme",
"sphinxcontrib-napoleon",
- "sphinxemoji",
+ "sphinxemoji"
]
[project.urls]
diff --git a/scripts/download_dwd_data_and_generate_stac.py b/scripts/download_dwd_data_and_generate_stac.py
new file mode 100755
index 00000000..a63be219
--- /dev/null
+++ b/scripts/download_dwd_data_and_generate_stac.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+from pathlib import Path
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.dwd_stac_item import make_dwd_item
+from paint.data.dwd_weather import DWDWeatherData
+from paint.util.utils import to_utc_single
+
+
+def main(arguments: argparse.Namespace) -> None:
+ """
+ Download and save the DWD weather as an HDF5 file and generate the associate STAC item.
+
+ This script downloads and saves the DWD weather as an HDF5 file and then generates the appropriate STAC item.
+ Additionally, the metadata for this item is saved for collection creation later.
+
+ Parameters
+ ----------
+ arguments: argparse.Namespace
+ The command line arguments.
+
+ """
+ # check if saved metadata exists and load if required
+ weather_items_path = Path(f"{PAINT_ROOT}/TEMPDATA/weather_items.csv")
+ if weather_items_path.exists():
+ weather_items = pd.read_csv(weather_items_path)
+ else:
+ weather_items_path.parent.mkdir(parents=True, exist_ok=True)
+ weather_items = pd.DataFrame(
+ columns=[
+ mappings.TITLE_KEY,
+ mappings.URL_KEY,
+ mappings.DWD_START,
+ mappings.DWD_END,
+ mappings.LATITUDE_KEY,
+ mappings.LONGITUDE_KEY,
+ mappings.ELEVATION,
+ ]
+ )
+
+ dwd_weather = DWDWeatherData(
+ parameters_10min=arguments.parameters_10min,
+ parameters_1h=arguments.parameters_1h,
+ station_ids=arguments.station_ids,
+ start_date=arguments.start_date,
+ end_date=arguments.end_date,
+ output_path=arguments.output_path,
+ file_name=arguments.file_name,
+ ts_shape=arguments.ts_shape,
+ ts_humanize=arguments.ts_humanize,
+ ts_si_units=arguments.ts_si_units,
+ )
+ metadata = dwd_weather.download_and_save_data()
+ metadata = metadata.rename(
+ columns={
+ "station_id": mappings.DWD_STATION_ID,
+ "latitude": mappings.LATITUDE_KEY,
+ "longitude": mappings.LONGITUDE_KEY,
+ "height": mappings.ELEVATION,
+ "name": mappings.DWD_STATION_NAME,
+ }
+ )
+ metadata[mappings.DWD_START] = to_utc_single(arguments.start_date)
+ metadata[mappings.DWD_END] = to_utc_single(arguments.end_date)
+ for _, data in metadata.iterrows():
+ dwd_stac = make_dwd_item(data=data)
+ save_path = Path(arguments.output_path) / (mappings.DWD_STAC_NAME + ".json")
+ save_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(save_path, "w") as handle:
+ json.dump(dwd_stac, handle)
+ dwd_url = mappings.DWD_STAT_URL
+ weather_items.loc[len(weather_items)] = [
+ "DWD weather data",
+ dwd_url,
+ data[mappings.DWD_START],
+ data[mappings.DWD_END],
+ data[mappings.LATITUDE_KEY],
+ data[mappings.LONGITUDE_KEY],
+ data[mappings.ELEVATION],
+ ]
+ weather_items.to_csv(weather_items_path, index=False)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--parameters_10min",
+ default=[
+ "radiation_sky_short_wave_diffuse",
+ "radiation_global",
+ "sunshine_duration",
+ "radiation_sky_long_wave",
+ ],
+ )
+ parser.add_argument(
+ "--parameters_1h",
+ default=[
+ "cloud_cover_total",
+ "humidity",
+ "pressure_vapor",
+ "visibility_range",
+ "weather",
+ ],
+ )
+ parser.add_argument("--station_ids", default=["15000"])
+ parser.add_argument("--start_date", type=str, default="2021-04-01")
+ parser.add_argument("--end_date", type=str, default="2024-03-01")
+ parser.add_argument(
+ "--output_path", type=str, default=f"{PAINT_ROOT}/ConvertedData/Weather"
+ )
+ parser.add_argument("--file_name", type=str, default="dwd-weather.h5")
+ parser.add_argument("--ts_shape", type=str, default="long")
+ parser.add_argument("--ts_humanize", action="store_true", default=True)
+ parser.add_argument("--ts_si_units", action="store_false", default=False)
+ args = parser.parse_args()
+ main(arguments=args)
diff --git a/scripts/generate_calibration_stacs.py b/scripts/generate_calibration_stacs.py
new file mode 100755
index 00000000..f013719f
--- /dev/null
+++ b/scripts/generate_calibration_stacs.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+from pathlib import Path
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.calibration_stac import (
+ make_calibration_collection,
+ make_calibration_item,
+)
+from paint.util.utils import (
+ calculate_azimuth_and_elevation,
+ heliostat_id_to_name,
+ to_utc,
+)
+
+
+def main(arguments: argparse.Namespace) -> None:
+ """
+ Generate STAC items and collections for calibration images.
+
+ Parameters
+ ----------
+ arguments: argparse.Namespace
+ The arguments containing input and output path.
+ """
+ # check if saved metadata exists and load if required
+ calibration_items_path = Path(f"{PAINT_ROOT}/TEMPDATA/calibration_items.csv")
+ if calibration_items_path.exists():
+ calibration_items = pd.read_csv(calibration_items_path)
+ else:
+ calibration_items_path.parent.mkdir(parents=True, exist_ok=True)
+ calibration_items = pd.DataFrame(
+ columns=[
+ mappings.HELIOSTAT_ID,
+ mappings.TITLE_KEY,
+ mappings.URL_KEY,
+ mappings.CREATED_AT,
+ mappings.AZIMUTH,
+ mappings.SUN_ELEVATION,
+ mappings.SYSTEM,
+ ]
+ )
+
+ # read in the data in CSV
+ data = pd.read_csv(arguments.input)
+ data.set_index(mappings.ID_INDEX, inplace=True)
+
+ # convert all timestamps to UTC
+ data[mappings.CREATED_AT] = to_utc(data[mappings.CREATED_AT])
+ data[mappings.UPDATED_AT] = to_utc(data[mappings.UPDATED_AT])
+
+ # compute azimuth and elevation
+ azimuth, elevation = calculate_azimuth_and_elevation(data)
+ data[mappings.AZIMUTH] = azimuth
+ data[mappings.SUN_ELEVATION] = elevation
+ data[mappings.HELIOSTAT_ID] = data[mappings.HELIOSTAT_ID].map(heliostat_id_to_name)
+
+ # generate the STAC item files for each image
+ for image, heliostat_data in data.iterrows():
+ assert isinstance(image, int)
+ stac_item = make_calibration_item(image=image, heliostat_data=heliostat_data)
+ url = mappings.CALIBRATION_ITEM_URL % (
+ heliostat_data[mappings.HELIOSTAT_ID],
+ image,
+ )
+ calibration_items.loc[len(calibration_items)] = [
+ heliostat_data[mappings.HELIOSTAT_ID],
+ f"calibration image {image} and associated motor positions for heliostat "
+ f"{heliostat_data[mappings.HELIOSTAT_ID]}",
+ url,
+ heliostat_data[mappings.CREATED_AT],
+ heliostat_data[mappings.AZIMUTH],
+ heliostat_data[mappings.SUN_ELEVATION],
+ heliostat_data[mappings.SYSTEM],
+ ]
+ calibration_item_stac_path = (
+ Path(arguments.output)
+ / heliostat_data[mappings.HELIOSTAT_ID]
+ / mappings.SAVE_CALIBRATION
+ / (
+ mappings.CALIBRATION_ITEM
+ % (heliostat_data[mappings.HELIOSTAT_ID], image)
+ )
+ )
+ calibration_item_stac_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(calibration_item_stac_path, "w") as handle:
+ json.dump(stac_item, handle)
+
+ # save associated motorpositions
+ motor_pos_data = {
+ mappings.AXIS1_MOTOR: heliostat_data[mappings.AXIS1_MOTOR],
+ mappings.AXIS2_MOTOR: heliostat_data[mappings.AXIS2_MOTOR],
+ }
+ save_motor_pos_path = (
+ Path(arguments.output)
+ / heliostat_data[mappings.HELIOSTAT_ID]
+ / mappings.SAVE_CALIBRATION
+ / (
+ mappings.MOTOR_POS_NAME % (heliostat_data[mappings.HELIOSTAT_ID], image)
+ + ".json"
+ )
+ )
+ save_motor_pos_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(save_motor_pos_path, "w") as handle:
+ json.dump(motor_pos_data, handle)
+
+ # create the STAC collections
+ for heliostat, data in calibration_items.groupby(mappings.HELIOSTAT_ID):
+ assert isinstance(heliostat, str)
+ collection = make_calibration_collection(heliostat_id=heliostat, data=data)
+ save_path = (
+ Path(arguments.output)
+ / heliostat
+ / mappings.SAVE_CALIBRATION
+ / (mappings.CALIBRATION_COLLECTION_FILE % heliostat)
+ )
+ save_path.parent.mkdir(exist_ok=True, parents=True)
+ with open(save_path, "w") as out:
+ json.dump(collection, out)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "-i",
+ "--input",
+ type=Path,
+ default=f"{PAINT_ROOT}/ExampleDataKIT/dataframe.csv",
+ )
+ parser.add_argument(
+ "-o",
+ "--output",
+ type=Path,
+ default=f"{PAINT_ROOT}/ConvertedData/",
+ )
+ args = parser.parse_args()
+
+ main(args)
diff --git a/scripts/generate_catalog.py b/scripts/generate_catalog.py
new file mode 100755
index 00000000..5aea7bb4
--- /dev/null
+++ b/scripts/generate_catalog.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+import sys
+from pathlib import Path
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.catalog_stac import make_catalog
+from paint.util.preprocessing import (
+ load_and_format_heliostat_axis_data,
+ load_and_format_heliostat_positions,
+ merge_and_sort_df,
+)
+
+
+def main(arguments: argparse.Namespace) -> None:
+ """
+ Save a catalog to disk.
+
+ Parameters
+ ----------
+ arguments: argparse.Namespace
+ The arguments containing the output path.
+ """
+ arguments.output_path.mkdir(parents=True, exist_ok=True)
+ df_axis = load_and_format_heliostat_axis_data(arguments)
+ df_position = load_and_format_heliostat_positions(arguments)
+ df = merge_and_sort_df(df_heliostat_positions=df_position, df_axis=df_axis)
+ catalog_stac = make_catalog(data=df)
+ with open(arguments.output_path / mappings.CATALOG_FILE, "w") as handle:
+ json.dump(catalog_stac, handle)
+
+
+if __name__ == "__main__":
+ # Simulate command-line arguments for testing or direct script execution
+ sys.argv = [
+ "generate_catalog.py",
+ "--input_axis",
+ f"{PAINT_ROOT}/ExampleDataKIT/axis_data.csv",
+ "--input_position",
+ f"{PAINT_ROOT}/ExampleDataKIT/Heliostatpositionen_xyz.xlsx",
+ "--output_path",
+ f"{PAINT_ROOT}/ConvertedData",
+ ]
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--input_axis", type=Path, default=f"f{PAINT_ROOT}/ExampleDataKIT/axis_data.csv"
+ )
+ parser.add_argument(
+ "--input_position",
+ type=Path,
+ default=f"{PAINT_ROOT}/ExampleDataKIT/Heliostatpositionen_xyz.xlsx",
+ )
+ parser.add_argument(
+ "--output_path", type=Path, default=f"{PAINT_ROOT}/ConvertedData"
+ )
+ args = parser.parse_args()
+ main(args)
diff --git a/scripts/generate_deflectometry_stacs_and_facet_items.py b/scripts/generate_deflectometry_stacs_and_facet_items.py
new file mode 100755
index 00000000..b3fa0a82
--- /dev/null
+++ b/scripts/generate_deflectometry_stacs_and_facet_items.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+import shutil
+import sys
+from pathlib import Path
+from typing import Tuple
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.binary_extractor import BinaryExtractor
+from paint.data.deflectometry_stac import (
+ make_deflectometry_collection,
+ make_deflectometry_item,
+)
+from paint.data.facet_stac import make_facet_item
+from paint.util.preprocessing import load_and_format_heliostat_positions
+
+
+def extract_data_and_generate_stacs(
+ arguments: argparse.Namespace,
+ input_path: Path,
+ df_heliostat_positions: pd.DataFrame,
+ deflectometry_items: pd.DataFrame,
+ properties_items: pd.DataFrame,
+) -> Tuple[pd.DataFrame, pd.DataFrame]:
+ """
+ Extract the binary data and generate STACS.
+
+ This function extracts the binary data from the ``.binp`` file. After this data is extracted, it also generates
+ the STAC items for the deflectometry measurement and the heliostat facet properties. Additionally, it collects the
+ summary PDFs for the deflectometry measurement, renames them, copies them to the appropriate location and generates
+ a STAC for this PDF.
+
+ Parameters
+ ----------
+ arguments : argparse.Namespace
+ Arguments passed from the command line.
+ input_path : pathlib.Path
+ Path to the ``.binp`` file.
+ df_heliostat_positions : pd.DataFrame
+ A dataframe containing information on the heliostat positions.
+ deflectometry_items : pd.DataFrame
+ A dataframe containing the metadata for all items in the deflectometry collection.
+ properties_items : pd.DataFrame
+ A dataframe containing the metadata for all items in the heliostat properties collection.
+
+ Returns
+ -------
+ pd.DataFrame:
+ A dataframe containing the metadata for all items in the deflectometry collection.
+ pd.DataFrame:
+ A dataframe containing the metadata for all items in the heliostat properties collection.
+ """
+ # Extract binary data
+ converter = BinaryExtractor(
+ input_path=input_path,
+ output_path=arguments.output_path,
+ surface_header_name=arguments.surface_header_name,
+ facet_header_name=arguments.facet_header_name,
+ points_on_facet_struct_name=arguments.points_on_facet_struct_name,
+ )
+ converter.convert_to_h5_and_extract_properties()
+ metadata = df_heliostat_positions.loc[converter.heliostat_id][
+ [
+ mappings.EAST_KEY,
+ mappings.NORTH_KEY,
+ mappings.ALTITUDE_KEY,
+ mappings.HEIGHT_ABOVE_GROUND,
+ ]
+ ]
+ metadata[mappings.CREATED_AT] = converter.deflectometry_created_at
+
+ # STAC contains all deflectometry items, therefore, only create the stac once after the raw conversion
+ if converter.raw_data:
+ # find the associated PDF deflectometry results summary and copy it to the correct location with
+ # the correct name
+ split_name = input_path.name.split("_")
+ pdf_name = (
+ "_".join(split_name[0:3])
+ + "_Result_"
+ + split_name[-1].split(".")[0]
+ + ".pdf"
+ )
+ new_pdf_name = (
+ Path(arguments.output_path)
+ / converter.heliostat_id
+ / mappings.SAVE_DEFLECTOMETRY
+ / (
+ mappings.DEFLECTOMETRY_PDF_NAME
+ % (converter.heliostat_id, converter.deflectometry_created_at)
+ )
+ )
+ shutil.copy2(input_path.parent / pdf_name, new_pdf_name)
+
+ # create stac and extract latitude and longitude
+ lat_lon, stac_item = make_deflectometry_item(
+ heliostat_key=converter.heliostat_id,
+ heliostat_data=metadata,
+ )
+ # save item metadata for collection creation later
+ url = mappings.DEFLECTOMETRY_ITEM_URL % (
+ converter.heliostat_id,
+ converter.deflectometry_created_at,
+ )
+ deflectometry_items.loc[len(deflectometry_items)] = [
+ converter.heliostat_id,
+ f"Deflectometry measurements for {converter.heliostat_id} at {converter.deflectometry_created_at}",
+ url,
+ converter.deflectometry_created_at,
+ lat_lon[0],
+ lat_lon[1],
+ metadata[mappings.ALTITUDE_KEY],
+ ]
+
+ # save the deflectometry measurement stac
+ save_deflectometry_path = (
+ Path(arguments.output_path)
+ / converter.heliostat_id
+ / mappings.SAVE_DEFLECTOMETRY
+ / (
+ mappings.DEFLECTOMETRY_ITEM
+ % (converter.heliostat_id, converter.deflectometry_created_at)
+ )
+ )
+ save_deflectometry_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(save_deflectometry_path, mode="w") as handle:
+ json.dump(stac_item, handle)
+
+ # extract the facet properties item STAC
+ facet_stac = make_facet_item(
+ heliostat_key=converter.heliostat_id, heliostat_data=metadata
+ )
+
+ # save the facet properties metadata for collection creation later
+ facet_url = mappings.FACET_PROPERTIES_ITEM_ITEM_URL % converter.heliostat_id
+ properties_items.loc[len(properties_items)] = [
+ converter.heliostat_id,
+ f"facet properties for {converter.heliostat_id}",
+ facet_url,
+ converter.deflectometry_created_at,
+ lat_lon[0],
+ lat_lon[1],
+ metadata[mappings.ALTITUDE_KEY],
+ ]
+
+ # save facet properties STAC
+ save_facet_path = (
+ Path(arguments.output_path)
+ / converter.heliostat_id
+ / mappings.SAVE_PROPERTIES
+ / (mappings.FACET_PROPERTIES_ITEM % converter.heliostat_id)
+ )
+ save_facet_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(save_facet_path, mode="w") as handle:
+ json.dump(facet_stac, handle)
+
+ return deflectometry_items, properties_items
+
+
+def main(arguments: argparse.Namespace):
+ """
+ Generate deflectometry STACS and facet item STACS.
+
+ This function converts binary data to HDF5 for deflectometry measurements and JSON for facet properties.
+ Additionally, the deflectometry results summary PDF is moved to the correct location and renamed. Also, the STAC
+ items and collections for deflectometry measurements are created and the STAC items for the facet properties.
+ Finally, the metadata for the facet properties STAC items is saved for later collection creation.
+
+ Parameters
+ ----------
+ arguments : argparse.Namespace
+ The command line arguments.
+ """
+ # check if saved metadata exists and load if required
+ deflectometry_items_path = Path(f"{PAINT_ROOT}/TEMPDATA/deflectometry_items.csv")
+ properties_items_path = Path(f"{PAINT_ROOT}/TEMPDATA/properties_items.csv")
+ if deflectometry_items_path.exists():
+ deflectometry_items = pd.read_csv(deflectometry_items_path)
+ else:
+ deflectometry_items_path.parent.mkdir(parents=True, exist_ok=True)
+ deflectometry_items = pd.DataFrame(
+ columns=[
+ mappings.HELIOSTAT_ID,
+ mappings.TITLE_KEY,
+ mappings.URL_KEY,
+ mappings.CREATED_AT,
+ mappings.LATITUDE_KEY,
+ mappings.LONGITUDE_KEY,
+ mappings.ELEVATION,
+ ]
+ )
+ if properties_items_path.exists():
+ properties_items = pd.read_csv(properties_items_path)
+ else:
+ properties_items_path.parent.mkdir(parents=True, exist_ok=True)
+ properties_items = pd.DataFrame(
+ columns=[
+ mappings.HELIOSTAT_ID,
+ mappings.TITLE_KEY,
+ mappings.URL_KEY,
+ mappings.CREATED_AT,
+ mappings.LATITUDE_KEY,
+ mappings.LONGITUDE_KEY,
+ mappings.ELEVATION,
+ ]
+ )
+
+ # load heliostat position and reformat for easy parsing
+ df_heliostat_positions = load_and_format_heliostat_positions(arguments)
+
+ directory = Path(arguments.input_folder)
+ binp_files = directory.rglob("*.binp")
+
+ for input_path in binp_files:
+ deflectometry_items, properties_items = extract_data_and_generate_stacs(
+ arguments=arguments,
+ input_path=input_path,
+ df_heliostat_positions=df_heliostat_positions,
+ deflectometry_items=deflectometry_items,
+ properties_items=properties_items,
+ )
+
+ for heliostat, data in deflectometry_items.groupby(mappings.HELIOSTAT_ID):
+ assert isinstance(heliostat, str)
+ collection = make_deflectometry_collection(heliostat_id=heliostat, data=data)
+ save_path = (
+ Path(arguments.output_path)
+ / heliostat
+ / mappings.SAVE_DEFLECTOMETRY
+ / (mappings.DEFLECTOMETRY_COLLECTION_FILE % heliostat)
+ )
+ save_path.parent.mkdir(exist_ok=True, parents=True)
+ with open(save_path, "w") as out:
+ json.dump(collection, out)
+
+ # save facet items for creating collections
+ properties_items.to_csv(properties_items_path, index=False)
+
+
+if __name__ == "__main__":
+ # Simulate command-line arguments for testing or direct script execution
+ sys.argv = [
+ "generate_deflectometry_stacs_and_facet_items.py",
+ "--input_folder",
+ f"{PAINT_ROOT}/ExampleDataKIT",
+ "-i_position",
+ f"{PAINT_ROOT}/ExampleDataKIT/Heliostatpositionen_xyz.xlsx",
+ "--output_path",
+ f"{PAINT_ROOT}/ConvertedData",
+ "--surface_header_name",
+ "=5f2I2f",
+ "--facet_header_name",
+ "=i9fI",
+ "--points_on_facet_struct_name",
+ "=7f",
+ ]
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--input_folder", type=Path, help="Parent folder to search for binary folders."
+ )
+ parser.add_argument(
+ "-i_position",
+ "--input_position",
+ type=Path,
+ help="Path to the heliostat position input file",
+ )
+ parser.add_argument(
+ "--output_path",
+ type=Path,
+ help="Path to save the output files",
+ )
+ parser.add_argument(
+ "--surface_header_name",
+ type=str,
+ help="The header of the surface struct",
+ )
+ parser.add_argument(
+ "--facet_header_name",
+ type=str,
+ help="The header of the facet struct",
+ )
+ parser.add_argument(
+ "--points_on_facet_struct_name",
+ type=str,
+ help="The header of the points on the facet struct",
+ )
+ args = parser.parse_args()
+ main(arguments=args)
diff --git a/scripts/generate_heliostat_catalogs.py b/scripts/generate_heliostat_catalogs.py
new file mode 100755
index 00000000..08fa5538
--- /dev/null
+++ b/scripts/generate_heliostat_catalogs.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+import sys
+from pathlib import Path
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.heliostat_catalog_stac import make_heliostat_catalog
+from paint.util.preprocessing import (
+ load_and_format_heliostat_axis_data,
+ load_and_format_heliostat_positions,
+ merge_and_sort_df,
+)
+
+
+def main(arguments: argparse.Namespace) -> None:
+ """
+ Generate and save a catalog for each heliostat.
+
+ Parameters
+ ----------
+ arguments: argparse.Namespace
+ The command line arguments.
+ """
+ arguments.output_path.mkdir(parents=True, exist_ok=True)
+ df_axis = load_and_format_heliostat_axis_data(arguments)
+ df_position = load_and_format_heliostat_positions(arguments)
+ df = merge_and_sort_df(df_heliostat_positions=df_position, df_axis=df_axis)
+ for heliostat, _ in df.iterrows():
+ assert isinstance(heliostat, str)
+ helio_catalog = make_heliostat_catalog(heliostat_id=heliostat)
+ save_helio_path = (
+ Path(arguments.output_path)
+ / heliostat
+ / (mappings.HELIOSTAT_CATALOG_FILE % heliostat)
+ )
+ save_helio_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(save_helio_path, "w") as handle:
+ json.dump(helio_catalog, handle)
+
+
+if __name__ == "__main__":
+ # Simulate command-line arguments for testing or direct script execution
+ sys.argv = [
+ "generate_heliostat_catalog.py",
+ "--input_axis",
+ f"{PAINT_ROOT}/ExampleDataKIT/axis_data.csv",
+ "--input_position",
+ f"{PAINT_ROOT}/ExampleDataKIT/Heliostatpositionen_xyz.xlsx",
+ "--output_path",
+ f"{PAINT_ROOT}/ConvertedData",
+ ]
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--input_axis", type=Path, default=f"f{PAINT_ROOT}/ExampleDataKIT/axis_data.csv"
+ )
+ parser.add_argument(
+ "--input_position",
+ type=Path,
+ default=f"{PAINT_ROOT}/ExampleDataKIT/Heliostatpositionen_xyz.xlsx",
+ )
+ parser.add_argument(
+ "--output_path", type=Path, default=f"{PAINT_ROOT}/ConvertedData"
+ )
+ args = parser.parse_args()
+ main(args)
diff --git a/scripts/generate_kinematic_stacs.py b/scripts/generate_kinematic_stacs.py
new file mode 100755
index 00000000..04f42340
--- /dev/null
+++ b/scripts/generate_kinematic_stacs.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+import sys
+from pathlib import Path
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.kinematic_stac import make_kinematic_item
+from paint.util.preprocessing import (
+ load_and_format_heliostat_axis_data,
+ load_and_format_heliostat_positions,
+ merge_and_sort_df,
+)
+from paint.util.utils import (
+ to_utc_single,
+)
+
+
+def extract_kinematic_data_and_generate_stacs(
+ arguments: argparse.Namespace,
+ heliostat_id: str,
+ kinematic_data: pd.Series,
+ properties_items: pd.DataFrame,
+) -> pd.DataFrame:
+ """
+ Extract the kinematic data and generate associated stac items.
+
+ This function extracts the kinematic data for a given heliostat, saves this data as a json file, and also generates
+ the associated stac item.
+
+ Parameters
+ ----------
+ arguments : argparse.Namespace
+ The command line arguments.
+ heliostat_id : str
+ The id of the heliostat considered.
+ kinematic_data : pd.Series
+ The kinematic data for the heliostat.
+ properties_items : pd.DataFrame
+ The dataframe containing the metadata for each heliostat property to be used for collection creation later.
+
+ Returns
+ -------
+ pd.DataFrame
+ The dataframe containing the metadata for each heliostat property to be used for collection creation later.
+ """
+ lat_lon, kinematic_stac = make_kinematic_item(
+ heliostat_key=heliostat_id, heliostat_data=kinematic_data
+ )
+
+ # save item metadata for collection creation later
+ url = mappings.KINEMATIC_PROPERTIES_ITEM_URL % heliostat_id
+ properties_items.loc[len(properties_items)] = [
+ heliostat_id,
+ f"kinematic properties for {heliostat_id}",
+ url,
+ to_utc_single(kinematic_data[mappings.CREATED_AT]),
+ lat_lon[0],
+ lat_lon[1],
+ kinematic_data[mappings.ALTITUDE_KEY],
+ ]
+
+ # save kinematic properties STAC
+ save_kinematic_stac_path = (
+ Path(arguments.output_path)
+ / heliostat_id
+ / mappings.SAVE_PROPERTIES
+ / (mappings.KINEMATIC_PROPERTIES_ITEM % heliostat_id)
+ )
+ save_kinematic_stac_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(save_kinematic_stac_path, mode="w") as handle:
+ json.dump(kinematic_stac, handle)
+
+ # convert kinematic data to dict and remove metadata
+ dict_data = kinematic_data.to_dict()
+ for key_to_remove in [
+ mappings.CREATED_AT,
+ mappings.EAST_KEY,
+ mappings.NORTH_KEY,
+ mappings.ALTITUDE_KEY,
+ mappings.FIELD_ID,
+ mappings.HEIGHT_ABOVE_GROUND,
+ ]:
+ dict_data.pop(key_to_remove, None)
+
+ # save kinematic properties measurements
+ save_kinematic_properties_path = (
+ Path(arguments.output_path)
+ / heliostat_id
+ / mappings.SAVE_PROPERTIES
+ / (heliostat_id + mappings.KINEMATIC_PROPERTIES_SUFFIX)
+ )
+ save_kinematic_properties_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(save_kinematic_properties_path, mode="w") as handle:
+ json.dump(dict_data, handle)
+ return properties_items
+
+
+def main(arguments: argparse.Namespace):
+ """
+ Generate kinematic properties stac items and save raw data.
+
+ This function extracts the kinematic properties data for each heliostat and saves this as a json file. Additionally,
+ the stac items for each of these files are automatically generated. Finally, the metadata for each of these stac
+ items is saved for collection creation later.
+
+ Parameters
+ ----------
+ arguments : argparse.Namespace
+ The command line arguments.
+ """
+ # check if saved metadata exists and load if required
+ properties_items_path = Path(f"{PAINT_ROOT}/TEMPDATA/properties_items.csv")
+ if properties_items_path.exists():
+ properties_items = pd.read_csv(properties_items_path)
+ else:
+ properties_items_path.parent.mkdir(parents=True, exist_ok=True)
+ properties_items = pd.DataFrame(
+ columns=[
+ mappings.HELIOSTAT_ID,
+ mappings.TITLE_KEY,
+ mappings.URL_KEY,
+ mappings.CREATED_AT,
+ mappings.LATITUDE_KEY,
+ mappings.LONGITUDE_KEY,
+ mappings.ELEVATION,
+ ]
+ )
+
+ # load heliostat position and axis data and reformat for easy parsing
+ df_heliostat_positions = load_and_format_heliostat_positions(arguments)
+ df_axis = load_and_format_heliostat_axis_data(arguments)
+ df_concatenated = merge_and_sort_df(df_heliostat_positions, df_axis)
+
+ # extract kinematic properties data and STAC
+ for key, data in df_concatenated.iterrows():
+ assert isinstance(key, str)
+ properties_items = extract_kinematic_data_and_generate_stacs(
+ arguments=arguments,
+ heliostat_id=key,
+ kinematic_data=data,
+ properties_items=properties_items,
+ )
+
+ properties_items.to_csv(properties_items_path, index=False)
+
+
+if __name__ == "__main__":
+ # Simulate command-line arguments for testing or direct script execution
+ sys.argv = [
+ "generate_deflectometry_stacs_and_facet_items.py",
+ "--input_position",
+ f"{PAINT_ROOT}/ExampleDataKIT/Heliostatpositionen_xyz.xlsx",
+ "--input_axis",
+ f"{PAINT_ROOT}/ExampleDataKIT/axis_data.csv",
+ "--output_path",
+ f"{PAINT_ROOT}/ConvertedData",
+ ]
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--input_position",
+ type=Path,
+ help="Path to the heliostat position input file",
+ )
+ parser.add_argument(
+ "--input_axis",
+ type=Path,
+ help="Path to the axis data input file",
+ )
+ parser.add_argument(
+ "--output_path",
+ type=Path,
+ help="Path to save the output files",
+ )
+ args = parser.parse_args()
+ main(arguments=args)
diff --git a/scripts/generate_properties_collection.py b/scripts/generate_properties_collection.py
new file mode 100755
index 00000000..bd269de1
--- /dev/null
+++ b/scripts/generate_properties_collection.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+import sys
+from pathlib import Path
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.properties_collection_stac import make_properties_collection
+
+
+def main(arguments: argparse.Namespace) -> None:
+ """
+ Generate heliostat properties collections.
+
+ This function uses the metadata for each item in the heliostat properties collection to generate a STAC collection.
+
+ Parameters
+ ----------
+ arguments : argparse.Namespace
+ The command line arguments.
+ """
+ df = pd.read_csv(arguments.input_path)
+
+ for heliostat, data in df.groupby(mappings.HELIOSTAT_ID):
+ assert isinstance(heliostat, str)
+ collection = make_properties_collection(heliostat_id=heliostat, data=data)
+ save_path = (
+ Path(arguments.output_path)
+ / heliostat
+ / mappings.SAVE_PROPERTIES
+ / (mappings.HELIOSTAT_PROPERTIES_COLLECTION_FILE % heliostat)
+ )
+ save_path.parent.mkdir(exist_ok=True, parents=True)
+ with open(save_path, "w") as out:
+ json.dump(collection, out)
+
+
+if __name__ == "__main__":
+ sys.argv = [
+ "generate_properties_collection.py",
+ "--input_path",
+ f"{PAINT_ROOT}/TEMPDATA/properties_items.csv",
+ "--output_path",
+ f"{PAINT_ROOT}/ConvertedData",
+ ]
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--input_path",
+ type=Path,
+ help="Input file containing the heliostat properties items metadata.",
+ )
+ parser.add_argument(
+ "--output_path",
+ type=Path,
+ help="Path to save the output files",
+ )
+ args = parser.parse_args()
+ main(arguments=args)
diff --git a/scripts/generate_weather_collection.py b/scripts/generate_weather_collection.py
new file mode 100755
index 00000000..a2f96b92
--- /dev/null
+++ b/scripts/generate_weather_collection.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+
+import argparse
+import json
+import sys
+from pathlib import Path
+
+import pandas as pd
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.weather_collection_stac import make_weather_collection
+
+
+def main(arguments: argparse.Namespace) -> None:
+ """
+ Generate a weather collection.
+
+ This function uses the metadata for each item in the weather collection to generate a STAC collection.
+
+ Parameters
+ ----------
+ arguments : argparse.Namespace
+ The command line arguments.
+ """
+ df = pd.read_csv(arguments.input_path)
+
+ collection = make_weather_collection(data=df)
+ save_path = Path(arguments.output_path) / mappings.WEATHER_COLLECTION_FILE
+ save_path.parent.mkdir(exist_ok=True, parents=True)
+ with open(save_path, "w") as out:
+ json.dump(collection, out)
+
+
+if __name__ == "__main__":
+ sys.argv = [
+ "generate_weather_collection.py",
+ "--input_path",
+ f"{PAINT_ROOT}/TEMPDATA/weather_items.csv",
+ "--output_path",
+ f"{PAINT_ROOT}/ConvertedData/Weather",
+ ]
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--input_path",
+ type=Path,
+ help="Input file containing the weather items metadata.",
+ )
+ parser.add_argument(
+ "--output_path",
+ type=Path,
+ help="Path to save the output files",
+ )
+ args = parser.parse_args()
+ main(arguments=args)
diff --git a/tests/data/test_binary_extractor.py b/tests/data/test_binary_extractor.py
new file mode 100644
index 00000000..a2cfb42c
--- /dev/null
+++ b/tests/data/test_binary_extractor.py
@@ -0,0 +1,118 @@
+import json
+import os
+import tempfile
+from pathlib import Path
+
+import h5py
+import pytest
+import torch
+
+import paint.util.paint_mappings as mappings
+from paint import PAINT_ROOT
+from paint.data.binary_extractor import BinaryExtractor
+from paint.util.utils import to_utc_single
+
+
+@pytest.mark.parametrize(
+ "test_data_path, surface_header_name, facet_header_name, points_on_facet_struct_name",
+ [
+ (
+ Path(
+ f"{PAINT_ROOT}/tests/data/test_data/Helio_AA23_test_data_230918133925.binp"
+ ),
+ "=5f2I2f",
+ "=i9fI",
+ "=7f",
+ ),
+ ],
+)
+def test_binary_extractor(
+ test_data_path: Path,
+ surface_header_name: str,
+ facet_header_name: str,
+ points_on_facet_struct_name: str,
+):
+ """
+ Test the binary extractor.
+
+ This test extracts the deflectometry data to h5 and the heliostat properties to json and saves these files in a
+ temporary directory. Then, we test if the appropriate keys are available and the shape of the data matches the
+ expected shape.
+
+ Parameters
+ ----------
+ test_data_path : str
+ The path to the test binary file.
+ surface_header_name : str
+ The name for the surface header in the test binary file.
+ facet_header_name : str
+ The name for the facet header in the test binary file.
+ points_on_facet_struct_name : str
+ The name of the point on facet structure in the test binary file.
+ """
+ with tempfile.TemporaryDirectory() as temp_dir:
+ output_path = temp_dir
+ file_name = (
+ test_data_path.name.split("_")[1]
+ + "-"
+ + str(to_utc_single(test_data_path.name.split("_")[-1].split(".")[0]))
+ + mappings.DEFLECTOMETRY_SUFFIX
+ )
+ json_handle = (
+ test_data_path.name.split("_")[1] + mappings.FACET_PROPERTIES_SUFFIX
+ )
+ converter = BinaryExtractor(
+ input_path=test_data_path,
+ output_path=output_path,
+ surface_header_name=surface_header_name,
+ facet_header_name=facet_header_name,
+ points_on_facet_struct_name=points_on_facet_struct_name,
+ )
+ converter.convert_to_h5_and_extract_properties()
+
+ # check the HDF5 file
+ file_path = (
+ Path(output_path)
+ / converter.heliostat_id
+ / mappings.SAVE_DEFLECTOMETRY
+ / file_name
+ )
+ assert os.path.exists(file_path)
+
+ # check the HDF5 file
+ json_file_path = (
+ Path(output_path)
+ / converter.heliostat_id
+ / mappings.SAVE_PROPERTIES
+ / json_handle
+ )
+ assert os.path.exists(json_file_path)
+
+ # check the extracted heliostat properties are correct
+ # Open the file and load the JSON data
+ with open(json_file_path, "r") as file:
+ data = json.load(file)
+ assert data[mappings.NUM_FACETS] == 4
+ num_facets = data[mappings.NUM_FACETS]
+ for i in range(num_facets):
+ assert torch.tensor(
+ data[mappings.FACETS_LIST][i][mappings.TRANSLATION_VECTOR]
+ ).shape == torch.Size([3])
+ assert torch.tensor(
+ data[mappings.FACETS_LIST][i][mappings.CANTING_E]
+ ).shape == torch.Size([3])
+ assert torch.tensor(
+ data[mappings.FACETS_LIST][i][mappings.CANTING_N]
+ ).shape == torch.Size([3])
+
+ # check the extracted deflectometry shapes are correct
+ with h5py.File(file_path, "r") as file:
+ for i in range(num_facets):
+ assert torch.tensor(
+ file[f"{mappings.FACET_KEY}{i+1}"][mappings.SURFACE_NORMAL_KEY],
+ dtype=torch.float32,
+ ).shape == torch.Size([80755, 3])
+ assert torch.tensor(
+ file[f"{mappings.FACET_KEY}{i + 1}"][mappings.SURFACE_POINT_KEY],
+ dtype=torch.float32,
+ ).shape == torch.Size([80755, 3])
diff --git a/tests/data/test_calibration_stac.py b/tests/data/test_calibration_stac.py
new file mode 100644
index 00000000..c008c2a1
--- /dev/null
+++ b/tests/data/test_calibration_stac.py
@@ -0,0 +1,320 @@
+from typing import Tuple
+
+import deepdiff
+import pandas as pd
+import pytest
+
+import paint.util.paint_mappings as mappings
+from paint.data.calibration_stac import (
+ make_calibration_collection,
+ make_calibration_item,
+)
+
+
+@pytest.fixture
+def calibration_item_data() -> Tuple[int, pd.Series]:
+ """
+ Make a fixture with data for generating a calibration item.
+
+ Returns
+ -------
+ int
+ The image ID.
+ pd.Series
+ The data for the calibration STAC item.
+ """
+ data = {
+ "FieldId": 1,
+ "HeliostatId": "BC52",
+ "CameraId": 0,
+ "CalibrationTargetId": 7,
+ "System": "HeliOS.FDM",
+ "Version": 1.0,
+ "Axis1MotorPosition": 44350,
+ "Axis2MotorPosition": 59345,
+ "ImageOffsetX": 343.2206783955819,
+ "ImageOffsetY": 336.57737110870715,
+ "TargetOffsetE": 0.8986945895769931,
+ "TargetOffsetN": -3.2362078192934702,
+ "TargetOffsetU": 123.49814527419454,
+ "TrackingOffsetE": 0.0,
+ "TrackingOffsetU": 0.0,
+ "SunPosE": -0.3783143931471522,
+ "SunPosN": -0.4191916363901756,
+ "SunPosU": 0.825322114036834,
+ "LastScore": 8.717597473226798,
+ "GeometryData": '{\r\n "alpha" : 1.586571429484214,\r\n "beta" : 1.574961871386958,\r\n "gamma" : 0.0,\r\n "delta" : 0.0,\r\n "axis1k" : 0.017594028615554223,\r\n "axis2k" : 0.9056999844505894,\r\n "axis3k" : 0.0,\r\n "axis1b" : 0.07894519658593206,\r\n "axis2b" : 0.07716455571024607,\r\n "axis3b" : 0.0\r\n}',
+ "IsDeleted": 1,
+ "CreatedAt": pd.Timestamp("2022-06-01 11:08:45+0000", tz="UTC"),
+ "UpdatedAt": pd.Timestamp("2022-10-27 07:05:55+0000", tz="UTC"),
+ "OverExpRatio": -1,
+ "Az": -1,
+ "iO": 1,
+ "ApX": 0.0,
+ "ApY": 0.0,
+ "ApZ": 0.0,
+ "OvrExp": 5.34028589409332,
+ "Azimuth": -42.06579562155874,
+ "Sun_elevation": 55.621162346471515,
+ }
+ return 115399, pd.Series(data)
+
+
+@pytest.fixture
+def calibration_collection_data():
+ """
+ Make a fixture with data for generating the calibration collection.
+
+ Returns
+ -------
+ pd.DataFrame
+ The data for the calibration collection as a test fixture.
+ """
+ data = {
+ mappings.HELIOSTAT_ID: ["BC52", "BC52", "BC52", "BC52"],
+ mappings.TITLE_KEY: [
+ "calibration image 115399 and associated motor positions for heliostat BC52",
+ "calibration image 116262 and associated motor positions for heliostat BC52",
+ "calibration image 116310 and associated motor positions for heliostat BC52",
+ "calibration image 116384 and associated motor positions for heliostat BC52",
+ ],
+ mappings.URL_KEY: [
+ "INSERT/SOMETHING/HERE/BC52-115399-calibration-item-stac.json",
+ "INSERT/SOMETHING/HERE/BC52-116262-calibration-item-stac.json",
+ "INSERT/SOMETHING/HERE/BC52-116310-calibration-item-stac.json",
+ "INSERT/SOMETHING/HERE/BC52-116384-calibration-item-stac.json",
+ ],
+ mappings.CREATED_AT: [
+ pd.Timestamp("2022-06-01 11:08:45+00:00", tz="UTC"),
+ pd.Timestamp("2022-06-02 10:10:19+00:00", tz="UTC"),
+ pd.Timestamp("2022-06-02 10:15:40+00:00", tz="UTC"),
+ pd.Timestamp("2022-06-02 10:26:01+00:00", tz="UTC"),
+ ],
+ mappings.AZIMUTH: [
+ -42.06579562155874,
+ -17.92779175130011,
+ -20.35899048157409,
+ -24.95280670914511,
+ ],
+ mappings.SUN_ELEVATION: [
+ 55.621162346471515,
+ 60.38321187895165,
+ 60.10724673519602,
+ 59.47986694914367,
+ ],
+ mappings.SYSTEM: ["HeliOS.FDM"] * 4,
+ }
+
+ return pd.DataFrame(data)
+
+
+def test_make_calibration_collection(calibration_collection_data: pd.DataFrame) -> None:
+ """
+ Test the creation of the calibration STAC collection.
+
+ Parameters
+ ----------
+ calibration_collection_data: pd.DataFrame
+ The test fixture.
+ """
+ for heliostat, data in calibration_collection_data.groupby(mappings.HELIOSTAT_ID):
+ assert isinstance(heliostat, str)
+ collection = make_calibration_collection(heliostat_id=heliostat, data=data)
+
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [
+ "https://stac-extensions.github.io/item-assets/v1.0.0/schema.json"
+ ],
+ "id": "BC52-calibration-collection",
+ "type": "Collection",
+ "title": "Calibration images from heliostat BC52",
+ "description": "All calibration images from the heliostat BC52",
+ "keywords": ["csp", "calibration", "tracking"],
+ "license": "CDLA-2.0",
+ "providers": [
+ {
+ "name": "German Aerospace Center (DLR)",
+ "description": "National center for aerospace, energy and transportation research of Germany",
+ "roles": ["licensor", "producer", "processor"],
+ "url": "https://github.com/ARTIST-Association/PAINT/",
+ },
+ {
+ "name": "Karlsruhe Institute of Technology (KIT)",
+ "description": "Public research center and university in Karlsruhe, Germany",
+ "roles": ["producer", "processor", "host"],
+ "url": "https://github.com/ARTIST-Association/PAINT/",
+ },
+ ],
+ "extent": {
+ "spatial": {
+ "bbox": [
+ 50.913296351383806,
+ 6.387514846666862,
+ 50.913296351383806,
+ 6.387514846666862,
+ ]
+ },
+ "temporal": {
+ "interval": ["2022-06-01Z11:08:45Z", "2022-06-02Z10:26:01Z"]
+ },
+ },
+ "summaries": {
+ "datetime": {
+ "minimum": "2022-06-01Z11:08:45Z",
+ "maximum": "2022-06-02Z10:26:01Z",
+ },
+ "view:sun_azimuth": {
+ "minimum": -42.06579562155874,
+ "maximum": -17.92779175130011,
+ },
+ "view:sun_elevation": {
+ "minimum": 55.621162346471515,
+ "maximum": 60.38321187895165,
+ },
+ "instruments": ["HeliOS.FDM"],
+ },
+ "links": [
+ {
+ "rel": "license",
+ "href": "https://cdla.dev/permissive-2-0/",
+ "type": "text/html",
+ "title": "Community Data License Agreement – Permissive – Version 2.0",
+ },
+ {
+ "rel": "self",
+ "href": "INSERT/SOMETHING/HERE/BC52-calibration-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": "Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "collection",
+ "href": "INSERT/SOMETHING/HERE/BC52-calibration-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/BC52-115399-calibration-item-stac.json",
+ "type": "application/geo+json",
+ "title": "STAC item of calibration image 115399 and associated motor positions for heliostat BC52",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/BC52-116262-calibration-item-stac.json",
+ "type": "application/geo+json",
+ "title": "STAC item of calibration image 116262 and associated motor positions for heliostat BC52",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/BC52-116310-calibration-item-stac.json",
+ "type": "application/geo+json",
+ "title": "STAC item of calibration image 116310 and associated motor positions for heliostat BC52",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/BC52-116384-calibration-item-stac.json",
+ "type": "application/geo+json",
+ "title": "STAC item of calibration image 116384 and associated motor positions for heliostat BC52",
+ },
+ ],
+ }
+
+ assert not deepdiff.DeepDiff(
+ collection, expected, ignore_numeric_type_changes=True
+ )
+
+
+def test_make_calibration_item(calibration_item_data: Tuple[str, pd.Series]) -> None:
+ """
+ Test the creation of a STAC item.
+
+ Parameters
+ ----------
+ calibration_item_data : Tuple[str, pd.Series]
+ The test fixture.
+ """
+ image, data = calibration_item_data
+ assert isinstance(image, int)
+ item = make_calibration_item(image=image, heliostat_data=data)
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": ["view"],
+ "id": "115399",
+ "type": "Feature",
+ "title": "Calibration data from heliostat BC52 for image 115399",
+ "description": "Image of focused sunlight on the calibration target from heliostat BC52 for image 115399 with associated motor positions",
+ "collection": "BC52-calibration-collection",
+ "geometry": {
+ "type": "Point",
+ "coordinates": [6.387514846666862, 50.913296351383806],
+ },
+ "bbox": [
+ 6.387514846666862,
+ 50.913296351383806,
+ 6.387514846666862,
+ 50.913296351383806,
+ ],
+ "properties": {
+ "datetime": "2022-06-01Z11:08:45Z",
+ "created": "2022-06-01Z11:08:45Z",
+ "updated": "2022-10-27Z07:05:55Z",
+ "instruments": ["HeliOS.FDM"],
+ },
+ "view:sun_azimuth": -42.06579562155874,
+ "view:sun_elevation": 55.621162346471515,
+ "links": [
+ {
+ "rel": "self",
+ "href": "./115399-stac.json",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": "./Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "parent",
+ "href": "INSERT/SOMETHING/HERE/BC52-calibration-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": "INSERT/SOMETHING/HERE/BC52-calibration-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ "target": {
+ "href": "./115399.png",
+ "roles": ["data"],
+ "type": "image/png",
+ "title": "Calibration image with id 115399",
+ },
+ "motor_positions": {
+ "href": "./BC52-115399-motor-position.json",
+ "roles": ["metadata"],
+ "type": "image/png",
+ "title": "Motor positions for the calibration image id 115399",
+ },
+ },
+ }
+
+ assert not deepdiff.DeepDiff(item, expected, ignore_numeric_type_changes=True)
+
+
+def test_make_calibration_collection_fail() -> None:
+ """Test conversion failure on incomplete input data."""
+ with pytest.raises(KeyError):
+ make_calibration_collection("AB123", pd.DataFrame())
diff --git a/tests/data/test_catalog_stac.py b/tests/data/test_catalog_stac.py
new file mode 100644
index 00000000..21b48beb
--- /dev/null
+++ b/tests/data/test_catalog_stac.py
@@ -0,0 +1,89 @@
+import deepdiff
+import pandas as pd
+import pytest
+
+import paint.data.catalog_stac
+import paint.util.paint_mappings
+
+
+@pytest.fixture
+def catalog_data():
+ """
+ Make a fixture with data for generating the catalog.
+
+ Returns
+ -------
+ pd.DataFrame
+ The test fixture.
+ """
+ data = {
+ "HeliostatId": ["AA23", "AA24", "AA25", "AA26"],
+ "CreatedAt": [
+ "2021-07-20 07:09:29",
+ "2021-07-20 07:09:33",
+ "2021-07-20 07:09:37",
+ "2021-07-20 07:09:41",
+ ],
+ }
+ df = pd.DataFrame(data)
+ df = df.set_index("HeliostatId")
+ return df
+
+
+def test_make_catalog(catalog_data: pd.DataFrame) -> None:
+ """Test STAC catalog generation."""
+ catalog = paint.data.catalog_stac.make_catalog(data=catalog_data)
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "WRI1030197-catalog",
+ "type": "Catalog",
+ "title": "Operational data of concentrating solar power plant WRI1030197",
+ "description": "Calibration images, deflectometry measurements, heliostat properties, and weather data",
+ "links": [
+ {
+ "rel": "self",
+ "href": "Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": "Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/weather-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC collection containing the weather data",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA23-heliostat-catalog-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC catalog containing data for heliostat AA23",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA24-heliostat-catalog-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC catalog containing data for heliostat AA24",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA25-heliostat-catalog-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC catalog containing data for heliostat AA25",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA26-heliostat-catalog-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC catalog containing data for heliostat AA26",
+ },
+ ],
+ }
+
+ assert not deepdiff.DeepDiff(catalog, expected)
diff --git a/tests/data/test_data/Helio_AA23_test_data_230918133925.binp b/tests/data/test_data/Helio_AA23_test_data_230918133925.binp
new file mode 100644
index 00000000..970af8ab
Binary files /dev/null and b/tests/data/test_data/Helio_AA23_test_data_230918133925.binp differ
diff --git a/tests/data/test_deflectometry_stac.py b/tests/data/test_deflectometry_stac.py
new file mode 100644
index 00000000..5a2af823
--- /dev/null
+++ b/tests/data/test_deflectometry_stac.py
@@ -0,0 +1,280 @@
+from typing import Tuple
+
+import deepdiff
+import pandas as pd
+import pytest
+
+import paint.util.paint_mappings as mappings
+from paint.data.deflectometry_stac import (
+ make_deflectometry_collection,
+ make_deflectometry_item,
+)
+
+
+@pytest.fixture
+def deflectometry_item_data() -> Tuple[str, pd.Series]:
+ """
+ Make a fixture with data for generating a deflectometry item.
+
+ Returns
+ -------
+ str
+ The heliostat ID.
+ pd.Series
+ The data for the deflectometry STAC item.
+ """
+ data = {
+ "East": 13.2,
+ "North": 154.7,
+ "Altitude": 88.66962,
+ "HeightAboveGround": 1.66962,
+ "CreatedAt": "2023-09-18Z11:39:25Z",
+ }
+ return "AY39", pd.Series(data)
+
+
+@pytest.fixture
+def deflectometry_collection_data():
+ """
+ Make a fixture with data for generating the deflectometry collection.
+
+ Returns
+ -------
+ pd.DataFrame
+ The data for the deflectometry collection as a test fixture.
+ """
+    # (pandas is already imported at module level as pd)
+
+ # Define the data
+ data = {
+ "HeliostatId": ["AY39", "AY39", "AY39"],
+ "title": [
+ "Deflectometry measurements for AY39 at 2023-09-18Z11:39:25Z",
+ "Deflectometry measurements for AY39 at 2024-09-18Z11:39:25Z",
+ "Deflectometry measurements for AY39 at 2022-06-26Z07:07:07Z",
+ ],
+ "url": [
+ "INSERT/SOMETHING/HERE/AY39-2023-09-18Z11:39:25Z-deflectometry-item-stac.json?download=1",
+ "INSERT/SOMETHING/HERE/AY39-2024-09-18Z11:39:25Z-deflectometry-item-stac.json?download=1",
+ "INSERT/SOMETHING/HERE/AY39-2023-06-26Z07:07:07Z-deflectometry-item-stac.json?download=1",
+ ],
+ "CreatedAt": [
+ "2023-09-18Z11:39:25Z",
+ "2024-09-18Z11:39:25Z",
+ "2022-06-26Z07:07:07Z",
+ ],
+ "latitude": [50.914686955478864, 51.914686955478864, 49.914686955478864],
+ "longitude": [6.387702537483708, 5.387702537483708, 7.387702537483708],
+ "Elevation": [88.66962, 89.66962, 123.66962],
+ }
+
+ return pd.DataFrame(data)
+
+
+def test_make_deflectometry_collection(
+ deflectometry_collection_data: pd.DataFrame,
+) -> None:
+ """
+ Test the creation of the deflectometry STAC collection.
+
+ Parameters
+ ----------
+ deflectometry_collection_data: pd.DataFrame
+ The test fixture.
+ """
+ for heliostat, data in deflectometry_collection_data.groupby(mappings.HELIOSTAT_ID):
+ assert isinstance(heliostat, str)
+ collection = make_deflectometry_collection(heliostat_id=heliostat, data=data)
+
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "AY39-deflectometry-collection",
+ "type": "Collection",
+ "title": "Deflectometry data for heliostat AY39",
+ "description": "All deflectometry data, including raw measurements, filled measurements and results summary for heliostat AY39",
+ "keywords": ["csp", "deflectometry"],
+ "license": "CDLA-2.0",
+ "providers": [
+ {
+ "name": "German Aerospace Center (DLR)",
+ "description": "National center for aerospace, energy and transportation research of Germany",
+ "roles": ["licensor", "producer", "processor"],
+ "url": "https://github.com/ARTIST-Association/PAINT/",
+ },
+ {
+ "name": "Karlsruhe Institute of Technology (KIT)",
+ "description": "Public research center and university in Karlsruhe, Germany",
+ "roles": ["producer", "processor", "host"],
+ "url": "https://github.com/ARTIST-Association/PAINT/",
+ },
+ ],
+ "extent": {
+ "spatial": {
+ "bbox": [
+ [
+ 49.914686955478864,
+ 5.387702537483708,
+ 88.66962,
+ 51.914686955478864,
+ 7.387702537483708,
+ 123.66962,
+ ]
+ ]
+ },
+ "temporal": {
+ "interval": ["2022-06-26Z07:07:07Z", "2024-09-18Z11:39:25Z"]
+ },
+ },
+ "summaries": {
+ "datetime": {
+ "minimum": "2022-06-26Z07:07:07Z",
+ "maximum": "2024-09-18Z11:39:25Z",
+ },
+ "instruments": "QDec_2014-101",
+ },
+ "links": [
+ {
+ "rel": "license",
+ "href": "https://cdla.dev/permissive-2-0/",
+ "type": "text/html",
+ "title": "Community Data License Agreement – Permissive – Version 2.0",
+ },
+ {
+ "rel": "self",
+ "href": "INSERT/SOMETHING/HERE/AY39-deflectometry-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": "Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "collection",
+ "href": "INSERT/SOMETHING/HERE/AY39-deflectometry-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/AY39-2023-09-18Z11:39:25Z-deflectometry-item-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "STAC item of Deflectometry measurements for AY39 at 2023-09-18Z11:39:25Z",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/AY39-2024-09-18Z11:39:25Z-deflectometry-item-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "STAC item of Deflectometry measurements for AY39 at 2024-09-18Z11:39:25Z",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/AY39-2023-06-26Z07:07:07Z-deflectometry-item-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "STAC item of Deflectometry measurements for AY39 at 2022-06-26Z07:07:07Z",
+ },
+ ],
+ }
+
+ assert not deepdiff.DeepDiff(
+ collection, expected, ignore_numeric_type_changes=True
+ )
+
+
+def test_make_deflectometry_item(
+ deflectometry_item_data: Tuple[str, pd.Series],
+) -> None:
+ """
+ Test the creation of a STAC item.
+
+ Parameters
+ ----------
+ deflectometry_item_data : Tuple[str, pd.Series]
+ The test fixture.
+ """
+ heliostat_key, data = deflectometry_item_data
+ assert isinstance(heliostat_key, str)
+ _, item = make_deflectometry_item(heliostat_key=heliostat_key, heliostat_data=data)
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "AY39-2023-09-18Z11:39:25Z-deflectometry",
+ "type": "Feature",
+ "title": "Deflectometry measurement of AY39",
+ "description": "Measured raw and filled deflectometry data containing point clouds and surface normals for heliosat AY39 and the deflectometry measurement results summary",
+ "collection": "AY39-deflectometry-collection",
+ "geometry": {
+ "type": "Point",
+ "coordinates": [50.914686955478864, 6.387702537483708, 88.66962],
+ },
+ "bbox": [
+ 50.914666955478864,
+ 6.387682537483708,
+ 86.66962,
+ 50.91470695547886,
+ 6.387722537483708,
+ 90.66962,
+ ],
+ "properties": {
+ "datetime": "2023-09-18Z11:39:25Z",
+ "created": "2023-09-18Z11:39:25Z",
+ "instruments": "QDec_2014-101",
+ },
+ "links": [
+ {
+ "rel": "self",
+ "href": "./AY39-2023-09-18Z11:39:25Z-deflectometry-stac.json",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": "./Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "parent",
+ "href": "INSERT/SOMETHING/HERE/AY39-deflectometry-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": "INSERT/SOMETHING/HERE/AY39-deflectometry-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ "raw_measurement": {
+ "href": "./AY39-2023-09-18Z11:39:25Z-deflectometry.h5",
+ "roles": ["data"],
+ "type": "application/x-hdf5",
+ "title": "Raw deflectometry measurement of AY39 at 2023-09-18Z11:39:25Z",
+ },
+ "filled_measurement": {
+ "href": "./AY39-filled-2023-09-18Z11:39:25Z-deflectometry.h5",
+ "roles": ["data"],
+ "type": "application/x-hdf5",
+ "title": "Filled deflectometry measurement of AY39 at 2023-09-18Z11:39:25Z",
+ },
+ "results_summary": {
+ "href": "./AY39-2023-09-18Z11:39:25Z-deflectometry-result.pdf",
+ "roles": ["metadata"],
+ "type": "application/pdf",
+ "title": "Summary of deflectometry measurement of AY39 at 2023-09-18Z11:39:25Z",
+ },
+ },
+ }
+
+ assert not deepdiff.DeepDiff(item, expected, ignore_numeric_type_changes=True)
+
+
+def test_make_deflectometry_collection_fail() -> None:
+ """Test conversion failure on incomplete input data."""
+ with pytest.raises(KeyError):
+ make_deflectometry_collection("AB123", pd.DataFrame())
diff --git a/tests/data/test_dwd_stac.py b/tests/data/test_dwd_stac.py
new file mode 100644
index 00000000..5b4a70dc
--- /dev/null
+++ b/tests/data/test_dwd_stac.py
@@ -0,0 +1,89 @@
+import deepdiff
+import pandas as pd
+import pytest
+
+from paint.data.dwd_stac_item import make_dwd_item
+
+
+@pytest.fixture
+def dwd_item_data() -> pd.Series:
+ """
+ Make a fixture with data for generating a DWD weather STAC item.
+
+ Returns
+ -------
+ pd.Series
+ The data for the DWD stac item.
+ """
+ data = {
+ "StationID": 15000,
+ "latitude": 50.7983,
+ "longitude": 6.0244,
+ "Elevation": 231.0,
+ "StationName": "Aachen-Orsbach",
+ "start": "2021-03-31Z22:00:00Z",
+ "end": "2024-02-29Z23:00:00Z",
+ }
+ return pd.Series(data)
+
+
+def test_make_dwd_item(dwd_item_data: pd.Series) -> None:
+ """
+ Test the creation of a STAC item.
+
+ Parameters
+ ----------
+ dwd_item_data : pd.Series
+ The test fixture.
+ """
+ item = make_dwd_item(data=dwd_item_data)
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "dwd-weather",
+ "type": "Feature",
+ "title": "Weather data from the DWD",
+ "description": "Weather data from the DWD station ID 15000, i.e. Aachen-Orsbach.",
+ "collection": "weather-collection",
+ "geometry": {"type": "Point", "coordinates": [50.7983, 6.0244, 231.0]},
+ "bbox": [50.7983, 6.0244, 231.0, 50.7983, 6.0244, 231.0],
+ "properties": {"datetime": "null"},
+ "start_datetime": "2021-03-31Z22:00:00Z",
+ "end_datetime": "2024-02-29Z23:00:00Z",
+ "links": [
+ {
+ "rel": "self",
+ "href": "./dwd-weather-item-stac",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": "./Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "parent",
+ "href": "INSERT/SOMETHING/HERE/weather-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": "weather-collection-stac.json",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ "weather_data": {
+ "href": "./dwd-weather.h5",
+ "roles": ["data"],
+ "type": "application/x-hdf5",
+ "title": "Weather data from the DWD",
+ }
+ },
+ }
+
+ assert not deepdiff.DeepDiff(item, expected, ignore_numeric_type_changes=True)
diff --git a/tests/data/test_dwd_weather.py b/tests/data/test_dwd_weather.py
index efdfd8d5..6b75b13d 100644
--- a/tests/data/test_dwd_weather.py
+++ b/tests/data/test_dwd_weather.py
@@ -197,3 +197,27 @@ def test_dwd_weather(
assert (
station["weather_type_1h"]["value"][:] == [-1.0, 100.0, 110.0]
).all()
+
+
+def test_raw_download():
+ """Runs the raw download function to catch any errors in the request. No assertions are tested."""
+ with tempfile.TemporaryDirectory() as temp_dir:
+ output_path = temp_dir
+ file_name = "test_raw_downlaod.h5"
+
+ dwd_weather = DWDWeatherData(
+ parameters_10min=[
+ "radiation_sky_short_wave_diffuse",
+ "radiation_global",
+ ],
+ parameters_1h=[
+ "cloud_cover_total",
+ "humidity",
+ ],
+ station_ids=["15000"],
+ start_date="2021-04-01",
+ end_date="2021-05-01",
+ output_path=output_path,
+ file_name=file_name,
+ )
+ dwd_weather._get_raw_data()
diff --git a/tests/data/test_facet_stac.py b/tests/data/test_facet_stac.py
new file mode 100644
index 00000000..9dababf8
--- /dev/null
+++ b/tests/data/test_facet_stac.py
@@ -0,0 +1,104 @@
+from typing import Tuple
+
+import deepdiff
+import pandas as pd
+import pytest
+
+from paint.data.facet_stac import make_facet_item
+
+
+@pytest.fixture
+def facet_item_data() -> Tuple[str, pd.Series]:
+ """
+ Make a fixture with data for generating a facet item.
+
+ Returns
+ -------
+ str
+ The heliostat ID.
+ pd.Series
+ The data for the facet stac item.
+ """
+ data = {
+ "East": 13.2,
+ "North": 154.7,
+ "Altitude": 88.66962,
+ "HeightAboveGround": 1.66962,
+ "CreatedAt": "2023-09-18Z11:39:25Z",
+ }
+ return "AY39", pd.Series(data)
+
+
+def test_make_facet_item(facet_item_data: Tuple[str, pd.Series]) -> None:
+ """
+ Test the creation of a STAC item.
+
+ Parameters
+ ----------
+ facet_item_data : Tuple[str, pd.Series]
+ The test fixture.
+ """
+ heliostat_key, data = facet_item_data
+ assert isinstance(heliostat_key, str)
+ item = make_facet_item(heliostat_key=heliostat_key, heliostat_data=data)
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "AY39-facet_properties",
+ "type": "Feature",
+ "title": "Facet properties of AY39",
+ "description": "The facet properties, including canting and translation vectors for heliosat AY39",
+ "collection": "AY39-heliostat_properties-collection",
+ "geometry": {
+ "type": "Point",
+ "coordinates": [50.914686955478864, 6.387702537483708, 88.66962],
+ },
+ "bbox": [
+ 50.914666955478864,
+ 6.387682537483708,
+ 86.66962,
+ 50.91470695547886,
+ 6.387722537483708,
+ 90.66962,
+ ],
+ "properties": {
+ "datetime": "2023-09-18Z11:39:25Z",
+ "created": "2023-09-18Z11:39:25Z",
+ },
+ "links": [
+ {
+ "rel": "self",
+ "href": "./AY39-facet_properties-stac.json",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": "./Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "parent",
+ "href": "INSERT/SOMETHING/HERE/AY39-heliostat_properties-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": "INSERT/SOMETHING/HERE/AY39-heliostat_properties-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ "facet_properties": {
+ "href": "./AY39-facet_properties.json",
+ "roles": ["data"],
+ "type": "application/geo+json",
+ "title": "Facet properties of AY39",
+ }
+ },
+ }
+
+ assert not deepdiff.DeepDiff(item, expected, ignore_numeric_type_changes=True)
diff --git a/tests/data/test_heliostat_catalog_stac.py b/tests/data/test_heliostat_catalog_stac.py
new file mode 100644
index 00000000..a19eed9e
--- /dev/null
+++ b/tests/data/test_heliostat_catalog_stac.py
@@ -0,0 +1,104 @@
+from typing import Any, Dict
+
+import deepdiff
+import pytest
+
+from paint.data.heliostat_catalog_stac import make_heliostat_catalog
+
+
+@pytest.mark.parametrize(
+ "heliostat_id, expected",
+ [
+ (
+ "AA23",
+ {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "AA23-heliostat-catalog",
+ "type": "Catalog",
+ "title": "Operational data for the heliostat AA23",
+ "description": "Calibration images, deflectometry measurements, heliostat properties, and weather data",
+ "links": [
+ {
+ "rel": "self",
+ "href": "INSERT/SOMETHING/HERE/AA23-heliostat-catalog-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC catalog file",
+ },
+ {
+ "rel": "root",
+ "href": "Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the parent catalog",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA23-deflectometry-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC collection containing the deflectometry data",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA23-calibration-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC collection containing the calibration data",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA23-heliostat_properties-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC collection containing the heliostat properties",
+ },
+ ],
+ },
+ ),
+ (
+ "AA41",
+ {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "AA41-heliostat-catalog",
+ "type": "Catalog",
+ "title": "Operational data for the heliostat AA41",
+ "description": "Calibration images, deflectometry measurements, heliostat properties, and weather data",
+ "links": [
+ {
+ "rel": "self",
+ "href": "INSERT/SOMETHING/HERE/AA41-heliostat-catalog-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC catalog file",
+ },
+ {
+ "rel": "root",
+ "href": "Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the parent catalog",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA41-deflectometry-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC collection containing the deflectometry data",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA41-calibration-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC collection containing the calibration data",
+ },
+ {
+ "rel": "child",
+ "href": "INSERT/SOMETHING/HERE/AA41-heliostat_properties-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the STAC collection containing the heliostat properties",
+ },
+ ],
+ },
+ ),
+ ],
+)
+def test_make_heliostat_catalog(heliostat_id: str, expected: Dict[str, Any]) -> None:
+ """Test STAC heliostat catalog generation."""
+ catalog = make_heliostat_catalog(heliostat_id=heliostat_id)
+
+ assert not deepdiff.DeepDiff(catalog, expected)
diff --git a/tests/data/test_kinematic_stac.py b/tests/data/test_kinematic_stac.py
new file mode 100644
index 00000000..6085a14c
--- /dev/null
+++ b/tests/data/test_kinematic_stac.py
@@ -0,0 +1,133 @@
+from typing import Tuple
+
+import deepdiff
+import pandas as pd
+import pytest
+
+from paint.data.kinematic_stac import make_kinematic_item
+
+
+@pytest.fixture
+def kinematic_item_data() -> Tuple[str, pd.Series]:
+ """
+ Make a fixture with data for generating a kinematic item.
+
+ Returns
+ -------
+ str
+ The heliostat ID.
+ pd.Series
+ The data for the kinematic stac item.
+ """
+ data = {
+ "CreatedAt": "2021-07-20 07:09:29",
+ "East": -57.2,
+ "North": 25.0,
+ "Altitude": 88.711,
+ "HeightAboveGround": 1.711,
+ "FieldId": 1,
+ "Type_axis_1": "LINEAR",
+ "MinCounts_axis_1": 0,
+ "MaxCounts_axis_1": 69296,
+ "PulseRatio_axis_1": 154166.666667,
+ "A_axis_1": 0,
+ "B_axis_1": 0.075005,
+ "C_axis_1": 0.335308,
+ "D_axis_1": 0.338095,
+ "E_axis_1": 0,
+ "Reversed_axis_1": 0,
+ "AngleK_axis_1": 0.005843,
+ "AngleMin_axis_1": 0.004435,
+ "AngleMax_axis_1": 1.570796,
+ "AngleW_axis_1": 0.025,
+ "Type_axis_2": "LINEAR",
+ "MinCounts_axis_2": 0,
+ "MaxCounts_axis_2": 75451,
+ "PulseRatio_axis_2": 154166.666667,
+ "A_axis_2": 0,
+ "B_axis_2": 0.078887,
+ "C_axis_2": 0.340771,
+ "D_axis_2": 0.3191,
+ "E_axis_2": 0,
+ "Reversed_axis_2": 1,
+ "AngleK_axis_2": 0.939721,
+ "AngleMin_axis_2": -0.95993,
+ "AngleMax_axis_2": 0.929079,
+ "AngleW_axis_2": 0.025,
+ }
+ return "AA23", pd.Series(data)
+
+
+def test_make_kinematic_item(kinematic_item_data: Tuple[str, pd.Series]) -> None:
+ """
+ Test the creation of a STAC item.
+
+ Parameters
+ ----------
+ kinematic_item_data : Tuple[str, pd.Series]
+ The test fixture.
+ """
+ heliostat_key, data = kinematic_item_data
+ assert isinstance(heliostat_key, str)
+ _, item = make_kinematic_item(heliostat_key=heliostat_key, heliostat_data=data)
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "AA23-kinematic_properties",
+ "type": "Feature",
+ "title": "Kinematic properties of AA23",
+ "description": "The kinematic properties that describe the kinematic applied in AA23",
+ "collection": "AA23-heliostat_properties-collection",
+ "geometry": {
+ "type": "Point",
+ "coordinates": [50.913521077320304, 6.38670151979386, 88.711],
+ },
+ "bbox": [
+ 50.913501077320305,
+ 6.38668151979386,
+ 86.711,
+ 50.9135410773203,
+ 6.38672151979386,
+ 90.711,
+ ],
+ "properties": {
+ "datetime": "2021-07-20Z05:09:29Z",
+ "created": "2021-07-20Z05:09:29Z",
+ },
+ "links": [
+ {
+ "rel": "self",
+ "href": "./AA23-kinematic_properties-stac.json",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC file",
+ },
+ {
+ "rel": "root",
+ "href": "./Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "parent",
+ "href": "INSERT/SOMETHING/HERE/AA23-heliostat_properties-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ {
+ "rel": "collection",
+ "href": "INSERT/SOMETHING/HERE/AA23-heliostat_properties-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to the collection STAC file",
+ },
+ ],
+ "assets": {
+ "kinematic_properties": {
+ "href": "./AA23-kinematic_properties.json",
+ "roles": ["data"],
+ "type": "application/geo+json",
+ "title": "Kinematic properties of AA23",
+ }
+ },
+ }
+
+ assert not deepdiff.DeepDiff(item, expected, ignore_numeric_type_changes=True)
diff --git a/tests/data/test_properties_collection_stac.py b/tests/data/test_properties_collection_stac.py
new file mode 100644
index 00000000..1e7e90d0
--- /dev/null
+++ b/tests/data/test_properties_collection_stac.py
@@ -0,0 +1,145 @@
+import deepdiff
+import pandas as pd
+import pytest
+
+import paint.util.paint_mappings as mappings
+from paint.data.properties_collection_stac import make_properties_collection
+
+
+@pytest.fixture
+def properties_collection_data() -> pd.DataFrame:
+ """
+ Make a fixture with data for generating the heliostat properties collection.
+
+ Returns
+ -------
+ pd.DataFrame
+ The data for the heliostat properties collection as a test fixture.
+ """
+ # Define the data
+ data = {
+ "HeliostatId": ["AY39", "AY39"],
+ "title": ["kinematic properties for AY39", "facet properties for AY39"],
+ "url": [
+ "INSERT/SOMETHING/HERE/AY39-kinematic_properties-item-stac.json?download=1",
+ "INSERT/SOMETHING/HERE/AY39-facet_properties-item-stac.json?download=1",
+ ],
+ "CreatedAt": ["2021-12-03Z12:28:26Z", "2023-09-18Z11:39:25Z"],
+ "latitude": [50.914686955478864, 50.914686955478864],
+ "longitude": [6.387702537483708, 6.387702537483708],
+ "Elevation": [88.66962, 88.66962],
+ }
+
+ return pd.DataFrame(data)
+
+
+def test_make_properties_collection(
+ properties_collection_data: pd.DataFrame,
+) -> None:
+ """
+ Test the creation of the heliostat properties STAC collection.
+
+ Parameters
+ ----------
+ properties_collection_data: pd.DataFrame
+ The test fixture.
+ """
+ for heliostat, data in properties_collection_data.groupby(mappings.HELIOSTAT_ID):
+ assert isinstance(heliostat, str)
+ collection = make_properties_collection(heliostat_id=heliostat, data=data)
+
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "AY39-heliostat_properties-collection",
+ "type": "Collection",
+ "title": "Heliostat properties data for AY39",
+ "description": "All heliostat properties, including the facet properties and kinematic properties for heliostat AY39",
+ "keywords": ["csp", "facet", "kinematic", "properties"],
+ "license": "CDLA-2.0",
+ "providers": [
+ {
+ "name": "German Aerospace Center (DLR)",
+ "description": "National center for aerospace, energy and transportation research of Germany",
+ "roles": ["licensor", "producer", "processor"],
+ "url": "https://github.com/ARTIST-Association/PAINT/",
+ },
+ {
+ "name": "Karlsruhe Institute of Technology (KIT)",
+ "description": "Public research center and university in Karlsruhe, Germany",
+ "roles": ["producer", "processor", "host"],
+ "url": "https://github.com/ARTIST-Association/PAINT/",
+ },
+ ],
+ "extent": {
+ "spatial": {
+ "bbox": [
+ [
+ 50.914686955478864,
+ 6.387702537483708,
+ 88.66962,
+ 50.914686955478864,
+ 6.387702537483708,
+ 88.66962,
+ ]
+ ]
+ },
+ "temporal": {
+ "interval": ["2021-12-03Z12:28:26Z", "2023-09-18Z11:39:25Z"]
+ },
+ },
+ "summaries": {
+ "datetime": {
+ "minimum": "2021-12-03Z12:28:26Z",
+ "maximum": "2023-09-18Z11:39:25Z",
+ }
+ },
+ "links": [
+ {
+ "rel": "license",
+ "href": "https://cdla.dev/permissive-2-0/",
+ "type": "text/html",
+ "title": "Community Data License Agreement – Permissive – Version 2.0",
+ },
+ {
+ "rel": "self",
+ "href": "INSERT/SOMETHING/HERE/AY39-heliostat_properties-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": "Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "collection",
+ "href": "INSERT/SOMETHING/HERE/AY39-heliostat_properties-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/AY39-kinematic_properties-item-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "STAC item of kinematic properties for AY39",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/AY39-facet_properties-item-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "STAC item of facet properties for AY39",
+ },
+ ],
+ }
+
+ assert not deepdiff.DeepDiff(
+ collection, expected, ignore_numeric_type_changes=True
+ )
+
+
+def test_make_properties_collection_fail() -> None:
+ """Test conversion failure on incomplete input data."""
+ with pytest.raises(KeyError):
+ make_properties_collection("AB123", pd.DataFrame())
diff --git a/tests/data/test_weather_collection_stac.py b/tests/data/test_weather_collection_stac.py
new file mode 100644
index 00000000..90a70137
--- /dev/null
+++ b/tests/data/test_weather_collection_stac.py
@@ -0,0 +1,118 @@
+import deepdiff
+import pandas as pd
+import pytest
+
+from paint.data.weather_collection_stac import make_weather_collection
+
+
+@pytest.fixture
+def weather_collection_data() -> pd.DataFrame:
+ """
+ Make a fixture with data for generating the weather collection.
+
+ Returns
+ -------
+ pd.DataFrame
+ The data for the weather collection as a test fixture.
+ """
+ # Define the data
+ data = {
+ "title": ["DWD weather data"],
+ "url": ["INSERT/SOMETHING/HERE/dwd-weather-item-stac?download=1"],
+ "start": ["2021-03-31Z22:00:00Z"],
+ "end": ["2024-02-29Z23:00:00Z"],
+ "latitude": [50.7983],
+ "longitude": [6.0244],
+ "Elevation": [231.0],
+ }
+
+ return pd.DataFrame(data)
+
+
+def test_make_weather_collection(
+ weather_collection_data: pd.DataFrame,
+) -> None:
+ """
+ Test the creation of the weather STAC collection.
+
+ Parameters
+ ----------
+ weather_collection_data: pd.DataFrame
+ The test fixture.
+ """
+ collection = make_weather_collection(data=weather_collection_data)
+
+ expected = {
+ "stac_version": "1.0.0",
+ "stac_extensions": [],
+ "id": "weather-collection",
+ "type": "Collection",
+ "title": "All weather measurements",
+ "description": "All weather measurements",
+ "keywords": ["weather"],
+ "license": "CDLA-2.0",
+ "providers": [
+ {
+ "name": "German Aerospace Center (DLR)",
+ "description": "National center for aerospace, energy and transportation research of Germany",
+ "roles": ["licensor", "producer", "processor"],
+ "url": "https://github.com/ARTIST-Association/PAINT/",
+ },
+ {
+ "name": "Karlsruhe Institute of Technology (KIT)",
+ "description": "Public research center and university in Karlsruhe, Germany",
+ "roles": ["producer", "processor", "host"],
+ "url": "https://github.com/ARTIST-Association/PAINT/",
+ },
+ ],
+ "extent": {
+ "spatial": {"bbox": [[50.7983, 6.0244, 231.0, 50.7983, 6.0244, 231.0]]},
+ "temporal": {"interval": ["2021-03-31Z22:00:00Z", "2024-02-29Z23:00:00Z"]},
+ },
+ "summaries": {
+ "datetime": {
+ "minimum": "2021-03-31Z22:00:00Z",
+ "maximum": "2024-02-29Z23:00:00Z",
+ }
+ },
+ "links": [
+ {
+ "rel": "license",
+ "href": "https://cdla.dev/permissive-2-0/",
+ "type": "text/html",
+ "title": "Community Data License Agreement – Permissive – Version 2.0",
+ },
+ {
+ "rel": "self",
+ "href": "INSERT/SOMETHING/HERE/weather-collection-stac.json?download=1",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "root",
+ "href": "Insert/URL/Here",
+ "type": "application/geo+json",
+ "title": "Reference to the entire catalogue for WRI1030197",
+ },
+ {
+ "rel": "collection",
+ "href": "weather-collection-stac.json",
+ "type": "application/geo+json",
+ "title": "Reference to this STAC collection file",
+ },
+ {
+ "rel": "item",
+ "href": "INSERT/SOMETHING/HERE/dwd-weather-item-stac?download=1",
+ "type": "application/geo+json",
+ "title": "STAC item of DWD weather data",
+ },
+ ],
+ }
+
+ assert not deepdiff.DeepDiff(collection, expected, ignore_numeric_type_changes=True)
+
+
+def test_make_weather_collection_fail() -> None:
+ """Test conversion failure on incomplete input data."""
+ with pytest.raises(KeyError):
+ make_weather_collection(pd.DataFrame())
diff --git a/tests/util/test_data/test_axis_data.csv b/tests/util/test_data/test_axis_data.csv
new file mode 100644
index 00000000..7c9e9656
--- /dev/null
+++ b/tests/util/test_data/test_axis_data.csv
@@ -0,0 +1,3 @@
+"FieldId";"HeliostatId";"Number";"Type";"MinCounts";"MaxCounts";"PulseRatio";"A";"B";"C";"D";"E";"Reversed";"AngleK";"AngleMin";"AngleMax";"AngleW";"CreatedAt";"UpdatedAt"
+"1";"10123";"1";"LINEAR";"0";"69296";"154166,66666666666";"0";"0,07500531132254991";"0,335308";"0,338095";"0";"0";"0,005842952618520847";"0,00443488173186779";"1,5707963267948966";"0,025";"2021-07-20 07:09:29";"2024-03-05 16:34:52"
+"1";"10123";"2";"LINEAR";"0";"75451";"154166,66666666666";"0";"0,07888741499699016";"0,340771";"0,3191";"0";"1";"0,9397207629980034";"-0,95993";"0,9290792094502279";"0,025";"2021-07-20 07:09:29";"2024-03-05 16:34:52"
diff --git a/tests/util/test_data/test_positions.xlsx b/tests/util/test_data/test_positions.xlsx
new file mode 100644
index 00000000..cbd00cce
Binary files /dev/null and b/tests/util/test_data/test_positions.xlsx differ
diff --git a/tests/util/test_preprocessing.py b/tests/util/test_preprocessing.py
new file mode 100644
index 00000000..d2337be5
--- /dev/null
+++ b/tests/util/test_preprocessing.py
@@ -0,0 +1,84 @@
+import argparse
+
+import pandas as pd
+import pytest
+
+from paint import PAINT_ROOT
+from paint.util.preprocessing import (
+ load_and_format_heliostat_axis_data,
+ load_and_format_heliostat_positions,
+ merge_and_sort_df,
+)
+
+
+@pytest.fixture
+def preprocessing_arguments() -> argparse.Namespace:
+ """
+ Make a fixture simulating the command line arguments required for testing the preprocessing functions.
+
+ Returns
+ -------
+ argparse.Namespace
+ The simulated command line arguments as a test fixture.
+ """
+ args = argparse.Namespace(
+ input_position=f"{PAINT_ROOT}/tests/util/test_data/test_positions.xlsx",
+ input_axis=f"{PAINT_ROOT}/tests/util/test_data/test_axis_data.csv",
+ )
+ return args
+
+
+def test_preprocessing(preprocessing_arguments: argparse.Namespace) -> None:
+ """
+ Test the preprocessing functions.
+
+ Parameters
+ ----------
+ preprocessing_arguments : argparse.Namespace
+ The simulated command line arguments.
+ """
+ expected_data = {
+ "HeliostatId": ["AA23"],
+ "CreatedAt": ["2021-07-20 07:09:29"],
+ "East": [-57.2],
+ "North": [25],
+ "Altitude": [88.711],
+ "HeightAboveGround": [1.7109999999999985],
+ "FieldId": [1],
+ "Type_axis_1": ["LINEAR"],
+ "MinCounts_axis_1": [0],
+ "MaxCounts_axis_1": [69296],
+ "PulseRatio_axis_1": [154166.66666666666],
+ "A_axis_1": [0],
+ "B_axis_1": [0.0750053113225499],
+ "C_axis_1": [0.335308],
+ "D_axis_1": [0.338095],
+ "E_axis_1": [0],
+ "Reversed_axis_1": [0],
+ "AngleK_axis_1": [0.0058429526185208],
+ "AngleMin_axis_1": [0.0044348817318677],
+ "AngleMax_axis_1": [1.5707963267948966],
+ "AngleW_axis_1": [0.025],
+ "Type_axis_2": ["LINEAR"],
+ "MinCounts_axis_2": [0],
+ "MaxCounts_axis_2": [75451],
+ "PulseRatio_axis_2": [154166.66666666666],
+ "A_axis_2": [0],
+ "B_axis_2": [0.0788874149969901],
+ "C_axis_2": [0.340771],
+ "D_axis_2": [0.3191],
+ "E_axis_2": [0],
+ "Reversed_axis_2": [1],
+ "AngleK_axis_2": [0.9397207629980034],
+ "AngleMin_axis_2": [-0.95993],
+ "AngleMax_axis_2": [0.929079209450228],
+ "AngleW_axis_2": [0.025],
+ }
+ expected_df = pd.DataFrame(expected_data)
+ expected_df.set_index("HeliostatId", inplace=True)
+
+ df_axis = load_and_format_heliostat_axis_data(preprocessing_arguments)
+ df_position = load_and_format_heliostat_positions(preprocessing_arguments)
+ df = merge_and_sort_df(df_heliostat_positions=df_position, df_axis=df_axis)
+
+ assert df.equals(expected_df)
diff --git a/tests/util/test_util.py b/tests/util/test_util.py
new file mode 100644
index 00000000..b2805c6f
--- /dev/null
+++ b/tests/util/test_util.py
@@ -0,0 +1,176 @@
+from typing import List
+
+import numpy as np
+import pandas as pd
+import pytest
+
+import paint.util.paint_mappings as mappings
+import paint.util.utils
+
+
+def test_calculate_azimuth_and_elevation() -> None:
+ """Test the calculation of azimuth and elevation based on sun position vectors."""
+ sun_positions = pd.DataFrame(
+ data={
+ mappings.SUN_POSITION_EAST: [1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.5, -1.0],
+ mappings.SUN_POSITION_NORTH: [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.5, -2.0],
+ mappings.SUN_POSITION_UP: [0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 10.0, -3.0],
+ }
+ )
+ azimuth, elevation = paint.util.utils.calculate_azimuth_and_elevation(sun_positions)
+
+ expected_azimuth = np.array(
+ [90.0, 180.0, 180.0, 135.0, 90.0, 180.0, 135.0, 135.0, -26.56505118]
+ )
+ expected_elevation = np.array(
+ [0.0, 0.0, 90.0, 0.0, 45.0, 45.0, 35.26438968, 85.95530876, -53.3007748]
+ )
+
+ assert (np.isclose(azimuth, expected_azimuth)).all()
+ assert (np.isclose(elevation, expected_elevation)).all()
+
+
+@pytest.mark.parametrize(
+ "heliostat_id, heliostat_name", [(10841, "AH41"), (11537, "AO37"), (20352, "BC52")]
+)
+def test_heliostat_id_to_name(heliostat_id: int, heliostat_name: str) -> None:
+ """
+ Test conversion of heliostat ids to their string representation.
+
+ Parameters
+ ----------
+ heliostat_id : int
+ The heliostat ID as number
+ heliostat_name : str
+ The expected heliostat ID as string
+ """
+ assert paint.util.utils.heliostat_id_to_name(heliostat_id) == heliostat_name
+
+
+def test_to_utc() -> None:
+ """Test conversion of datetime strings from the Europe/Berlin timezone to UTC timestamps."""
+ time_strings = pd.Series(
+ ["2022-06-01 11:08:45", "2022-10-27 03:05:55", "2022-06-23 13:07:36"]
+ )
+ utc_timestamps = paint.util.utils.to_utc(time_strings)
+
+ expected = pd.Series(
+ [
+ pd.Timestamp(
+ year=2022, month=6, day=1, hour=9, minute=8, second=45, tz="UTC"
+ ),
+ pd.Timestamp(
+ year=2022, month=10, day=27, hour=1, minute=5, second=55, tz="UTC"
+ ),
+ pd.Timestamp(
+ year=2022, month=6, day=23, hour=11, minute=7, second=36, tz="UTC"
+ ),
+ ]
+ )
+
+ assert (utc_timestamps == expected).all()
+
+
+@pytest.mark.parametrize(
+ "original_time, expected_utc_time",
+ [
+ ("03:23:45 01-11-2023", "2023-01-11Z02:23:45Z"),
+ ("20220405074517", "2022-04-05Z05:45:17Z"),
+ ],
+)
+def test_single_time_conversion(original_time: str, expected_utc_time: str) -> None:
+ """
+ Test conversion of single string local times to UTC times.
+
+ Parameters
+ ----------
+ original_time : str
+ The original time string in the local time zone.
+ expected_utc_time : str
+ The expected time string in UTC time zone.
+ """
+ assert paint.util.utils.to_utc_single(original_time) == expected_utc_time
+
+
+@pytest.mark.parametrize(
+ "north_offset_m, east_offset_m, expected_lat, expected_lon",
+ [
+ (0.0, 0.0, mappings.POWER_PLANT_LAT, mappings.POWER_PLANT_LON),
+ (10.0, 30.0, 50.91338624175841, 6.38794141670515),
+ (200.7, 37803.9, 50.91510045120202, 6.925048549014668),
+ ],
+)
+def test_add_offset_lat_lon(
+ north_offset_m: float,
+ east_offset_m: float,
+ expected_lat: float,
+ expected_lon: float,
+) -> None:
+ """
+ Test function that adds offset to latitude and longitude coordinates.
+
+ Parameters
+ ----------
+ north_offset_m : float
+ The offset in the north direction in meters.
+ east_offset_m : float
+ The offset in the east direction in meters.
+ expected_lat : float
+ The expected latitude in degrees.
+ expected_lon : float
+ The expected longitude in degrees.
+ """
+ lat, lon = paint.util.utils.add_offset_to_lat_lon(
+ north_offset_m=north_offset_m, east_offset_m=east_offset_m
+ )
+ assert lat == expected_lat
+ assert lon == expected_lon
+
+
+@pytest.mark.parametrize(
+ "heliostat_lat, heliostat_lon, heliostat_alt, expected_position",
+ [
+ (
+ mappings.POWER_PLANT_LAT,
+ mappings.POWER_PLANT_LON,
+ mappings.POWER_PLANT_ALT,
+ [0.0, 0.0, 0.0],
+ ),
+ (
+ 50.91338624175841,
+ 6.38794141670515,
+ 27.0,
+ [10.000000155562523, 29.99994221199063, -60.0],
+ ),
+ (
+ 50.91510045120202,
+ 6.925048549014668,
+ 450,
+ [200.7000623622324, 37802.43847705173, 363],
+ ),
+ ],
+)
+def test_calculate_heliostat_position(
+ heliostat_lat: float,
+ heliostat_lon: float,
+ heliostat_alt: float,
+ expected_position: List[float],
+) -> None:
+ """
+ Test function that calculates the heliostat position in meters given the latitude, longitude and altitude.
+
+ Parameters
+ ----------
+ heliostat_lat: float
+ The latitude of the heliostat in degrees.
+ heliostat_lon: float
+ The longitude of the heliostat in degrees.
+ heliostat_alt: float
+ The altitude of the heliostat in meters.
+ expected_position: List[float]
+ The expected heliostat position in meters.
+ """
+ position = paint.util.utils.calculate_heliostat_position_in_m_from_lat_lon(
+ lat1=heliostat_lat, lon1=heliostat_lon, alt=heliostat_alt
+ )
+ assert position == expected_position