Skip to content

Commit

Permalink
Assert response values instead of just printing them
Browse files Browse the repository at this point in the history
  • Loading branch information
Lun4m committed Jun 14, 2024
1 parent 35078ee commit 7938e51
Showing 1 changed file with 28 additions and 21 deletions.
49 changes: 28 additions & 21 deletions lard_tests/tests/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,9 @@ async fn api_test_wrapper<T: Future<Output = ()>>(test: T) {
#[tokio::test]
async fn test_stations_endpoint_irregular() {
api_test_wrapper(async {
// NOTE: tseries.header.totime == Utc.now()
let station_id = 18;
let param_id = 103;
let expected_data_len = 21;

let url = format!(
"http://localhost:3000/stations/{}/params/{}",
Expand All @@ -27,11 +27,12 @@ async fn test_stations_endpoint_irregular() {
let resp = reqwest::get(url).await.unwrap();
assert!(resp.status().is_success());

let json = resp.text().await.unwrap();
println!("\n{}\n", json);
// assert_eq!(json, expected);
let resp: common::StationsResponse = resp.json().await.unwrap();
assert_eq!(resp.tseries.len(), 1);

// TODO: do something else with the response
let ts = &resp.tseries[0];
assert_eq!(ts.regularity, "Irregular");
assert_eq!(ts.data.len(), expected_data_len);
})
.await
}
Expand All @@ -42,6 +43,9 @@ async fn test_stations_endpoint_regular() {
let station_id = 20000;
let param_id = 211;
let resolution = "PT1M";
// TODO: This works if the test runs less than a minute after the DB.
// It's probably better to use hourly data here or have a timeseries with a set totime
let expected_data_len = 181;

let url = format!(
"http://localhost:3000/stations/{}/params/{}?time_resolution={}",
Expand All @@ -50,51 +54,54 @@ async fn test_stations_endpoint_regular() {
let resp = reqwest::get(url).await.unwrap();
assert!(resp.status().is_success());

let json = resp.text().await.unwrap();
println!("\n{}\n", json);
// assert_eq!(json, expected);
let resp: common::StationsResponse = resp.json().await.unwrap();
assert_eq!(resp.tseries.len(), 1);

// TODO: do something else with the response
let ts = &resp.tseries[0];
assert_eq!(ts.regularity, "Regular");
assert_eq!(ts.data.len(), expected_data_len);
})
.await
}

#[tokio::test]
async fn test_latest_endpoint() {
    api_test_wrapper(async {
        // Number of observations the fixture data should yield for this cutoff.
        let expected_data_len = 4;

        // Ask for the latest observations no older than the given timestamp.
        let query = "?latest_max_age=2012-02-14T12:00:00Z";
        let url = format!("http://localhost:3000/latest{}", query);

        let response = reqwest::get(url).await.unwrap();
        assert!(response.status().is_success());

        // Deserialize the body and verify the number of returned entries.
        let latest: common::LatestResponse = response.json().await.unwrap();
        assert_eq!(latest.data.len(), expected_data_len);
    })
    .await
}

#[tokio::test]
async fn test_timeslice_endpoint() {
    // Request all data for one parameter at a single point in time and check
    // that the response echoes back the requested slice.
    api_test_wrapper(async {
        let time = Utc.with_ymd_and_hms(2023, 5, 5, 00, 30, 00).unwrap();
        let param_id = 3;
        // NOTE(review): the fixture currently yields an empty slice at this
        // timestamp — confirm this is the intended test setup.
        let expected_data_len = 0;

        let url = format!(
            "http://localhost:3000/timeslices/{}/params/{}",
            time, param_id
        );

        let resp = reqwest::get(url).await.unwrap();
        assert!(resp.status().is_success());

        // Exactly one slice is expected for a single (time, param) query.
        let json: common::TimesliceResponse = resp.json().await.unwrap();
        assert_eq!(json.tslices.len(), 1);

        // The slice should carry the requested param and timestamp, with the
        // expected number of data points.
        let slice = &json.tslices[0];
        assert_eq!(slice.param_id, param_id);
        assert_eq!(slice.timestamp, time);
        assert_eq!(slice.data.len(), expected_data_len);
    })
    .await
}

0 comments on commit 7938e51

Please sign in to comment.