Add metadata information to API responses
The metadata contains pagination information for the returned results.
The following fields are available:
- `next_page`, capped to `total_pages` on reaching the last page
- `previous_page`, set to 1 on the first page
- `total_pages`, computed from the `limit` parameter
- `total_results`, returned from `rows_before_limit_at_least` of ClickHouse's
  response (https://clickhouse.com/docs/en/interfaces/formats#json)
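
For illustration, a response wrapped with this metadata takes roughly the following shape; the concrete values below are hypothetical:

```ts
// Hypothetical paginated response for limit = 5, page = 3, 25 matching rows in total.
const exampleResponse = {
  data: [
    /* up to `limit` result rows */
  ],
  meta: {
    next_page: 4,      // would be capped to total_pages on the last page
    previous_page: 2,  // would be 1 on the first page
    total_pages: 5,    // Math.ceil(total_results / limit)
    total_results: 25, // ClickHouse's rows_before_limit_at_least
  },
};
```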
0237h committed Apr 19, 2024
1 parent 3a338bf commit b507988
Showing 6 changed files with 94 additions and 9 deletions.
16 changes: 13 additions & 3 deletions src/clickhouse/makeQuery.ts
@@ -10,6 +10,7 @@ export interface Query<T> {
meta: Meta[],
data: T[],
rows: number,
rows_before_limit_at_least: number,
statistics: {
elapsed: number,
rows_read: number,
@@ -21,7 +22,7 @@ export async function makeQuery<T = unknown>(query: string) {
try {
const response = await client.query({ query })
const data: Query<T> = await response.json();

prometheus.query.inc();
prometheus.bytes_read.inc(data.statistics.bytes_read);
prometheus.rows_read.inc(data.statistics.rows_read);
@@ -32,7 +33,16 @@ export async function makeQuery<T = unknown>(query: string) {
} catch (e: any) {
logger.error(e.message)

return { data: [] }
return {
meta: [],
data: [],
rows: 0,
rows_before_limit_at_least: 0,
statistics: {
elapsed: 0,
rows_read: 0,
bytes_read: 0,
}
};
}

}
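
For reference, `Query<T>` mirrors the shape of ClickHouse's `JSON` output format, where `rows_before_limit_at_least` is a lower bound on the number of result rows there would have been without a `LIMIT` clause. A minimal sketch of such a response, with made-up values:

```ts
// Illustrative ClickHouse JSON-format response as modelled by Query<T>.
// Field values are hypothetical; the shape follows the interface above.
const exampleClickHouseResponse = {
  meta: [
    { name: "account", type: "String" },
    { name: "balance", type: "String" },
  ],
  data: [
    { account: "alice", balance: "100.0000" },
    { account: "bob", balance: "42.0000" },
  ],
  rows: 2,                        // rows actually returned (after LIMIT)
  rows_before_limit_at_least: 25, // lower bound on rows before LIMIT was applied
  statistics: {
    elapsed: 0.002,
    rows_read: 25,
    bytes_read: 1024,
  },
};
```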
12 changes: 10 additions & 2 deletions src/fetch/balance.ts
@@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
import { logger } from "../logger.js";
import { getBalanceChanges } from "../queries.js";
import * as prometheus from "../prometheus.js";
import { toJSON } from "./utils.js";
import { addMetadata, toJSON } from "./utils.js";
import { parseLimit, parsePage } from "../utils.js";

function verifyParams(searchParams: URLSearchParams) {
const account = searchParams.get("account");
@@ -20,7 +21,14 @@ export default async function (req: Request) {
const query = getBalanceChanges(searchParams);
const response = await makeQuery(query)

return toJSON(response.data);
return toJSON(
addMetadata(
response.data,
response.rows_before_limit_at_least,
parseLimit(searchParams.get("limit")),
parsePage(searchParams.get("page"))
)
);
} catch (e: any) {
logger.error(e);
prometheus.request_error.inc({ pathname: "/balance", status: 400 });
12 changes: 10 additions & 2 deletions src/fetch/supply.ts
@@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
import { logger } from "../logger.js";
import { getTotalSupply } from "../queries.js";
import * as prometheus from "../prometheus.js";
import { toJSON } from "./utils.js";
import { addMetadata, toJSON } from "./utils.js";
import { parseLimit, parsePage } from "../utils.js";

function verifyParams(searchParams: URLSearchParams) {
const contract = searchParams.get("contract");
@@ -20,7 +21,14 @@ export default async function (req: Request) {
const query = getTotalSupply(searchParams);
const response = await makeQuery(query)

return toJSON(response.data);
return toJSON(
addMetadata(
response.data,
response.rows_before_limit_at_least,
parseLimit(searchParams.get("limit")),
parsePage(searchParams.get("page"))
)
);
} catch (e: any) {
logger.error(e);
prometheus.request_error.inc({ pathname: "/supply", status: 400 });
12 changes: 10 additions & 2 deletions src/fetch/transfers.ts
@@ -2,7 +2,8 @@ import { makeQuery } from "../clickhouse/makeQuery.js";
import { logger } from "../logger.js";
import { getTransfers } from "../queries.js";
import * as prometheus from "../prometheus.js";
import { toJSON } from "./utils.js";
import { addMetadata, toJSON } from "./utils.js";
import { parseLimit, parsePage } from "../utils.js";

export default async function (req: Request) {
try {
@@ -12,7 +13,14 @@ export default async function (req: Request) {
const query = getTransfers(searchParams);
const response = await makeQuery(query)

return toJSON(response.data);
return toJSON(
addMetadata(
response.data,
response.rows_before_limit_at_least,
parseLimit(searchParams.get("limit")),
parsePage(searchParams.get("page"))
)
);
} catch (e: any) {
logger.error(e);
prometheus.request_error.inc({ pathname: "/transfers", status: 400 });
38 changes: 38 additions & 0 deletions src/fetch/utils.spec.ts
@@ -0,0 +1,38 @@
import { expect, test } from "bun:test";
import { addMetadata } from "./utils.js";

test("addMetadata pagination", () => {
const limit = 5;
const mock_query_reponse = {
data: Array(limit),
rows: limit,
rows_before_limit_at_least: 5*limit, // Simulate query with more total results than the query limit making pagination relevant
};

const first_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 1);
expect(first_page.meta.next_page).toBe(2);
expect(first_page.meta.previous_page).toBe(1); // Previous page should be set to 1 on first page
expect(first_page.meta.total_pages).toBe(5);
expect(first_page.meta.total_results).toBe(5*limit);

const odd_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 3);
expect(odd_page.meta.next_page).toBe(4);
expect(odd_page.meta.previous_page).toBe(2);
expect(odd_page.meta.total_pages).toBe(5);
expect(odd_page.meta.total_results).toBe(5*limit);

const even_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 4);
expect(even_page.meta.next_page).toBe(5);
expect(even_page.meta.previous_page).toBe(3);
expect(even_page.meta.total_pages).toBe(5);
expect(even_page.meta.total_results).toBe(5*limit);

const last_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 5);
expect(last_page.meta.next_page).toBe(last_page.meta.total_pages); // Next page should be capped to total_pages on last page
expect(last_page.meta.previous_page).toBe(4);
expect(last_page.meta.total_pages).toBe(5);
expect(last_page.meta.total_results).toBe(5*limit);

// TODO: Expect error message on beyond last page
// const beyond_last_page = addMetadata(mock_query_reponse.data, mock_query_reponse.rows_before_limit_at_least, limit, 6);
});
13 changes: 13 additions & 0 deletions src/fetch/utils.ts
@@ -1,3 +1,16 @@
export function toJSON(data: any, status: number = 200) {
    return new Response(JSON.stringify(data), { status, headers: { "Content-Type": "application/json" } });
}

export function addMetadata(data: any[], total_before_limit: number, limit: number, page: number) {
    // TODO: Catch page number greater than total_pages and return error
    return {
        data,
        meta: {
            "next_page": (page * limit >= total_before_limit) ? page : page + 1,
            "previous_page": (page <= 1) ? page : page - 1,
            "total_pages": Math.ceil(total_before_limit / limit),
            "total_results": total_before_limit
        }
    }
}
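
As a usage sketch with hypothetical inputs, the helper combines the page parameters into the `meta` object like so:

```ts
// Usage sketch for addMetadata (inputs are made up):
// 25 matching rows in total, limit = 5, requesting page 1.
const page = addMetadata([/* up to 5 result rows */], 25, 5, 1);
// page.meta => { next_page: 2, previous_page: 1, total_pages: 5, total_results: 25 }
```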
