diff --git a/.env.example b/.env.example
new file mode 100644
index 0000000..c747410
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,14 @@
+# API Server
+PORT=8080
+HOSTNAME=localhost
+
+# Clickhouse Database
+HOST=http://127.0.0.1:8123
+DATABASE=default
+USERNAME=default
+PASSWORD=
+TABLE=
+MAX_LIMIT=500
+
+# Logging
+VERBOSE=true
diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml
new file mode 100644
index 0000000..141dd73
--- /dev/null
+++ b/.github/release-drafter.yml
@@ -0,0 +1,54 @@
+name-template: 'v$RESOLVED_VERSION'
+tag-template: 'v$RESOLVED_VERSION'
+categories:
+  - title: '✨ Features'
+    labels:
+      - 'feature'
+  - title: '🐛 Bug Fixes'
+    labels:
+      - 'fix'
+      - 'bugfix'
+      - 'bug'
+  - title: '📝 Documentation'
+    labels:
+      - 'documentation'
+  - title: '🔧 Operations'
+    label: 'ops'
+change-template: '- $TITLE @$AUTHOR (#$NUMBER)'
+change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
+version-resolver:
+  major:
+    labels:
+      - 'major'
+  minor:
+    labels:
+      - 'minor'
+  patch:
+    labels:
+      - 'patch'
+  default: patch
+template: |
+  ## Changes
+  $CHANGES
+
+  **Full Changelog**: https://github.com/pinax-network/antelope-token-api/compare/$PREVIOUS_TAG...v$RESOLVED_VERSION
+
+autolabeler:
+  - label: 'documentation'
+    branch:
+      - '/docs\/.+/'
+    files:
+      - '*.md'
+  - label: 'bug'
+    branch:
+      - '/fix\/.+/'
+  - label: 'feature'
+    branch:
+      - '/feature\/.+/'
+  - label: 'ops'
+    files:
+      - '.github/*.yml'
+      - '.github/workflows/*.yml'
+      - '.gitignore'
+      - 'tsconfig.json'
+      - 'Dockerfile'
\ No newline at end of file
diff --git a/.github/workflows/bun-build.yml b/.github/workflows/bun-build.yml
new file mode 100644
index 0000000..35ddef1
--- /dev/null
+++ b/.github/workflows/bun-build.yml
@@ -0,0 +1,27 @@
+name: Build
+on:
+  release:
+    types: [ published ]
+
+permissions:
+  contents: write
+
+jobs:
+  bun-build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: oven-sh/setup-bun@v1
with: + bun-version: latest + + - name: 'Install Dependencies' + run: bun install + + - name: 'Build app' + run: bun run build + + - uses: softprops/action-gh-release@v1 + with: + files: | + substreams-clock-api diff --git a/.github/workflows/bun-test.yml b/.github/workflows/bun-test.yml new file mode 100644 index 0000000..246edb3 --- /dev/null +++ b/.github/workflows/bun-test.yml @@ -0,0 +1,33 @@ +name: Test + +on: push + +jobs: + bun-test: + runs-on: ubuntu-latest + environment: dev-test + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Install bun + uses: oven-sh/setup-bun@v1 + + - name: 'Install Dependencies' + run: | + bun install + + - name: 'Run lint' + run: | + bun lint + + - name: 'Run test' + run: | + bun test + env: + PORT: ${{ vars.PORT }} + HOSTNAME: ${{ vars.HOSTNAME }} + HOST: ${{ vars.HOST }} + USERNAME: ${{ secrets.USERNAME }} + PASSWORD: ${{ secrets.PASSWORD }} + TABLE: ${{ secrets.TABLE }} diff --git a/.github/workflows/ghcr.yml b/.github/workflows/ghcr.yml new file mode 100644 index 0000000..feca44f --- /dev/null +++ b/.github/workflows/ghcr.yml @@ -0,0 +1,42 @@ +name: GitHub Container Registry +on: + release: + types: [ published ] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + ghcr: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Log in to the Container registry + uses: docker/login-action@v2 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) for Docker + id: meta + uses: docker/metadata-action@v4 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=semver,pattern={{raw}} + + - name: Build and push Docker image + uses: docker/build-push-action@v4 + with: + context: . 
+ push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} \ No newline at end of file diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml new file mode 100644 index 0000000..12b1f6c --- /dev/null +++ b/.github/workflows/release-drafter.yml @@ -0,0 +1,22 @@ +name: Release Drafter + +on: + push: + branches: + - main + pull_request: + types: [opened, reopened, synchronize] + +permissions: + contents: read + +jobs: + update_release_draft: + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: release-drafter/release-drafter@v5 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..90c68ed --- /dev/null +++ b/.gitignore @@ -0,0 +1,140 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +lerna-debug.log* +.pnpm-debug.log* +package-lock.json + +# Diagnostic reports (https://nodejs.org/api/report.html) +report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json + +# Runtime data +pids +*.pid +*.seed +*.pid.lock + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage +*.lcov + +# nyc test coverage +.nyc_output + +# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Bower dependency directory (https://bower.io/) +bower_components + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (https://nodejs.org/api/addons.html) +build/Release + +# Dependency directories +node_modules/ +jspm_packages/ + +# Snowpack dependency directory (https://snowpack.dev/) +web_modules/ + +# TypeScript cache +*.tsbuildinfo + +# Optional npm cache directory +.npm + +# Optional eslint cache +.eslintcache + +# Optional stylelint cache +.stylelintcache + +# Microbundle cache +.rpt2_cache/ +.rts2_cache_cjs/ +.rts2_cache_es/ +.rts2_cache_umd/ + +# Optional REPL history 
+.node_repl_history
+
+# Output of 'npm pack'
+*.tgz
+
+# Yarn Integrity file
+.yarn-integrity
+
+# dotenv environment variable files
+.env
+.env.development.local
+.env.test.local
+.env.production.local
+.env.local
+
+# parcel-bundler cache (https://parceljs.org/)
+.cache
+.parcel-cache
+
+# Next.js build output
+.next
+out
+
+# Nuxt.js build / generate output
+.nuxt
+dist
+
+# Gatsby files
+.cache/
+# Comment in the public line in if your project uses Gatsby and not Next.js
+# https://nextjs.org/blog/next-9-1#public-directory-support
+# public
+
+# vuepress build output
+.vuepress/dist
+
+# vuepress v2.x temp and cache directory
+.temp
+.cache
+
+# Docusaurus cache and generated files
+.docusaurus
+
+# Serverless directories
+.serverless/
+
+# FuseBox cache
+.fusebox/
+
+# DynamoDB Local files
+.dynamodb/
+
+# TernJS port file
+.tern-port
+
+# Stores VSCode versions used for testing VSCode extensions
+.vscode-test
+
+# yarn v2
+.yarn/cache
+.yarn/unplugged
+.yarn/build-state.yml
+.yarn/install-state.gz
+.pnp.*
+
+# Sublime Text
+*.sublime*
+
+# Local clickhouse DB
+cursor.lock
+
+# CLI
+antelope-token-api
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..4472b9c
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,4 @@
+FROM oven/bun
+COPY . .
+RUN bun install +ENTRYPOINT [ "bun", "./index.ts" ] \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..3f7af7b --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Pinax + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..107cb12
--- /dev/null
+++ b/README.md
@@ -0,0 +1,77 @@
+# Antelope Token API
+
+[![.github/workflows/bun-test.yml](https://github.com/pinax-network/antelope-token-api/actions/workflows/bun-test.yml/badge.svg)](https://github.com/pinax-network/antelope-token-api/actions/workflows/bun-test.yml)
+
+> Token prices from the Antelope blockchains
+
+## REST API
+
+| Pathname                                  | Description           |
+|-------------------------------------------|-----------------------|
+| GET `/chains`                             | Available `chains`
+| GET `/health`                             | Health check
+| GET `/metrics`                            | Prometheus metrics
+| GET `/openapi`                            | [OpenAPI v3 JSON](https://spec.openapis.org/oas/v3.0.0)
+
+## Requirements
+
+- [ClickHouse](https://clickhouse.com/)
+- [Substreams Sink ClickHouse](https://github.com/pinax-network/substreams-sink-clickhouse/)
+
+## Quickstart
+
+```console
+$ bun install
+$ bun dev
+```
+
+## [`Bun` Binary Releases](https://github.com/pinax-network/antelope-token-api/releases)
+
+> Linux Only
+
+```console
+$ wget https://github.com/pinax-network/antelope-token-api/releases/download/v0.1.0/antelope-token-api
+$ chmod +x ./antelope-token-api
+```
+
+## `.env` Environment variables
+
+```env
+# API Server
+PORT=8080
+HOSTNAME=localhost
+
+# Clickhouse Database
+HOST=http://127.0.0.1:8123
+DATABASE=default
+USERNAME=default
+PASSWORD=
+TABLE=
+MAX_LIMIT=500
+
+# Logging
+VERBOSE=true
+```
+
+## Help
+
+```console
+
+```
+
+## Docker environment
+
+Pull from GitHub Container registry
+```bash
+docker pull ghcr.io/pinax-network/antelope-token-api:latest
+```
+
+Build from source
+```bash
+docker build -t antelope-token-api .
+``` + +Run with `.env` file +```bash +docker run -it --rm --env-file .env ghcr.io/pinax-network/antelope-token-api +``` diff --git a/bun.lockb b/bun.lockb new file mode 100755 index 0000000..e0c0471 Binary files /dev/null and b/bun.lockb differ diff --git a/index.ts b/index.ts new file mode 100644 index 0000000..0829e2c --- /dev/null +++ b/index.ts @@ -0,0 +1,18 @@ +import { config } from "./src/config.js"; +import { logger } from "./src/logger.js"; +import GET from "./src/fetch/GET.js"; +import * as prometheus from "./src/prometheus.js"; + +if (config.verbose) logger.enable(); + +const app = Bun.serve({ + hostname: config.hostname, + port: config.port, + fetch(req: Request) { + if (req.method === "GET") return GET(req); + prometheus.request_error.inc({pathname: new URL(req.url).pathname, status: 400}); + return new Response("Invalid request", { status: 400 }); + } +}); + +logger.info(`Server listening on http://${app.hostname}:${app.port}`); \ No newline at end of file diff --git a/package.json b/package.json new file mode 100644 index 0000000..5612dd7 --- /dev/null +++ b/package.json @@ -0,0 +1,40 @@ +{ + "name": "antelope-token-api", + "description": "Token prices from the Antelope blockchains", + "version": "0.1.0", + "homepage": "https://github.com/pinax-network/antelope-token-api", + "license": "MIT", + "type": "module", + "authors": [ + { + "name": "Etienne Donneger", + "email": "etienne@pinax.network", + "url": "https://github.com/Krow10" + }, + { + "name": "Denis Carriere", + "email": "denis@pinax.network", + "url": "https://github.com/DenisCarriere/" + } + ], + "scripts": { + "start": "bun index.ts", + "dev": "bun --watch index.ts", + "lint": "bunx tsc --noEmit --skipLibCheck --pretty", + "test": "bun test --coverage", + "build": "bun build --compile ./index.ts --outfile antelope-token-api" + }, + "dependencies": { + "@clickhouse/client-web": "latest", + "commander": "latest", + "dotenv": "latest", + "openapi3-ts": "latest", + "prom-client": "latest", + 
"tslog": "latest", + "zod": "latest" + }, + "devDependencies": { + "bun-types": "latest", + "typescript": "latest" + } +} diff --git a/src/clickhouse/createClient.ts b/src/clickhouse/createClient.ts new file mode 100644 index 0000000..e566dfe --- /dev/null +++ b/src/clickhouse/createClient.ts @@ -0,0 +1,20 @@ +import { createClient } from "@clickhouse/client-web"; +import { ping } from "./ping.js"; +import { APP_NAME, config } from "../config.js"; + +const client = createClient({ + ...config, + clickhouse_settings: { + allow_experimental_object_type: 1, + readonly: "1", + }, + application: APP_NAME, +}) + +// These overrides should not be required but the @clickhouse/client-web instance +// does not work well with Bun's implementation of Node streams. +// https://github.com/oven-sh/bun/issues/5470 +client.command = client.exec; +client.ping = ping; + +export default client; \ No newline at end of file diff --git a/src/clickhouse/makeQuery.ts b/src/clickhouse/makeQuery.ts new file mode 100644 index 0000000..c61c0c9 --- /dev/null +++ b/src/clickhouse/makeQuery.ts @@ -0,0 +1,29 @@ +import { logger } from "../logger.js"; +import * as prometheus from "../prometheus.js"; +import client from "./createClient.js"; + +export interface Meta { + name: string, + type: string +} +export interface Query { + meta: Meta[], + data: T[], + rows: number, + statistics: { + elapsed: number, + rows_read: number, + bytes_read: number, + } +} + +export async function makeQuery(query: string) { + const response = await client.query({ query }) + const data: Query = await response.json(); + prometheus.query.inc(); + prometheus.bytes_read.inc(data.statistics.bytes_read); + prometheus.rows_read.inc(data.statistics.rows_read); + prometheus.elapsed.inc(data.statistics.elapsed); + logger.info({ query, statistics: data.statistics, rows: data.rows }); + return data; +} diff --git a/src/clickhouse/ping.ts b/src/clickhouse/ping.ts new file mode 100644 index 0000000..5646c6e --- /dev/null +++ 
b/src/clickhouse/ping.ts @@ -0,0 +1,13 @@ +import { PingResult } from "@clickhouse/client-web"; +import client from "./createClient.js"; + +// Does not work with Bun's implementation of Node streams. +export async function ping(): Promise { + try { + await client.exec({ query: "SELECT 1" }); + return { success: true }; + } catch (err) { + const message = typeof err === "string" ? err : JSON.stringify(err); + return { success: false, error: new Error(message) }; + } +}; \ No newline at end of file diff --git a/src/config.ts b/src/config.ts new file mode 100644 index 0000000..0a8dd73 --- /dev/null +++ b/src/config.ts @@ -0,0 +1,47 @@ +import "dotenv/config"; +import { z } from 'zod'; +import { Option, program } from "commander"; + +import pkg from "../package.json"; + +export const DEFAULT_PORT = "8080"; +export const DEFAULT_HOSTNAME = "localhost"; +export const DEFAULT_HOST = "http://localhost:8123"; +export const DEFAULT_DATABASE = "default"; +export const DEFAULT_TABLE = "blocks"; +export const DEFAULT_USERNAME = "default"; +export const DEFAULT_PASSWORD = ""; +export const DEFAULT_MAX_LIMIT = 500; +export const DEFAULT_VERBOSE = false; +export const APP_NAME = pkg.name; +export const DEFAULT_SORT_BY = "DESC"; + +// parse command line options +const opts = program + .name(pkg.name) + .version(pkg.version) + .description(pkg.description) + .showHelpAfterError() + .addOption(new Option("-p, --port ", "HTTP port on which to attach the API").env("PORT").default(DEFAULT_PORT)) + .addOption(new Option("-v, --verbose ", "Enable verbose logging").choices(["true", "false"]).env("VERBOSE").default(DEFAULT_VERBOSE)) + .addOption(new Option("--hostname ", "Server listen on HTTP hostname").env("HOSTNAME").default(DEFAULT_HOSTNAME)) + .addOption(new Option("--host ", "Database HTTP hostname").env("HOST").default(DEFAULT_HOST)) + .addOption(new Option("--username ", "Database user").env("USERNAME").default(DEFAULT_USERNAME)) + .addOption(new Option("--password ", "Password 
associated with the specified username").env("PASSWORD").default(DEFAULT_PASSWORD)) + .addOption(new Option("--database ", "The database to use inside ClickHouse").env("DATABASE").default(DEFAULT_DATABASE)) + .addOption(new Option("--table ", "Clickhouse table name").env("TABLE").default(DEFAULT_TABLE)) + .addOption(new Option("--max-limit ", "Maximum LIMIT queries").env("MAX_LIMIT").default(DEFAULT_MAX_LIMIT)) + .parse() + .opts(); + +export const config = z.object({ + port: z.string(), + hostname: z.string(), + host: z.string(), + table: z.string(), + database: z.string(), + username: z.string(), + password: z.string(), + maxLimit: z.coerce.number(), + verbose: z.coerce.boolean(), +}).parse(opts); diff --git a/src/fetch/GET.ts b/src/fetch/GET.ts new file mode 100644 index 0000000..2b75de3 --- /dev/null +++ b/src/fetch/GET.ts @@ -0,0 +1,24 @@ +import { registry } from "../prometheus.js"; +import openapi from "./openapi.js"; +import health from "./health.js"; +import chains from "./chains.js"; +import block from "./block.js"; +import * as prometheus from "../prometheus.js"; +import { logger } from "../logger.js"; +import swaggerHtml from "../../swagger/index.html" +import swaggerFavicon from "../../swagger/favicon.png" + +export default async function (req: Request) { + const { pathname} = new URL(req.url); + prometheus.request.inc({pathname}); + if ( pathname === "/" ) return new Response(Bun.file(swaggerHtml)); + if ( pathname === "/favicon.png" ) return new Response(Bun.file(swaggerFavicon)); + if ( pathname === "/health" ) return health(req); + if ( pathname === "/metrics" ) return new Response(await registry.metrics(), {headers: {"Content-Type": registry.contentType}}); + if ( pathname === "/openapi" ) return new Response(openapi, {headers: {"Content-Type": "application/json"}}); + if ( pathname === "/chains" ) return chains(req); + //if ( pathname === "/block" ) return block(req); + logger.warn(`Not found: ${pathname}`); + 
prometheus.request_error.inc({pathname, status: 404}); + return new Response("Not found", { status: 404 }); +} diff --git a/src/fetch/block.ts b/src/fetch/block.ts new file mode 100644 index 0000000..c58dd4a --- /dev/null +++ b/src/fetch/block.ts @@ -0,0 +1,18 @@ +import { makeQuery } from "../clickhouse/makeQuery.js"; +import { logger } from "../logger.js"; +import { Block, getBlock } from "../queries.js"; +import * as prometheus from "../prometheus.js"; + +export default async function (req: Request) { + try { + const { searchParams } = new URL(req.url); + logger.info({searchParams: Object.fromEntries(Array.from(searchParams))}); + const query = await getBlock(searchParams); + const response = await makeQuery(query) + return new Response(JSON.stringify(response.data), { headers: { "Content-Type": "application/json" } }); + } catch (e: any) { + logger.error(e); + prometheus.request_error.inc({pathname: "/block", status: 400}); + return new Response(e.message, { status: 400 }); + } +} \ No newline at end of file diff --git a/src/fetch/chains.ts b/src/fetch/chains.ts new file mode 100644 index 0000000..d8c192d --- /dev/null +++ b/src/fetch/chains.ts @@ -0,0 +1,20 @@ +import { makeQuery } from "../clickhouse/makeQuery.js"; +import { logger } from "../logger.js"; +import * as prometheus from "../prometheus.js"; +import { getChain } from "../queries.js"; + +export async function supportedChainsQuery() { + const response = await makeQuery<{chain: string}>(getChain()); + return response.data.map((r) => r.chain); +} + +export default async function (req: Request) { + try { + const chains = await supportedChainsQuery(); + return new Response(JSON.stringify(chains), { headers: { "Content-Type": "application/json" } }); + } catch (e: any) { + logger.error(e); + prometheus.request_error.inc({pathname: "/chains", status: 400}); + return new Response(e.message, { status: 400 }); + } +} \ No newline at end of file diff --git a/src/fetch/health.ts b/src/fetch/health.ts new file 
mode 100644 index 0000000..a0b6684 --- /dev/null +++ b/src/fetch/health.ts @@ -0,0 +1,16 @@ +import client from "../clickhouse/createClient.js"; +import { logger } from "../logger.js"; +import * as prometheus from "../prometheus.js"; + +export default async function (req: Request) { + try { + const response = await client.ping(); + if (response.success === false) throw new Error(response.error.message); + if (response.success === true ) return new Response("OK"); + return new Response("Unknown response from ClickHouse"); + } catch (e: any) { + logger.error(e); + prometheus.request_error.inc({ pathname: "/health", status: 500}); + return new Response(e.message, { status: 500 }); + } +} \ No newline at end of file diff --git a/src/fetch/openapi.ts b/src/fetch/openapi.ts new file mode 100644 index 0000000..dbdc721 --- /dev/null +++ b/src/fetch/openapi.ts @@ -0,0 +1,161 @@ +import pkg from "../../package.json" assert { type: "json" }; + +import { OpenApiBuilder, SchemaObject, ExampleObject, ParameterObject } from "openapi3-ts/oas31"; +import { config } from "../config.js"; +import { getBlock } from "../queries.js"; +import { registry } from "../prometheus.js"; +import { makeQuery } from "../clickhouse/makeQuery.js"; +import { supportedChainsQuery } from "./chains.js"; + +const TAGS = { + MONITORING: "Monitoring", + HEALTH: "Health", + USAGE: "Usage", + DOCS: "Documentation", +} as const; + +const chains = await supportedChainsQuery(); +const block_example = (await makeQuery(await getBlock( new URLSearchParams({limit: "2"})))).data; + +const timestampSchema: SchemaObject = { anyOf: [ + {type: "number"}, + {type: "string", format: "date"}, + {type: "string", format: "date-time"} + ] +}; +const timestampExamples: ExampleObject = { + unix: { summary: `Unix Timestamp (seconds)` }, + date: { summary: `Full-date notation`, value: '2023-10-18' }, + datetime: { summary: `Date-time notation`, value: '2023-10-18T00:00:00Z'}, +} + +export default new OpenApiBuilder() + .addInfo({ 
+ title: pkg.name, + version: pkg.version, + description: pkg.description, + license: {name: pkg.license}, + }) + .addExternalDocs({ url: pkg.homepage, description: "Extra documentation" }) + .addSecurityScheme("auth-key", { type: "http", scheme: "bearer" }) + .addPath("/chains", { + get: { + tags: [TAGS.USAGE], + summary: 'Supported chains', + responses: { + 200: { + description: "Array of chains", + content: { + "application/json": { + schema: { type: "array" }, + example: chains, + } + }, + }, + }, + }, + }) + .addPath("/block", { + get: { + tags: [TAGS.USAGE], + summary: "Get block", + description: "Get block by `block_number`, `block_id` or `timestamp`", + parameters: [ + { + name: "chain", + in: "query", + description: "Filter by chain", + required: false, + schema: {enum: chains}, + }, + { + name: "block_number", + description: "Filter by Block number (ex: 18399498)", + in: "query", + required: false, + schema: { type: "number" }, + }, + { + name: "block_id", + in: "query", + description: "Filter by Block hash ID (ex: 00fef8cf2a2c73266f7c0b71fb5762f9a36419e51a7c05b0e82f9e3bacb859bc)", + required: false, + schema: { type: "string" }, + }, + { + name: 'timestamp', + in: 'query', + description: 'Filter by exact timestamp', + required: false, + schema: timestampSchema, + examples: timestampExamples, + }, + { + name: "final_block", + description: "If true, only returns final blocks", + in: "query", + required: false, + schema: { type: "boolean" }, + }, + { + name: "sort_by", + in: "query", + description: "Sort by `block_number`", + required: false, + schema: {enum: ['ASC', 'DESC'] }, + }, + ...["greater_or_equals_by_timestamp", "greater_by_timestamp", "less_or_equals_by_timestamp", "less_by_timestamp"].map(name => { + return { + name, + in: "query", + description: "Filter " + name.replace(/_/g, " "), + required: false, + schema: timestampSchema, + examples: timestampExamples, + } as ParameterObject + }), + ...["greater_or_equals_by_block_number", 
"greater_by_block_number", "less_or_equals_by_block_number", "less_by_block_number"].map(name => { + return { + name, + in: "query", + description: "Filter " + name.replace(/_/g, " "), + required: false, + schema: { type: "number" }, + } as ParameterObject + }), + { + name: "limit", + in: "query", + description: "Used to specify the number of records to return.", + required: false, + schema: { type: "number", maximum: config.maxLimit, minimum: 1 }, + }, + ], + responses: { + 200: { description: "Array of blocks", content: { "application/json": { example: block_example, schema: { type: "array" } } } }, + 400: { description: "Bad request" }, + }, + }, + }) + .addPath("/health", { + get: { + tags: [TAGS.HEALTH], + summary: "Performs health checks and checks if the database is accessible", + responses: {200: { description: "OK", content: { "text/plain": {example: "OK"}} } }, + }, + }) + .addPath("/metrics", { + get: { + tags: [TAGS.MONITORING], + summary: "Prometheus metrics", + responses: {200: { description: "Prometheus metrics", content: { "text/plain": { example: await registry.metrics(), schema: { type: "string" } } }}}, + }, + }) + .addPath("/openapi", { + get: { + tags: [TAGS.DOCS], + summary: "OpenAPI specification", + responses: {200: {description: "OpenAPI JSON Specification", content: { "application/json": { schema: { type: "string" } } } }}, + }, + }) + .getSpecAsJson(); \ No newline at end of file diff --git a/src/logger.ts b/src/logger.ts new file mode 100644 index 0000000..60b635a --- /dev/null +++ b/src/logger.ts @@ -0,0 +1,21 @@ +import { Logger, type ILogObj } from "tslog"; +import { name } from "../package.json" assert { type: "json" }; + +class TsLogger extends Logger { + constructor() { + super(); + this.settings.minLevel = 5; + this.settings.name = name; + } + + public enable(type: "pretty" | "json" = "pretty") { + this.settings.type = type; + this.settings.minLevel = 0; + } + + public disable() { + this.settings.type = "hidden"; + } +} + +export 
const logger = new TsLogger(); \ No newline at end of file diff --git a/src/prometheus.ts b/src/prometheus.ts new file mode 100644 index 0000000..0235628 --- /dev/null +++ b/src/prometheus.ts @@ -0,0 +1,40 @@ +// From https://github.com/pinax-network/substreams-sink-websockets/blob/main/src/prometheus.ts +import client, { Counter, CounterConfiguration, Gauge, GaugeConfiguration } from 'prom-client'; + +export const registry = new client.Registry(); + +// Metrics +export function registerCounter(name: string, help = "help", labelNames: string[] = [], config?: CounterConfiguration) { + try { + registry.registerMetric(new Counter({ name, help, labelNames, ...config })); + return registry.getSingleMetric(name) as Counter; + } catch (e) { + console.error({name, e}); + throw new Error(`${e}`); + } +} + +export function registerGauge(name: string, help = "help", labelNames: string[] = [], config?: GaugeConfiguration) { + try { + registry.registerMetric(new Gauge({ name, help, labelNames, ...config })); + return registry.getSingleMetric(name) as Gauge; + } catch (e) { + console.error({name, e}); + throw new Error(`${e}`); + } +} + +export async function getSingleMetric(name: string) { + const metric = registry.getSingleMetric(name); + const get = await metric?.get(); + return get?.values[0].value; +} + +// REST API metrics +export const request_error = registerCounter('request_error', 'Total Requests errors', ['pathname', 'status']); +export const request = registerCounter('request', 'Total Requests', ['pathname']); +export const query = registerCounter('query', 'Clickhouse DB queries made'); +export const bytes_read = registerCounter('bytes_read', 'Clickhouse DB Statistics bytes read'); +export const rows_read = registerCounter('rows_read', 'Clickhouse DB Statistics rows read'); +export const elapsed = registerCounter('elapsed', 'Clickhouse DB Statistics query elapsed time'); + diff --git a/src/queries.spec.ts b/src/queries.spec.ts new file mode 100644 index 
0000000..55a290e --- /dev/null +++ b/src/queries.spec.ts @@ -0,0 +1,14 @@ +import { expect, test } from "bun:test"; +import { getBlock, getChain } from "./queries.js"; + +test.skip("getBlock", () => { + expect(getBlock(new URLSearchParams({ chain: "eth", block_number: "123" }))) + .toBe(`SELECT * FROM blocks WHERE (chain == 'eth' AND block_number == '123') ORDER BY block_number DESC LIMIT 1`); + + expect(getBlock(new URLSearchParams({ chain: "eth", greater_or_equals_by_timestamp: '1438270048', less_or_equals_by_timestamp: '1438270083', limit: '3' }))) + .toBe(`SELECT * FROM blocks WHERE (toUnixTimestamp(timestamp) >= 1438270048 AND toUnixTimestamp(timestamp) <= 1438270083 AND chain == 'eth') ORDER BY block_number DESC LIMIT 3`); +}); + +test("getChain", () => { + expect(getChain()).toBe(`SELECT DISTINCT chain FROM module_hashes`); +}); \ No newline at end of file diff --git a/src/queries.ts b/src/queries.ts new file mode 100644 index 0000000..1bcb53a --- /dev/null +++ b/src/queries.ts @@ -0,0 +1,56 @@ +import { DEFAULT_SORT_BY, config } from './config.js'; +import { parseBlockId, parseLimit, parseTimestamp } from './utils.js'; + +export interface Block { + block_number: number; + block_id: string; + timestamp: string; + chain: string; +} + +export function getBlock(searchParams: URLSearchParams) { + // TO-DO: Modulo block number (ex: search by every 1M blocks) + + // SQL Query + let query = `SELECT * FROM ${config.table}`; + const where = []; + + // Clickhouse Operators + // https://clickhouse.com/docs/en/sql-reference/operators + const operators = [ + ["greater_or_equals", ">="], + ["greater", ">"], + ["less_or_equals", "<="], + ["less", "<"], + ] + for ( const [key, operator] of operators ) { + const block_number = searchParams.get(`${key}_by_block_number`); + const timestamp = parseTimestamp(searchParams.get(`${key}_by_timestamp`)); + if (block_number) where.push(`block_number ${operator} ${block_number}`); + if (timestamp) where.push(`toUnixTimestamp(timestamp) 
${operator} ${timestamp}`); + } + + // equals + const chain = searchParams.get("chain"); + const block_id = parseBlockId(searchParams.get("block_id")); + const block_number = searchParams.get('block_number'); + const timestamp = parseTimestamp(searchParams.get('timestamp')); + if (chain) where.push(`chain == '${chain}'`); + if (block_id) where.push(`block_id == '${block_id}'`); + if (block_number) where.push(`block_number == '${block_number}'`); + if (timestamp) where.push(`toUnixTimestamp(timestamp) == ${timestamp}`); + + // Join WHERE statements with AND + if ( where.length ) query += ` WHERE (${where.join(' AND ')})`; + + // Sort and Limit + const limit = parseLimit(searchParams.get("limit")); + const sort_by = searchParams.get("sort_by"); + query += ` ORDER BY block_number ${sort_by ?? DEFAULT_SORT_BY}` + query += ` LIMIT ${limit}` + return query; +} + +export function getChain() { + return `SELECT DISTINCT chain FROM module_hashes`; +} diff --git a/src/types.d.ts b/src/types.d.ts new file mode 100644 index 0000000..18c71fb --- /dev/null +++ b/src/types.d.ts @@ -0,0 +1,14 @@ +declare module "*.png" { + const content: string; + export default content; + } + +declare module "*.html" { + const content: string; + export default content; +} + +declare module "*.sql" { + const content: string; + export default content; +} \ No newline at end of file diff --git a/src/utils.spec.ts b/src/utils.spec.ts new file mode 100644 index 0000000..c74ddc3 --- /dev/null +++ b/src/utils.spec.ts @@ -0,0 +1,41 @@ +import { expect, test } from "bun:test"; +import { parseBlockId, parseLimit, parseTimestamp } from "./utils.js"; +import { DEFAULT_MAX_LIMIT } from "./config.js"; + +test("parseBlockId", () => { + expect(parseBlockId()).toBeUndefined(); + expect(parseBlockId(null)).toBeUndefined(); + expect(parseBlockId("00fef8cf2a2c73266f7c0b71fb5762f9a36419e51a7c05b0e82f9e3bacb859bc")).toBe("00fef8cf2a2c73266f7c0b71fb5762f9a36419e51a7c05b0e82f9e3bacb859bc"); + 
expect(parseBlockId("0x00fef8cf2a2c73266f7c0b71fb5762f9a36419e51a7c05b0e82f9e3bacb859bc")).toBe("00fef8cf2a2c73266f7c0b71fb5762f9a36419e51a7c05b0e82f9e3bacb859bc"); +}); + +test("parseLimit", () => { + expect(parseLimit()).toBe(1); + expect(parseLimit(null)).toBe(1); + expect(parseLimit("10")).toBe(10); + expect(parseLimit(10)).toBe(10); + expect(parseLimit(999999)).toBe(DEFAULT_MAX_LIMIT); +}); + +test("parseTimestamp", () => { + const seconds = 1672531200; + expect(parseTimestamp()).toBeUndefined(); + expect(parseTimestamp(null)).toBeUndefined(); + expect(parseTimestamp(1672531200000)).toBe(seconds); // Milliseconds (13 digits) => Seconds (10 digits) + expect(parseTimestamp("1672531200")).toBe(seconds); + expect(parseTimestamp(1672531200000)).toBe(seconds); + expect(parseTimestamp("2023-01-01T00:00:00.000Z")).toBe(seconds); + expect(parseTimestamp("2023-01-01T00:00:00.000")).toBe(seconds); + expect(parseTimestamp("2023-01-01 00:00:00")).toBe(seconds); // Datetime + expect(parseTimestamp("2023-01-01T00:00:00Z")).toBe(seconds); // ISO + expect(parseTimestamp("2023-01-01T00:00:00")).toBe(seconds); + expect(parseTimestamp("2023-01-01")).toBe(seconds); + expect(parseTimestamp("2023-01")).toBe(seconds); + expect(parseTimestamp(Number(new Date("2023")))).toBe(seconds); + + // errors + expect(() => parseTimestamp(10)).toThrow("Invalid timestamp"); + expect(() => parseTimestamp("10")).toThrow("Invalid timestamp"); +}); + + diff --git a/src/utils.ts b/src/utils.ts new file mode 100644 index 0000000..0d1d106 --- /dev/null +++ b/src/utils.ts @@ -0,0 +1,36 @@ +import { config } from "./config.js"; + +export function parseLimit(limit?: string|null|number) { + let value = 1; // default 1 + if (limit) { + if (typeof limit === "string") value = parseInt(limit); + if (typeof limit === "number") value = limit; + } + // limit must be between 1 and maxLimit + if ( value > config.maxLimit ) value = config.maxLimit; + return value; +} + +export function parseBlockId(block_id?: 
string|null) { + return block_id ? block_id.replace("0x", "") : undefined; +} + +export function parseTimestamp(timestamp?: string|null|number) { + if (timestamp !== undefined && timestamp !== null) { + if (typeof timestamp === "string") { + if (/^[0-9]+$/.test(timestamp)) { + return parseTimestamp(parseInt(timestamp)); + } + // append "Z" to timestamp if it doesn't have it + if (!timestamp.endsWith("Z")) timestamp += "Z"; + return Math.floor(Number(new Date(timestamp)) / 1000); + } + if (typeof timestamp === "number") { + const length = timestamp.toString().length; + if ( length === 10 ) return timestamp; // seconds + if ( length === 13 ) return Math.floor(timestamp / 1000); // convert milliseconds to seconds + throw new Error("Invalid timestamp"); + } + } + return undefined; +} diff --git a/swagger/favicon.png b/swagger/favicon.png new file mode 100644 index 0000000..ee33cc0 Binary files /dev/null and b/swagger/favicon.png differ diff --git a/swagger/index.html b/swagger/index.html new file mode 100644 index 0000000..77a6915 --- /dev/null +++ b/swagger/index.html @@ -0,0 +1,32 @@ + + + + + + + Substreams Antelope Token API - SwaggerUI + + + + +
+ + + + + \ No newline at end of file diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..7d8b6dd --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "ESNext", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "resolveJsonModule": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "outDir": "./dist/", + "strict": true, + "noImplicitAny": true, + "strictNullChecks": true, + "alwaysStrict": true, + "skipLibCheck": true, + "types": ["bun-types"] + } +}