Skip to content

Commit

Permalink
feat(endpoint_onchain_checkpoints): Added onchain/checkpoints/ endpoint
Browse files Browse the repository at this point in the history
  • Loading branch information
akhercha committed May 28, 2024
1 parent a25465b commit df7a168
Show file tree
Hide file tree
Showing 9 changed files with 322 additions and 28 deletions.
23 changes: 12 additions & 11 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -101,9 +101,19 @@ docker compose -f compose.dev.yaml up -d --build

### 2. Fill the database

To do so, you can either use a backup or run the indexer (or both):
The database tables are created automatically using the migrations in the `infra/pragma-node/postgres_migrations` folder.
However, you need to fill the tables with data. To do so, you can either run the indexer or use a backup:

#### A. Use the backup (ask for a file):
#### Run the indexer:

```bash
git clone [email protected]:astraly-labs/indexer-service.git
cd indexer-service
# Index & fill the spot_entry (testnet) table
apibara run examples/pragma/testnet/sepolia-script-spot.js -A [YOUR_APIBARA_API_KEY] --connection-string postgres://postgres:test-password@localhost:5433/pragma --table-name spot_entry --timeout-duration-seconds=240
```

#### Use a backup (ask the team for a backup file):

```bash
# copy the backup file to the container
Expand All @@ -114,15 +124,6 @@ docker exec -it pragma-node-postgre-db-1 bash
PGPASSWORD=test-password pg_restore -h postgre-db -U postgres -d pragma /backup.sql
```

#### B. Run the indexer:

```bash
git clone [email protected]:astraly-labs/indexer-service.git
cd indexer-service
# Index & fill the spot_entry (testnet) table
apibara run examples/pragma/testnet/sepolia-script-spot.js -A [YOUR_APIBARA_API_KEY] --connection-string postgres://postgres:test-password@localhost:5433/pragma --table-name spot_entry --timeout-duration-seconds=240
```

### 3. Export the required environment variables:

```bash
Expand Down
3 changes: 3 additions & 0 deletions compose.dev.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@ services:
- POSTGRES_DB=pragma
- POSTGRES_LISTEN_ADDRESSES=*
- PGPORT=5433
      # Run the SQL migrations automatically on first startup (scripts in
      # docker-entrypoint-initdb.d only execute when the data directory is empty)
volumes:
- ./infra/pragma-node/postgres_migrations:/docker-entrypoint-initdb.d
networks:
- pragma-db-network
ports:
Expand Down
116 changes: 116 additions & 0 deletions infra/pragma-node/postgres_migrations/01-init.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
-- Spot price entries indexed from mainnet (mainnet variant of spot_entry below).
-- Filled by the external indexer-service (see README, "Fill the database").
CREATE TABLE mainnet_spot_entry (
network character varying(255),
pair_id character varying(255),
data_id character varying(255) NOT NULL, -- unique row identifier produced by the indexer
block_hash character varying(255),
block_number bigint,
block_timestamp timestamp without time zone,
transaction_hash character varying(255),
price numeric,
timestamp timestamp without time zone,
publisher character varying(255),
source character varying(255),
volume numeric,
_cursor bigint -- presumably the indexer's resume cursor; confirm with indexer-service
);

-- Spot price entries for testnet (README: the sepolia indexer script fills this table).
-- Same schema as mainnet_spot_entry above.
CREATE TABLE spot_entry (
network character varying(255),
pair_id character varying(255),
data_id character varying(255) NOT NULL, -- unique row identifier produced by the indexer
block_hash character varying(255),
block_number bigint,
block_timestamp timestamp without time zone,
transaction_hash character varying(255),
price numeric,
timestamp timestamp without time zone,
publisher character varying(255),
source character varying(255),
volume numeric,
_cursor bigint -- presumably the indexer's resume cursor; confirm with indexer-service
);


-- Future price entries indexed from mainnet. Same columns as the spot tables
-- plus expiration_timestamp; NOTE(review): data_id is nullable here but NOT NULL
-- in the spot tables — confirm whether that difference is intentional.
CREATE TABLE mainnet_future_entry (
network character varying(255),
pair_id character varying(255),
data_id character varying(255),
block_hash character varying(255),
block_number bigint,
block_timestamp timestamp without time zone,
transaction_hash character varying(255),
price numeric,
timestamp timestamp without time zone,
publisher character varying(255),
source character varying(255),
volume numeric,
_cursor bigint,
expiration_timestamp timestamp without time zone -- expiry of the future contract
);

-- Future price entries for testnet; mirrors mainnet_future_entry above.
CREATE TABLE future_entry (
network character varying(255),
pair_id character varying(255),
data_id character varying(255),
block_hash character varying(255),
block_number bigint,
block_timestamp timestamp without time zone,
transaction_hash character varying(255),
price numeric,
timestamp timestamp without time zone,
publisher character varying(255),
source character varying(255),
volume numeric,
_cursor bigint,
expiration_timestamp timestamp without time zone -- expiry of the future contract
);

-- Spot price checkpoints written on-chain (mainnet). Presumably the backing
-- table for the /onchain/checkpoints endpoint — confirm in onchain_repository.
CREATE TABLE mainnet_spot_checkpoints (
network character varying(255),
pair_id character varying(255),
data_id character varying(255) NOT NULL,
block_hash character varying(255),
block_number bigint,
block_timestamp timestamp without time zone,
transaction_hash character varying(255),
price numeric,
sender_address character varying(255), -- address that submitted the checkpoint
aggregation_mode numeric,
_cursor bigint,
timestamp timestamp without time zone,
nb_sources_aggregated numeric -- number of sources combined into this checkpoint
);

-- Spot price checkpoints for testnet; mirrors mainnet_spot_checkpoints above.
CREATE TABLE spot_checkpoints (
network character varying(255),
pair_id character varying(255),
data_id character varying(255) NOT NULL,
block_hash character varying(255),
block_number bigint,
block_timestamp timestamp without time zone,
transaction_hash character varying(255),
price numeric,
sender_address character varying(255), -- address that submitted the checkpoint
aggregation_mode numeric,
_cursor bigint,
timestamp timestamp without time zone,
nb_sources_aggregated numeric -- number of sources combined into this checkpoint
);

-- On-chain VRF (verifiable random function) requests and their lifecycle.
CREATE TABLE vrf_requests (
network character varying(255),
request_id numeric,
seed numeric,
created_at timestamp without time zone,
created_at_tx character varying(255), -- tx hash that created the request
callback_address character varying(255),
callback_fee_limit numeric,
num_words numeric, -- number of random words requested
requestor_address character varying(255),
updated_at timestamp without time zone,
updated_at_tx character varying(255), -- tx hash of the latest status update
status numeric, -- NOTE(review): numeric status code — meaning defined by the indexer, confirm mapping
minimum_block_number numeric,
_cursor bigint,
data_id character varying(255)
);
2 changes: 2 additions & 0 deletions pragma-entities/src/models/entry_error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,8 @@ pub enum EntryError {
PublishData(String),
#[error("can't build publish message: {0}")]
BuildPublish(String),
#[error("limit maximum is 1000, current is: {0}")]
InvalidLimit(u64),
}

impl IntoResponse for EntryError {
Expand Down
63 changes: 61 additions & 2 deletions pragma-node/src/handlers/entries/get_onchain/checkpoints.rs
Original file line number Diff line number Diff line change
@@ -1,2 +1,61 @@
// TODO(akhercha): checkpoints endpoint
// https://buildonpragma.notion.site/Pragma-API-fc14ba680030470cab61ee58098b135f
use axum::extract::{Query, State};
use axum::Json;
use pragma_entities::EntryError;

use crate::handlers::entries::utils::currency_pair_to_pair_id;
use crate::handlers::entries::{GetOnchainCheckpointsParams, GetOnchainCheckpointsResponse};
use crate::infra::repositories::entry_repository::get_decimals;
use crate::infra::repositories::onchain_repository::get_checkpoints;
use crate::utils::PathExtractor;
use crate::AppState;

/// Number of checkpoints returned when the caller does not pass `limit`.
pub const DEFAULT_LIMIT: u64 = 100;
/// Upper bound accepted for the `limit` query parameter.
pub const MAX_LIMIT: u64 = 1000;

#[utoipa::path(
    get,
    path = "/node/v1/onchain/checkpoints/{base}/{quote}",
    responses(
        (status = 200, description = "Get the onchain checkpoints for a pair", body = GetOnchainCheckpointsResponse)
    ),
    params(
        ("base" = String, Path, description = "Base Asset"),
        ("quote" = String, Path, description = "Quote Asset"),
        ("network" = Network, Query, description = "Network"),
        ("limit" = Option<u64>, Query, description = "Limit of response size")
    ),
)]
/// Handler for `GET /node/v1/onchain/checkpoints/{base}/{quote}`.
///
/// Resolves the pair's decimals, then fetches up to `limit` checkpoints for
/// the pair on the requested network. Returns `EntryError::InvalidLimit` when
/// `limit` is 0 or exceeds `MAX_LIMIT`.
pub async fn get_onchain_checkpoints(
    State(state): State<AppState>,
    PathExtractor(pair): PathExtractor<(String, String)>,
    Query(params): Query<GetOnchainCheckpointsParams>,
) -> Result<Json<GetOnchainCheckpointsResponse>, EntryError> {
    tracing::info!("Received get onchain entry request for pair {:?}", pair);

    let pair_id: String = currency_pair_to_pair_id(&pair.0, &pair.1);

    // Default when absent; reject 0 and anything above MAX_LIMIT.
    // NOTE(review): InvalidLimit is also used for limit == 0, which the error
    // message does not mention — error kind could be refined later.
    let limit = match params.limit {
        None => DEFAULT_LIMIT,
        Some(requested) if requested == 0 || requested > MAX_LIMIT => {
            return Err(EntryError::InvalidLimit(requested));
        }
        Some(requested) => requested,
    };

    // Decimals come from the offchain (timescale) metadata for the pair.
    let decimals = get_decimals(&state.timescale_pool, &pair_id)
        .await
        .map_err(|e| e.to_entry_error(&pair_id))?;

    let checkpoints = get_checkpoints(
        &state.postgres_pool,
        params.network,
        pair_id.clone(),
        decimals,
        limit,
    )
    .await
    .map_err(|e| e.to_entry_error(&pair_id))?;

    Ok(Json(GetOnchainCheckpointsResponse(checkpoints)))
}
26 changes: 26 additions & 0 deletions pragma-node/src/handlers/entries/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,32 @@ pub struct GetOnchainResponse {
components: Vec<OnchainEntry>,
}

/// Query parameters accepted by `GET /node/v1/onchain/checkpoints/{base}/{quote}`.
#[derive(Debug, Deserialize, IntoParams, ToSchema)]
pub struct GetOnchainCheckpointsParams {
/// On-chain network to read checkpoints from.
pub network: Network,
/// Maximum number of checkpoints to return; the handler falls back to its
/// DEFAULT_LIMIT (100) when absent and rejects 0 or values above 1000.
pub limit: Option<u64>,
}

impl Default for GetOnchainCheckpointsParams {
fn default() -> Self {
Self {
network: Network::default(),
limit: Some(100),
}
}
}

/// One on-chain checkpoint, as serialized in the checkpoints endpoint response.
#[derive(Debug, Serialize, Deserialize, ToSchema)]
pub struct Checkpoint {
/// Hash of the transaction that wrote the checkpoint on-chain.
pub tx_hash: String,
/// Checkpoint price, already rendered as a string by the repository layer
/// (presumably decimal-adjusted — confirm in onchain_repository).
pub price: String,
/// Checkpoint timestamp; presumably Unix seconds — TODO confirm.
pub timestamp: u64,
/// Address that submitted the checkpoint.
pub sender_address: String,
}

/// Response body of the checkpoints endpoint: newest-first list of checkpoints
/// (ordering presumed — confirm in onchain_repository::get_checkpoints).
#[derive(Debug, Serialize, Deserialize, ToSchema)]
pub struct GetOnchainCheckpointsResponse(pub Vec<Checkpoint>);

/// Query parameters structs
// Supported Aggregation Intervals
Expand Down
Loading

0 comments on commit df7a168

Please sign in to comment.