-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat(endpoint_onchain_checkpoints): Added onchain/checkpoints/ endpoint
- Loading branch information
Showing
9 changed files
with
322 additions
and
28 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -101,9 +101,19 @@ docker compose -f compose.dev.yaml up -d --build | |
|
||
### 2. Fill the database | ||
|
||
To do so, you can either use a backup or run the indexer (or both): | ||
The database tables are created automatically using the migrations in the `infra/pragma-node/postgres_migrations` folder. | ||
However, you need to fill the tables with data. To do so, you can either run the indexer or use a backup: | ||
|
||
#### A. Use the backup (ask for a file): | ||
#### Run the indexer: | ||
|
||
```bash | ||
git clone git@github.com:astraly-labs/indexer-service.git | ||
cd indexer-service | ||
# Index & fill the spot_entry (testnet) table | ||
apibara run examples/pragma/testnet/sepolia-script-spot.js -A [YOUR_APIBARA_API_KEY] --connection-string postgres://postgres:test-password@localhost:5433/pragma --table-name spot_entry --timeout-duration-seconds=240 | ||
``` | ||
|
||
#### Use the backup (ask for a file): | ||
|
||
```bash | ||
# copy the backup file to the container | ||
|
@@ -114,15 +124,6 @@ docker exec -it pragma-node-postgre-db-1 bash | |
PGPASSWORD=test-password pg_restore -h postgre-db -U postgres -d pragma /backup.sql | ||
``` | ||
|
||
#### B. Run the indexer: | ||
|
||
```bash | ||
git clone git@github.com:astraly-labs/indexer-service.git | ||
cd indexer-service | ||
# Index & fill the spot_entry (testnet) table | ||
apibara run examples/pragma/testnet/sepolia-script-spot.js -A [YOUR_APIBARA_API_KEY] --connection-string postgres://postgres:test-password@localhost:5433/pragma --table-name spot_entry --timeout-duration-seconds=240 | ||
``` | ||
|
||
### 3. Export the required environment variables: | ||
|
||
```bash | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,116 @@ | ||
-- Spot price entries indexed from Starknet mainnet (filled by the
-- indexer-service; see README). NOTE(review): column meanings inferred
-- from names — confirm against the indexer scripts that write this table.
CREATE TABLE mainnet_spot_entry ( | ||
network character varying(255), | ||
pair_id character varying(255), | ||
-- Only NOT NULL column; presumably a unique per-row identifier — TODO confirm.
data_id character varying(255) NOT NULL, | ||
block_hash character varying(255), | ||
block_number bigint, | ||
block_timestamp timestamp without time zone, | ||
transaction_hash character varying(255), | ||
price numeric, | ||
timestamp timestamp without time zone, | ||
publisher character varying(255), | ||
source character varying(255), | ||
volume numeric, | ||
-- NOTE(review): presumably the Apibara indexer cursor — confirm.
_cursor bigint | ||
); | ||
|
||
-- Testnet counterpart of mainnet_spot_entry (same schema). The README's
-- apibara indexer command fills this table ("spot_entry (testnet)").
CREATE TABLE spot_entry ( | ||
network character varying(255), | ||
pair_id character varying(255), | ||
-- Only NOT NULL column; presumably a unique per-row identifier — TODO confirm.
data_id character varying(255) NOT NULL, | ||
block_hash character varying(255), | ||
block_number bigint, | ||
block_timestamp timestamp without time zone, | ||
transaction_hash character varying(255), | ||
price numeric, | ||
timestamp timestamp without time zone, | ||
publisher character varying(255), | ||
source character varying(255), | ||
volume numeric, | ||
-- NOTE(review): presumably the Apibara indexer cursor — confirm.
_cursor bigint | ||
); | ||
|
||
|
||
-- Future price entries for Starknet mainnet. Same columns as
-- mainnet_spot_entry plus expiration_timestamp; note data_id is nullable
-- here, unlike in the spot tables — NOTE(review): confirm this asymmetry
-- is intentional.
CREATE TABLE mainnet_future_entry ( | ||
network character varying(255), | ||
pair_id character varying(255), | ||
data_id character varying(255), | ||
block_hash character varying(255), | ||
block_number bigint, | ||
block_timestamp timestamp without time zone, | ||
transaction_hash character varying(255), | ||
price numeric, | ||
timestamp timestamp without time zone, | ||
publisher character varying(255), | ||
source character varying(255), | ||
volume numeric, | ||
_cursor bigint, | ||
-- Expiry of the future contract the entry refers to — TODO confirm.
expiration_timestamp timestamp without time zone | ||
); | ||
|
||
-- Testnet counterpart of mainnet_future_entry (same schema, data_id
-- nullable here as well).
CREATE TABLE future_entry ( | ||
network character varying(255), | ||
pair_id character varying(255), | ||
data_id character varying(255), | ||
block_hash character varying(255), | ||
block_number bigint, | ||
block_timestamp timestamp without time zone, | ||
transaction_hash character varying(255), | ||
price numeric, | ||
timestamp timestamp without time zone, | ||
publisher character varying(255), | ||
source character varying(255), | ||
volume numeric, | ||
_cursor bigint, | ||
-- Expiry of the future contract the entry refers to — TODO confirm.
expiration_timestamp timestamp without time zone | ||
); | ||
|
||
-- Onchain checkpoint rows for mainnet spot pairs. NOTE(review): presumably
-- read by the onchain/checkpoints endpoint added in this commit — confirm
-- against onchain_repository::get_checkpoints.
CREATE TABLE mainnet_spot_checkpoints ( | ||
network character varying(255), | ||
pair_id character varying(255), | ||
-- Only NOT NULL column; presumably a unique per-row identifier — TODO confirm.
data_id character varying(255) NOT NULL, | ||
block_hash character varying(255), | ||
block_number bigint, | ||
block_timestamp timestamp without time zone, | ||
transaction_hash character varying(255), | ||
price numeric, | ||
-- Address that submitted the checkpoint transaction — TODO confirm.
sender_address character varying(255), | ||
-- NOTE(review): stored as numeric; presumably encodes an aggregation-mode enum.
aggregation_mode numeric, | ||
_cursor bigint, | ||
timestamp timestamp without time zone, | ||
nb_sources_aggregated numeric | ||
); | ||
|
||
-- Testnet counterpart of mainnet_spot_checkpoints (same schema).
CREATE TABLE spot_checkpoints ( | ||
network character varying(255), | ||
pair_id character varying(255), | ||
-- Only NOT NULL column; presumably a unique per-row identifier — TODO confirm.
data_id character varying(255) NOT NULL, | ||
block_hash character varying(255), | ||
block_number bigint, | ||
block_timestamp timestamp without time zone, | ||
transaction_hash character varying(255), | ||
price numeric, | ||
-- Address that submitted the checkpoint transaction — TODO confirm.
sender_address character varying(255), | ||
-- NOTE(review): stored as numeric; presumably encodes an aggregation-mode enum.
aggregation_mode numeric, | ||
_cursor bigint, | ||
timestamp timestamp without time zone, | ||
nb_sources_aggregated numeric | ||
); | ||
|
||
-- VRF (verifiable random function) request lifecycle rows: creation tx,
-- callback parameters, and latest status. NOTE(review): semantics of the
-- numeric status codes are not visible here — confirm against the indexer.
CREATE TABLE vrf_requests ( | ||
network character varying(255), | ||
request_id numeric, | ||
seed numeric, | ||
created_at timestamp without time zone, | ||
-- Hash of the transaction that created the request — TODO confirm.
created_at_tx character varying(255), | ||
callback_address character varying(255), | ||
callback_fee_limit numeric, | ||
num_words numeric, | ||
requestor_address character varying(255), | ||
updated_at timestamp without time zone, | ||
-- Hash of the transaction of the latest status update — TODO confirm.
updated_at_tx character varying(255), | ||
status numeric, | ||
minimum_block_number numeric, | ||
_cursor bigint, | ||
data_id character varying(255) | ||
); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
63 changes: 61 additions & 2 deletions
63
pragma-node/src/handlers/entries/get_onchain/checkpoints.rs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,2 +1,61 @@ | ||
// TODO(akhercha): checkpoints endpoint | ||
// https://buildonpragma.notion.site/Pragma-API-fc14ba680030470cab61ee58098b135f | ||
use axum::extract::{Query, State}; | ||
use axum::Json; | ||
use pragma_entities::EntryError; | ||
|
||
use crate::handlers::entries::utils::currency_pair_to_pair_id; | ||
use crate::handlers::entries::{GetOnchainCheckpointsParams, GetOnchainCheckpointsResponse}; | ||
use crate::infra::repositories::entry_repository::get_decimals; | ||
use crate::infra::repositories::onchain_repository::get_checkpoints; | ||
use crate::utils::PathExtractor; | ||
use crate::AppState; | ||
|
||
pub const DEFAULT_LIMIT: u64 = 100; | ||
pub const MAX_LIMIT: u64 = 1000; | ||
|
||
#[utoipa::path( | ||
get, | ||
path = "/node/v1/onchain/checkpoints/{base}/{quote}", | ||
responses( | ||
(status = 200, description = "Get the onchain checkpoints for a pair", body = GetOnchainCheckpointsResponse) | ||
), | ||
params( | ||
("base" = String, Path, description = "Base Asset"), | ||
("quote" = String, Path, description = "Quote Asset"), | ||
("network" = Network, Query, description = "Network"), | ||
("limit" = Option<u64>, Query, description = "Limit of response size") | ||
), | ||
)] | ||
pub async fn get_onchain_checkpoints( | ||
State(state): State<AppState>, | ||
PathExtractor(pair): PathExtractor<(String, String)>, | ||
Query(params): Query<GetOnchainCheckpointsParams>, | ||
) -> Result<Json<GetOnchainCheckpointsResponse>, EntryError> { | ||
tracing::info!("Received get onchain entry request for pair {:?}", pair); | ||
|
||
let pair_id: String = currency_pair_to_pair_id(&pair.0, &pair.1); | ||
let limit = if let Some(limit) = params.limit { | ||
if (limit == 0) || (limit > MAX_LIMIT) { | ||
// TODO(akhercha): not so great error kind | ||
return Err(EntryError::InvalidLimit(limit)); | ||
} | ||
limit | ||
} else { | ||
DEFAULT_LIMIT | ||
}; | ||
|
||
let decimals = get_decimals(&state.timescale_pool, &pair_id) | ||
.await | ||
.map_err(|db_error| db_error.to_entry_error(&pair_id))?; | ||
|
||
let checkpoints = get_checkpoints( | ||
&state.postgres_pool, | ||
params.network, | ||
pair_id.clone(), | ||
decimals, | ||
limit, | ||
) | ||
.await | ||
.map_err(|db_error| db_error.to_entry_error(&pair_id))?; | ||
|
||
Ok(Json(GetOnchainCheckpointsResponse(checkpoints))) | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.