Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fast transactions paging using block_time #39

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 65 additions & 0 deletions endpoints/get_address_transactions.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from pydantic import BaseModel
from sqlalchemy import text, func
from sqlalchemy.future import select
from starlette.responses import Response

from dbsession import async_session
from endpoints import sql_db_only
Expand Down Expand Up @@ -129,6 +130,70 @@ async def get_full_transactions_for_address(
resolve_previous_outpoints)


@app.get("/addresses/{kaspaAddress}/full-transactions-page",
         response_model=List[TxModel],
         response_model_exclude_unset=True,
         tags=["Kaspa addresses"])
@sql_db_only
async def get_full_transactions_for_address_page(
        response: Response,
        kaspaAddress: str = Path(
            description="Kaspa address as string e.g. "
                        "kaspa:pzhh76qc82wzduvsrd9xh4zde9qhp0xc8rl7qu2mvl2e42uvdqt75zrcgpm00",
            regex=r"^kaspa\:[a-z0-9]{61,63}$"),
        limit: int = Query(
            description="The max number of records to get. "
                        "For paging combine with using 'before' from oldest previous result, "
                        "repeat until an **empty** resultset is returned."
                        "The actual number of transactions returned can be higher if there are transactions with the same block time at the limit.",
            ge=1,
            le=500,
            default=50),
        before: int = Query(
            description="Only include transactions with block time before this (epoch-millis)",
            ge=0,
            default=0),
        fields: str = "",
        resolve_previous_outpoints: PreviousOutpointLookupMode =
        Query(default="no",
              description=DESC_RESOLVE_PARAM)):
    """
    Get one page of transactions for a given address from the database,
    newest first, keyed on block_time, then resolve their full transaction data.

    Paging contract: pass the oldest block_time of the previous page (also
    exposed via the X-Oldest-Epoch-Millis response header) as 'before' to
    fetch the next page; an empty result list means there are no older
    transactions. Response headers:
      X-Current-Page        -- number of transaction ids in this page
      X-Oldest-Epoch-Millis -- block_time cursor for the next 'before' value
    """
    async with async_session() as s:
        # Query ids and block times once so the same result set can be
        # re-used downstream for tx_list, TxInput and TxOutput.
        before = int(time.time() * 1000) if before == 0 else before
        tx_within_limit_before = await s.execute(
            select(TxAddrMapping.transaction_id,
                   TxAddrMapping.block_time)
            .filter(TxAddrMapping.address == kaspaAddress)
            .filter(TxAddrMapping.block_time < before)
            .order_by(TxAddrMapping.block_time.desc())
            .limit(limit))

        tx_ids_and_block_times = [(x.transaction_id, x.block_time)
                                  for x in tx_within_limit_before.all()]

        # BUG FIX: the empty page is the documented paging terminator, but the
        # original code crashed here with IndexError on [-1] when no rows
        # matched. Return an empty page with coherent headers instead.
        if not tx_ids_and_block_times:
            response.headers["X-Current-Page"] = "0"
            response.headers["X-Oldest-Epoch-Millis"] = str(before)
            return []

        tx_ids = {tx_id for tx_id, block_time in tx_ids_and_block_times}
        oldest_block_time = tx_ids_and_block_times[-1][1]

        if len(tx_ids_and_block_times) == limit:
            # To avoid gaps when transactions with the same block_time are at
            # the boundary between pages, fetch all remaining transactions for
            # this address sharing the page's oldest timestamp.
            tx_with_same_block_time = await s.execute(
                select(TxAddrMapping.transaction_id)
                .filter(TxAddrMapping.address == kaspaAddress)
                .filter(TxAddrMapping.block_time == oldest_block_time))
            tx_ids.update(tx_with_same_block_time.scalars().all())

        response.headers["X-Current-Page"] = str(len(tx_ids))
        response.headers["X-Oldest-Epoch-Millis"] = str(oldest_block_time)

        return await search_for_transactions(TxSearch(transactionIds=list(tx_ids)),
                                             fields,
                                             resolve_previous_outpoints)


@app.get("/addresses/{kaspaAddress}/transactions-count",
response_model=TransactionCount,
tags=["Kaspa addresses"])
Expand Down