Only process relevant transactions (#1793)
* Only process txs identified as relevant

* changeset

* bug fix + refactor identify txs

* Review updates
grod220 authored Sep 19, 2024
1 parent 75ef4f5 commit e343d22
Showing 7 changed files with 499 additions and 91 deletions.
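Before the file-by-file diff: the block-processor changes below import `identifyTransactions` and `RelevantTx` from a new `helpers/identify-txs.js` module, call the function with the spent nullifiers, the commitment-to-record map, and the block's transactions, and destructure `{ id, data }` from each relevant tx. The following is a minimal sketch of that helper's surface inferred from those call sites; it is not an excerpt of the new file, and the parameter names are assumptions.

```ts
// Sketch of the extracted helper's surface, inferred from the call sites in this diff.
// Not a verbatim excerpt of packages/query/src/helpers/identify-txs.ts.
import { Transaction } from '@penumbra-zone/protobuf/penumbra/core/transaction/v1/transaction_pb';
import { TransactionId } from '@penumbra-zone/protobuf/penumbra/core/txhash/v1/txhash_pb';
import { Nullifier } from '@penumbra-zone/protobuf/penumbra/core/component/sct/v1/sct_pb';
import { StateCommitment } from '@penumbra-zone/protobuf/penumbra/crypto/tct/v1/tct_pb';
import { SpendableNoteRecord, SwapRecord } from '@penumbra-zone/protobuf/penumbra/view/v1/view_pb';

export interface RelevantTx {
  id: TransactionId; // sha256 of the tx binary, per the deleted in-class method below
  data: Transaction;
}

export declare function identifyTransactions(
  spentNullifiers: Set<Nullifier>,
  commitmentRecords: Map<StateCommitment, SpendableNoteRecord | SwapRecord>,
  blockTx: Transaction[],
): Promise<{
  relevantTxs: RelevantTx[];
  recoveredSourceRecords: (SpendableNoteRecord | SwapRecord)[];
}>;
```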
5 changes: 5 additions & 0 deletions .changeset/eight-hounds-compare.md
@@ -0,0 +1,5 @@
---
'@penumbra-zone/storage': patch
---

Version DB to 47
5 changes: 5 additions & 0 deletions .changeset/late-peas-tap.md
@@ -0,0 +1,5 @@
---
'@penumbra-zone/query': patch
---

[bug fix] Only process relevant transactions for NFTs
5 changes: 5 additions & 0 deletions .changeset/twenty-tomatoes-travel.md
@@ -0,0 +1,5 @@
---
'@penumbra-zone/query': patch
---

Extract, refactor, and test IdentifyTransactions
105 changes: 15 additions & 90 deletions packages/query/src/block-processor.ts
@@ -4,21 +4,13 @@ import {
PositionState,
PositionState_PositionStateEnum,
} from '@penumbra-zone/protobuf/penumbra/core/component/dex/v1/dex_pb';
import {
CommitmentSource,
Nullifier,
} from '@penumbra-zone/protobuf/penumbra/core/component/sct/v1/sct_pb';
import { Nullifier } from '@penumbra-zone/protobuf/penumbra/core/component/sct/v1/sct_pb';
import { ValidatorInfoResponse } from '@penumbra-zone/protobuf/penumbra/core/component/stake/v1/stake_pb';
import {
Action,
Transaction,
} from '@penumbra-zone/protobuf/penumbra/core/transaction/v1/transaction_pb';
import { TransactionId } from '@penumbra-zone/protobuf/penumbra/core/txhash/v1/txhash_pb';
import { Action } from '@penumbra-zone/protobuf/penumbra/core/transaction/v1/transaction_pb';
import { StateCommitment } from '@penumbra-zone/protobuf/penumbra/crypto/tct/v1/tct_pb';
import { SpendableNoteRecord, SwapRecord } from '@penumbra-zone/protobuf/penumbra/view/v1/view_pb';
import { auctionIdFromBech32 } from '@penumbra-zone/bech32m/pauctid';
import { bech32mIdentityKey } from '@penumbra-zone/bech32m/penumbravalid';
import { sha256Hash } from '@penumbra-zone/crypto-web/sha256';
import { getAssetId } from '@penumbra-zone/getters/metadata';
import {
getExchangeRateFromValidatorInfoResponse,
@@ -47,6 +39,7 @@ import { getSpendableNoteRecordCommitment } from '@penumbra-zone/getters/spendab
import { getSwapRecordCommitment } from '@penumbra-zone/getters/swap-record';
import { CompactBlock } from '@penumbra-zone/protobuf/penumbra/core/component/compact_block/v1/compact_block_pb';
import { shouldSkipTrialDecrypt } from './helpers/skip-trial-decrypt.js';
import { identifyTransactions, RelevantTx } from './helpers/identify-txs.js';

declare global {
// eslint-disable-next-line no-var -- expected globals
@@ -81,10 +74,6 @@ interface ProcessBlockParams {
skipTrialDecrypt?: boolean;
}

const BLANK_TX_SOURCE = new CommitmentSource({
source: { case: 'transaction', value: { id: new Uint8Array() } },
});

const POSITION_STATES: PositionState[] = [
new PositionState({ state: PositionState_PositionStateEnum.OPENED }),
new PositionState({ state: PositionState_PositionStateEnum.CLOSED }),
@@ -129,9 +118,7 @@ export class BlockProcessor implements BlockProcessorInterface {
numOfAttempts: Infinity,
maxDelay: 20_000, // 20 seconds
retry: async (e, attemptNumber) => {
if (globalThis.__DEV__) {
console.debug('Sync failure', attemptNumber, e);
}
console.error(`Sync failure #${attemptNumber}: `, e);
await this.viewServer.resetTreeToStored();
return !this.abortController.signal.aborted;
},
@@ -318,28 +305,24 @@ export class BlockProcessor implements BlockProcessorInterface {
// this is a network query
const blockTx = await this.querier.app.txsByHeight(compactBlock.height);

// identify tx that involve a new record
// - compare nullifiers
// - compare state commitments
// - collect relevant tx for info generation later
// - if matched by commitment, collect record with recovered source
const { relevantTx, recordsWithSources } = await this.identifyTransactions(
// Filter down to transactions & note records in block relevant to user
const { relevantTxs, recoveredSourceRecords } = await identifyTransactions(
spentNullifiers,
recordsByCommitment,
blockTx,
);

// this simply stores the new records with 'rehydrated' sources to idb
// TODO: this is the second time we save these records, after "saveScanResult"
await this.saveRecoveredCommitmentSources(recordsWithSources);
await this.saveRecoveredCommitmentSources(recoveredSourceRecords);

await this.processTransactions(blockTx);
await this.processTransactions(relevantTxs);

// at this point txinfo can be generated and saved. this will resolve
// pending broadcasts, and populate the transaction list.
// - calls wasm for each relevant tx
// - saves to idb
await this.saveTransactions(compactBlock.height, relevantTx);
await this.saveTransactions(compactBlock.height, relevantTxs);
}

/**
@@ -450,64 +433,6 @@ export class BlockProcessor implements BlockProcessorInterface {
}
}

private async identifyTransactions(
spentNullifiers: Set<Nullifier>,
commitmentRecordsByStateCommitment: Map<StateCommitment, SpendableNoteRecord | SwapRecord>,
blockTx: Transaction[],
) {
const relevantTx = new Map<TransactionId, Transaction>();
const recordsWithSources = new Array<SpendableNoteRecord | SwapRecord>();
for (const tx of blockTx) {
let txId: TransactionId | undefined;

const txCommitments = (tx.body?.actions ?? []).flatMap(({ action }) => {
switch (action.case) {
case 'output':
return action.value.body?.notePayload?.noteCommitment;
case 'swap':
return action.value.body?.payload?.commitment;
case 'swapClaim':
return [action.value.body?.output1Commitment, action.value.body?.output2Commitment];
default:
return;
}
});

const txNullifiers = (tx.body?.actions ?? []).map(({ action }) => {
switch (action.case) {
case 'spend':
case 'swapClaim':
return action.value.body?.nullifier;
default:
return;
}
});

for (const spentNullifier of spentNullifiers) {
if (txNullifiers.some(txNullifier => spentNullifier.equals(txNullifier))) {
txId = new TransactionId({ inner: await sha256Hash(tx.toBinary()) });
relevantTx.set(txId, tx);
spentNullifiers.delete(spentNullifier);
}
}

for (const [stateCommitment, spendableNoteRecord] of commitmentRecordsByStateCommitment) {
if (txCommitments.some(txCommitment => stateCommitment.equals(txCommitment))) {
txId ??= new TransactionId({ inner: await sha256Hash(tx.toBinary()) });
relevantTx.set(txId, tx);
if (BLANK_TX_SOURCE.equals(spendableNoteRecord.source)) {
spendableNoteRecord.source = new CommitmentSource({
source: { case: 'transaction', value: { id: txId.inner } },
});
recordsWithSources.push(spendableNoteRecord);
}
commitmentRecordsByStateCommitment.delete(stateCommitment);
}
}
}
return { relevantTx, recordsWithSources };
}

// TODO: refactor. there is definitely a better way to do this. batch
// endpoint issue https://github.com/penumbra-zone/penumbra/issues/4688
private async saveAndReturnMetadata(assetId: AssetId): Promise<Metadata | undefined> {
@@ -591,9 +516,9 @@ export class BlockProcessor implements BlockProcessorInterface {
* Identify various pieces of data from the transaction that we need to save,
* such as metadata, liquidity positions, etc.
*/
private async processTransactions(txs: Transaction[]) {
for (const tx of txs) {
for (const { action } of tx.body?.actions ?? []) {
private async processTransactions(txs: RelevantTx[]) {
for (const { data } of txs) {
for (const { action } of data.body?.actions ?? []) {
await Promise.all([this.identifyAuctionNfts(action), this.identifyLpNftPositions(action)]);
}
}
@@ -685,9 +610,9 @@ export class BlockProcessor implements BlockProcessorInterface {
});
}

private async saveTransactions(height: bigint, relevantTx: Map<TransactionId, Transaction>) {
for (const [id, transaction] of relevantTx) {
await this.indexedDb.saveTransaction(id, height, transaction);
private async saveTransactions(height: bigint, relevantTx: RelevantTx[]) {
for (const { id, data } of relevantTx) {
await this.indexedDb.saveTransaction(id, height, data);
}
}

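For reference, the relevance test implemented by the deleted in-class `identifyTransactions` method above, which presumably moved into `identify-txs.ts`, boils down to the sketch below. It is paraphrased from the removed code in this diff, not copied from the new helper (which the commit message says also contains a bug fix), and it simplifies the commitment map to a set, so treat names and details as illustrative.

```ts
// Paraphrase of the deleted matching logic (illustrative only).
// The real helper also computes the tx id via sha256 and recovers blank tx sources.
import { Transaction } from '@penumbra-zone/protobuf/penumbra/core/transaction/v1/transaction_pb';
import { Nullifier } from '@penumbra-zone/protobuf/penumbra/core/component/sct/v1/sct_pb';
import { StateCommitment } from '@penumbra-zone/protobuf/penumbra/crypto/tct/v1/tct_pb';

// A tx is relevant if it reveals one of our nullifiers (spend/swapClaim)
// or creates one of our state commitments (output/swap/swapClaim).
const isRelevant = (
  tx: Transaction,
  spentNullifiers: Set<Nullifier>,
  commitments: Set<StateCommitment>,
): boolean => {
  const actions = tx.body?.actions ?? [];

  // Nullifiers revealed by this tx
  const txNullifiers = actions.flatMap(({ action }) =>
    action.case === 'spend' || action.case === 'swapClaim'
      ? (action.value.body?.nullifier ?? [])
      : [],
  );

  // Note/swap commitments created by this tx
  const txCommitments = actions.flatMap(({ action }) => {
    switch (action.case) {
      case 'output':
        return action.value.body?.notePayload?.noteCommitment ?? [];
      case 'swap':
        return action.value.body?.payload?.commitment ?? [];
      case 'swapClaim':
        return [action.value.body?.output1Commitment, action.value.body?.output2Commitment];
      default:
        return [];
    }
  });

  return (
    [...spentNullifiers].some(n => txNullifiers.some(txN => n.equals(txN))) ||
    [...commitments].some(c => txCommitments.some(txC => c.equals(txC)))
  );
};
```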