diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 000000000..ff1b97a2e --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,3 @@ +{ + "recommendations": ["streetsidesoftware.code-spell-checker"] +} diff --git a/.vscode/settings.json b/.vscode/settings.json index 3662b3700..5f6fc7807 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,3 +1,4 @@ { + "typescript.preferences.importModuleSpecifier": "relative", "typescript.tsdk": "node_modules/typescript/lib" -} \ No newline at end of file +} diff --git a/cspell.json b/cspell.json new file mode 100644 index 000000000..1c6e2b58a --- /dev/null +++ b/cspell.json @@ -0,0 +1,23 @@ +{ + "$schema": "https://raw.githubusercontent.com/streetsidesoftware/cspell/main/cspell.schema.json", + "version": "0.2", + "dictionaries": [ + "companies", + "css", + "en_us", + "en-gb", + "fullstack", + "html", + "lorem-ipsum", + "node", + "npm", + "softwareTerms", + "sql", + "typescript" + ], + "ignorePaths": ["node_modules", "__generated__", "build", "dist", "out"], + "ignoreRegExpList": ["/.*[0-9].*/"], + "language": "en", + "minWordLength": 5, + "words": ["amounter", "avalabs", "locktime", "stakeable", "unstakeable", "utxo", "utxos"] +} diff --git a/examples/p-chain/etna/base.ts b/examples/p-chain/etna/base.ts new file mode 100644 index 000000000..78d40c0c5 --- /dev/null +++ b/examples/p-chain/etna/base.ts @@ -0,0 +1,42 @@ +import { TransferableOutput, addTxSignatures, pvm, utils } from '../../../src'; +import { getEnvVars } from '../../utils/getEnvVars'; +import { getEtnaContextFromURI } from './utils/etna-context'; + +/** + * The amount of AVAX to send to self. + */ +const SEND_AVAX_AMOUNT: number = 0.001; + +const main = async () => { + const { AVAX_PUBLIC_URL, P_CHAIN_ADDRESS, PRIVATE_KEY } = getEnvVars(); + + const pvmApi = new pvm.PVMApi(AVAX_PUBLIC_URL); + + const context = await getEtnaContextFromURI(AVAX_PUBLIC_URL); + + const { utxos } = await pvmApi.getUTXOs({ addresses: [P_CHAIN_ADDRESS] }); + + const tx = pvm.e.newBaseTx( + { + fromAddressesBytes: [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + outputs: [ + TransferableOutput.fromNative( + context.avaxAssetID, + BigInt(SEND_AVAX_AMOUNT * 1e9), + [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + ), + ], + utxos, + }, + context, + ); + + await addTxSignatures({ + unsignedTx: tx, + privateKeys: [utils.hexToBuffer(PRIVATE_KEY)], + }); + + return pvmApi.issueSignedTx(tx.getSignedTx()); +}; + +main().then(console.log); diff --git a/examples/p-chain/etna/delegate.ts b/examples/p-chain/etna/delegate.ts new file mode 100644 index 000000000..673a7319e --- /dev/null +++ b/examples/p-chain/etna/delegate.ts @@ -0,0 +1,50 @@ +import { addTxSignatures, networkIDs, pvm, utils } from '../../../src'; +import { getEnvVars } from '../../utils/getEnvVars'; +import { getEtnaContextFromURI } from './utils/etna-context'; + +const AMOUNT_TO_DELEGATE_AVAX: number = 1; +const DAYS_TO_DELEGATE: number = 14; + +const main = async () => { + const { AVAX_PUBLIC_URL, P_CHAIN_ADDRESS, PRIVATE_KEY } = getEnvVars(); + + const pvmApi = new pvm.PVMApi(AVAX_PUBLIC_URL); + + const context = await getEtnaContextFromURI(AVAX_PUBLIC_URL); + + const { utxos } = await pvmApi.getUTXOs({ addresses: [P_CHAIN_ADDRESS] }); + + const startTime = await pvmApi.getTimestamp(); + const startDate = new Date(startTime.timestamp); + const start: bigint = BigInt(startDate.getTime() / 1_000); + + const endTime = new Date(startTime.timestamp); + endTime.setDate(endTime.getDate() + DAYS_TO_DELEGATE); + const end: 
bigint = BigInt(endTime.getTime() / 1_000); + + // TODO: Get this from an argument. + const nodeId = 'NodeID-MqgFXT8JhorbEW2LpTDGePBBhv55SSp3M'; + + const tx = pvm.e.newAddPermissionlessDelegatorTx( + { + end, + fromAddressesBytes: [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + nodeId, + rewardAddresses: [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + start, + subnetId: networkIDs.PrimaryNetworkID.toString(), + utxos, + weight: BigInt(AMOUNT_TO_DELEGATE_AVAX * 1e9), + }, + context, + ); + + await addTxSignatures({ + unsignedTx: tx, + privateKeys: [utils.hexToBuffer(PRIVATE_KEY)], + }); + + return pvmApi.issueSignedTx(tx.getSignedTx()); +}; + +main().then(console.log); diff --git a/examples/p-chain/etna/export.ts b/examples/p-chain/etna/export.ts new file mode 100644 index 000000000..233560361 --- /dev/null +++ b/examples/p-chain/etna/export.ts @@ -0,0 +1,43 @@ +import { TransferableOutput, addTxSignatures, pvm, utils } from '../../../src'; +import { getEnvVars } from '../../utils/getEnvVars'; +import { getEtnaContextFromURI } from './utils/etna-context'; + +const AMOUNT_TO_EXPORT_AVAX: number = 0.001; + +const main = async () => { + const { AVAX_PUBLIC_URL, P_CHAIN_ADDRESS, PRIVATE_KEY, X_CHAIN_ADDRESS } = + getEnvVars(); + + const context = await getEtnaContextFromURI(AVAX_PUBLIC_URL); + + const pvmApi = new pvm.PVMApi(AVAX_PUBLIC_URL); + + const { utxos } = await pvmApi.getUTXOs({ + addresses: [P_CHAIN_ADDRESS], + }); + + const exportTx = pvm.e.newExportTx( + { + destinationChainId: context.xBlockchainID, + fromAddressesBytes: [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + outputs: [ + TransferableOutput.fromNative( + context.avaxAssetID, + BigInt(AMOUNT_TO_EXPORT_AVAX * 1e9), + [utils.bech32ToBytes(X_CHAIN_ADDRESS)], + ), + ], + utxos, + }, + context, + ); + + await addTxSignatures({ + unsignedTx: exportTx, + privateKeys: [utils.hexToBuffer(PRIVATE_KEY)], + }); + + return pvmApi.issueSignedTx(exportTx.getSignedTx()); +}; + +main().then(console.log); diff --git a/examples/p-chain/etna/import.ts b/examples/p-chain/etna/import.ts new file mode 100644 index 000000000..82bb6299c --- /dev/null +++ b/examples/p-chain/etna/import.ts @@ -0,0 +1,36 @@ +import { addTxSignatures, pvm, utils } from '../../../src'; +import { getEnvVars } from '../../utils/getEnvVars'; +import { getEtnaContextFromURI } from './utils/etna-context'; + +const main = async () => { + const { AVAX_PUBLIC_URL, P_CHAIN_ADDRESS, PRIVATE_KEY, X_CHAIN_ADDRESS } = + getEnvVars(); + + const context = await getEtnaContextFromURI(AVAX_PUBLIC_URL); + + const pvmApi = new pvm.PVMApi(AVAX_PUBLIC_URL); + + const { utxos } = await pvmApi.getUTXOs({ + sourceChain: 'X', + addresses: [P_CHAIN_ADDRESS], + }); + + const importTx = pvm.e.newImportTx( + { + fromAddressesBytes: [utils.bech32ToBytes(X_CHAIN_ADDRESS)], + sourceChainId: context.xBlockchainID, + toAddresses: [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + utxos, + }, + context, + ); + + await addTxSignatures({ + unsignedTx: importTx, + privateKeys: [utils.hexToBuffer(PRIVATE_KEY)], + }); + + return pvmApi.issueSignedTx(importTx.getSignedTx()); +}; + +main().then(console.log); diff --git a/examples/p-chain/etna/utils/etna-context.ts b/examples/p-chain/etna/utils/etna-context.ts new file mode 100644 index 000000000..230a421f9 --- /dev/null +++ b/examples/p-chain/etna/utils/etna-context.ts @@ -0,0 +1,16 @@ +import { Context } from '../../../../src'; + +/** + * Gets the context from URI and then modifies the context + * to be used for testing example Etna transactions until Etna is enabled. 
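+ *
+ * Note: only `gasPrice` is overridden below; every other field is taken
+ * unchanged from `Context.getContextFromURI`.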
+ */ +export const getEtnaContextFromURI = async ( + uri: string, +): Promise => { + const context = await Context.getContextFromURI(uri); + + return { + ...context, + gasPrice: 10_000n, + }; +}; diff --git a/examples/p-chain/etna/utils/random-node-id.ts b/examples/p-chain/etna/utils/random-node-id.ts new file mode 100644 index 000000000..819416748 --- /dev/null +++ b/examples/p-chain/etna/utils/random-node-id.ts @@ -0,0 +1,10 @@ +import { base58check } from '../../../../src/utils'; + +export const getRandomNodeId = (): string => { + const buffer = new Uint8Array(20); + const randomBuffer = crypto.getRandomValues(buffer); + + const nodeId = `NodeID-${base58check.encode(randomBuffer)}`; + + return nodeId; +}; diff --git a/examples/p-chain/etna/validate.ts b/examples/p-chain/etna/validate.ts new file mode 100644 index 000000000..29b446487 --- /dev/null +++ b/examples/p-chain/etna/validate.ts @@ -0,0 +1,64 @@ +import { addTxSignatures, networkIDs, pvm, utils } from '../../../src'; +import { getEnvVars } from '../../utils/getEnvVars'; +import { getEtnaContextFromURI } from './utils/etna-context'; +import { getRandomNodeId } from './utils/random-node-id'; + +const AMOUNT_TO_VALIDATE_AVAX: number = 1; +const DAYS_TO_VALIDATE: number = 21; + +const nodeId = getRandomNodeId(); + +const main = async () => { + const { AVAX_PUBLIC_URL, P_CHAIN_ADDRESS, PRIVATE_KEY } = getEnvVars(); + + const pvmApi = new pvm.PVMApi(AVAX_PUBLIC_URL); + + const context = await getEtnaContextFromURI(AVAX_PUBLIC_URL); + + const { utxos } = await pvmApi.getUTXOs({ addresses: [P_CHAIN_ADDRESS] }); + + const startTime = await pvmApi.getTimestamp(); + const startDate = new Date(startTime.timestamp); + const start: bigint = BigInt(startDate.getTime() / 1_000); + + const endTime = new Date(startTime.timestamp); + endTime.setDate(endTime.getDate() + DAYS_TO_VALIDATE); + const end: bigint = BigInt(endTime.getTime() / 1_000); + + const publicKey = utils.hexToBuffer( + '0x8f95423f7142d00a48e1014a3de8d28907d420dc33b3052a6dee03a3f2941a393c2351e354704ca66a3fc29870282e15', + ); + + const signature = utils.hexToBuffer( + '0x86a3ab4c45cfe31cae34c1d06f212434ac71b1be6cfe046c80c162e057614a94a5bc9f1ded1a7029deb0ba4ca7c9b71411e293438691be79c2dbf19d1ca7c3eadb9c756246fc5de5b7b89511c7d7302ae051d9e03d7991138299b5ed6a570a98', + ); + + const tx = pvm.e.newAddPermissionlessValidatorTx( + { + end, + delegatorRewardsOwner: [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + fromAddressesBytes: [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + nodeId, + publicKey, + rewardAddresses: [utils.bech32ToBytes(P_CHAIN_ADDRESS)], + shares: 20 * 1e4, + signature, + start, + subnetId: networkIDs.PrimaryNetworkID.toString(), + utxos, + weight: BigInt(AMOUNT_TO_VALIDATE_AVAX * 1e9), + }, + context, + ); + + await addTxSignatures({ + unsignedTx: tx, + privateKeys: [utils.hexToBuffer(PRIVATE_KEY)], + }); + + return pvmApi.issueSignedTx(tx.getSignedTx()); +}; + +main() + .then(console.log) + .then(() => console.log('Validate node ID:', nodeId)); diff --git a/examples/utils/getEnvVars.ts b/examples/utils/getEnvVars.ts new file mode 100644 index 000000000..a92f92930 --- /dev/null +++ b/examples/utils/getEnvVars.ts @@ -0,0 +1,17 @@ +const AVAX_PUBLIC_URL = process.env['AVAX_PUBLIC_URL']; +const P_CHAIN_ADDRESS = process.env['P_CHAIN_ADDRESS']; +const PRIVATE_KEY = process.env['PRIVATE_KEY']; +const X_CHAIN_ADDRESS = process.env['X_CHAIN_ADDRESS']; + +export const getEnvVars = () => { + if (!(AVAX_PUBLIC_URL && P_CHAIN_ADDRESS && PRIVATE_KEY && X_CHAIN_ADDRESS)) { + throw new 
Error('Missing environment variable(s).'); + } + + return { + AVAX_PUBLIC_URL, + P_CHAIN_ADDRESS, + PRIVATE_KEY, + X_CHAIN_ADDRESS, + }; +}; diff --git a/jest.config.ts b/jest.config.ts index 6d70b66db..fc32ddd1f 100644 --- a/jest.config.ts +++ b/jest.config.ts @@ -15,4 +15,7 @@ module.exports = { testEnvironment: 'node', coverageProvider: 'v8', extensionsToTreatAsEsm: ['.ts'], + // Experimental to fix issues with BigInt serialization + // See: https://jestjs.io/docs/configuration#workerthreads + workerThreads: true, }; diff --git a/src/crypto/secp256k1.ts b/src/crypto/secp256k1.ts index 0bd1d6131..891e17fcf 100644 --- a/src/crypto/secp256k1.ts +++ b/src/crypto/secp256k1.ts @@ -4,6 +4,9 @@ import * as secp from '@noble/secp256k1'; import { Address } from 'micro-eth-signer'; import { concatBytes, hexToBuffer } from '../utils/buffer'; +/** Number of bytes per signature */ +export const SIGNATURE_LENGTH = 65; + export function randomPrivateKey() { return secp.utils.randomPrivateKey(); } diff --git a/src/fixtures/context.ts b/src/fixtures/context.ts index 2e6aff484..0dc466304 100644 --- a/src/fixtures/context.ts +++ b/src/fixtures/context.ts @@ -1,3 +1,4 @@ +import { createDimensions } from '../vms/common/fees/dimensions'; import type { Context } from '../vms/context'; export const testContext: Context = { @@ -16,4 +17,8 @@ export const testContext: Context = { addSubnetDelegatorFee: 1000000n, networkID: 1, hrp: 'avax', + + // TODO: Adjust these based on what we want for the tests. + gasPrice: 1n, + complexityWeights: createDimensions(1, 1, 1, 1), }; diff --git a/src/fixtures/transactions.ts b/src/fixtures/transactions.ts index 8d755e95f..bdbdf0f7f 100644 --- a/src/fixtures/transactions.ts +++ b/src/fixtures/transactions.ts @@ -14,9 +14,10 @@ import { } from '../serializable/fxs/secp256k1'; import { BigIntPr, Int, Bytes } from '../serializable/primitives'; import { StakeableLockIn, StakeableLockOut } from '../serializable/pvm'; -import { hexToBuffer } from '../utils'; +import { hexToBuffer, unpackWithManager } from '../utils'; import { testContext } from './context'; import { stringToBytes } from '@scure/base'; +import type { VM } from '../serializable'; export const cAddressForTest = '0xfd4DFC8f567caD8a275989982c5f8f1fC82B7563'; export const privateKeyForTest = @@ -190,3 +191,9 @@ export const getOutputForTest = () => new BigIntPr(BigInt(0.1 * 1e9)), Id.fromString(testContext.avaxAssetID), ); + +export const txHexToTransaction = (vm: VM, txHex: string) => { + const txBytes = hexToBuffer(txHex); + + return unpackWithManager(vm, txBytes); +}; diff --git a/src/serializable/avax/avaxTx.ts b/src/serializable/avax/avaxTx.ts index 48d24a690..abcae7bad 100644 --- a/src/serializable/avax/avaxTx.ts +++ b/src/serializable/avax/avaxTx.ts @@ -5,7 +5,7 @@ import type { TransferableInput } from './transferableInput'; export abstract class AvaxTx extends Transaction { abstract baseTx?: BaseTx; - getInputs(): TransferableInput[] { + getInputs(): readonly TransferableInput[] { return this.baseTx?.inputs ?? 
[]; } getBlockchainId() { diff --git a/src/serializable/avax/baseTx.ts b/src/serializable/avax/baseTx.ts index ceab6d8ea..b7ef88dbc 100644 --- a/src/serializable/avax/baseTx.ts +++ b/src/serializable/avax/baseTx.ts @@ -19,8 +19,8 @@ export class BaseTx { constructor( public readonly NetworkId: Int, public readonly BlockchainId: Id, - public readonly outputs: TransferableOutput[], - public readonly inputs: TransferableInput[], + public readonly outputs: readonly TransferableOutput[], + public readonly inputs: readonly TransferableInput[], public readonly memo: Bytes, ) {} @@ -45,8 +45,8 @@ export class BaseTx { static fromNative( networkId: number, blockchainId: string, - outputs: TransferableOutput[], - inputs: TransferableInput[], + outputs: readonly TransferableOutput[], + inputs: readonly TransferableInput[], memo: Uint8Array, ) { return new BaseTx( diff --git a/src/serializable/avax/transferableOutput.ts b/src/serializable/avax/transferableOutput.ts index 191efd959..d881d6680 100644 --- a/src/serializable/avax/transferableOutput.ts +++ b/src/serializable/avax/transferableOutput.ts @@ -23,7 +23,7 @@ export class TransferableOutput { static fromNative( assetId: string, amt: bigint, - addresses: Uint8Array[], + addresses: readonly Uint8Array[], locktime?: bigint, threshold?: number, ) { diff --git a/src/serializable/avax/utxo.ts b/src/serializable/avax/utxo.ts index ec71be13e..7e20af3c3 100644 --- a/src/serializable/avax/utxo.ts +++ b/src/serializable/avax/utxo.ts @@ -12,13 +12,13 @@ import { TypeSymbols } from '../constants'; * @see https://docs.avax.network/specs/avm-transaction-serialization#unsigned-Exporttx */ @serializable() -export class Utxo { +export class Utxo { _type = TypeSymbols.UTXO; constructor( public readonly utxoId: UTXOID, public readonly assetId: Id, - public readonly output: Serializable, + public readonly output: Output, ) {} static fromBytes(bytes: Uint8Array, codec: Codec): [Utxo, Uint8Array] { diff --git a/src/serializable/fxs/common/id.ts b/src/serializable/fxs/common/id.ts index 5595ac16a..411f7e02d 100644 --- a/src/serializable/fxs/common/id.ts +++ b/src/serializable/fxs/common/id.ts @@ -6,6 +6,11 @@ import { serializable } from '../../common/types'; import { Primitives } from '../../primitives/primatives'; import { TypeSymbols } from '../../constants'; +/** + * Number of bytes per ID. + */ +export const ID_LEN = 32; + @serializable() export class Id extends Primitives { _type = TypeSymbols.Id; @@ -14,7 +19,7 @@ export class Id extends Primitives { } static fromBytes(buf: Uint8Array): [Id, Uint8Array] { - return [new Id(buf.slice(0, 32)), buf.slice(32)]; + return [new Id(buf.slice(0, ID_LEN)), buf.slice(ID_LEN)]; } static compare(id1: Id, id2: Id): number { @@ -26,7 +31,7 @@ export class Id extends Primitives { } toBytes() { - return padLeft(this.idVal, 32); + return padLeft(this.idVal, ID_LEN); } toJSON() { diff --git a/src/serializable/fxs/common/nodeId.ts b/src/serializable/fxs/common/nodeId.ts index 62542d86b..eabb79c25 100644 --- a/src/serializable/fxs/common/nodeId.ts +++ b/src/serializable/fxs/common/nodeId.ts @@ -6,6 +6,12 @@ import { Primitives } from '../../primitives/primatives'; import { TypeSymbols } from '../../constants'; export const NodeIDPrefix = 'NodeID-'; + +/** + * Number of bytes per NodeId. 
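+ * (A NodeId is a 20-byte "short" ID, in contrast to the 32-byte `Id`
+ * defined with `ID_LEN` above.)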
+ */ +export const SHORT_ID_LEN = 20; + @serializable() export class NodeId extends Primitives { _type = TypeSymbols.NodeId; @@ -14,7 +20,7 @@ export class NodeId extends Primitives { } static fromBytes(buf: Uint8Array): [NodeId, Uint8Array] { - return [new NodeId(buf.slice(0, 20)), buf.slice(20)]; + return [new NodeId(buf.slice(0, SHORT_ID_LEN)), buf.slice(SHORT_ID_LEN)]; } [customInspectSymbol](_, options: any) { @@ -22,7 +28,7 @@ export class NodeId extends Primitives { } toBytes() { - return padLeft(this.idVal, 20); + return padLeft(this.idVal, SHORT_ID_LEN); } toJSON() { diff --git a/src/serializable/fxs/secp256k1/input.ts b/src/serializable/fxs/secp256k1/input.ts index b31610399..9ba4f7af4 100644 --- a/src/serializable/fxs/secp256k1/input.ts +++ b/src/serializable/fxs/secp256k1/input.ts @@ -16,7 +16,7 @@ export class Input { constructor(private readonly sigIndices: Int[]) {} - static fromNative(sigIndicies: number[]) { + static fromNative(sigIndicies: readonly number[]) { return new Input(sigIndicies.map((i) => new Int(i))); } diff --git a/src/serializable/fxs/secp256k1/outputOwners.ts b/src/serializable/fxs/secp256k1/outputOwners.ts index 560209d27..4216e3aaa 100644 --- a/src/serializable/fxs/secp256k1/outputOwners.ts +++ b/src/serializable/fxs/secp256k1/outputOwners.ts @@ -21,7 +21,11 @@ export class OutputOwners { public readonly addrs: Address[], ) {} - static fromNative(address: Uint8Array[], locktime = 0n, threshold = 1) { + static fromNative( + address: readonly Uint8Array[], + locktime = 0n, + threshold = 1, + ) { return new OutputOwners( new BigIntPr(locktime), new Int(threshold), diff --git a/src/serializable/primitives/bytes.ts b/src/serializable/primitives/bytes.ts index 7f8362f91..f528acdd9 100644 --- a/src/serializable/primitives/bytes.ts +++ b/src/serializable/primitives/bytes.ts @@ -33,4 +33,13 @@ export class Bytes extends Primitives { toBytes() { return concatBytes(bytesForInt(this.bytes.length), this.bytes); } + + /** + * Returns the length of the bytes (Uint8Array). + * + * Useful for calculating tx complexity. + */ + get length() { + return this.bytes.length; + } } diff --git a/src/serializable/primitives/int.ts b/src/serializable/primitives/int.ts index 775630b40..2ff6f684d 100644 --- a/src/serializable/primitives/int.ts +++ b/src/serializable/primitives/int.ts @@ -4,6 +4,11 @@ import { serializable } from '../common/types'; import { Primitives } from './primatives'; import { TypeSymbols } from '../constants'; +/** + * Number of bytes per int. + */ +export const INT_LEN = 4; + @serializable() export class Int extends Primitives { _type = TypeSymbols.Int; @@ -12,7 +17,7 @@ export class Int extends Primitives { } static fromBytes(buf: Uint8Array): [Int, Uint8Array] { - return [new Int(bufferToNumber(buf.slice(0, 4))), buf.slice(4)]; + return [new Int(bufferToNumber(buf.slice(0, INT_LEN))), buf.slice(INT_LEN)]; } [customInspectSymbol]() { @@ -24,7 +29,7 @@ export class Int extends Primitives { } toBytes() { - return padLeft(hexToBuffer(this.int.toString(16)), 4); + return padLeft(hexToBuffer(this.int.toString(16)), INT_LEN); } value() { diff --git a/src/serializable/primitives/short.ts b/src/serializable/primitives/short.ts index 6662442e1..f7ff920ab 100644 --- a/src/serializable/primitives/short.ts +++ b/src/serializable/primitives/short.ts @@ -3,6 +3,11 @@ import { serializable } from '../common/types'; import { Primitives } from './primatives'; import { TypeSymbols } from '../constants'; +/** + * Number of bytes per short. 
+ */ +export const SHORT_LEN = 2; + @serializable() export class Short extends Primitives { _type = TypeSymbols.Short; @@ -11,7 +16,10 @@ export class Short extends Primitives { } static fromBytes(buf: Uint8Array): [Short, Uint8Array] { - return [new Short(bufferToNumber(buf.slice(0, 2))), buf.slice(2)]; + return [ + new Short(bufferToNumber(buf.slice(0, SHORT_LEN))), + buf.slice(SHORT_LEN), + ]; } toJSON() { @@ -19,7 +27,7 @@ export class Short extends Primitives { } toBytes() { - return padLeft(hexToBuffer(this.short.toString(16)), 2); + return padLeft(hexToBuffer(this.short.toString(16)), SHORT_LEN); } value() { diff --git a/src/serializable/pvm/addPermissionlessDelegatorTx.ts b/src/serializable/pvm/addPermissionlessDelegatorTx.ts index 5be3a282f..0e3e5352f 100644 --- a/src/serializable/pvm/addPermissionlessDelegatorTx.ts +++ b/src/serializable/pvm/addPermissionlessDelegatorTx.ts @@ -21,7 +21,7 @@ export class AddPermissionlessDelegatorTx extends PVMTx { constructor( public readonly baseTx: BaseTx, public readonly subnetValidator: SubnetValidator, - public readonly stake: TransferableOutput[], + public readonly stake: readonly TransferableOutput[], public readonly delegatorRewardsOwner: Serializable, ) { super(); diff --git a/src/serializable/pvm/addPermissionlessValidatorTx.ts b/src/serializable/pvm/addPermissionlessValidatorTx.ts index 12dfb9155..9d0f99f23 100644 --- a/src/serializable/pvm/addPermissionlessValidatorTx.ts +++ b/src/serializable/pvm/addPermissionlessValidatorTx.ts @@ -24,7 +24,7 @@ export class AddPermissionlessValidatorTx extends PVMTx { public readonly baseTx: BaseTx, public readonly subnetValidator: SubnetValidator, public readonly signer: Signer | SignerEmpty, - public readonly stake: TransferableOutput[], + public readonly stake: readonly TransferableOutput[], public readonly validatorRewardsOwner: Serializable, public readonly delegatorRewardsOwner: Serializable, public readonly shares: Int, diff --git a/src/serializable/pvm/stakeableLockOut.ts b/src/serializable/pvm/stakeableLockOut.ts index 9c9a9b486..9c3d3a58a 100644 --- a/src/serializable/pvm/stakeableLockOut.ts +++ b/src/serializable/pvm/stakeableLockOut.ts @@ -11,12 +11,14 @@ import { TypeSymbols } from '../constants'; * @see https://docs.avax.network/specs/platform-transaction-serialization#stakeablelockin */ @serializable() -export class StakeableLockOut implements Amounter { +export class StakeableLockOut + implements Amounter +{ _type = TypeSymbols.StakeableLockOut; constructor( public readonly lockTime: BigIntPr, - public readonly transferOut: Amounter, + public readonly transferOut: TransferOut, ) {} amount() { diff --git a/src/utils/addressMap.ts b/src/utils/addressMap.ts index 0a6c52e06..b1bba060a 100644 --- a/src/utils/addressMap.ts +++ b/src/utils/addressMap.ts @@ -96,10 +96,10 @@ export class AddressMaps { // this is a stopgap to quickly fix AddressMap not deriving the order post sorting TransferableInputs. 
Can probably // be simplified a lot by just deriving the sigIndicies right before returning the unsingedTx static fromTransferableInputs( - inputs: TransferableInput[], - inputUtxos: Utxo[], + inputs: readonly TransferableInput[], + inputUtxos: readonly Utxo[], minIssuanceTime: bigint, - fromAddressesBytes?: Uint8Array[], + fromAddressesBytes?: readonly Uint8Array[], ) { const utxoMap = inputUtxos.reduce((agg, utxo) => { return agg.set(utxo.utxoId.ID(), utxo); diff --git a/src/utils/addressesFromBytes.ts b/src/utils/addressesFromBytes.ts index ffa53525e..cea2c5282 100644 --- a/src/utils/addressesFromBytes.ts +++ b/src/utils/addressesFromBytes.ts @@ -1,5 +1,5 @@ import { Address } from '../serializable/fxs/common'; -export function addressesFromBytes(bytes: Uint8Array[]): Address[] { +export function addressesFromBytes(bytes: readonly Uint8Array[]): Address[] { return bytes.map((b) => new Address(b)); } diff --git a/src/utils/builderUtils.ts b/src/utils/builderUtils.ts index 79a345322..d60be0387 100644 --- a/src/utils/builderUtils.ts +++ b/src/utils/builderUtils.ts @@ -11,8 +11,8 @@ type GetImportedInputsFromUtxosOutput = { }; export const getImportedInputsFromUtxos = ( - utxos: Utxo[], - fromAddressesBytes: Uint8Array[], + utxos: readonly Utxo[], + fromAddressesBytes: readonly Uint8Array[], minIssuanceTime: bigint, ): GetImportedInputsFromUtxosOutput => { const fromAddresses = addressesFromBytes(fromAddressesBytes); diff --git a/src/utils/consolidate.ts b/src/utils/consolidate.ts index 23f732843..86621e66a 100644 --- a/src/utils/consolidate.ts +++ b/src/utils/consolidate.ts @@ -9,7 +9,7 @@ * @returns an array combined elements */ export const consolidate = ( - arr: T[], + arr: readonly T[], canCombine: (a: T, b: T) => boolean, combine: (a: T, b: T) => T, ): T[] => { diff --git a/src/utils/getUtxoInfo.ts b/src/utils/getUtxoInfo.ts new file mode 100644 index 000000000..8a9c45162 --- /dev/null +++ b/src/utils/getUtxoInfo.ts @@ -0,0 +1,42 @@ +import type { Utxo } from '../serializable/avax/utxo'; +import { isStakeableLockOut, isTransferOut } from './typeGuards'; + +export type UtxoInfo = Readonly<{ + /** + * @default 0n + */ + amount: bigint; + assetId: string; + /** + * @default 0n + */ + locktime: bigint; + /** + * @default 0n + */ + stakeableLocktime: bigint; + /** + * @default 1 + */ + threshold: number; + utxoId: string; +}>; + +export const getUtxoInfo = (utxo: Utxo): UtxoInfo => { + const { output } = utxo; + const outputOwners = utxo.getOutputOwners(); + + return { + amount: + isTransferOut(output) || isStakeableLockOut(output) + ? output.amount() + : 0n, + assetId: utxo.getAssetId(), + locktime: outputOwners.locktime.value(), + stakeableLocktime: isStakeableLockOut(output) + ? 
output.getStakeableLocktime() + : 0n, + threshold: outputOwners.threshold.value(), + utxoId: utxo.ID(), + }; +}; diff --git a/src/utils/index.ts b/src/utils/index.ts index 1013b8a9f..b36f58155 100644 --- a/src/utils/index.ts +++ b/src/utils/index.ts @@ -11,6 +11,7 @@ export * from './addChecksum'; export * from './addressMap'; export * from './getTransferableInputsByTx'; export * from './getTransferableOutputsByTx'; +export * from './getUtxoInfo'; export * from './getBurnedAmountByTx'; export * from './validateBurnedAmount'; export { unpackWithManager, getManagerForVM, packTx } from './packTx'; diff --git a/src/utils/serializeList.ts b/src/utils/serializeList.ts index e62ef200f..1ddacfb28 100644 --- a/src/utils/serializeList.ts +++ b/src/utils/serializeList.ts @@ -54,7 +54,7 @@ export const unpackCodecList = { }; export const packList = ( - serializables: Serializable[], + serializables: readonly Serializable[], codec: Codec, ): Uint8Array => { return concatBytes( diff --git a/src/vms/common/builder.ts b/src/vms/common/builder.ts index 9ba179c7f..662ee08fc 100644 --- a/src/vms/common/builder.ts +++ b/src/vms/common/builder.ts @@ -31,8 +31,8 @@ export const baseTxUnsafeAvm = ( */ export const baseTxUnsafePvm = ( context: Context, - changeOutputs: TransferableOutput[], - inputs: TransferableInput[], + changeOutputs: readonly TransferableOutput[], + inputs: readonly TransferableInput[], memo: Uint8Array, ) => { return AvaxBaseTx.fromNative( diff --git a/src/vms/common/defaultSpendOptions.ts b/src/vms/common/defaultSpendOptions.ts index e9e0a4fbd..03e28d913 100644 --- a/src/vms/common/defaultSpendOptions.ts +++ b/src/vms/common/defaultSpendOptions.ts @@ -1,7 +1,7 @@ import type { SpendOptions, SpendOptionsRequired } from './models'; export const defaultSpendOptions = ( - fromAddress: Uint8Array[], + fromAddress: readonly Uint8Array[], options?: SpendOptions, ): SpendOptionsRequired => { return { @@ -10,6 +10,9 @@ export const defaultSpendOptions = ( threshold: 1, memo: new Uint8Array(), locktime: 0n, - ...options, + // Only include options that are not undefined + ...Object.fromEntries( + Object.entries(options || {}).filter(([, v]) => v !== undefined), + ), }; }; diff --git a/src/vms/common/fees/dimensions.ts b/src/vms/common/fees/dimensions.ts new file mode 100644 index 000000000..30de5a857 --- /dev/null +++ b/src/vms/common/fees/dimensions.ts @@ -0,0 +1,57 @@ +export enum FeeDimensions { + Bandwidth = 0, + DBRead = 1, + DBWrite = 2, + Compute = 3, +} + +type DimensionValue = number; + +export type Dimensions = Record; + +export const createEmptyDimensions = (): Dimensions => ({ + [FeeDimensions.Bandwidth]: 0, + [FeeDimensions.DBRead]: 0, + [FeeDimensions.DBWrite]: 0, + [FeeDimensions.Compute]: 0, +}); + +export const createDimensions = ( + bandwidth: DimensionValue, + dbRead: DimensionValue, + dbWrite: DimensionValue, + compute: DimensionValue, +): Dimensions => ({ + [FeeDimensions.Bandwidth]: bandwidth, + [FeeDimensions.DBRead]: dbRead, + [FeeDimensions.DBWrite]: dbWrite, + [FeeDimensions.Compute]: compute, +}); + +/** + * Adds a number of dimensions together. + * + * @returns The sum of the dimensions. 
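+ *
+ * @example
+ * // Illustrative values only: dimensions are summed element-wise.
+ * addDimensions(createDimensions(1, 2, 3, 4), createDimensions(5, 0, 0, 1));
+ * // -> { Bandwidth: 6, DBRead: 2, DBWrite: 3, Compute: 5 }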
+ */ +export const addDimensions = (...dimensions: Dimensions[]): Dimensions => { + const result = createEmptyDimensions(); + for (const dimension of dimensions) { + result[FeeDimensions.Bandwidth] += dimension[FeeDimensions.Bandwidth]; + result[FeeDimensions.DBRead] += dimension[FeeDimensions.DBRead]; + result[FeeDimensions.DBWrite] += dimension[FeeDimensions.DBWrite]; + result[FeeDimensions.Compute] += dimension[FeeDimensions.Compute]; + } + return result; +}; + +export const dimensionsToGas = ( + dimensions: Dimensions, + weights: Dimensions, +): bigint => { + return BigInt( + dimensions[FeeDimensions.Bandwidth] * weights[FeeDimensions.Bandwidth] + + dimensions[FeeDimensions.DBRead] * weights[FeeDimensions.DBRead] + + dimensions[FeeDimensions.DBWrite] * weights[FeeDimensions.DBWrite] + + dimensions[FeeDimensions.Compute] * weights[FeeDimensions.Compute], + ); +}; diff --git a/src/vms/common/models.ts b/src/vms/common/models.ts index f7b043c30..2358725df 100644 --- a/src/vms/common/models.ts +++ b/src/vms/common/models.ts @@ -1,6 +1,6 @@ export type SpendOptions = { minIssuanceTime?: bigint; - changeAddresses?: Uint8Array[]; + changeAddresses?: readonly Uint8Array[]; threshold?: number; memo?: Uint8Array; locktime?: bigint; diff --git a/src/vms/common/unsignedTx.ts b/src/vms/common/unsignedTx.ts index 4d68a2017..49be51240 100644 --- a/src/vms/common/unsignedTx.ts +++ b/src/vms/common/unsignedTx.ts @@ -24,7 +24,7 @@ export class UnsignedTx { credentials: Credential[]; constructor( readonly tx: Transaction, - readonly utxos: Utxo[], + readonly utxos: readonly Utxo[], readonly addressMaps: AddressMaps, credentials?: Credential[], ) { diff --git a/src/vms/context/context.ts b/src/vms/context/context.ts index 9f999b5b9..2ab6c6142 100644 --- a/src/vms/context/context.ts +++ b/src/vms/context/context.ts @@ -1,6 +1,7 @@ import { getHRP } from '../../constants/networkIDs'; import { Info } from '../../info/info'; import { AVMApi } from '../avm/api'; +import { createDimensions } from '../common/fees/dimensions'; import type { Context } from './model'; /* @@ -49,5 +50,9 @@ export const getContextFromURI = async ( addSubnetDelegatorFee, networkID, hrp: getHRP(networkID), + + // TODO: Populate these values once they are exposed by the API + gasPrice: 0n, + complexityWeights: createDimensions(1, 1, 1, 1), }); }; diff --git a/src/vms/context/model.ts b/src/vms/context/model.ts index 1280a8a65..9f7498527 100644 --- a/src/vms/context/model.ts +++ b/src/vms/context/model.ts @@ -1,3 +1,5 @@ +import type { Dimensions } from '../common/fees/dimensions'; + export type Context = { readonly networkID: number; readonly hrp: string; @@ -14,4 +16,8 @@ export type Context = { readonly addPrimaryNetworkDelegatorFee: bigint; readonly addSubnetValidatorFee: bigint; readonly addSubnetDelegatorFee: bigint; + + // Post-etna + readonly gasPrice: bigint; + readonly complexityWeights: Dimensions; }; diff --git a/src/vms/pvm/builder.spec.ts b/src/vms/pvm/builder.spec.ts index 1700b9b77..1b34e30db 100644 --- a/src/vms/pvm/builder.spec.ts +++ b/src/vms/pvm/builder.spec.ts @@ -220,7 +220,7 @@ describe('pvmBuilder', () => { }); it('AddValidatorTx - stakeable locked', () => { - const utxos = testUtxos(); + const utxos: Utxo[] = testUtxos(); const lockTime = BigInt(Math.floor(new Date().getTime() / 1000)) + 10000n; const lockedUtxo = new Utxo( new UTXOID(testUTXOID1, new Int(0)), @@ -265,7 +265,7 @@ describe('pvmBuilder', () => { }); it('AddDelegatorTx', () => { - const utxos = testUtxos(); + const utxos: Utxo[] = testUtxos(); 
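// A minimal sketch of how the new fee primitives above compose, assuming the
// per-dimension complexity of a transaction is already known. The import path
// and numbers here are illustrative placeholders; in the tests further down,
// the real values come from `calculateFee(unsignedTx.getTx(),
// context.complexityWeights, context.gasPrice)` in `../txs/fee/calculator`.
import {
  createDimensions,
  dimensionsToGas,
} from '../../src/vms/common/fees/dimensions'; // illustrative path

const complexity = createDimensions(200, 1, 1, 0); // bandwidth, DB reads, DB writes, compute
const weights = createDimensions(1, 10, 100, 1000); // e.g. Context.complexityWeights
const gasPrice = 1n; // e.g. Context.gasPrice

const gas = dimensionsToGas(complexity, weights); // 200*1 + 1*10 + 1*100 + 0*1000 = 310n
const fee = gas * gasPrice; // presumably how gas translates into the nAVAX fee

console.log({ gas, fee });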
const lockTime = BigInt(Math.floor(new Date().getTime() / 1000)) + 10000n; const lockedUtxo = new Utxo( new UTXOID(testUTXOID1, new Int(0)), diff --git a/src/vms/pvm/builder.ts b/src/vms/pvm/builder.ts index eb6b42610..a962b14a3 100644 --- a/src/vms/pvm/builder.ts +++ b/src/vms/pvm/builder.ts @@ -44,10 +44,6 @@ import { NodeId } from '../../serializable/fxs/common/nodeId'; import { createSignerOrSignerEmptyFromStrings } from '../../serializable/pvm/signer'; import { baseTxUnsafePvm } from '../common'; -/* - Builder is useful for building transactions that are specific to a chain. - */ - /** * @param fromAddresses - used for selecting which utxos are signable * @param utxoSet - list of utxos to spend from @@ -104,7 +100,7 @@ export function newBaseTx( @param threshold - the threshold to write on the utxo @param locktime - the locktime to write onto the utxo - @returns a unsignedTx + @returns UnsignedTx */ export function newImportTx( context: Context, @@ -143,6 +139,7 @@ export function newImportTx( if (!importedInputs.length) { throw new Error('no UTXOs available to import'); } + let inputs: TransferableInput[] = []; let changeOutputs: TransferableOutput[] = []; @@ -216,7 +213,7 @@ const getToBurn = ( * @param start The Unix time based on p-chain timestamp when the validator starts validating the Primary Network. * @param end The Unix time based on p-chain timestamp when the validator stops validating the Primary Network (and staked AVAX is returned). * @param weight The amount being delegated in nAVAX - * @param rewardAddresses The addresses which will recieve the rewards from the delegated stake. + * @param rewardAddresses The addresses which will receive the rewards from the delegated stake. * @param shares A number for the percentage times 10,000 of reward to be given to the validator when someone delegates to them. * @param threshold Opional. The number of signatures required to spend the funds in the resultant reward UTXO. Default 1. * @param locktime Optional. The locktime field created in the resulting reward outputs @@ -276,7 +273,7 @@ export function newAddValidatorTx( * @param utxos list of utxos to choose from * @param outputs list of outputs to create. * @param options used for filtering UTXO's - * @returns unsingedTx containing an exportTx + * @returns UnsignedTx containing an exportTx */ export function newExportTx( @@ -328,11 +325,11 @@ export function newExportTx( * @param start The Unix time based on p-chain timestamp when the validator starts validating the Primary Network. * @param end The Unix time based on p-chain timestamp when the validator stops validating the Primary Network (and staked AVAX is returned). * @param weight The amount being delegated in nAVAX - * @param rewardAddresses The addresses which will recieve the rewards from the delegated stake. + * @param rewardAddresses The addresses which will receive the rewards from the delegated stake. * @param options - used for filtering utxos * @param threshold Opional. The number of signatures required to spend the funds in the resultant reward UTXO. Default 1. * @param locktime Optional. The locktime field created in the resulting reward outputs - * @returns UnsingedTx + * @returns UnsignedTx */ export function newAddDelegatorTx( @@ -385,11 +382,11 @@ export function newAddDelegatorTx( * @param context * @param utxos list of utxos to choose from * @param fromAddressesBytes used for filtering utxos - * @param rewardAddresses The addresses which will recieve the rewards from the delegated stake. 
+ * @param rewardAddresses The addresses which will receive the rewards from the delegated stake. * @param options used for filtering utxos * @param threshold Opional. The number of signatures required to spend the funds in the resultant reward UTXO. Default 1. * @param locktime Optional. The locktime field created in the resulting reward outputs - * @returns UnsingedTx + * @returns UnsignedTx */ export function newCreateSubnetTx( context: Context, @@ -454,6 +451,10 @@ export function newCreateBlockchainTx( ) { const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + const genesisBytes = new Bytes( + new TextEncoder().encode(JSON.stringify(genesisData)), + ); + const { inputs, addressMaps, changeOutputs, inputUTXOs } = calculateUTXOSpend( new Map([[context.avaxAssetID, context.createBlockchainTxFee]]), undefined, @@ -475,7 +476,7 @@ export function newCreateBlockchainTx( new Stringpr(chainName), Id.fromString(vmID), fxIds.map(Id.fromString.bind(Id)), - new Bytes(new TextEncoder().encode(JSON.stringify(genesisData))), + genesisBytes, Input.fromNative(subnetAuth), ); @@ -616,6 +617,18 @@ export function newAddPermissionlessValidatorTx( const toStake = new Map([[assetId, weight]]); const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const signer = createSignerOrSignerEmptyFromStrings(publicKey, signature); + const validatorOutputOwners = OutputOwners.fromNative( + rewardAddresses, + locktime, + threshold, + ); + const delegatorOutputOwners = OutputOwners.fromNative( + delegatorRewardsOwner, + 0n, + ); + const { addressMaps, changeOutputs, inputUTXOs, inputs, stakeOutputs } = calculateUTXOSpend( toBurn, @@ -641,10 +654,10 @@ export function newAddPermissionlessValidatorTx( weight, Id.fromString(subnetID), ), - createSignerOrSignerEmptyFromStrings(publicKey, signature), + signer, stakeOutputs, - OutputOwners.fromNative(rewardAddresses, locktime, threshold), - OutputOwners.fromNative(delegatorRewardsOwner, 0n), + validatorOutputOwners, + delegatorOutputOwners, new Int(shares), ); return new UnsignedTx(validatorTx, inputUTXOs, addressMaps); @@ -700,6 +713,13 @@ export function newAddPermissionlessDelegatorTx( const toStake = new Map([[assetId, weight]]); const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const delegatorRewardsOwner = OutputOwners.fromNative( + rewardAddresses, + locktime, + threshold, + ); + const { addressMaps, changeOutputs, inputUTXOs, inputs, stakeOutputs } = calculateUTXOSpend( toBurn, @@ -726,7 +746,7 @@ export function newAddPermissionlessDelegatorTx( Id.fromString(subnetID), ), stakeOutputs, - OutputOwners.fromNative(rewardAddresses, locktime, threshold), + delegatorRewardsOwner, ); return new UnsignedTx(delegatorTx, inputUTXOs, addressMaps); } @@ -751,7 +771,7 @@ export function newAddPermissionlessDelegatorTx( * @param uptimeRequirement the minimum percentage a validator must be online and responsive to receive a reward * @param subnetAuth specifies indices of existing subnet owners * @param options used for filtering utxos - * @returns UnsingedTx containing a TransformSubnetTx + * @returns UnsignedTx containing a TransformSubnetTx */ export function newTransformSubnetTx( context: Context, @@ -825,7 +845,7 @@ export function newTransformSubnetTx( * @param options used for filtering utxos * @param threshold Opional. The number of signatures required to spend the funds in the resultant reward UTXO. Default 1. * @param locktime Optional. 
The locktime field created in the resulting reward outputs - * @returns UnsingedTx containing a TransferSubnetOwnershipTx + * @returns UnsignedTx containing a TransferSubnetOwnershipTx */ export function newTransferSubnetOwnershipTx( context: Context, diff --git a/src/vms/pvm/etna-builder/builder.test.ts b/src/vms/pvm/etna-builder/builder.test.ts new file mode 100644 index 000000000..09828f68b --- /dev/null +++ b/src/vms/pvm/etna-builder/builder.test.ts @@ -0,0 +1,1016 @@ +import { testContext as _testContext } from '../../../fixtures/context'; +import { + getLockedUTXO, + getNotTransferOutput, + getTransferableInputForTest, + getTransferableOutForTest, + getValidUtxo, + testAvaxAssetID, + testGenesisData, + testOwnerXAddress, + testSubnetId, + testUtxos, + testVMId, +} from '../../../fixtures/transactions'; +import { expectTxs } from '../../../fixtures/utils/expectTx'; +import { + BigIntPr, + Bytes, + Id, + Input, + Int, + NodeId, + OutputOwners, + Stringpr, + TransferableInput, + TransferableOutput, +} from '../../../serializable'; +import { + AddSubnetValidatorTx, + SubnetValidator, + type BaseTx as PVMBaseTx, + RemoveSubnetValidatorTx, + ImportTx, + ExportTx, + CreateSubnetTx, + CreateChainTx, + AddPermissionlessValidatorTx, + Signer, + TransferSubnetOwnershipTx, + AddPermissionlessDelegatorTx, +} from '../../../serializable/pvm'; +import { BaseTx as AvaxBaseTx } from '../../../serializable/avax'; +import { hexToBuffer } from '../../../utils'; +import type { UnsignedTx } from '../../common'; +import { createDimensions } from '../../common/fees/dimensions'; +import type { Context } from '../../context'; +import { calculateFee } from '../txs/fee/calculator'; +import { + newAddPermissionlessDelegatorTx, + newAddPermissionlessValidatorTx, + newAddSubnetValidatorTx, + newBaseTx, + newCreateChainTx, + newCreateSubnetTx, + newExportTx, + newImportTx, + newRemoveSubnetValidatorTx, + newTransferSubnetOwnershipTx, +} from './builder'; +import { testAddress1 } from '../../../fixtures/vms'; +import { AvaxToNAvax } from '../../../utils/avaxToNAvax'; +import { PrimaryNetworkID } from '../../../constants/networkIDs'; +import { + blsPublicKeyBytes, + blsSignatureBytes, +} from '../../../fixtures/primitives'; +import { proofOfPossession } from '../../../fixtures/pvm'; + +const testContext: Context = { + ..._testContext, + + // Required context for post-Etna + gasPrice: 1n, + complexityWeights: createDimensions(1, 10, 100, 1000), +}; + +const addTransferableAmounts = ( + transferableItems: + | readonly TransferableOutput[] + | readonly TransferableInput[], +): Map => { + const amounts = new Map(); + + for (const transferable of transferableItems) { + const assetId = transferable.getAssetId(); + + amounts.set(assetId, (amounts.get(assetId) ?? 0n) + transferable.amount()); + } + + return amounts; +}; + +const addAmounts = (...amounts: Map[]): Map => { + const amount = new Map(); + + for (const m of amounts) { + for (const [assetID, value] of m) { + amount.set(assetID, (amount.get(assetID) ?? 0n) + value); + } + } + + return amount; +}; + +/** + * Given a bigint, returns a human-readable string of the value. 
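+ * (Used by `checkFeeIsCorrect` below so that large nAVAX amounts are easier
+ * to compare when an assertion fails.)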
+ * + * @example + * ```ts + * formatBigIntToHumanReadable(123456789n); // '123_456_789n' + * formatBigIntToHumanReadable(1234567890n); // '1_234_567_890n' + * ``` + */ +const formatBigIntToHumanReadable = (value: bigint): string => { + const bigIntStr = value.toString(); + + return `${bigIntStr.replace(/\B(?=(\d{3})+(?!\d))/g, '_')}n`; +}; + +/** + * Calculates the required fee for the unsigned transaction + * and verifies that the burned amount is exactly the required fee. + */ +const checkFeeIsCorrect = ({ + unsignedTx, + inputs, + outputs, + additionalInputs = [], + additionalOutputs = [], +}: { + unsignedTx: UnsignedTx; + inputs: readonly TransferableInput[]; + outputs: readonly TransferableOutput[]; + additionalInputs?: readonly TransferableInput[]; + additionalOutputs?: readonly TransferableOutput[]; +}): [ + amountConsumed: Record, + expectedAmountConsumed: Record, + expectedFee: bigint, +] => { + const amountConsumed = addTransferableAmounts([ + ...inputs, + ...additionalInputs, + ]); + const amountProduced = addTransferableAmounts([ + ...outputs, + ...additionalOutputs, + ]); + + const expectedFee = calculateFee( + unsignedTx.getTx(), + testContext.complexityWeights, + testContext.gasPrice, + ); + + const expectedAmountBurned = addAmounts( + new Map([[testAvaxAssetID.toString(), expectedFee]]), + ); + + const expectedAmountConsumed = addAmounts( + amountProduced, + expectedAmountBurned, + ); + + // Convert each map into a object with a stringified bigint value. + const safeExpectedAmountConsumed = Object.fromEntries( + [...expectedAmountConsumed].map(([k, v]) => [ + k, + formatBigIntToHumanReadable(v), + ]), + ); + + const safeAmountConsumed = Object.fromEntries( + [...amountConsumed].map(([k, v]) => [k, formatBigIntToHumanReadable(v)]), + ); + + return [safeAmountConsumed, safeExpectedAmountConsumed, expectedFee]; +}; + +describe('./src/vms/pvm/etna-builder/builder.test.ts', () => { + const nodeId = 'NodeID-2m38qc95mhHXtrhjyGbe7r2NhniqHHJRB'; + const toAddress = hexToBuffer('0x5432112345123451234512'); + const fromAddressesBytes = [testOwnerXAddress.toBytes()]; + const getRewardsOwners = () => OutputOwners.fromNative([toAddress]); + + describe.each([ + { + name: 'no memo', + memo: undefined, + }, + { + name: 'with memo', + memo: Buffer.from('memo'), + }, + ])('$name', ({ memo }) => { + test('newBaseTx', () => { + const utxos = testUtxos(); + + const transferableOutput = TransferableOutput.fromNative( + testAvaxAssetID.toString(), + 1_000_000_000n, + [toAddress], + ); + + const utx = newBaseTx( + { + fromAddressesBytes, + outputs: [transferableOutput], + options: { + memo, + }, + utxos, + }, + testContext, + ); + + const { baseTx } = utx.getTx() as PVMBaseTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(inputs.length).toEqual(1); + expect(outputs.length).toEqual(2); + + expect(outputs).toContain(transferableOutput); + + expect(txMemo.toString()).toEqual(memo ? 
'memo' : ''); + + const [amountConsumed, expectedAmountConsumed] = checkFeeIsCorrect({ + unsignedTx: utx, + inputs, + outputs, + }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + }); + + test('newImportTx', () => { + const VALID_AMOUNT = BigInt(50 * 1e9); + const utxos = [ + getLockedUTXO(), + getNotTransferOutput(), + getValidUtxo(new BigIntPr(VALID_AMOUNT)), + ]; + + const unsignedTx = newImportTx( + { + fromAddressesBytes, + options: { + memo, + }, + sourceChainId: testContext.cBlockchainID, + toAddresses: [testAddress1], + utxos, + }, + testContext, + ); + + const { baseTx, ins: importedIns } = unsignedTx.getTx() as ImportTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalInputs: importedIns, + }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new ImportTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [ + TransferableOutput.fromNative( + testContext.avaxAssetID, + VALID_AMOUNT - expectedFee, + [testAddress1], + ), + ], + [], + memo ?? new Uint8Array(), + ), + Id.fromString(testContext.cBlockchainID), + [TransferableInput.fromUtxoAndSigindicies(utxos[2], [0])], + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newExportTx', () => { + const VALID_AMOUNT = BigInt(50 * 1e9); + const OUT_AMOUNT = BigInt(5 * 1e9); + const utxos = [ + getLockedUTXO(), + getNotTransferOutput(), + getValidUtxo(new BigIntPr(VALID_AMOUNT)), + ]; + const tnsOut = TransferableOutput.fromNative( + testContext.avaxAssetID, + OUT_AMOUNT, + [toAddress], + ); + + const unsignedTx = newExportTx( + { + destinationChainId: testContext.cBlockchainID, + fromAddressesBytes, + options: { + memo, + }, + outputs: [tnsOut], + utxos, + }, + testContext, + ); + + const { baseTx, outs: exportedOuts } = unsignedTx.getTx() as ExportTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalOutputs: exportedOuts, + }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new ExportTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [ + TransferableOutput.fromNative( + testContext.avaxAssetID, + VALID_AMOUNT - OUT_AMOUNT - expectedFee, + fromAddressesBytes, + ), + ], + [getTransferableInputForTest()], + memo ?? new Uint8Array(), + ), + Id.fromString(testContext.cBlockchainID), + [tnsOut], + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newCreateSubnetTx', () => { + const utxoInputAmt = BigInt(2 * 1e9); + + const unsignedTx = newCreateSubnetTx( + { + fromAddressesBytes, + options: { + memo, + }, + subnetOwners: [toAddress], + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + }, + testContext, + ); + + const { baseTx } = unsignedTx.getTx() as PVMBaseTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 
'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ unsignedTx, inputs, outputs }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new CreateSubnetTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? new Uint8Array(), + ), + getRewardsOwners(), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newCreateChainTx', () => { + const utxoInputAmt = BigInt(2 * 1e9); + + const unsignedTx = newCreateChainTx( + { + chainName: 'Random Chain Name', + fromAddressesBytes, + fxIds: [], + genesisData: testGenesisData, + options: { + memo, + }, + subnetAuth: [0], + subnetId: Id.fromHex(testSubnetId).toString(), + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + vmId: Id.fromHex(testVMId).toString(), + }, + testContext, + ); + + const { baseTx } = unsignedTx.getTx() as PVMBaseTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ unsignedTx, inputs, outputs }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new CreateChainTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? new Uint8Array(), + ), + Id.fromHex(testSubnetId), + new Stringpr('Random Chain Name'), + Id.fromHex(testVMId), + [], + new Bytes(new TextEncoder().encode(JSON.stringify(testGenesisData))), + Input.fromNative([0]), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newAddSubnetValidatorTx', () => { + const utxoInputAmt = BigInt(2 * 1e9); + + const unsignedTx = newAddSubnetValidatorTx( + { + end: 190_000_000n, + fromAddressesBytes, + nodeId, + options: { + memo, + }, + subnetAuth: [0], + subnetId: Id.fromHex(testSubnetId).toString(), + start: 100n, + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + weight: 1_800_000n, + }, + testContext, + ); + + const { baseTx } = unsignedTx.getTx() as PVMBaseTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ unsignedTx, inputs, outputs }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new AddSubnetValidatorTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? new Uint8Array(), + ), + SubnetValidator.fromNative( + nodeId, + 100n, + 190_000_000n, + 1_800_000n, + Id.fromHex(testSubnetId), + ), + Input.fromNative([0]), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newRemoveSubnetValidatorTx', () => { + const utxoInputAmt = BigInt(2 * 1e9); + + const unsignedTx = newRemoveSubnetValidatorTx( + { + fromAddressesBytes, + nodeId, + options: { + memo, + }, + subnetAuth: [0], + subnetId: Id.fromHex(testSubnetId).toString(), + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + }, + testContext, + ); + + const { baseTx } = unsignedTx.getTx() as PVMBaseTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 
'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ unsignedTx, inputs, outputs }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new RemoveSubnetValidatorTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? new Uint8Array(), + ), + NodeId.fromString(nodeId), + Id.fromHex(testSubnetId), + Input.fromNative([0]), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newAddPermissionlessValidatorTx - primary network', () => { + const utxoInputAmt = AvaxToNAvax(2); + const stakeAmount = 1_800_000n; + + const unsignedTx = newAddPermissionlessValidatorTx( + { + delegatorRewardsOwner: [], + end: 120n, + fromAddressesBytes, + nodeId, + options: { + memo, + }, + publicKey: blsPublicKeyBytes(), + rewardAddresses: [], + shares: 1, + signature: blsSignatureBytes(), + start: 0n, + subnetId: PrimaryNetworkID.toString(), + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + weight: stakeAmount, + }, + testContext, + ); + + const { baseTx, stake } = + unsignedTx.getTx() as AddPermissionlessValidatorTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalOutputs: stake, + }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new AddPermissionlessValidatorTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - stakeAmount - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? new Uint8Array(), + ), + SubnetValidator.fromNative( + NodeId.fromString(nodeId).toString(), + 0n, + 120n, + stakeAmount, + PrimaryNetworkID, + ), + new Signer(proofOfPossession()), + [getTransferableOutForTest(stakeAmount)], //stake + OutputOwners.fromNative([], 0n, 1), + OutputOwners.fromNative([], 0n, 1), + new Int(1), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newAddPermissionlessValidatorTx - subnet', () => { + const utxoInputAmt = AvaxToNAvax(2); + const stakeAmount = 1_800_000n; + + const unsignedTx = newAddPermissionlessValidatorTx( + { + delegatorRewardsOwner: [], + end: 120n, + fromAddressesBytes, + nodeId, + options: { + memo, + }, + publicKey: blsPublicKeyBytes(), + rewardAddresses: [], + shares: 1, + signature: blsSignatureBytes(), + start: 0n, + subnetId: Id.fromHex(testSubnetId).toString(), + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + weight: stakeAmount, + }, + testContext, + ); + + const { baseTx, stake } = + unsignedTx.getTx() as AddPermissionlessValidatorTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalOutputs: stake, + }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new AddPermissionlessValidatorTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - stakeAmount - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? 
new Uint8Array(), + ), + SubnetValidator.fromNative( + NodeId.fromString(nodeId).toString(), + 0n, + 120n, + stakeAmount, + Id.fromHex(testSubnetId), + ), + new Signer(proofOfPossession()), + [getTransferableOutForTest(stakeAmount)], //stake + OutputOwners.fromNative([], 0n, 1), + OutputOwners.fromNative([], 0n, 1), + new Int(1), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newAddPermissionlessValidatorTx - subnet with non avax staking token', () => { + const utxoInputAmt = AvaxToNAvax(2); + const stakingAssetId = Id.fromHex('0102'); + const stakeAmount = 1_000_000n; + + const unsignedTx = newAddPermissionlessValidatorTx( + { + delegatorRewardsOwner: [], + end: 120n, + fromAddressesBytes, + nodeId, + options: { + memo, + }, + publicKey: blsPublicKeyBytes(), + rewardAddresses: [], + shares: 1, + signature: blsSignatureBytes(), + stakingAssetId: stakingAssetId.toString(), + start: 0n, + subnetId: Id.fromHex(testSubnetId).toString(), + utxos: [ + getValidUtxo(new BigIntPr(utxoInputAmt)), + getValidUtxo(new BigIntPr(2n * stakeAmount), stakingAssetId), + ], + weight: stakeAmount, + }, + testContext, + ); + + const { baseTx, stake } = + unsignedTx.getTx() as AddPermissionlessValidatorTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalOutputs: stake, + }); + + expect(stake.length).toEqual(1); + // Expect correct stake out + expect(stake[0].assetId.toString()).toEqual(stakingAssetId.toString()); + expect(stake[0].amount()).toEqual(stakeAmount); + // Expect correct change utxos + expect(outputs.length).toEqual(2); + // Stake token change + expect(outputs[0].assetId.toString()).toEqual(stakingAssetId.toString()); + expect(outputs[0].amount()).toEqual(stakeAmount); + // AVAX Change + expect(outputs[1].assetId.toString()).toEqual(testContext.avaxAssetID); + expect(outputs[1].amount()).toEqual(utxoInputAmt - expectedFee); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + }); + + test('newAddPermissionlessDelegator - primary network', () => { + const utxoInputAmt = AvaxToNAvax(2); + const stakeAmount = 1_800_000n; + + const unsignedTx = newAddPermissionlessDelegatorTx( + { + end: 120n, + fromAddressesBytes, + nodeId, + options: { + memo, + }, + rewardAddresses: [], + start: 0n, + subnetId: PrimaryNetworkID.toString(), + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + weight: stakeAmount, + }, + testContext, + ); + + const { baseTx, stake } = + unsignedTx.getTx() as AddPermissionlessDelegatorTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalOutputs: stake, + }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new AddPermissionlessDelegatorTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - stakeAmount - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? 
new Uint8Array(), + ), + SubnetValidator.fromNative( + NodeId.fromString(nodeId).toString(), + 0n, + 120n, + stakeAmount, + PrimaryNetworkID, + ), + [getTransferableOutForTest(stakeAmount)], //stake + OutputOwners.fromNative([], 0n, 1), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newAddPermissionlessDelegator - subnet', () => { + const utxoInputAmt = AvaxToNAvax(2); + const stakeAmount = 1_800_000n; + + const unsignedTx = newAddPermissionlessDelegatorTx( + { + end: 120n, + fromAddressesBytes, + nodeId, + options: { + memo, + }, + rewardAddresses: [], + start: 0n, + subnetId: Id.fromHex(testSubnetId).toString(), + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + weight: stakeAmount, + }, + testContext, + ); + + const { baseTx, stake } = + unsignedTx.getTx() as AddPermissionlessDelegatorTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalOutputs: stake, + }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new AddPermissionlessDelegatorTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - stakeAmount - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? new Uint8Array(), + ), + SubnetValidator.fromNative( + NodeId.fromString(nodeId).toString(), + 0n, + 120n, + stakeAmount, + Id.fromHex(testSubnetId), + ), + [getTransferableOutForTest(stakeAmount)], //stake + OutputOwners.fromNative([], 0n, 1), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + + test('newAddPermissionlessDelegator - subnet with non avax staking token', () => { + const utxoInputAmt = AvaxToNAvax(2); + const stakingAssetId = Id.fromHex('0102'); + const stakeAmount = 1_000_000n; + + const unsignedTx = newAddPermissionlessDelegatorTx( + { + end: 120n, + fromAddressesBytes, + nodeId, + options: { + memo, + }, + rewardAddresses: [], + stakingAssetId: stakingAssetId.toString(), + start: 0n, + subnetId: Id.fromHex(testSubnetId).toString(), + utxos: [ + getValidUtxo(new BigIntPr(utxoInputAmt)), + getValidUtxo(new BigIntPr(2n * stakeAmount), stakingAssetId), + ], + weight: stakeAmount, + }, + testContext, + ); + + const { baseTx, stake } = + unsignedTx.getTx() as AddPermissionlessDelegatorTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + expect(txMemo.toString()).toEqual(memo ? 
'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalOutputs: stake, + }); + + expect(stake.length).toEqual(1); + // Expect correct stake out + expect(stake[0].assetId.toString()).toEqual(stakingAssetId.toString()); + expect(stake[0].amount()).toEqual(stakeAmount); + // Expect correct change utxos + expect(outputs.length).toEqual(2); + // Stake token change + expect(outputs[0].assetId.toString()).toEqual(stakingAssetId.toString()); + expect(outputs[0].amount()).toEqual(stakeAmount); + // AVAX Change + expect(outputs[1].assetId.toString()).toEqual(testContext.avaxAssetID); + expect(outputs[1].amount()).toEqual(utxoInputAmt - expectedFee); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + }); + + test('newTransferSubnetOwnershipTx', () => { + const utxoInputAmt = BigInt(2 * 1e9); + const subnetAuth = [0, 1]; + + const unsignedTx = newTransferSubnetOwnershipTx( + { + fromAddressesBytes, + options: { + memo, + }, + subnetAuth, + subnetId: Id.fromHex(testSubnetId).toString(), + subnetOwners: [toAddress], + utxos: [getValidUtxo(new BigIntPr(utxoInputAmt))], + }, + testContext, + ); + + const { baseTx } = unsignedTx.getTx() as PVMBaseTx; + const { inputs, outputs, memo: txMemo } = baseTx; + + expect(txMemo.toString()).toEqual(memo ? 'memo' : ''); + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ unsignedTx, inputs, outputs }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new TransferSubnetOwnershipTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [getTransferableOutForTest(utxoInputAmt - expectedFee)], + [getTransferableInputForTest(utxoInputAmt)], + memo ?? new Uint8Array(), + ), + Id.fromHex(testSubnetId), + Input.fromNative(subnetAuth), + getRewardsOwners(), + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + }); + + describe('ImportTx', () => { + it('should create an ImportTx with only AVAX and not non-AVAX assets', () => { + const utxos = [ + getLockedUTXO(), // Locked and should be ignored. + getNotTransferOutput(), // Invalid and should be ignored. + // AVAX Assets + getValidUtxo(new BigIntPr(BigInt(35 * 1e9)), testAvaxAssetID), + getValidUtxo(new BigIntPr(BigInt(28 * 1e9)), testAvaxAssetID), + // Non-AVAX Assets (Jupiter) + getValidUtxo(new BigIntPr(BigInt(15 * 1e9)), Id.fromString('jupiter')), + getValidUtxo(new BigIntPr(BigInt(11 * 1e9)), Id.fromString('jupiter')), + // Non-AVAX Asset (Mars) + getValidUtxo(new BigIntPr(BigInt(9 * 1e9)), Id.fromString('mars')), + ]; + + const unsignedTx = newImportTx( + { + fromAddressesBytes, + sourceChainId: testContext.cBlockchainID, + toAddresses: [testAddress1], + utxos, + }, + testContext, + ); + + const { baseTx, ins: importedIns } = unsignedTx.getTx() as ImportTx; + const { inputs, outputs } = baseTx; + + const [amountConsumed, expectedAmountConsumed, expectedFee] = + checkFeeIsCorrect({ + unsignedTx, + inputs, + outputs, + additionalInputs: importedIns, + }); + + expect(amountConsumed).toEqual(expectedAmountConsumed); + + const expectedTx = new ImportTx( + AvaxBaseTx.fromNative( + testContext.networkID, + testContext.pBlockchainID, + [ + // Only AVAX asset here. + // _If_ we did p-chain did support other assets, they would come first, + // sorted by TransferableInput.compare. 
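+            // The single output below consolidates both AVAX UTXOs (35 + 28 AVAX) minus the dynamic fee.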
+ TransferableOutput.fromNative( + testContext.avaxAssetID, + BigInt((35 + 28) * 1e9) - expectedFee, + [testAddress1], + ), + ], + [], + new Uint8Array(), + ), + Id.fromString(testContext.cBlockchainID), + [ + TransferableInput.fromUtxoAndSigindicies(utxos[2], [0]), + TransferableInput.fromUtxoAndSigindicies(utxos[3], [0]), + ], + ); + + expectTxs(unsignedTx.getTx(), expectedTx); + }); + }); +}); diff --git a/src/vms/pvm/etna-builder/builder.ts b/src/vms/pvm/etna-builder/builder.ts new file mode 100644 index 000000000..0b40e13e0 --- /dev/null +++ b/src/vms/pvm/etna-builder/builder.ts @@ -0,0 +1,1206 @@ +/** + * @module + * + * This module contains builder functions which are responsible for building + * PVM transactions post e-upgrade (etna), which uses dynamic fees based on transaction complexity. + */ + +import { + PlatformChainID, + PrimaryNetworkID, +} from '../../../constants/networkIDs'; +import type { TransferOutput } from '../../../serializable'; +import { + Input, + NodeId, + OutputOwners, + Stringpr, + TransferInput, +} from '../../../serializable'; +import { + Bytes, + Id, + Int, + TransferableInput, + TransferableOutput, +} from '../../../serializable'; +import { BaseTx as AvaxBaseTx } from '../../../serializable/avax'; +import type { Utxo } from '../../../serializable/avax/utxo'; +import { ID_LEN } from '../../../serializable/fxs/common/id'; +import { + AddPermissionlessDelegatorTx, + AddPermissionlessValidatorTx, + AddSubnetValidatorTx, + BaseTx, + CreateChainTx, + CreateSubnetTx, + ExportTx, + ImportTx, + RemoveSubnetValidatorTx, + SubnetValidator, + TransferSubnetOwnershipTx, +} from '../../../serializable/pvm'; +import { createSignerOrSignerEmptyFromStrings } from '../../../serializable/pvm/signer'; +import { AddressMaps, addressesFromBytes, isTransferOut } from '../../../utils'; +import { matchOwners } from '../../../utils/matchOwners'; +import { compareTransferableOutputs } from '../../../utils/sort'; +import { baseTxUnsafePvm, type SpendOptions, UnsignedTx } from '../../common'; +import { defaultSpendOptions } from '../../common/defaultSpendOptions'; +import type { Dimensions } from '../../common/fees/dimensions'; +import { addDimensions, createDimensions } from '../../common/fees/dimensions'; +import type { Context } from '../../context'; +import { + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES, + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_BASE_TX_COMPLEXITIES, + INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES, + INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES, + INTRINSIC_EXPORT_TX_COMPLEXITIES, + INTRINSIC_IMPORT_TX_COMPLEXITIES, + INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES, + getAuthComplexity, + getInputComplexity, + getOutputComplexity, + getOwnerComplexity, + getSignerComplexity, +} from '../txs/fee'; +import { spend } from './spend'; +import { useSpendableLockedUTXOs, useUnlockedUTXOs } from './spend-reducers'; + +const getAddressMaps = ({ + inputs, + inputUTXOs, + minIssuanceTime, + fromAddressesBytes, +}: { + inputs: readonly TransferableInput[]; + inputUTXOs: readonly Utxo[]; + minIssuanceTime: bigint; + fromAddressesBytes: readonly Uint8Array[]; +}): AddressMaps => { + return AddressMaps.fromTransferableInputs( + inputs, + inputUTXOs, + minIssuanceTime, + fromAddressesBytes, + ); +}; + +const getMemoComplexity = ( + spendOptions: Required, +): Dimensions => { + return createDimensions(spendOptions.memo.length, 0, 0, 0); +}; + +/** 
+ * Common properties used in all PVM transaction builder functions. + */ +type CommonTxProps = Readonly<{ + /** + * List of addresses that are used for selecting which UTXOs are signable. + */ + fromAddressesBytes: readonly Uint8Array[]; + options?: SpendOptions; + /** + * List of UTXOs that are available to be spent. + */ + utxos: readonly Utxo[]; +}>; + +type TxProps> = CommonTxProps & Readonly; + +type TxBuilderFn>> = ( + props: T, + context: Context, +) => UnsignedTx; + +export type NewBaseTxProps = TxProps<{ + /** + * The desired output (change outputs will be added to them automatically). + */ + outputs: readonly TransferableOutput[]; +}>; + +/** + * Creates a new unsigned PVM base transaction (`BaseTx`) using calculated dynamic fees. + * + * @param props {NewBaseTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. + */ +export const newBaseTx: TxBuilderFn = ( + { fromAddressesBytes, options, outputs, utxos }, + context, +) => { + const fromAddresses = addressesFromBytes(fromAddressesBytes); + const defaultedOptions = defaultSpendOptions( + [...fromAddressesBytes], + options, + ); + const toBurn = new Map(); + + outputs.forEach((out) => { + const assetId = out.assetId.value(); + const amountToBurn = (toBurn.get(assetId) ?? 0n) + out.amount(); + + toBurn.set(assetId, amountToBurn); + }); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const outputComplexity = getOutputComplexity(outputs); + + const complexity = addDimensions( + INTRINSIC_BASE_TX_COMPLEXITIES, + memoComplexity, + outputComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses, + initialComplexity: complexity, + shouldConsolidateOutputs: true, + spendOptions: defaultedOptions, + toBurn, + utxos, + }, + [useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + const allOutputs = [...outputs, ...changeOutputs].sort( + compareTransferableOutputs, + ); + + return new UnsignedTx( + new BaseTx( + baseTxUnsafePvm(context, allOutputs, inputs, defaultedOptions.memo), + ), + inputUTXOs, + addressMaps, + ); +}; + +export type NewImportTxProps = TxProps<{ + /** + * The locktime to write onto the UTXO. + */ + locktime?: bigint; + /** + * Base58 string of the source chain ID. + */ + sourceChainId: string; + /** + * The threshold to write on the UTXO. + */ + threshold?: number; + /** + * List of addresses to import into. + */ + toAddresses: readonly Uint8Array[]; +}>; + +/** + * Creates a new unsigned PVM import transaction (`ImportTx`) using calculated dynamic fees. + * + * @param props {NewImportTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. 
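+ *
+ * @example
+ * A minimal usage sketch (not part of this changeset). It assumes the caller
+ * has already built a `context`, fetched `utxos` for the importing addresses,
+ * and byte-decoded `fromAddressesBytes` / `toAddresses`, as described by the props.
+ *
+ * const importTx = newImportTx(
+ *   {
+ *     fromAddressesBytes,
+ *     sourceChainId: context.xBlockchainID,
+ *     toAddresses,
+ *     utxos,
+ *   },
+ *   context,
+ * );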
+ */ +export const newImportTx: TxBuilderFn = ( + { + fromAddressesBytes, + locktime, + options, + sourceChainId, + threshold, + toAddresses, + utxos, + }, + context, +) => { + const fromAddresses = addressesFromBytes(fromAddressesBytes); + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const { importedInputs, importedAmounts } = utxos + .filter( + (utxo): utxo is Utxo => + isTransferOut(utxo.output) && + // Currently - only AVAX is allowed to be imported to the P-Chain + utxo.assetId.toString() === context.avaxAssetID, + ) + .reduce<{ + importedInputs: TransferableInput[]; + importedAmounts: Record; + }>( + (acc, utxo) => { + const { sigIndicies: inputSigIndices } = + matchOwners( + utxo.getOutputOwners(), + fromAddresses, + defaultedOptions.minIssuanceTime, + ) || {}; + + if (inputSigIndices === undefined) { + // We couldn't spend this UTXO, so we skip to the next one. + return acc; + } + + const assetId = utxo.getAssetId(); + + return { + importedInputs: [ + ...acc.importedInputs, + new TransferableInput( + utxo.utxoId, + utxo.assetId, + new TransferInput( + utxo.output.amt, + new Input(inputSigIndices.map((value) => new Int(value))), + ), + ), + ], + importedAmounts: { + ...acc.importedAmounts, + [assetId]: + (acc.importedAmounts[assetId] ?? 0n) + utxo.output.amount(), + }, + }; + }, + { importedInputs: [], importedAmounts: {} }, + ); + + if (importedInputs.length === 0) { + throw new Error('no UTXOs available to import'); + } + + const importedAvax = importedAmounts[context.avaxAssetID]; + + const addressMaps = AddressMaps.fromTransferableInputs( + importedInputs, + utxos, + defaultedOptions.minIssuanceTime, + fromAddressesBytes, + ); + + const outputs: TransferableOutput[] = Object.entries(importedAmounts) + .filter(([assetID]) => assetID !== context.avaxAssetID) + .map(([assetID, amount]) => + TransferableOutput.fromNative( + assetID, + amount, + toAddresses, + locktime, + threshold, + ), + ); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const inputComplexity = getInputComplexity(importedInputs); + + const outputComplexity = getOutputComplexity(outputs); + + const complexity = addDimensions( + INTRINSIC_IMPORT_TX_COMPLEXITIES, + memoComplexity, + inputComplexity, + outputComplexity, + ); + + const spendResults = spend( + { + excessAVAX: importedAvax, + fromAddresses, + initialComplexity: complexity, + ownerOverride: OutputOwners.fromNative(toAddresses, locktime, threshold), + spendOptions: defaultedOptions, + utxos, + }, + [useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs } = spendResults; + + return new UnsignedTx( + new ImportTx( + new AvaxBaseTx( + new Int(context.networkID), + PlatformChainID, + [...outputs, ...changeOutputs].sort(compareTransferableOutputs), + inputs, + new Bytes(defaultedOptions.memo), + ), + Id.fromString(sourceChainId), + importedInputs.sort(TransferableInput.compare), + ), + inputUTXOs, + addressMaps, + ); +}; + +export type NewExportTxProps = TxProps<{ + /** + * Base58 string of the destination chain ID. + */ + destinationChainId: string; + /** + * List of outputs to create. + */ + outputs: readonly TransferableOutput[]; +}>; + +/** + * Creates a new unsigned PVM export transaction (`ExportTx`) using calculated dynamic fees. + * + * @param props {NewExportTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. 
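+ *
+ * @example
+ * A minimal usage sketch (not part of this changeset). `context`, `utxos`,
+ * the byte-decoded addresses (`fromAddressesBytes`, `toAddressesBytes`) and
+ * the nAVAX `amount` are assumed to be prepared by the caller;
+ * `TransferableOutput` is exported by this package.
+ *
+ * const exportTx = newExportTx(
+ *   {
+ *     destinationChainId: context.xBlockchainID,
+ *     fromAddressesBytes,
+ *     outputs: [
+ *       TransferableOutput.fromNative(context.avaxAssetID, amount, toAddressesBytes),
+ *     ],
+ *     utxos,
+ *   },
+ *   context,
+ * );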
+ */ +export const newExportTx: TxBuilderFn = ( + { destinationChainId, fromAddressesBytes, options, outputs, utxos }, + context, +) => { + const fromAddresses = addressesFromBytes(fromAddressesBytes); + + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + const toBurn = new Map(); + + outputs.forEach((output) => { + const assetId = output.assetId.value(); + toBurn.set(assetId, (toBurn.get(assetId) ?? 0n) + output.output.amount()); + }); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const outputComplexity = getOutputComplexity(outputs); + + const complexity = addDimensions( + INTRINSIC_EXPORT_TX_COMPLEXITIES, + memoComplexity, + outputComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses, + initialComplexity: complexity, + spendOptions: defaultedOptions, + toBurn, + utxos, + }, + [useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + return new UnsignedTx( + new ExportTx( + new AvaxBaseTx( + new Int(context.networkID), + PlatformChainID, + changeOutputs, + inputs, + new Bytes(defaultedOptions.memo), + ), + Id.fromString(destinationChainId), + [...outputs].sort(compareTransferableOutputs), + ), + inputUTXOs, + addressMaps, + ); +}; + +export type NewCreateSubnetTxProps = TxProps<{ + /** + * The locktime to write onto the UTXO. + */ + locktime?: bigint; + subnetOwners: readonly Uint8Array[]; + /** + * The threshold to write on the UTXO. + */ + threshold?: number; +}>; + +/** + * Creates a new unsigned PVM create subnet transaction (`CreateSubnetTx`) using calculated dynamic fees. + * + * @param props {NewCreateSubnetTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. + */ +export const newCreateSubnetTx: TxBuilderFn = ( + { fromAddressesBytes, locktime, options, subnetOwners, threshold, utxos }, + context, +) => { + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const ownerComplexity = getOwnerComplexity( + OutputOwners.fromNative(subnetOwners, locktime, threshold), + ); + + const complexity = addDimensions( + INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES, + memoComplexity, + ownerComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses: addressesFromBytes(fromAddressesBytes), + initialComplexity: complexity, + spendOptions: defaultedOptions, + utxos, + }, + [useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + const createSubnetTx = new CreateSubnetTx( + AvaxBaseTx.fromNative( + context.networkID, + context.pBlockchainID, + changeOutputs, + inputs, + defaultedOptions.memo, + ), + OutputOwners.fromNative(subnetOwners, locktime, threshold), + ); + + return new UnsignedTx(createSubnetTx, inputUTXOs, addressMaps); +}; + +export type NewCreateChainTxProps = TxProps<{ + /** + * A human readable name for the chain. + */ + chainName: string; + /** + * IDs of the feature extensions running on the new chain. + */ + fxIds: readonly string[]; + /** + * JSON config for the genesis data. + */ + genesisData: Record; + /** + * Indices of subnet owners. 
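+   * The owner addresses at these positions must sign the transaction for it
+   * to be authorized by the subnet.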
+ */ + subnetAuth: readonly number[]; + /** + * ID of the subnet (Avalanche L1) that validates this chain. + */ + subnetId: string; + /** + * ID of the VM running on the new chain. + */ + vmId: string; +}>; + +/** + * Creates a new unsigned PVM create chain transaction (`CreateChainTx`) using calculated dynamic fees. + * + * @param props {NewCreateChainTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. + */ +export const newCreateChainTx: TxBuilderFn = ( + { + chainName, + fromAddressesBytes, + fxIds, + genesisData, + options, + subnetAuth, + subnetId, + utxos, + vmId, + }, + context, +) => { + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const genesisBytes = new Bytes( + new TextEncoder().encode(JSON.stringify(genesisData)), + ); + + const subnetAuthInput = Input.fromNative(subnetAuth); + + const dynamicComplexity = createDimensions( + fxIds.length * ID_LEN + + chainName.length + + genesisBytes.length + + defaultedOptions.memo.length, + 0, + 0, + 0, + ); + + const authComplexity = getAuthComplexity(subnetAuthInput); + + const complexity = addDimensions( + INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES, + dynamicComplexity, + authComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses: addressesFromBytes(fromAddressesBytes), + initialComplexity: complexity, + spendOptions: defaultedOptions, + utxos, + }, + [useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + const createChainTx = new CreateChainTx( + AvaxBaseTx.fromNative( + context.networkID, + context.pBlockchainID, + changeOutputs, + inputs, + defaultedOptions.memo, + ), + Id.fromString(subnetId), + new Stringpr(chainName), + Id.fromString(vmId), + fxIds.map(Id.fromString.bind(Id)), + genesisBytes, + subnetAuthInput, + ); + + return new UnsignedTx(createChainTx, inputUTXOs, addressMaps); +}; + +export type NewAddSubnetValidatorTxProps = TxProps<{ + end: bigint; + nodeId: string; + start: bigint; + /** + * Indices of subnet owners. + */ + subnetAuth: readonly number[]; + /** + * ID of the subnet (Avalanche L1) that validates this chain. + */ + subnetId: string; + weight: bigint; +}>; + +/** + * Creates a new unsigned PVM add subnet validator transaction + * (`AddSubnetValidatorTx`) using calculated dynamic fees. + * + * @param props {NewAddSubnetValidatorTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. 
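+ *
+ * @example
+ * A minimal usage sketch (not part of this changeset). `context`, `utxos`,
+ * `fromAddressesBytes`, `nodeId`, `subnetId`, the `start`/`end` Unix
+ * timestamps and the validator `weight` are assumed to be prepared by the caller.
+ *
+ * const tx = newAddSubnetValidatorTx(
+ *   {
+ *     end,
+ *     fromAddressesBytes,
+ *     nodeId,
+ *     start,
+ *     subnetAuth: [0],
+ *     subnetId,
+ *     utxos,
+ *     weight,
+ *   },
+ *   context,
+ * );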
+ */ +export const newAddSubnetValidatorTx: TxBuilderFn< + NewAddSubnetValidatorTxProps +> = ( + { + end, + fromAddressesBytes, + nodeId, + options, + start, + subnetAuth, + subnetId, + utxos, + weight, + }, + context, +) => { + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const authComplexity = getAuthComplexity(Input.fromNative(subnetAuth)); + + const complexity = addDimensions( + INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES, + memoComplexity, + authComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses: addressesFromBytes(fromAddressesBytes), + initialComplexity: complexity, + spendOptions: defaultedOptions, + utxos, + }, + [useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + const addSubnetValidatorTx = new AddSubnetValidatorTx( + AvaxBaseTx.fromNative( + context.networkID, + context.pBlockchainID, + changeOutputs, + inputs, + defaultedOptions.memo, + ), + SubnetValidator.fromNative( + nodeId, + start, + end, + weight, + Id.fromString(subnetId), + ), + Input.fromNative(subnetAuth), + ); + + return new UnsignedTx(addSubnetValidatorTx, inputUTXOs, addressMaps); +}; + +export type NewRemoveSubnetValidatorTxProps = TxProps<{ + nodeId: string; + /** + * Indices of subnet owners. + */ + subnetAuth: readonly number[]; + /** + * ID of the subnet (Avalanche L1) that validates this chain. + */ + subnetId: string; +}>; + +/** + * Creates a new unsigned PVM remove subnet validator transaction + * (`RemoveSubnetValidatorTx`) using calculated dynamic fees. + * + * @param props {NewRemoveSubnetValidatorTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. + */ +export const newRemoveSubnetValidatorTx: TxBuilderFn< + NewRemoveSubnetValidatorTxProps +> = ( + { fromAddressesBytes, nodeId, options, subnetAuth, subnetId, utxos }, + context, +) => { + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const authComplexity = getAuthComplexity(Input.fromNative(subnetAuth)); + + const complexity = addDimensions( + INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES, + memoComplexity, + authComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses: addressesFromBytes(fromAddressesBytes), + initialComplexity: complexity, + spendOptions: defaultedOptions, + utxos, + }, + [useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + const removeSubnetValidatorTx = new RemoveSubnetValidatorTx( + AvaxBaseTx.fromNative( + context.networkID, + context.pBlockchainID, + changeOutputs, + inputs, + defaultedOptions.memo, + ), + NodeId.fromString(nodeId), + Id.fromString(subnetId), + Input.fromNative(subnetAuth), + ); + + return new UnsignedTx(removeSubnetValidatorTx, inputUTXOs, addressMaps); +}; + +export type NewAddPermissionlessValidatorTxProps = TxProps<{ + delegatorRewardsOwner: readonly Uint8Array[]; + /** + * The Unix time based on p-chain timestamp when the validator + * stops validating the Primary Network (and staked AVAX is returned). 
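+   * Expressed in seconds since the Unix epoch, not milliseconds.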
+ */ + end: bigint; + /** + * Optional. The number locktime field created in the resulting reward outputs. + * @default 0n + */ + locktime?: bigint; + /** + * The node ID of the validator being added. + */ + nodeId: string; + /** + * The BLS public key. + */ + publicKey: Uint8Array; + /** + * The addresses which will receive the rewards from the delegated stake. + * Given addresses will share the reward UTXO. + */ + rewardAddresses: readonly Uint8Array[]; + /** + * A number for the percentage times 10,000 of reward to be given to the + * validator when someone delegates to them. + */ + shares: number; + /** + * The BLS signature. + */ + signature: Uint8Array; + /** + * Which asset to stake. Defaults to AVAX. + */ + stakingAssetId?: string; + /** + * The Unix time based on p-chain timestamp when the validator + * starts validating the Primary Network. + */ + start: bigint; + /** + * ID of the subnet (Avalanche L1) that validates this chain. + */ + subnetId: string; + /** + * Optional. The number of signatures required to spend the funds in the + * resultant reward UTXO. + * + * @default 1 + */ + threshold?: number; + /** + * The amount being locked for validation in nAVAX. + */ + weight: bigint; +}>; + +/** + * Creates a new unsigned PVM add permissionless validator transaction + * (`AddPermissionlessValidatorTx`) using calculated dynamic fees. + * + * @param props {NewAddPermissionlessValidatorTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. + */ +export const newAddPermissionlessValidatorTx: TxBuilderFn< + NewAddPermissionlessValidatorTxProps +> = ( + { + delegatorRewardsOwner, + end, + fromAddressesBytes, + locktime = 0n, + nodeId, + options, + publicKey, + rewardAddresses, + shares, + signature, + stakingAssetId, + start, + subnetId, + threshold = 1, + utxos, + weight, + }, + context, +) => { + const isPrimaryNetwork = subnetId === PrimaryNetworkID.toString(); + + const assetId = stakingAssetId ?? 
context.avaxAssetID; + + // Check if we use correct asset if on primary network + if (isPrimaryNetwork && assetId !== context.avaxAssetID) + throw new Error('Staking asset ID must be AVAX for the primary network.'); + + const toStake = new Map([[assetId, weight]]); + + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const signer = createSignerOrSignerEmptyFromStrings(publicKey, signature); + const validatorOutputOwners = OutputOwners.fromNative( + rewardAddresses, + locktime, + threshold, + ); + const delegatorOutputOwners = OutputOwners.fromNative( + delegatorRewardsOwner, + 0n, + ); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const signerComplexity = getSignerComplexity(signer); + const validatorOwnerComplexity = getOwnerComplexity(validatorOutputOwners); + const delegatorOwnerComplexity = getOwnerComplexity(delegatorOutputOwners); + + const complexity = addDimensions( + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES, + memoComplexity, + signerComplexity, + validatorOwnerComplexity, + delegatorOwnerComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses: addressesFromBytes(fromAddressesBytes), + initialComplexity: complexity, + shouldConsolidateOutputs: true, + spendOptions: defaultedOptions, + toStake, + utxos, + }, + [useSpendableLockedUTXOs, useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs, stakeOutputs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + const validatorTx = new AddPermissionlessValidatorTx( + AvaxBaseTx.fromNative( + context.networkID, + context.pBlockchainID, + changeOutputs, + inputs, + defaultedOptions.memo, + ), + SubnetValidator.fromNative( + nodeId, + start, + end, + weight, + Id.fromString(subnetId), + ), + signer, + stakeOutputs, + validatorOutputOwners, + delegatorOutputOwners, + new Int(shares), + ); + return new UnsignedTx(validatorTx, inputUTXOs, addressMaps); +}; + +export type NewAddPermissionlessDelegatorTxProps = TxProps<{ + /** + * The Unix time based on p-chain timestamp when the delegation stops + * (and staked AVAX is returned). + */ + end: bigint; + /** + * Optional. The number locktime field created in the resulting reward outputs. + * @default 0n + */ + locktime?: bigint; + /** + * The node ID of the validator being delegated to. + */ + nodeId: string; + /** + * The addresses which will receive the rewards from the delegated stake. + * Given addresses will share the reward UTXO. + */ + rewardAddresses: readonly Uint8Array[]; + /** + * Which asset to stake. Defaults to AVAX. + */ + stakingAssetId?: string; + /** + * The Unix time based on p-chain timestamp when the delegation starts. + */ + start: bigint; + /** + * ID of the subnet (Avalanche L1) being delegated to. + */ + subnetId: string; + /** + * Optional. The number of signatures required to spend the funds in the + * resultant reward UTXO. + * + * @default 1 + */ + threshold?: number; + /** + * The amount being delegated in nAVAX. + */ + weight: bigint; +}>; + +/** + * Creates a new unsigned PVM add permissionless delegator transaction + * (`AddPermissionlessDelegatorTx`) using calculated dynamic fees. + * + * @param props {NewAddPermissionlessDelegatorTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. 
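+ *
+ * @example
+ * A minimal primary-network delegation sketch (not part of this changeset).
+ * `context`, `utxos`, `fromAddressesBytes`, `rewardAddresses`, `nodeId` and
+ * the `start`/`end` Unix timestamps are assumed to be prepared by the caller;
+ * `PrimaryNetworkID` comes from the network ID constants used in this module.
+ *
+ * const tx = newAddPermissionlessDelegatorTx(
+ *   {
+ *     end,
+ *     fromAddressesBytes,
+ *     nodeId,
+ *     rewardAddresses,
+ *     start,
+ *     subnetId: PrimaryNetworkID.toString(),
+ *     utxos,
+ *     weight: BigInt(1e9), // 1 AVAX, in nAVAX
+ *   },
+ *   context,
+ * );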
+ */ +export const newAddPermissionlessDelegatorTx: TxBuilderFn< + NewAddPermissionlessDelegatorTxProps +> = ( + { + end, + fromAddressesBytes, + locktime = 0n, + nodeId, + options, + rewardAddresses, + stakingAssetId, + start, + subnetId, + threshold = 1, + utxos, + weight, + }, + context, +) => { + const isPrimaryNetwork = subnetId === PrimaryNetworkID.toString(); + + const assetId = stakingAssetId ?? context.avaxAssetID; + + // Check if we use correct asset if on primary network + if (isPrimaryNetwork && assetId !== context.avaxAssetID) + throw new Error('Staking asset ID must be AVAX for the primary network.'); + + const toStake = new Map([[assetId, weight]]); + + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const delegatorRewardsOwner = OutputOwners.fromNative( + rewardAddresses, + locktime, + threshold, + ); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const ownerComplexity = getOwnerComplexity(delegatorRewardsOwner); + + const complexity = addDimensions( + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES, + memoComplexity, + ownerComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses: addressesFromBytes(fromAddressesBytes), + initialComplexity: complexity, + shouldConsolidateOutputs: true, + spendOptions: defaultedOptions, + toStake, + utxos, + }, + [useSpendableLockedUTXOs, useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs, stakeOutputs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + const delegatorTx = new AddPermissionlessDelegatorTx( + AvaxBaseTx.fromNative( + context.networkID, + context.pBlockchainID, + changeOutputs, + inputs, + defaultedOptions.memo, + ), + SubnetValidator.fromNative( + nodeId, + start, + end, + weight, + Id.fromString(subnetId), + ), + stakeOutputs, + delegatorRewardsOwner, + ); + + return new UnsignedTx(delegatorTx, inputUTXOs, addressMaps); +}; + +export type NewTransferSubnetOwnershipTxProps = TxProps<{ + /** + * Optional. The number locktime field created in the resulting reward outputs. + * @default 0n + */ + locktime?: bigint; + /** + * Indices of existing subnet owners. + */ + subnetAuth: readonly number[]; + /** + * ID of the subnet (Avalanche L1). + */ + subnetId: string; + /** + * The new owner(s) addresses. + */ + subnetOwners: readonly Uint8Array[]; + /** + * Optional. The number of signatures required to spend the funds in the + * resultant reward UTXO. + * + * @default 1 + */ + threshold?: number; +}>; + +/** + * Creates a new unsigned PVM transfer subnet ownership transaction + * (`TransferSubnetOwnershipTx`) using calculated dynamic fees. + * + * @param props {NewTransferSubnetOwnershipTxProps} + * @param context {Context} + * @returns {UnsignedTx} An UnsignedTx. 
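+ *
+ * @example
+ * A minimal usage sketch (not part of this changeset). `context`, `utxos`,
+ * `fromAddressesBytes`, `subnetId` and the new owner address bytes
+ * (`newOwnerAddressBytes`) are assumed to be prepared by the caller.
+ *
+ * const tx = newTransferSubnetOwnershipTx(
+ *   {
+ *     fromAddressesBytes,
+ *     subnetAuth: [0],
+ *     subnetId,
+ *     subnetOwners: [newOwnerAddressBytes],
+ *     utxos,
+ *   },
+ *   context,
+ * );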
+ */ +export const newTransferSubnetOwnershipTx: TxBuilderFn< + NewTransferSubnetOwnershipTxProps +> = ( + { + fromAddressesBytes, + locktime = 0n, + options, + subnetAuth, + subnetId, + subnetOwners, + threshold = 1, + utxos, + }, + context, +) => { + const defaultedOptions = defaultSpendOptions(fromAddressesBytes, options); + + const memoComplexity = getMemoComplexity(defaultedOptions); + + const authComplexity = getAuthComplexity(Input.fromNative(subnetAuth)); + + const ownerComplexity = getOwnerComplexity( + OutputOwners.fromNative(subnetOwners, locktime, threshold), + ); + + const complexity = addDimensions( + INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES, + memoComplexity, + authComplexity, + ownerComplexity, + ); + + const spendResults = spend( + { + excessAVAX: 0n, + fromAddresses: addressesFromBytes(fromAddressesBytes), + initialComplexity: complexity, + spendOptions: defaultedOptions, + utxos, + }, + [useUnlockedUTXOs], + context, + ); + + const { changeOutputs, inputs, inputUTXOs } = spendResults; + const addressMaps = getAddressMaps({ + inputs, + inputUTXOs, + minIssuanceTime: defaultedOptions.minIssuanceTime, + fromAddressesBytes, + }); + + return new UnsignedTx( + new TransferSubnetOwnershipTx( + AvaxBaseTx.fromNative( + context.networkID, + context.pBlockchainID, + changeOutputs, + inputs, + defaultedOptions.memo, + ), + Id.fromString(subnetId), + Input.fromNative(subnetAuth), + OutputOwners.fromNative(subnetOwners, locktime, threshold), + ), + inputUTXOs, + addressMaps, + ); +}; diff --git a/src/vms/pvm/etna-builder/index.ts b/src/vms/pvm/etna-builder/index.ts new file mode 100644 index 000000000..ecea700bc --- /dev/null +++ b/src/vms/pvm/etna-builder/index.ts @@ -0,0 +1 @@ +export * from './builder'; diff --git a/src/vms/pvm/etna-builder/spend-reducers/errors.ts b/src/vms/pvm/etna-builder/spend-reducers/errors.ts new file mode 100644 index 000000000..cbc6c4c39 --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/errors.ts @@ -0,0 +1,3 @@ +export const IncorrectStakeableLockOutError = new Error( + 'StakeableLockOut transferOut must be a TransferOutput.', +); diff --git a/src/vms/pvm/etna-builder/spend-reducers/fixtures/reducers.ts b/src/vms/pvm/etna-builder/spend-reducers/fixtures/reducers.ts new file mode 100644 index 000000000..9673dbd6d --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/fixtures/reducers.ts @@ -0,0 +1,61 @@ +import { testContext } from '../../../../../fixtures/context'; +import { Address, OutputOwners } from '../../../../../serializable'; +import type { SpendOptions } from '../../../../common'; +import { defaultSpendOptions } from '../../../../common/defaultSpendOptions'; +import { createDimensions } from '../../../../common/fees/dimensions'; +import type { SpendHelperProps } from '../../spendHelper'; +import { SpendHelper } from '../../spendHelper'; +import type { SpendReducerState } from '../types'; + +export const CHANGE_ADDRESS = Address.fromString( + 'P-fuji1y50xa9363pn3d5gjhcz3ltp3fj6vq8x8a5txxg', +); +export const CHANGE_OWNERS: OutputOwners = OutputOwners.fromNative([ + CHANGE_ADDRESS.toBytes(), +]); + +export const getInitialReducerState = ({ + spendOptions, + ...state +}: Partial> & { + spendOptions?: SpendOptions; +} = {}): SpendReducerState => ({ + excessAVAX: 0n, + initialComplexity: createDimensions(1, 1, 1, 1), + fromAddresses: [CHANGE_ADDRESS], + ownerOverride: null, + spendOptions: defaultSpendOptions( + state?.fromAddresses?.map((address) => address.toBytes()) ?? 
[ + CHANGE_ADDRESS.toBytes(), + ], + spendOptions, + ), + toBurn: new Map(), + toStake: new Map(), + utxos: [], + ...state, +}); + +export const getSpendHelper = ({ + initialComplexity = createDimensions(1, 1, 1, 1), + shouldConsolidateOutputs = false, + toBurn = new Map(), + toStake = new Map(), +}: Partial< + Pick< + SpendHelperProps, + 'initialComplexity' | 'shouldConsolidateOutputs' | 'toBurn' | 'toStake' + > +> = {}) => { + return new SpendHelper({ + changeOutputs: [], + gasPrice: testContext.gasPrice, + initialComplexity, + inputs: [], + shouldConsolidateOutputs, + stakeOutputs: [], + toBurn, + toStake, + weights: testContext.complexityWeights, + }); +}; diff --git a/src/vms/pvm/etna-builder/spend-reducers/handleFeeAndChange.test.ts b/src/vms/pvm/etna-builder/spend-reducers/handleFeeAndChange.test.ts new file mode 100644 index 000000000..2f233833d --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/handleFeeAndChange.test.ts @@ -0,0 +1,85 @@ +import { jest } from '@jest/globals'; + +import { testContext } from '../../../../fixtures/context'; +import { handleFeeAndChange } from './handleFeeAndChange'; +import { + CHANGE_OWNERS, + getInitialReducerState, + getSpendHelper, +} from './fixtures/reducers'; +import { + BigIntPr, + Id, + TransferOutput, + TransferableOutput, +} from '../../../../serializable'; + +describe('handleFeeAndChange', () => { + test('throws an error if excessAVAX is less than the required fee', () => { + expect(() => + handleFeeAndChange( + getInitialReducerState(), + getSpendHelper(), + testContext, + ), + ).toThrow( + `Insufficient funds: provided UTXOs need 4 more nAVAX (asset id: ${testContext.avaxAssetID})`, + ); + }); + + test('returns original state if excessAVAX equals the required fee', () => { + const state = getInitialReducerState({ excessAVAX: 4n }); + const spendHelper = getSpendHelper(); + const addChangeOutputSpy = jest.spyOn(spendHelper, 'addChangeOutput'); + const calculateFeeSpy = jest.spyOn(spendHelper, 'calculateFee'); + + expect(handleFeeAndChange(state, spendHelper, testContext)).toEqual(state); + expect(calculateFeeSpy).toHaveBeenCalledTimes(1); + expect(calculateFeeSpy).toHaveBeenCalledWith(); + expect(addChangeOutputSpy).not.toHaveBeenCalled(); + }); + + test('adds a change output if excessAVAX is greater than the required fee', () => { + const excessAVAX = 1_000n; + const state = getInitialReducerState({ + excessAVAX, + }); + const spendHelper = getSpendHelper(); + + const addChangeOutputSpy = jest.spyOn(spendHelper, 'addChangeOutput'); + const calculateFeeSpy = jest.spyOn(spendHelper, 'calculateFee'); + + expect(handleFeeAndChange(state, spendHelper, testContext)).toEqual({ + ...state, + excessAVAX, + }); + expect(calculateFeeSpy).toHaveBeenCalledTimes(2); + expect(calculateFeeSpy).toHaveBeenCalledWith( + new TransferableOutput( + Id.fromString(testContext.avaxAssetID), + new TransferOutput(new BigIntPr(0n), CHANGE_OWNERS), + ), + ); + expect(addChangeOutputSpy).toHaveBeenCalledTimes(1); + + expect( + spendHelper.hasChangeOutput(testContext.avaxAssetID, CHANGE_OWNERS), + ).toBe(true); + + expect(spendHelper.getInputsOutputs().changeOutputs).toHaveLength(1); + }); + + test('does not add change output if fee with temporary output complexity and excessAVAX are equal or less', () => { + const excessAVAX = 5n; + const state = getInitialReducerState({ + excessAVAX, + }); + const spendHelper = getSpendHelper(); + + const addChangeOutputSpy = jest.spyOn(spendHelper, 'addChangeOutput'); + + expect(handleFeeAndChange(state, spendHelper, 
testContext)).toEqual(state); + + expect(addChangeOutputSpy).not.toHaveBeenCalled(); + }); +}); diff --git a/src/vms/pvm/etna-builder/spend-reducers/handleFeeAndChange.ts b/src/vms/pvm/etna-builder/spend-reducers/handleFeeAndChange.ts new file mode 100644 index 000000000..94f7f9a08 --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/handleFeeAndChange.ts @@ -0,0 +1,100 @@ +import { + BigIntPr, + Id, + OutputOwners, + TransferOutput, + TransferableOutput, +} from '../../../../serializable'; +import type { Context } from '../../../context'; +import type { SpendReducerFunction } from './types'; + +/** + * Determines if the fee can be covered by the excess AVAX. + * + * @returns {boolean} - Whether the excess AVAX exceeds the fee. `true` greater than the fee, `false` if equal. + * @throws {Error} - If the excess AVAX is less than the required fee. + */ +const canPayFeeAndNeedsChange = ( + excessAVAX: bigint, + requiredFee: bigint, + context: Context, +): boolean => { + // Not enough funds to pay the fee. + if (excessAVAX < requiredFee) { + throw new Error( + `Insufficient funds: provided UTXOs need ${ + requiredFee - excessAVAX + } more nAVAX (asset id: ${context.avaxAssetID})`, + ); + } + + // No need to add a change to change output. + // Just burn the fee. + if (excessAVAX === requiredFee) { + return false; + } + + return true; +}; + +export const handleFeeAndChange: SpendReducerFunction = ( + state, + spendHelper, + context, +) => { + // Use the change owner override if it exists, otherwise use the default change owner. + // This is used on "import" transactions. + const changeOwners = + state.ownerOverride ?? + OutputOwners.fromNative(state.spendOptions.changeAddresses); + + const requiredFee = spendHelper.calculateFee(); + + // Checks for an existing change output that is for the AVAX asset assigned to the change owner. + const hasExistingChangeOutput: boolean = spendHelper.hasChangeOutput( + context.avaxAssetID, + changeOwners, + ); + + if (canPayFeeAndNeedsChange(state.excessAVAX, requiredFee, context)) { + if (hasExistingChangeOutput) { + // Excess exceeds fee, return the change. + // This output will get consolidated with the existing output. + spendHelper.addChangeOutput( + new TransferableOutput( + Id.fromString(context.avaxAssetID), + new TransferOutput( + new BigIntPr(state.excessAVAX - requiredFee), + changeOwners, + ), + ), + ); + } else { + // Calculate the fee with a temporary output complexity + // as if we added the change output. + const requiredFeeWithChangeOutput = spendHelper.calculateFee( + new TransferableOutput( + Id.fromString(context.avaxAssetID), + new TransferOutput(new BigIntPr(0n), changeOwners), + ), + ); + + // If the excess AVAX is greater than the new fee, add a change output. + // Otherwise, ignore and burn the excess because it can't be returned + // (ie there is no point in adding a change output if you can't afford to add it). 
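+      // For example, if the excess is 5 nAVAX but counting the extra output pushes
+      // the required fee to 5 nAVAX or more, the leftover is burned rather than returned.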
+ if (state.excessAVAX > requiredFeeWithChangeOutput) { + spendHelper.addChangeOutput( + new TransferableOutput( + Id.fromString(context.avaxAssetID), + new TransferOutput( + new BigIntPr(state.excessAVAX - requiredFeeWithChangeOutput), + changeOwners, + ), + ), + ); + } + } + } + + return state; +}; diff --git a/src/vms/pvm/etna-builder/spend-reducers/index.ts b/src/vms/pvm/etna-builder/spend-reducers/index.ts new file mode 100644 index 000000000..e81dd077c --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/index.ts @@ -0,0 +1,6 @@ +export { handleFeeAndChange } from './handleFeeAndChange'; +export { useSpendableLockedUTXOs } from './useSpendableLockedUTXOs'; +export { useUnlockedUTXOs } from './useUnlockedUTXOs'; +export { verifyAssetsConsumed } from './verifyAssetsConsumed'; + +export type * from './types'; diff --git a/src/vms/pvm/etna-builder/spend-reducers/types.ts b/src/vms/pvm/etna-builder/spend-reducers/types.ts new file mode 100644 index 000000000..efffe2c94 --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/types.ts @@ -0,0 +1,13 @@ +import type { Context } from '../../../context'; +import type { SpendProps } from '../spend'; +import type { SpendHelper } from '../spendHelper'; + +export type SpendReducerState = Readonly< + Required> +>; + +export type SpendReducerFunction = ( + state: SpendReducerState, + spendHelper: SpendHelper, + context: Context, +) => SpendReducerState; diff --git a/src/vms/pvm/etna-builder/spend-reducers/useSpendableLockedUTXOs.test.ts b/src/vms/pvm/etna-builder/spend-reducers/useSpendableLockedUTXOs.test.ts new file mode 100644 index 000000000..b83793049 --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/useSpendableLockedUTXOs.test.ts @@ -0,0 +1,229 @@ +import { testContext } from '../../../../fixtures/context'; +import { + getUsableUTXOsFilter, + useSpendableLockedUTXOs, +} from './useSpendableLockedUTXOs'; +import { getInitialReducerState, getSpendHelper } from './fixtures/reducers'; +import { + getLockedUTXO, + getStakeableLockoutOutput, + testAvaxAssetID, + testOwnerXAddress, + testUTXOID1, + testUTXOID2, +} from '../../../../fixtures/transactions'; +import { + Address, + BigIntPr, + Id, + Int, + TransferableOutput, +} from '../../../../serializable'; +import { Utxo } from '../../../../serializable/avax/utxo'; +import { + StakeableLockIn, + StakeableLockOut, +} from '../../../../serializable/pvm'; +import { IncorrectStakeableLockOutError } from './errors'; +import { hexToBuffer } from '../../../../utils'; +import { UTXOID } from '../../../../serializable/avax'; +import { NoSigMatchError } from '../../../utils/calculateSpend/utils'; + +describe('useSpendableLockedUTXOs', () => { + describe('getUsableUTXOsFilter', () => { + test('returns `false` if UTXO output not a stakeable lockout', () => { + expect( + getUsableUTXOsFilter(getInitialReducerState())(getLockedUTXO()), + ).toBe(false); + }); + + test('returns `false` if UTXO output is a stakeable lockout but locktime is greater than minIssuanceTime', () => { + const state = getInitialReducerState({ + spendOptions: { + minIssuanceTime: 100n, + }, + }); + + const utxo = getStakeableLockoutOutput(testUTXOID1, 50n, 200n); + + expect(getUsableUTXOsFilter(state)(utxo)).toBe(false); + }); + + test('returns `false` if UTXO output is a stakeable lockout with valid locktime but not used in toStake', () => { + const state = getInitialReducerState({ + spendOptions: { + minIssuanceTime: 300n, + }, + }); + + const utxo = getStakeableLockoutOutput(testUTXOID1, 50n, 100n); + + 
expect(getUsableUTXOsFilter(state)(utxo)).toBe(false); + }); + + test('returns `true` if UTXO output is a stakeable lockout with valid locktime and used in toStake', () => { + const testAssetId = Id.fromString('testasset'); + + const state = getInitialReducerState({ + spendOptions: { + minIssuanceTime: 100n, + }, + toStake: new Map([[testAssetId.toString(), 100n]]), + }); + + const utxo = getStakeableLockoutOutput( + testUTXOID1, + 50n, + 300n, + testAssetId, + ); + + expect(getUsableUTXOsFilter(state)(utxo)).toBe(true); + }); + + test('throws an error if UTXO output is a StakeableLockOut and the transferOut is not a TransferOutput', () => { + const state = getInitialReducerState({ + spendOptions: { + minIssuanceTime: 100n, + }, + }); + + const invalidUTXO = new Utxo( + new UTXOID(testUTXOID2, new Int(0)), + testAvaxAssetID, + new StakeableLockOut( + new BigIntPr(300n), + new StakeableLockIn( + new BigIntPr(2000000000n), + TransferableOutput.fromNative(testAvaxAssetID.toString(), 20n, [ + hexToBuffer('0x12345678901234578901234567890123457890'), + ]), + ), + ), + ); + + expect(() => getUsableUTXOsFilter(state)(invalidUTXO)).toThrow( + IncorrectStakeableLockOutError, + ); + }); + }); + + it('should ignore UTXOs that signatures do not match', () => { + const toBurn = new Map([[testContext.avaxAssetID, 4_900n]]); + const toStake = new Map([[testContext.avaxAssetID, 4_900n]]); + + const initialState = getInitialReducerState({ + fromAddresses: [ + Address.fromString('P-fuji1y50xa9363pn3d5gjhcz3ltp3fj6vq8x8a5txxg'), + ], + excessAVAX: 0n, + spendOptions: { + minIssuanceTime: 100n, + }, + toBurn, + toStake, + utxos: [getStakeableLockoutOutput(testUTXOID1, 10_000n, 300n)], + }); + + const spendHelper = getSpendHelper({ toBurn, toStake }); + + expect(() => + useSpendableLockedUTXOs(initialState, spendHelper, testContext), + ).toThrow(NoSigMatchError); + }); + + it('should do nothing if UTXO has no remaining amount to stake', () => { + const toBurn = new Map(); + const toStake = new Map(); + + const initialState = getInitialReducerState({ + excessAVAX: 0n, + spendOptions: { + minIssuanceTime: 100n, + }, + toBurn, + toStake, + utxos: [getStakeableLockoutOutput(testUTXOID1, 10_000n, 300n)], + }); + + const spendHelper = getSpendHelper({ toBurn, toStake }); + const state = useSpendableLockedUTXOs( + initialState, + spendHelper, + testContext, + ); + const { changeOutputs, inputs, inputUTXOs, stakeOutputs } = + spendHelper.getInputsOutputs(); + + expect(state).toEqual(initialState); + expect(changeOutputs).toHaveLength(0); + expect(inputs).toHaveLength(0); + expect(inputUTXOs).toHaveLength(0); + expect(stakeOutputs).toHaveLength(0); + }); + + it('should add spendable locked UTXO with change', () => { + const toBurn = new Map(); + const toStake = new Map([[testAvaxAssetID.toString(), 1_000n]]); + + const initialState = getInitialReducerState({ + fromAddresses: [testOwnerXAddress], + excessAVAX: 0n, + spendOptions: { + minIssuanceTime: 100n, + }, + toBurn, + toStake, + utxos: [ + getStakeableLockoutOutput(testUTXOID1, 10_000n, 300n, testAvaxAssetID), + ], + }); + + const spendHelper = getSpendHelper({ toBurn, toStake }); + + useSpendableLockedUTXOs(initialState, spendHelper, testContext); + + const { changeOutputs, inputs, inputUTXOs, stakeOutputs } = + spendHelper.getInputsOutputs(); + + expect(inputs).toHaveLength(1); + expect(inputUTXOs).toHaveLength(1); + expect(changeOutputs).toHaveLength(1); + expect(stakeOutputs).toHaveLength(1); + + expect(stakeOutputs[0].amount()).toEqual(1_000n); + 
expect(changeOutputs[0].amount()).toEqual(9_000n); + }); + + it('should add spendable locked UTXO without change', () => { + const toBurn = new Map(); + const toStake = new Map([[testAvaxAssetID.toString(), 1_000n]]); + + const initialState = getInitialReducerState({ + fromAddresses: [testOwnerXAddress], + excessAVAX: 0n, + spendOptions: { + minIssuanceTime: 100n, + }, + toBurn, + toStake, + utxos: [ + getStakeableLockoutOutput(testUTXOID1, 1_000n, 300n, testAvaxAssetID), + ], + }); + + const spendHelper = getSpendHelper({ toBurn, toStake }); + + useSpendableLockedUTXOs(initialState, spendHelper, testContext); + + const { changeOutputs, inputs, inputUTXOs, stakeOutputs } = + spendHelper.getInputsOutputs(); + + expect(inputs).toHaveLength(1); + expect(inputUTXOs).toHaveLength(1); + expect(changeOutputs).toHaveLength(0); + expect(stakeOutputs).toHaveLength(1); + + expect(stakeOutputs[0].amount()).toEqual(1_000n); + }); +}); diff --git a/src/vms/pvm/etna-builder/spend-reducers/useSpendableLockedUTXOs.ts b/src/vms/pvm/etna-builder/spend-reducers/useSpendableLockedUTXOs.ts new file mode 100644 index 000000000..4f2870230 --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/useSpendableLockedUTXOs.ts @@ -0,0 +1,132 @@ +import { + BigIntPr, + TransferInput, + TransferOutput, + TransferableInput, + TransferableOutput, +} from '../../../../serializable'; +import type { Utxo } from '../../../../serializable/avax/utxo'; +import { + StakeableLockIn, + StakeableLockOut, +} from '../../../../serializable/pvm'; +import { + getUtxoInfo, + isStakeableLockOut, + isTransferOut, +} from '../../../../utils'; +import { verifySignaturesMatch } from '../../../utils/calculateSpend/utils'; +import { IncorrectStakeableLockOutError } from './errors'; +import type { SpendReducerFunction, SpendReducerState } from './types'; + +/** + * Is responsible for filtering out the usable UTXOs from the list of UTXOs. + * + * @internal - Only exported for testing. + */ +export const getUsableUTXOsFilter = + (state: SpendReducerState) => + (utxo: Utxo): utxo is Utxo> => { + // 1a. Ensure UTXO output is a StakeableLockOut. + if (!isStakeableLockOut(utxo.output)) { + return false; + } + + // 1b. Ensure UTXO is stakeable. + if (state.spendOptions.minIssuanceTime >= utxo.output.getLocktime()) { + return false; + } + + // 1c. Ensure transferOut is a TransferOutput. + if (!isTransferOut(utxo.output.transferOut)) { + throw IncorrectStakeableLockOutError; + } + + // 1d. Filter out UTXOs that aren't needed for staking. + if (!state.toStake.has(utxo.assetId.value())) { + return false; + } + + return true; + }; + +export const useSpendableLockedUTXOs: SpendReducerFunction = ( + state, + spendHelper, +) => { + // 1. Filter out the UTXOs that are not usable. + const usableUTXOs: Utxo>[] = state.utxos + // Filter out non stakeable lockouts and lockouts that are not stakeable yet. + .filter(getUsableUTXOsFilter(state)); + + // 2. Verify signatures match. + const verifiedUsableUTXOs = verifySignaturesMatch( + usableUTXOs, + (utxo) => utxo.output.transferOut, + state.fromAddresses, + state.spendOptions, + ); + + // 3. Do all the logic for spending based on the UTXOs. + for (const { sigData, data: utxo } of verifiedUsableUTXOs) { + const utxoInfo = getUtxoInfo(utxo); + const remainingAmountToStake: bigint = + state.toStake.get(utxoInfo.assetId) ?? 0n; + + // 3a. If we have already reached the stake amount, there is nothing left to run beyond here. + if (remainingAmountToStake === 0n) { + continue; + } + + // 3b. Add the input. 
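+    //     The UTXO is consumed through a StakeableLockIn so the original
+    //     stakeable locktime is preserved on the spent amount.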
+ spendHelper.addInput( + utxo, + new TransferableInput( + utxo.utxoId, + utxo.assetId, + new StakeableLockIn( + new BigIntPr(utxoInfo.stakeableLocktime), + TransferInput.fromNative(utxoInfo.amount, sigData.sigIndicies), + ), + ), + ); + + // 3c. Consume the locked asset and get the remaining amount. + const [remainingAmount] = spendHelper.consumeLockedStakableAsset( + utxoInfo.assetId, + utxoInfo.amount, + ); + + // 3d. Add the stake output. + spendHelper.addStakedOutput( + new TransferableOutput( + utxo.assetId, + new StakeableLockOut( + new BigIntPr(utxoInfo.stakeableLocktime), + new TransferOutput( + new BigIntPr(utxoInfo.amount - remainingAmount), + utxo.getOutputOwners(), + ), + ), + ), + ); + + // 3e. Add the change output if there is any remaining amount. + if (remainingAmount > 0n) { + spendHelper.addChangeOutput( + new TransferableOutput( + utxo.assetId, + new StakeableLockOut( + new BigIntPr(utxoInfo.stakeableLocktime), + new TransferOutput( + new BigIntPr(remainingAmount), + utxo.getOutputOwners(), + ), + ), + ), + ); + } + } + + return state; +}; diff --git a/src/vms/pvm/etna-builder/spend-reducers/useUnlockedUTXOs.test.ts b/src/vms/pvm/etna-builder/spend-reducers/useUnlockedUTXOs.test.ts new file mode 100644 index 000000000..ad85fafc8 --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/useUnlockedUTXOs.test.ts @@ -0,0 +1,220 @@ +import { testContext } from '../../../../fixtures/context'; +import { getUsableUTXOsFilter, useUnlockedUTXOs } from './useUnlockedUTXOs'; +import { getInitialReducerState, getSpendHelper } from './fixtures/reducers'; +import { + fromAddressBytes, + getLockedUTXO, + getNotTransferOutput, + getStakeableLockoutOutput, + getValidUtxo, + testAvaxAssetID, + testUTXOID1, + testUTXOID2, +} from '../../../../fixtures/transactions'; +import { + Address, + BigIntPr, + Id, + Int, + TransferableOutput, +} from '../../../../serializable'; +import { Utxo } from '../../../../serializable/avax/utxo'; +import { + StakeableLockIn, + StakeableLockOut, +} from '../../../../serializable/pvm'; +import { IncorrectStakeableLockOutError } from './errors'; +import { addressesFromBytes, hexToBuffer } from '../../../../utils'; +import { UTXOID } from '../../../../serializable/avax'; +import { NoSigMatchError } from '../../../utils/calculateSpend/utils'; + +describe('useUnlockedUTXOs', () => { + describe('getUsableUTXOsFilter', () => { + test('returns `true` if UTXO output is a TransferOutput and the locktime is less than the minIssuanceTime', () => { + const state = getInitialReducerState({ + spendOptions: { + minIssuanceTime: 100n, + }, + }); + const utxo = getValidUtxo(); + expect(getUsableUTXOsFilter(state)(utxo)).toBe(true); + }); + + test('returns `false` if UTXO output is a TransferOutput and the locktime is equal or greater than the minIssuanceTime', () => { + const state = getInitialReducerState({ + spendOptions: { + minIssuanceTime: 100n, + }, + }); + const utxo = getLockedUTXO(new BigIntPr(100n), 100n); + expect(getUsableUTXOsFilter(state)(utxo)).toBe(false); + }); + + test('returns `true` if UTXO output is a StakeableLockOut and the locktime is less than the minIssuanceTime', () => { + const state = getInitialReducerState({ + spendOptions: { + minIssuanceTime: 100n, + }, + }); + + const utxo = getStakeableLockoutOutput(testUTXOID1, 100n, 50n); + + expect(getUsableUTXOsFilter(state)(utxo)).toBe(true); + }); + + test('returns `false` if UTXO output is a StakeableLockOut and the locktime is equal or greater than the minIssuanceTime', () => { + const state = 
getInitialReducerState({ + spendOptions: { + minIssuanceTime: 100n, + }, + }); + + const utxo = getStakeableLockoutOutput(testUTXOID1, 100n, 100n); + + expect(getUsableUTXOsFilter(state)(utxo)).toBe(false); + }); + + test('throws an error if UTXO output is a StakeableLockOut and the transferOut is not a TransferOutput', () => { + const state = getInitialReducerState({ + spendOptions: { + minIssuanceTime: 100n, + }, + }); + + const invalidUTXO = new Utxo( + new UTXOID(testUTXOID2, new Int(0)), + testAvaxAssetID, + new StakeableLockOut( + new BigIntPr(50n), + new StakeableLockIn( + new BigIntPr(2000000000n), + TransferableOutput.fromNative(testAvaxAssetID.toString(), 20n, [ + hexToBuffer('0x12345678901234578901234567890123457890'), + ]), + ), + ), + ); + + expect(() => getUsableUTXOsFilter(state)(invalidUTXO)).toThrow( + IncorrectStakeableLockOutError, + ); + }); + + test('returns `false` if UTXO output is not a TransferOutput or a StakeableLockOut', () => { + expect( + getUsableUTXOsFilter(getInitialReducerState())(getNotTransferOutput()), + ).toBe(false); + }); + }); + + it('should handle verified usable AVAX UTXOs', () => { + const toBurn = new Map([[testContext.avaxAssetID, 4_900n]]); + const toStake = new Map([[testContext.avaxAssetID, 4_900n]]); + + const initialState = getInitialReducerState({ + fromAddresses: addressesFromBytes(fromAddressBytes), + excessAVAX: 0n, + toBurn, + toStake, + utxos: [getValidUtxo(new BigIntPr(10_000n))], + }); + + const spendHelper = getSpendHelper({ toBurn, toStake }); + + const state = useUnlockedUTXOs(initialState, spendHelper, testContext); + const { inputs } = spendHelper.getInputsOutputs(); + + expect(state.excessAVAX).toEqual(10_000n - 4_900n - 4_900n); + expect(state.ownerOverride).toBe(null); + expect(inputs).toHaveLength(1); + expect(inputs[0].getAssetId()).toEqual(testContext.avaxAssetID); + }); + + it('should skip other verified usable UTXOs with no toBurn or toStake match', () => { + const toBurn = new Map([[testContext.avaxAssetID, 4_900n]]); + const toStake = new Map([[testContext.avaxAssetID, 4_900n]]); + + const initialState = getInitialReducerState({ + fromAddresses: addressesFromBytes(fromAddressBytes), + excessAVAX: 0n, + toBurn, + toStake, + utxos: [ + getValidUtxo(new BigIntPr(10_000n)), + getValidUtxo(new BigIntPr(5_000n), Id.fromString('testasset')), + ], + }); + + const spendHelper = getSpendHelper({ toBurn, toStake }); + + useUnlockedUTXOs(initialState, spendHelper, testContext); + const { inputs, inputUTXOs } = spendHelper.getInputsOutputs(); + + // Should only be the AVAX UTXO + expect(inputUTXOs).toHaveLength(1); + expect(inputs).toHaveLength(1); + expect(inputs[0].getAssetId()).not.toEqual('testasset'); + }); + + it('should consume other verified usable UTXOs with a toBurn or toStake match', () => { + const testAssetId = Id.fromString('testasset'); + const testAssetId2 = Id.fromString('testasset2'); + const toBurn = new Map([ + [testContext.avaxAssetID, 4_900n], + [testAssetId.toString(), 1_900n], + [testAssetId2.toString(), 100n], + ]); + const toStake = new Map([ + [testContext.avaxAssetID, 4_900n], + [testAssetId.toString(), 1_900n], + ]); + + const initialState = getInitialReducerState({ + fromAddresses: addressesFromBytes(fromAddressBytes), + excessAVAX: 0n, + toBurn, + toStake, + utxos: [ + getValidUtxo(new BigIntPr(10_000n)), + getValidUtxo(new BigIntPr(5_000n), testAssetId), + getValidUtxo(new BigIntPr(100n), testAssetId2), + ], + }); + + const spendHelper = getSpendHelper({ toBurn, toStake }); + + 
useUnlockedUTXOs(initialState, spendHelper, testContext); + const { changeOutputs, inputs, inputUTXOs } = + spendHelper.getInputsOutputs(); + + expect(inputUTXOs).toHaveLength(3); + expect(inputs).toHaveLength(3); + + // Only expect 1 for now. The AVAX UTXOs aren't added as part of this reducer. + // Only testAssetId is given back change. testAssetId2 is consumed fully with no change. + expect(changeOutputs).toHaveLength(1); + + expect(changeOutputs[0].amount()).toEqual(5_000n - 1_900n - 1_900n); + }); + + it('should ignore UTXOs that signatures do not match', () => { + const toBurn = new Map([[testContext.avaxAssetID, 4_900n]]); + const toStake = new Map([[testContext.avaxAssetID, 4_900n]]); + + const initialState = getInitialReducerState({ + fromAddresses: [ + Address.fromString('P-fuji1y50xa9363pn3d5gjhcz3ltp3fj6vq8x8a5txxg'), + ], + excessAVAX: 0n, + toBurn, + toStake, + utxos: [getValidUtxo(new BigIntPr(10_000n))], + }); + + const spendHelper = getSpendHelper({ toBurn, toStake }); + + expect(() => + useUnlockedUTXOs(initialState, spendHelper, testContext), + ).toThrow(NoSigMatchError); + }); +}); diff --git a/src/vms/pvm/etna-builder/spend-reducers/useUnlockedUTXOs.ts b/src/vms/pvm/etna-builder/spend-reducers/useUnlockedUTXOs.ts new file mode 100644 index 000000000..c95406269 --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/useUnlockedUTXOs.ts @@ -0,0 +1,200 @@ +import { + BigIntPr, + OutputOwners, + TransferInput, + TransferOutput, + TransferableInput, + TransferableOutput, +} from '../../../../serializable'; +import type { Utxo } from '../../../../serializable/avax/utxo'; +import type { StakeableLockOut } from '../../../../serializable/pvm'; +import { + getUtxoInfo, + isStakeableLockOut, + isTransferOut, +} from '../../../../utils'; +import { verifySignaturesMatch } from '../../../utils/calculateSpend/utils'; +import { IncorrectStakeableLockOutError } from './errors'; +import type { SpendReducerFunction, SpendReducerState } from './types'; + +/** + * Is responsible for filtering out the usable UTXOs from the list of UTXOs. + * + * @internal - Only exported for testing. + */ +export const getUsableUTXOsFilter = + (state: SpendReducerState) => + ( + utxo: Utxo, + ): utxo is Utxo> => { + if (!(isStakeableLockOut(utxo.output) || isTransferOut(utxo.output))) { + return false; + } + + if ( + isStakeableLockOut(utxo.output) && + !isTransferOut(utxo.output.transferOut) + ) { + throw IncorrectStakeableLockOutError; + } + + return utxo.output.getLocktime() < state.spendOptions.minIssuanceTime; + }; + +export const useUnlockedUTXOs: SpendReducerFunction = ( + state, + spendHelper, + context, +) => { + // 1. Filter out the UTXOs that are not usable. + const usableUTXOs: Utxo>[] = + state.utxos + // Filter out non stakeable lockouts and lockouts that are not stakeable yet. + .filter(getUsableUTXOsFilter(state)); + + // 2. Verify signatures match. + const verifiedUsableUTXOs = verifySignaturesMatch( + usableUTXOs, + (utxo) => + isTransferOut(utxo.output) ? utxo.output : utxo.output.transferOut, + state.fromAddresses, + state.spendOptions, + ); + + // 3. Split verified usable UTXOs into AVAX assetId UTXOs and other assetId UTXOs. 
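    // The split matters for fee handling: non-AVAX assets are consumed first
    // (step 4), while AVAX UTXOs are handled last (step 5) so that
    // `spendHelper.calculateFee()` already reflects every other input and
    // output when deciding how much AVAX still needs to be consumed.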
+ const { otherVerifiedUsableUTXOs, avaxVerifiedUsableUTXOs } = + verifiedUsableUTXOs.reduce<{ + avaxVerifiedUsableUTXOs: typeof verifiedUsableUTXOs; + otherVerifiedUsableUTXOs: typeof verifiedUsableUTXOs; + }>( + (result, verifiedUsableUTXO) => { + if (verifiedUsableUTXO.data.assetId.value() === context.avaxAssetID) { + return { + ...result, + avaxVerifiedUsableUTXOs: [ + ...result.avaxVerifiedUsableUTXOs, + verifiedUsableUTXO, + ], + }; + } + + return { + ...result, + otherVerifiedUsableUTXOs: [ + ...result.otherVerifiedUsableUTXOs, + verifiedUsableUTXO, + ], + }; + }, + { otherVerifiedUsableUTXOs: [], avaxVerifiedUsableUTXOs: [] }, + ); + + const changeOwner = OutputOwners.fromNative( + state.spendOptions.changeAddresses, + 0n, + 1, + ); + + // 4. Handle all the non-AVAX asset UTXOs first. + for (const { sigData, data: utxo } of otherVerifiedUsableUTXOs) { + const utxoInfo = getUtxoInfo(utxo); + const remainingAmountToBurn: bigint = + state.toBurn.get(utxoInfo.assetId) ?? 0n; + const remainingAmountToStake: bigint = + state.toStake.get(utxoInfo.assetId) ?? 0n; + + // 4a. If we have already reached the burn/stake amount, there is nothing left to run beyond here. + if (remainingAmountToBurn === 0n && remainingAmountToStake === 0n) { + continue; + } + + // 4b. Add the input. + spendHelper.addInput( + utxo, + new TransferableInput( + utxo.utxoId, + utxo.assetId, + TransferInput.fromNative(utxoInfo.amount, sigData.sigIndicies), + ), + ); + + // 4c. Consume the asset and get the remaining amount. + const [remainingAmount, amountToStake] = spendHelper.consumeAsset( + utxoInfo.assetId, + utxoInfo.amount, + ); + + // 4d. If "amountToStake" is greater than 0, add the stake output. + if (amountToStake > 0n) { + spendHelper.addStakedOutput( + new TransferableOutput( + utxo.assetId, + new TransferOutput(new BigIntPr(amountToStake), changeOwner), + ), + ); + } + + // 4e. Add the change output if there is any remaining amount. + if (remainingAmount > 0n) { + spendHelper.addChangeOutput( + new TransferableOutput( + utxo.assetId, + new TransferOutput(new BigIntPr(remainingAmount), changeOwner), + ), + ); + } + } + + // 5. Handle AVAX asset UTXOs last to account for fees. + let excessAVAX = state.excessAVAX; + let clearOwnerOverride = false; + for (const { sigData, data: utxo } of avaxVerifiedUsableUTXOs) { + const requiredFee = spendHelper.calculateFee(); + + // If we don't need to burn or stake additional AVAX and we have + // consumed enough AVAX to pay the required fee, we should stop + // consuming UTXOs. + if ( + !spendHelper.shouldConsumeAsset(context.avaxAssetID) && + excessAVAX >= requiredFee + ) { + break; + } + + const utxoInfo = getUtxoInfo(utxo); + + spendHelper.addInput( + utxo, + new TransferableInput( + utxo.utxoId, + utxo.assetId, + TransferInput.fromNative(utxoInfo.amount, sigData.sigIndicies), + ), + ); + + const [remainingAmount, amountToStake] = spendHelper.consumeAsset( + context.avaxAssetID, + utxoInfo.amount, + ); + + if (amountToStake > 0n) { + spendHelper.addStakedOutput( + new TransferableOutput( + utxo.assetId, + new TransferOutput(new BigIntPr(amountToStake), changeOwner), + ), + ); + } + + excessAVAX += remainingAmount; + + // The ownerOverride is no longer needed. Clear it. + clearOwnerOverride = true; + } + + return { + ...state, + excessAVAX, + ownerOverride: clearOwnerOverride ? 
null : state.ownerOverride, + }; +}; diff --git a/src/vms/pvm/etna-builder/spend-reducers/verifyAssetsConsumed.test.ts b/src/vms/pvm/etna-builder/spend-reducers/verifyAssetsConsumed.test.ts new file mode 100644 index 000000000..3367eb560 --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/verifyAssetsConsumed.test.ts @@ -0,0 +1,33 @@ +import { jest } from '@jest/globals'; + +import { testContext } from '../../../../fixtures/context'; +import { getInitialReducerState, getSpendHelper } from './fixtures/reducers'; +import { verifyAssetsConsumed } from './verifyAssetsConsumed'; + +describe('verifyAssetsConsumed', () => { + test('returns original state if all assets are consumed', () => { + const initialState = getInitialReducerState(); + const spendHelper = getSpendHelper(); + const spy = jest.spyOn(spendHelper, 'verifyAssetsConsumed'); + + const state = verifyAssetsConsumed(initialState, spendHelper, testContext); + + expect(state).toBe(initialState); + expect(spy).toHaveBeenCalledTimes(1); + }); + + test('throws an error if some assets are not consumed', () => { + const initialState = getInitialReducerState(); + const spendHelper = getSpendHelper(); + + // Mock the verifyAssetsConsumed method to throw an error + // Testing for this function can be found in the spendHelper.test.ts file + spendHelper.verifyAssetsConsumed = jest.fn(() => { + throw new Error('Test error'); + }); + + expect(() => + verifyAssetsConsumed(initialState, spendHelper, testContext), + ).toThrow('Test error'); + }); +}); diff --git a/src/vms/pvm/etna-builder/spend-reducers/verifyAssetsConsumed.ts b/src/vms/pvm/etna-builder/spend-reducers/verifyAssetsConsumed.ts new file mode 100644 index 000000000..546e010ea --- /dev/null +++ b/src/vms/pvm/etna-builder/spend-reducers/verifyAssetsConsumed.ts @@ -0,0 +1,19 @@ +import type { SpendReducerFunction } from './types'; + +/** + * Verify that all assets have been consumed. + * + * Calls the spendHelper's verifyAssetsConsumed method. 
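 * Throws the error returned by the helper if any `toBurn` or `toStake`
 * amounts remain unconsumed, and otherwise returns the state unchanged,
 * which lets this reducer sit at the end of the pipeline as a final check.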
+ */
+export const verifyAssetsConsumed: SpendReducerFunction = (
+  state,
+  spendHelper,
+) => {
+  const verifyError = spendHelper.verifyAssetsConsumed();
+
+  if (verifyError) {
+    throw verifyError;
+  }
+
+  return state;
+};
diff --git a/src/vms/pvm/etna-builder/spend.test.ts b/src/vms/pvm/etna-builder/spend.test.ts
new file mode 100644
index 000000000..8c88703d6
--- /dev/null
+++ b/src/vms/pvm/etna-builder/spend.test.ts
@@ -0,0 +1,118 @@
+import { jest } from '@jest/globals';
+
+import { testContext } from '../../../fixtures/context';
+import { Address, OutputOwners } from '../../../serializable';
+import { defaultSpendOptions } from '../../common/defaultSpendOptions';
+import { createDimensions } from '../../common/fees/dimensions';
+import {
+  verifyAssetsConsumed,
+  type SpendReducerFunction,
+  type SpendReducerState,
+  handleFeeAndChange,
+} from './spend-reducers';
+import { spend } from './spend';
+
+jest.mock('./spend-reducers', () => ({
+  verifyAssetsConsumed: jest.fn((state) => state),
+  handleFeeAndChange: jest.fn((state) => state),
+}));
+
+const CHANGE_ADDRESS = Address.fromString(
+  'P-fuji1y50xa9363pn3d5gjhcz3ltp3fj6vq8x8a5txxg',
+);
+const CHANGE_OWNERS: OutputOwners = OutputOwners.fromNative([
+  CHANGE_ADDRESS.toBytes(),
+]);
+
+const getInitialReducerState = (
+  state: Partial<SpendReducerState> = {},
+): SpendReducerState => ({
+  excessAVAX: 0n,
+  initialComplexity: createDimensions(1, 1, 1, 1),
+  fromAddresses: [CHANGE_ADDRESS],
+  ownerOverride: null,
+  spendOptions: defaultSpendOptions(
+    state?.fromAddresses?.map((address) => address.toBytes()) ?? [
+      CHANGE_ADDRESS.toBytes(),
+    ],
+  ),
+  toBurn: new Map(),
+  toStake: new Map(),
+  utxos: [],
+  ...state,
+});
+
+describe('./src/vms/pvm/etna-builder/spend.test.ts', () => {
+  // TODO: Enable.
+  // Test is broken due to mocks not working. Needs investigation.
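  // For reference, reducers passed to `spend` follow this minimal shape (a
  // sketch only; real reducers such as `useUnlockedUTXOs` also add inputs and
  // outputs through the shared SpendHelper):
  const exampleNoopReducer: SpendReducerFunction = (state, _spendHelper, _context) =>
    state;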
+ test.skip('calls spend reducers', () => { + const testReducer = jest.fn((state) => state); + + spend( + getInitialReducerState({ excessAVAX: 1_000n }), + [testReducer], + testContext, + ); + + expect(testReducer).toHaveBeenCalledTimes(1); + expect(verifyAssetsConsumed).toHaveBeenCalledTimes(1); + expect(handleFeeAndChange).toHaveBeenCalledTimes(1); + }); + + test('catches thrown errors and re-throws', () => { + const testReducer = jest.fn(() => { + throw new Error('Test error'); + }); + + expect(() => + spend( + getInitialReducerState({ excessAVAX: 1_000n }), + [testReducer], + testContext, + ), + ).toThrow('Test error'); + }); + + test('catches thrown non-error and throws error', () => { + const testReducer = jest.fn(() => { + throw 'not-an-error'; + }); + + expect(() => + spend( + getInitialReducerState({ excessAVAX: 1_000n }), + [testReducer], + testContext, + ), + ).toThrow('An unexpected error occurred during spend calculation'); + }); + + test('change owners in state should default to change addresses', () => { + expect.assertions(1); + + const initialState = getInitialReducerState({ excessAVAX: 1_000n }); + const testReducer = jest.fn((state) => { + expect(state.ownerOverride).toEqual( + OutputOwners.fromNative(initialState.spendOptions.changeAddresses), + ); + return state; + }); + + spend(initialState, [testReducer], testContext); + }); + + test('change owners in state should be ownerOverride if provided', () => { + expect.assertions(1); + + const initialState = getInitialReducerState({ + excessAVAX: 1_000n, + ownerOverride: CHANGE_OWNERS, + }); + const testReducer = jest.fn((state) => { + expect(state.ownerOverride).toBe(CHANGE_OWNERS); + return state; + }); + + spend(initialState, [testReducer], testContext); + }); +}); diff --git a/src/vms/pvm/etna-builder/spend.ts b/src/vms/pvm/etna-builder/spend.ts new file mode 100644 index 000000000..3471e729e --- /dev/null +++ b/src/vms/pvm/etna-builder/spend.ts @@ -0,0 +1,163 @@ +import type { + Address, + TransferableInput, + TransferableOutput, +} from '../../../serializable'; +import { OutputOwners } from '../../../serializable'; +import type { Utxo } from '../../../serializable/avax/utxo'; +import type { SpendOptions } from '../../common'; +import type { Dimensions } from '../../common/fees/dimensions'; +import type { Context } from '../../context'; +import type { SpendReducerFunction, SpendReducerState } from './spend-reducers'; +import { handleFeeAndChange, verifyAssetsConsumed } from './spend-reducers'; +import { SpendHelper } from './spendHelper'; + +type SpendResult = Readonly<{ + /** + * The consolidated and sorted change outputs. + */ + changeOutputs: readonly TransferableOutput[]; + /** + * The total calculated fee for the transaction. + */ + fee: bigint; + /** + * The sorted inputs. + */ + inputs: readonly TransferableInput[]; + /** + * The UTXOs that were used as inputs. + */ + inputUTXOs: readonly Utxo[]; + /** + * The consolidated and sorted staked outputs. + */ + stakeOutputs: readonly TransferableOutput[]; +}>; + +export type SpendProps = Readonly<{ + /** + * The extra AVAX that spend can produce in + * the change outputs in addition to the consumed and not burned AVAX. + */ + excessAVAX?: bigint; + /** + * List of Addresses that are used for selecting which UTXOs are signable. + */ + fromAddresses: readonly Address[]; + /** + * The initial complexity of the transaction. 
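 * Typically this is the intrinsic complexity of the transaction type plus any
 * complexity already known up front (memo, signer, fixed outputs), before
 * inputs and change outputs are selected.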
+ */
+  initialComplexity: Dimensions;
+  /**
+   * Optionally specifies the output owners to use for the unlocked
+   * AVAX change output if no additional AVAX needed to be burned.
+   * If this value is `undefined` or `null`, the default change owner is used.
+   *
+   * Used in ImportTx.
+   */
+  ownerOverride?: OutputOwners | null;
+  /**
+   * Whether to consolidate change and stake outputs.
+   *
+   * @default false
+   */
+  shouldConsolidateOutputs?: boolean;
+  spendOptions: Required<SpendOptions>;
+  /**
+   * Maps `assetID` to the amount of the asset to spend without
+   * producing an output. This is typically used for fees.
+   * However, it can also be used to consume some of an asset that
+   * will be produced in separate outputs, such as ExportedOutputs.
+   *
+   * Only unlocked UTXOs can be burned here.
+   */
+  toBurn?: Map<string, bigint>;
+  /**
+   * Maps `assetID` to the amount of the asset to spend and place into
+   * the staked outputs. Locked UTXOs are used for these funds first,
+   * and then unlocked UTXOs are used. There is no preferential ordering
+   * on the unlock times.
+   */
+  toStake?: Map<string, bigint>;
+  /**
+   * List of UTXOs that are available to be spent.
+   */
+  utxos: readonly Utxo[];
+}>;
+
+/**
+ * Processes the spending of assets, including burning and staking, from a list of UTXOs.
+ *
+ * @param {SpendProps} props - The properties required to execute the spend operation.
+ * @param {SpendReducerFunction[]} spendReducers - The list of functions that will be executed to process the spend operation.
+ * @param {Context} context - The context in which the spend operation is executed.
+ *
+ * @returns {SpendResult} - The sorted inputs, the UTXOs they consume, the consolidated
+ * change and stake outputs, and the calculated fee.
+ *
+ * @throws {Error} - The thrown error if it is an instance of Error, otherwise a generic
+ * error for unexpected non-Error values.
+ */
+export const spend = (
+  {
+    excessAVAX = 0n,
+    fromAddresses,
+    initialComplexity,
+    ownerOverride,
+    shouldConsolidateOutputs = false,
+    spendOptions,
+    toBurn = new Map(),
+    toStake = new Map(),
+    utxos,
+  }: SpendProps,
+  spendReducers: readonly SpendReducerFunction[],
+  context: Context,
+): SpendResult => {
+  try {
+    const changeOwners =
+      ownerOverride || OutputOwners.fromNative(spendOptions.changeAddresses);
+
+    const spendHelper = new SpendHelper({
+      changeOutputs: [],
+      gasPrice: context.gasPrice,
+      initialComplexity,
+      inputs: [],
+      shouldConsolidateOutputs,
+      stakeOutputs: [],
+      toBurn,
+      toStake,
+      weights: context.complexityWeights,
+    });
+
+    const initialState: SpendReducerState = {
+      excessAVAX,
+      initialComplexity,
+      fromAddresses,
+      ownerOverride: changeOwners,
+      spendOptions,
+      toBurn,
+      toStake,
+      utxos,
+    };
+
+    const spendReducerFunctions: readonly SpendReducerFunction[] = [
+      ...spendReducers,
+      verifyAssetsConsumed,
+      handleFeeAndChange,
+      // Consolidation and sorting happen in the SpendHelper.
+    ];
+
+    // Run all the spend calculation reducer logic.
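    // Equivalent, written as a plain loop: each reducer receives the running
    // state, the shared SpendHelper, and the chain context, and returns the
    // state for the next reducer.
    //
    //   let state = initialState;
    //   for (const reducer of spendReducerFunctions) {
    //     state = reducer(state, spendHelper, context);
    //   }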
+ spendReducerFunctions.reduce((state, reducer) => { + return reducer(state, spendHelper, context); + }, initialState); + + return spendHelper.getInputsOutputs(); + } catch (error) { + if (error instanceof Error) { + throw error; + } + + throw new Error('An unexpected error occurred during spend calculation'); + } +}; diff --git a/src/vms/pvm/etna-builder/spendHelper.test.ts b/src/vms/pvm/etna-builder/spendHelper.test.ts new file mode 100644 index 000000000..2fda02eca --- /dev/null +++ b/src/vms/pvm/etna-builder/spendHelper.test.ts @@ -0,0 +1,431 @@ +import { + transferableInput, + transferableOutput, + utxo, +} from '../../../fixtures/avax'; +import { id } from '../../../fixtures/common'; +import { stakeableLockOut } from '../../../fixtures/pvm'; +import { TransferableOutput } from '../../../serializable'; +import { isTransferOut } from '../../../utils'; +import { + createDimensions, + dimensionsToGas, +} from '../../common/fees/dimensions'; +import type { SpendHelperProps } from './spendHelper'; +import { SpendHelper } from './spendHelper'; + +const DEFAULT_GAS_PRICE = 3n; + +const DEFAULT_WEIGHTS = createDimensions(1, 2, 3, 4); + +const DEFAULT_PROPS: SpendHelperProps = { + changeOutputs: [], + gasPrice: DEFAULT_GAS_PRICE, + initialComplexity: createDimensions(1, 1, 1, 1), + inputs: [], + shouldConsolidateOutputs: false, + stakeOutputs: [], + toBurn: new Map(), + toStake: new Map(), + weights: DEFAULT_WEIGHTS, +}; + +describe('src/vms/pvm/etna-builder/spendHelper', () => { + test('initialized with correct values', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + expect(spendHelper).toBeInstanceOf(SpendHelper); + + const results = spendHelper.getInputsOutputs(); + + expect(results.changeOutputs).toEqual([]); + expect(results.fee).toBe( + dimensionsToGas(DEFAULT_PROPS.initialComplexity, DEFAULT_WEIGHTS) * + DEFAULT_GAS_PRICE, + ); + expect(results.inputs).toEqual([]); + expect(results.inputUTXOs).toEqual([]); + expect(results.stakeOutputs).toEqual([]); + }); + + test('adding inputs and outputs', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + expect(spendHelper.getInputsOutputs()).toEqual({ + changeOutputs: [], + fee: + dimensionsToGas(DEFAULT_PROPS.initialComplexity, DEFAULT_WEIGHTS) * + DEFAULT_GAS_PRICE, + inputs: [], + inputUTXOs: [], + stakeOutputs: [], + }); + + const inputUtxo = utxo(); + const inputTransferableInput = transferableInput(); + + spendHelper.addInput(inputUtxo, inputTransferableInput); + + expect(spendHelper.getInputsOutputs()).toEqual({ + changeOutputs: [], + fee: 942n, + inputs: [inputTransferableInput], + inputUTXOs: [inputUtxo], + stakeOutputs: [], + }); + + const changeOutput = transferableOutput(); + + spendHelper.addChangeOutput(changeOutput); + + expect(spendHelper.getInputsOutputs()).toEqual({ + changeOutputs: [changeOutput], + fee: 1251n, + inputs: [inputTransferableInput], + inputUTXOs: [inputUtxo], + stakeOutputs: [], + }); + + const stakeOutput = transferableOutput(); + + spendHelper.addStakedOutput(stakeOutput); + + expect(spendHelper.getInputsOutputs()).toEqual({ + changeOutputs: [changeOutput], + fee: 1560n, + inputs: [inputTransferableInput], + inputUTXOs: [inputUtxo], + stakeOutputs: [stakeOutput], + }); + }); + + describe('SpendHelper.shouldConsumeLockedStakeableAsset', () => { + test('returns false for asset not in toStake', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + expect(spendHelper.shouldConsumeLockedStakeableAsset('asset')).toBe( + false, + ); + }); + + test('returns false for asset in 
toStake with 0 value', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toStake: new Map([['asset', 0n]]), + }); + + expect(spendHelper.shouldConsumeLockedStakeableAsset('asset')).toBe( + false, + ); + }); + + test('returns true for asset in toStake with non-0 value', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toStake: new Map([['asset', 1n]]), + }); + + expect(spendHelper.shouldConsumeLockedStakeableAsset('asset')).toBe(true); + }); + }); + + describe('SpendHelper.shouldConsumeAsset', () => { + test('returns false for asset not in toBurn', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + expect(spendHelper.shouldConsumeAsset('asset')).toBe(false); + }); + + test('returns false for asset in toBurn with 0 value', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toBurn: new Map([['asset', 0n]]), + }); + + expect(spendHelper.shouldConsumeAsset('asset')).toBe(false); + }); + + test('returns true for asset in toBurn with non-0 value', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toBurn: new Map([['asset', 1n]]), + }); + + expect(spendHelper.shouldConsumeAsset('asset')).toBe(true); + }); + + test('returns true for asset in toStake with non-0 value', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toStake: new Map([['asset', 1n]]), + }); + + expect(spendHelper.shouldConsumeAsset('asset')).toBe(true); + }); + + test('returns false for asset in toStake with 0 value', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toStake: new Map([['asset', 0n]]), + }); + + expect(spendHelper.shouldConsumeAsset('asset')).toBe(false); + }); + }); + + describe('SpendHelper.consumeLockedStakeableAsset', () => { + const testCases = [ + { + description: 'consumes the full amount', + toStake: new Map([['asset', 1n]]), + asset: 'asset', + amount: 1n, + expected: 0n, + }, + { + description: 'consumes a partial amount', + toStake: new Map([['asset', 1n]]), + asset: 'asset', + amount: 2n, + expected: 1n, + }, + { + description: 'consumes nothing', + toStake: new Map([['asset', 1n]]), + asset: 'asset', + amount: 0n, + expected: 0n, + }, + { + description: 'consumes nothing when asset not in toStake', + toStake: new Map(), + asset: 'asset', + amount: 1n, + expected: 1n, + }, + { + description: 'consumes nothing when asset in toStake with 0 value', + toStake: new Map([['asset', 0n]]), + asset: 'asset', + amount: 1n, + expected: 1n, + }, + ]; + + test.each(testCases)( + '$description', + ({ toStake, asset, amount, expected }) => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toStake, + }); + + expect(spendHelper.consumeLockedStakableAsset(asset, amount)[0]).toBe( + expected, + ); + }, + ); + + test('throws an error when amount is negative', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + expect(() => { + spendHelper.consumeLockedStakableAsset('asset', -1n); + }).toThrow('Amount to consume must be greater than or equal to 0'); + }); + }); + + describe('SpendHelper.consumeAsset', () => { + const testCases = [ + { + description: 'consumes the full amount', + toBurn: new Map([['asset', 1n]]), + asset: 'asset', + amount: 1n, + expected: 0n, + }, + { + description: 'consumes a partial amount', + toBurn: new Map([['asset', 1n]]), + asset: 'asset', + amount: 2n, + expected: 1n, + }, + { + description: 'consumes nothing', + toBurn: new Map([['asset', 1n]]), + asset: 'asset', + amount: 0n, + expected: 0n, + }, + { + description: 'consumes nothing when 
asset not in toBurn', + toBurn: new Map(), + asset: 'asset', + amount: 1n, + expected: 1n, + }, + { + description: 'consumes nothing when asset in toBurn with 0 value', + toBurn: new Map([['asset', 0n]]), + asset: 'asset', + amount: 1n, + expected: 1n, + }, + { + description: 'consumes nothing when asset in toStake with 0 value', + toBurn: new Map([['asset', 1n]]), + toStake: new Map([['asset', 0n]]), + asset: 'asset', + amount: 1n, + expected: 0n, + }, + ]; + + test.each(testCases)( + '$description', + ({ toBurn, asset, amount, expected }) => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toBurn, + }); + + expect(spendHelper.consumeAsset(asset, amount)[0]).toBe(expected); + }, + ); + + test('throws an error when amount is negative', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + expect(() => { + spendHelper.consumeAsset('asset', -1n); + }).toThrow('Amount to consume must be greater than or equal to 0'); + }); + }); + + describe('SpendHelper.verifyAssetsConsumed', () => { + test('returns null when all assets consumed', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toBurn: new Map([['asset', 0n]]), + toStake: new Map([['asset', 0n]]), + }); + + expect(spendHelper.verifyAssetsConsumed()).toBe(null); + }); + + test('returns an error when stake assets not consumed', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toBurn: new Map([['test-asset', 1n]]), + toStake: new Map([['test-asset', 1n]]), + }); + + expect(spendHelper.verifyAssetsConsumed()).toEqual( + new Error( + 'Insufficient funds! Provided UTXOs need 1 more units of asset test-asset to stake', + ), + ); + }); + + test('returns an error when burn assets not consumed', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + toBurn: new Map([['test-asset', 1n]]), + toStake: new Map([['test-asset', 0n]]), + }); + + expect(spendHelper.verifyAssetsConsumed()).toEqual( + new Error( + 'Insufficient funds! Provided UTXOs need 1 more units of asset test-asset', + ), + ); + }); + }); + + test('no consolidated outputs when `shouldConsolidateOutputs` is `false`', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + spendHelper.addChangeOutput(transferableOutput()); + spendHelper.addChangeOutput(transferableOutput()); + + const stakedTransferableOutput = new TransferableOutput( + id(), + stakeableLockOut(), + ); + + spendHelper.addStakedOutput(stakedTransferableOutput); + spendHelper.addStakedOutput(stakedTransferableOutput); + + // Calculate fee to trigger potential consolidation. + spendHelper.calculateFee(); + + const result = spendHelper.getInputsOutputs(); + + expect(result.changeOutputs).toHaveLength(2); + expect(result.stakeOutputs).toHaveLength(2); + }); + + test('consolidating outputs when `shouldConsolidateOutputs` is `true`', () => { + const spendHelper = new SpendHelper({ + ...DEFAULT_PROPS, + shouldConsolidateOutputs: true, + }); + + spendHelper.addChangeOutput(transferableOutput()); + spendHelper.addChangeOutput(transferableOutput()); + + const stakedTransferableOutput = new TransferableOutput( + id(), + stakeableLockOut(), + ); + + spendHelper.addStakedOutput(stakedTransferableOutput); + spendHelper.addStakedOutput(stakedTransferableOutput); + + // Calculate fee to trigger potential consolidation. 
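    // Consolidation (when enabled) merges outputs that share an asset ID and
    // owners into a single output before complexity is measured, so fewer
    // outputs means less bandwidth and a lower fee. The fee itself is
    // gas * gasPrice where, assuming `dimensionsToGas` is the weighted sum of
    // the dimensions, the empty helper built from DEFAULT_PROPS has
    // gas = 1*1 + 1*2 + 1*3 + 1*4 = 10 and therefore a base fee of 10 * 3n = 30n.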
+ spendHelper.calculateFee(); + + const result = spendHelper.getInputsOutputs(); + + expect(result.changeOutputs).toHaveLength(1); + expect(result.stakeOutputs).toHaveLength(1); + }); + + test('calculate fee with temporary output complexity', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + const originalFee = spendHelper.calculateFee(); + + const temporaryOutput = transferableOutput(); + + expect(spendHelper.calculateFee(temporaryOutput)).toBeGreaterThan( + originalFee, + ); + + expect(spendHelper.calculateFee()).toBe(originalFee); + }); + + test('hasChangeOutput returns `true` when there is an AVAX change output', () => { + const spendHelper = new SpendHelper(DEFAULT_PROPS); + + const changeOutput = transferableOutput(); + + if (!isTransferOut(changeOutput.output)) { + throw new Error('Output is not a TransferOutput'); + } + + const assetId = changeOutput.getAssetId(); + const outputOwners = changeOutput.output.outputOwners; + + expect(spendHelper.hasChangeOutput(assetId, outputOwners)).toBe(false); + + spendHelper.addChangeOutput(changeOutput); + + expect(spendHelper.hasChangeOutput(assetId, outputOwners)).toBe(true); + + expect(spendHelper.hasChangeOutput('other-asset', outputOwners)).toBe( + false, + ); + }); +}); diff --git a/src/vms/pvm/etna-builder/spendHelper.ts b/src/vms/pvm/etna-builder/spendHelper.ts new file mode 100644 index 000000000..f464a3024 --- /dev/null +++ b/src/vms/pvm/etna-builder/spendHelper.ts @@ -0,0 +1,314 @@ +import type { OutputOwners, TransferableOutput } from '../../../serializable'; +import { TransferableInput } from '../../../serializable'; +import type { Utxo } from '../../../serializable/avax/utxo'; +import { isTransferOut } from '../../../utils'; +import { bigIntMin } from '../../../utils/bigintMath'; +import { compareTransferableOutputs } from '../../../utils/sort'; +import type { Dimensions } from '../../common/fees/dimensions'; +import { + addDimensions, + createEmptyDimensions, + dimensionsToGas, +} from '../../common/fees/dimensions'; +import { consolidateOutputs } from '../../utils/consolidateOutputs'; +import { getInputComplexity, getOutputComplexity } from '../txs/fee'; + +export interface SpendHelperProps { + changeOutputs: readonly TransferableOutput[]; + gasPrice: bigint; + initialComplexity: Dimensions; + inputs: readonly TransferableInput[]; + shouldConsolidateOutputs: boolean; + stakeOutputs: readonly TransferableOutput[]; + toBurn: Map; + toStake: Map; + weights: Dimensions; +} + +/** + * The SpendHelper class assists in managing and processing the spending of assets, + * including handling complexities, gas prices, and various outputs and inputs. 
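 * It accumulates inputs, change outputs, and stake outputs, tracks the
 * remaining `toBurn` and `toStake` amounts as assets are consumed, and
 * derives the fee from the accumulated complexity, the dimension weights,
 * and the gas price.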
+ * + * @class + */ +export class SpendHelper { + private readonly gasPrice: bigint; + private readonly initialComplexity: Dimensions; + private readonly shouldConsolidateOutputs: boolean; + private readonly toBurn: Map; + private readonly toStake: Map; + private readonly weights: Dimensions; + + private changeOutputs: readonly TransferableOutput[]; + private inputs: readonly TransferableInput[]; + private stakeOutputs: readonly TransferableOutput[]; + + private inputUTXOs: readonly Utxo[] = []; + + constructor({ + changeOutputs, + gasPrice, + initialComplexity, + inputs, + shouldConsolidateOutputs, + stakeOutputs, + toBurn, + toStake, + weights, + }: SpendHelperProps) { + this.gasPrice = gasPrice; + this.initialComplexity = initialComplexity; + this.shouldConsolidateOutputs = shouldConsolidateOutputs; + this.toBurn = toBurn; + this.toStake = toStake; + this.weights = weights; + + this.changeOutputs = changeOutputs; + this.inputs = inputs; + this.stakeOutputs = stakeOutputs; + } + + /** + * Adds an input UTXO and its corresponding transferable input to the SpendHelper. + * + * @param {Utxo} utxo - The UTXO to be added. + * @param {TransferableInput} transferableInput - The transferable input corresponding to the UTXO. + * @returns {SpendHelper} The current instance of SpendHelper for chaining. + */ + addInput(utxo: Utxo, transferableInput: TransferableInput): SpendHelper { + this.inputs = [...this.inputs, transferableInput]; + this.inputUTXOs = [...this.inputUTXOs, utxo]; + + return this; + } + + /** + * Adds a change output to the SpendHelper. + * Change outputs are outputs that are sent back to the sender. + * + * @param {TransferableOutput} transferableOutput - The change output to be added. + * @returns {SpendHelper} The current instance of SpendHelper for chaining. + */ + addChangeOutput(transferableOutput: TransferableOutput): SpendHelper { + this.changeOutputs = [...this.changeOutputs, transferableOutput]; + + return this; + } + + /** + * Adds a staked output to the SpendHelper. + * Staked outputs are outputs that are staked by the sender. + * + * @param {TransferableOutput} transferableOutput - The staked output to be added. + * @returns {SpendHelper} The current instance of SpendHelper for chaining. + */ + addStakedOutput(transferableOutput: TransferableOutput): SpendHelper { + this.stakeOutputs = [...this.stakeOutputs, transferableOutput]; + + return this; + } + + /** + * When computing the complexity/fee of a transaction that needs change but doesn't yet have + * a corresponding change output, `additionalComplexity` may be used to calculate the complexity + * and therefore the fee as if the change output was already added. + */ + private getComplexity( + additionalComplexity: Dimensions = createEmptyDimensions(), + ): Dimensions { + return addDimensions( + this.initialComplexity, + getInputComplexity(this.inputs), + getOutputComplexity(this.changeOutputs), + getOutputComplexity(this.stakeOutputs), + additionalComplexity, + ); + } + + private consolidateOutputs(): void { + if (this.shouldConsolidateOutputs) { + this.changeOutputs = consolidateOutputs(this.changeOutputs); + this.stakeOutputs = consolidateOutputs(this.stakeOutputs); + } + } + + /** + * Determines if a locked stakeable asset should be consumed based on its asset ID. + * + * @param {string} assetId - The ID of the asset to check. + * @returns {boolean} - Returns true if the asset should be consumed, false otherwise. 
+ */ + shouldConsumeLockedStakeableAsset(assetId: string): boolean { + return this.toStake.has(assetId) && this.toStake.get(assetId) !== 0n; + } + + /** + * Determines if an asset should be consumed based on its asset ID. + * + * @param {string} assetId - The ID of the asset to check. + * @returns {boolean} - Returns true if the asset should be consumed, false otherwise. + */ + shouldConsumeAsset(assetId: string): boolean { + return ( + (this.toBurn.has(assetId) && this.toBurn.get(assetId) !== 0n) || + this.shouldConsumeLockedStakeableAsset(assetId) + ); + } + + /** + * Consumes a locked stakeable asset based on its asset ID and amount. + * + * @param {string} assetId - The ID of the asset to consume. + * @param {bigint} amount - The amount of the asset to consume. + * @returns A tuple of the remaining amount in the first position and the amount to stake in the second position. + */ + consumeLockedStakableAsset( + assetId: string, + amount: bigint, + ): [remainingAmount: bigint, amountToStake: bigint] { + if (amount < 0n) { + throw new Error('Amount to consume must be greater than or equal to 0'); + } + + const remainingAmountToStake = this.toStake.get(assetId) ?? 0n; + + // Stake any value that should be staked + const amountToStake = bigIntMin( + // Amount we still need to stake + remainingAmountToStake, + // Amount available to stake + amount, + ); + + this.toStake.set(assetId, remainingAmountToStake - amountToStake); + + return [amount - amountToStake, amountToStake]; + } + + /** + * Consumes an asset based on its asset ID and amount. + * + * @param {string} assetId - The ID of the asset to consume. + * @param {bigint} amount - The amount of the asset to consume. + * @returns A tuple of the remaining amount in the first position and the amount to stake in the second position. + */ + consumeAsset( + assetId: string, + amount: bigint, + ): [remainingAmount: bigint, amountToStake: bigint] { + if (amount < 0n) { + throw new Error('Amount to consume must be greater than or equal to 0'); + } + + const remainingAmountToBurn = this.toBurn.get(assetId) ?? 0n; + + // Burn any value that should be burned + const amountToBurn = bigIntMin( + // Amount we still need to burn + remainingAmountToBurn, + // Amount available to burn + amount, + ); + + this.toBurn.set(assetId, remainingAmountToBurn - amountToBurn); + + // Stake any remaining value that should be staked + return this.consumeLockedStakableAsset(assetId, amount - amountToBurn); + } + + /** + * Calculates the fee for the SpendHelper based on its complexity and gas price. + * Provide an empty change output as a parameter to calculate the fee as if the change output was already added. + * + * @param {TransferableOutput} additionalOutput - The change output that has not yet been added to the SpendHelper. + * @returns {bigint} The fee for the SpendHelper. + */ + calculateFee(additionalOutput?: TransferableOutput): bigint { + this.consolidateOutputs(); + + const gas = dimensionsToGas( + this.getComplexity( + additionalOutput ? getOutputComplexity([additionalOutput]) : undefined, + ), + this.weights, + ); + + return gas * this.gasPrice; + } + + /** + * Determines if a change output with a matching asset ID and output owners exists. 
+ * + * @param assetId The asset ID to check + * @param outputOwners The expected output owners on the asset ID + * @returns {boolean} True if a change output with matching assetId and outputOwners exists, false otherwise + */ + hasChangeOutput(assetId: string, outputOwners: OutputOwners): boolean { + return this.changeOutputs.some( + (transferableOutput) => + transferableOutput.assetId.value() === assetId && + isTransferOut(transferableOutput.output) && + transferableOutput.output.outputOwners.equals(outputOwners), + ); + } + + /** + * Verifies that all assets have been consumed. + * + * @returns {Error | null} An error if any assets have not been consumed, null otherwise. + */ + verifyAssetsConsumed(): Error | null { + for (const [assetId, amount] of this.toStake) { + if (amount === 0n) { + continue; + } + + return new Error( + `Insufficient funds! Provided UTXOs need ${amount} more units of asset ${assetId} to stake`, + ); + } + + for (const [assetId, amount] of this.toBurn) { + if (amount === 0n) { + continue; + } + + return new Error( + `Insufficient funds! Provided UTXOs need ${amount} more units of asset ${assetId}`, + ); + } + + return null; + } + + /** + * Gets the inputs, outputs, and UTXOs for the SpendHelper. + * + * @returns {object} The inputs, outputs, and UTXOs for the SpendHelper + */ + getInputsOutputs(): { + changeOutputs: readonly TransferableOutput[]; + fee: bigint; + inputs: readonly TransferableInput[]; + inputUTXOs: readonly Utxo[]; + stakeOutputs: readonly TransferableOutput[]; + } { + const fee = this.calculateFee(); + + const sortedInputs = [...this.inputs].sort(TransferableInput.compare); + const sortedChangeOutputs = [...this.changeOutputs].sort( + compareTransferableOutputs, + ); + const sortedStakeOutputs = [...this.stakeOutputs].sort( + compareTransferableOutputs, + ); + + return { + changeOutputs: sortedChangeOutputs, + fee, + inputs: sortedInputs, + inputUTXOs: this.inputUTXOs, + stakeOutputs: sortedStakeOutputs, + }; + } +} diff --git a/src/vms/pvm/index.ts b/src/vms/pvm/index.ts index b83ba1f78..063e2d988 100644 --- a/src/vms/pvm/index.ts +++ b/src/vms/pvm/index.ts @@ -1,3 +1,6 @@ export * from './builder'; export * from './models'; export * from './api'; + +// Exposed Etna builder functions under `e` namespace +export * as e from './etna-builder'; diff --git a/src/vms/pvm/txs/fee/calculator.test.ts b/src/vms/pvm/txs/fee/calculator.test.ts new file mode 100644 index 000000000..14af4378b --- /dev/null +++ b/src/vms/pvm/txs/fee/calculator.test.ts @@ -0,0 +1,36 @@ +import { txHexToTransaction } from '../../../../fixtures/transactions'; +import { calculateFee } from './calculator'; +import { + TEST_DYNAMIC_PRICE, + TEST_DYNAMIC_WEIGHTS, + TEST_TRANSACTIONS, + TEST_UNSUPPORTED_TRANSACTIONS, +} from './fixtures/transactions'; + +describe('Calculator', () => { + describe('calculateFee', () => { + test.each(TEST_TRANSACTIONS)( + 'calculates the fee for $name', + ({ txHex, expectedDynamicFee }) => { + const result = calculateFee( + txHexToTransaction('PVM', txHex), + TEST_DYNAMIC_WEIGHTS, + TEST_DYNAMIC_PRICE, + ); + + expect(result).toBe(expectedDynamicFee); + }, + ); + + test.each(TEST_UNSUPPORTED_TRANSACTIONS)( + 'unsupported tx - $name', + ({ txHex }) => { + const tx = txHexToTransaction('PVM', txHex); + + expect(() => { + calculateFee(tx, TEST_DYNAMIC_WEIGHTS, TEST_DYNAMIC_PRICE); + }).toThrow('Unsupported transaction type.'); + }, + ); + }); +}); diff --git a/src/vms/pvm/txs/fee/calculator.ts b/src/vms/pvm/txs/fee/calculator.ts new file mode 100644 index 
000000000..da8139ec1 --- /dev/null +++ b/src/vms/pvm/txs/fee/calculator.ts @@ -0,0 +1,22 @@ +import type { Transaction } from '../../../common'; +import { + dimensionsToGas, + type Dimensions, +} from '../../../common/fees/dimensions'; +import { getTxComplexity } from './complexity'; + +/** + * Calculates the minimum required fee, in nAVAX, that an unsigned + * transaction must pay for valid inclusion into a block. + */ +export const calculateFee = ( + tx: Transaction, + weights: Dimensions, + price: bigint, +): bigint => { + const complexity = getTxComplexity(tx); + + const gas = dimensionsToGas(complexity, weights); + + return gas * price; +}; diff --git a/src/vms/pvm/txs/fee/complexity.test.ts b/src/vms/pvm/txs/fee/complexity.test.ts new file mode 100644 index 000000000..7f56fe163 --- /dev/null +++ b/src/vms/pvm/txs/fee/complexity.test.ts @@ -0,0 +1,275 @@ +import { utxoId } from '../../../../fixtures/avax'; +import { address, id } from '../../../../fixtures/common'; +import { bigIntPr, int, ints } from '../../../../fixtures/primitives'; +import { signer } from '../../../../fixtures/pvm'; +import { txHexToTransaction } from '../../../../fixtures/transactions'; +import { + Input, + OutputOwners, + TransferInput, + TransferOutput, + TransferableInput, + TransferableOutput, +} from '../../../../serializable'; +import { + SignerEmpty, + StakeableLockIn, + StakeableLockOut, +} from '../../../../serializable/pvm'; +import { createDimensions } from '../../../common/fees/dimensions'; +import { + getAuthComplexity, + getInputComplexity, + getOutputComplexity, + getOwnerComplexity, + getSignerComplexity, + getTxComplexity, +} from './complexity'; +import { + TEST_TRANSACTIONS, + TEST_UNSUPPORTED_TRANSACTIONS, +} from './fixtures/transactions'; + +const makeOutputOwners = (numOfAddresses = 0) => + new OutputOwners( + bigIntPr(), + int(), + new Array(numOfAddresses).fill(address()), + ); + +const makeTransferableOutput = (numOfAddresses = 0) => + new TransferableOutput( + id(), + new TransferOutput(bigIntPr(), makeOutputOwners(numOfAddresses)), + ); + +const makeTransferableInput = (numOfSigInts = 0) => + new TransferableInput( + utxoId(), + id(), + new TransferInput( + bigIntPr(), + new Input(new Array(numOfSigInts).fill(int())), + ), + ); + +/** + * These tests are based off the tests found in the AvalancheGo repository: + * @see https://github.com/ava-labs/avalanchego/blob/master/vms/platformvm/txs/fee/complexity_test.go + */ +describe('Complexity', () => { + describe('getOutputComplexity', () => { + test('empty transferable output', () => { + const result = getOutputComplexity([]); + + expect(result).toEqual(createDimensions(0, 0, 0, 0)); + }); + + test('any can spend', () => { + const result = getOutputComplexity([makeTransferableOutput()]); + + expect(result).toEqual(createDimensions(60, 0, 1, 0)); + }); + + test('one owner', () => { + const result = getOutputComplexity([makeTransferableOutput(1)]); + + expect(result).toEqual(createDimensions(80, 0, 1, 0)); + }); + + test('three owners', () => { + const result = getOutputComplexity([makeTransferableOutput(3)]); + + expect(result).toEqual(createDimensions(120, 0, 1, 0)); + }); + + test('locked stakeable', () => { + const result = getOutputComplexity([ + new TransferableOutput( + id(), + new StakeableLockOut( + bigIntPr(), + new TransferOutput(bigIntPr(), makeOutputOwners(3)), + ), + ), + ]); + + expect(result).toEqual(createDimensions(132, 0, 1, 0)); + }); + }); + + describe('getInputComplexity', () => { + test('any can spend', () => { + const 
result = getInputComplexity([makeTransferableInput()]); + + expect(result).toEqual( + createDimensions( + 92, + 1, + 1, + 0, // TODO: Implement + ), + ); + }); + + test('one owner', () => { + const result = getInputComplexity([makeTransferableInput(1)]); + + expect(result).toEqual( + createDimensions( + 161, + 1, + 1, + 0, // TODO: Implement + ), + ); + }); + + test('three owners', () => { + const result = getInputComplexity([makeTransferableInput(3)]); + + expect(result).toEqual( + createDimensions( + 299, + 1, + 1, + 0, // TODO: Implement + ), + ); + }); + + test('locked stakeable', () => { + const result = getInputComplexity([ + new TransferableInput( + utxoId(), + id(), + new StakeableLockIn( + bigIntPr(), + new TransferInput(bigIntPr(), new Input(new Array(3).fill(int()))), + ), + ), + ]); + + expect(result).toEqual( + createDimensions( + 311, + 1, + 1, + 0, // TODO: Implement + ), + ); + }); + }); + + describe('getOwnerComplexity', () => { + test('any can spend', () => { + const result = getOwnerComplexity(makeOutputOwners()); + + expect(result).toEqual(createDimensions(16, 0, 0, 0)); + }); + + test('one owner', () => { + const result = getOwnerComplexity(makeOutputOwners(1)); + + expect(result).toEqual(createDimensions(36, 0, 0, 0)); + }); + + test('three owners', () => { + const result = getOwnerComplexity(makeOutputOwners(3)); + + expect(result).toEqual(createDimensions(76, 0, 0, 0)); + }); + }); + + describe('getSignerComplexity', () => { + test('empty signer', () => { + const result = getSignerComplexity(new SignerEmpty()); + + expect(result).toEqual(createDimensions(0, 0, 0, 0)); + }); + + test('bls pop', () => { + const result = getSignerComplexity(signer()); + + expect(result).toEqual( + createDimensions( + 144, + 0, + 0, + // TODO: Implement compute + 0, + ), + ); + }); + }); + + describe('getAuthComplexity', () => { + test('any can spend', () => { + const result = getAuthComplexity(new Input([])); + + expect(result).toEqual( + createDimensions( + 8, + 0, + 0, + 0, // TODO: Implement + ), + ); + }); + + test('one owner', () => { + const result = getAuthComplexity(new Input([int()])); + + expect(result).toEqual( + createDimensions( + 77, + 0, + 0, + 0, // TODO: Implement + ), + ); + }); + + test('three owners', () => { + const result = getAuthComplexity(new Input(ints())); + + expect(result).toEqual( + createDimensions( + 215, + 0, + 0, + 0, // TODO: Implement + ), + ); + }); + + test('invalid auth type', () => { + expect(() => { + getAuthComplexity(int()); + }).toThrow( + 'Unable to calculate auth complexity of transaction. Expected Input as subnet auth.', + ); + }); + }); + + describe('getTxComplexity', () => { + test.each(TEST_TRANSACTIONS)('$name', ({ txHex, expectedComplexity }) => { + const tx = txHexToTransaction('PVM', txHex); + + const result = getTxComplexity(tx); + + expect(result).toEqual(expectedComplexity); + }); + + test.each(TEST_UNSUPPORTED_TRANSACTIONS)( + 'unsupported tx - $name', + ({ txHex }) => { + const tx = txHexToTransaction('PVM', txHex); + + expect(() => { + getTxComplexity(tx); + }).toThrow('Unsupported transaction type.'); + }, + ); + }); +}); diff --git a/src/vms/pvm/txs/fee/complexity.ts b/src/vms/pvm/txs/fee/complexity.ts new file mode 100644 index 000000000..b2adfb3b8 --- /dev/null +++ b/src/vms/pvm/txs/fee/complexity.ts @@ -0,0 +1,344 @@ +/** + * @module + * + * The functions in this module are based off the complexity calculations found in the AvalancheGo repository. 
+ * @see https://github.com/ava-labs/avalanchego/blob/master/vms/platformvm/txs/fee/complexity.go + */ + +import type { OutputOwners } from '../../../../serializable'; +import { Input } from '../../../../serializable/fxs/secp256k1'; +import { SHORT_ID_LEN } from '../../../../serializable/fxs/common/nodeId'; +import { ID_LEN } from '../../../../serializable/fxs/common/id'; +import { + type BaseTx, + type TransferableInput, + type TransferableOutput, +} from '../../../../serializable/avax'; +import type { + AddPermissionlessDelegatorTx, + AddPermissionlessValidatorTx, + AddSubnetValidatorTx, + BaseTx as PvmBaseTx, + CreateChainTx, + CreateSubnetTx, + ExportTx, + ImportTx, + RemoveSubnetValidatorTx, + Signer, + TransferSubnetOwnershipTx, +} from '../../../../serializable/pvm'; +import { + SignerEmpty, + isAddPermissionlessDelegatorTx, + isAddPermissionlessValidatorTx, + isAddSubnetValidatorTx, + isCreateChainTx, + isCreateSubnetTx, + isExportTx, + isImportTx, + isPvmBaseTx, + isRemoveSubnetValidatorTx, + isTransferSubnetOwnershipTx, +} from '../../../../serializable/pvm'; +import { + isStakeableLockIn, + isStakeableLockOut, + isTransferOut, +} from '../../../../utils'; +import type { Dimensions } from '../../../common/fees/dimensions'; +import { + FeeDimensions, + addDimensions, + createEmptyDimensions, + createDimensions, +} from '../../../common/fees/dimensions'; +import type { Serializable } from '../../../common/types'; +import type { Transaction } from '../../../common'; +import { + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES, + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_BASE_TX_COMPLEXITIES, + INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES, + INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES, + INTRINSIC_EXPORT_TX_COMPLEXITIES, + INTRINSIC_IMPORT_TX_COMPLEXITIES, + INTRINSIC_INPUT_BANDWIDTH, + INTRINSIC_INPUT_DB_READ, + INTRINSIC_INPUT_DB_WRITE, + INTRINSIC_OUTPUT_BANDWIDTH, + INTRINSIC_OUTPUT_DB_WRITE, + INTRINSIC_POP_BANDWIDTH, + INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_SECP256K1_FX_INPUT_BANDWIDTH, + INTRINSIC_SECP256K1_FX_OUTPUT_BANDWIDTH, + INTRINSIC_SECP256K1_FX_OUTPUT_OWNERS_BANDWIDTH, + INTRINSIC_SECP256K1_FX_SIGNATURE_BANDWIDTH, + INTRINSIC_SECP256K1_FX_TRANSFERABLE_INPUT_BANDWIDTH, + INTRINSIC_STAKEABLE_LOCKED_INPUT_BANDWIDTH, + INTRINSIC_STAKEABLE_LOCKED_OUTPUT_BANDWIDTH, + INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES, +} from './constants'; + +/** + * Returns the complexity outputs add to a transaction. 
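 * Per the tests above, a plain secp256k1 transfer output costs 60 bytes of
 * bandwidth plus 20 bytes per listed address and one DB write; a stakeable
 * locked output adds a further 12 bytes of bandwidth for its locktime and
 * type ID.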
+ */ +export const getOutputComplexity = ( + transferableOutputs: readonly TransferableOutput[], +): Dimensions => { + let complexity = createEmptyDimensions(); + + for (const transferableOutput of transferableOutputs) { + const outComplexity: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_OUTPUT_BANDWIDTH + INTRINSIC_SECP256K1_FX_OUTPUT_BANDWIDTH, + [FeeDimensions.DBRead]: 0, + [FeeDimensions.DBWrite]: INTRINSIC_OUTPUT_DB_WRITE, + [FeeDimensions.Compute]: 0, + }; + + let numberOfAddresses = 0; + + if (isStakeableLockOut(transferableOutput.output)) { + outComplexity[FeeDimensions.Bandwidth] += + INTRINSIC_STAKEABLE_LOCKED_OUTPUT_BANDWIDTH; + numberOfAddresses = + transferableOutput.output.getOutputOwners().addrs.length; + } else if (isTransferOut(transferableOutput.output)) { + numberOfAddresses = transferableOutput.output.outputOwners.addrs.length; + } + + const addressBandwidth = numberOfAddresses * SHORT_ID_LEN; + + outComplexity[FeeDimensions.Bandwidth] += addressBandwidth; + + complexity = addDimensions(complexity, outComplexity); + } + + return complexity; +}; + +/** + * Returns the complexity inputs add to a transaction. + * + * It includes the complexity that the corresponding credentials will add. + */ +export const getInputComplexity = ( + transferableInputs: readonly TransferableInput[], +): Dimensions => { + let complexity = createEmptyDimensions(); + + for (const transferableInput of transferableInputs) { + const inputComplexity: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_INPUT_BANDWIDTH + + INTRINSIC_SECP256K1_FX_TRANSFERABLE_INPUT_BANDWIDTH, + [FeeDimensions.DBRead]: INTRINSIC_INPUT_DB_READ, + [FeeDimensions.DBWrite]: INTRINSIC_INPUT_DB_WRITE, + [FeeDimensions.Compute]: 0, // TODO: Add compute complexity. + }; + + if (isStakeableLockIn(transferableInput.input)) { + inputComplexity[FeeDimensions.Bandwidth] += + INTRINSIC_STAKEABLE_LOCKED_INPUT_BANDWIDTH; + } + + const numberOfSignatures = transferableInput.sigIndicies().length; + + const signatureBandwidth = + numberOfSignatures * INTRINSIC_SECP256K1_FX_SIGNATURE_BANDWIDTH; + + inputComplexity[FeeDimensions.Bandwidth] += signatureBandwidth; + + complexity = addDimensions(complexity, inputComplexity); + } + + return complexity; +}; + +export const getSignerComplexity = ( + signer: Signer | SignerEmpty, +): Dimensions => { + if (signer instanceof SignerEmpty) { + return createEmptyDimensions(); + } + + return createDimensions( + INTRINSIC_POP_BANDWIDTH, + 0, + 0, + 0, // TODO: Add compute complexity. + ); +}; + +export const getOwnerComplexity = (outputOwners: OutputOwners): Dimensions => { + const numberOfAddresses = outputOwners.addrs.length; + const addressBandwidth = numberOfAddresses * SHORT_ID_LEN; + + const bandwidth = + addressBandwidth + INTRINSIC_SECP256K1_FX_OUTPUT_OWNERS_BANDWIDTH; + + return createDimensions(bandwidth, 0, 0, 0); +}; + +/** + * Returns the complexity an authorization adds to a transaction. + * It does not include the typeID of the authorization. + * It does include the complexity that the corresponding credential will add. + * It does not include the typeID of the credential. + */ +export const getAuthComplexity = (input: Serializable): Dimensions => { + if (!(input instanceof Input)) { + throw new Error( + 'Unable to calculate auth complexity of transaction. 
Expected Input as subnet auth.', + ); + } + + const numberOfSignatures = input.values().length; + + const signatureBandwidth = + numberOfSignatures * INTRINSIC_SECP256K1_FX_SIGNATURE_BANDWIDTH; + + const bandwidth = signatureBandwidth + INTRINSIC_SECP256K1_FX_INPUT_BANDWIDTH; + + return createDimensions( + bandwidth, + 0, + 0, + 0, // TODO: Add compute complexity. + ); +}; + +const getBaseTxComplexity = (baseTx: BaseTx): Dimensions => { + const outputsComplexity = getOutputComplexity(baseTx.outputs); + const inputsComplexity = getInputComplexity(baseTx.inputs); + + const complexity = addDimensions(outputsComplexity, inputsComplexity); + + complexity[FeeDimensions.Bandwidth] += baseTx.memo.length; + + return complexity; +}; + +const addPermissionlessValidatorTx = ( + tx: AddPermissionlessValidatorTx, +): Dimensions => { + return addDimensions( + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + getSignerComplexity(tx.signer), + getOutputComplexity(tx.stake), + getOwnerComplexity(tx.getValidatorRewardsOwner()), + getOwnerComplexity(tx.getDelegatorRewardsOwner()), + ); +}; + +const addPermissionlessDelegatorTx = ( + tx: AddPermissionlessDelegatorTx, +): Dimensions => { + return addDimensions( + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + getOwnerComplexity(tx.getDelegatorRewardsOwner()), + getOutputComplexity(tx.stake), + ); +}; + +const addSubnetValidatorTx = (tx: AddSubnetValidatorTx): Dimensions => { + return addDimensions( + INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + getAuthComplexity(tx.subnetAuth), + ); +}; + +const baseTx = (tx: PvmBaseTx): Dimensions => { + return addDimensions( + INTRINSIC_BASE_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + ); +}; + +const createChainTx = (tx: CreateChainTx): Dimensions => { + let bandwidth: number = tx.fxIds.length * ID_LEN; + bandwidth += tx.chainName.value().length; + bandwidth += tx.genesisData.length; + + const dynamicComplexity = createDimensions(bandwidth, 0, 0, 0); + + return addDimensions( + INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES, + dynamicComplexity, + getBaseTxComplexity(tx.baseTx), + getAuthComplexity(tx.subnetAuth), + ); +}; + +const createSubnetTx = (tx: CreateSubnetTx): Dimensions => { + return addDimensions( + INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + getOwnerComplexity(tx.getSubnetOwners()), + ); +}; + +const exportTx = (tx: ExportTx): Dimensions => { + return addDimensions( + INTRINSIC_EXPORT_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + getOutputComplexity(tx.outs), + ); +}; + +const importTx = (tx: ImportTx): Dimensions => { + return addDimensions( + INTRINSIC_IMPORT_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + getInputComplexity(tx.ins), + ); +}; + +const removeSubnetValidatorTx = (tx: RemoveSubnetValidatorTx): Dimensions => { + return addDimensions( + INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + getAuthComplexity(tx.subnetAuth), + ); +}; + +const transferSubnetOwnershipTx = ( + tx: TransferSubnetOwnershipTx, +): Dimensions => { + return addDimensions( + INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES, + getBaseTxComplexity(tx.baseTx), + getAuthComplexity(tx.subnetAuth), + getOwnerComplexity(tx.getSubnetOwners()), + ); +}; + +export const getTxComplexity = (tx: Transaction): Dimensions => { + if (isAddPermissionlessValidatorTx(tx)) { + return addPermissionlessValidatorTx(tx); + } else if 
(isAddPermissionlessDelegatorTx(tx)) { + return addPermissionlessDelegatorTx(tx); + } else if (isAddSubnetValidatorTx(tx)) { + return addSubnetValidatorTx(tx); + } else if (isCreateChainTx(tx)) { + return createChainTx(tx); + } else if (isCreateSubnetTx(tx)) { + return createSubnetTx(tx); + } else if (isExportTx(tx)) { + return exportTx(tx); + } else if (isImportTx(tx)) { + return importTx(tx); + } else if (isRemoveSubnetValidatorTx(tx)) { + return removeSubnetValidatorTx(tx); + } else if (isTransferSubnetOwnershipTx(tx)) { + return transferSubnetOwnershipTx(tx); + } else if (isPvmBaseTx(tx)) { + return baseTx(tx); + } else { + throw new Error('Unsupported transaction type.'); + } +}; diff --git a/src/vms/pvm/txs/fee/constants.ts b/src/vms/pvm/txs/fee/constants.ts new file mode 100644 index 000000000..c8d5532e5 --- /dev/null +++ b/src/vms/pvm/txs/fee/constants.ts @@ -0,0 +1,200 @@ +/** + * The INTRINSIC constants are based on the following constants from the AvalancheGo codebase: + * @see https://github.com/ava-labs/avalanchego/blob/master/vms/platformvm/txs/fee/complexity.go + */ +import type { Dimensions } from '../../../common/fees/dimensions'; +import { FeeDimensions } from '../../../common/fees/dimensions'; +import { + PUBLIC_KEY_LENGTH, + SIGNATURE_LENGTH as BLS_SIGNATURE_LENGTH, +} from '../../../../crypto/bls'; +import { SIGNATURE_LENGTH } from '../../../../crypto/secp256k1'; +import { INT_LEN } from '../../../../serializable/primitives/int'; +import { SHORT_LEN } from '../../../../serializable/primitives/short'; +import { SHORT_ID_LEN } from '../../../../serializable/fxs/common/nodeId'; +import { ID_LEN } from '../../../../serializable/fxs/common/id'; + +/** + * Number of bytes per long. + */ +const LONG_LEN = 8; + +const INTRINSIC_VALIDATOR_BANDWIDTH = + SHORT_ID_LEN + // Node ID (Short ID = 20) + LONG_LEN + // Start + LONG_LEN + // End + LONG_LEN; // Weight + +const INTRINSIC_SUBNET_VALIDATOR_BANDWIDTH = + INTRINSIC_VALIDATOR_BANDWIDTH + // Validator + ID_LEN; // Subnet ID (ID Length = 32) + +export const INTRINSIC_OUTPUT_BANDWIDTH = + ID_LEN + // assetID + INT_LEN; // output typeID + +export const INTRINSIC_STAKEABLE_LOCKED_OUTPUT_BANDWIDTH = + LONG_LEN + // locktime + INT_LEN; // output typeID + +export const INTRINSIC_SECP256K1_FX_OUTPUT_OWNERS_BANDWIDTH = + LONG_LEN + // locktime + INT_LEN + // threshold + INT_LEN; // number of addresses + +export const INTRINSIC_SECP256K1_FX_OUTPUT_BANDWIDTH = + LONG_LEN + // amount + INTRINSIC_SECP256K1_FX_OUTPUT_OWNERS_BANDWIDTH; + +export const INTRINSIC_INPUT_BANDWIDTH = + ID_LEN + // txID + INT_LEN + // output index + ID_LEN + // assetID + INT_LEN + // input typeID + INT_LEN; // credential typeID + +export const INTRINSIC_STAKEABLE_LOCKED_INPUT_BANDWIDTH = + LONG_LEN + // locktime + INT_LEN; // input typeID + +export const INTRINSIC_SECP256K1_FX_INPUT_BANDWIDTH = + INT_LEN + // num indices + INT_LEN; // num signatures + +export const INTRINSIC_SECP256K1_FX_TRANSFERABLE_INPUT_BANDWIDTH = + LONG_LEN + // amount + INTRINSIC_SECP256K1_FX_INPUT_BANDWIDTH; + +export const INTRINSIC_SECP256K1_FX_SIGNATURE_BANDWIDTH = + INT_LEN + // Signature index + SIGNATURE_LENGTH; // Signature + +export const INTRINSIC_POP_BANDWIDTH = + PUBLIC_KEY_LENGTH + // Public key + BLS_SIGNATURE_LENGTH; // Signature + +export const INTRINSIC_INPUT_DB_READ = 1; +export const INTRINSIC_INPUT_DB_WRITE = 1; +export const INTRINSIC_OUTPUT_DB_WRITE = 1; + +export const INTRINSIC_BASE_TX_COMPLEXITIES: Dimensions = { + [FeeDimensions.Bandwidth]: + 2 + // codec version + 
INT_LEN + // typeID + INT_LEN + // networkID + ID_LEN + // blockchainID + INT_LEN + // number of outputs + INT_LEN + // number of inputs + INT_LEN + // length of memo + INT_LEN, // number of credentials + [FeeDimensions.DBRead]: 0, + [FeeDimensions.DBWrite]: 0, + [FeeDimensions.Compute]: 0, +}; + +export const INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + + ID_LEN + // Subnet ID + SHORT_LEN + // Chain name length + ID_LEN + // vmID + INT_LEN + // num fIds + INT_LEN + // genesis length + INT_LEN + // subnetAuth typeID + INT_LEN, // subnetAuthCredential typeID + [FeeDimensions.DBRead]: 1, + [FeeDimensions.DBWrite]: 1, + [FeeDimensions.Compute]: 0, +}; + +export const INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + INT_LEN, // owner typeID + [FeeDimensions.DBRead]: 0, + [FeeDimensions.DBWrite]: 1, + [FeeDimensions.Compute]: 0, +}; + +export const INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES: Dimensions = + { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + + INTRINSIC_VALIDATOR_BANDWIDTH + // Validator + ID_LEN + // Subnet ID + INT_LEN + // Signer typeID + INT_LEN + // Num stake outs + INT_LEN + // Validator rewards typeID + INT_LEN + // Delegator rewards typeID + INT_LEN, // Delegation shares + [FeeDimensions.DBRead]: 1, + [FeeDimensions.DBWrite]: 1, + [FeeDimensions.Compute]: 0, + }; + +export const INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES: Dimensions = + { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + + INTRINSIC_VALIDATOR_BANDWIDTH + // Validator + ID_LEN + // Subnet ID + INT_LEN + // Num stake outs + INT_LEN, // Delegator rewards typeID + [FeeDimensions.DBRead]: 1, + [FeeDimensions.DBWrite]: 1, + [FeeDimensions.Compute]: 0, + }; + +export const INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + + INTRINSIC_SUBNET_VALIDATOR_BANDWIDTH + // Subnet Validator + INT_LEN + // Subnet auth typeID + INT_LEN, // Subnet auth credential typeID + [FeeDimensions.DBRead]: 2, + [FeeDimensions.DBWrite]: 1, + [FeeDimensions.Compute]: 0, +}; + +export const INTRINSIC_EXPORT_TX_COMPLEXITIES: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + + ID_LEN + // destination chain ID + INT_LEN, // num exported outputs + [FeeDimensions.DBRead]: 0, + [FeeDimensions.DBWrite]: 0, + [FeeDimensions.Compute]: 0, +}; + +export const INTRINSIC_IMPORT_TX_COMPLEXITIES: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + + ID_LEN + // source chain ID + INT_LEN, // num imported inputs + [FeeDimensions.DBRead]: 0, + [FeeDimensions.DBWrite]: 0, + [FeeDimensions.Compute]: 0, +}; + +export const INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + + SHORT_ID_LEN + // nodeID + ID_LEN + // subnetID + INT_LEN + // subnetAuth typeId + INT_LEN, // subnetAuth credential typeId + [FeeDimensions.DBRead]: 2, + [FeeDimensions.DBWrite]: 1, + [FeeDimensions.Compute]: 0, +}; + +export const INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES: Dimensions = { + [FeeDimensions.Bandwidth]: + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.Bandwidth] + + 
ID_LEN + // subnetID + INT_LEN + // subnetAuth typeID + INT_LEN + // owner typeID + INT_LEN, // subnetAuth credential typeID + [FeeDimensions.DBRead]: 1, + [FeeDimensions.DBWrite]: 1, + [FeeDimensions.Compute]: 0, +}; diff --git a/src/vms/pvm/txs/fee/fixtures/transactions.ts b/src/vms/pvm/txs/fee/fixtures/transactions.ts new file mode 100644 index 000000000..838fed08c --- /dev/null +++ b/src/vms/pvm/txs/fee/fixtures/transactions.ts @@ -0,0 +1,276 @@ +/** + * These test transactions are based off of AvalancheGo's test transactions. + * @see https://github.com/ava-labs/avalanchego/blob/master/vms/platformvm/txs/fee/calculator_test.go + */ + +import type { Dimensions } from '../../../../common/fees/dimensions'; +import { + FeeDimensions, + createDimensions, +} from '../../../../common/fees/dimensions'; +import { + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES, + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_BASE_TX_COMPLEXITIES, + INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES, + INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES, + INTRINSIC_EXPORT_TX_COMPLEXITIES, + INTRINSIC_IMPORT_TX_COMPLEXITIES, + INTRINSIC_INPUT_DB_READ, + INTRINSIC_INPUT_DB_WRITE, + INTRINSIC_OUTPUT_DB_WRITE, + INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES, +} from '../constants'; + +export const TEST_DYNAMIC_PRICE = 100n; + +export const TEST_DYNAMIC_WEIGHTS: Dimensions = createDimensions( + 1, + 200, + 300, + 0, // TODO: Populate +); + +export const TEST_TRANSACTIONS: ReadonlyArray<{ + name: string; + txHex: string; + expectedComplexity: Dimensions; + expectedDynamicFee: bigint; +}> = [ + { + name: 'BaseTx', + txHex: + '00000000002200003039000000000000000000000000000000000000000000000000000000000000000000000002dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000007000000003b9aca00000000000000000100000002000000024a177205df5c29929d06db9d941f83d5ea985de3e902a9a86640bfdb1cd0e36c0cc982b83e5765fadbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000070023834ed587af80000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c00000001fa4ff39749d44f29563ed9da03193d4a19ef419da4ce326594817ca266fda5ed00000000dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000050023834f1131bbc00000000100000000000000000000000100000009000000014a7b54c63dd25a532b5fe5045b6d0e1db876e067422f12c9c327333c2c792d9273405ac8bbbc2cce549bbd3d0f9274242085ee257adfdb859b0f8d55bdd16fb000', + expectedComplexity: createDimensions( + 399, + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.DBRead] + + INTRINSIC_INPUT_DB_READ, + INTRINSIC_BASE_TX_COMPLEXITIES[FeeDimensions.DBWrite] + + INTRINSIC_INPUT_DB_WRITE + + 2 * INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 149_900n, + }, + + { + name: 'AddPermissionlessValidatorTx for primary network', + txHex: + 
'00000000001900003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db0000000700238520ba8b1e00000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c00000001043c91e9d508169329034e2a68110427a311f945efc53ed3f3493d335b393fd100000000dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000005002386f263d53e00000000010000000000000000c582872c37c81efa2c94ea347af49cdc23a830aa00000000669ae35f0000000066b692df000001d1a94a200000000000000000000000000000000000000000000000000000000000000000000000001ca3783a891cb41cadbfcf456da149f30e7af972677a162b984bef0779f254baac51ec042df1781d1295df80fb41c801269731fc6c25e1e5940dc3cb8509e30348fa712742cfdc83678acc9f95908eb98b89b28802fb559b4a2a6ff3216707c07f0ceb0b45a95f4f9a9540bbd3331d8ab4f233bffa4abb97fad9d59a1695f31b92a2b89e365facf7ab8c30de7c4a496d1e00000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000007000001d1a94a2000000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000b000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000b000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0007a12000000001000000090000000135f122f90bcece0d6c43e07fed1829578a23bc1734f8a4b46203f9f192ea1aec7526f3dca8fddec7418988615e6543012452bae1544275aae435313ec006ec9000', + expectedComplexity: createDimensions( + 691, + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES[ + FeeDimensions.DBRead + ] + INTRINSIC_INPUT_DB_READ, + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES[ + FeeDimensions.DBWrite + ] + + INTRINSIC_INPUT_DB_WRITE + + 2 * INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 229_100n, + }, + + { + name: 'AddPermissionlessValidatorTx for subnet', + txHex: + '000000000019000030390000000000000000000000000000000000000000000000000000000000000000000000022f6399f3e626fe1e75f9daa5e726cb64b7bfec0b6e6d8930eaa9dfa336edca7a000000070000000000006091000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29cdbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db0000000700238520ba6c9980000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c00000002038b42b73d3dc695c76ca12f966e97fe0681b1200f9a5e28d088720a18ea23c9000000002f6399f3e626fe1e75f9daa5e726cb64b7bfec0b6e6d8930eaa9dfa336edca7a00000005000000000000609b0000000100000000a378b74b3293a9d885bd9961f2cc2e1b3364d393c9be875964f2bd614214572c00000000dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db0000000500238520ba7bdbc0000000010000000000000000c582872c37c81efa2c94ea347af49cdc23a830aa0000000066a57a160000000066b7ef16000000000000000a97ea88082100491617204ed70c19fc1a2fce4474bee962904359d0b59e84c1240000001b000000012f6399f3e626fe1e75f9daa5e726cb64b7bfec0b6e6d8930eaa9dfa336edca7a00000007000000000000000a000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000b000000000000000000000000000000000000000b00000000000000000000000000000000000f4240000000020000000900000001593fc20f88a8ce0b3470b0bb103e5f7e09f65023b6515d36660da53f9a15dedc1037ee27a8c4a27c24e20ad3b0ab4bd1ff3a02a6fcc2cbe04282bfe9902c9ae6000000000900000001593fc20f88a8ce0b3470b0bb103e5f7e09f65023b6515d36660da53f9a15dedc1037ee27a8c4a27c24e20ad3b0ab4bd1ff3a02a6fcc2cbe04282bfe9902c9ae600', + expectedComplexity: createDimensions( + 748, + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES[ + FeeDimensions.DBRead + ] + + 2 * INTRINSIC_INPUT_DB_READ, + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES[ + 
FeeDimensions.DBWrite + ] + + 2 * INTRINSIC_INPUT_DB_WRITE + + 3 * INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 314_800n, + }, + + { + name: 'AddPermissionlessDelegatorTx for primary network', + txHex: + '00000000001a00003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000070023834f1140fe00000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c000000017d199179744b3b82d0071c83c2fb7dd6b95a2cdbe9dde295e0ae4f8c2287370300000000dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db0000000500238520ba8b1e00000000010000000000000000c582872c37c81efa2c94ea347af49cdc23a830aa00000000669ae6080000000066ad5b08000001d1a94a2000000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000007000001d1a94a2000000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000b000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000100000009000000012261556f74a29f02ffc2725a567db2c81f75d0892525dbebaa1cf8650534cc70061123533a9553184cb02d899943ff0bf0b39c77b173c133854bc7c8bc7ab9a400', + expectedComplexity: createDimensions( + 499, + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES[ + FeeDimensions.DBRead + ] + + 1 * INTRINSIC_INPUT_DB_READ, + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES[ + FeeDimensions.DBWrite + ] + + 1 * INTRINSIC_INPUT_DB_WRITE + + 2 * INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 209_900n, + }, + + { + name: 'AddPermissionlessDelegatorTx for subnet', + txHex: + '00000000001a000030390000000000000000000000000000000000000000000000000000000000000000000000022f6399f3e626fe1e75f9daa5e726cb64b7bfec0b6e6d8930eaa9dfa336edca7a000000070000000000006087000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29cdbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db0000000700470c1336195b80000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c000000029494c80361884942e4292c3531e8e790fcf7561e74404ded27eab8634e3fb30f000000002f6399f3e626fe1e75f9daa5e726cb64b7bfec0b6e6d8930eaa9dfa336edca7a00000005000000000000609100000001000000009494c80361884942e4292c3531e8e790fcf7561e74404ded27eab8634e3fb30f00000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db0000000500470c1336289dc0000000010000000000000000c582872c37c81efa2c94ea347af49cdc23a830aa0000000066a57c1d0000000066b7f11d000000000000000a97ea88082100491617204ed70c19fc1a2fce4474bee962904359d0b59e84c124000000012f6399f3e626fe1e75f9daa5e726cb64b7bfec0b6e6d8930eaa9dfa336edca7a00000007000000000000000a000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000b00000000000000000000000000000000000000020000000900000001764190e2405fef72fce0d355e3dcc58a9f5621e583ae718cb2c23b55957995d1206d0b5efcc3cef99815e17a4b2cccd700147a759b7279a131745b237659666a000000000900000001764190e2405fef72fce0d355e3dcc58a9f5621e583ae718cb2c23b55957995d1206d0b5efcc3cef99815e17a4b2cccd700147a759b7279a131745b237659666a00', + expectedComplexity: createDimensions( + 720, + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES[ + FeeDimensions.DBRead + ] + + 2 * INTRINSIC_INPUT_DB_READ, + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES[ + FeeDimensions.DBWrite + ] + + 2 * INTRINSIC_INPUT_DB_WRITE + + 3 * INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 312_000n, + }, + + { + name: 
'AddSubnetValidatorTx', + txHex: + '00000000000d00003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000070023834f1131bbc0000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000138f94d1a0514eaabdaf4c52cad8d62b26cee61eaa951f5b75a5e57c2ee3793c800000000dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000050023834f1140fe00000000010000000000000000c582872c37c81efa2c94ea347af49cdc23a830aa00000000669ae7c90000000066ad5cc9000000000000c13797ea88082100491617204ed70c19fc1a2fce4474bee962904359d0b59e84c1240000000a00000001000000000000000200000009000000012127130d37877fb1ec4b2374ef72571d49cd7b0319a3769e5da19041a138166c10b1a5c07cf5ccf0419066cbe3bab9827cf29f9fa6213ebdadf19d4849501eb60000000009000000012127130d37877fb1ec4b2374ef72571d49cd7b0319a3769e5da19041a138166c10b1a5c07cf5ccf0419066cbe3bab9827cf29f9fa6213ebdadf19d4849501eb600', + expectedComplexity: createDimensions( + 460, + INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES[FeeDimensions.DBRead] + + INTRINSIC_INPUT_DB_READ, + INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES[FeeDimensions.DBWrite] + + INTRINSIC_INPUT_DB_WRITE + + INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 196_000n, + }, + + { + name: 'CreateChainTx', + txHex: + '00000000000f00003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000007002386f263d53e00000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000197ea88082100491617204ed70c19fc1a2fce4474bee962904359d0b59e84c12400000000dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000005002386f269cb1f0000000001000000000000000097ea88082100491617204ed70c19fc1a2fce4474bee962904359d0b59e84c12400096c65742074686572657873766d00000000000000000000000000000000000000000000000000000000000000000000002a000000000000669ae21e000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29cffffffffffffffff0000000a0000000100000000000000020000000900000001cf8104877b1a59b472f4f34d360c0e4f38e92c5fa334215430d0b99cf78eae8f621b6daf0b0f5c3a58a9497601f978698a1e5545d1873db8f2f38ecb7496c2f8010000000900000001cf8104877b1a59b472f4f34d360c0e4f38e92c5fa334215430d0b99cf78eae8f621b6daf0b0f5c3a58a9497601f978698a1e5545d1873db8f2f38ecb7496c2f801', + expectedComplexity: createDimensions( + 509, + INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES[FeeDimensions.DBRead] + + INTRINSIC_INPUT_DB_READ, + INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES[FeeDimensions.DBWrite] + + INTRINSIC_INPUT_DB_WRITE + + INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 180_900n, + }, + + { + name: 'CreateSubnetTx', + txHex: + '00000000001000003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000007002386f269cb1f00000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c00000001000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000005002386f26fc100000000000100000000000000000000000b000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c000000010000000900000001b3c905e7227e619bd6b98c164a8b2b4a8ce89ac5142bbb1c42b139df2d17fd777c4c76eae66cef3de90800e567407945f58d918978f734f8ca4eda6923c78eb201', + expectedComplexity: createDimensions( + 339, + INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES[FeeDimensions.DBRead] + + 
INTRINSIC_INPUT_DB_READ, + INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES[FeeDimensions.DBWrite] + + INTRINSIC_INPUT_DB_WRITE + + INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 143_900n, + }, + + { + name: 'ExportTx', + txHex: + '00000000001200003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000070023834e99dda340000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c00000001f62c03574790b6a31a988f90c3e91c50fdd6f5d93baf200057463021ff23ec5c00000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000050023834ed587af800000000100000000000000009d0775f450604bd2fbc49ce0c5c1c6dfeb2dc2acb8c92c26eeae6e6df4502b1900000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000007000000003b9aca00000000000000000100000002000000024a177205df5c29929d06db9d941f83d5ea985de3e902a9a86640bfdb1cd0e36c0cc982b83e5765fa000000010000000900000001129a07c92045e0b9d0a203fcb5b53db7890fabce1397ff6a2ad16c98ef0151891ae72949d240122abf37b1206b95e05ff171df164a98e6bdf2384432eac2c30200', + expectedComplexity: createDimensions( + 435, + INTRINSIC_EXPORT_TX_COMPLEXITIES[FeeDimensions.DBRead] + + INTRINSIC_INPUT_DB_READ, + INTRINSIC_EXPORT_TX_COMPLEXITIES[FeeDimensions.DBWrite] + + INTRINSIC_INPUT_DB_WRITE + + 2 * INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 153_500n, + }, + + { + name: 'ImportTx', + txHex: + '00000000001100003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000007000000003b8b87c0000000000000000100000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c0000000000000000d891ad56056d9c01f18f43f58b5c784ad07a4a49cf3d1f11623804b5cba2c6bf0000000163684415710a7d65f4ccb095edff59f897106b94d38937fc60e3ffc29892833b00000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db00000005000000003b9aca00000000010000000000000001000000090000000148ea12cb0950e47d852b99765208f5a811d3c8a47fa7b23fd524bd970019d157029f973abb91c31a146752ef8178434deb331db24c8dca5e61c961e6ac2f3b6700', + expectedComplexity: createDimensions( + 335, + INTRINSIC_IMPORT_TX_COMPLEXITIES[FeeDimensions.DBRead] + + INTRINSIC_INPUT_DB_READ, + INTRINSIC_IMPORT_TX_COMPLEXITIES[FeeDimensions.DBWrite] + + INTRINSIC_INPUT_DB_WRITE + + INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 113_500n, + }, + + { + name: 'RemoveSubnetValidatorTx', + txHex: + '00000000001700003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000070023834e99ce6100000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c00000001cd4569cfd044d50636fa597c700710403b3b52d3b75c30c542a111cc52c911ec00000000dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000050023834e99dda340000000010000000000000000c582872c37c81efa2c94ea347af49cdc23a830aa97ea88082100491617204ed70c19fc1a2fce4474bee962904359d0b59e84c1240000000a0000000100000000000000020000000900000001673ee3e5a3a1221935274e8ff5c45b27ebe570e9731948e393a8ebef6a15391c189a54de7d2396095492ae171103cd4bfccfc2a4dafa001d48c130694c105c2d010000000900000001673ee3e5a3a1221935274e8ff5c45b27ebe570e9731948e393a8ebef6a15391c189a54de7d2396095492ae171103cd4bfccfc2a4dafa001d48c130694c105c2d01', + expectedComplexity: createDimensions( + 436, + INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES[FeeDimensions.DBRead] + + INTRINSIC_INPUT_DB_READ, 
+ INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES[FeeDimensions.DBWrite] + + INTRINSIC_INPUT_DB_WRITE + + INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 193_600n, + }, + + { + name: 'TransferSubnetOwnershipTx', + txHex: + '00000000002100003039000000000000000000000000000000000000000000000000000000000000000000000001dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000070023834e99bf1ec0000000000000000000000001000000013cb7d3842e8cee6a0ebd09f1fe884f6861e1b29c000000018f6e5f2840e34f9a375f35627a44bb0b9974285d280dc3220aa9489f97b17ebd00000000dbcf890f77f49b96857648b72b77f9f82937f28a68704af05da0dc12ba53f2db000000050023834e99ce610000000001000000000000000097ea88082100491617204ed70c19fc1a2fce4474bee962904359d0b59e84c1240000000a00000001000000000000000b00000000000000000000000000000000000000020000000900000001e3479034ed8134dd23e154e1ec6e61b25073a20750ebf808e50ec1aae180ef430f8151347afdf6606bc7866f7f068b01719e4dad12e2976af1159fb048f73f7f010000000900000001e3479034ed8134dd23e154e1ec6e61b25073a20750ebf808e50ec1aae180ef430f8151347afdf6606bc7866f7f068b01719e4dad12e2976af1159fb048f73f7f01', + expectedComplexity: createDimensions( + 436, + INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES[ + FeeDimensions.DBRead + ] + INTRINSIC_INPUT_DB_READ, + INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES[ + FeeDimensions.DBWrite + ] + + INTRINSIC_INPUT_DB_WRITE + + INTRINSIC_OUTPUT_DB_WRITE, + 0, // TODO: Implement + ), + expectedDynamicFee: 173_600n, + }, +]; + +export const TEST_UNSUPPORTED_TRANSACTIONS = [ + { + name: 'AddDelegatorTx', + txHex: + '00000000000e000000050000000000000000000000000000000000000000000000000000000000000000000000013d9bdac0ed1d761330cf680efdeb1a42159eb387d6d2950c96f7d28f61bbe2aa00000007000000003b9aca0000000000000000000000000100000001f887b4c7030e95d2495603ae5d8b14cc0a66781a000000011767be999a49ca24fe705de032fa613b682493110fd6468ae7fb56bde1b9d729000000003d9bdac0ed1d761330cf680efdeb1a42159eb387d6d2950c96f7d28f61bbe2aa00000005000000012a05f20000000001000000000000000400000000c51c552c49174e2e18b392049d3e4cd48b11490f000000005f692452000000005f73b05200000000ee6b2800000000013d9bdac0ed1d761330cf680efdeb1a42159eb387d6d2950c96f7d28f61bbe2aa0000000700000000ee6b280000000000000000000000000100000001e0cfe8cae22827d032805ded484e393ce51cbedb0000000b00000000000000000000000100000001e0cfe8cae22827d032805ded484e393ce51cbedb00000001000000090000000135cd78758035ed528d230317e5d880083a86a2b68c4a95655571828fe226548f235031c8dabd1fe06366a57613c4370ac26c4c59d1a1c46287a59906ec41b88f00', + }, + + { + name: 'AddValidatorTx', + txHex: + '00000000000c0000000100000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000f4b21e67317cbc4be2aeb00677ad6462778a8f52274b9d605df2591b23027a87dff00000015000000006134088000000005000001d1a94a200000000001000000000000000400000000b3da694c70b8bee4478051313621c3f2282088b4000000005f6976d500000000614aaa19000001d1a94a20000000000121e67317cbc4be2aeb00677ad6462778a8f52274b9d605df2591b23027a87dff00000016000000006134088000000007000001d1a94a20000000000000000000000000010000000120868ed5ac611711b33d2e4f97085347415db1c40000000b0000000000000000000000010000000120868ed5ac611711b33d2e4f97085347415db1c400009c40000000010000000900000001620513952dd17c8726d52e9e621618cb38f09fd194abb4cd7b4ee35ecd10880a562ad968dc81a89beab4e87d88d5d582aa73d0d265c87892d1ffff1f6e00f0ef00', + }, + + { + name: 'RewardValidatorTx', + txHex: + 
'0000000000143d0ad12b8ee8928edf248ca91ca55600fb383f07c32bff1d6dec472b25cf59a700000000', + }, + + { + name: 'AdvanceTimeTx', + txHex: '0000000000130000000066a56fe700000000', + }, +]; diff --git a/src/vms/pvm/txs/fee/index.ts b/src/vms/pvm/txs/fee/index.ts new file mode 100644 index 000000000..41e6534c3 --- /dev/null +++ b/src/vms/pvm/txs/fee/index.ts @@ -0,0 +1,21 @@ +export { + INTRINSIC_ADD_PERMISSIONLESS_DELEGATOR_TX_COMPLEXITIES, + INTRINSIC_ADD_PERMISSIONLESS_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_ADD_SUBNET_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_BASE_TX_COMPLEXITIES, + INTRINSIC_CREATE_CHAIN_TX_COMPLEXITIES, + INTRINSIC_CREATE_SUBNET_TX_COMPLEXITIES, + INTRINSIC_EXPORT_TX_COMPLEXITIES, + INTRINSIC_IMPORT_TX_COMPLEXITIES, + INTRINSIC_REMOVE_SUBNET_VALIDATOR_TX_COMPLEXITIES, + INTRINSIC_TRANSFER_SUBNET_OWNERSHIP_TX_COMPLEXITIES, +} from './constants'; + +export { + getAuthComplexity, + getInputComplexity, + getOutputComplexity, + getOwnerComplexity, + getSignerComplexity, + getTxComplexity, +} from './complexity'; diff --git a/src/vms/utils/calculateSpend/calculateSpend.ts b/src/vms/utils/calculateSpend/calculateSpend.ts index 9bbaf0736..ff04b66ae 100644 --- a/src/vms/utils/calculateSpend/calculateSpend.ts +++ b/src/vms/utils/calculateSpend/calculateSpend.ts @@ -4,7 +4,11 @@ import type { Address } from '../../../serializable/fxs/common'; import { AddressMaps } from '../../../utils/addressMap'; import { compareTransferableOutputs } from '../../../utils/sort'; import type { SpendOptionsRequired } from '../../common'; -import type { UTXOCalculationFn, UTXOCalculationResult } from './models'; +import type { + UTXOCalculationFn, + UTXOCalculationResult, + UTXOCalculationState, +} from './models'; export const defaultSpendResult = (): UTXOCalculationResult => ({ inputs: [], @@ -21,7 +25,7 @@ export const defaultSpendResult = (): UTXOCalculationResult => ({ * @param state the state from previous action function * @returns UTXOCalculationResult */ -function deepCopyState(state) { +function deepCopyState(state: UTXOCalculationState): UTXOCalculationState { return { ...state, amountsToBurn: new Map([...state.amountsToBurn]), @@ -57,7 +61,7 @@ export function calculateUTXOSpend( options: SpendOptionsRequired, utxoCalculationFns: [UTXOCalculationFn, ...UTXOCalculationFn[]], ): UTXOCalculationResult { - const startState = { + const startState: UTXOCalculationState = { amountsToBurn, utxos, amountsToStake, @@ -102,7 +106,7 @@ export function calculateUTXOSpend( stakeOutputs.sort(compareTransferableOutputs); return { stakeOutputs, ...state }; }, - function getAdressMaps({ inputs, inputUTXOs, ...state }) { + function getAddressMaps({ inputs, inputUTXOs, ...state }) { const addressMaps = AddressMaps.fromTransferableInputs( inputs, inputUTXOs, @@ -111,21 +115,20 @@ export function calculateUTXOSpend( ); return { inputs, inputUTXOs, ...state, addressMaps }; }, - ] as UTXOCalculationFn[] + ] satisfies UTXOCalculationFn[] ).reduce((state, next) => { // to prevent mutation we deep copy the arrays and maps before passing off to // the next operator return next(deepCopyState(state)); }, startState); - const { - /* eslint-disable @typescript-eslint/no-unused-vars */ - amountsToBurn: _amountsToBurn, - amountsToStake: _amountsToStake, - fromAddresses: _fromAddresses, - options: _options, - utxos: _utxos, - /* eslint-enable @typescript-eslint/no-unused-vars */ - ...calculationResults - } = result; - return calculationResults; + + const calculationResult: UTXOCalculationResult = { + inputs: result.inputs, + 
inputUTXOs: result.inputUTXOs, + stakeOutputs: result.stakeOutputs, + changeOutputs: result.changeOutputs, + addressMaps: result.addressMaps, + }; + + return calculationResult; } diff --git a/src/vms/utils/calculateSpend/utils/verifySignaturesMatch.ts b/src/vms/utils/calculateSpend/utils/verifySignaturesMatch.ts index 31159cb8a..cc6f970a8 100644 --- a/src/vms/utils/calculateSpend/utils/verifySignaturesMatch.ts +++ b/src/vms/utils/calculateSpend/utils/verifySignaturesMatch.ts @@ -1,6 +1,7 @@ import type { MatchOwnerResult } from '../../../../utils/matchOwners'; import { matchOwners } from '../../../../utils/matchOwners'; -import type { TransferOutput } from '../../../../serializable'; +import type { Address, TransferOutput } from '../../../../serializable'; +import type { SpendOptionsRequired } from '../../../common'; export type verifySigMatchItem<T> = Required<{ sigData: MatchOwnerResult; @@ -12,26 +13,27 @@ export const NoSigMatchError = new Error('No addresses match UTXO owners'); /** * The idea here is to verify that a given set of utxos contains any utxos that share addresses * with the fromAddresses array. If not we should be throwing an error as the tx is being formulated - * incoreectly + * incorrectly * - * @param set the utxo or data set, this can change depening on the calcFn + * @param set the utxo or data set, this can change depending on the calcFn * @param getTransferOutput a callback that takes a utxo and gets the output * @param fromAddresses the addresses the utxos should belong to * @param options * @returns T[] + * @throws Error */ export function verifySignaturesMatch<T>( set: T[], getTransferOutput: (utxo: T) => TransferOutput, - fromAddresses, - options, -): verifySigMatchItem<T>[] { + fromAddresses: readonly Address[], + options: SpendOptionsRequired, +): readonly verifySigMatchItem<T>[] { const outs = set.reduce((acc, data) => { const out = getTransferOutput(data); const sigData = matchOwners( out.outputOwners, - fromAddresses, + [...fromAddresses], options.minIssuanceTime, ); diff --git a/src/vms/utils/consolidateOutputs.ts b/src/vms/utils/consolidateOutputs.ts index 4f7d6f714..c2150b7f4 100644 --- a/src/vms/utils/consolidateOutputs.ts +++ b/src/vms/utils/consolidateOutputs.ts @@ -46,7 +46,7 @@ const combine = (a: TransferableOutput, b: TransferableOutput) => { }; export const consolidateOutputs = ( - outputs: TransferableOutput[], + outputs: readonly TransferableOutput[], ): TransferableOutput[] => { return consolidate(outputs, canCombine, combine); };
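
Note on the fee fixtures above: the expectedDynamicFee values are consistent with a simple weighted-sum gas model, where each complexity dimension is multiplied by the corresponding entry in TEST_DYNAMIC_WEIGHTS, the products are summed, and the total gas is scaled by TEST_DYNAMIC_PRICE. The TypeScript sketch below only illustrates that arithmetic; it is not part of this diff, the helper name estimateDynamicFee and the import paths are assumptions, and the SDK's actual fee utilities may be named and structured differently.

// Illustrative sketch only — not part of this diff.
// Assumes the Dimensions/FeeDimensions/createDimensions exports added above;
// the import path is a guess at the repo layout.
import type { Dimensions } from './src/vms/common/fees/dimensions';
import {
  FeeDimensions,
  createDimensions,
} from './src/vms/common/fees/dimensions';

// Weighted-sum model: gas = sum(complexity[d] * weight[d]); fee = gas * price.
const estimateDynamicFee = (
  complexity: Dimensions,
  weights: Dimensions,
  price: bigint,
): bigint => {
  const dimensions = [
    FeeDimensions.Bandwidth,
    FeeDimensions.DBRead,
    FeeDimensions.DBWrite,
    FeeDimensions.Compute,
  ] as const;

  let gas = 0n;
  for (const dimension of dimensions) {
    gas += BigInt(complexity[dimension]) * BigInt(weights[dimension]);
  }

  return gas * price;
};

// BaseTx fixture check: complexity (399, 1, 3, 0) with weights (1, 200, 300, 0)
// gives gas 399 + 200 + 900 = 1_499, and 1_499 * 100n = 149_900n,
// matching that fixture's expectedDynamicFee.
console.log(
  estimateDynamicFee(
    createDimensions(399, 1, 3, 0), // BaseTx expectedComplexity
    createDimensions(1, 200, 300, 0), // TEST_DYNAMIC_WEIGHTS
    100n, // TEST_DYNAMIC_PRICE
  ),
);

The same arithmetic reproduces the other expected fees in the fixture list, e.g. AddSubnetValidatorTx: (460 + 3 * 200 + 3 * 300) * 100 = 196_000n.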