diff --git a/package-lock.json b/package-lock.json
index b3c183e..54358d2 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -12,7 +12,6 @@
         "@ethersproject/bignumber": "^5.7.0",
         "@ethersproject/providers": "^5.7.2",
         "ethereumjs-util": "^7.1.5",
-        "ethers": "^6.13.2",
         "rlp": "^3.0.0",
         "tslib": "^2.6.2",
         "viem": "^2.1.1"
@@ -2220,11 +2219,6 @@
         "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
       }
     },
-    "node_modules/aes-js": {
-      "version": "4.0.0-beta.5",
-      "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz",
-      "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q=="
-    },
     "node_modules/agent-base": {
       "version": "7.1.0",
       "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz",
@@ -3486,68 +3480,6 @@
         "rlp": "bin/rlp"
       }
     },
-    "node_modules/ethers": {
-      "version": "6.13.2",
-      "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.13.2.tgz",
-      "integrity": "sha512-9VkriTTed+/27BGuY1s0hf441kqwHJ1wtN2edksEtiRvXx+soxRX3iSXTfFqq2+YwrOqbDoTHjIhQnjJRlzKmg==",
-      "funding": [
-        {
-          "type": "individual",
-          "url": "https://github.com/sponsors/ethers-io/"
-        },
-        {
-          "type": "individual",
-          "url": "https://www.buymeacoffee.com/ricmoo"
-        }
-      ],
-      "dependencies": {
-        "@adraffy/ens-normalize": "1.10.1",
-        "@noble/curves": "1.2.0",
-        "@noble/hashes": "1.3.2",
-        "@types/node": "18.15.13",
-        "aes-js": "4.0.0-beta.5",
-        "tslib": "2.4.0",
-        "ws": "8.17.1"
-      },
-      "engines": {
-        "node": ">=14.0.0"
-      }
-    },
-    "node_modules/ethers/node_modules/@adraffy/ens-normalize": {
-      "version": "1.10.1",
-      "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz",
-      "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw=="
-    },
-    "node_modules/ethers/node_modules/@types/node": {
-      "version": "18.15.13",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz",
-      "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q=="
-    },
-    "node_modules/ethers/node_modules/tslib": {
-      "version": "2.4.0",
-      "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
-      "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
-    },
-    "node_modules/ethers/node_modules/ws": {
-      "version": "8.17.1",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
-      "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
-      "engines": {
-        "node": ">=10.0.0"
-      },
-      "peerDependencies": {
-        "bufferutil": "^4.0.1",
-        "utf-8-validate": ">=5.0.2"
-      },
-      "peerDependenciesMeta": {
-        "bufferutil": {
-          "optional": true
-        },
-        "utf-8-validate": {
-          "optional": true
-        }
-      }
-    },
     "node_modules/evp_bytestokey": {
       "version": "1.0.3",
       "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz",
@@ -11304,11 +11236,6 @@
       "dev": true,
       "requires": {}
     },
-    "aes-js": {
-      "version": "4.0.0-beta.5",
-      "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-4.0.0-beta.5.tgz",
-      "integrity": "sha512-G965FqalsNyrPqgEGON7nIx1e/OVENSgiEIzyC63haUMuvNnwIgIjMs52hlTCKhkBny7A2ORNlfY9Zu+jmGk1Q=="
-    },
    "agent-base": {
      "version": "7.1.0",
      "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.0.tgz",
@@ -12262,43 +12189,6 @@
        }
      }
    },
-    "ethers": {
-      "version": "6.13.2",
-      "resolved": "https://registry.npmjs.org/ethers/-/ethers-6.13.2.tgz",
-      "integrity": "sha512-9VkriTTed+/27BGuY1s0hf441kqwHJ1wtN2edksEtiRvXx+soxRX3iSXTfFqq2+YwrOqbDoTHjIhQnjJRlzKmg==",
-      "requires": {
-        "@adraffy/ens-normalize": "1.10.1",
-        "@noble/curves": "1.2.0",
-        "@noble/hashes": "1.3.2",
-        "@types/node": "18.15.13",
-        "aes-js": "4.0.0-beta.5",
-        "tslib": "2.4.0",
-        "ws": "8.17.1"
-      },
-      "dependencies": {
-        "@adraffy/ens-normalize": {
-          "version": "1.10.1",
-          "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz",
-          "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw=="
-        },
-        "@types/node": {
-          "version": "18.15.13",
-          "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz",
-          "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q=="
-        },
-        "tslib": {
-          "version": "2.4.0",
-          "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz",
-          "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ=="
-        },
-        "ws": {
-          "version": "8.17.1",
-          "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
-          "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
-          "requires": {}
-        }
-      }
-    },
    "evp_bytestokey": {
      "version": "1.0.3",
      "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz",
diff --git a/package.json b/package.json
index f3c9f8b..99a1f89 100644
--- a/package.json
+++ b/package.json
@@ -57,7 +57,6 @@
    "@ethersproject/bignumber": "^5.7.0",
    "@ethersproject/providers": "^5.7.2",
    "ethereumjs-util": "^7.1.5",
-    "ethers": "^6.13.2",
    "rlp": "^3.0.0",
    "tslib": "^2.6.2",
    "viem": "^2.1.1"
diff --git a/src/batches/RawSpanBatch.ts b/src/batches/RawSpanBatch.ts
index 8edea15..388d696 100644
--- a/src/batches/RawSpanBatch.ts
+++ b/src/batches/RawSpanBatch.ts
@@ -23,19 +23,9 @@ type spanBatchPayload struct {
 export class RawSpanBatch {
   // eslint-disable-next-line @typescript-eslint/no-unused-vars
   static decode(data: Uint8Array | NestedUint8Array): InnerBatch {
-    console.log('data0')
-    // console.log(data[0])
     // TODO: implement: prefix ++ payload
-    // https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49
     // const decoded = rlp.decode(data)
-    // return { data } as InnerBatch
-
-    return {
-      parentHash: data.toString(),
-      epochNum: 0,
-      epochHash: '0x',
-      timestamp: 3,
-      transactions: []
-    }
+    // https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49
+    return {} as InnerBatch
  }
}
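For reference on the TODO above: in the linked span_batch.go, a raw span batch is prefix ++ payload, and the prefix is two uvarints (rel_timestamp, l1_origin_num) followed by two 20-byte hash commitments (parent_check, l1_origin_check). A minimal sketch of the prefix decode, assuming that layout; the helper and type below are illustrative, not part of this repo:

// Sketch only: span batch prefix per the linked span_batch.go.
type SpanBatchPrefix = {
  relTimestamp: bigint // timestamp relative to the L2 genesis timestamp
  l1OriginNum: bigint // block number of the L1 origin block
  parentCheck: Uint8Array // first 20 bytes of the parent L2 block hash
  l1OriginCheck: Uint8Array // first 20 bytes of the L1 origin block hash
}

// Unsigned LEB128 varint: returns the value and the number of bytes consumed.
const readUvarint = (data: Uint8Array, offset: number): [bigint, number] => {
  let value = 0n
  let shift = 0n
  let read = 0
  for (;;) {
    const byte = data[offset + read]
    if (byte === undefined) throw new Error('varint: unexpected end of data')
    read++
    value |= BigInt(byte & 0x7f) << shift
    if ((byte & 0x80) === 0) return [value, read]
    shift += 7n
  }
}

const decodeSpanBatchPrefix = (data: Uint8Array): SpanBatchPrefix => {
  let offset = 0
  const [relTimestamp, bytesRead] = readUvarint(data, offset)
  offset += bytesRead
  const [l1OriginNum, bytesRead2] = readUvarint(data, offset)
  offset += bytesRead2
  const parentCheck = data.slice(offset, offset + 20)
  const l1OriginCheck = data.slice(offset + 20, offset + 40)
  return { relTimestamp, l1OriginNum, parentCheck, l1OriginCheck }
}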
"sha512-9VkriTTed+/27BGuY1s0hf441kqwHJ1wtN2edksEtiRvXx+soxRX3iSXTfFqq2+YwrOqbDoTHjIhQnjJRlzKmg==", - "requires": { - "@adraffy/ens-normalize": "1.10.1", - "@noble/curves": "1.2.0", - "@noble/hashes": "1.3.2", - "@types/node": "18.15.13", - "aes-js": "4.0.0-beta.5", - "tslib": "2.4.0", - "ws": "8.17.1" - }, - "dependencies": { - "@adraffy/ens-normalize": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@adraffy/ens-normalize/-/ens-normalize-1.10.1.tgz", - "integrity": "sha512-96Z2IP3mYmF1Xg2cDm8f1gWGf/HUVedQ3FMifV4kG/PQ4yEP51xDtRAEfhVNt5f/uzpNkZHwWQuUcu6D6K+Ekw==" - }, - "@types/node": { - "version": "18.15.13", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.13.tgz", - "integrity": "sha512-N+0kuo9KgrUQ1Sn/ifDXsvg0TTleP7rIy4zOBGECxAljqvqfqpTfzx0Q1NUedOixRMBfe2Whhb056a42cWs26Q==" - }, - "tslib": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", - "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" - }, - "ws": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", - "requires": {} - } - } - }, "evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", diff --git a/package.json b/package.json index f3c9f8b..99a1f89 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,6 @@ "@ethersproject/bignumber": "^5.7.0", "@ethersproject/providers": "^5.7.2", "ethereumjs-util": "^7.1.5", - "ethers": "^6.13.2", "rlp": "^3.0.0", "tslib": "^2.6.2", "viem": "^2.1.1" diff --git a/src/batches/RawSpanBatch.ts b/src/batches/RawSpanBatch.ts index 8edea15..388d696 100644 --- a/src/batches/RawSpanBatch.ts +++ b/src/batches/RawSpanBatch.ts @@ -23,19 +23,9 @@ type spanBatchPayload struct { export class RawSpanBatch { // eslint-disable-next-line @typescript-eslint/no-unused-vars static decode(data: Uint8Array | NestedUint8Array): InnerBatch { - console.log('data0') - // console.log(data[0]) // TODO: implement: prefix ++ payload - // https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49 // const decoded = rlp.decode(data) - // return { data } as InnerBatch - - return { - parentHash: data.toString(), - epochNum: 0, - epochHash: '0x', - timestamp: 3, - transactions: [] - } + // https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49 + return {} as InnerBatch } } diff --git a/src/batches/batch.ts b/src/batches/batch.ts index ec9e7cd..b88dcf9 100644 --- a/src/batches/batch.ts +++ b/src/batches/batch.ts @@ -1,4 +1,3 @@ -import fs from 'fs' import rlp, { NestedUint8Array } from 'rlp' import stream from 'stream' import zlib from 'zlib' @@ -43,8 +42,7 @@ enum BatchType { const MAX_BYTES_PER_CHANNEL = 10_000_000 export const parseBatchesData = async (compressedBatches: string): Promise => { - console.log('parsing') - const decompressed = await decompressBatches_v0(compressedBatches) + const decompressed = await decompressBatches(compressedBatches) const decodedBatches: Batches = [] let dataToDecode: Uint8Array = decompressed while (dataToDecode?.length) { @@ -55,24 +53,15 @@ export const parseBatchesData = async (compressedBatches: string): Promise => { +export const decompressBatches = async (compressedBatches: string): Promise 
diff --git a/src/frames/frame.ts b/src/frames/frame.ts
index f97e75f..853c0be 100644
--- a/src/frames/frame.ts
+++ b/src/frames/frame.ts
@@ -24,7 +24,7 @@ const BYTES_4_LENGTH = 4 * BYTE_CHARS
 const BYTES_13_LENGTH = 13 * BYTE_CHARS
 const BYTES_16_LENGTH = 16 * BYTE_CHARS

-export const extractFrames_v0 = (data: string): FramesWithCompressedData => {
+export const extractFrames = (data: string): FramesWithCompressedData => {
   const frames: FramesWithCompressedData = []
   let offset = 0
   while (offset < data.length) {
@@ -50,9 +50,7 @@
     const frameDataLength = frameDataLengthInBytes * BYTE_CHARS

     if (frameDataLengthInBytes > MAX_FRAME_LENGTH || offset + frameDataLength > data.length) {
-      throw new Error(
-        `Frame data length is too large or exceeds buffer length: ${frameDataLengthInBytes}, ${data.length}, ${offset + frameDataLength}`
-      )
+      throw new Error('Frame data length is too large or exceeds buffer length')
     }

     const frameData = `${data.slice(offset, offset + frameDataLength)}`
@@ -73,7 +71,7 @@
   return frames
 }

-export const addBatchesToFrame_v0 = async (frame: FrameWithCompressedData): Promise<Frame> => {
+export const addBatchesToFrame = async (frame: FrameWithCompressedData): Promise<Frame> => {
   const batches = await parseBatchesData(frame.data)
   return {
     channelId: frame.channelId,
@@ -82,14 +80,3 @@ export const addBatchesToFrame_v0 = async (frame: FrameWithCompressedData): Promise<Frame> => {
     batches
   }
 }
-
-export const addBatchesToFrame_v1 = async (channel: string): Promise<Frame> => {
-  const batches = await parseBatchesData(channel)
-  return {
-    // FIXME
-    channelId: 'asdfg',
-    frameNumber: 0,
-    isLast: true,
-    batches
-  }
-}
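Context for the byte-length constants above (BYTE_CHARS is the number of hex characters per byte, since extractFrames walks hex text): a version-0 channel frame is laid out as channel_id (16 bytes) ++ frame_number (uint16, big-endian) ++ frame_data_length (uint32, big-endian) ++ frame_data ++ is_last (1 byte). A sketch of the same parse over raw bytes instead of hex text; the names and return shape are illustrative:

// Illustrative only: version-0 frame layout per the OP Stack derivation spec.
type RawFrame = {
  channelId: string
  frameNumber: number
  data: Uint8Array
  isLast: boolean
}

const parseFrame = (bytes: Uint8Array, offset: number): { frame: RawFrame; next: number } => {
  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength)
  const channelId = Buffer.from(bytes.subarray(offset, offset + 16)).toString('hex')
  const frameNumber = view.getUint16(offset + 16) // big-endian by default
  const dataLength = view.getUint32(offset + 18)
  const dataStart = offset + 22
  const data = bytes.subarray(dataStart, dataStart + dataLength)
  const isLast = bytes[dataStart + dataLength] === 1
  return { frame: { channelId, frameNumber, data, isLast }, next: dataStart + dataLength + 1 }
}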
diff --git a/src/index.ts b/src/index.ts
index b39fae8..c9f38f4 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -3,26 +3,6 @@ import fs from 'fs'
 import path from 'path'
 import { BatcherTransaction, extractBatcherTransaction } from './transactions/batcherTransaction'

-/**
- * Convert a binary file to a text file where text is the hexadecimal representation.
- * @param inputFilePath Path to the binary input file.
- * @param outputFilePath Path to the output text file.
- */
-function convertBinaryToHex(inputFilePath: string, outputFilePath: string): void {
-  // Read the binary file into a Buffer
-  const binaryData = fs.readFileSync(inputFilePath)
-
-  // Convert the binary data to a hexadecimal string
-  const hexString = binaryData.toString('hex')
-
-  // TODO: add leading 0x
-
-  // Write the hexadecimal string to the output file
-  fs.writeFileSync(outputFilePath, hexString)
-
-  console.log(`Successfully converted ${inputFilePath} to hexadecimal format and saved as ${outputFilePath}`)
-}
-
 export const testWithExampleData = async (
   filePath: string = 'example-data/calldata.txt'
 ): Promise<BatcherTransaction> => {
@@ -43,42 +23,16 @@
   return await extractBatcherTransaction(calldata)
 }
-//convertBinaryToHex('opstack_blobs_19538908.bin', 'opstack_blobs_19538908.txt')
-//
-// testWithExampleData()
-//   .then((result) => {
-//     console.log('Batch:')
-//     console.log(result)
-//     // console.log('Frames:')
-//     // console.log(result['frames'])
-//     // console.log('Frame batches:')
-//     // console.log(result['frames'][0]['batches'])
-//     // console.log('Transactions:')
-//     // console.log(result['frames'][0]['batches'][0]['inner']['transactions'])
-//   })
-//   .catch((error) => {
-//     console.error('An error occurred:', error)
-//   })
-
-/*
-testWithExampleData(
-  'example-data/calldata_tx_0xa47e5c4c1b03e60c878612737ff777484d21da0f0740c42d0343aa73d92764c6-pre-delta'
-)
-  .then((result) => {
-    console.log(result) // Output the result
-    //decodeOptimismBlob('opstack_blobs_19538908.txt')
-    //decodeOptimismBlob()
-  })
-  .catch((error) => {
-    console.error('An error occurred:', error)
-  })
-*/
-
-testWithExampleData('opstack_blobs_19538908.txt')
+testWithExampleData()
   .then((result) => {
-    console.log(result) // Output the result
-    //decodeOptimismBlob('opstack_blobs_19538908.txt')
-    //decodeOptimismBlob()
+    console.log('Batch:')
+    console.log(result)
+    // console.log('Frames:')
+    // console.log(result['frames'])
+    // console.log('Frame batches:')
+    // console.log(result['frames'][0]['batches'])
+    // console.log('Transactions:')
+    // console.log(result['frames'][0]['batches'][0]['inner']['transactions'])
   })
   .catch((error) => {
     console.error('An error occurred:', error)
   })
diff --git a/src/index2.ts b/src/index2.ts
index 3e1b16d..bbf9c44 100644
--- a/src/index2.ts
+++ b/src/index2.ts
@@ -1,10 +1,8 @@
 import fs from 'fs'
 import rlp from 'rlp'
-import { Readable } from 'stream'
-import zlib from 'zlib'
-import { decompressBatches_v0 } from './batches/batch'
-import type { Frames, FramesWithCompressedData } from './frames/frame'
-//import { extractFrames_v0 } from './frames/frame'
+import { decompressBatches } from './batches/batch'
+import type { FramesWithCompressedData } from './frames/frame'
+
 /**
  * Read the binary file and split it into chunks of the specified size.
  * @param buffer - The binary data from the file.
@@ -80,46 +78,6 @@ function processChannelData(datas: Uint8Array[]): FramesWithCompressedData {
   return frames
 }

-/**
- * Function to incrementally decompress a zlib-compressed data stream.
- * @param inputBuffer - The input buffer containing zlib-compressed data.
- * @returns A promise that resolves with the decompressed data.
- */
-function decompressIncrementally(inputBuffer: Buffer): Promise<Buffer> {
-  return new Promise((resolve, reject) => {
-    const inflate = zlib.createInflate({ finishFlush: zlib.constants.Z_SYNC_FLUSH })
-    // zlib.createInflate complains like "Error: unexpected end of file"
-    // zlib.createInflateRaw() complains like "Error: invalid stored block lengths"
-    const chunks: Buffer[] = []
-
-    // Create a readable stream from the input buffer
-    const inputStream = new Readable({
-      read() {
-        this.push(inputBuffer)
-        this.push(null) // Signal end of input
-      }
-    })
-
-    // Pipe the input stream into the inflate stream
-    inputStream.pipe(inflate)
-
-    // Collect the decompressed chunks
-    inflate.on('data', (chunk) => {
-      chunks.push(chunk)
-    })
-
-    // Resolve the promise once decompression is complete
-    inflate.on('end', () => {
-      resolve(Buffer.concat(chunks))
-    })
-
-    // Handle errors during decompression
-    inflate.on('error', (err) => {
-      reject(err)
-    })
-  })
-}
-
 /**
  * Reads a bit list from a Uint8Array.
  * @param length - The number of bits to read.
@@ -238,20 +196,10 @@
     datas.push(blobData.slice(4, declaredLength + 4))
   }

-  //const rawFrames = extractFrames_v0(calldata.slice(4))
-  //const rawFrames2 = extractFrames_v0(datas.toString())
-  const frames: Frames = []
   const channel_parts: string[] = []
-  //const rawFrames = processChannelData(datas.slice(4))
   const rawFrames = processChannelData(datas)
-  // console.log(rawFrames)

   for (const rawFrame of rawFrames) {
-    console.log('adding frame')
-    console.log(rawFrame.data.slice(0, 100))
-    const buffer = Buffer.from(rawFrame.data, 'hex')
-    console.log(buffer.slice(0, 100))
-
     channel_parts.push(rawFrame.data)
   }
   const channel = Buffer.from(channel_parts.join(''), 'hex')
@@ -260,33 +208,13 @@
   //console.log(channel.slice(0, 100).toString())
   console.log(channel.toString('hex').slice(0, 100))

-  /*
-  decompressIncrementally(channel)
-    .then((decompressedData) => {
-      console.log('Decompressed data:', decompressedData.toString())
-    })
-    .catch((err) => {
-      console.error('Error decompressing data:', err)
-    })
-
-  decompressBatches_v0(channel_parts.join(''))
-    .then((result) => {
-      console.log(result) // Output the result decompressed
-      console.log('result of', result.length, 'bytes:', result.slice(0, 100))
-    })
-    .catch((error) => {
-      console.error('An error occurred:', error)
-    })
-  */
-
   const fullChannel = channel_parts.join('')
-  const decompressed = await decompressBatches_v0(fullChannel)
+  const decompressed = await decompressBatches(fullChannel)

   const dataToDecode: Uint8Array = decompressed
   const { data: decoded, remainder } = rlp.decode(dataToDecode, true)
-  console.log('DECODED:', typeof decoded)
-  console.log(decoded)
+  console.log('result of', decoded.length, 'bytes:', decoded.slice(0, 100), '\n')

   if (decoded[0] !== 1) {
     throw new Error('decoded value is not a span batch')
   }
@@ -295,10 +223,6 @@
     return
   }

-  //console.log('timestamp since L2 genesis:', readVarint(decoded.slice(1))) // Decode the varint
-
-  //console.log('result of', result.length, 'bytes:', result.slice(0, 100))
-
   let currentOffset = 1

   const timestampResult = readVarint(decoded, currentOffset)
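One detail worth calling out in processFile above: rlp.decode(dataToDecode, true) runs the rlp package in stream mode, which returns the first decoded item together with the untouched remainder instead of throwing on trailing bytes. parseBatchesData relies on the same mechanism to peel batches off a channel one at a time. A small standalone example:

import rlp from 'rlp'

// Two RLP items back to back, the way consecutive batches sit in a channel.
const first = rlp.encode('batch-0')
const second = rlp.encode('batch-1')
const concatenated = new Uint8Array([...first, ...second])

let rest: Uint8Array = concatenated
while (rest.length > 0) {
  // Stream mode: decode one item, keep the remainder for the next pass.
  const { data, remainder } = rlp.decode(rest, true)
  console.log(Buffer.from(data as Uint8Array).toString())
  rest = remainder
}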
diff --git a/src/transactions/batcherTransaction.ts b/src/transactions/batcherTransaction.ts
index 8e66675..6def1ea 100644
--- a/src/transactions/batcherTransaction.ts
+++ b/src/transactions/batcherTransaction.ts
@@ -1,4 +1,4 @@
-import { Frames, addBatchesToFrame_v0, extractFrames_v0 } from '../frames/frame'
+import { Frames, addBatchesToFrame, extractFrames } from '../frames/frame'

 export type BatcherTransaction = {
   version: number
@@ -13,8 +13,6 @@ export const extractBatcherTransaction = async (calldata: string): Promise<BatcherTransaction> => {
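Although the hunk above is cut off, the renamed helpers compose into the decoder's full path: extractBatcherTransaction takes batcher calldata, splits off the version byte, runs extractFrames on the rest, and then addBatchesToFrame on each frame. A hedged sketch of that composition; the version-byte handling below is a simplification for illustration, not the function's actual body:

import { Frames, addBatchesToFrame, extractFrames } from '../frames/frame'

// Illustrative composition; extractBatcherTransaction's real body is not shown in this diff.
const decodeBatcherCalldata = async (calldata: string): Promise<{ version: number; frames: Frames }> => {
  const hex = calldata.startsWith('0x') ? calldata.slice(2) : calldata
  const version = parseInt(hex.slice(0, 2), 16) // first byte: derivation version
  const rawFrames = extractFrames(hex.slice(2)) // remainder: concatenated frames
  const frames = await Promise.all(rawFrames.map(addBatchesToFrame))
  return { version, frames }
}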