
Commit 0323938
cleanup
PabloCastellano committed Sep 3, 2024
1 parent 9b90918 commit 0323938
Showing 8 changed files with 27 additions and 298 deletions.
110 changes: 0 additions & 110 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion package.json
@@ -57,7 +57,6 @@
"@ethersproject/bignumber": "^5.7.0",
"@ethersproject/providers": "^5.7.2",
"ethereumjs-util": "^7.1.5",
"ethers": "^6.13.2",
"rlp": "^3.0.0",
"tslib": "^2.6.2",
"viem": "^2.1.1"
14 changes: 2 additions & 12 deletions src/batches/RawSpanBatch.ts
@@ -23,19 +23,9 @@ type spanBatchPayload struct
export class RawSpanBatch {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
static decode(data: Uint8Array | NestedUint8Array): InnerBatch {
console.log('data0')
// console.log(data[0])
// TODO: implement: prefix ++ payload
// https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49
// const decoded = rlp.decode(data)
// return { data } as InnerBatch

return {
parentHash: data.toString(),
epochNum: 0,
epochHash: '0x',
timestamp: 3,
transactions: []
}
// https://github.com/ethereum-optimism/optimism/blob/375b9766bdf4678253932beae8234cc52f1f46ee/op-node/rollup/derive/span_batch.go#L49
return {} as InnerBatch
}
}
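
For context on the TODO left in `RawSpanBatch.decode`: the linked `span_batch.go` encodes a span batch as `prefix ++ payload`, where the prefix (per the OP Stack specs, as I read them) is two unsigned varints (`rel_timestamp`, `l1_origin_num`) followed by two 20-byte checks. Below is a rough, hedged sketch of that prefix parsing only; it is not this repository's implementation, the payload stays undecoded, and the field names are placeholders.

```ts
// Sketch only: span-batch prefix parsing, assuming the layout
// rel_timestamp (uvarint) ++ l1_origin_num (uvarint) ++ parent_check (20 bytes)
// ++ l1_origin_check (20 bytes). The payload (block_count, origin_bits,
// tx data, ...) is intentionally left undecoded here.
const readUvarint = (data: Uint8Array, offset: number): { value: bigint; next: number } => {
  let value = 0n
  let shift = 0n
  let pos = offset
  for (;;) {
    const byte = data[pos]
    if (byte === undefined) throw new Error('uvarint: unexpected end of data')
    pos += 1
    value |= BigInt(byte & 0x7f) << shift
    if ((byte & 0x80) === 0) return { value, next: pos }
    shift += 7n
  }
}

export const decodeSpanBatchPrefix = (data: Uint8Array) => {
  const relTimestamp = readUvarint(data, 0)
  const l1OriginNum = readUvarint(data, relTimestamp.next)
  let offset = l1OriginNum.next
  const parentCheck = Buffer.from(data.slice(offset, offset + 20)).toString('hex')
  offset += 20
  const l1OriginCheck = Buffer.from(data.slice(offset, offset + 20)).toString('hex')
  offset += 20
  return {
    relTimestamp: relTimestamp.value,
    l1OriginNum: l1OriginNum.value,
    parentCheck: `0x${parentCheck}`,
    l1OriginCheck: `0x${l1OriginCheck}`,
    payload: data.slice(offset) // still TODO, see the span_batch.go link above
  }
}
```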
21 changes: 4 additions & 17 deletions src/batches/batch.ts
@@ -1,4 +1,3 @@
import fs from 'fs'
import rlp, { NestedUint8Array } from 'rlp'
import stream from 'stream'
import zlib from 'zlib'
@@ -43,8 +42,7 @@ enum BatchType
const MAX_BYTES_PER_CHANNEL = 10_000_000

export const parseBatchesData = async (compressedBatches: string): Promise<Batches> => {
console.log('parsing')
const decompressed = await decompressBatches_v0(compressedBatches)
const decompressed = await decompressBatches(compressedBatches)
const decodedBatches: Batches = []
let dataToDecode: Uint8Array = decompressed
while (dataToDecode?.length) {
@@ -55,24 +53,15 @@ export const parseBatchesData = async (compressedBatches: string): Promise<Batch
return decodedBatches
}

export const decompressBatches_v0 = async (compressedBatches: string): Promise<Buffer> => {
export const decompressBatches = async (compressedBatches: string): Promise<Buffer> => {
const inputBuffer = Buffer.from(compressedBatches, 'hex')
console.log('decompressing', inputBuffer.length, 'bytes')

fs.writeFileSync('blob1_ts.test', inputBuffer)
console.log('written blob1_ts.test')

//console.log(inputBuffer)
console.log(compressedBatches.slice(0, 100))
console.log(inputBuffer.toString('hex').slice(0, 100))

try {
// Decompress the input buffer
const decompress = zlib.createInflate({
maxOutputLength: MAX_BYTES_PER_CHANNEL,
finishFlush: zlib.constants.Z_SYNC_FLUSH
finishFlush: zlib.constants.Z_SYNC_FLUSH // required when decompressing span batches, otherwise "Error: unexpected end of file"
})
//const decompress = zlib.createInflate()
const decompressStream = stream.Readable.from(inputBuffer)

const chunks: Buffer[] = []
@@ -86,15 +75,13 @@ export const decompressBatches_v0 = async (compressedBatches: string): Promise<B
}
}

export const decodeBatch = (decodedBatch: Uint8Array | NestedUint8Array): Batch => {
const decodeBatch = (decodedBatch: Uint8Array | NestedUint8Array): Batch => {
if (decodedBatch.length < 1) throw new Error('Batch too short')
// first byte is the batch type
switch (decodedBatch[0]) {
case BatchType.SingularBatch:
return { inner: SingularBatch.decode(decodedBatch.slice(1)) }
case BatchType.SpanBatch:
console.error('SpanBatch is not implemented')
//return { inner: decodedBatch }
return { inner: RawSpanBatch.decode(decodedBatch.slice(1)) }
default:
throw new Error(`Unrecognized batch type: ${decodedBatch[0]}`)
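
A side note on the `Z_SYNC_FLUSH` comment kept in this diff: below is a self-contained sketch of the same decompression step, using only Node's built-in `zlib` and `stream`. The 10 MB cap mirrors `MAX_BYTES_PER_CHANNEL` from the file above; the function name is illustrative, not part of this codebase.

```ts
// Minimal sketch: inflate a hex-encoded channel. finishFlush: Z_SYNC_FLUSH lets
// the inflater return what it has even when the zlib stream is not terminated
// with Z_FINISH (the "unexpected end of file" case mentioned above), and
// maxOutputLength caps the decompressed size at the channel limit.
import stream from 'stream'
import zlib from 'zlib'

const MAX_BYTES_PER_CHANNEL = 10_000_000

export const inflateChannel = async (channelHex: string): Promise<Buffer> => {
  const input = Buffer.from(channelHex, 'hex')
  const inflate = zlib.createInflate({
    maxOutputLength: MAX_BYTES_PER_CHANNEL,
    finishFlush: zlib.constants.Z_SYNC_FLUSH
  })
  const chunks: Buffer[] = []
  for await (const chunk of stream.Readable.from(input).pipe(inflate)) {
    chunks.push(chunk as Buffer)
  }
  return Buffer.concat(chunks)
}
```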
19 changes: 3 additions & 16 deletions src/frames/frame.ts
@@ -24,7 +24,7 @@ const BYTES_4_LENGTH = 4 * BYTE_CHARS
const BYTES_13_LENGTH = 13 * BYTE_CHARS
const BYTES_16_LENGTH = 16 * BYTE_CHARS

export const extractFrames_v0 = (data: string): FramesWithCompressedData => {
export const extractFrames = (data: string): FramesWithCompressedData => {
const frames: FramesWithCompressedData = []
let offset = 0
while (offset < data.length) {
@@ -50,9 +50,7 @@ export const extractFrames_v0 = (data: string): FramesWithCompressedData => {
const frameDataLength = frameDataLengthInBytes * BYTE_CHARS

if (frameDataLengthInBytes > MAX_FRAME_LENGTH || offset + frameDataLength > data.length) {
throw new Error(
`Frame data length is too large or exceeds buffer length: ${frameDataLengthInBytes}, ${data.length}, ${offset + frameDataLength}`
)
throw new Error('Frame data length is too large or exceeds buffer length')
}

const frameData = `${data.slice(offset, offset + frameDataLength)}`
@@ -73,7 +71,7 @@ export const extractFrames_v0 = (data: string): FramesWithCompressedData => {
return frames
}

export const addBatchesToFrame_v0 = async (frame: FrameWithCompressedData): Promise<Frame> => {
export const addBatchesToFrame = async (frame: FrameWithCompressedData): Promise<Frame> => {
const batches = await parseBatchesData(frame.data)
return {
channelId: frame.channelId,
@@ -82,14 +80,3 @@ export const addBatchesToFrame_v0 = async (frame: FrameWithCompressedData): Prom
batches
}
}

export const addBatchesToFrame_v1 = async (channel: string): Promise<Frame> => {
const batches = await parseBatchesData(channel)
return {
// FIXME
channelId: 'asdfg',
frameNumber: 0,
isLast: true,
batches
}
}
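
For readers following the hex offsets in `extractFrames`: the byte layout being walked is, per the OP Stack derivation spec, `channel_id (16 bytes) ++ frame_number (uint16) ++ frame_data_length (uint32) ++ frame_data ++ is_last (1 byte)`. A buffer-based sketch of parsing one frame is below; it is an illustration rather than this repository's hex-string implementation, and the 1,000,000-byte cap is an assumption based on the spec's frame-size limit.

```ts
// Sketch only: parse one frame from a Buffer at a given byte offset, assuming
// the layout channel_id (16) ++ frame_number (u16 BE) ++ frame_data_length
// (u32 BE) ++ frame_data ++ is_last (1). Bounds checks mirror the intent of
// the checks in extractFrames above.
interface ParsedFrame {
  channelId: string
  frameNumber: number
  data: Buffer
  isLast: boolean
}

const MAX_FRAME_DATA_BYTES = 1_000_000 // assumed cap, see lead-in above

export const parseFrameAt = (buf: Buffer, offset: number): { frame: ParsedFrame; next: number } => {
  const channelId = buf.subarray(offset, offset + 16).toString('hex')
  const frameNumber = buf.readUInt16BE(offset + 16)
  const dataLength = buf.readUInt32BE(offset + 18)
  if (dataLength > MAX_FRAME_DATA_BYTES) {
    throw new Error('Frame data length is too large')
  }
  const dataStart = offset + 22
  if (dataStart + dataLength + 1 > buf.length) {
    throw new Error('Frame exceeds buffer length')
  }
  const data = buf.subarray(dataStart, dataStart + dataLength)
  const isLast = buf[dataStart + dataLength] === 1
  return { frame: { channelId, frameNumber, data, isLast }, next: dataStart + dataLength + 1 }
}
```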
64 changes: 9 additions & 55 deletions src/index.ts
@@ -3,26 +3,6 @@ import fs from 'fs'
import path from 'path'
import { BatcherTransaction, extractBatcherTransaction } from './transactions/batcherTransaction'

/**
* Convert a binary file to a text file where text is the hexadecimal representation.
* @param inputFilePath Path to the binary input file.
* @param outputFilePath Path to the output text file.
*/
function convertBinaryToHex(inputFilePath: string, outputFilePath: string): void {
// Read the binary file into a Buffer
const binaryData = fs.readFileSync(inputFilePath)

// Convert the binary data to a hexadecimal string
const hexString = binaryData.toString('hex')

// TODO: add leading 0x

// Write the hexadecimal string to the output file
fs.writeFileSync(outputFilePath, hexString)

console.log(`Successfully converted ${inputFilePath} to hexadecimal format and saved as ${outputFilePath}`)
}

export const testWithExampleData = async (
filePath: string = 'example-data/calldata.txt'
): Promise<BatcherTransaction> => {
@@ -43,42 +23,16 @@ export const decodeBatcherTransactionCalldata = async (calldata: string): Promis
return await extractBatcherTransaction(calldata)
}

//convertBinaryToHex('opstack_blobs_19538908.bin', 'opstack_blobs_19538908.txt')
//
// testWithExampleData()
// .then((result) => {
// console.log('Batch:')
// console.log(result)
// // console.log('Frames:')
// // console.log(result['frames'])
// // console.log('Frame batches:')
// // console.log(result['frames'][0]['batches'])
// // console.log('Transactions:')
// // console.log(result['frames'][0]['batches'][0]['inner']['transactions'])
// })
// .catch((error) => {
// console.error('An error occurred:', error)
// })

/*
testWithExampleData(
'example-data/calldata_tx_0xa47e5c4c1b03e60c878612737ff777484d21da0f0740c42d0343aa73d92764c6-pre-delta'
)
.then((result) => {
console.log(result) // Output the result
//decodeOptimismBlob('opstack_blobs_19538908.txt')
//decodeOptimismBlob()
})
.catch((error) => {
console.error('An error occurred:', error)
})
*/

testWithExampleData('opstack_blobs_19538908.txt')
testWithExampleData()
.then((result) => {
console.log(result) // Output the result
//decodeOptimismBlob('opstack_blobs_19538908.txt')
//decodeOptimismBlob()
console.log('Batch:')
console.log(result)
// console.log('Frames:')
// console.log(result['frames'])
// console.log('Frame batches:')
// console.log(result['frames'][0]['batches'])
// console.log('Transactions:')
// console.log(result['frames'][0]['batches'][0]['inner']['transactions'])
})
.catch((error) => {
console.error('An error occurred:', error)
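
After this cleanup, the entry points exercised above are `testWithExampleData` and `decodeBatcherTransactionCalldata`. A hedged usage sketch follows; the import path and file paths are assumptions, and the `frames` property is taken from the commented-out logging in the diff.

```ts
// Usage sketch: decode the bundled example calldata, or calldata read from a
// hex file. Assumes the exports shown in this diff; paths are illustrative.
import fs from 'fs'
import { decodeBatcherTransactionCalldata, testWithExampleData } from './src/index'

const main = async () => {
  const example = await testWithExampleData() // reads example-data/calldata.txt
  console.log('Frames in example batcher tx:', example.frames.length)

  const calldata = fs.readFileSync('example-data/calldata.txt', 'utf8').trim()
  const tx = await decodeBatcherTransactionCalldata(calldata)
  console.log(tx)
}

main().catch((error) => console.error('An error occurred:', error))
```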
