style: ran prettier
iuwqyir committed Jan 17, 2024
1 parent f7686de commit 8d556a1
Showing 5 changed files with 63 additions and 62 deletions.
6 changes: 3 additions & 3 deletions src/batches/RawSpanBatch.ts
@@ -1,10 +1,10 @@
 import rlp, { NestedUint8Array } from 'rlp'
-import { InnerBatch } from './batch';
+import { InnerBatch } from './batch'
 
 export class RawSpanBatch {
   static decode(data: Uint8Array | NestedUint8Array): InnerBatch {
     // TODO: implement
-    const decoded = rlp.decode(data);
+    const decoded = rlp.decode(data)
     return {} as InnerBatch
   }
-}
+}
32 changes: 17 additions & 15 deletions src/batches/SingularBatch.ts
@@ -6,21 +6,23 @@ import { parseTransaction } from 'viem/op-stack'
 
 export class SingularBatch {
   static decode(data: Uint8Array | NestedUint8Array): InnerBatch {
-    const decoded = rlp.decode(data);
-    const transactionList = (decoded[4] as NestedUint8Array).map(tx => `0x${Buffer.from(tx as Uint8Array).toString('hex')}`)
+    const decoded = rlp.decode(data)
+    const transactionList = (decoded[4] as NestedUint8Array).map(
+      (tx) => `0x${Buffer.from(tx as Uint8Array).toString('hex')}`
+    )
     return {
-      parentHash: `0x${Buffer.from(decoded[0] as Uint8Array).toString('hex')}`,
-      epochNum: BigNumber.from(decoded[1]).toNumber(),
-      epochHash: `0x${Buffer.from(decoded[2] as Uint8Array).toString('hex')}`,
-      timestamp: BigNumber.from(decoded[3]).toNumber(),
-      transactions: transactionList.map((txData: any) => {
-        const transactionBuffer = Buffer.from(txData.slice(2), 'hex');
-        const transactionHash: `0x${string}` = `0x${keccak(transactionBuffer).toString('hex')}`;
-        return {
-          ...parseTransaction(txData),
-          hash: transactionHash
-        }
-      })
-    };
+      parentHash: `0x${Buffer.from(decoded[0] as Uint8Array).toString('hex')}`,
+      epochNum: BigNumber.from(decoded[1]).toNumber(),
+      epochHash: `0x${Buffer.from(decoded[2] as Uint8Array).toString('hex')}`,
+      timestamp: BigNumber.from(decoded[3]).toNumber(),
+      transactions: transactionList.map((txData: any) => {
+        const transactionBuffer = Buffer.from(txData.slice(2), 'hex')
+        const transactionHash: `0x${string}` = `0x${keccak(transactionBuffer).toString('hex')}`
+        return {
+          ...parseTransaction(txData),
+          hash: transactionHash
+        }
+      })
+    }
   }
 }
33 changes: 16 additions & 17 deletions src/batches/batch.ts
@@ -28,7 +28,6 @@ export type InnerBatch = {
   transactions: Transactions
 }
 
-
 type Batch = {
   inner: InnerBatch
 }
@@ -55,32 +54,32 @@ export const parseBatchesData = async (compressedBatches: string): Promise<Batch
 }
 
 const decompressBatches = async (compressedBatches: string): Promise<Buffer> => {
-  const inputBuffer = Buffer.from(compressedBatches, 'hex');
+  const inputBuffer = Buffer.from(compressedBatches, 'hex')
   try {
     // Decompress the input buffer
-    const decompress = zlib.createInflate({ maxOutputLength: MAX_BYTES_PER_CHANNEL });
-    const decompressStream = stream.Readable.from(inputBuffer);
+    const decompress = zlib.createInflate({ maxOutputLength: MAX_BYTES_PER_CHANNEL })
+    const decompressStream = stream.Readable.from(inputBuffer)
 
-    const chunks: Buffer[] = [];
+    const chunks: Buffer[] = []
     for await (const chunk of decompressStream.pipe(decompress)) {
-      chunks.push(chunk);
+      chunks.push(chunk)
     }
-    return Buffer.concat(chunks);
+    return Buffer.concat(chunks)
   } catch (err) {
-    console.error('Error in decompression:', err);
-    throw err;
+    console.error('Error in decompression:', err)
+    throw err
   }
 }
 
 const decodeBatch = (decodedBatch: Uint8Array | NestedUint8Array): Batch => {
   if (decodedBatch.length < 1) throw new Error('Batch too short')
-  // first byte is the batch type
+  // first byte is the batch type
   switch (decodedBatch[0]) {
-    case BatchType.SingularBatch:
-      return { inner: SingularBatch.decode(decodedBatch.slice(1)) }
-    case BatchType.SpanBatch:
-      return { inner: RawSpanBatch.decode(decodedBatch.slice(1)) }
-    default:
-      throw new Error(`Unrecognized batch type: ${decodedBatch[0]}`)
-  }
+    case BatchType.SingularBatch:
+      return { inner: SingularBatch.decode(decodedBatch.slice(1)) }
+    case BatchType.SpanBatch:
+      return { inner: RawSpanBatch.decode(decodedBatch.slice(1)) }
+    default:
+      throw new Error(`Unrecognized batch type: ${decodedBatch[0]}`)
+  }
 }
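
A minimal usage sketch for the decoder above (not part of this commit): it assumes parseBatchesData resolves to an array of Batch objects wrapping the InnerBatch fields decoded by SingularBatch, and that the import path matches the file tree in this commit; the input value is a placeholder.

// Hypothetical usage sketch; assumed import path and placeholder input
import { parseBatchesData } from './src/batches/batch'

const channelHex = '' // placeholder: zlib-compressed, RLP-encoded batch data of one channel, hex without 0x

parseBatchesData(channelHex)
  .then((batches) => {
    for (const { inner } of batches) {
      console.log(inner.epochNum, inner.timestamp, inner.transactions.length)
    }
  })
  .catch((err) => console.error('decoding failed:', err))
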
44 changes: 22 additions & 22 deletions src/frames/frame.ts
@@ -1,11 +1,11 @@
-import { Batches, parseBatchesData } from "../batches/batch";
+import { Batches, parseBatchesData } from '../batches/batch'
 
 type FrameWithCompressedData = {
-  channelId: string;
-  frameNumber: number;
-  data: string;
-  isLast: boolean;
-};
+  channelId: string
+  frameNumber: number
+  data: string
+  isLast: boolean
+}
 
 export type FramesWithCompressedData = FrameWithCompressedData[]
 
@@ -15,7 +15,7 @@ export type Frame = Omit<FrameWithCompressedData, 'data'> & {
 
 export type Frames = Frame[]
 
-const MAX_FRAME_LENGTH = 1_000_000;
+const MAX_FRAME_LENGTH = 1_000_000
 
 const BYTE_CHARS = 2
 const BYTES_1_LENGTH = 1 * BYTE_CHARS
@@ -25,42 +25,42 @@ const BYTES_13_LENGTH = 13 * BYTE_CHARS
 const BYTES_16_LENGTH = 16 * BYTE_CHARS
 
 export const extractFrames = (data: string): FramesWithCompressedData => {
-  const frames: FramesWithCompressedData = [];
-  let offset = 0;
+  const frames: FramesWithCompressedData = []
+  let offset = 0
   while (offset < data.length) {
-    if (data.length - offset < BYTES_13_LENGTH) { // Minimum frame size
-      throw new Error("Incomplete frame data");
+    if (data.length - offset < BYTES_13_LENGTH) {
+      // Minimum frame size
+      throw new Error('Incomplete frame data')
     }
 
     const channelId = data.slice(offset, offset + BYTES_16_LENGTH)
-    offset += BYTES_16_LENGTH;
+    offset += BYTES_16_LENGTH
 
     const frameNumber = Number(`0x${data.slice(offset, offset + BYTES_2_LENGTH)}`)
-    offset += BYTES_2_LENGTH;
+    offset += BYTES_2_LENGTH
 
     const frameDataLengthInBytes = Number(`0x${data.slice(offset, offset + BYTES_4_LENGTH)}`)
-    offset += BYTES_4_LENGTH;
+    offset += BYTES_4_LENGTH
     const frameDataLength = frameDataLengthInBytes * BYTE_CHARS
 
-
     if (frameDataLengthInBytes > MAX_FRAME_LENGTH || offset + frameDataLength > data.length) {
-      throw new Error("Frame data length is too large or exceeds buffer length");
+      throw new Error('Frame data length is too large or exceeds buffer length')
    }
 
-    const frameData = `${data.slice(offset, offset + frameDataLength)}`;
-    offset += frameDataLength;
+    const frameData = `${data.slice(offset, offset + frameDataLength)}`
+    offset += frameDataLength
 
     const isLast = Number(`0x${data.slice(offset, offset + BYTES_1_LENGTH)}`) !== 0
-    offset += BYTES_1_LENGTH;
+    offset += BYTES_1_LENGTH
 
-    frames.push({ channelId, frameNumber, data: frameData, isLast });
+    frames.push({ channelId, frameNumber, data: frameData, isLast })
   }
 
   if (frames.length === 0) {
-    throw new Error("Was not able to find any frames");
+    throw new Error('Was not able to find any frames')
  }
 
-  return frames;
+  return frames
 }
 
 export const addBatchesToFrame = async (frame: FrameWithCompressedData): Promise<Frame> => {
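
For readers following the offsets in extractFrames above, this is the hex layout it walks, two hex characters per byte, matching the BYTES_*_LENGTH constants; the concrete values below are placeholders, not data from this repository.

// Hypothetical illustration of one frame as extractFrames expects it (placeholder values)
const exampleFrameHex =
  '00112233445566778899aabbccddeeff' + // channelId: 16 bytes
  '0000' + // frameNumber: 2 bytes
  '00000004' + // frameDataLength: 4 bytes, counts the bytes of frameData
  'deadbeef' + // frameData: frameDataLength bytes (not valid compressed batch data)
  '01' // isLast: 1 byte, non-zero marks the final frame of the channel

A string like this should yield a single frame from extractFrames, though the placeholder payload would fail later at decompression.
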
10 changes: 5 additions & 5 deletions src/transactions/batcherTransaction.ts
@@ -1,20 +1,20 @@
-import { Frames, extractFrames, addBatchesToFrame } from "../frames/frame"
+import { Frames, extractFrames, addBatchesToFrame } from '../frames/frame'
 
 export type BatcherTransaction = {
   version: number
   frames: Frames
 }
 
-const DERIVATION_VERSION_0 = 0;
+const DERIVATION_VERSION_0 = 0
 
 export const extractBatcherTransaction = async (calldata: string): Promise<BatcherTransaction> => {
   if (calldata.length === 0) {
-    throw new Error("data array must not be empty");
+    throw new Error('data array must not be empty')
   }
 
   const version = Number(calldata.slice(0, 4))
   if (version !== DERIVATION_VERSION_0) {
-    throw new Error(`invalid derivation format byte: got ${version}`);
+    throw new Error(`invalid derivation format byte: got ${version}`)
   }
 
   // Skip the derivation version byte and 0x at the start
@@ -25,4 +25,4 @@ export const extractBatcherTransaction = async (calldata: string): Promise<BatcherTransaction> => {
     frames.push(frame)
   }
   return { version, frames }
-}
+}
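
At the top level, a minimal sketch of how the exported entry point might be driven, assuming the 0x-prefixed calldata of an L1 batcher transaction (first byte 0x00, the derivation version) and an assumed import path.

// Hypothetical usage sketch; the calldata value must be supplied by the caller
import { extractBatcherTransaction } from './src/transactions/batcherTransaction'

const calldata = process.argv[2] ?? '' // e.g. the input of a batch-inbox transaction, starting with 0x00

extractBatcherTransaction(calldata)
  .then(({ version, frames }) => console.log(`version ${version}, ${frames.length} frame(s)`))
  .catch((err) => console.error('extraction failed:', err))
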
