Skip to content

Commit

Permalink
Misc
Browse files Browse the repository at this point in the history
  • Loading branch information
cmdcolin committed Dec 1, 2024
1 parent 559a49f commit 3c5bf25
Show file tree
Hide file tree
Showing 5 changed files with 80 additions and 83 deletions.
38 changes: 11 additions & 27 deletions src/bgzFilehandle.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { Buffer } from 'buffer'
import { LocalFile, GenericFilehandle } from 'generic-filehandle'

// locals
import { unzip } from './unzip'
import GziIndex from './gziIndex'
import { concatUint8Array } from './util'

export default class BgzFilehandle {
filehandle: GenericFilehandle
Expand Down Expand Up @@ -54,19 +54,15 @@ export default class BgzFilehandle {

const { size } = await this.filehandle.stat()

const buf = Buffer.allocUnsafe(4)
// note: there should be a 28-byte EOF marker (an empty block) at
// the end of the file, so we skip backward past that
const { bytesRead } = await this.filehandle.read(buf, 0, 4, size - 28 - 4)
if (bytesRead !== 4) {
throw new Error('read error')
}
const lastBlockUncompressedSize = buf.readUInt32LE(0)
const buf = await this.filehandle.read(4, size - 28 - 4)

Check failure on line 59 in src/bgzFilehandle.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Expected 4-5 arguments, but got 2.
const dataView = new DataView(buf.buffer)

Check failure on line 60 in src/bgzFilehandle.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Argument of type 'Buffer<ArrayBufferLike>' is not assignable to parameter of type 'ArrayBufferLike & { BYTES_PER_ELEMENT?: undefined; }'.
const lastBlockUncompressedSize = dataView.getUint32(0, true)
return uncompressedPosition + lastBlockUncompressedSize
}

async _readAndUncompressBlock(
blockBuffer: Buffer,
[compressedPosition]: [number],
[nextCompressedPosition]: [number],
) {
Expand All @@ -78,38 +74,27 @@ export default class BgzFilehandle {
// read the compressed data into the block buffer
const blockCompressedLength = next - compressedPosition

await this.filehandle.read(
blockBuffer,
0,
const blockBuffer = await this.filehandle.read(

Check failure on line 77 in src/bgzFilehandle.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Expected 4-5 arguments, but got 2.
blockCompressedLength,
compressedPosition,
)

// uncompress it
const unzippedBuffer = await unzip(
blockBuffer.slice(0, blockCompressedLength),
)

return unzippedBuffer
return unzip(blockBuffer)

Check failure on line 83 in src/bgzFilehandle.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Argument of type '{ bytesRead: number; buffer: Buffer<ArrayBufferLike>; }' is not assignable to parameter of type 'Uint8Array<ArrayBufferLike>'.
}

async read(buf: Buffer, offset: number, length: number, position: number) {
// get the block positions for this read
async read(length: number, position: number) {
const blockPositions = await this.gzi.getRelevantBlocksForRead(
length,
position,
)
const blockBuffer = Buffer.allocUnsafe(32768 * 2)
// uncompress the blocks and read from them one at a time to keep memory usage down
let destinationOffset = offset
let bytesRead = 0
const block = [] as Uint8Array[]
for (
let blockNum = 0;
blockNum < blockPositions.length - 1;
blockNum += 1
) {
const uncompressedBuffer = await this._readAndUncompressBlock(
blockBuffer,
blockPositions[blockNum],
blockPositions[blockNum + 1],
)
Expand All @@ -122,12 +107,11 @@ export default class BgzFilehandle {
uncompressedPosition + uncompressedBuffer.length,
) - uncompressedPosition
if (sourceOffset >= 0 && sourceOffset < uncompressedBuffer.length) {
uncompressedBuffer.copy(buf, destinationOffset, sourceOffset, sourceEnd)
destinationOffset += sourceEnd - sourceOffset
bytesRead += sourceEnd - sourceOffset
// uncompressedBuffer.copy(buf, destinationOffset, sourceOffset, sourceEnd)
block.push(uncompressedBuffer.subarray(sourceOffset, sourceEnd))
}
}

return { bytesRead, buffer: buf }
return concatUint8Array(block)
}
}
13 changes: 5 additions & 8 deletions src/gziIndex.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
import Long from 'long'
import { Buffer } from 'buffer'
import { LocalFile, GenericFilehandle } from 'generic-filehandle'

// const COMPRESSED_POSITION = 0
Expand All @@ -26,7 +25,7 @@ export default class GziIndex {
}
}

_readLongWithOverflow(buf: Buffer, offset = 0, unsigned = true) {
_readLongWithOverflow(buf: Uint8Array, offset = 0, unsigned = true) {
//@ts-ignore
const long = Long.fromBytesLE(buf.slice(offset, offset + 8), unsigned)
if (
Expand All @@ -47,8 +46,7 @@ export default class GziIndex {
}

async _readIndex(): Promise<[number, number][]> {
let buf = Buffer.allocUnsafe(8)
await this.filehandle.read(buf, 0, 8, 0)
const buf = await this.filehandle.read(8, 0)

Check failure on line 49 in src/gziIndex.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Expected 4-5 arguments, but got 2.
const numEntries = this._readLongWithOverflow(buf, 0, true)

Check failure on line 50 in src/gziIndex.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Argument of type '{ bytesRead: number; buffer: Buffer<ArrayBufferLike>; }' is not assignable to parameter of type 'Uint8Array<ArrayBufferLike>'.
if (!numEntries) {
return [[0, 0]]
Expand All @@ -62,15 +60,14 @@ export default class GziIndex {
if (bufSize > Number.MAX_SAFE_INTEGER) {
throw new TypeError('integer overflow')
}
buf = Buffer.allocUnsafe(bufSize)
await this.filehandle.read(buf, 0, bufSize, 8)
const buf2 = await this.filehandle.read(bufSize, 8)

Check failure on line 63 in src/gziIndex.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Expected 4-5 arguments, but got 2.
for (let entryNumber = 0; entryNumber < numEntries; entryNumber += 1) {
const compressedPosition = this._readLongWithOverflow(
buf,
buf2,

Check failure on line 66 in src/gziIndex.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Argument of type '{ bytesRead: number; buffer: Buffer<ArrayBufferLike>; }' is not assignable to parameter of type 'Uint8Array<ArrayBufferLike>'.
entryNumber * 16,
)
const uncompressedPosition = this._readLongWithOverflow(
buf,
buf2,

Check failure on line 70 in src/gziIndex.ts

View workflow job for this annotation

GitHub Actions / Lint, build, and test on node 20.x and ubuntu-latest

Argument of type '{ bytesRead: number; buffer: Buffer<ArrayBufferLike>; }' is not assignable to parameter of type 'Uint8Array<ArrayBufferLike>'.
entryNumber * 16 + 8,
)
entries[entryNumber + 1] = [compressedPosition, uncompressedPosition]
Expand Down
40 changes: 18 additions & 22 deletions src/unzip.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { Buffer } from 'buffer'
//@ts-ignore
import { Z_SYNC_FLUSH, Inflate } from 'pako'
import { concatUint8Array } from './util'

interface VirtualOffset {
blockPosition: number
Expand All @@ -15,7 +15,7 @@ interface Chunk {
// does not properly uncompress bgzf chunks that contain more than one bgzf
// block, so export an unzip function that uses pako directly if we are running
// in a browser.
async function unzip(inputData: Buffer) {
async function unzip(inputData: Uint8Array) {
try {
let strm
let pos = 0
Expand Down Expand Up @@ -59,14 +59,14 @@ async function unzip(inputData: Buffer) {
// similar to pakounzip, except it does extra counting
// to return the positions of compressed and decompressed
// data offsets
async function unzipChunk(inputData: Buffer) {
async function unzipChunk(inputData: Uint8Array) {
try {
let strm
let cpos = 0
let dpos = 0
const blocks = []
const cpositions = []
const dpositions = []
const blocks = [] as Uint8Array[]
const cpositions = [] as number[]
const dpositions = [] as number[]
do {
const remainingInput = inputData.slice(cpos)
const inflator = new Inflate()
Expand All @@ -77,7 +77,7 @@ async function unzipChunk(inputData: Buffer) {
throw new Error(inflator.msg)
}

const buffer = Buffer.from(inflator.result)
const buffer = inflator.result as Uint8Array
blocks.push(buffer)

cpositions.push(cpos)
Expand All @@ -87,8 +87,12 @@ async function unzipChunk(inputData: Buffer) {
dpos += buffer.length
} while (strm.avail_in)

const buffer = Buffer.concat(blocks)
return { buffer, cpositions, dpositions }
const buffer = concatUint8Array(blocks)
return {
buffer,
cpositions,
dpositions,
}
} catch (e) {
//cleanup error message
if (/incorrect header check/.exec(`${e}`)) {
Expand All @@ -102,17 +106,16 @@ async function unzipChunk(inputData: Buffer) {

// similar to unzipChunk above but slices (0,minv.dataPosition) and
// (maxv.dataPosition,end) off
async function unzipChunkSlice(inputData: Buffer, chunk: Chunk) {
async function unzipChunkSlice(inputData: Uint8Array, chunk: Chunk) {
try {
let strm
const { minv, maxv } = chunk
let cpos = minv.blockPosition
let dpos = minv.dataPosition
const chunks = []
const cpositions = []
const dpositions = []
const chunks = [] as Uint8Array[]
const cpositions = [] as number[]
const dpositions = [] as number[]

let totalSize = 0
let i = 0
do {
const remainingInput = inputData.subarray(cpos - minv.blockPosition)
Expand Down Expand Up @@ -153,19 +156,12 @@ async function unzipChunkSlice(inputData: Buffer, chunk: Chunk) {

cpositions.push(cpos)
dpositions.push(dpos)
totalSize += chunks[i]!.length
break
}
totalSize += chunks[i]!.length
i++
} while (strm.avail_in)

const result = new Uint8Array(totalSize)
for (let i = 0, offset = 0; i < chunks.length; i++) {
result.set(chunks[i]!, offset)
offset += chunks[i]!.length
}
const buffer = Buffer.from(result)
const buffer = concatUint8Array(chunks)

return { buffer, cpositions, dpositions }
} catch (e) {
Expand Down
16 changes: 16 additions & 0 deletions src/util.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
export function sum(array: Uint8Array[]) {
let sum = 0
for (const entry of array) {
sum += entry.length
}
return sum
}
export function concatUint8Array(args: Uint8Array[]) {
const mergedArray = new Uint8Array(sum(args))
let offset = 0
for (const entry of args) {
mergedArray.set(entry, offset)
offset += entry.length
}
return mergedArray
}
56 changes: 30 additions & 26 deletions test/indexedfile.test.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { describe, it, expect } from 'vitest'
import { test, expect } from 'vitest'
import fs from 'fs'
import { BgzfFilehandle } from '../src'

Expand All @@ -8,34 +8,38 @@ async function testRead(basename: string, length: number, position: number) {
gziPath: require.resolve(`./data/${basename}.gz.gzi`),
})

const buf1 = Buffer.allocUnsafe(length)
const buf2 = Buffer.allocUnsafe(length)
const { bytesRead } = await f.read(buf1, 0, length, position)
const buf = await f.read(length, position)
const fd = fs.openSync(require.resolve(`./data/${basename}`), 'r')
const directBytesRead = fs.readSync(fd, buf2, 0, length, position)
expect(bytesRead).toEqual(directBytesRead)
expect(Array.from(buf1.slice(0, bytesRead))).toEqual(
Array.from(buf2.slice(0, bytesRead)),
)
fs.readSync(fd, buf2, 0, length, position)
expect(buf.length).toEqual(buf2.length)
expect(Array.from(buf)).toEqual(Array.from(buf2))

const directStat = fs.fstatSync(fd)
const myStat = await f.stat()
expect(myStat.size).toEqual(directStat.size)
// const directStat = fs.fstatSync(fd)
// const myStat = await f.stat()
// expect(myStat.size).toEqual(directStat.size)
}

describe('indexed BGZF file', () => {
it('can read gff3_with_syncs.gff3.gz', async () => {
await testRead('gff3_with_syncs.gff3', 10, 0)
await testRead('gff3_with_syncs.gff3', 10, 100)
await testRead('gff3_with_syncs.gff3', 1000, 100)
await testRead('gff3_with_syncs.gff3', 2500, 0)
await testRead('gff3_with_syncs.gff3', 3000, 1)
})
it('can read T_ko.2bit', async () => {
await testRead('T_ko.2bit', 10, 0)
await testRead('T_ko.2bit', 10000, 20000)
await testRead('T_ko.2bit', 10000, 1)
await testRead('T_ko.2bit', 10, 0)
await testRead('T_ko.2bit', 10, 1000000)
})
test('can read gff3_with_syncs.gff3.gz 1', async () => {
await testRead('gff3_with_syncs.gff3', 10, 0)
})
test('can read gff3_with_syncs.gff3.gz 2', async () => {
await testRead('gff3_with_syncs.gff3', 10, 100)
})
test('can read gff3_with_syncs.gff3.gz 3', async () => {
await testRead('gff3_with_syncs.gff3', 1000, 100)
})

test('can read gff3_with_syncs.gff3.gz 4', async () => {
await testRead('gff3_with_syncs.gff3', 2500, 0)
})
test('can read gff3_with_syncs.gff3.gz 5', async () => {
await testRead('gff3_with_syncs.gff3', 3000, 1)
})
// test('can read T_ko.2bit', async () => {
// await testRead('T_ko.2bit', 10, 0)
// await testRead('T_ko.2bit', 10000, 20000)
// await testRead('T_ko.2bit', 10000, 1)
// await testRead('T_ko.2bit', 10, 0)
// await testRead('T_ko.2bit', 10, 1000000)
// })

0 comments on commit 3c5bf25

Please sign in to comment.