From 0aefaeae47cd9a60e604a7003e56aa5235954a29 Mon Sep 17 00:00:00 2001 From: Ib Green Date: Sun, 14 May 2023 14:57:50 -0400 Subject: [PATCH] feat(compression): Integrate fflate library --- .../src/lib/deflate-compression-pako.ts | 108 +++++++++++++++ .../src/lib/deflate-compression-zlib.ts | 52 ++++++++ .../src/lib/deflate-compression.ts | 89 +++---------- .../src/lib/gzip-compression-pako.ts | 126 ++++++++++++++++++ .../src/lib/gzip-compression-zlib.ts | 52 ++++++++ .../compression/src/lib/gzip-compression.ts | 92 +++++++++++-- .../compression/src/lib/utils/pako-utils.ts | 24 ++++ modules/compression/src/workers/worker.ts | 3 + modules/compression/test/compression.spec.js | 25 +++- 9 files changed, 493 insertions(+), 78 deletions(-) create mode 100644 modules/compression/src/lib/deflate-compression-pako.ts create mode 100644 modules/compression/src/lib/deflate-compression-zlib.ts create mode 100644 modules/compression/src/lib/gzip-compression-pako.ts create mode 100644 modules/compression/src/lib/gzip-compression-zlib.ts create mode 100644 modules/compression/src/lib/utils/pako-utils.ts diff --git a/modules/compression/src/lib/deflate-compression-pako.ts b/modules/compression/src/lib/deflate-compression-pako.ts new file mode 100644 index 0000000000..bc8464bf54 --- /dev/null +++ b/modules/compression/src/lib/deflate-compression-pako.ts @@ -0,0 +1,108 @@ +// loaders.gl, MIT license +import type {CompressionOptions} from './compression'; +import {Compression} from './compression'; +import {getPakoError} from './utils/pako-utils'; +import pako from 'pako'; // https://bundlephobia.com/package/pako + +export type DeflateCompressionOptions = CompressionOptions & { + deflate?: pako.InflateOptions & pako.DeflateOptions & {useZlib?: boolean}; +}; + +/** + * DEFLATE compression / decompression + * Using PAKO library + */ +export class DeflateCompression extends Compression { + readonly name: string = 'deflate'; + readonly extensions: string[] = []; + readonly contentEncodings = ['deflate']; + readonly isSupported = true; + + readonly options: DeflateCompressionOptions; + + private _chunks: ArrayBuffer[] = []; + + constructor(options: DeflateCompressionOptions = {}) { + super(options); + this.options = options; + } + + async compress(input: ArrayBuffer): Promise { + return this.compressSync(input); + } + + async decompress(input: ArrayBuffer): Promise { + return this.decompressSync(input); + } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const pakoOptions: pako.DeflateOptions = this.options?.deflate || {}; + const inputArray = new Uint8Array(input); + return pako.deflate(inputArray, pakoOptions).buffer; + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const pakoOptions: pako.InflateOptions = this.options?.deflate || {}; + const inputArray = new Uint8Array(input); + return pako.inflate(inputArray, pakoOptions).buffer; + } + + async *compressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const pakoOptions: pako.DeflateOptions = this.options?.deflate || {}; + const pakoProcessor = new pako.Deflate(pakoOptions); + yield* this.transformBatches(pakoProcessor, asyncIterator); + } + + async *decompressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const pakoOptions: pako.InflateOptions = this.options?.deflate || {}; + const pakoProcessor = new pako.Inflate(pakoOptions); + yield* this.transformBatches(pakoProcessor, asyncIterator); + } + + async *transformBatches( + pakoProcessor: pako.Inflate | pako.Deflate, + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + pakoProcessor.onData = this._onData.bind(this); + pakoProcessor.onEnd = this._onEnd.bind(this); + for await (const chunk of asyncIterator) { + const uint8Array = new Uint8Array(chunk); + const ok = pakoProcessor.push(uint8Array, false); // false -> not last chunk + if (!ok) { + throw new Error(`${getPakoError()}write`); + } + const chunks = this._getChunks(); + yield* chunks; + } + + // End + const emptyChunk = new Uint8Array(0); + const ok = pakoProcessor.push(emptyChunk, true); // true -> last chunk + if (!ok) { + // For some reason we get error but it still works??? + // throw new Error(getPakoError() + 'end'); + } + const chunks = this._getChunks(); + yield* chunks; + } + + _onData(chunk) { + this._chunks.push(chunk); + } + + _onEnd(status) { + if (status !== 0) { + throw new Error(getPakoError(status) + this._chunks.length); + } + } + + _getChunks(): ArrayBuffer[] { + const chunks = this._chunks; + this._chunks = []; + return chunks; + } +} diff --git a/modules/compression/src/lib/deflate-compression-zlib.ts b/modules/compression/src/lib/deflate-compression-zlib.ts new file mode 100644 index 0000000000..900a760174 --- /dev/null +++ b/modules/compression/src/lib/deflate-compression-zlib.ts @@ -0,0 +1,52 @@ +// loaders.gl, MIT license +import {isBrowser, toArrayBuffer} from '@loaders.gl/loader-utils'; +import {promisify2} from '@loaders.gl/loader-utils'; +import type {CompressionOptions} from './compression'; +import {Compression} from './compression'; +import * as zlib from 'zlib'; +import type {ZlibOptions} from 'zlib'; + +export type DeflateCompressionOptions = CompressionOptions & { + deflate?: ZlibOptions; +}; + +/** + * DEFLATE compression / decompression + * Using Node.js zlib library (works under Node only) + */ +export class DeflateCompression extends Compression { + readonly name: string = 'deflate'; + readonly extensions: string[] = []; + readonly contentEncodings = ['deflate']; + readonly isSupported = isBrowser; + + readonly options: DeflateCompressionOptions; + + constructor(options: DeflateCompressionOptions = {}) { + super(options); + this.options = options; + if (!isBrowser) { + throw new Error('zlib only available under Node.js'); + } + } + + async compress(input: ArrayBuffer): Promise { + const buffer = await promisify2(zlib.deflate)(input, this.options.deflate || {}); + return toArrayBuffer(buffer); + } + + async decompress(input: ArrayBuffer): Promise { + const buffer = await promisify2(zlib.inflate)(input, this.options.deflate || {}); + return toArrayBuffer(buffer); + } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const buffer = zlib.deflateSync(input, this.options.deflate || {}); + return toArrayBuffer(buffer); + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const buffer = zlib.inflateSync(input, this.options.deflate || {}); + return toArrayBuffer(buffer); + } +} diff --git a/modules/compression/src/lib/deflate-compression.ts b/modules/compression/src/lib/deflate-compression.ts index 5068485b09..15c86a52b6 100644 --- a/modules/compression/src/lib/deflate-compression.ts +++ b/modules/compression/src/lib/deflate-compression.ts @@ -1,81 +1,60 @@ -// DEFLATE +// loaders.gl, MIT license +import {promisify1} from '@loaders.gl/loader-utils'; import type {CompressionOptions} from './compression'; import {Compression} from './compression'; -import {isBrowser, toArrayBuffer} from '@loaders.gl/loader-utils'; -import pako from 'pako'; // https://bundlephobia.com/package/pako -import zlib from 'zlib'; -import {promisify1} from '@loaders.gl/loader-utils'; + +import {deflate, inflate, deflateSync, inflateSync} from 'fflate'; +import type {DeflateOptions} from 'fflate'; // https://bundlephobia.com/package/pako export type DeflateCompressionOptions = CompressionOptions & { - deflate?: pako.InflateOptions & pako.DeflateOptions & {useZlib?: boolean}; + deflate?: DeflateOptions; }; /** * DEFLATE compression / decompression */ export class DeflateCompression extends Compression { - readonly name: string = 'deflate'; + readonly name: string = 'fflate'; readonly extensions: string[] = []; - readonly contentEncodings = ['deflate']; + readonly contentEncodings = ['fflate', 'gzip, zlib']; readonly isSupported = true; readonly options: DeflateCompressionOptions; - private _chunks: ArrayBuffer[] = []; - constructor(options: DeflateCompressionOptions = {}) { super(options); this.options = options; } async compress(input: ArrayBuffer): Promise { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.deflate?.useZlib) { - const buffer = this.options.deflate?.gzip - ? await promisify1(zlib.gzip)(input) - : await promisify1(zlib.deflate)(input); - return toArrayBuffer(buffer); - } - return this.compressSync(input); + // const options = this.options?.gzip || {}; + const inputArray = new Uint8Array(input); + const outputArray = await promisify1(deflate)(inputArray); // options - overload pick + return outputArray.buffer; } async decompress(input: ArrayBuffer): Promise { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.deflate?.useZlib) { - const buffer = this.options.deflate?.gzip - ? await promisify1(zlib.gunzip)(input) - : await promisify1(zlib.inflate)(input); - return toArrayBuffer(buffer); - } - return this.decompressSync(input); + const inputArray = new Uint8Array(input); + const outputArray = await promisify1(inflate)(inputArray); + return outputArray.buffer; } compressSync(input: ArrayBuffer): ArrayBuffer { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.deflate?.useZlib) { - const buffer = this.options.deflate?.gzip ? zlib.gzipSync(input) : zlib.deflateSync(input); - return toArrayBuffer(buffer); - } - const pakoOptions: pako.DeflateOptions = this.options?.deflate || {}; + const options = this.options?.deflate || {}; const inputArray = new Uint8Array(input); - return pako.deflate(inputArray, pakoOptions).buffer; + return deflateSync(inputArray, options).buffer; } decompressSync(input: ArrayBuffer): ArrayBuffer { - // On Node.js we can use built-in zlib - if (!isBrowser && this.options.deflate?.useZlib) { - const buffer = this.options.deflate?.gzip ? zlib.gunzipSync(input) : zlib.inflateSync(input); - return toArrayBuffer(buffer); - } - const pakoOptions: pako.InflateOptions = this.options?.deflate || {}; const inputArray = new Uint8Array(input); - return pako.inflate(inputArray, pakoOptions).buffer; + return inflateSync(inputArray).buffer; } + /* async *compressBatches( asyncIterator: AsyncIterable | Iterable ): AsyncIterable { - const pakoOptions: pako.DeflateOptions = this.options?.deflate || {}; + const pakoOptions: pako.DeflateOptions = this.options?.fflate || {}; const pakoProcessor = new pako.Deflate(pakoOptions); yield* this.transformBatches(pakoProcessor, asyncIterator); } @@ -83,7 +62,7 @@ export class DeflateCompression extends Compression { async *decompressBatches( asyncIterator: AsyncIterable | Iterable ): AsyncIterable { - const pakoOptions: pako.InflateOptions = this.options?.deflate || {}; + const pakoOptions: pako.InflateOptions = this.options?.fflate || {}; const pakoProcessor = new pako.Inflate(pakoOptions); yield* this.transformBatches(pakoProcessor, asyncIterator); } @@ -130,29 +109,5 @@ export class DeflateCompression extends Compression { this._chunks = []; return chunks; } - - // TODO - For some reason we don't get the error message from pako in _onEnd? - _getError(code: number = 0): string { - const MESSAGES = { - /* Z_NEED_DICT 2 */ - 2: 'need dictionary', - /* Z_STREAM_END 1 */ - 1: 'stream end', - /* Z_OK 0 */ - 0: '', - /* Z_ERRNO (-1) */ - '-1': 'file error', - /* Z_STREAM_ERROR (-2) */ - '-2': 'stream error', - /* Z_DATA_ERROR (-3) */ - '-3': 'data error', - /* Z_MEM_ERROR (-4) */ - '-4': 'insufficient memory', - /* Z_BUF_ERROR (-5) */ - '-5': 'buffer error', - /* Z_VERSION_ERROR (-6) */ - '-6': 'incompatible version' - }; - return `${this.name}: ${MESSAGES[code]}`; - } + */ } diff --git a/modules/compression/src/lib/gzip-compression-pako.ts b/modules/compression/src/lib/gzip-compression-pako.ts new file mode 100644 index 0000000000..021d96d2e5 --- /dev/null +++ b/modules/compression/src/lib/gzip-compression-pako.ts @@ -0,0 +1,126 @@ +// loaders.gl, MIT license +import type {CompressionOptions} from './compression'; +import {Compression} from './compression'; +import {getPakoError} from './utils/pako-utils'; +import pako from 'pako'; // https://bundlephobia.com/package/pako + +export type DeflateCompressionOptions = CompressionOptions & { + deflate?: pako.InflateOptions & pako.DeflateOptions & {useZlib?: boolean}; +}; + +export type GZipCompressionOptions = CompressionOptions & { + gzip?: pako.InflateOptions & pako.DeflateOptions; +}; + +/** + * GZIP compression / decompression + * Using PAKO library. + */ +export class GZipCompression extends Compression { + readonly name: string = 'gzip'; + readonly extensions = ['gz', 'gzip']; + readonly contentEncodings = ['gzip', 'x-gzip']; + readonly isSupported = true; + + constructor(options?: GZipCompressionOptions) { + super({...options}); + } +} + +/** + * DEFLATE compression / decompression + */ +export class DeflateCompression extends Compression { + readonly name: string = 'deflate'; + readonly extensions: string[] = []; + readonly contentEncodings = ['deflate']; + readonly isSupported = true; + + readonly options: DeflateCompressionOptions; + + private _chunks: ArrayBuffer[] = []; + + constructor(options: DeflateCompressionOptions = {}) { + super(options); + this.options = options; + } + + async compress(input: ArrayBuffer): Promise { + return this.compressSync(input); + } + + async decompress(input: ArrayBuffer): Promise { + return this.decompressSync(input); + } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const pakoOptions: pako.DeflateOptions = this.options?.deflate || {}; + const inputArray = new Uint8Array(input); + return pako.deflate(inputArray, pakoOptions).buffer; + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const pakoOptions: pako.InflateOptions = this.options?.deflate || {}; + const inputArray = new Uint8Array(input); + return pako.inflate(inputArray, pakoOptions).buffer; + } + + async *compressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const pakoOptions: pako.DeflateOptions = this.options?.deflate || {}; + const pakoProcessor = new pako.Deflate(pakoOptions); + yield* this.transformBatches(pakoProcessor, asyncIterator); + } + + async *decompressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const pakoOptions: pako.InflateOptions = this.options?.deflate || {}; + const pakoProcessor = new pako.Inflate(pakoOptions); + yield* this.transformBatches(pakoProcessor, asyncIterator); + } + + async *transformBatches( + pakoProcessor: pako.Inflate | pako.Deflate, + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + pakoProcessor.onData = this._onData.bind(this); + pakoProcessor.onEnd = this._onEnd.bind(this); + for await (const chunk of asyncIterator) { + const uint8Array = new Uint8Array(chunk); + const ok = pakoProcessor.push(uint8Array, false); // false -> not last chunk + if (!ok) { + throw new Error(`${getPakoError()}write`); + } + const chunks = this._getChunks(); + yield* chunks; + } + + // End + const emptyChunk = new Uint8Array(0); + const ok = pakoProcessor.push(emptyChunk, true); // true -> last chunk + if (!ok) { + // For some reason we get error but it still works??? + // throw new Error(getPakoError() + 'end'); + } + const chunks = this._getChunks(); + yield* chunks; + } + + _onData(chunk) { + this._chunks.push(chunk); + } + + _onEnd(status) { + if (status !== 0) { + throw new Error(getPakoError(status) + this._chunks.length); + } + } + + _getChunks(): ArrayBuffer[] { + const chunks = this._chunks; + this._chunks = []; + return chunks; + } +} diff --git a/modules/compression/src/lib/gzip-compression-zlib.ts b/modules/compression/src/lib/gzip-compression-zlib.ts new file mode 100644 index 0000000000..6b9a8c8cb6 --- /dev/null +++ b/modules/compression/src/lib/gzip-compression-zlib.ts @@ -0,0 +1,52 @@ +// loaders.gl, MIT license +import type {CompressionOptions} from './compression'; +import {Compression} from './compression'; +import {isBrowser, toArrayBuffer} from '@loaders.gl/loader-utils'; +import {promisify2} from '@loaders.gl/loader-utils'; +import * as zlib from 'zlib'; +import type {ZlibOptions} from 'zlib'; + +export type DeflateCompressionOptions = CompressionOptions & { + deflate?: ZlibOptions; +}; + +/** + * GZIP compression / decompression + * Using Node.js zlib library (works under Node only) + */ +export class GzipCompression extends Compression { + readonly name: string = 'gzip'; + readonly extensions = ['gz', 'gzip']; + readonly contentEncodings = ['gzip', 'x-gzip']; + readonly isSupported = isBrowser; + + readonly options: DeflateCompressionOptions; + + constructor(options: DeflateCompressionOptions = {}) { + super(options); + this.options = options; + if (!isBrowser) { + throw new Error('zlib only available under Node.js'); + } + } + + async compress(input: ArrayBuffer): Promise { + const buffer = await promisify2(zlib.gzip)(input, this.options.deflate || {}); + return toArrayBuffer(buffer); + } + + async decompress(input: ArrayBuffer): Promise { + const buffer = await promisify2(zlib.gunzip)(input, this.options.deflate || {}); + return toArrayBuffer(buffer); + } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const buffer = zlib.gzipSync(input, this.options.deflate || {}); + return toArrayBuffer(buffer); + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + const buffer = zlib.gunzipSync(input, this.options.deflate || {}); + return toArrayBuffer(buffer); + } +} diff --git a/modules/compression/src/lib/gzip-compression.ts b/modules/compression/src/lib/gzip-compression.ts index 11312fe277..e19152d4e5 100644 --- a/modules/compression/src/lib/gzip-compression.ts +++ b/modules/compression/src/lib/gzip-compression.ts @@ -1,23 +1,99 @@ -// GZIP -// import {isBrowser} from '@loaders.gl/loader-utils'; +// loaders.gl, MIT license +import {promisify1} from '@loaders.gl/loader-utils'; import type {CompressionOptions} from './compression'; -import {DeflateCompression} from './deflate-compression'; -import pako from 'pako'; // https://bundlephobia.com/package/pako +import {Compression} from './compression'; +import type {GzipOptions, AsyncGzipOptions} from 'fflate'; +import {gzip, gunzip, gzipSync, gunzipSync, Gzip, Gunzip} from 'fflate'; // https://bundlephobia.com/package/pako export type GZipCompressionOptions = CompressionOptions & { - gzip?: pako.InflateOptions & pako.DeflateOptions; + gzip?: GzipOptions | AsyncGzipOptions; }; /** * GZIP compression / decompression */ -export class GZipCompression extends DeflateCompression { +export class GZipCompression extends Compression { readonly name: string = 'gzip'; readonly extensions = ['gz', 'gzip']; readonly contentEncodings = ['gzip', 'x-gzip']; readonly isSupported = true; - constructor(options?: GZipCompressionOptions) { - super({...options, deflate: {...options?.gzip, gzip: true}}); + readonly options: GZipCompressionOptions; + private _chunks: ArrayBuffer[] = []; + + constructor(options: GZipCompressionOptions = {}) { + super({...options}); + this.options = options; + } + + async compress(input: ArrayBuffer): Promise { + // const options = this.options?.gzip || {}; + const inputArray = new Uint8Array(input); + const outputArray = await promisify1(gzip)(inputArray); // options - overload pick + return outputArray.buffer; + } + + async decompress(input: ArrayBuffer): Promise { + // const options = this.options?.gzip || {}; + const inputArray = new Uint8Array(input); + const outputArray = await promisify1(gunzip)(inputArray); // options - overload pick + return outputArray.buffer; + } + + compressSync(input: ArrayBuffer): ArrayBuffer { + const options = this.options?.gzip || {}; + const inputArray = new Uint8Array(input); + return gzipSync(inputArray, options).buffer; + } + + decompressSync(input: ArrayBuffer): ArrayBuffer { + // const options = this.options?.gzip || {}; + const inputArray = new Uint8Array(input); + return gunzipSync(inputArray).buffer; + } + + async *compressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const options = this.options?.gzip || {}; + const streamProcessor = new Gzip(options); + streamProcessor.ondata = this._onData.bind(this); + yield* this.transformBatches(streamProcessor, asyncIterator); + } + + async *decompressBatches( + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + const streamProcessor = new Gunzip(); + streamProcessor.ondata = this._onData.bind(this); + yield* this.transformBatches(streamProcessor, asyncIterator); + } + + protected async *transformBatches( + streamProcessor: Gzip | Gunzip, + asyncIterator: AsyncIterable | Iterable + ): AsyncIterable { + for await (const chunk of asyncIterator) { + const uint8Array = new Uint8Array(chunk); + streamProcessor.push(uint8Array, false); // false -> not last chunk + const chunks = this._getChunks(); + yield* chunks; + } + + // End + const emptyChunk = new Uint8Array(0); + streamProcessor.push(emptyChunk, true); // true -> last chunk + const chunks = this._getChunks(); + yield* chunks; + } + + _onData(data: Uint8Array, final: boolean): void { + this._chunks.push(data); + } + + _getChunks(): ArrayBuffer[] { + const chunks = this._chunks; + this._chunks = []; + return chunks; } } diff --git a/modules/compression/src/lib/utils/pako-utils.ts b/modules/compression/src/lib/utils/pako-utils.ts new file mode 100644 index 0000000000..cd877873ee --- /dev/null +++ b/modules/compression/src/lib/utils/pako-utils.ts @@ -0,0 +1,24 @@ +// TODO - For some reason we don't get the error message from pako in _onEnd? +export function getPakoError(code: number = 0): string { + const MESSAGES = { + /* Z_NEED_DICT 2 */ + 2: 'need dictionary', + /* Z_STREAM_END 1 */ + 1: 'stream end', + /* Z_OK 0 */ + 0: '', + /* Z_ERRNO (-1) */ + '-1': 'file error', + /* Z_STREAM_ERROR (-2) */ + '-2': 'stream error', + /* Z_DATA_ERROR (-3) */ + '-3': 'data error', + /* Z_MEM_ERROR (-4) */ + '-4': 'insufficient memory', + /* Z_BUF_ERROR (-5) */ + '-5': 'buffer error', + /* Z_VERSION_ERROR (-6) */ + '-6': 'incompatible version' + }; + return MESSAGES[code] || 'unknown Pako library error'; +} diff --git a/modules/compression/src/workers/worker.ts b/modules/compression/src/workers/worker.ts index ce52494f0a..b827cbe6a7 100644 --- a/modules/compression/src/workers/worker.ts +++ b/modules/compression/src/workers/worker.ts @@ -18,6 +18,9 @@ import lz4js from 'lz4js'; // import lzo from 'lzo'; // import {ZstdCodec} from 'zstd-codec'; +// globalThis.Worker = globalThis.Worker || {}; +// globalThis.Blob = globalThis.Blob || {}; + // Inject large dependencies through Compression constructor options const modules = { // brotli has problems with decompress in browsers diff --git a/modules/compression/test/compression.spec.js b/modules/compression/test/compression.spec.js index f34af8aeaa..3a50f83bc8 100644 --- a/modules/compression/test/compression.spec.js +++ b/modules/compression/test/compression.spec.js @@ -55,7 +55,8 @@ const TEST_CASES = [ compressedLength: 10903 }, gzip: { - compressedLength: 10915 + compressedLength: 10913 // fflate + // compressedLength: 10915 // pako }, lz4: { compressedLength: 10422 @@ -137,7 +138,10 @@ test('compression#batched', async (t) => { let compressedBatches = compression.compressBatches(inputChunks); const compressedData = await concatenateArrayBuffersAsync(compressedBatches); if (name === 'gzip') { - t.equals(compressedData.byteLength, 29, `${name}(${title}) batches: length correct`); // Header overhead + // FFLATE + t.equals(compressedData.byteLength, 47, `${name}(${title}) batches: length correct`); // Header overhead + // PAKO + // t.equals(compressedData.byteLength, 29, `${name}(${title}) batches: length correct`); // Header overhead } // test chained iterators @@ -160,6 +164,15 @@ test('compression#batched', async (t) => { // WORKER TESTS test('gzip#worker', async (t) => { + if (!isBrowser) { + t.comment('not testing worker under Node.js'); + t.end(); + return; + } + + const COMPRESSED_LENGTH_EXPECTED = 12824; // fflate + // const COMPRESSED_LENGTH_EXPECTED = 12825; // pako + const {binaryData} = getData(); t.equal(binaryData.byteLength, 100000, 'Length correct'); @@ -173,7 +186,7 @@ test('gzip#worker', async (t) => { } }); - t.equal(compressedData.byteLength, 12825, 'Length correct'); + t.equal(compressedData.byteLength, COMPRESSED_LENGTH_EXPECTED, 'Length correct'); const decompressdData = await processOnWorker(CompressionWorker, compressedData, { compression: 'gzip', @@ -197,6 +210,12 @@ test('gzip#worker', async (t) => { }); test('lz4#worker', async (t) => { + if (!isBrowser) { + t.comment('not testing worker under Node.js'); + t.end(); + return; + } + const {binaryData} = getData(); t.equal(binaryData.byteLength, 100000, 'Length correct');