Define common gzip helper methods

Also we no longer need to define the interfaces as they're now in
@types/node/stream/web.d.ts

Bug: 432043263
Change-Id: I268a0b377457e71cb6528a436ef1369572853d2d
Reviewed-on: https://chromium-review.googlesource.com/c/devtools/devtools-frontend/+/6761794
Commit-Queue: Paul Irish <[email protected]>
Reviewed-by: Connor Clark <[email protected]>
Auto-Submit: Paul Irish <[email protected]>
diff --git a/config/gni/devtools_grd_files.gni b/config/gni/devtools_grd_files.gni
index 9fb66f7..c7c5ea5 100644
--- a/config/gni/devtools_grd_files.gni
+++ b/config/gni/devtools_grd_files.gni
@@ -812,6 +812,7 @@
   "front_end/core/common/Console.js",
   "front_end/core/common/Debouncer.js",
   "front_end/core/common/EventTarget.js",
+  "front_end/core/common/Gzip.js",
   "front_end/core/common/JavaScriptMetaData.js",
   "front_end/core/common/Lazy.js",
   "front_end/core/common/Linkifier.js",
diff --git a/front_end/core/common/BUILD.gn b/front_end/core/common/BUILD.gn
index e72ccb9..abd52ee 100644
--- a/front_end/core/common/BUILD.gn
+++ b/front_end/core/common/BUILD.gn
@@ -21,6 +21,7 @@
     "Console.ts",
     "Debouncer.ts",
     "EventTarget.ts",
+    "Gzip.ts",
     "JavaScriptMetaData.ts",
     "Lazy.ts",
     "Linkifier.ts",
@@ -83,6 +84,7 @@
     "Console.test.ts",
     "Debouncer.test.ts",
     "EventTarget.test.ts",
+    "Gzip.test.ts",
     "Lazy.test.ts",
     "MapWithDefault.test.ts",
     "Mutex.test.ts",
diff --git a/front_end/core/common/Gzip.test.ts b/front_end/core/common/Gzip.test.ts
new file mode 100644
index 0000000..3763e78
--- /dev/null
+++ b/front_end/core/common/Gzip.test.ts
@@ -0,0 +1,63 @@
+// Copyright 2025 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import * as Common from './common.js';
+
+describe('Gzip', () => {
+  it('can compress and decompress a string', async () => {
+    const text = 'Hello, world!';
+    const compressed = await Common.Gzip.compress(text);
+    const decompressed = await Common.Gzip.decompress(compressed);
+    assert.strictEqual(decompressed, text);
+  });
+
+  it('can compress and decompress a stream', async () => {
+    const text = 'Hello, world! This is a stream test.';
+    const textEncoder = new TextEncoder();
+    const inputStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue(textEncoder.encode(text));
+        controller.close();
+      },
+    });
+
+    const compressedStream = Common.Gzip.compressStream(inputStream);
+    const decompressedStream = Common.Gzip.decompressStream(compressedStream);
+
+    const buffer = await new Response(decompressedStream).arrayBuffer();
+    const decodedText = new TextDecoder().decode(buffer);
+
+    assert.strictEqual(decodedText, text);
+  });
+});
+
+describe('arrayBufferToString', () => {
+  it('can decompress a gzipped buffer', async () => {
+    const text = 'Hello, world!';
+    const compressed = await Common.Gzip.compress(text);
+    const result = await Common.Gzip.arrayBufferToString(compressed);
+    assert.strictEqual(result, text);
+  });
+  it('can decode a plaintext buffer', async () => {
+    const text = 'Hello, buddy!';
+    const buffer = new TextEncoder().encode(text).buffer as ArrayBuffer;
+    const result = await Common.Gzip.arrayBufferToString(buffer);
+    assert.strictEqual(result, text);
+  });
+});
+
+describe('fileToString', () => {
+  it('can decompress a gzipped file', async () => {
+    const text = '{"key": "value"}';
+    const compressed = await Common.Gzip.compress(text);
+    const result = await Common.Gzip.fileToString(new File([compressed], 'file.json.gz', {type: 'application/gzip'}));
+    assert.strictEqual(result, text);
+  });
+  it('can decode a plaintext file', async () => {
+    const text = 'Hello, buddy!';
+    const file = new File([text], 'test.txt');
+    const result = await Common.Gzip.fileToString(file);
+    assert.strictEqual(result, text);
+  });
+});
diff --git a/front_end/core/common/Gzip.ts b/front_end/core/common/Gzip.ts
new file mode 100644
index 0000000..a1c3ac5
--- /dev/null
+++ b/front_end/core/common/Gzip.ts
@@ -0,0 +1,72 @@
+// Copyright 2025 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/**
+ * Quickly determine whether a buffer is gzipped by checking if the first 3 bytes of the file header match the gzip signature
+ */
+export function isGzip(ab: ArrayBuffer): boolean {
+  const buf = new Uint8Array(ab);
+  if (!buf || buf.length < 3) {
+    return false;
+  }
+  // https://www.rfc-editor.org/rfc/rfc1952#page-6
+  return buf[0] === 0x1F && buf[1] === 0x8B && buf[2] === 0x08;
+}
+
+/** Decode a gzipped _or_ plain text ArrayBuffer to a string */
+export async function arrayBufferToString(ab: ArrayBuffer): Promise<string> {
+  if (isGzip(ab)) {
+    return await decompress(ab);
+  }
+  const str = new TextDecoder('utf-8').decode(ab);
+  return str;
+}
+
+export async function fileToString(file: File): Promise<string> {
+  let stream = file.stream();
+  if (file.type.endsWith('gzip')) {
+    stream = decompressStream(stream);
+  }
+  const arrayBuffer = await new Response(stream).arrayBuffer();
+  const str = new TextDecoder('utf-8').decode(arrayBuffer);
+  return str;
+}
+
+/**
+ * Decompress a gzipped ArrayBuffer to a string.
+ * Consider using `arrayBufferToString` instead, which can handle both gzipped and plain text buffers.
+ */
+export async function decompress(gzippedBuffer: ArrayBuffer): Promise<string> {
+  const buffer = await gzipCodec(gzippedBuffer, new DecompressionStream('gzip'));
+  const str = new TextDecoder('utf-8').decode(buffer);
+  return str;
+}
+export async function compress(str: string): Promise<ArrayBuffer> {
+  const encoded = new TextEncoder().encode(str);
+  const buffer = await gzipCodec(encoded, new CompressionStream('gzip'));
+  return buffer;
+}
+
+// Private coder/decoder
+function gzipCodec(buffer: Uint8Array<ArrayBufferLike>|ArrayBuffer, codecStream: CompressionStream|DecompressionStream):
+    Promise<ArrayBuffer> {
+  const {readable, writable} = new TransformStream();
+  const codecReadable = readable.pipeThrough(codecStream);
+
+  const writer = writable.getWriter();
+  void writer.write(buffer);
+  void writer.close();
+  // A response is a convenient way to get an ArrayBuffer from a ReadableStream.
+  return new Response(codecReadable).arrayBuffer();
+}
+
+export function decompressStream(stream: ReadableStream): ReadableStream {
+  // https://github.com/wicg/compression/blob/main/explainer.md#deflate-compress-an-arraybuffer
+  const ds = new DecompressionStream('gzip');
+  return stream.pipeThrough(ds);
+}
+export function compressStream(stream: ReadableStream): ReadableStream {
+  const cs = new CompressionStream('gzip');
+  return stream.pipeThrough(cs);
+}
diff --git a/front_end/core/common/common.ts b/front_end/core/common/common.ts
index 05eec10..da8b88e 100644
--- a/front_end/core/common/common.ts
+++ b/front_end/core/common/common.ts
@@ -12,6 +12,7 @@
 import * as Console from './Console.js';
 import * as Debouncer from './Debouncer.js';
 import * as EventTarget from './EventTarget.js';
+import * as Gzip from './Gzip.js';
 import * as JavaScriptMetaData from './JavaScriptMetaData.js';
 import * as Lazy from './Lazy.js';
 import * as Linkifier from './Linkifier.js';
@@ -52,6 +53,7 @@
   Console,
   Debouncer,
   EventTarget,
+  Gzip,
   JavaScriptMetaData,
   Lazy,
   Linkifier,
diff --git a/front_end/core/host/InspectorFrontendHost.ts b/front_end/core/host/InspectorFrontendHost.ts
index 2a262f3..2ff9929 100644
--- a/front_end/core/host/InspectorFrontendHost.ts
+++ b/front_end/core/host/InspectorFrontendHost.ts
@@ -63,14 +63,6 @@
 } from './InspectorFrontendHostAPI.js';
 import {streamWrite as resourceLoaderStreamWrite} from './ResourceLoader.js';
 
-interface DecompressionStream extends GenericTransformStream {
-  readonly format: string;
-}
-declare const DecompressionStream: {
-  prototype: DecompressionStream,
-  new (format: string): DecompressionStream,
-};
-
 const UIStrings = {
   /**
    *@description Document title in Inspector Frontend Host of the DevTools window
@@ -331,28 +323,10 @@
 
   loadNetworkResource(
       url: string, _headers: string, streamId: number, callback: (arg0: LoadNetworkResourceResult) => void): void {
-    // Read the first 3 bytes looking for the gzip signature in the file header
-    function isGzip(ab: ArrayBuffer): boolean {
-      const buf = new Uint8Array(ab);
-      if (!buf || buf.length < 3) {
-        return false;
-      }
-
-      // https://www.rfc-editor.org/rfc/rfc1952#page-6
-      return buf[0] === 0x1F && buf[1] === 0x8B && buf[2] === 0x08;
-    }
     fetch(url)
         .then(async result => {
-          const resultArrayBuf = await result.arrayBuffer();
-          let decoded: ReadableStream|ArrayBuffer = resultArrayBuf;
-          if (isGzip(resultArrayBuf)) {
-            const ds = new DecompressionStream('gzip');
-            const writer = ds.writable.getWriter();
-            void writer.write(resultArrayBuf);
-            void writer.close();
-            decoded = ds.readable;
-          }
-          const text = await new Response(decoded).text();
+          const respBuffer = await result.arrayBuffer();
+          const text = await Common.Gzip.arrayBufferToString(respBuffer);
           return text;
         })
         .then(function(text) {
diff --git a/front_end/models/bindings/FileUtils.test.ts b/front_end/models/bindings/FileUtils.test.ts
index 47782ea..47046da 100644
--- a/front_end/models/bindings/FileUtils.test.ts
+++ b/front_end/models/bindings/FileUtils.test.ts
@@ -9,14 +9,6 @@
 const ChunkedFileReader = Bindings.FileUtils.ChunkedFileReader;
 const StringOutputStream = Common.StringOutputStream.StringOutputStream;
 
-interface CompressionStream extends GenericTransformStream {
-  readonly format: string;
-}
-declare const CompressionStream: {
-  prototype: CompressionStream,
-  new (format: string): CompressionStream,
-};
-
 describe('FileUtils', () => {
   describe('ChunkedFileReader', () => {
     it('re-assembles chunks including multibyte characters', async () => {
@@ -45,8 +37,7 @@
     it('can decompress gzipped data', async () => {
       async function getAsCompressedFile(text: string) {
         const blob = new Blob([text], {type: 'text/plain'});
-        // https://github.com/wicg/compression/blob/main/explainer.md#deflate-compress-an-arraybuffer
-        const cstream = blob.stream().pipeThrough(new CompressionStream('gzip'));
+        const cstream = Common.Gzip.compressStream(blob.stream());
         const creader = cstream.getReader();
         const values: string[] = [];
 
diff --git a/front_end/models/bindings/FileUtils.ts b/front_end/models/bindings/FileUtils.ts
index 8536f0a..be3bbde 100644
--- a/front_end/models/bindings/FileUtils.ts
+++ b/front_end/models/bindings/FileUtils.ts
@@ -28,7 +28,7 @@
  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
-import type * as Common from '../../core/common/common.js';
+import * as Common from '../../core/common/common.js';
 import type * as Platform from '../../core/platform/platform.js';
 import * as TextUtils from '../text_utils/text_utils.js';
 import * as Workspace from '../workspace/workspace.js';
@@ -44,13 +44,6 @@
 
   error(): DOMError|null;
 }
-interface DecompressionStream extends GenericTransformStream {
-  readonly format: string;
-}
-declare const DecompressionStream: {
-  prototype: DecompressionStream,
-  new (format: string): DecompressionStream,
-};
 
 export class ChunkedFileReader implements ChunkedReader {
   #file: File|null;
@@ -84,11 +77,8 @@
     }
 
     if (this.#file?.type.endsWith('gzip')) {
-      // TypeScript can't tell if to use @types/node or lib.webworker.d.ts
-      // types, so we force it to here.
-      // crbug.com/1392092
-      const fileStream = this.#file.stream() as unknown as ReadableStream<Uint8Array>;
-      const stream = this.decompressStream(fileStream);
+      const fileStream = this.#file.stream();
+      const stream = Common.Gzip.decompressStream(fileStream);
       this.#streamReader = stream.getReader();
     } else {
       this.#reader = new FileReader();
@@ -127,13 +117,6 @@
     return this.#errorInternal;
   }
 
-  // Decompress gzip natively thanks to https://wicg.github.io/compression/
-  private decompressStream(stream: ReadableStream): ReadableStream {
-    const ds = new DecompressionStream('gzip');
-    const decompressionStream = stream.pipeThrough(ds);
-    return decompressionStream;
-  }
-
   private onChunkLoaded(event: Event): void {
     if (this.#isCanceled) {
       return;
diff --git a/front_end/models/trace/extras/ScriptDuplication.test.ts b/front_end/models/trace/extras/ScriptDuplication.test.ts
index 99cd395..c3b9095 100644
--- a/front_end/models/trace/extras/ScriptDuplication.test.ts
+++ b/front_end/models/trace/extras/ScriptDuplication.test.ts
@@ -6,15 +6,15 @@
 import * as SDK from '../../../core/sdk/sdk.js';
 import type * as Protocol from '../../../generated/protocol.js';
 import {describeWithEnvironment, expectConsoleLogs} from '../../../testing/EnvironmentHelpers.js';
-import {fetchFixture} from '../../../testing/TraceLoader.js';
+import {fetchFileAsText} from '../../../testing/TraceLoader.js';
 import * as Trace from '../trace.js';
 
 async function loadScriptFixture(
     name: string, modify?: (fixture: {content: string, sourceMapJson: SDK.SourceMap.SourceMapV3Object}) => void):
     Promise<Trace.Handlers.ModelHandlers.Scripts.Script> {
   const content =
-      await fetchFixture(new URL(`../../../panels/timeline/fixtures/traces/scripts/${name}.js.gz`, import.meta.url));
-  const mapText = await fetchFixture(
+      await fetchFileAsText(new URL(`../../../panels/timeline/fixtures/traces/scripts/${name}.js.gz`, import.meta.url));
+  const mapText = await fetchFileAsText(
       new URL(`../../../panels/timeline/fixtures/traces/scripts/${name}.js.map.gz`, import.meta.url));
   const sourceMapJson = JSON.parse(mapText) as SDK.SourceMap.SourceMapV3Object;
   const fixture = {content, sourceMapJson};
diff --git a/front_end/models/trace/insights/DuplicatedJavaScript.test.ts b/front_end/models/trace/insights/DuplicatedJavaScript.test.ts
index 5f5e7f4..f20087c 100644
--- a/front_end/models/trace/insights/DuplicatedJavaScript.test.ts
+++ b/front_end/models/trace/insights/DuplicatedJavaScript.test.ts
@@ -4,7 +4,7 @@
 
 import {describeWithEnvironment} from '../../../testing/EnvironmentHelpers.js';
 import {getFirstOrError, getInsightOrError, processTrace} from '../../../testing/InsightHelpers.js';
-import {fetchFixture, TraceLoader} from '../../../testing/TraceLoader.js';
+import {TraceLoader} from '../../../testing/TraceLoader.js';
 import * as Trace from '../trace.js';
 
 describeWithEnvironment('DuplicatedJavaScript', function() {
@@ -95,9 +95,8 @@
 
   it('works (inline source maps in metadata)', async function() {
     // Load this trace in a way that mutating it is safe.
-    const traceText = await fetchFixture(
+    const fileContents = await TraceLoader.loadTraceFileFromURL(
         new URL('../../../panels/timeline/fixtures/traces/dupe-js-inline-maps.json.gz', import.meta.url));
-    const fileContents = JSON.parse(traceText) as Trace.Types.File.TraceFile;
 
     // Remove the source map data urls from the trace, and move to metadata.
     // This reflects how Chromium will elide data source map urls.
diff --git a/front_end/panels/timeline/utils/Treemap.ts b/front_end/panels/timeline/utils/Treemap.ts
index f4b9f2a..6314c64 100644
--- a/front_end/panels/timeline/utils/Treemap.ts
+++ b/front_end/panels/timeline/utils/Treemap.ts
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+import * as Common from '../../../core/common/common.js';
 import * as i18n from '../../../core/i18n/i18n.js';
 import * as Trace from '../../../models/trace/trace.js';
 
@@ -20,28 +21,12 @@
 type SourceData = Omit<TreemapNode, 'name'|'children'>;
 
 /**
- * Takes an UTF-8 string and returns a base64 encoded string. The UTF-8 bytes are
- * gzipped before base64'd using CompressionStream.
+ * Takes a UTF-8 string, gzip-compresses it, then base64-encodes the result.
  */
 async function toCompressedBase64(string: string): Promise<string> {
-  let bytes = new TextEncoder().encode(string);
-
-  const cs = new CompressionStream('gzip');
-  const writer = cs.writable.getWriter();
-  void writer.write(bytes);
-  void writer.close();
-  const compAb = await new Response(cs.readable).arrayBuffer();
-  bytes = new Uint8Array(compAb);
-
-  let binaryString = '';
-  // This is ~25% faster than building the string one character at a time.
-  // https://jsbench.me/2gkoxazvjl
-  const chunkSize = 5000;
-  for (let i = 0; i < bytes.length; i += chunkSize) {
-    binaryString += String.fromCharCode(...bytes.subarray(i, i + chunkSize));
-  }
-
-  return btoa(binaryString);
+  const compAb = await Common.Gzip.compress(string);
+  const strb64 = await Common.Base64.encode(compAb);
+  return strb64;
 }
 
 /**
diff --git a/front_end/testing/TraceLoader.ts b/front_end/testing/TraceLoader.ts
index 2c925da..682ab32 100644
--- a/front_end/testing/TraceLoader.ts
+++ b/front_end/testing/TraceLoader.ts
@@ -2,6 +2,7 @@
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+import * as Common from '../core/common/common.js';
 import * as SDK from '../core/sdk/sdk.js';
 import type * as Protocol from '../generated/protocol.js';
 import * as Trace from '../models/trace/trace.js';
@@ -79,7 +80,7 @@
     // Required URLs differ across the component server and the unit tests, so try both.
     const urlForTest = new URL(`../panels/timeline/fixtures/traces/${name}`, import.meta.url);
 
-    const contents = await loadTraceFileFromURL(urlForTest);
+    const contents = await TraceLoader.loadTraceFileFromURL(urlForTest);
     fileContentsCache.set(name, contents);
     return contents;
   }
@@ -274,75 +275,29 @@
           .catch(e => console.error(e));
     });
   }
+
+  static async loadTraceFileFromURL(url: URL): Promise<Trace.Types.File.TraceFile> {
+    const contents = await fetchFileAsText(url);
+    const traceContents = JSON.parse(contents) as Trace.Types.File.TraceFile;
+    return traceContents;
+  }
+}
+
+export async function fetchFileAsText(url: URL): Promise<string> {
+  const response = await fetch(url);
+  if (response.status !== 200) {
+    throw new Error(`Unable to load ${url}`);
+  }
+
+  const buffer = await response.arrayBuffer();
+  const contents = await Common.Gzip.arrayBufferToString(buffer);
+  return contents;
 }
 
 // Below this point are private methods used in the TraceLoader class. These
 // are purposefully not exported, you should use one of the static methods
 // defined above.
 
-async function loadTraceFileFromURL(url: URL): Promise<Trace.Types.File.Contents> {
-  const response = await fetch(url);
-  if (response.status !== 200) {
-    throw new Error(`Unable to load ${url}`);
-  }
-
-  const contentType = response.headers.get('content-type');
-  const isGzipEncoded = contentType?.includes('gzip');
-  let buffer = await response.arrayBuffer();
-  if (isGzipEncoded) {
-    buffer = await decodeGzipBuffer(buffer);
-  }
-  const decoder = new TextDecoder('utf-8');
-  const contents = JSON.parse(decoder.decode(buffer)) as Trace.Types.File.Contents;
-  return contents;
-}
-
-interface CompressionStream extends ReadableWritablePair<Uint8Array, Uint8Array> {}
-interface DecompressionStream extends ReadableWritablePair<Uint8Array, Uint8Array> {}
-declare const CompressionStream: {
-  prototype: CompressionStream,
-  new (type: string): CompressionStream,
-};
-
-declare const DecompressionStream: {
-  prototype: DecompressionStream,
-  new (type: string): DecompressionStream,
-};
-
-function codec(buffer: ArrayBuffer, codecStream: CompressionStream|DecompressionStream): Promise<ArrayBuffer> {
-  const {readable, writable} = new TransformStream();
-  const codecReadable = readable.pipeThrough(codecStream);
-
-  const writer = writable.getWriter();
-  void writer.write(buffer);
-  void writer.close();
-
-  // Wrap in a response for convenience.
-  const response = new Response(codecReadable);
-  return response.arrayBuffer();
-}
-
-function decodeGzipBuffer(buffer: ArrayBuffer): Promise<ArrayBuffer> {
-  return codec(buffer, new DecompressionStream('gzip'));
-}
-
-export async function fetchFixture(url: URL): Promise<string> {
-  const response = await fetch(url);
-  if (response.status !== 200) {
-    throw new Error(`Unable to load ${url}`);
-  }
-
-  const contentType = response.headers.get('content-type');
-  const isGzipEncoded = contentType?.includes('gzip');
-  let buffer = await response.arrayBuffer();
-  if (isGzipEncoded) {
-    buffer = await decodeGzipBuffer(buffer);
-  }
-  const decoder = new TextDecoder('utf-8');
-  const contents = decoder.decode(buffer);
-  return contents;
-}
-
 /**
  * Wraps an async Promise with a timeout. We use this to break down and
  * instrument `TraceLoader` to understand on CQ where timeouts occur.