Skip to main content
The node:zlib module provides compression and decompression functionality using Gzip, Deflate/Inflate, Brotli, and Zstd algorithms.

Importing the Module

The node:zlib module is built into Node.js, so no installation is required:

import zlib from 'node:zlib';
// or
const zlib = require('node:zlib');

Compression Algorithms

  • Gzip - Most common, good compression, widely supported
  • Deflate - The underlying algorithm Gzip builds on; smaller framing overhead
  • Brotli - Better compression ratio, slower
  • Zstd - Fast compression/decompression, modern (only available in recent Node.js releases — verify your runtime supports it)

Stream-based Compression

Compressing Files

import { createReadStream, createWriteStream } from 'node:fs';
import { createGzip } from 'node:zlib';
import { pipeline } from 'node:stream/promises';

// Gzip-compress input.txt into input.txt.gz with a streaming pipeline,
// so the whole file is never held in memory at once. pipeline() also
// handles error propagation and cleanup across all three streams.
await pipeline(
  createReadStream('input.txt'),
  createGzip(),
  createWriteStream('input.txt.gz')
);
console.log('File compressed successfully');

Decompressing Files

import { createReadStream, createWriteStream } from 'node:fs';
import { createGunzip } from 'node:zlib';
import { pipeline } from 'node:stream/promises';

// Reverse of the compression example: stream input.txt.gz through a
// gunzip transform into output.txt, with pipeline() managing errors
// and stream teardown.
await pipeline(
  createReadStream('input.txt.gz'),
  createGunzip(),
  createWriteStream('output.txt')
);
console.log('File decompressed successfully');

Promise-based Compression

Compressing Data

import { promisify } from 'node:util';
import { gzip, deflate, brotliCompress } from 'node:zlib';

// Promisified one-shot (whole-buffer) compressors.
const gzipAsync = promisify(gzip);
const deflateAsync = promisify(deflate);
const brotliAsync = promisify(brotliCompress);

const input = 'Hello, World! '.repeat(100);

// Compress the same input with all three algorithms, then compare sizes.
const [gzipped, deflated, brotlied] = await Promise.all([
  gzipAsync(input),
  deflateAsync(input),
  brotliAsync(input),
]);

console.log(`Original: ${input.length} bytes`);
console.log(`Gzipped: ${gzipped.length} bytes`);
console.log(`Deflated: ${deflated.length} bytes`);
console.log(`Brotli: ${brotlied.length} bytes`);

Decompressing Data

import { promisify } from 'node:util';
import { gunzip, inflate, brotliDecompress, gzip, unzip } from 'node:zlib';
import { Buffer } from 'node:buffer';

const gunzipAsync = promisify(gunzip);
const inflateAsync = promisify(inflate);
const brotliDecompressAsync = promisify(brotliDecompress);
const gzipAsync = promisify(gzip);
const unzipAsync = promisify(unzip);

// Produce real gzipped bytes so this example actually runs end to end.
// (The original used Buffer.from('...') — a literal placeholder that is
// not valid gzip data — and referenced an undefined `compressedBuffer`.)
const gzippedBuffer = await gzipAsync('Hello, zlib!');

// Decompress gzipped data
const decompressed = await gunzipAsync(gzippedBuffer);
console.log(decompressed.toString()); // 'Hello, zlib!'

// Auto-detect compression format (handles both gzip and deflate input)
const result = await unzipAsync(gzippedBuffer);
console.log(result.toString()); // 'Hello, zlib!'

Callback-based API

import { gzip, gunzip } from 'node:zlib';
import { Buffer } from 'node:buffer';

const input = 'Compress this text';

// Second stage: decompress the buffer produced by the gzip callback,
// proving the round trip restores the original text.
function onCompressed(err, compressed) {
  if (err) {
    console.error('Compression error:', err);
    return;
  }

  console.log('Compressed:', compressed.toString('base64'));

  gunzip(compressed, (err, decompressed) => {
    if (err) {
      console.error('Decompression error:', err);
      return;
    }

    console.log('Decompressed:', decompressed.toString());
  });
}

// Errors-first callback style: the result arrives asynchronously.
gzip(input, onCompressed);

Synchronous API

import { gzipSync, gunzipSync, deflateSync, inflateSync } from 'node:zlib';

const input = 'Compress this synchronously';

// Gzip round trip — blocking calls, acceptable in scripts and CLIs.
const gzipped = gzipSync(input);
console.log('Compressed:', gzipped.length, 'bytes');

const decompressed = gunzipSync(gzipped);
console.log('Decompressed:', decompressed.toString());

// Deflate round trip (zlib framing — lighter than the gzip wrapper).
const deflated = deflateSync(input);
const inflated = inflateSync(deflated);
console.log(inflated.toString());
Synchronous methods block the event loop. Use async methods for production applications.

Compression Methods

Gzip

import { createGzip, createGunzip, gzip, gunzip } from 'node:zlib';
import { promisify } from 'node:util';

// Stream API — transform streams for use with pipe()/pipeline().
const gzipStream = createGzip();
const gunzipStream = createGunzip();

// Promise API — promisified one-shot buffer helpers.
const gzipAsync = promisify(gzip);
const gunzipAsync = promisify(gunzip);

const compressed = await gzipAsync('data');
const decompressed = await gunzipAsync(compressed);

Deflate/Inflate

import { createDeflate, createInflate, deflate, inflate } from 'node:zlib';
import { promisify } from 'node:util';

// Stream API — transform streams for pipelines.
const deflateStream = createDeflate();
const inflateStream = createInflate();

// Promise API — one-shot helpers over the callback functions.
const deflateAsync = promisify(deflate);
const inflateAsync = promisify(inflate);

const compressed = await deflateAsync('data');
const decompressed = await inflateAsync(compressed);

Brotli

import {
  createBrotliCompress,
  createBrotliDecompress,
  brotliCompress,
  brotliDecompress,
  constants
} from 'node:zlib';
import { promisify } from 'node:util';

// Stream API, tuned via Brotli-specific parameters.
const brotliStream = createBrotliCompress({
  params: {
    // Quality 0-11: higher compresses better but runs slower.
    [constants.BROTLI_PARAM_QUALITY]: 11,
    // Expected total input size — helps Brotli size internal buffers.
    [constants.BROTLI_PARAM_SIZE_HINT]: 1024
  }
});

// Promise API.
const brotliAsync = promisify(brotliCompress);
const debrotliAsync = promisify(brotliDecompress);

const compressed = await brotliAsync('data');
const decompressed = await debrotliAsync(compressed);

Zstd

import {
  createZstdCompress,
  createZstdDecompress,
  zstdCompress,
  zstdDecompress
} from 'node:zlib';

// Stream API
// NOTE(review): Zstd support in node:zlib is recent and not present in
// older Node.js releases — confirm the target runtime exports these
// functions before relying on this example.
const zstdStream = createZstdCompress();

// Promise API
import { promisify } from 'node:util';
const zstdAsync = promisify(zstdCompress);
const unzstdAsync = promisify(zstdDecompress);

// One-shot round trip: compress a string, then restore it.
const compressed = await zstdAsync('data');
const decompressed = await unzstdAsync(compressed);

Compression Options

Gzip/Deflate Options

import { createGzip, constants } from 'node:zlib';

// All gzip tuning knobs, extracted into a named options object.
const gzipOptions = {
  // 0-9; Z_BEST_COMPRESSION (9) trades CPU time for the smallest output.
  level: constants.Z_BEST_COMPRESSION,
  // 1-9; how much memory the internal compression state may use.
  memLevel: 8,
  strategy: constants.Z_DEFAULT_STRATEGY,
  // Size of the internal processing buffers.
  chunkSize: 16 * 1024,
  // Compression window size.
  windowBits: 15,
};

const gzip = createGzip(gzipOptions);
Compression Levels:
  • Z_NO_COMPRESSION (0) - No compression
  • Z_BEST_SPEED (1) - Fastest
  • Z_DEFAULT_COMPRESSION (6) - Default
  • Z_BEST_COMPRESSION (9) - Best compression

Brotli Options

import { createBrotliCompress, constants } from 'node:zlib';

// Brotli parameters, extracted into a named object for readability.
const brotliParams = {
  // Quality 0-11.
  [constants.BROTLI_PARAM_QUALITY]: 11,
  // Tells the encoder the input is UTF-8 text.
  [constants.BROTLI_PARAM_MODE]: constants.BROTLI_MODE_TEXT,
  // Expected total input size.
  [constants.BROTLI_PARAM_SIZE_HINT]: 1024,
};

const brotli = createBrotliCompress({ params: brotliParams });
Brotli Modes:
  • BROTLI_MODE_GENERIC - Default
  • BROTLI_MODE_TEXT - UTF-8 text
  • BROTLI_MODE_FONT - Font data

HTTP Compression

Server-side Compression

import { createServer } from 'node:http';
import { createGzip, createBrotliCompress } from 'node:zlib';
import { pipeline } from 'node:stream';
import { createReadStream } from 'node:fs';

// Serve large-file.json, compressed with whichever encoding the client
// advertises in Accept-Encoding (brotli preferred, then gzip, else raw).
// NOTE(review): substring matching on Accept-Encoding ignores q-values
// and could false-match (e.g. 'identity;q=0, br;q=0') — fine for a demo,
// use a real content-negotiation parser in production.
const server = createServer((req, res) => {
  const acceptEncoding = req.headers['accept-encoding'] || '';
  
  // Create read stream for response data
  const raw = createReadStream('large-file.json');
  
  if (acceptEncoding.includes('br')) {
    // Brotli compression
    res.writeHead(200, { 'Content-Encoding': 'br' });
    // pipeline() wires the streams together and reports any failure
    // (including the client disconnecting) via the callback.
    pipeline(raw, createBrotliCompress(), res, (err) => {
      if (err) console.error('Stream error:', err);
    });
  } else if (acceptEncoding.includes('gzip')) {
    // Gzip compression
    res.writeHead(200, { 'Content-Encoding': 'gzip' });
    pipeline(raw, createGzip(), res, (err) => {
      if (err) console.error('Stream error:', err);
    });
  } else {
    // No compression
    res.writeHead(200, {});
    pipeline(raw, res, (err) => {
      if (err) console.error('Stream error:', err);
    });
  }
});

server.listen(3000);

Client-side Decompression

import https from 'node:https';
import { createGunzip, createBrotliDecompress } from 'node:zlib';
import { pipeline } from 'node:stream';
import { createWriteStream } from 'node:fs';

// Fetch /data, advertising that we accept brotli or gzip, and write the
// decompressed body to output.json.
const request = https.get({
  host: 'api.example.com',
  path: '/data',
  headers: { 'Accept-Encoding': 'br,gzip' }
});

request.on('response', (response) => {
  const output = createWriteStream('output.json');
  const encoding = response.headers['content-encoding'];
  
  // Default: assume the server sent the body uncompressed.
  let stream = response;
  
  if (encoding === 'br') {
    // Callback-style pipeline() returns the last stream in the chain,
    // so `stream` becomes the decompressor's readable side.
    stream = pipeline(response, createBrotliDecompress(), (err) => {
      if (err) console.error('Decompression error:', err);
    });
  } else if (encoding === 'gzip') {
    stream = pipeline(response, createGunzip(), (err) => {
      if (err) console.error('Decompression error:', err);
    });
  }
  
  stream.pipe(output);
});

Performance Optimization

Caching Compressed Data

import { promisify } from 'node:util';
import { gzip } from 'node:zlib';

const gzipAsync = promisify(gzip);
const cache = new Map();

/**
 * Return gzip-compressed bytes for `data`, memoized by `key`.
 *
 * Note: entries are keyed only by `key` — calling again with the same key
 * but different data returns the previously cached buffer.
 *
 * @param {string} key - Cache lookup key.
 * @param {string|Buffer} data - Payload to compress on a cache miss.
 * @returns {Promise<Buffer>} The compressed bytes.
 */
async function getCompressedData(key, data) {
  const cached = cache.get(key);
  if (cached !== undefined) {
    return cached;
  }

  const compressed = await gzipAsync(data);
  cache.set(key, compressed);
  return compressed;
}

Choosing the Right Algorithm

import { promisify } from 'node:util';
import { gzip, brotliCompress, deflate } from 'node:zlib';

const algorithms = {
  gzip: promisify(gzip),
  brotli: promisify(brotliCompress),
  deflate: promisify(deflate)
};

/**
 * Compress `data` with each algorithm and report output size, elapsed
 * wall-clock time (ms), and compression ratio as a percentage string.
 *
 * Fix: the ratio is computed against Buffer.byteLength(data) rather than
 * data.length — for strings containing multi-byte UTF-8 characters,
 * .length counts UTF-16 code units, not bytes, which understated the
 * original size and skewed the ratio.
 *
 * @param {string|Buffer} data - Payload to benchmark.
 * @returns {Promise<Object>} Map of algorithm name -> { size, time, ratio }.
 */
async function compareCompression(data) {
  const originalBytes = Buffer.byteLength(data);
  const results = {};

  for (const [name, compress] of Object.entries(algorithms)) {
    const start = Date.now();
    const compressed = await compress(data);
    const time = Date.now() - start;

    results[name] = {
      size: compressed.length,
      time,
      ratio: ((compressed.length / originalBytes) * 100).toFixed(2) + '%'
    };
  }

  return results;
}

Error Handling

import { gzip } from 'node:zlib';
import { promisify } from 'node:util';

const gzipAsync = promisify(gzip);

// A number is not a valid zlib input (must be a string, Buffer,
// TypedArray, DataView or ArrayBuffer), so this reliably triggers
// ERR_INVALID_ARG_TYPE. (The original referenced an undefined
// `invalidData`, so the catch only ever saw a ReferenceError with no
// `code`, never exercising the branches it documents.)
const invalidData = 12345;

try {
  const compressed = await gzipAsync(invalidData);
} catch (err) {
  if (err.code === 'ERR_INVALID_ARG_TYPE') {
    console.error('Invalid input type');
  } else if (err.code === 'Z_DATA_ERROR') {
    // Raised by the decompression functions on corrupted input.
    console.error('Corrupted data');
  } else {
    console.error('Compression failed:', err.message);
  }
}

Best Practices

  1. Use async methods - Avoid blocking the event loop
  2. Cache compressed data - Don’t recompress repeatedly
  3. Choose appropriate compression level - Balance speed vs size
  4. Handle errors properly - Always catch compression errors
  5. Use streams for large files - Avoid loading entire file into memory
  6. Set appropriate chunk sizes - Optimize for your use case
  7. Consider compression ratio - Some data doesn’t compress well

Common Use Cases

API Response Compression

import express from 'express';
import compression from 'compression';

const app = express();

// Middleware for automatic compression
app.use(compression({
  level: 6, // zlib level 0-9; 6 balances speed and output size
  threshold: 1024, // Only compress responses > 1KB
  filter: (req, res) => {
    // Let clients opt out of compression with a custom request header.
    if (req.headers['x-no-compression']) {
      return false;
    }
    // Otherwise defer to the module's default content-type filter.
    return compression.filter(req, res);
  }
}));

app.get('/api/data', (req, res) => {
  res.json({ large: 'data' });
});

File Archive Creation

import { createGzip } from 'node:zlib';
import { createReadStream, createWriteStream } from 'node:fs';
import { readFile } from 'node:fs/promises';
import { pipeline } from 'node:stream/promises';
import tar from 'tar-stream';

/**
 * Create a gzip-compressed tar archive containing the given files.
 *
 * Fixes vs the original example:
 * - tar-stream requires each entry's size up front when content is
 *   streamed in; the original piped a file stream into
 *   pack.entry({ name }) with no size, which errors at runtime. Passing
 *   the content as a Buffer lets tar-stream infer the size.
 * - pack.finalize() was called before the entry streams had finished;
 *   entries are now fully written before finalize.
 *
 * Note: each file is read fully into memory — fine for small files.
 *
 * @param {string[]} files - Paths of files to add to the archive.
 * @param {string} output - Path of the .tar.gz file to create.
 * @returns {Promise<void>} Resolves when the archive is fully written.
 */
async function createTarGz(files, output) {
  const pack = tar.pack();
  const gzip = createGzip({ level: 9 });
  const dest = createWriteStream(output);

  // Start draining the pack stream immediately so entry writes are not
  // blocked by backpressure; awaited after all entries are added.
  const done = pipeline(pack, gzip, dest);

  for (const file of files) {
    const content = await readFile(file);
    pack.entry({ name: file }, content); // buffered entry: size inferred
  }

  pack.finalize();

  await done;
}
Related Modules

  • stream - Stream processing
  • fs - File system operations
  • http - HTTP server and client
  • buffer - Binary data handling