@@ -2,6 +2,7 @@ import { MethodEnum, Request } from '@algolia/requester-common';
import nock from 'nock';
// @ts-ignore
import { Readable } from 'readable-stream';
import * as zlib from 'zlib';

import { createNodeHttpRequester } from '../..';

@@ -279,3 +280,205 @@ describe('requesterOptions', () => {
expect(response.content).toBe(body);
});
});

describe('gzip compression', () => {
afterEach(() => {
nock.cleanAll();
});

// Build a payload of roughly 1.3 KB, comfortably above the requester's 750-byte compression threshold
const largePayload: Record<string, string> = {};
for (let i = 0; i < 50; i++) {
largePayload[`key${i}`] = `value${i}_padding`;
}

const gzipRequestStub: Request = {
url: 'https://algolia-dns.net/foo?x-algolia-header=foo',
method: MethodEnum.Post,
headers: {
'accept-encoding': 'gzip',
'content-type': 'application/json',
},
data: JSON.stringify(largePayload),
responseTimeout: 2,
connectTimeout: 1,
};

// nock hands binary request bodies to matcher functions as a hex string,
// so decode from hex before gunzipping and comparing against the expected JSON.
const isGzipBodyMatching = (expected: string) => (body: string): boolean => {
const decompressed = zlib.gunzipSync(Buffer.from(body, 'hex')).toString();

return decompressed === expected;
};

it('compresses request body when accept-encoding: gzip header is present', async () => {
const expectedBody = JSON.stringify(largePayload);

nock('https://algolia-dns.net', {
reqheaders: {
'content-encoding': 'gzip',
'accept-encoding': 'gzip',
},
})
.post('/foo', isGzipBodyMatching(expectedBody))
.query({ 'x-algolia-header': 'foo' })
.reply(200, 'ok');

const response = await requester.send(gzipRequestStub);

expect(response.status).toBe(200);
expect(response.content).toBe('ok');
});

it('does not compress request body when accept-encoding header is absent', async () => {
const body = JSON.stringify({ foo: 'bar' });

nock('https://algolia-dns.net')
.post('/foo', body)
.query({ 'x-algolia-header': 'foo' })
.reply(200, 'ok');

const response = await requester.send(requestStub);

expect(response.status).toBe(200);
expect(response.content).toBe('ok');
});

it('decompresses gzip response when content-encoding: gzip header is present', async () => {
const responseBody = JSON.stringify({ foo: 'bar' });
const gzipBuffer = zlib.gzipSync(responseBody);

nock('https://algolia-dns.net')
.post('/foo')
.query({ 'x-algolia-header': 'foo' })
.reply(200, gzipBuffer, { 'content-encoding': 'gzip' });

const response = await requester.send(gzipRequestStub);

expect(response.status).toBe(200);
expect(response.content).toBe(responseBody);
expect(response.isTimedOut).toBe(false);
});

it('does not decompress response when content-encoding header is absent', async () => {
const responseBody = JSON.stringify({ hello: 'world' });

nock('https://algolia-dns.net', {
reqheaders: {
'content-type': 'application/x-www-form-urlencoded',
},
})
.post('/foo', JSON.stringify({ foo: 'bar' }))
.query({ 'x-algolia-header': 'foo' })
.reply(200, responseBody);

const response = await requester.send(requestStub);

expect(response.status).toBe(200);
expect(response.content).toBe(responseBody);
expect(response.isTimedOut).toBe(false);
});

it('handles decompression errors gracefully', async () => {
const invalidGzipData = Buffer.from('not-gzip-data');

nock('https://algolia-dns.net')
.post('/foo')
.query({ 'x-algolia-header': 'foo' })
.reply(200, invalidGzipData, { 'content-encoding': 'gzip' });

const response = await requester.send(gzipRequestStub);

expect(response.status).toBe(0);
expect(response.isTimedOut).toBe(false);
expect(response.content).toBeTruthy();
});

it('does not compress request body when data is undefined', async () => {
const getRequest: Request = {
url: 'https://algolia-dns.net/foo?x-algolia-header=foo',
method: MethodEnum.Get,
headers: {
'accept-encoding': 'gzip',
},
data: undefined,
responseTimeout: 2,
connectTimeout: 1,
};

nock('https://algolia-dns.net')
.get('/foo')
.query({ 'x-algolia-header': 'foo' })
.reply(200, 'ok');

const response = await requester.send(getRequest);

expect(response.status).toBe(200);
expect(response.content).toBe('ok');
});

it('handles accept-encoding with multiple values (gzip, deflate)', async () => {
const multiEncodingRequest: Request = {
...gzipRequestStub,
headers: {
'accept-encoding': 'gzip, deflate',
'content-type': 'application/json',
},
};

const expectedBody = JSON.stringify(largePayload);

nock('https://algolia-dns.net', {
reqheaders: {
'content-encoding': 'gzip',
},
})
.post('/foo', isGzipBodyMatching(expectedBody))
.query({ 'x-algolia-header': 'foo' })
.reply(200, 'ok');

const response = await requester.send(multiEncodingRequest);

expect(response.status).toBe(200);
expect(response.content).toBe('ok');
});

it('does not compress request body when below size threshold', async () => {
const smallBody = JSON.stringify({ foo: 'bar' });
const smallRequest: Request = {
...gzipRequestStub,
data: smallBody,
};

nock('https://algolia-dns.net')
.post('/foo', smallBody)
.query({ 'x-algolia-header': 'foo' })
.reply(200, 'ok');

const response = await requester.send(smallRequest);

expect(response.status).toBe(200);
expect(response.content).toBe('ok');
});

it('full round-trip: compressed request + gzip response', async () => {
const requestBody = JSON.stringify(largePayload);
const responseBody = JSON.stringify({ result: 'success' });
const gzipResponseBuffer = zlib.gzipSync(responseBody);

nock('https://algolia-dns.net', {
reqheaders: {
'content-encoding': 'gzip',
'accept-encoding': 'gzip',
},
})
.post('/foo', isGzipBodyMatching(requestBody))
.query({ 'x-algolia-header': 'foo' })
.reply(200, gzipResponseBuffer, { 'content-encoding': 'gzip' });

const response = await requester.send(gzipRequestStub);

expect(response.status).toBe(200);
expect(response.content).toBe(responseBody);
expect(response.isTimedOut).toBe(false);
});
});
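
As a quick sanity check of the fixture sizes used above (illustrative only, not part of the test suite; it relies on the zlib import and the largePayload fixture from this file), the large payload clears the 750-byte threshold applied by the requester while a small body does not, and gzip shrinks the large payload considerably:

const largeBody = JSON.stringify(largePayload);
console.log(Buffer.byteLength(largeBody)); // ~1300 bytes, above the 750-byte threshold
console.log(zlib.gzipSync(largeBody).length); // far smaller once gzipped
console.log(Buffer.byteLength(JSON.stringify({ foo: 'bar' }))); // 13 bytes, sent uncompressed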
101 changes: 80 additions & 21 deletions packages/requester-node-http/src/createNodeHttpRequester.ts
@@ -5,6 +5,7 @@ import { Destroyable, Request, Requester, Response } from '@algolia/requester-co
import * as http from 'http';
import * as https from 'https';
import * as URL from 'url';
import * as zlib from 'zlib';

export type NodeHttpRequesterOptions = {
agent?: https.Agent | http.Agent;
@@ -33,6 +34,14 @@ export function createNodeHttpRequester({

const path = url.query === null ? url.pathname : `${url.pathname}?${url.query}`;

// Only gzip request bodies of at least COMPRESSION_THRESHOLD bytes, and only when
// the caller explicitly advertises gzip support via the accept-encoding header.
const COMPRESSION_THRESHOLD = 750;
const acceptEncoding = request.headers['accept-encoding'];
const shouldCompress =
request.data !== undefined &&
Buffer.byteLength(request.data) >= COMPRESSION_THRESHOLD &&
acceptEncoding !== undefined &&
acceptEncoding.toLowerCase().includes('gzip');

const options: https.RequestOptions = {
...requesterOptions,
agent: url.protocol === 'https:' ? httpsAgent : httpAgent,
@@ -42,36 +51,78 @@
headers: {
...(requesterOptions && requesterOptions.headers ? requesterOptions.headers : {}),
...request.headers,
...(shouldCompress ? { 'content-encoding': 'gzip' } : {}),
},
...(url.port !== undefined ? { port: url.port || '' } : {}),
};

// eslint-disable-next-line functional/no-let, prefer-const
let connectTimeout: NodeJS.Timeout;
// eslint-disable-next-line functional/no-let
let responseTimeout: NodeJS.Timeout | undefined;
// eslint-disable-next-line functional/no-let
let gunzip: zlib.Gunzip | undefined;

const cleanup = (): void => {
clearTimeout(connectTimeout);
clearTimeout(responseTimeout as NodeJS.Timeout);

if (gunzip) {
gunzip.destroy();
}
};

const onError = (error: Error): void => {
cleanup();
resolve({ status: 0, content: error.message, isTimedOut: false });
};

const req = (url.protocol === 'https:' ? https : http).request(options, response => {
const contentEncoding = response.headers['content-encoding'];
const isGzipResponse =
contentEncoding !== undefined && contentEncoding.toLowerCase().includes('gzip');

// eslint-disable-next-line functional/no-let
let contentBuffers: Buffer[] = [];

response.on('data', chunk => {
const onData = (chunk: Buffer): void => {
contentBuffers = contentBuffers.concat(chunk);
});
};

response.on('end', () => {
// eslint-disable-next-line @typescript-eslint/no-use-before-define
clearTimeout(connectTimeout);
// eslint-disable-next-line @typescript-eslint/no-use-before-define
clearTimeout(responseTimeout as NodeJS.Timeout);
const onEnd = (): void => {
cleanup();

resolve({
status: response.statusCode || 0,
content: Buffer.concat(contentBuffers).toString(),
isTimedOut: false,
});
});
};

response.on('error', onError);

// Stream gzip-encoded responses through gunzip before collecting chunks; plain responses are read directly.
if (isGzipResponse) {
gunzip = zlib.createGunzip();

response.pipe(gunzip);

gunzip.on('data', onData);
gunzip.on('end', onEnd);
gunzip.on('error', onError);
} else {
response.on('data', onData);
response.on('end', onEnd);
}
});

const createTimeout = (timeout: number, content: string): NodeJS.Timeout => {
return setTimeout(() => {
req.abort();

if (gunzip) {
gunzip.destroy();
}

resolve({
status: 0,
content,
@@ -80,27 +131,35 @@
}, timeout * 1000);
};

const connectTimeout = createTimeout(request.connectTimeout, 'Connection timeout');
connectTimeout = createTimeout(request.connectTimeout, 'Connection timeout');

// eslint-disable-next-line functional/no-let
let responseTimeout: NodeJS.Timeout | undefined;

req.on('error', error => {
clearTimeout(connectTimeout);
clearTimeout(responseTimeout as NodeJS.Timeout);
resolve({ status: 0, content: error.message, isTimedOut: false });
});
req.on('error', onError);

req.once('response', () => {
clearTimeout(connectTimeout);
responseTimeout = createTimeout(request.responseTimeout, 'Socket timeout');
});

if (request.data !== undefined) {
req.write(request.data);
}
// Gzip the body asynchronously, set content-length to the compressed size, and only end the request once the compressed body has been written.
if (request.data !== undefined && shouldCompress) {
zlib.gzip(request.data, (error, compressedBody) => {
if (error) {
onError(error);

req.end();
return;
}

req.setHeader('content-length', compressedBody.byteLength);
req.write(compressedBody);
req.end();
});
} else {
if (request.data !== undefined) {
req.setHeader('content-length', Buffer.byteLength(request.data));
req.write(request.data);
}

req.end();
}
});
},

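
For context beyond the diff, here is a minimal usage sketch. It is illustrative only: the endpoint URL and request body are made up, while the Request shape, the header-driven opt-in, and the response fields come from the code and tests above.

import { MethodEnum, Request } from '@algolia/requester-common';
import { createNodeHttpRequester } from '@algolia/requester-node-http';

const requester = createNodeHttpRequester();

const request: Request = {
  // Hypothetical endpoint; any HTTP(S) URL works the same way.
  url: 'https://example-dsn.algolia.net/1/indexes/products/query',
  method: MethodEnum.Post,
  headers: {
    // Opting in: bodies of 750+ bytes are gzipped, and gzip responses are decompressed.
    'accept-encoding': 'gzip',
    'content-type': 'application/json',
  },
  data: JSON.stringify({ query: 'phone', hitsPerPage: 100 }),
  connectTimeout: 2,
  responseTimeout: 30,
};

requester.send(request).then(response => {
  // content is already plain text here; isTimedOut distinguishes timeouts from other failures.
  console.log(response.status, response.isTimedOut, response.content.length);
});

Compression stays entirely opt-in: without an accept-encoding header the requester writes the body as-is and leaves the response untouched.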