
feat(compress): improve compress middleware (#3317)

* improve compression middleware

* run lint & format

* improve test

* refactor

* fix setting headers

* fix ctx.res setter
add tests

* revert change

* minor

* remove workaround and simplify

* remove diff

* update test

* improvements
Authored by nitedani on 2024-08-28 10:03:44 +02:00; committed by GitHub
parent f9349ecca2
commit 12893e26ea
2 changed files with 207 additions and 79 deletions

View File

@@ -1,92 +1,187 @@
import { compress } from '.'
import { stream } from '../../helper/streaming'
import { Hono } from '../../hono'
describe('Compress Middleware', () => {
const app = new Hono()
// Apply compress middleware to all routes
app.use('*', compress())
// Test routes
app.get('/small', (c) => {
c.header('Content-Type', 'text/plain')
c.header('Content-Length', '5')
return c.text('small')
})
app.get('/large', (c) => {
c.header('Content-Type', 'text/plain')
c.header('Content-Length', '1024')
return c.text('a'.repeat(1024))
})
app.get('/small-json', (c) => {
c.header('Content-Type', 'application/json')
c.header('Content-Length', '26')
return c.json({ message: 'Hello, World!' })
})
app.get('/large-json', (c) => {
c.header('Content-Type', 'application/json')
c.header('Content-Length', '1024')
return c.json({ data: 'a'.repeat(1024), message: 'Large JSON' })
})
app.get('/no-transform', (c) => {
c.header('Content-Type', 'text/plain')
c.header('Content-Length', '1024')
c.header('Cache-Control', 'no-transform')
return c.text('a'.repeat(1024))
})
app.get('/jpeg-image', (c) => {
c.header('Content-Type', 'image/jpeg')
c.header('Content-Length', '1024')
return c.body(new Uint8Array(1024)) // Simulated JPEG data
})
app.get('/already-compressed', (c) => {
c.header('Content-Type', 'application/octet-stream')
c.header('Content-Encoding', 'br')
c.header('Content-Length', '1024')
return c.body(new Uint8Array(1024)) // Simulated compressed data
})
app.get('/stream', (c) =>
stream(c, async (stream) => {
c.header('Content-Type', 'text/plain')
// 60000 bytes
for (let i = 0; i < 10000; i++) {
await stream.write('chunk ')
}
})
)
app.get('/already-compressed-stream', (c) =>
stream(c, async (stream) => {
c.header('Content-Type', 'text/plain')
c.header('Content-Encoding', 'br')
// 60000 bytes
for (let i = 0; i < 10000; i++) {
await stream.write(new Uint8Array([0, 1, 2, 3, 4, 5])) // Simulated compressed data
}
})
)
app.notFound((c) => c.text('Custom NotFound', 404))
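// Helper: request the given path with the supplied Accept-Encoding and assert the resulting Content-Encoding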
const testCompression = async (
path: string,
acceptEncoding: string,
expectedEncoding: string | null
) => {
const req = new Request(`http://localhost${path}`, {
method: 'GET',
headers: new Headers({ 'Accept-Encoding': acceptEncoding }),
})
const res = await app.request(req)
expect(res).not.toBeNull()
expect(res.headers.get('Content-Encoding')).toBe(expectedEncoding)
return res
}
describe('Compression Behavior', () => {
it('should compress large responses with gzip', async () => {
const res = await testCompression('/large', 'gzip', 'gzip')
expect(res.headers.get('Content-Length')).toBeNull()
expect((await res.arrayBuffer()).byteLength).toBeLessThan(1024)
})
it('should compress large responses with deflate', async () => {
const res = await testCompression('/large', 'deflate', 'deflate')
expect((await res.arrayBuffer()).byteLength).toBeLessThan(1024)
})
it('should prioritize gzip over deflate when both are accepted', async () => {
await testCompression('/large', 'gzip, deflate', 'gzip')
})
it('should not compress small responses', async () => {
const res = await testCompression('/small', 'gzip, deflate', null)
expect(res.headers.get('Content-Length')).toBe('5')
})
it('should not compress when no Accept-Encoding is provided', async () => {
await testCompression('/large', '', null)
})
it('should not compress images', async () => {
const res = await testCompression('/jpeg-image', 'gzip', null)
expect(res.headers.get('Content-Type')).toBe('image/jpeg')
expect(res.headers.get('Content-Length')).toBe('1024')
})
it('should not compress already compressed responses', async () => {
const res = await testCompression('/already-compressed', 'gzip', 'br')
expect(res.headers.get('Content-Length')).toBe('1024')
})
it('should remove Content-Length when compressing', async () => {
const res = await testCompression('/large', 'gzip', 'gzip')
expect(res.headers.get('Content-Length')).toBeNull()
})
it('should not remove Content-Length when not compressing', async () => {
const res = await testCompression('/jpeg-image', 'gzip', null)
expect(res.headers.get('Content-Length')).toBeDefined()
})
})
describe('JSON Handling', () => {
it('should not compress small JSON responses', async () => {
const res = await testCompression('/small-json', 'gzip', null)
expect(res.headers.get('Content-Length')).toBe('26')
})
it('should compress large JSON responses', async () => {
const res = await testCompression('/large-json', 'gzip', 'gzip')
expect(res.headers.get('Content-Length')).toBeNull()
const decompressed = await decompressResponse(res)
const json = JSON.parse(decompressed)
expect(json.data.length).toBe(1024)
expect(json.message).toBe('Large JSON')
})
})
describe('Streaming Responses', () => {
it('should compress streaming responses written in multiple chunks', async () => {
const res = await testCompression('/stream', 'gzip', 'gzip')
const decompressed = await decompressResponse(res)
expect(decompressed.length).toBe(60000)
})
it('should not compress already compressed streaming responses', async () => {
const res = await testCompression('/already-compressed-stream', 'gzip', 'br')
expect((await res.arrayBuffer()).byteLength).toBe(60000)
})
})
describe('Edge Cases', () => {
it('should not compress responses with Cache-Control: no-transform', async () => {
await testCompression('/no-transform', 'gzip', null)
})
it('should handle HEAD requests without compression', async () => {
const req = new Request('http://localhost/large', {
method: 'HEAD',
headers: new Headers({ 'Accept-Encoding': 'gzip' }),
})
const res = await app.request(req)
expect(res).not.toBeNull()
expect(res.headers.get('Content-Encoding')).toBeNull()
})
it('should compress custom 404 Not Found responses', async () => {
const res = await testCompression('/not-found', 'gzip', 'gzip')
expect(res.status).toBe(404)
const decompressed = await decompressResponse(res)
expect(decompressed).toBe('Custom NotFound')
})
})
})
async function decompressResponse(res: Response): Promise<string> {
const decompressedStream = res.body!.pipeThrough(new DecompressionStream('gzip'))
const decompressedResponse = new Response(decompressedStream)
return await decompressedResponse.text()
}

View File

@@ -6,9 +6,13 @@
import type { MiddlewareHandler } from '../../types'
const ENCODING_TYPES = ['gzip', 'deflate'] as const
const cacheControlNoTransformRegExp = /(?:^|,)\s*?no-transform\s*?(?:,|$)/i
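// Content-Type values considered compressible: text/*, common application/* types (JSON, JavaScript, XML, wasm, ...), a few font/image/message/model types, and any type with a +json/+text/+xml/+yaml suffix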
const compressibleContentTypeRegExp =
/^\s*(?:text\/[^;\s]+|application\/(?:javascript|json|xml|xml-dtd|ecmascript|dart|postscript|rtf|tar|toml|vnd\.dart|vnd\.ms-fontobject|vnd\.ms-opentype|wasm|x-httpd-php|x-javascript|x-ns-proxy-autoconfig|x-sh|x-tar|x-virtualbox-hdd|x-virtualbox-ova|x-virtualbox-ovf|x-virtualbox-vbox|x-virtualbox-vdi|x-virtualbox-vhd|x-virtualbox-vmdk|x-www-form-urlencoded)|font\/(?:otf|ttf)|image\/(?:bmp|vnd\.adobe\.photoshop|vnd\.microsoft\.icon|vnd\.ms-dds|x-icon|x-ms-bmp)|message\/rfc822|model\/gltf-binary|x-shader\/x-fragment|x-shader\/x-vertex|[^;\s]+?\+(?:json|text|xml|yaml))(?:[;\s]|$)/i
interface CompressionOptions {
encoding?: (typeof ENCODING_TYPES)[number]
threshold?: number
}
/**
@@ -18,6 +22,7 @@ interface CompressionOptions {
*
* @param {CompressionOptions} [options] - The options for the compress middleware.
* @param {'gzip' | 'deflate'} [options.encoding] - The compression scheme to allow for response compression. Either 'gzip' or 'deflate'. If not defined, both are allowed and will be used based on the Accept-Encoding header. 'gzip' is prioritized if this option is not provided and the client provides both in the Accept-Encoding header.
* @param {number} [options.threshold=1024] - The minimum size in bytes to compress. Defaults to 1024 bytes.
* @returns {MiddlewareHandler} The middleware handler function.
*
* @example
@@ -28,19 +33,47 @@ interface CompressionOptions {
* ```
*/
export const compress = (options?: CompressionOptions): MiddlewareHandler => {
const threshold = options?.threshold ?? 1024
return async function compress(ctx, next) {
await next()
const contentLength = ctx.res.headers.get('Content-Length')
// Check if response should be compressed
if (
ctx.res.headers.has('Content-Encoding') || // already encoded
ctx.req.method === 'HEAD' || // HEAD request
(contentLength && Number(contentLength) < threshold) || // content-length below threshold
!shouldCompress(ctx.res) || // not compressible type
!shouldTransform(ctx.res) // cache-control: no-transform
) {
return
}
const accepted = ctx.req.header('Accept-Encoding')
const encoding =
options?.encoding ?? ENCODING_TYPES.find((encoding) => accepted?.includes(encoding))
if (!encoding || !ctx.res.body) {
return
}
// Compress the response
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const stream = new CompressionStream(encoding)
ctx.res = new Response(ctx.res.body.pipeThrough(stream), ctx.res)
ctx.res.headers.delete('Content-Length')
ctx.res.headers.set('Content-Encoding', encoding)
}
}
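// Compress only when the response Content-Type matches the compressible list above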
const shouldCompress = (res: Response) => {
const type = res.headers.get('Content-Type')
return type && compressibleContentTypeRegExp.test(type)
}
const shouldTransform = (res: Response) => {
const cacheControl = res.headers.get('Cache-Control')
// Don't compress for Cache-Control: no-transform
// https://tools.ietf.org/html/rfc7234#section-5.2.2.4
return !cacheControl || !cacheControlNoTransformRegExp.test(cacheControl)
}
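
For reference, a minimal usage sketch of the middleware with the options touched by this commit; the 'hono' and 'hono/compress' import specifiers and the example route are illustrative assumptions, not part of the diff:

import { Hono } from 'hono'
import { compress } from 'hono/compress'

const app = new Hono()

// Compress responses of compressible types larger than the 1024-byte default threshold,
// preferring gzip when the client accepts both gzip and deflate.
app.use('*', compress({ threshold: 1024 }))

// Large text response: compressed. Small or already-encoded responses pass through untouched.
app.get('/report', (c) => c.text('a'.repeat(4096)))

export default app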