feat: http caching #3562

Open · wants to merge 34 commits into base: main
Changes from 26 commits
Commits
f69a8b2
feat: http caching
flakey5 Aug 28, 2024
57c0f45
fixup! feat: http caching
flakey5 Sep 14, 2024
c88fb25
fixup! fixup! feat: http caching
flakey5 Sep 15, 2024
0a442b2
fixup! fixup! fixup! feat: http caching
flakey5 Sep 15, 2024
ae6edca
Update lib/handler/cache-handler.js
flakey5 Sep 15, 2024
26b2227
Apply suggestions from code review
flakey5 Sep 16, 2024
81cb021
fixup! fixup! fixup! fixup! feat: http caching
flakey5 Sep 16, 2024
6bff376
fixup! fixup! fixup! fixup! fixup! feat: http caching
flakey5 Sep 16, 2024
2edee29
clarify type for MemoryCacheStore
flakey5 Sep 17, 2024
f128e9a
Apply suggestions from code review
flakey5 Sep 17, 2024
807e764
Apply suggestions from code review
flakey5 Sep 17, 2024
4f8139a
tmp
flakey5 Sep 17, 2024
fabf558
fixup! tmp
flakey5 Sep 17, 2024
4546799
Apply suggestions from code review
flakey5 Sep 19, 2024
5a215d2
perf things, deleteByOrigin
flakey5 Sep 19, 2024
73564e8
incredibly messy and broken impl of streaming idea
flakey5 Sep 20, 2024
3a08528
Merge branch 'main' into flakey5/3231
metcoder95 Sep 20, 2024
cbe7b97
fix tests
flakey5 Sep 20, 2024
edc0772
check if the response is already cached again
flakey5 Sep 21, 2024
d7b24a4
backpressure patch
flakey5 Sep 21, 2024
0877f95
move body out of CacheStoreValue, remove size property
flakey5 Sep 22, 2024
e065a8e
Apply suggestions from code review
flakey5 Sep 24, 2024
230533a
add some comments on createWriteStream
flakey5 Sep 24, 2024
bcd7fa1
fix type tests, make staleAt and deleteAt absolute
flakey5 Sep 24, 2024
9ef03ef
empty the body when overwriting the response
flakey5 Sep 24, 2024
85a99d5
Merge branch 'main' into flakey5/3231
metcoder95 Sep 24, 2024
58839ee
update onError calls
flakey5 Sep 24, 2024
e49a32c
remove request deduplication for now
flakey5 Sep 25, 2024
6469aab
rename value -> opts, storedValue -> value
flakey5 Sep 25, 2024
969deb2
fix types
flakey5 Sep 25, 2024
3370f66
Apply suggestions from code review
flakey5 Sep 25, 2024
263718e
simplify parsing for qualified no-cache and private
flakey5 Sep 26, 2024
b5e483a
fix header omission, some cleanup
flakey5 Sep 26, 2024
7ea49d3
Merge branch 'main' into flakey5/3231
flakey5 Sep 29, 2024
7 changes: 6 additions & 1 deletion index.js
@@ -39,7 +39,12 @@ module.exports.RedirectHandler = RedirectHandler
module.exports.interceptors = {
redirect: require('./lib/interceptor/redirect'),
retry: require('./lib/interceptor/retry'),
dump: require('./lib/interceptor/dump')
dump: require('./lib/interceptor/dump'),
cache: require('./lib/interceptor/cache')
}

module.exports.cacheStores = {
MemoryCacheStore: require('./lib/cache/memory-cache-store')
}

module.exports.buildConnector = buildConnector
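Not shown in the diff itself, but for orientation: a rough sketch of how these new exports might be wired together by a consumer. MemoryCacheStore and its maxEntries/maxEntrySize options come from the new file below; passing the store to the interceptor as interceptors.cache({ store }) is an assumption about the interceptor's options, which this hunk does not show.

'use strict'

const { Agent, interceptors, cacheStores, setGlobalDispatcher, request } = require('undici')

// maxEntries and maxEntrySize are real MemoryCacheStore options (see the new
// file below); the `store` option passed to interceptors.cache() is a guess.
const store = new cacheStores.MemoryCacheStore({
  maxEntries: 1000,
  maxEntrySize: 1024 * 1024 // bytes per cached response body
})

const agent = new Agent().compose(interceptors.cache({ store }))
setGlobalDispatcher(agent)

// While a cached response is fresh, repeated identical GETs can be answered
// from the store instead of going back to the origin.
request('https://example.com/')
  .then(({ statusCode }) => console.log('got', statusCode))
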
384 changes: 384 additions & 0 deletions lib/cache/memory-cache-store.js
@@ -0,0 +1,384 @@
'use strict'

const EventEmitter = require('node:events')
const { Writable, Readable } = require('node:stream')

/**
 * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
 *
 * @typedef {{
 *  complete: boolean
 *  value: import('../../types/cache-interceptor.d.ts').default.CacheStoreValue
 *  body: Buffer[]
 *  emitter: EventEmitter
 * }} MemoryStoreValue
 */

/**
 * @implements {CacheStore}
 */
class MemoryCacheStore {
#maxEntries = Infinity

#maxEntrySize = Infinity

#entryCount = 0

/**
* @type {Map<string, Map<string, MemoryStoreValue>>}
*/
#data = new Map()

/**
* @param {import('../../types/cache-interceptor.d.ts').default.MemoryCacheStoreOpts | undefined} opts
*/
constructor (opts) {
if (opts) {
if (typeof opts !== 'object') {
throw new TypeError('MemoryCacheStore options must be an object')
}

if (opts.maxEntries !== undefined) {
if (
typeof opts.maxEntries !== 'number' ||
!Number.isInteger(opts.maxEntries) ||
opts.maxEntries < 0
) {
throw new TypeError('MemoryCacheStore options.maxEntries must be a non-negative integer')
}
this.#maxEntries = opts.maxEntries
}

if (opts.maxEntrySize !== undefined) {
if (
typeof opts.maxEntrySize !== 'number' ||
!Number.isInteger(opts.maxEntrySize) ||
opts.maxEntrySize < 0
) {
throw new TypeError('MemoryCacheStore options.maxEntrySize must be a non-negative integer')
}
this.#maxEntrySize = opts.maxEntrySize
}
}
}

get isFull () {
return this.#entryCount >= this.#maxEntries
}

createReadStream (req) {
if (typeof req !== 'object') {
throw new TypeError(`expected req to be object, got ${typeof req}`)
}

const values = this.#getValuesForRequest(req, false)
if (!values) {
return undefined
}

const value = this.#findValue(req, values)

return value ? new MemoryStoreReadableStream(value) : undefined
}

createWriteStream (req, value) {
if (typeof req !== 'object') {
throw new TypeError(`expected req to be object, got ${typeof req}`)
}
if (typeof value !== 'object') {
throw new TypeError(`expected value to be object, got ${typeof value}`)
}

if (this.isFull) {
return undefined
}

const values = this.#getValuesForRequest(req, true)

let storedValue = this.#findValue(req, values)
if (!storedValue) {
// The value doesn't already exist, meaning we haven't cached this
// response before. Let's assign it a value and insert it into our data
// property.

if (this.#entryCount >= this.#maxEntries) {
// Or not, we don't have space to add another response
return undefined
}

this.#entryCount++

// TODO better name for this
storedValue = {
complete: false,
value,
body: [],
emitter: new EventEmitter()
}

      // We want to sort our responses in descending order by their deleteAt
      // timestamps so that deleting expired responses is faster
      if (
        values.length === 0 ||
        value.deleteAt < values[values.length - 1].value.deleteAt
      ) {
        // Our value is either the only response for this path or our deleteAt
        // time is sooner than all of the other responses
        values.push(storedValue)
      } else if (value.deleteAt >= values[0].value.deleteAt) {
        // Our deleteAt is later than everyone else's
        values.unshift(storedValue)
      } else {
        // We're neither at the front nor the end, so binary search for the
        // spot we need to be in
        let startIndex = 0
        let endIndex = values.length
        while (true) {
          if (startIndex === endIndex) {
            values.splice(startIndex, 0, storedValue)
            break
          }

          const middleIndex = Math.floor((startIndex + endIndex) / 2)
          const middleValue = values[middleIndex]
          if (value.deleteAt === middleValue.value.deleteAt) {
            values.splice(middleIndex, 0, storedValue)
            break
          } else if (value.deleteAt > middleValue.value.deleteAt) {
            endIndex = middleIndex
            continue
          } else {
            startIndex = middleIndex + 1
            continue
          }
        }
      }
} else {
// Empty it so we can overwrite it
storedValue.body = []
}

const writable = new MemoryStoreWritableStream(
storedValue,
this.#maxEntrySize
)

// Remove the value if there was some error
    writable.on('error', () => {
      const index = values.indexOf(storedValue)
      if (index !== -1) {
        values.splice(index, 1)
      }
    })

    return writable
  }

/**
* @param {string} origin
*/
deleteByOrigin (origin) {
this.#data.delete(origin)
}

  /**
   * Gets all of the requests of the same origin, path, and method. Does not
   * take the `vary` property into account.
   * @param {import('../../types/dispatcher.d.ts').default.RequestOptions} req
   * @param {boolean} makeIfDoesntExist
   * @returns {MemoryStoreValue[] | undefined}
   */
  #getValuesForRequest (req, makeIfDoesntExist) {
// https://www.rfc-editor.org/rfc/rfc9111.html#section-2-3
let cachedPaths = this.#data.get(req.origin)
if (!cachedPaths) {
if (!makeIfDoesntExist) {
return undefined
}

cachedPaths = new Map()
this.#data.set(req.origin, cachedPaths)
}

let values = cachedPaths.get(`${req.path}:${req.method}`)
if (!values && makeIfDoesntExist) {
values = []
cachedPaths.set(`${req.path}:${req.method}`, values)
}

return values
}

/**
* Given a list of values of a certain request, this decides the best value
* to respond with.
* @param {import('../../types/dispatcher.d.ts').default.RequestOptions} req
* @param {MemoryStoreValue[]} values
* @returns {MemoryStoreValue | undefined}
*/
#findValue (req, values) {
/**
* @type {MemoryStoreValue}
*/
let value
const now = Date.now()
for (let i = values.length - 1; i >= 0; i--) {
const current = values[i]
const currentCacheValue = current.value
if (now >= currentCacheValue.deleteAt) {
// We've reached expired values, let's delete them
this.#entryCount -= values.length - i
values.length = i
break
}

let matches = true

if (currentCacheValue.vary) {
if (!req.headers) {
matches = false
break
}

for (const key in currentCacheValue.vary) {
if (currentCacheValue.vary[key] !== req.headers[key]) {
matches = false
break
}
}
}

if (matches) {
value = current
break
}
}

return value
}
}

class MemoryStoreReadableStream extends Readable {
/**
* @type {MemoryStoreValue}
*/
#value
/**
* @type {Buffer[]}
*/
#chunksToSend = []

/**
* @param {MemoryStoreValue} value
*/
constructor (value) {
super()

this.#value = value

if (value.complete) {
this.#chunksToSend = [...this.#value.body, null]
} else {
      value.emitter.on('body', () => {
        this.#chunksToSend = [...this.#value.body, null]
        // The stream won't call _read again on its own if an earlier call
        // returned without pushing anything, so flush the queued chunks now
        this._read(this.#chunksToSend.length)
      })

      value.emitter.on('error', () => {
        this.#chunksToSend.push(null)
        this._read(1)
      })
}
}

get value () {
return this.#value.value
}

/**
* @param {number} size
*/
_read (size) {
if (this.#chunksToSend.length === 0) {
return
}

if (size > this.#chunksToSend.length) {
size = this.#chunksToSend.length
}

for (let i = 0; i < size; i++) {
this.push(this.#chunksToSend.shift())
}
}
}

class MemoryStoreWritableStream extends Writable {
/**
* @type {MemoryStoreValue}
*/
#value
#currentSize = 0
#maxEntrySize = 0
/**
* @type {Buffer[]}
*/
#body = []

  /**
   * @param {MemoryStoreValue} value
   * @param {number} maxEntrySize
   */
constructor (value, maxEntrySize) {
super()
this.#value = value
this.#maxEntrySize = maxEntrySize
}

get rawTrailers () {
return this.#value.value.rawTrailers
}

/**
* @param {Buffer[] | undefined} trailers
*/
set rawTrailers (trailers) {
this.#value.value.rawTrailers = trailers
}

  /**
   * @param {Buffer | string} chunk
   * @param {string} encoding
   * @param {() => void} callback
   */
_write (chunk, encoding, callback) {
if (typeof chunk === 'string') {
chunk = Buffer.from(chunk, encoding)
}

this.#currentSize += chunk.byteLength
if (this.#currentSize < this.#maxEntrySize) {
this.#body.push(chunk)
} else {
this.#body = null // release memory as early as possible
}

callback()
}

/**
* @param {() => void} callback
*/
_final (callback) {
if (this.#currentSize < this.#maxEntrySize) {
this.#value.complete = true
this.#value.body = this.#body

this.#value.emitter.emit('body')
}

callback()
}

/**
* @param {Error} err
* @param {() => void} callback
*/
_destroy (err, callback) {
if (err) {
this.#value.emitter.emit('error', err)
}
callback(err)
}
}

module.exports = MemoryCacheStore
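
Also not part of the diff: a minimal sketch of driving MemoryCacheStore by hand, using only the surface shown in this file. The full CacheStoreValue shape lives in types/cache-interceptor.d.ts and is not reproduced here; only the fields this store actually reads (deleteAt, vary, rawTrailers) plus staleAt are filled in, and the request/URL values are made up.

'use strict'

// Path is relative to the undici repo root; adjust for your own layout.
const MemoryCacheStore = require('./lib/cache/memory-cache-store')

const store = new MemoryCacheStore({ maxEntries: 100, maxEntrySize: 64 * 1024 })

const req = {
  origin: 'https://example.com',
  path: '/cached',
  method: 'GET',
  headers: { accept: 'application/json' }
}

const value = {
  vary: { accept: 'application/json' }, // cached response varies on the accept header
  rawTrailers: undefined,
  staleAt: Date.now() + 60_000, // absolute timestamps, per the commits above
  deleteAt: Date.now() + 120_000
}

// Write a response body into the store. createWriteStream returns undefined
// when the store is already full.
const writable = store.createWriteStream(req, value)
writable.end('{"hello":"world"}')

writable.on('finish', () => {
  // Read it back; undefined means a cache miss (expired, evicted, or a
  // vary-header mismatch).
  const readable = store.createReadStream(req)
  if (readable) {
    readable.on('data', (chunk) => process.stdout.write(chunk))
  }
})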