rsnext/packages/next/server/response-cache.ts
Gerald Monaco 707afe1d4d
Add RenderResult (#27319)
Adds `RenderResult`, replacing the `string` that `renderToHTML` used to return, with an `Observable`-like API that callers can use to subscribe and get a callback when chunks are available to flush, etc.

This is the last architectural change needed for streaming. There are, however, other things currently standing in the way of streaming. For example, it is common to mutate `res` in `getServerSideProps` to do routing work, or write headers, before fetching page data. This pattern effectively nullifies any advantages of streaming. I may do a follow-up PR that adds an experimental alternative for applications not using React 18, but the main purpose of this support is to enable Suspense and Server Components.

For that reason, there's no actual streaming here yet: instead we just flush a single chunk. A follow-up PR will add support for streaming suspense boundaries in React 18.
2021-07-27 19:18:21 +00:00

119 lines
3.4 KiB
TypeScript

import { IncrementalCache } from './incremental-cache'
import { RenderResult, resultFromChunks, resultToChunks } from './utils'
/** A cached redirect response. */
interface CachedRedirectValue {
kind: 'REDIRECT'
// Redirect props as produced by the render pipeline — TODO confirm exact shape against callers.
props: Object
}
/** A cached rendered page: HTML plus the serialized page data. */
interface CachedPageValue {
kind: 'PAGE'
// Stored as a RenderResult in memory; persisted to the incremental cache as a joined string.
html: RenderResult
pageData: Object
}
/** Discriminated union of everything the response cache can hold. */
export type ResponseCacheValue = CachedRedirectValue | CachedPageValue
/** One cache entry: the value (or null) and an optional revalidation window. */
export type ResponseCacheEntry = {
// Seconds until revalidation, or `false` to never revalidate; `undefined` skips persisting.
revalidate?: number | false
value: ResponseCacheValue | null
}
/**
 * Produces a fresh cache entry. `hasResolved` is true when a (stale) cached
 * value has already been delivered to the caller.
 */
type ResponseGenerator = (
hasResolved: boolean
) => Promise<ResponseCacheEntry | null>
export default class ResponseCache {
  incrementalCache: IncrementalCache
  // In-flight (or just-resolved) responses, keyed by cache key, so concurrent
  // requests for the same key share one generator invocation.
  pendingResponses: Map<string, Promise<ResponseCacheEntry | null>>

  constructor(incrementalCache: IncrementalCache) {
    this.incrementalCache = incrementalCache
    this.pendingResponses = new Map()
  }

  /**
   * Returns the response for `key`, generating (and persisting) one via
   * `responseGenerator` when no fresh cached value exists.
   *
   * Concurrent calls with the same key share a single in-flight promise. A
   * stale cached value is resolved to the caller immediately while the
   * generator revalidates in the background.
   *
   * @param key cache key, or `null` to bypass the cache entirely
   * @param responseGenerator produces a fresh entry; receives `true` when a
   *   stale cached value has already been resolved to the caller
   * @returns the cache entry, or `null` when the generator produced none
   */
  public get(
    key: string | null,
    responseGenerator: ResponseGenerator
  ): Promise<ResponseCacheEntry | null> {
    const pendingResponse = key ? this.pendingResponses.get(key) : null
    if (pendingResponse) {
      return pendingResponse
    }

    let resolver: (cacheEntry: ResponseCacheEntry | null) => void = () => {}
    let rejecter: (error: Error) => void = () => {}
    const promise: Promise<ResponseCacheEntry | null> = new Promise(
      (resolve, reject) => {
        resolver = resolve
        rejecter = reject
      }
    )
    if (key) {
      this.pendingResponses.set(key, promise)
    }

    let resolved = false
    const resolve = (cacheEntry: ResponseCacheEntry | null) => {
      if (key) {
        // Ensure all reads from the cache get the latest value.
        this.pendingResponses.set(key, Promise.resolve(cacheEntry))
      }
      if (!resolved) {
        resolved = true
        resolver(cacheEntry)
      }
    }

    // We wait to do any async work until after we've added our promise to
    // `pendingResponses` to ensure that any other calls will reuse the
    // same promise until we've fully finished our work.
    ;(async () => {
      try {
        const cachedResponse = key ? await this.incrementalCache.get(key) : null
        if (cachedResponse) {
          resolve({
            revalidate: cachedResponse.curRevalidate,
            value:
              cachedResponse.value?.kind === 'PAGE'
                ? {
                    kind: 'PAGE',
                    // The persisted HTML is a plain string; rewrap it as a
                    // RenderResult for in-memory consumers.
                    html: resultFromChunks([cachedResponse.value.html]),
                    pageData: cachedResponse.value.pageData,
                  }
                : cachedResponse.value,
          })
          if (!cachedResponse.isStale) {
            // The cached value is still valid, so we don't need
            // to update it yet.
            return
          }
        }

        const cacheEntry = await responseGenerator(resolved)
        resolve(cacheEntry)

        if (key && cacheEntry && typeof cacheEntry.revalidate !== 'undefined') {
          await this.incrementalCache.set(
            key,
            cacheEntry.value?.kind === 'PAGE'
              ? {
                  kind: 'PAGE',
                  // Persist the RenderResult as a single joined string.
                  html: (await resultToChunks(cacheEntry.value.html)).join(''),
                  pageData: cacheEntry.value.pageData,
                }
              : cacheEntry.value,
            cacheEntry.revalidate
          )
        }
      } catch (err) {
        // `err` is `unknown` under strict `useUnknownInCatchVariables`;
        // normalize it so `rejecter` always receives a real Error.
        const error = err instanceof Error ? err : new Error(String(err))
        if (resolved) {
          // The promise already settled (e.g. with a stale cached value), so
          // rejecting would be a silent no-op. Surface background
          // revalidation failures instead of swallowing them.
          console.error(error)
        } else {
          rejecter(error)
        }
      } finally {
        if (key) {
          this.pendingResponses.delete(key)
        }
      }
    })()

    return promise
  }
}