2019-04-10 21:19:50 +02:00
|
|
|
import crypto from 'crypto'
|
|
|
|
import findUp from 'find-up'
|
2019-04-10 05:15:35 +02:00
|
|
|
import fs from 'fs'
|
2019-04-10 18:22:10 +02:00
|
|
|
import path from 'path'
|
2019-04-10 21:19:50 +02:00
|
|
|
import { promisify } from 'util'
|
2019-04-10 18:22:10 +02:00
|
|
|
|
2019-04-10 05:15:35 +02:00
|
|
|
import { recursiveReadDir } from '../lib/recursive-readdir'
|
|
|
|
|
2019-04-10 21:19:50 +02:00
|
|
|
// Promise-returning wrappers around the callback-style fs APIs.
// NOTE(review): fs.exists has a nonstandard callback signature, but Node's
// util.promisify special-cases it (util.promisify.custom), so fsExists
// resolves to a boolean rather than rejecting — confirm minimum Node version.
const fsExists = promisify(fs.exists)
const fsReadFile = promisify(fs.readFile)
|
|
|
|
|
2019-04-10 05:15:35 +02:00
|
|
|
export function collectPages(
|
|
|
|
directory: string,
|
|
|
|
pageExtensions: string[]
|
|
|
|
): Promise<string[]> {
|
|
|
|
return recursiveReadDir(
|
|
|
|
directory,
|
|
|
|
new RegExp(`\\.(?:${pageExtensions.join('|')})$`)
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2019-04-24 10:48:43 +02:00
|
|
|
export function printTreeView(list: string[], ampPages: Set<string>) {
|
2019-04-10 05:15:35 +02:00
|
|
|
list
|
|
|
|
.sort((a, b) => (a > b ? 1 : -1))
|
|
|
|
.forEach((item, i) => {
|
|
|
|
const corner =
|
|
|
|
i === 0
|
|
|
|
? list.length === 1
|
|
|
|
? '─'
|
|
|
|
: '┌'
|
|
|
|
: i === list.length - 1
|
|
|
|
? '└'
|
|
|
|
: '├'
|
2019-04-24 10:48:43 +02:00
|
|
|
console.log(` \x1b[90m${corner}\x1b[39m ${item}${ampPages.has(item) ? ' (AMP)' : ''}`)
|
2019-04-10 05:15:35 +02:00
|
|
|
})
|
|
|
|
|
|
|
|
console.log()
|
|
|
|
}
|
|
|
|
|
|
|
|
function flatten<T>(arr: T[][]): T[] {
|
|
|
|
return arr.reduce((acc, val) => acc.concat(val), [] as T[])
|
|
|
|
}
|
|
|
|
|
|
|
|
function getPossibleFiles(pageExtensions: string[], pages: string[]) {
|
|
|
|
const res = pages.map(page =>
|
2019-04-10 18:22:10 +02:00
|
|
|
pageExtensions
|
|
|
|
.map(e => `${page}.${e}`)
|
2019-04-10 05:15:35 +02:00
|
|
|
.concat(pageExtensions.map(e => `${path.join(page, 'index')}.${e}`))
|
2019-04-10 18:22:10 +02:00
|
|
|
.concat(page)
|
2019-04-10 05:15:35 +02:00
|
|
|
)
|
|
|
|
return flatten<string>(res)
|
|
|
|
}
|
|
|
|
|
2019-04-10 18:37:30 +02:00
|
|
|
export async function getFileForPage({
|
|
|
|
page,
|
|
|
|
pagesDirectory,
|
|
|
|
pageExtensions,
|
|
|
|
}: {
|
|
|
|
page: string
|
|
|
|
pagesDirectory: string
|
|
|
|
pageExtensions: string[]
|
|
|
|
}) {
|
|
|
|
const theFile = getPossibleFiles(pageExtensions, [
|
|
|
|
path.join(pagesDirectory, page),
|
|
|
|
]).find(f => fs.existsSync(f) && fs.lstatSync(f).isFile())
|
|
|
|
if (theFile) {
|
|
|
|
return path.sep + path.relative(pagesDirectory, theFile)
|
|
|
|
}
|
|
|
|
return theFile
|
|
|
|
}
|
|
|
|
|
2019-04-10 05:15:35 +02:00
|
|
|
export async function getSpecifiedPages(
|
|
|
|
dir: string,
|
|
|
|
pagesString: string,
|
|
|
|
pageExtensions: string[]
|
|
|
|
) {
|
|
|
|
const pagesDir = path.join(dir, 'pages')
|
|
|
|
|
|
|
|
const reservedPages = ['/_app', '/_document', '/_error']
|
|
|
|
|
|
|
|
const explodedPages = [
|
|
|
|
...new Set([...pagesString.split(','), ...reservedPages]),
|
|
|
|
].map(p => {
|
|
|
|
let resolvedPage: string | undefined
|
|
|
|
if (path.isAbsolute(p)) {
|
|
|
|
resolvedPage = getPossibleFiles(pageExtensions, [
|
|
|
|
path.join(pagesDir, p),
|
|
|
|
p,
|
|
|
|
]).find(f => fs.existsSync(f) && fs.lstatSync(f).isFile())
|
|
|
|
} else {
|
|
|
|
resolvedPage = getPossibleFiles(pageExtensions, [
|
|
|
|
path.join(pagesDir, p),
|
|
|
|
path.join(dir, p),
|
|
|
|
]).find(f => fs.existsSync(f) && fs.lstatSync(f).isFile())
|
|
|
|
}
|
|
|
|
return { original: p, resolved: resolvedPage || null }
|
|
|
|
})
|
|
|
|
|
|
|
|
const missingPage = explodedPages.find(
|
|
|
|
({ original, resolved }) => !resolved && !reservedPages.includes(original)
|
|
|
|
)
|
|
|
|
if (missingPage) {
|
|
|
|
throw new Error(`Unable to identify page: ${missingPage.original}`)
|
|
|
|
}
|
|
|
|
|
|
|
|
const resolvedPagePaths = explodedPages
|
|
|
|
.filter(page => page.resolved)
|
|
|
|
.map(page => '/' + path.relative(pagesDir, page.resolved!))
|
|
|
|
return resolvedPagePaths.sort()
|
|
|
|
}
|
2019-04-10 21:19:50 +02:00
|
|
|
|
|
|
|
export async function getCacheIdentifier({
|
|
|
|
pagesDirectory,
|
|
|
|
env = {},
|
|
|
|
}: {
|
|
|
|
pagesDirectory: string
|
|
|
|
env?: any
|
|
|
|
}) {
|
|
|
|
let selectivePageBuildingCacheIdentifier = ''
|
|
|
|
|
|
|
|
const envObject = env
|
|
|
|
? Object.keys(env)
|
|
|
|
.sort()
|
|
|
|
// eslint-disable-next-line
|
|
|
|
.reduce((a, c) => ((a[c] = env[c]), a), {} as any)
|
|
|
|
: {}
|
|
|
|
|
|
|
|
selectivePageBuildingCacheIdentifier += JSON.stringify(envObject)
|
|
|
|
|
|
|
|
const pkgPath = await findUp('package.json', { cwd: pagesDirectory })
|
|
|
|
if (pkgPath) {
|
|
|
|
const yarnLock = path.join(path.dirname(pkgPath), 'yarn.lock')
|
|
|
|
const packageLock = path.join(path.dirname(pkgPath), 'package-lock.json')
|
|
|
|
|
|
|
|
if (await fsExists(yarnLock)) {
|
|
|
|
selectivePageBuildingCacheIdentifier += await fsReadFile(yarnLock, 'utf8')
|
|
|
|
} else if (await fsExists(packageLock)) {
|
|
|
|
selectivePageBuildingCacheIdentifier += await fsReadFile(
|
|
|
|
packageLock,
|
|
|
|
'utf8'
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
selectivePageBuildingCacheIdentifier += JSON.stringify(require(pkgPath))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return crypto
|
|
|
|
.createHash('sha1')
|
|
|
|
.update(selectivePageBuildingCacheIdentifier)
|
|
|
|
.digest('hex')
|
|
|
|
}
|