Support multiple pages directories for linting (#25565)

Monorepos may contain multiple Next.js apps, but linting runs at the top level, so every app's pages directory must be declared for the rule to see it.

Declaring multiple directories via an array lets the rule load all of them and build a complete list of potential URLs.
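
For example, a root-level ESLint config in a monorepo could pass the rule an array of pages directories. This is only a sketch: the `apps/web` / `apps/docs` paths are made up, and the `@next/next/` rule prefix assumes the rule is consumed through the Next.js ESLint plugin.

```js
// .eslintrc.js at the monorepo root (illustrative paths)
module.exports = {
  extends: ['next'],
  rules: {
    '@next/next/no-html-link-for-pages': [
      'error',
      // Previously only a single string path was accepted;
      // an array of pages directories is now allowed as well.
      ['apps/web/pages', 'apps/docs/pages'],
    ],
  },
}
```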

Updated the rule schema and tests. Also cached some of the `fs.*Sync` calls, which can otherwise add up to a lot of blocking lookups.
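
The optimization is plain module-level memoization of the blocking `fs` lookups. A minimal sketch of the pattern (the diff below applies the same idea to `existsSync`, `lstatSync`, and `readdirSync`):

```js
const fs = require('fs')

// Cache blocking fs lookups so the same path is only checked once
// per lint run instead of hitting the filesystem repeatedly.
const fsExistsSyncCache = {}

function cachedExistsSync(dir) {
  if (fsExistsSyncCache[dir] === undefined) {
    fsExistsSyncCache[dir] = fs.existsSync(dir)
  }
  return fsExistsSyncCache[dir]
}
```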

## Feature

- [ ] Implements an existing feature request or RFC. Make sure the feature request has been accepted for implementation before opening a PR.
- [ ] Related issues linked using `fixes #number`
- [x] Integration tests added
- [x] Documentation added
- [ ] Telemetry added. In case of a feature if it's used or not.

Closes: https://github.com/vercel/next.js/pull/27223
JacobLey 2021-07-20 17:29:54 -04:00 committed by GitHub
parent 7b0731af0c
commit 527cb97b56
3 changed files with 85 additions and 17 deletions


@@ -1,7 +1,7 @@
const path = require('path')
const fs = require('fs')
const {
getUrlFromPagesDirectory,
getUrlFromPagesDirectories,
normalizeURL,
execOnce,
} = require('../utils/url')
@@ -13,6 +13,10 @@ const pagesDirWarning = execOnce((pagesDirs) => {
)
})
// Cache for fs.existsSync lookup.
// Prevent multiple blocking IO requests that have already been calculated.
const fsExistsSyncCache = {}
module.exports = {
meta: {
docs: {
@@ -21,24 +25,44 @@ module.exports = {
recommended: true,
},
fixable: null, // or "code" or "whitespace"
schema: ['pagesDirectory'],
schema: [
{
oneOf: [
{
type: 'string',
},
{
type: 'array',
uniqueItems: true,
items: {
type: 'string',
},
},
],
},
],
},
create: function (context) {
const [customPagesDirectory] = context.options
const pagesDirs = customPagesDirectory
? [customPagesDirectory]
? [customPagesDirectory].flat()
: [
path.join(context.getCwd(), 'pages'),
path.join(context.getCwd(), 'src', 'pages'),
]
const pagesDir = pagesDirs.find((dir) => fs.existsSync(dir))
if (!pagesDir) {
const foundPagesDirs = pagesDirs.filter((dir) => {
if (fsExistsSyncCache[dir] === undefined) {
fsExistsSyncCache[dir] = fs.existsSync(dir)
}
return fsExistsSyncCache[dir]
})
if (foundPagesDirs.length === 0) {
pagesDirWarning(pagesDirs)
return {}
}
const urls = getUrlFromPagesDirectory('/', pagesDir)
const urls = getUrlFromPagesDirectories('/', foundPagesDirs)
return {
JSXOpeningElement(node) {
if (node.name.name !== 'a') {


@@ -1,12 +1,20 @@
const fs = require('fs')
const path = require('path')
// Cache for fs.lstatSync lookup.
// Prevent multiple blocking IO requests that have already been calculated.
const fsLstatSyncCache = {}
const fsLstatSync = (source) => {
fsLstatSyncCache[source] = fsLstatSyncCache[source] || fs.lstatSync(source)
return fsLstatSyncCache[source]
}
/**
* Checks if the source is a directory.
* @param {string} source
*/
function isDirectory(source) {
return fs.lstatSync(source).isDirectory()
return fsLstatSync(source).isDirectory()
}
/**
@@ -14,30 +22,43 @@ function isDirectory(source) {
* @param {string} source
*/
function isSymlink(source) {
return fs.lstatSync(source).isSymbolicLink()
return fsLstatSync(source).isSymbolicLink()
}
/**
* Gets the possible URLs from a directory.
* @param {string} urlprefix
* @param {string} directory
* @param {string[]} directories
*/
function getUrlFromPagesDirectory(urlPrefix, directory) {
return parseUrlForPages(urlPrefix, directory).map(
// Since the URLs are normalized we add `^` and `$` to the RegExp to make sure they match exactly.
(url) => new RegExp(`^${normalizeURL(url)}$`)
)
function getUrlFromPagesDirectories(urlPrefix, directories) {
return Array.from(
// De-duplicate similar pages across multiple directories.
new Set(
directories
.map((directory) => parseUrlForPages(urlPrefix, directory))
.flat()
.map(
// Since the URLs are normalized we add `^` and `$` to the RegExp to make sure they match exactly.
(url) => `^${normalizeURL(url)}$`
)
)
).map((urlReg) => new RegExp(urlReg))
}
// Cache for fs.readdirSync lookup.
// Prevent multiple blocking IO requests that have already been calculated.
const fsReadDirSyncCache = {}
/**
* Recursively parse directory for page URLs.
* @param {string} urlprefix
* @param {string} directory
*/
function parseUrlForPages(urlprefix, directory) {
const files = fs.readdirSync(directory)
fsReadDirSyncCache[directory] =
fsReadDirSyncCache[directory] || fs.readdirSync(directory)
const res = []
files.forEach((fname) => {
fsReadDirSyncCache[directory].forEach((fname) => {
if (/(\.(j|t)sx?)$/.test(fname)) {
fname = fname.replace(/\[.*\]/g, '.*')
if (/^index(\.(j|t)sx?)$/.test(fname)) {
@@ -90,7 +111,7 @@ function execOnce(fn) {
}
module.exports = {
getUrlFromPagesDirectory,
getUrlFromPagesDirectories,
normalizeURL,
execOnce,
}


@@ -18,6 +18,18 @@ const linterConfig = {
},
},
}
const linterConfigWithMultipleDirectories = {
...linterConfig,
rules: {
'no-html-link-for-pages': [
2,
[
path.join(__dirname, 'custom-pages'),
path.join(__dirname, 'custom-pages/list'),
],
],
},
}
linter.defineRules({
'no-html-link-for-pages': rule,
@@ -108,6 +120,17 @@ describe('no-html-link-for-pages', function () {
assert.deepEqual(report, [])
})
it('valid link element with multiple directories', function () {
const report = linter.verify(
validCode,
linterConfigWithMultipleDirectories,
{
filename: 'foo.js',
}
)
assert.deepEqual(report, [])
})
it('valid anchor element', function () {
const report = linter.verify(validAnchorCode, linterConfig, {
filename: 'foo.js',