chore: reduce fs-extra usage in scripts/ (#56917)
This PR follows #56536 and #56491, replacing `fs-extra` usage inside the `scripts/` folder. Note that `copy` and `move` haven't been replaced yet: there is currently no lightweight, promise-based recursive-copy alternative on npm that is built on the Node.js `copyFile` API and supports a `filter` option, and the Node.js built-in `fs.rename` doesn't support an `overwrite` option. The PR also replaces many async fs API usages with their sync counterparts. cc @wbinnssmith
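For reference, the hunks below apply a handful of recurring mappings from `fs-extra` helpers to Node.js built-ins. The following is a minimal illustrative sketch of those patterns, not code from the PR; the helper names are hypothetical, and it assumes Node.js 14.14+ so that `fs/promises`' `rm` accepts `recursive` and `force`.

const fsp = require('fs/promises')

// fs-extra remove(dir) -> rm with recursive + force (no error if the path is missing)
async function removeDir(dir) {
  await fsp.rm(dir, { recursive: true, force: true })
}

// fs-extra ensureDir(dir) -> mkdir with recursive (no error if the directory exists)
async function ensureDir(dir) {
  await fsp.mkdir(dir, { recursive: true })
}

// fs-extra readJson(file) -> read the file and parse it explicitly
async function readJson(file) {
  return JSON.parse(await fsp.readFile(file, 'utf8'))
}

// fs-extra move(src, dest, { overwrite: true }) -> remove the destination first,
// then rename; plain fs.rename has no overwrite option
async function moveOverwrite(src, dest) {
  await fsp.rm(dest, { recursive: true, force: true })
  await fsp.rename(src, dest)
}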
parent 255cc9b9a7
commit 17553c5e25
6 changed files with 74 additions and 57 deletions
28 run-tests.js
run-tests.js

@@ -1,7 +1,8 @@
 const os = require('os')
 const path = require('path')
 const _glob = require('glob')
-const fs = require('fs-extra')
+const { existsSync } = require('fs')
+const fsp = require('fs/promises')
 const nodeFetch = require('node-fetch')
 const vercelFetch = require('@vercel/fetch')
 const fetch = vercelFetch(nodeFetch)
@@ -116,10 +117,16 @@ ${output}

 const cleanUpAndExit = async (code) => {
   if (process.env.NEXT_TEST_STARTER) {
-    await fs.remove(process.env.NEXT_TEST_STARTER)
+    await fsp.rm(process.env.NEXT_TEST_STARTER, {
+      recursive: true,
+      force: true,
+    })
   }
   if (process.env.NEXT_TEST_TEMP_REPO) {
-    await fs.remove(process.env.NEXT_TEST_TEMP_REPO)
+    await fsp.rm(process.env.NEXT_TEST_TEMP_REPO, {
+      recursive: true,
+      force: true,
+    })
   }
   if (process.env.CI) {
     await maybeLogSummary()
@@ -250,7 +257,7 @@ async function main() {
   try {
     const timingsFile = path.join(process.cwd(), 'test-timings.json')
     try {
-      prevTimings = JSON.parse(await fs.readFile(timingsFile, 'utf8'))
+      prevTimings = JSON.parse(await fsp.readFile(timingsFile, 'utf8'))
       console.log('Loaded test timings from disk successfully')
     } catch (_) {
      console.error('failed to load from disk', _)
@@ -261,7 +268,7 @@ async function main() {
     console.log('Fetched previous timings data successfully')

     if (writeTimings) {
-      await fs.writeFile(timingsFile, JSON.stringify(prevTimings))
+      await fsp.writeFile(timingsFile, JSON.stringify(prevTimings))
       console.log('Wrote previous timings data to', timingsFile)
       await cleanUpAndExit(0)
     }
@@ -544,15 +551,16 @@ ${ENDGROUP}`)

             return reject(err)
           }
-          await fs
-            .remove(
+          await fsp
+            .rm(
               path.join(
                 __dirname,
                 'test/traces',
                 path
                   .relative(path.join(__dirname, 'test'), test.file)
                   .replace(/\//g, '-')
-              )
+              ),
+              { recursive: true, force: true }
             )
             .catch(() => {})
           resolve(new Date().getTime() - start)
@@ -645,7 +653,7 @@ ${ENDGROUP}`)
       // Emit test output if test failed or if we're continuing tests on error
       if ((!passed || shouldContinueTestsOnError) && isTestJob) {
         try {
-          const testsOutput = await fs.readFile(
+          const testsOutput = await fsp.readFile(
             `${test.file}${RESULTS_EXT}`,
             'utf8'
           )
@@ -708,7 +716,7 @@ ${ENDGROUP}`)
   }

   for (const test of Object.keys(newTimings)) {
-    if (!(await fs.pathExists(path.join(__dirname, test)))) {
+    if (!existsSync(path.join(__dirname, test))) {
       console.log('removing stale timing', test)
       delete newTimings[test]
     }

@@ -1,7 +1,8 @@
 import os from 'os'
 import path from 'path'
 import execa from 'execa'
-import fs from 'fs-extra'
+import fs from 'fs'
+import fsp from 'fs/promises'
 ;(async function () {
   if (process.env.NEXT_SKIP_NATIVE_POSTINSTALL) {
     console.log(
@@ -10,34 +11,34 @@ import fs from 'fs-extra'
     return
   }
   let cwd = process.cwd()
-  const { version: nextVersion } = await fs.readJSON(
-    path.join(cwd, 'packages', 'next', 'package.json')
+  const { version: nextVersion } = JSON.parse(
+    fs.readFileSync(path.join(cwd, 'packages', 'next', 'package.json'))
+  )
+  const { packageManager } = JSON.parse(
+    fs.readFileSync(path.join(cwd, 'package.json'))
   )
-  const { packageManager } = await fs.readJSON(path.join(cwd, 'package.json'))

   try {
     // if installed swc package version matches monorepo version
     // we can skip re-installing
-    for (const pkg of await fs.readdir(
-      path.join(cwd, 'node_modules', '@next')
-    )) {
+    for (const pkg of fs.readdirSync(path.join(cwd, 'node_modules', '@next'))) {
       if (
         pkg.startsWith('swc-') &&
-        (
-          await fs.readJSON(
+        JSON.parse(
+          fs.readFileSync(
             path.join(cwd, 'node_modules', '@next', pkg, 'package.json')
           )
         ).version === nextVersion
       ) {
-        console.log(`@next/${pkg}@${nextVersion} already installed skipping`)
+        console.log(`@next/${pkg}@${nextVersion} already installed, skipping`)
         return
       }
     }
-  } catch (_) {}
+  } catch {}

   try {
     let tmpdir = path.join(os.tmpdir(), `next-swc-${Date.now()}`)
-    await fs.ensureDir(tmpdir)
+    fs.mkdirSync(tmpdir, { recursive: true })
     let pkgJson = {
       name: 'dummy-package',
       version: '1.0.0',
@@ -54,28 +55,27 @@ import fs from 'fs-extra'
       },
       packageManager,
     }
-    await fs.writeFile(
-      path.join(tmpdir, 'package.json'),
-      JSON.stringify(pkgJson)
-    )
-    await fs.writeFile(path.join(tmpdir, '.npmrc'), 'node-linker=hoisted')
+    fs.writeFileSync(path.join(tmpdir, 'package.json'), JSON.stringify(pkgJson))
+    fs.writeFileSync(path.join(tmpdir, '.npmrc'), 'node-linker=hoisted')
     let { stdout } = await execa('pnpm', ['add', 'next@canary'], {
       cwd: tmpdir,
     })
     console.log(stdout)
-    let pkgs = await fs.readdir(path.join(tmpdir, 'node_modules/@next'))
-    await fs.ensureDir(path.join(cwd, 'node_modules/@next'))
+    let pkgs = fs.readdirSync(path.join(tmpdir, 'node_modules/@next'))
+    fs.mkdirSync(path.join(cwd, 'node_modules/@next'), { recursive: true })

     await Promise.all(
-      pkgs.map((pkg) =>
-        fs.move(
-          path.join(tmpdir, 'node_modules/@next', pkg),
-          path.join(cwd, 'node_modules/@next', pkg),
-          { overwrite: true }
-        )
-      )
+      pkgs.map(async (pkg) => {
+        const from = path.join(tmpdir, 'node_modules/@next', pkg)
+        const to = path.join(cwd, 'node_modules/@next', pkg)
+        // overwriting by removing the target first
+        await fsp.rm(to, { recursive: true, force: true })
+        return fsp.rename(from, to)
+      })
     )
-    await fs.remove(tmpdir)
+    fs.rmSync(tmpdir, { recursive: true, force: true })
     console.log('Installed the following binary packages:', pkgs)
   } catch (e) {
     console.error(e)

@@ -5,7 +5,7 @@ const path = require('path')
 const execa = require('execa')
 const { Sema } = require('async-sema')
 const { execSync } = require('child_process')
-const { readJson, readdir } = require('fs-extra')
+const fs = require('fs')

 const cwd = process.cwd()

@@ -38,7 +38,7 @@ const cwd = process.cwd()
   }

   const packagesDir = path.join(cwd, 'packages')
-  const packageDirs = await readdir(packagesDir)
+  const packageDirs = fs.readdirSync(packagesDir)
   const publishSema = new Sema(2)

   const publish = async (pkg, retry = 0) => {
@@ -88,8 +88,11 @@ const cwd = process.cwd()

   await Promise.allSettled(
     packageDirs.map(async (packageDir) => {
-      const pkgJson = await readJson(
-        path.join(packagesDir, packageDir, 'package.json')
+      const pkgJson = JSON.parse(
+        await fs.promises.readFile(
+          path.join(packagesDir, packageDir, 'package.json'),
+          'utf-8'
+        )
       )

       if (pkgJson.private) {

@@ -1,6 +1,6 @@
 import path from 'path'
-import { readFile, writeFile } from 'fs/promises'
-import { copy, pathExists } from 'fs-extra'
+import fs from 'fs'
+import { copy } from 'fs-extra'
 ;(async function () {
   try {
     let wasmDir = path.join(process.cwd(), 'packages/next-swc/crates/wasm')
@@ -8,16 +8,16 @@ import { copy, pathExists } from 'fs-extra'

     // CI restores artifact at pkg-${wasmTarget}
     // This only runs locally
-    let folderName = (await pathExists(path.join(wasmDir, 'pkg')))
+    let folderName = fs.existsSync(path.join(wasmDir, 'pkg'))
       ? 'pkg'
       : `pkg-${wasmTarget}`

     let wasmPkg = JSON.parse(
-      await readFile(path.join(wasmDir, `${folderName}/package.json`))
+      fs.readFileSync(path.join(wasmDir, `${folderName}/package.json`))
     )
     wasmPkg.name = `@next/swc-wasm-${wasmTarget}`

-    await writeFile(
+    fs.writeFileSync(
       path.join(wasmDir, `${folderName}/package.json`),
       JSON.stringify(wasmPkg, null, 2)
     )

@@ -1,7 +1,7 @@
 // @ts-check

 const path = require('path')
-const { readJson, writeJson } = require('fs-extra')
+const fsp = require('fs/promises')
 const execa = require('execa')

 /** @type {any} */
@@ -52,7 +52,9 @@ Or, run this command with no arguments to use the most recently published versio
   }

   const cwd = process.cwd()
-  const pkgJson = await readJson(path.join(cwd, 'package.json'))
+  const pkgJson = JSON.parse(
+    await fsp.readFile(path.join(cwd, 'package.json'), 'utf-8')
+  )
   const devDependencies = pkgJson.devDependencies
   const baseVersionStr = devDependencies[
     useExperimental ? 'react-experimental-builtin' : 'react-builtin'
@@ -90,7 +92,10 @@ Or, run this command with no arguments to use the most recently published versio
       )
     }
   }
-  await writeJson(path.join(cwd, 'package.json'), pkgJson, { spaces: 2 })
+  await fsp.writeFile(
+    path.join(cwd, 'package.json'),
+    JSON.stringify(pkgJson, null, 2)
+  )
   console.log('Successfully updated React dependencies in package.json.\n')

   // Install the updated dependencies and build the vendored React files.

@@ -1,7 +1,8 @@
 const os = require('os')
 const path = require('path')
 const execa = require('execa')
-const fs = require('fs-extra')
+const fsp = require('fs/promises')
+const { copy } = require('fs-extra')
 const prettyBytes = require('pretty-bytes')
 const gzipSize = require('next/dist/compiled/gzip-size')
 const { nodeFileTrace } = require('next/dist/compiled/@vercel/nft')
@@ -24,7 +25,7 @@ async function main() {
   const origTestDir = path.join(origRepoDir, 'test')
   const dotDir = path.join(origRepoDir, './') + '.'

-  await fs.copy(origRepoDir, repoDir, {
+  await copy(origRepoDir, repoDir, {
     filter: (item) => {
       return (
         !item.startsWith(origTestDir) &&
@@ -36,11 +37,11 @@ async function main() {

   console.log('using workdir', workDir)
   console.log('using repodir', repoDir)
-  await fs.ensureDir(workDir)
+  await fsp.mkdir(workDir, { recursive: true })

   const pkgPaths = await linkPackages({ repoDir: origRepoDir })

-  await fs.writeFile(
+  await fsp.writeFile(
     path.join(workDir, 'package.json'),
     JSON.stringify(
       {
@@ -95,7 +96,7 @@ async function main() {
       continue
     }
     tracedDeps.add(file.replace(/\\/g, '/'))
-    const stat = await fs.stat(path.join(workDir, file))
+    const stat = await fsp.stat(path.join(workDir, file))

     if (stat.isFile()) {
       const compressedSize = await gzipSize(path.join(workDir, file))
@@ -112,7 +113,7 @@ async function main() {
     totalUncompressedSize: prettyBytes(totalUncompressedSize),
   })

-  await fs.writeFile(
+  await fsp.writeFile(
     path.join(
       __dirname,
       '../packages/next/dist/server/next-server.js.nft.json'
@@ -122,8 +123,8 @@ async function main() {
       version: 1,
     })
   )
-  await fs.remove(workDir)
-  await fs.remove(repoDir)
+  await fsp.rm(workDir, { recursive: true, force: true })
+  await fsp.rm(repoDir, { recursive: true, force: true })

   console.timeEnd(traceLabel)