chore: reduce fs-extra
usage in scripts/
(#56917)
The PR follows #56536 and #56491, replacing `fs-extra` usages inside the `scripts/` folder. Note that `copy` and `move` haven't been replaced yet: currently there is no better recursive-copy library alternative available on npm (lightweight, promise-based, built on the Node.js built-in `copyFile` API, and supporting the `filter` option), and the Node.js built-in `fs.rename` doesn't support `overwrite`. The PR also replaces many async fs API usages with their sync versions. cc @wbinnssmith
This commit is contained in:
parent
255cc9b9a7
commit
17553c5e25
6 changed files with 74 additions and 57 deletions
28
run-tests.js
28
run-tests.js
|
@@ -1,7 +1,8 @@
|
|||
const os = require('os')
|
||||
const path = require('path')
|
||||
const _glob = require('glob')
|
||||
const fs = require('fs-extra')
|
||||
const { existsSync } = require('fs')
|
||||
const fsp = require('fs/promises')
|
||||
const nodeFetch = require('node-fetch')
|
||||
const vercelFetch = require('@vercel/fetch')
|
||||
const fetch = vercelFetch(nodeFetch)
|
||||
|
@@ -116,10 +117,16 @@ ${output}
|
|||
|
||||
const cleanUpAndExit = async (code) => {
|
||||
if (process.env.NEXT_TEST_STARTER) {
|
||||
await fs.remove(process.env.NEXT_TEST_STARTER)
|
||||
await fsp.rm(process.env.NEXT_TEST_STARTER, {
|
||||
recursive: true,
|
||||
force: true,
|
||||
})
|
||||
}
|
||||
if (process.env.NEXT_TEST_TEMP_REPO) {
|
||||
await fs.remove(process.env.NEXT_TEST_TEMP_REPO)
|
||||
await fsp.rm(process.env.NEXT_TEST_TEMP_REPO, {
|
||||
recursive: true,
|
||||
force: true,
|
||||
})
|
||||
}
|
||||
if (process.env.CI) {
|
||||
await maybeLogSummary()
|
||||
|
@@ -250,7 +257,7 @@ async function main() {
|
|||
try {
|
||||
const timingsFile = path.join(process.cwd(), 'test-timings.json')
|
||||
try {
|
||||
prevTimings = JSON.parse(await fs.readFile(timingsFile, 'utf8'))
|
||||
prevTimings = JSON.parse(await fsp.readFile(timingsFile, 'utf8'))
|
||||
console.log('Loaded test timings from disk successfully')
|
||||
} catch (_) {
|
||||
console.error('failed to load from disk', _)
|
||||
|
@@ -261,7 +268,7 @@ async function main() {
|
|||
console.log('Fetched previous timings data successfully')
|
||||
|
||||
if (writeTimings) {
|
||||
await fs.writeFile(timingsFile, JSON.stringify(prevTimings))
|
||||
await fsp.writeFile(timingsFile, JSON.stringify(prevTimings))
|
||||
console.log('Wrote previous timings data to', timingsFile)
|
||||
await cleanUpAndExit(0)
|
||||
}
|
||||
|
@@ -544,15 +551,16 @@ ${ENDGROUP}`)
|
|||
|
||||
return reject(err)
|
||||
}
|
||||
await fs
|
||||
.remove(
|
||||
await fsp
|
||||
.rm(
|
||||
path.join(
|
||||
__dirname,
|
||||
'test/traces',
|
||||
path
|
||||
.relative(path.join(__dirname, 'test'), test.file)
|
||||
.replace(/\//g, '-')
|
||||
)
|
||||
),
|
||||
{ recursive: true, force: true }
|
||||
)
|
||||
.catch(() => {})
|
||||
resolve(new Date().getTime() - start)
|
||||
|
@@ -645,7 +653,7 @@ ${ENDGROUP}`)
|
|||
// Emit test output if test failed or if we're continuing tests on error
|
||||
if ((!passed || shouldContinueTestsOnError) && isTestJob) {
|
||||
try {
|
||||
const testsOutput = await fs.readFile(
|
||||
const testsOutput = await fsp.readFile(
|
||||
`${test.file}${RESULTS_EXT}`,
|
||||
'utf8'
|
||||
)
|
||||
|
@@ -708,7 +716,7 @@ ${ENDGROUP}`)
|
|||
}
|
||||
|
||||
for (const test of Object.keys(newTimings)) {
|
||||
if (!(await fs.pathExists(path.join(__dirname, test)))) {
|
||||
if (!existsSync(path.join(__dirname, test))) {
|
||||
console.log('removing stale timing', test)
|
||||
delete newTimings[test]
|
||||
}
|
||||
|
|
|
@@ -1,7 +1,8 @@
|
|||
import os from 'os'
|
||||
import path from 'path'
|
||||
import execa from 'execa'
|
||||
import fs from 'fs-extra'
|
||||
import fs from 'fs'
|
||||
import fsp from 'fs/promises'
|
||||
;(async function () {
|
||||
if (process.env.NEXT_SKIP_NATIVE_POSTINSTALL) {
|
||||
console.log(
|
||||
|
@@ -10,34 +11,34 @@ import fs from 'fs-extra'
|
|||
return
|
||||
}
|
||||
let cwd = process.cwd()
|
||||
const { version: nextVersion } = await fs.readJSON(
|
||||
path.join(cwd, 'packages', 'next', 'package.json')
|
||||
const { version: nextVersion } = JSON.parse(
|
||||
fs.readFileSync(path.join(cwd, 'packages', 'next', 'package.json'))
|
||||
)
|
||||
const { packageManager } = JSON.parse(
|
||||
fs.readFileSync(path.join(cwd, 'package.json'))
|
||||
)
|
||||
const { packageManager } = await fs.readJSON(path.join(cwd, 'package.json'))
|
||||
|
||||
try {
|
||||
// if installed swc package version matches monorepo version
|
||||
// we can skip re-installing
|
||||
for (const pkg of await fs.readdir(
|
||||
path.join(cwd, 'node_modules', '@next')
|
||||
)) {
|
||||
for (const pkg of fs.readdirSync(path.join(cwd, 'node_modules', '@next'))) {
|
||||
if (
|
||||
pkg.startsWith('swc-') &&
|
||||
(
|
||||
await fs.readJSON(
|
||||
JSON.parse(
|
||||
fs.readFileSync(
|
||||
path.join(cwd, 'node_modules', '@next', pkg, 'package.json')
|
||||
)
|
||||
).version === nextVersion
|
||||
) {
|
||||
console.log(`@next/${pkg}@${nextVersion} already installed skipping`)
|
||||
console.log(`@next/${pkg}@${nextVersion} already installed, skipping`)
|
||||
return
|
||||
}
|
||||
}
|
||||
} catch (_) {}
|
||||
} catch {}
|
||||
|
||||
try {
|
||||
let tmpdir = path.join(os.tmpdir(), `next-swc-${Date.now()}`)
|
||||
await fs.ensureDir(tmpdir)
|
||||
fs.mkdirSync(tmpdir, { recursive: true })
|
||||
let pkgJson = {
|
||||
name: 'dummy-package',
|
||||
version: '1.0.0',
|
||||
|
@@ -54,28 +55,27 @@ import fs from 'fs-extra'
|
|||
},
|
||||
packageManager,
|
||||
}
|
||||
await fs.writeFile(
|
||||
path.join(tmpdir, 'package.json'),
|
||||
JSON.stringify(pkgJson)
|
||||
)
|
||||
await fs.writeFile(path.join(tmpdir, '.npmrc'), 'node-linker=hoisted')
|
||||
fs.writeFileSync(path.join(tmpdir, 'package.json'), JSON.stringify(pkgJson))
|
||||
fs.writeFileSync(path.join(tmpdir, '.npmrc'), 'node-linker=hoisted')
|
||||
|
||||
let { stdout } = await execa('pnpm', ['add', 'next@canary'], {
|
||||
cwd: tmpdir,
|
||||
})
|
||||
console.log(stdout)
|
||||
let pkgs = await fs.readdir(path.join(tmpdir, 'node_modules/@next'))
|
||||
await fs.ensureDir(path.join(cwd, 'node_modules/@next'))
|
||||
|
||||
let pkgs = fs.readdirSync(path.join(tmpdir, 'node_modules/@next'))
|
||||
fs.mkdirSync(path.join(cwd, 'node_modules/@next'), { recursive: true })
|
||||
|
||||
await Promise.all(
|
||||
pkgs.map((pkg) =>
|
||||
fs.move(
|
||||
path.join(tmpdir, 'node_modules/@next', pkg),
|
||||
path.join(cwd, 'node_modules/@next', pkg),
|
||||
{ overwrite: true }
|
||||
)
|
||||
)
|
||||
pkgs.map(async (pkg) => {
|
||||
const from = path.join(tmpdir, 'node_modules/@next', pkg)
|
||||
const to = path.join(cwd, 'node_modules/@next', pkg)
|
||||
// overwriting by removing the target first
|
||||
await fsp.rm(to, { recursive: true, force: true })
|
||||
return fsp.rename(from, to)
|
||||
})
|
||||
)
|
||||
await fs.remove(tmpdir)
|
||||
fs.rmSync(tmpdir, { recursive: true, force: true })
|
||||
console.log('Installed the following binary packages:', pkgs)
|
||||
} catch (e) {
|
||||
console.error(e)
|
||||
|
|
|
@@ -5,7 +5,7 @@ const path = require('path')
|
|||
const execa = require('execa')
|
||||
const { Sema } = require('async-sema')
|
||||
const { execSync } = require('child_process')
|
||||
const { readJson, readdir } = require('fs-extra')
|
||||
const fs = require('fs')
|
||||
|
||||
const cwd = process.cwd()
|
||||
|
||||
|
@@ -38,7 +38,7 @@ const cwd = process.cwd()
|
|||
}
|
||||
|
||||
const packagesDir = path.join(cwd, 'packages')
|
||||
const packageDirs = await readdir(packagesDir)
|
||||
const packageDirs = fs.readdirSync(packagesDir)
|
||||
const publishSema = new Sema(2)
|
||||
|
||||
const publish = async (pkg, retry = 0) => {
|
||||
|
@@ -88,8 +88,11 @@ const cwd = process.cwd()
|
|||
|
||||
await Promise.allSettled(
|
||||
packageDirs.map(async (packageDir) => {
|
||||
const pkgJson = await readJson(
|
||||
path.join(packagesDir, packageDir, 'package.json')
|
||||
const pkgJson = JSON.parse(
|
||||
await fs.promises.readFile(
|
||||
path.join(packagesDir, packageDir, 'package.json'),
|
||||
'utf-8'
|
||||
)
|
||||
)
|
||||
|
||||
if (pkgJson.private) {
|
||||
|
|
|
@@ -1,6 +1,6 @@
|
|||
import path from 'path'
|
||||
import { readFile, writeFile } from 'fs/promises'
|
||||
import { copy, pathExists } from 'fs-extra'
|
||||
import fs from 'fs'
|
||||
import { copy } from 'fs-extra'
|
||||
;(async function () {
|
||||
try {
|
||||
let wasmDir = path.join(process.cwd(), 'packages/next-swc/crates/wasm')
|
||||
|
@@ -8,16 +8,16 @@ import { copy, pathExists } from 'fs-extra'
|
|||
|
||||
// CI restores artifact at pkg-${wasmTarget}
|
||||
// This only runs locally
|
||||
let folderName = (await pathExists(path.join(wasmDir, 'pkg')))
|
||||
let folderName = fs.existsSync(path.join(wasmDir, 'pkg'))
|
||||
? 'pkg'
|
||||
: `pkg-${wasmTarget}`
|
||||
|
||||
let wasmPkg = JSON.parse(
|
||||
await readFile(path.join(wasmDir, `${folderName}/package.json`))
|
||||
fs.readFileSync(path.join(wasmDir, `${folderName}/package.json`))
|
||||
)
|
||||
wasmPkg.name = `@next/swc-wasm-${wasmTarget}`
|
||||
|
||||
await writeFile(
|
||||
fs.writeFileSync(
|
||||
path.join(wasmDir, `${folderName}/package.json`),
|
||||
JSON.stringify(wasmPkg, null, 2)
|
||||
)
|
||||
|
|
|
@@ -1,7 +1,7 @@
|
|||
// @ts-check
|
||||
|
||||
const path = require('path')
|
||||
const { readJson, writeJson } = require('fs-extra')
|
||||
const fsp = require('fs/promises')
|
||||
const execa = require('execa')
|
||||
|
||||
/** @type {any} */
|
||||
|
@@ -52,7 +52,9 @@ Or, run this command with no arguments to use the most recently published versio
|
|||
}
|
||||
|
||||
const cwd = process.cwd()
|
||||
const pkgJson = await readJson(path.join(cwd, 'package.json'))
|
||||
const pkgJson = JSON.parse(
|
||||
await fsp.readFile(path.join(cwd, 'package.json'), 'utf-8')
|
||||
)
|
||||
const devDependencies = pkgJson.devDependencies
|
||||
const baseVersionStr = devDependencies[
|
||||
useExperimental ? 'react-experimental-builtin' : 'react-builtin'
|
||||
|
@@ -90,7 +92,10 @@ Or, run this command with no arguments to use the most recently published versio
|
|||
)
|
||||
}
|
||||
}
|
||||
await writeJson(path.join(cwd, 'package.json'), pkgJson, { spaces: 2 })
|
||||
await fsp.writeFile(
|
||||
path.join(cwd, 'package.json'),
|
||||
JSON.stringify(pkgJson, null, 2)
|
||||
)
|
||||
console.log('Successfully updated React dependencies in package.json.\n')
|
||||
|
||||
// Install the updated dependencies and build the vendored React files.
|
||||
|
|
|
@@ -1,7 +1,8 @@
|
|||
const os = require('os')
|
||||
const path = require('path')
|
||||
const execa = require('execa')
|
||||
const fs = require('fs-extra')
|
||||
const fsp = require('fs/promises')
|
||||
const { copy } = require('fs-extra')
|
||||
const prettyBytes = require('pretty-bytes')
|
||||
const gzipSize = require('next/dist/compiled/gzip-size')
|
||||
const { nodeFileTrace } = require('next/dist/compiled/@vercel/nft')
|
||||
|
@@ -24,7 +25,7 @@ async function main() {
|
|||
const origTestDir = path.join(origRepoDir, 'test')
|
||||
const dotDir = path.join(origRepoDir, './') + '.'
|
||||
|
||||
await fs.copy(origRepoDir, repoDir, {
|
||||
await copy(origRepoDir, repoDir, {
|
||||
filter: (item) => {
|
||||
return (
|
||||
!item.startsWith(origTestDir) &&
|
||||
|
@@ -36,11 +37,11 @@ async function main() {
|
|||
|
||||
console.log('using workdir', workDir)
|
||||
console.log('using repodir', repoDir)
|
||||
await fs.ensureDir(workDir)
|
||||
await fsp.mkdir(workDir, { recursive: true })
|
||||
|
||||
const pkgPaths = await linkPackages({ repoDir: origRepoDir })
|
||||
|
||||
await fs.writeFile(
|
||||
await fsp.writeFile(
|
||||
path.join(workDir, 'package.json'),
|
||||
JSON.stringify(
|
||||
{
|
||||
|
@@ -95,7 +96,7 @@ async function main() {
|
|||
continue
|
||||
}
|
||||
tracedDeps.add(file.replace(/\\/g, '/'))
|
||||
const stat = await fs.stat(path.join(workDir, file))
|
||||
const stat = await fsp.stat(path.join(workDir, file))
|
||||
|
||||
if (stat.isFile()) {
|
||||
const compressedSize = await gzipSize(path.join(workDir, file))
|
||||
|
@@ -112,7 +113,7 @@ async function main() {
|
|||
totalUncompressedSize: prettyBytes(totalUncompressedSize),
|
||||
})
|
||||
|
||||
await fs.writeFile(
|
||||
await fsp.writeFile(
|
||||
path.join(
|
||||
__dirname,
|
||||
'../packages/next/dist/server/next-server.js.nft.json'
|
||||
|
@@ -122,8 +123,8 @@ async function main() {
|
|||
version: 1,
|
||||
})
|
||||
)
|
||||
await fs.remove(workDir)
|
||||
await fs.remove(repoDir)
|
||||
await fsp.rm(workDir, { recursive: true, force: true })
|
||||
await fsp.rm(repoDir, { recursive: true, force: true })
|
||||
|
||||
console.timeEnd(traceLabel)
|
||||
|
||||
|
|
Loading…
Reference in a new issue