Use node:fs instead of fs-extra in .github/actions (#56536)

Test Plan: Stats for this PR?


Closes WEB-1730
Will Binns-Smith 2023-10-10 17:52:55 -07:00 committed by GitHub
parent d79f8a2f03
commit d73b8366af
8 changed files with 39 additions and 61 deletions
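
For quick reference, the fs-extra helpers removed below map onto node:fs calls as follows. This is an illustrative sketch, not part of the commit; the example function and paths are hypothetical, but the option objects match the ones introduced in the hunks:

const path = require('path')
const fs = require('fs/promises')
const { existsSync } = require('fs')

async function migrationExamples(dir) {
  // fse.pathExists(p) / fse.exists(p)  ->  existsSync(p)
  if (!existsSync(path.join(dir, 'pnpm-lock.yaml'))) return

  // fse.remove(p)  ->  fs.rm(p, { recursive: true, force: true })
  await fs.rm(path.join(dir, 'node_modules'), { recursive: true, force: true })

  // fse.mkdirp(p)  ->  fs.mkdir(p, { recursive: true })
  await fs.mkdir(path.join(dir, 'fetched-pages'), { recursive: true })

  // fse.copy(src, dest)  ->  fs.cp(src, dest, { recursive: true, force: true })
  await fs.cp(path.join(dir, 'native'), path.join(dir, 'out/native'), {
    recursive: true,
    force: true,
  })

  // fse.move(src, dest)  ->  fs.rename(src, dest)
  await fs.rename(path.join(dir, 'a.txt'), path.join(dir, 'b.txt'))

  // readFile / writeFile / readdir keep the same names on fs/promises.
}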


@@ -4,7 +4,6 @@
"dependencies": {
"async-sema": "^3.1.0",
"execa": "2.0.3",
"fs-extra": "^8.1.0",
"get-port": "^5.0.0",
"glob": "^7.1.4",
"gzip-size": "^5.1.1",


@@ -1,5 +1,6 @@
const path = require('path')
const fs = require('fs-extra')
const fs = require('fs/promises')
const { existsSync } = require('fs')
const exec = require('./util/exec')
const logger = require('./util/logger')
const runConfigs = require('./run')
@@ -21,7 +22,7 @@ if (!allowedActions.has(actionInfo.actionName) && !actionInfo.isRelease) {
;(async () => {
try {
if (await fs.pathExists(path.join(__dirname, '../SKIP_NEXT_STATS.txt'))) {
if (existsSync(path.join(__dirname, '../SKIP_NEXT_STATS.txt'))) {
console.log(
'SKIP_NEXT_STATS.txt file present, exiting stats generation..'
)
@@ -100,7 +101,7 @@ if (!allowedActions.has(actionInfo.actionName) && !actionInfo.isRelease) {
for (const dir of repoDirs) {
logger(`Running initial build for ${dir}`)
if (!actionInfo.skipClone) {
const usePnpm = await fs.pathExists(path.join(dir, 'pnpm-lock.yaml'))
const usePnpm = existsSync(path.join(dir, 'pnpm-lock.yaml'))
if (!statsConfig.skipInitialInstall) {
await exec.spawnPromise(
@@ -121,9 +122,10 @@ if (!allowedActions.has(actionInfo.actionName) && !actionInfo.isRelease) {
}
await fs
.copy(
.cp(
path.join(__dirname, '../native'),
path.join(dir, 'packages/next-swc/native')
path.join(dir, 'packages/next-swc/native'),
{ recursive: true, force: true }
)
.catch(console.error)
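
Worth noting for the hunk above: unlike fs-extra's copy(), fs.cp will not copy a directory unless recursive: true is passed, so the options object is required here. A minimal standalone sketch of the same call (the relative paths are assumed, mirroring the action's layout):

const path = require('path')
const fs = require('fs/promises')

;(async () => {
  // recursive: true lets fs.cp copy the whole native/ directory tree;
  // force: true makes overwriting existing destination files explicit.
  await fs.cp(
    path.join(__dirname, '../native'),
    path.join(__dirname, 'packages/next-swc/native'),
    { recursive: true, force: true }
  )
})().catch(console.error)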


@@ -1,16 +1,14 @@
const path = require('path')
const fse = require('fs-extra')
const fs = require('fs')
const fsp = require('fs/promises')
const fs = require('fs/promises')
const { existsSync } = require('fs')
const exec = require('../util/exec')
const { remove } = require('fs-extra')
const logger = require('../util/logger')
const execa = require('execa')
module.exports = (actionInfo) => {
return {
async cloneRepo(repoPath = '', dest = '', branch = '', depth = '20') {
await remove(dest)
await fs.rm(dest, { recursive: true, force: true })
await exec(
`git clone ${actionInfo.gitRoot}${repoPath} --single-branch --branch ${branch} --depth=${depth} ${dest}`
)
@@ -72,7 +70,7 @@ module.exports = (actionInfo) => {
let pkgs
try {
pkgs = await fsp.readdir(path.join(repoDir, 'packages'))
pkgs = await fs.readdir(path.join(repoDir, 'packages'))
} catch (err) {
if (err.code === 'ENOENT') {
require('console').log('no packages to link')
@@ -87,8 +85,8 @@ module.exports = (actionInfo) => {
const packedPkgPath = path.join(pkgPath, `${pkg}-packed.tgz`)
const pkgDataPath = path.join(pkgPath, 'package.json')
if (fs.existsSync(pkgDataPath)) {
const pkgData = JSON.parse(await fsp.readFile(pkgDataPath))
if (existsSync(pkgDataPath)) {
const pkgData = JSON.parse(await fs.readFile(pkgDataPath))
const { name } = pkgData
pkgDatas.set(name, {
@@ -122,7 +120,7 @@ module.exports = (actionInfo) => {
pkgData.files.push('native')
try {
const swcBinariesDirContents = await fsp.readdir(
const swcBinariesDirContents = await fs.readdir(
path.join(pkgPath, 'native')
)
require('console').log(
@@ -155,7 +153,7 @@ module.exports = (actionInfo) => {
}
}
await fsp.writeFile(
await fs.writeFile(
pkgDataPath,
JSON.stringify(pkgData, null, 2),
'utf8'
@@ -186,9 +184,9 @@ module.exports = (actionInfo) => {
'disabled-native-gitignore'
)
await fsp.rename(nativeGitignorePath, renamedGitignorePath)
await fs.rename(nativeGitignorePath, renamedGitignorePath)
cleanup = async () => {
await fsp.rename(renamedGitignorePath, nativeGitignorePath)
await fs.rename(renamedGitignorePath, nativeGitignorePath)
}
}
@@ -201,7 +199,7 @@ module.exports = (actionInfo) => {
})
return Promise.all([
fsp.rename(path.resolve(pkgPath, stdout.trim()), packedPkgPath),
fs.rename(path.resolve(pkgPath, stdout.trim()), packedPkgPath),
cleanup?.(),
])
}


@@ -1,5 +1,6 @@
const path = require('path')
const fs = require('fs-extra')
const fs = require('fs/promises')
const { existsSync } = require('fs')
const exec = require('../util/exec')
const glob = require('../util/glob')
const logger = require('../util/logger')
@@ -12,15 +13,17 @@ module.exports = async function collectDiffs(
if (initial) {
logger('Setting up directory for diffing')
// set-up diffing directory
await fs.remove(diffingDir)
await fs.mkdirp(diffingDir)
await fs.rm(diffingDir, { recursive: true, force: true })
await fs.mkdir(diffingDir, { recursive: true })
await exec(`cd ${diffingDir} && git init`)
} else {
// remove any previous files in case they won't be overwritten
const toRemove = await glob('!(.git)', { cwd: diffingDir, dot: true })
await Promise.all(
toRemove.map((file) => fs.remove(path.join(diffingDir, file)))
toRemove.map((file) =>
fs.rm(path.join(diffingDir, file), { recursive: true, force: true })
)
)
}
const diffs = {}
@@ -40,7 +43,7 @@ module.exports = async function collectDiffs(
const absPath = path.join(statsAppDir, file)
const diffDest = path.join(diffingDir, file)
await fs.copy(absPath, diffDest)
await fs.cp(absPath, diffDest, { recursive: true, force: true })
}
if (curFiles.length > 0) {
@@ -75,7 +78,7 @@ module.exports = async function collectDiffs(
for (const line of renamedFiles) {
const [, prev, cur] = line.split('\t')
await fs.move(path.join(diffingDir, cur), path.join(diffingDir, prev))
await fs.rename(path.join(diffingDir, cur), path.join(diffingDir, prev))
diffs._renames.push({
prev,
cur,
@@ -91,7 +94,7 @@ module.exports = async function collectDiffs(
for (const file of changedFiles) {
const fileKey = path.basename(file)
const hasFile = await fs.exists(path.join(diffingDir, file))
const hasFile = existsSync(path.join(diffingDir, file))
if (!hasFile) {
diffs[fileKey] = 'deleted'


@@ -1,5 +1,5 @@
const path = require('path')
const fs = require('fs-extra')
const fs = require('fs/promises')
const getPort = require('get-port')
const fetch = require('node-fetch')
const glob = require('../util/glob')
@@ -84,7 +84,7 @@ module.exports = async function collectStats(
if (hasPagesToFetch) {
const fetchedPagesDir = path.join(curDir, 'fetched-pages')
await fs.mkdirp(fetchedPagesDir)
await fs.mkdir(fetchedPagesDir, { recursive: true })
for (let url of runConfig.pagesToFetch) {
url = url.replace('$PORT', port)


@@ -1,5 +1,5 @@
const path = require('path')
const fs = require('fs-extra')
const fs = require('fs/promises')
// getDirSize recursively gets size of all files in a directory
async function getDirSize(dir, ctx = { size: 0 }) {


@@ -1,5 +1,5 @@
const path = require('path')
const fs = require('fs-extra')
const fs = require('fs/promises')
const glob = require('../util/glob')
const exec = require('../util/exec')
const logger = require('../util/logger')
@@ -36,8 +36,8 @@ async function runConfigs(
const curStatsAppPath = path.join(diffRepoDir, relativeStatsAppDir)
// clean statsAppDir
await fs.remove(statsAppDir)
await fs.copy(curStatsAppPath, statsAppDir)
await fs.rm(statsAppDir, { recursive: true, force: true })
await fs.cp(curStatsAppPath, statsAppDir, { recursive: true })
logger(`Copying ${curStatsAppPath} ${statsAppDir}`)
@@ -70,7 +70,7 @@ async function runConfigs(
? result.replace(/(\.|-)[0-9a-f]{16}(\.|-)/g, '$1HASH$2')
: rename.dest
if (result === dest) continue
await fs.move(
await fs.rename(
path.join(statsAppDir, result),
path.join(statsAppDir, dest)
)
@@ -172,7 +172,10 @@ async function runConfigs(
}
async function linkPkgs(pkgDir = '', pkgPaths) {
await fs.remove(path.join(pkgDir, 'node_modules'))
await fs.rm(path.join(pkgDir, 'node_modules'), {
recursive: true,
force: true,
})
const pkgJsonPath = path.join(pkgDir, 'package.json')
const pkgData = require(pkgJsonPath)


@@ -33,9 +33,6 @@ importers:
execa:
specifier: 2.0.3
version: 2.0.3
fs-extra:
specifier: ^8.1.0
version: 8.1.0
get-port:
specifier: ^5.0.0
version: 5.1.1
@@ -480,15 +477,6 @@ packages:
mime-types: 2.1.35
dev: false
/fs-extra@8.1.0:
resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==}
engines: {node: '>=6 <7 || >=8'}
dependencies:
graceful-fs: 4.2.11
jsonfile: 4.0.0
universalify: 0.1.2
dev: false
/fs.realpath@1.0.0:
resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==}
dev: false
@@ -522,10 +510,6 @@ packages:
path-is-absolute: 1.0.1
dev: false
/graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
dev: false
/gray-matter@4.0.2:
resolution: {integrity: sha512-7hB/+LxrOjq/dd8APlK0r24uL/67w7SkYnfwhNFwg/VDIGWGmduTDYf3WNstLW2fbbmRwrDGCVSJ2isuf2+4Hw==}
engines: {node: '>=6.0'}
@@ -693,12 +677,6 @@ packages:
esprima: 4.0.1
dev: false
/jsonfile@4.0.0:
resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==}
optionalDependencies:
graceful-fs: 4.2.11
dev: false
/kind-of@6.0.3:
resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==}
engines: {node: '>=0.10.0'}
@@ -1135,11 +1113,6 @@ packages:
resolution: {integrity: sha512-isyNax3wXoKaulPDZWHQqbmIx1k2tb9fb3GGDBRxCscfYV2Ch7WxPArBsFEG8s/safwXTT7H4QGhaIkTp9447w==}
dev: false
/universalify@0.1.2:
resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==}
engines: {node: '>= 4.0.0'}
dev: false
/uuid@8.3.2:
resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
hasBin: true