2021-09-13 14:36:25 +02:00
|
|
|
const os = require('os')
|
2019-09-10 19:11:55 +02:00
|
|
|
const path = require('path')
|
|
|
|
const _glob = require('glob')
|
2021-09-13 14:36:25 +02:00
|
|
|
const fs = require('fs-extra')
|
2021-08-06 17:20:05 +02:00
|
|
|
const nodeFetch = require('node-fetch')
|
|
|
|
const vercelFetch = require('@vercel/fetch')
|
|
|
|
const fetch = vercelFetch(nodeFetch)
|
2019-09-10 19:11:55 +02:00
|
|
|
const { promisify } = require('util')
|
|
|
|
const { Sema } = require('async-sema')
|
|
|
|
const { spawn, exec: execOrig } = require('child_process')
|
2021-09-13 14:36:25 +02:00
|
|
|
const { createNextInstall } = require('./test/lib/create-next-install')
|
2019-09-10 19:11:55 +02:00
|
|
|
const glob = promisify(_glob)
|
|
|
|
const exec = promisify(execOrig)
|
|
|
|
|
2023-05-09 19:46:54 +02:00
|
|
|
// Try to read an external array-based json to filter tests to be executed.
// If process.argv contains a test to be executed, this'll append it to the list.
const externalTestsFilterLists = process.env.NEXT_EXTERNAL_TESTS_FILTERS
  ? require(process.env.NEXT_EXTERNAL_TESTS_FILTERS)
  : []
// Collected { file, time } entries for each successful test run (filled in main).
const timings = []
// Default number of retries per failing test; Windows gets one extra retry.
const DEFAULT_NUM_RETRIES = os.platform() === 'win32' ? 2 : 1
// Default number of test files run in parallel (overridable with -c).
const DEFAULT_CONCURRENCY = 2
// Suffix for the jest --outputFile result dumps written when isTestJob is set.
const RESULTS_EXT = `.results.json`
const isTestJob = !!process.env.NEXT_TEST_JOB
// Check env to see if test should continue even if some of the tests fail
const shouldContinueTestsOnError = !!process.env.NEXT_TEST_CONTINUE_ON_ERROR
// Check env to load a list of test paths to skip retry. This is to be used in conjunction with NEXT_TEST_CONTINUE_ON_ERROR,
// when trying to run all of the tests regardless of pass / fail and wanting to skip retrying `known` failed tests.
// The manifest should be a json file with an array of test paths.
const skipRetryTestManifest = process.env.NEXT_TEST_SKIP_RETRY_MANIFEST
  ? require(process.env.NEXT_TEST_SKIP_RETRY_MANIFEST)
  : []
// GitHub gist that stores per-test timing data, used to build balanced test groups.
const TIMINGS_API = `https://api.github.com/gists/4500dd89ae2f5d70d9aaceb191f528d1`
const TIMINGS_API_HEADERS = {
  Accept: 'application/vnd.github.v3+json',
  // Authorization is only attached when a token is provided; presumably reads
  // work anonymously and the token is needed for the PATCH upload — TODO confirm
  ...(process.env.TEST_TIMINGS_TOKEN
    ? {
        Authorization: `Bearer ${process.env.TEST_TIMINGS_TOKEN}`,
      }
    : {}),
}

// Maps --type CLI values to the test/ subdirectory prefix they select.
const testFilters = {
  unit: 'unit/',
  e2e: 'e2e/',
  production: 'production/',
  development: 'development/',
  examples: 'examples/',
}
|
|
|
|
|
2022-12-16 09:58:04 +01:00
|
|
|
// Minimal stand-in for the tracing spans expected by createNextInstall:
// every child span is a fresh mock, and traced async fns simply run with one.
function mockTrace() {
  return {
    traceAsyncFn(fn) {
      return fn(mockTrace())
    },
    traceChild() {
      return mockTrace()
    },
  }
}
|
|
|
|
|
2020-11-10 18:25:50 +01:00
|
|
|
// which types we have configured to run separate
// (the path prefixes from testFilters, e.g. 'unit/', 'e2e/', ...)
const configuredTestTypes = Object.values(testFilters)
|
2020-11-10 18:25:50 +01:00
|
|
|
|
2021-09-13 14:36:25 +02:00
|
|
|
// Removes the temp directories created for isolated test runs (if any),
// then exits the process with the given code.
const cleanUpAndExit = async (code) => {
  const { NEXT_TEST_STARTER, NEXT_TEST_TEMP_REPO } = process.env

  if (NEXT_TEST_STARTER) {
    await fs.remove(NEXT_TEST_STARTER)
  }
  if (NEXT_TEST_TEMP_REPO) {
    await fs.remove(NEXT_TEST_TEMP_REPO)
  }

  console.log(`exiting with code ${code}`)

  // Delay the exit by a tick so pending stdout/stderr writes can flush.
  setTimeout(() => process.exit(code), 1)
}
|
|
|
|
|
2021-09-28 17:15:04 +02:00
|
|
|
// Fetches the historical per-test timing data from the timings gist.
// Retries once after a short delay on a 403 response; throws on any
// other non-OK status. Returns the parsed test-timings.json content.
async function getTestTimings() {
  const requestTimings = () =>
    fetch(TIMINGS_API, { headers: { ...TIMINGS_API_HEADERS } })

  let res = await requestTimings()

  if (res.status === 403) {
    const delay = 15
    console.log(`Got 403 response waiting ${delay} seconds before retry`)
    await new Promise((done) => setTimeout(done, delay * 1000))
    res = await requestTimings()
  }

  if (!res.ok) {
    throw new Error(`request status: ${res.status}`)
  }

  const body = await res.json()
  return JSON.parse(body.files['test-timings.json'].content)
}
|
|
|
|
|
2021-03-16 22:08:35 +01:00
|
|
|
/**
 * Orchestrates a full test run: parses CLI flags, collects the test files,
 * optionally splits them into balanced groups using historical timing data,
 * runs each file through jest with retries, and finally reports/uploads
 * timing data.
 *
 * Recognized CLI flags (all read from process.argv):
 *   -c <n>             concurrency for parallel test files (default DEFAULT_CONCURRENCY)
 *   --debug            stream child output live instead of buffering it
 *   --timings          collect timing data (uploaded when TEST_TIMINGS_TOKEN is set)
 *   --write-timings    persist fetched timings to test-timings.json and exit
 *   -g <pos>/<total>   only run group <pos> out of <total> groups
 *   --type <type>      unit | development | production | e2e | examples | all
 * Any argv entry matching *.test.(js|ts|tsx) is treated as an explicit test path.
 */
async function main() {
  let numRetries = DEFAULT_NUM_RETRIES
  let concurrencyIdx = process.argv.indexOf('-c')
  // falls back to the default when -c is absent or its value is not numeric
  let concurrency =
    (concurrencyIdx > -1 && parseInt(process.argv[concurrencyIdx + 1], 10)) ||
    DEFAULT_CONCURRENCY

  const hideOutput = !process.argv.includes('--debug')
  const outputTimings = process.argv.includes('--timings')
  const writeTimings = process.argv.includes('--write-timings')
  const groupIdx = process.argv.indexOf('-g')
  const groupArg = groupIdx !== -1 && process.argv[groupIdx + 1]

  const testTypeIdx = process.argv.indexOf('--type')
  const testType = testTypeIdx > -1 ? process.argv[testTypeIdx + 1] : undefined
  // path prefix (from testFilters) used to select tests; 'none' disables filtering
  let filterTestsBy

  switch (testType) {
    case 'unit': {
      // unit tests are not retried
      numRetries = 0
      filterTestsBy = testFilters.unit
      break
    }
    case 'development': {
      filterTestsBy = testFilters.development
      break
    }
    case 'production': {
      filterTestsBy = testFilters.production
      break
    }
    case 'e2e': {
      filterTestsBy = testFilters.e2e
      break
    }
    case 'examples': {
      filterTestsBy = testFilters.examples
      break
    }
    case 'all':
      filterTestsBy = 'none'
      break
    default:
      break
  }

  console.log('Running tests with concurrency:', concurrency)

  // explicit test paths from the command line, plus any external filter list
  let tests = process.argv
    .filter((arg) => arg.match(/\.test\.(js|ts|tsx)/))
    .concat(externalTestsFilterLists)
  let prevTimings

  if (tests.length === 0) {
    // no explicit tests given: glob the test/ directory and apply the type filter
    tests = (
      await glob('**/*.test.{js,ts,tsx}', {
        nodir: true,
        cwd: path.join(__dirname, 'test'),
      })
    ).filter((test) => {
      if (filterTestsBy) {
        // only include the specified type
        return filterTestsBy === 'none' ? true : test.startsWith(filterTestsBy)
      } else {
        // include all except the separately configured types
        return !configuredTestTypes.some((type) => test.startsWith(type))
      }
    })

    // timing data is only needed when tests are being split into groups
    if (outputTimings && groupArg) {
      console.log('Fetching previous timings data')
      try {
        const timingsFile = path.join(__dirname, 'test-timings.json')
        // prefer a locally cached timings file; fall back to the gist API below
        try {
          prevTimings = JSON.parse(await fs.readFile(timingsFile, 'utf8'))
          console.log('Loaded test timings from disk successfully')
        } catch (_) {}

        if (!prevTimings) {
          prevTimings = await getTestTimings()
          console.log('Fetched previous timings data successfully')

          if (writeTimings) {
            // --write-timings: cache the fetched data on disk and stop here
            await fs.writeFile(timingsFile, JSON.stringify(prevTimings))
            console.log('Wrote previous timings data to', timingsFile)
            await cleanUpAndExit(0)
          }
        }
      } catch (err) {
        console.log(`Failed to fetch timings data`, err)
        await cleanUpAndExit(1)
      }
    }
  }

  // normalize to unique, forward-slash test names rooted at test/
  let testNames = [
    ...new Set(
      tests.map((f) => {
        let name = `${f.replace(/\\/g, '/').replace(/\/test$/, '')}`
        if (!name.startsWith('test/')) name = `test/${name}`
        return name
      })
    ),
  ]

  if (groupArg) {
    // groupArg has the shape "<pos>/<total>", e.g. "2/5"
    const groupParts = groupArg.split('/')
    const groupPos = parseInt(groupParts[0], 10)
    const groupTotal = parseInt(groupParts[1], 10)

    if (prevTimings) {
      // greedy balancing: each test is appended to the currently cheapest
      // group so all groups end up with a similar accumulated runtime
      const groups = [[]]
      const groupTimes = [0]

      for (const testName of testNames) {
        let smallestGroup = groupTimes[0]
        let smallestGroupIdx = 0

        // get the smallest group time to add current one to
        for (let i = 1; i < groupTotal; i++) {
          if (!groups[i]) {
            groups[i] = []
            groupTimes[i] = 0
          }

          const time = groupTimes[i]
          if (time < smallestGroup) {
            smallestGroup = time
            smallestGroupIdx = i
          }
        }
        groups[smallestGroupIdx].push(testName)
        // tests with no timing data count as 1 (second) toward the group total
        groupTimes[smallestGroupIdx] += prevTimings[testName] || 1
      }

      const curGroupIdx = groupPos - 1
      testNames = groups[curGroupIdx]

      console.log(
        'Current group previous accumulated times:',
        Math.round(groupTimes[curGroupIdx]) + 's'
      )
    } else {
      // no timing data available: split into equally sized slices instead
      const numPerGroup = Math.ceil(testNames.length / groupTotal)
      let offset = (groupPos - 1) * numPerGroup
      testNames = testNames.slice(offset, offset + numPerGroup)
    }
  }

  if (testNames.length === 0) {
    console.log('No tests found for', testType, 'exiting..')
    return cleanUpAndExit(0)
  }

  console.log('Running tests:', '\n', ...testNames.map((name) => `${name}\n`))

  // true when any selected test lives under a non-unit configured type dir
  const hasIsolatedTests = testNames.some((test) => {
    return configuredTestTypes.some(
      (type) => type !== testFilters.unit && test.startsWith(`test/${type}`)
    )
  })

  if (
    process.platform !== 'win32' &&
    process.env.NEXT_TEST_MODE !== 'deploy' &&
    ((testType && testType !== 'unit') || hasIsolatedTests)
  ) {
    // for isolated next tests: e2e, dev, prod we create
    // a starter Next.js install to re-use to speed up tests
    // to avoid having to run yarn each time
    console.log('Creating Next.js install for isolated tests')
    const reactVersion = process.env.NEXT_TEST_REACT_VERSION || 'latest'
    const { installDir, pkgPaths, tmpRepoDir } = await createNextInstall({
      parentSpan: mockTrace(),
      dependencies: {
        react: reactVersion,
        'react-dom': reactVersion,
      },
      keepRepoDir: true,
    })

    // pkgPaths is a Map; serialize it as [key, value] pairs for the env var
    const serializedPkgPaths = []

    for (const key of pkgPaths.keys()) {
      serializedPkgPaths.push([key, pkgPaths.get(key)])
    }
    process.env.NEXT_TEST_PKG_PATHS = JSON.stringify(serializedPkgPaths)
    process.env.NEXT_TEST_TEMP_REPO = tmpRepoDir
    process.env.NEXT_TEST_STARTER = installDir
  }

  // global concurrency limiter across all test files
  const sema = new Sema(concurrency, { capacity: testNames.length })
  // all live jest child processes, so they can be killed on a hard failure
  const children = new Set()
  const jestPath = path.join(
    __dirname,
    'node_modules',
    '.bin',
    `jest${process.platform === 'win32' ? '.CMD' : ''}`
  )

  // Runs one test file in a jest child process. Resolves with the elapsed
  // time in ms; rejects when jest exits non-zero or is killed by a signal.
  const runTest = (test = '', isFinalRun, isRetry) =>
    new Promise((resolve, reject) => {
      const start = new Date().getTime()
      let outputChunks = []

      // re-run under the replay recorder only on retries, when enabled via env
      const shouldRecordTestWithReplay = process.env.RECORD_REPLAY && isRetry

      const child = spawn(
        jestPath,
        [
          ...(shouldRecordTestWithReplay
            ? [`--config=jest.replay.config.js`]
            : []),
          '--runInBand',
          '--forceExit',
          '--verbose',
          '--silent',
          ...(isTestJob
            ? ['--json', `--outputFile=${test}${RESULTS_EXT}`]
            : []),
          test,
        ],
        {
          stdio: ['ignore', 'pipe', 'pipe'],
          env: {
            ...process.env,
            RECORD_REPLAY: shouldRecordTestWithReplay,
            // run tests in headless mode by default
            HEADLESS: 'true',
            TRACE_PLAYWRIGHT: 'true',
            NEXT_TELEMETRY_DISABLED: '1',
            ...(isFinalRun
              ? {
                  // Events can be finicky in CI. This switches to a more
                  // reliable polling method.
                  // CHOKIDAR_USEPOLLING: 'true',
                  // CHOKIDAR_INTERVAL: 500,
                  // WATCHPACK_POLLING: 500,
                }
              : {}),
          },
        }
      )
      // buffer output while hiding it; it is replayed only if the run fails
      const handleOutput = (type) => (chunk) => {
        if (hideOutput && !isFinalRun) {
          outputChunks.push({ type, chunk })
        } else {
          process.stderr.write(chunk)
        }
      }
      child.stdout.on('data', handleOutput('stdout'))
      child.stderr.on('data', handleOutput('stderr'))

      children.add(child)

      child.on('exit', async (code, signal) => {
        children.delete(child)
        if (code !== 0 || signal !== null) {
          if (hideOutput) {
            // limit out to last 64kb so that we don't
            // run out of log room in CI
            // NOTE(review): no truncation is visible in this code — every
            // buffered chunk is replayed; confirm whether the 64kb limit
            // mentioned above still applies
            outputChunks.forEach(({ type, chunk }) => {
              if (type === 'stdout') {
                process.stdout.write(chunk)
              } else {
                process.stderr.write(chunk)
              }
            })
          }
          return reject(
            new Error(
              code
                ? `failed with code: ${code}`
                : `failed with signal: ${signal}`
            )
          )
        }
        // best-effort cleanup of the trace dir for this test file
        await fs
          .remove(
            path.join(
              __dirname,
              'test/traces',
              path
                .relative(path.join(__dirname, 'test'), test)
                .replace(/\//g, '-')
            )
          )
          .catch(() => {})
        resolve(new Date().getTime() - start)
      })
    })

  // one Sema(1) per directory so tests in the same dir never run concurrently
  const directorySemas = new Map()

  const originalRetries = numRetries
  await Promise.all(
    testNames.map(async (test) => {
      const dirName = path.dirname(test)
      let dirSema = directorySemas.get(dirName)
      if (dirSema === undefined)
        directorySemas.set(dirName, (dirSema = new Sema(1)))
      await dirSema.acquire()
      await sema.acquire()
      let passed = false

      // tests listed in the skip-retry manifest run exactly once
      const shouldSkipRetries = skipRetryTestManifest.find((t) =>
        t.includes(test)
      )
      const numRetries = shouldSkipRetries ? 0 : originalRetries
      if (shouldSkipRetries) {
        console.log(`Skipping retry for ${test} due to skipRetryTestManifest`)
      }

      for (let i = 0; i < numRetries + 1; i++) {
        try {
          console.log(`Starting ${test} retry ${i}/${numRetries}`)
          const time = await runTest(
            test,
            shouldSkipRetries || i === numRetries,
            shouldSkipRetries || i > 0
          )
          timings.push({
            file: test,
            time,
          })
          passed = true
          console.log(
            `Finished ${test} on retry ${i}/${numRetries} in ${time / 1000}s`
          )
          break
        } catch (err) {
          if (i < numRetries) {
            // reset any files the failed run left behind before retrying
            try {
              let testDir = path.dirname(path.join(__dirname, test))

              // if test is nested in a test folder traverse up a dir to ensure
              // we clean up relevant test files
              if (testDir.endsWith('/test') || testDir.endsWith('\\test')) {
                testDir = path.join(testDir, '../')
              }
              console.log('Cleaning test files at', testDir)
              await exec(`git clean -fdx "${testDir}"`)
              await exec(`git checkout "${testDir}"`)
            } catch (err) {}
          } else {
            console.error(`${test} failed due to ${err}`)
          }
        }
      }

      if (!passed) {
        console.error(`${test} failed to pass within ${numRetries} retries`)
        children.forEach((child) => child.kill())

        if (!shouldContinueTestsOnError) {
          // NOTE(review): not awaited — cleanUpAndExit schedules process.exit,
          // so in-flight work here is abandoned; confirm this is intentional
          cleanUpAndExit(1)
        } else {
          console.log(
            `CONTINUE_ON_ERROR enabled, continuing tests after ${test} failed`
          )
        }
      }

      // Emit test output if test failed or if we're continuing tests on error
      if ((!passed || shouldContinueTestsOnError) && isTestJob) {
        try {
          const testsOutput = await fs.readFile(`${test}${RESULTS_EXT}`, 'utf8')
          console.log(
            `--test output start--`,
            testsOutput,
            `--test output end--`
          )
        } catch (err) {
          console.log(`Failed to load test output`, err)
        }
      }

      sema.release()
      dirSema.release()
    })
  )

  if (outputTimings) {
    const curTimings = {}
    // let junitData = `<testsuites name="jest tests">`
    /*
    <testsuite name="/__tests__/bar.test.js" tests="1" errors="0" failures="0" skipped="0" timestamp="2017-10-10T21:56:49" time="0.323">
    <testcase classname="bar-should be bar" name="bar-should be bar" time="0.004">
    </testcase>
    </testsuite>
    */

    for (const timing of timings) {
      const timeInSeconds = timing.time / 1000
      curTimings[timing.file] = timeInSeconds

      // junitData += `
      // <testsuite name="${timing.file}" file="${
      // timing.file
      // }" tests="1" errors="0" failures="0" skipped="0" timestamp="${new Date().toJSON()}" time="${timeInSeconds}">
      // <testcase classname="tests suite should pass" name="${
      // timing.file
      // }" time="${timeInSeconds}"></testcase>
      // </testsuite>
      // `
    }
    // junitData += `</testsuites>`
    // console.log('output timing data to junit.xml')

    // only push updated timings when we fetched previous data and have a token
    if (prevTimings && process.env.TEST_TIMINGS_TOKEN) {
      try {
        // merge this run's timings over the latest remote data
        const newTimings = {
          ...(await getTestTimings()),
          ...curTimings,
        }

        // drop entries for test files that no longer exist on disk
        for (const test of Object.keys(newTimings)) {
          if (!(await fs.pathExists(path.join(__dirname, test)))) {
            console.log('removing stale timing', test)
            delete newTimings[test]
          }
        }

        const timingsRes = await fetch(TIMINGS_API, {
          method: 'PATCH',
          headers: {
            ...TIMINGS_API_HEADERS,
          },
          body: JSON.stringify({
            files: {
              'test-timings.json': {
                content: JSON.stringify(newTimings),
              },
            },
          }),
        })

        if (!timingsRes.ok) {
          throw new Error(`request status: ${timingsRes.status}`)
        }
        const result = await timingsRes.json()
        console.log(
          `Sent updated timings successfully. API URL: "${result?.url}" HTML URL: "${result?.html_url}"`
        )
      } catch (err) {
        console.log('Failed to update timings data', err)
      }
    }
  }
}
|
|
|
|
|
2023-04-13 08:23:59 +02:00
|
|
|
// Entry point: run the suite, then always clean up temp dirs and exit —
// code 0 on success, code 1 (after logging) on any failure.
async function run() {
  try {
    await main()
    await cleanUpAndExit(0)
  } catch (err) {
    console.error(err)
    await cleanUpAndExit(1)
  }
}

run()
|