rsnext/run-tests.js

//@ts-check
const os = require('os')
const path = require('path')
const _glob = require('glob')
const { existsSync } = require('fs')
const fsp = require('fs/promises')
const nodeFetch = require('node-fetch')
const vercelFetch = require('@vercel/fetch')
// @ts-expect-error
const fetch = vercelFetch(nodeFetch)
const { promisify } = require('util')
const { Sema } = require('async-sema')
const { spawn, exec: execOrig } = require('child_process')
const { createNextInstall } = require('./test/lib/create-next-install')
const glob = promisify(_glob)
const exec = promisify(execOrig)
const core = require('@actions/core')
const { getTestFilter } = require('./test/get-test-filter')
let argv = require('yargs/yargs')(process.argv.slice(2))
.string('type')
.string('test-pattern')
.boolean('timings')
.boolean('write-timings')
.number('retries')
.boolean('debug')
.string('g')
.alias('g', 'group')
.number('c')
.boolean('related')
.boolean('dry')
.boolean('local')
.alias('r', 'related')
.alias('c', 'concurrency').argv
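// Example invocations (paths are illustrative):
//   node run-tests.js --type unit
//   node run-tests.js --type development -g 1/4 -c 2
//   node run-tests.js test/e2e/example/index.test.ts --debug --dry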
function escapeRegexp(str) {
return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&')
}
/**
* @typedef {{ file: string, excludedCases: string[] }} TestFile
*/
const GROUP = process.env.CI ? '##[group]' : ''
const ENDGROUP = process.env.CI ? '##[endgroup]' : ''
const externalTestsFilter = getTestFilter()
const timings = []
const DEFAULT_NUM_RETRIES = os.platform() === 'win32' ? 2 : 1
const DEFAULT_CONCURRENCY = 2
const RESULTS_EXT = `.results.json`
const isTestJob = !!process.env.NEXT_TEST_JOB
// Check env to see if tests should continue even if some of the tests fail
const shouldContinueTestsOnError = !!process.env.NEXT_TEST_CONTINUE_ON_ERROR
// Check env to load a list of test paths whose retries should be skipped. This is meant to be used
// in conjunction with NEXT_TEST_CONTINUE_ON_ERROR, when running all of the tests regardless of
// pass / fail while skipping retries for `known` failing tests.
// The manifest should be a JSON file with an array of test paths.
const skipRetryTestManifest = process.env.NEXT_TEST_SKIP_RETRY_MANIFEST
? require(process.env.NEXT_TEST_SKIP_RETRY_MANIFEST)
: []
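// e.g. NEXT_TEST_SKIP_RETRY_MANIFEST=/path/to/manifest.json, where the file contains
// an array of paths (shape inferred from the `t.includes(test.file)` lookup below):
// ["test/integration/some-known-failure/test/index.test.js"]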
const TIMINGS_API = `https://api.github.com/gists/4500dd89ae2f5d70d9aaceb191f528d1`
const TIMINGS_API_HEADERS = {
Accept: 'application/vnd.github.v3+json',
...(process.env.TEST_TIMINGS_TOKEN
? {
Authorization: `Bearer ${process.env.TEST_TIMINGS_TOKEN}`,
}
: {}),
}
const testFilters = {
development: new RegExp(
'^(test/(development|e2e)|packages/.*/src/.*)/.*\\.test\\.(js|jsx|ts|tsx)$'
),
production: new RegExp(
'^(test/(production|e2e))/.*\\.test\\.(js|jsx|ts|tsx)$'
),
unit: new RegExp(
'^test/unit|packages/.*/src/.*/.*\\.test\\.(js|jsx|ts|tsx)$'
),
examples: 'examples/',
integration: 'test/integration/',
e2e: 'test/e2e/',
}
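// Note: files under test/e2e match both the development and production filters
// above, so e2e suites are exercised in both modes.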
const mockTrace = () => ({
traceAsyncFn: (fn) => fn(mockTrace()),
traceAsyncFn: (fn) => fn(mockTrace()),
traceFn: (fn) => fn(mockTrace()),
traceChild: () => mockTrace(),
})
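// mockTrace mimics the span interface consumed below (traceAsyncFn / traceFn /
// traceChild) while recording nothing, so createNextInstall can run without a
// real trace span.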
// the test types we have configured to run separately
const configuredTestTypes = Object.values(testFilters)
const errorsPerTests = new Map()
async function maybeLogSummary() {
if (process.env.CI && errorsPerTests.size > 0) {
const outputTemplate = `
${Array.from(errorsPerTests.entries())
.map(([test, output]) => {
return `
<details>
<summary>${test}</summary>
\`\`\`
${output}
\`\`\`
</details>
`
})
.join('\n')}`
await core.summary
.addHeading('Test failures')
.addTable([
[
{
data: 'Test suite',
header: true,
},
],
...Array.from(errorsPerTests.entries()).map(([test]) => {
return [
`<a href="https://github.com/vercel/next.js/blob/canary/${test}">${test}</a>`,
]
}),
])
.addRaw(outputTemplate)
.write()
}
}
let exiting = false
const cleanUpAndExit = async (code) => {
if (exiting) {
return
}
exiting = true
console.log(`exiting with code ${code}`)
if (process.env.NEXT_TEST_STARTER) {
await fsp.rm(process.env.NEXT_TEST_STARTER, {
recursive: true,
force: true,
})
}
if (process.env.NEXT_TEST_TEMP_REPO) {
await fsp.rm(process.env.NEXT_TEST_TEMP_REPO, {
recursive: true,
force: true,
})
}
if (process.env.CI) {
await maybeLogSummary()
}
process.exit(code)
}
const isMatchingPattern = (pattern, file) => {
if (pattern instanceof RegExp) {
return pattern.test(file)
} else {
return file.startsWith(pattern)
}
}
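// Fetches previous test timings from the gist above. Judging by how the result
// is consumed (`prevTimings[test.file] || 1`), the parsed JSON maps test file
// paths to durations in seconds, e.g. { "test/e2e/example/index.test.ts": 120 }.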
async function getTestTimings() {
let timingsRes
const doFetch = () =>
fetch(TIMINGS_API, {
headers: {
...TIMINGS_API_HEADERS,
},
})
timingsRes = await doFetch()
if (timingsRes.status === 403) {
const delay = 15
console.log(`Got 403 response, waiting ${delay} seconds before retry`)
await new Promise((resolve) => setTimeout(resolve, delay * 1000))
timingsRes = await doFetch()
}
if (!timingsRes.ok) {
throw new Error(`request status: ${timingsRes.status}`)
}
const timingsData = await timingsRes.json()
return JSON.parse(timingsData.files['test-timings.json'].content)
}
async function main() {
// Ensure we have the arguments awaited from yargs.
argv = await argv
const options = {
concurrency: argv.concurrency || DEFAULT_CONCURRENCY,
debug: argv.debug ?? false,
timings: argv.timings ?? false,
writeTimings: argv.writeTimings ?? false,
group: argv.group ?? false,
testPattern: argv.testPattern ?? false,
type: argv.type ?? false,
related: argv.related ?? false,
retries: argv.retries ?? DEFAULT_NUM_RETRIES,
dry: argv.dry ?? false,
local: argv.local ?? false,
}
let numRetries = options.retries
const hideOutput = !options.debug && !options.dry
let filterTestsBy
switch (options.type) {
case 'unit': {
numRetries = 0
filterTestsBy = testFilters.unit
break
}
case 'all': {
filterTestsBy = 'none'
break
}
default: {
filterTestsBy = testFilters[options.type]
break
}
}
console.log(
'Running tests with concurrency:',
options.concurrency,
'in test mode',
process.env.NEXT_TEST_MODE
)
/** @type {TestFile[]} */
let tests = argv._.filter((arg) =>
arg.toString().match(/\.test\.(js|ts|tsx)/)
).map((file) => ({ file: file.toString(), excludedCases: [] }))
let prevTimings
if (tests.length === 0) {
/** @type {RegExp | undefined} */
let testPatternRegex
if (options.testPattern && typeof options.testPattern === 'string') {
testPatternRegex = new RegExp(options.testPattern)
}
if (options.related) {
const { getRelatedTests } = await import('./scripts/run-related-test.mjs')
const tests = await getRelatedTests()
if (tests.length)
testPatternRegex = new RegExp(tests.map(escapeRegexp).join('|'))
if (testPatternRegex) {
console.log('Running related tests:', testPatternRegex.toString())
} else {
console.log('No matching related tests, exiting.')
process.exit(0)
}
}
tests = (
await glob('**/*.test.{js,ts,tsx}', {
nodir: true,
cwd: __dirname,
ignore: '**/node_modules/**',
})
)
.filter((file) => {
if (testPatternRegex) {
return testPatternRegex.test(file)
}
if (filterTestsBy) {
// only include the specified type
if (filterTestsBy === 'none') {
return true
}
return isMatchingPattern(filterTestsBy, file)
}
// include all except the separately configured types
return !configuredTestTypes.some((type) =>
isMatchingPattern(type, file)
)
})
.map((file) => ({
file,
excludedCases: [],
}))
}
if (options.timings && options.group) {
console.log('Fetching previous timings data')
try {
const timingsFile = path.join(process.cwd(), 'test-timings.json')
try {
prevTimings = JSON.parse(await fsp.readFile(timingsFile, 'utf8'))
console.log('Loaded test timings from disk successfully')
} catch (_) {
console.error('failed to load from disk', _)
}
if (!prevTimings) {
prevTimings = await getTestTimings()
console.log('Fetched previous timings data successfully')
if (options.writeTimings) {
await fsp.writeFile(timingsFile, JSON.stringify(prevTimings))
console.log('Wrote previous timings data to', timingsFile)
await cleanUpAndExit(0)
}
}
} catch (err) {
console.log(`Failed to fetch timings data`, err)
await cleanUpAndExit(1)
}
}
// If an external manifest containing a list of tests is provided, apply it to the test list.
if (externalTestsFilter) {
tests = externalTestsFilter(tests)
}
let testSet = new Set()
tests = tests
.map((test) => {
test.file = test.file.replace(/\\/g, '/').replace(/\/test$/, '')
return test
})
.filter((test) => {
if (testSet.has(test.file)) return false
testSet.add(test.file)
return true
})
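// `--group 1/4` selects the first of four shards: with previous timings each
// test is bin-packed into the shard with the smallest accumulated time,
// otherwise the tests are split evenly by count.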
if (options.group && typeof options.group === 'string') {
const groupParts = options.group.split('/')
const groupPos = parseInt(groupParts[0], 10)
const groupTotal = parseInt(groupParts[1], 10)
if (prevTimings) {
/** @type {TestFile[][]} */
const groups = [[]]
const groupTimes = [0]
for (const test of tests) {
let smallestGroup = groupTimes[0]
let smallestGroupIdx = 0
// find the group with the smallest accumulated time to add the current test to
for (let i = 1; i < groupTotal; i++) {
if (!groups[i]) {
groups[i] = []
groupTimes[i] = 0
}
const time = groupTimes[i]
if (time < smallestGroup) {
smallestGroup = time
smallestGroupIdx = i
}
}
groups[smallestGroupIdx].push(test)
groupTimes[smallestGroupIdx] += prevTimings[test.file] || 1
}
const curGroupIdx = groupPos - 1
tests = groups[curGroupIdx]
console.log(
'Current group previous accumulated times:',
Math.round(groupTimes[curGroupIdx]) + 's'
)
} else {
const numPerGroup = Math.ceil(tests.length / groupTotal)
let offset = (groupPos - 1) * numPerGroup
tests = tests.slice(offset, offset + numPerGroup)
console.log('Splitting without timings')
}
}
if (!tests) {
tests = []
}
if (tests.length === 0) {
console.log('No tests found for', options.type, 'exiting...')
}
console.log(`${GROUP}Running tests:
${tests.map((t) => t.file).join('\n')}
${ENDGROUP}`)
console.log(`total: ${tests.length}`)
const hasIsolatedTests = tests.some((test) => {
return configuredTestTypes.some(
(type) =>
type !== testFilters.unit && test.file.startsWith(`test/${type}`)
)
})
if (
!options.dry &&
process.platform !== 'win32' &&
process.env.NEXT_TEST_MODE !== 'deploy' &&
((options.type && options.type !== 'unit') || hasIsolatedTests)
) {
// for isolated Next.js tests (e2e, dev, prod) we create
// a starter Next.js install to re-use, which speeds up tests
// by avoiding having to run yarn each time
console.log(`${GROUP}Creating Next.js install for isolated tests`)
const reactVersion = process.env.NEXT_TEST_REACT_VERSION || '19.0.0-rc.0'
const { installDir, pkgPaths, tmpRepoDir } = await createNextInstall({
parentSpan: mockTrace(),
dependencies: {
react: reactVersion,
'react-dom': reactVersion,
},
keepRepoDir: true,
})
const serializedPkgPaths = []
for (const key of pkgPaths.keys()) {
serializedPkgPaths.push([key, pkgPaths.get(key)])
}
process.env.NEXT_TEST_PKG_PATHS = JSON.stringify(serializedPkgPaths)
process.env.NEXT_TEST_TEMP_REPO = tmpRepoDir
process.env.NEXT_TEST_STARTER = installDir
console.log(`${ENDGROUP}`)
}
const sema = new Sema(options.concurrency, { capacity: tests.length })
const outputSema = new Sema(1, { capacity: tests.length })
const children = new Set()
const jestPath = path.join(
__dirname,
'node_modules',
'.bin',
`jest${process.platform === 'win32' ? '.CMD' : ''}`
)
let firstError = true
let killed = false
const runTest = (/** @type {TestFile} */ test, isFinalRun, isRetry) =>
new Promise((resolve, reject) => {
const start = new Date().getTime()
let outputChunks = []
const shouldRecordTestWithReplay = process.env.RECORD_REPLAY && isRetry
const args = [
...(shouldRecordTestWithReplay
? [`--config=jest.replay.config.js`]
: []),
...(process.env.CI ? ['--ci'] : []),
'--runInBand',
'--forceExit',
'--verbose',
'--silent',
...(isTestJob
? ['--json', `--outputFile=${test.file}${RESULTS_EXT}`]
: []),
test.file,
...(test.excludedCases.length === 0
? []
: [
'--testNamePattern',
`^(?!(?:${test.excludedCases.map(escapeRegexp).join('|')})$).`,
]),
]
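// The --testNamePattern above is a negative lookahead: for excluded cases
// "foo" and "bar" it becomes ^(?!(?:foo|bar)$). so every other case still runs.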
const env = {
// run tests in headless mode by default
HEADLESS: 'true',
NEXT_TELEMETRY_DISABLED: '1',
// unset CI env so CI behavior is only explicitly
// tested when enabled
CI: '',
...(options.local
? {}
: {
IS_RETRY: isRetry ? 'true' : undefined,
RECORD_REPLAY: shouldRecordTestWithReplay,
TRACE_PLAYWRIGHT:
process.env.NEXT_TEST_MODE === 'deploy' ? undefined : 'true',
CIRCLECI: '',
GITHUB_ACTIONS: '',
CONTINUOUS_INTEGRATION: '',
RUN_ID: '',
BUILD_NUMBER: '',
// Format the junit report output name to include the test name,
// for debugging purposes, so the actual run list can be compared to the generated reports.
// [NOTE]: This has no effect if the junit reporter is not enabled
// @ts-expect-error .replaceAll() does exist. Follow up on why TS is not recognizing it
JEST_JUNIT_OUTPUT_NAME: test.file.replaceAll('/', '_'),
// Specify a suite name for the test to avoid unexpected merging across different envs / grouped tests.
// This is not the individual suite name (the corresponding 'describe') but the top-level
// suite name, which is redundant by default.
// [NOTE]: This has no effect if the junit reporter is not enabled
JEST_SUITE_NAME: [
`${process.env.NEXT_TEST_MODE ?? 'default'}`,
options.group,
options.type,
test.file,
]
.filter(Boolean)
.join(':'),
}),
...(isFinalRun
? {
// Events can be finicky in CI. This switches to a more
// reliable polling method.
// CHOKIDAR_USEPOLLING: 'true',
// CHOKIDAR_INTERVAL: 500,
// WATCHPACK_POLLING: 500,
}
: {}),
}
const handleOutput = (type) => (chunk) => {
if (hideOutput) {
outputChunks.push({ type, chunk })
} else {
process.stdout.write(chunk)
}
}
const stdout = handleOutput('stdout')
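// Echo the full command (env vars + jest invocation) so a failing test can be
// reproduced locally; with --dry this echo is all that runs for the test.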
stdout(
[
...Object.entries(env).map((e) => `${e[0]}=${e[1]}`),
jestPath,
...args.map((a) => `'${a}'`),
].join(' ') + '\n'
)
// Don't execute tests when in dry run mode
if (options.dry) {
return resolve(new Date().getTime() - start)
}
const child = spawn(jestPath, args, {
stdio: ['ignore', 'pipe', 'pipe'],
env: {
...process.env,
...env,
},
// See: https://nodejs.org/en/blog/vulnerability/april-2024-security-releases-2
shell: process.platform === 'win32',
})
child.stdout.on('data', stdout)
child.stderr.on('data', handleOutput('stderr'))
children.add(child)
child.on('exit', async (code, signal) => {
children.delete(child)
const isChildExitWithNonZero = code !== 0 || signal !== null
if (isChildExitWithNonZero) {
if (hideOutput) {
await outputSema.acquire()
const isExpanded =
firstError && !killed && !shouldContinueTestsOnError
if (isExpanded) {
firstError = false
process.stdout.write(`${test.file} output:\n`)
} else if (killed) {
process.stdout.write(`${GROUP}${test.file} output (killed)\n`)
} else {
process.stdout.write(`${GROUP}${test.file} output\n`)
}
let output = ''
// limit output to the last 64kb so that we don't
// run out of log room in CI
for (const { chunk } of outputChunks) {
process.stdout.write(chunk)
output += chunk.toString()
}
if (process.env.CI && !killed) {
errorsPerTests.set(test.file, output)
}
if (isExpanded) {
process.stdout.write(`end of ${test.file} output\n`)
} else {
process.stdout.write(`end of ${test.file} output\n${ENDGROUP}\n`)
}
outputSema.release()
}
const err = new Error(
code ? `failed with code: ${code}` : `failed with signal: ${signal}`
)
// @ts-expect-error
err.output = outputChunks
.map(({ chunk }) => chunk.toString())
.join('')
return reject(err)
}
// If the environment is CI and this test execution failed after a retry, preserve test traces
// to upload as GitHub Actions artifacts for debugging purposes
const shouldPreserveTracesOutput =
(process.env.CI && isRetry && isChildExitWithNonZero) ||
process.env.PRESERVE_TRACES_OUTPUT
if (!shouldPreserveTracesOutput) {
await fsp
.rm(
path.join(
__dirname,
'test/traces',
path
.relative(path.join(__dirname, 'test'), test.file)
.replace(/\//g, '-')
),
{ recursive: true, force: true }
)
.catch(() => {})
}
resolve(new Date().getTime() - start)
})
})
const directorySemas = new Map()
const originalRetries = numRetries
await Promise.all(
tests.map(async (test) => {
const dirName = path.dirname(test.file)
let dirSema = directorySemas.get(dirName)
// we only allow 1 test at a time per directory for
// legacy integration tests
if (test.file.startsWith('test/integration') && dirSema === undefined) {
directorySemas.set(dirName, (dirSema = new Sema(1)))
}
if (dirSema) await dirSema.acquire()
await sema.acquire()
let passed = false
const shouldSkipRetries = skipRetryTestManifest.find((t) =>
t.includes(test.file)
)
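      // Manifest entries opt a test out of retries entirely, presumably
      // because re-running a deterministic failure only wastes CI time.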
const numRetries = shouldSkipRetries ? 0 : originalRetries
if (shouldSkipRetries) {
console.log(
`Skipping retry for ${test.file} due to skipRetryTestManifest`
)
}
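      // One initial run plus `numRetries` extra attempts; we break out of
      // the loop on the first attempt that passes.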
for (let i = 0; i < numRetries + 1; i++) {
try {
console.log(`Starting ${test.file} retry ${i}/${numRetries}`)
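          // The trailing flags appear to mean (isFinalRun, isRetry): report
          // on the last attempt and mark attempts after the first as
          // retries. This is inferred from the expressions below, not
          // verified against runTest's signature.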
const time = await runTest(
test,
shouldSkipRetries || i === numRetries,
shouldSkipRetries || i > 0
)
timings.push({
file: test.file,
time,
})
passed = true
console.log(
`Finished ${test.file} on retry ${i}/${numRetries} in ${
time / 1000
}s`
)
break
} catch (err) {
if (i < numRetries) {
try {
let testDir = path.dirname(path.join(__dirname, test.file))
              // If the test is nested inside a `test` folder, traverse up a
              // directory so we clean up all of the relevant test files
if (testDir.endsWith('/test') || testDir.endsWith('\\test')) {
testDir = path.join(testDir, '../')
}
console.log('Cleaning test files at', testDir)
await exec(`git clean -fdx "${testDir}"`)
await exec(`git checkout "${testDir}"`)
            } catch (err) {
              // best-effort cleanup; ignore git errors and let the retry run
            }
} else {
console.error(`${test.file} failed due to ${err}`)
}
}
}
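      // Every attempt failed: either abort the whole run, or, when
      // NEXT_TEST_CONTINUE_ON_ERROR is set, log and move on to the
      // remaining tests.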
if (!passed) {
console.error(
`${test.file} failed to pass within ${numRetries} retries`
)
if (!shouldContinueTestsOnError) {
killed = true
children.forEach((child) => child.kill())
cleanUpAndExit(1)
} else {
console.log(
`CONTINUE_ON_ERROR enabled, continuing tests after ${test.file} failed`
)
}
}
// Emit test output if test failed or if we're continuing tests on error
// This is parsed by the commenter webhook to notify about failing tests
if ((!passed || shouldContinueTestsOnError) && isTestJob) {
try {
const testsOutput = await fsp.readFile(
`${test.file}${RESULTS_EXT}`,
'utf8'
)
const obj = JSON.parse(testsOutput)
obj.processEnv = {
NEXT_TEST_MODE: process.env.NEXT_TEST_MODE,
HEADLESS: process.env.HEADLESS,
}
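          // The output semaphore keeps concurrent workers from interleaving
          // their JSON blocks, which would corrupt what the webhook parses.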
await outputSema.acquire()
if (GROUP) console.log(`${GROUP}Result as JSON for tooling`)
console.log(
`--test output start--`,
JSON.stringify(obj),
`--test output end--`
)
if (ENDGROUP) console.log(ENDGROUP)
outputSema.release()
} catch (err) {
console.log(`Failed to load test output`, err)
}
}
sema.release()
if (dirSema) dirSema.release()
})
)
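  // Each `timings` entry is { file, time } with `time` in milliseconds;
  // below they're converted to seconds and optionally synced upstream,
  // apparently so future runs can balance test groups by expected duration.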
if (options.timings) {
const curTimings = {}
// let junitData = `<testsuites name="jest tests">`
/*
<testsuite name="/__tests__/bar.test.js" tests="1" errors="0" failures="0" skipped="0" timestamp="2017-10-10T21:56:49" time="0.323">
<testcase classname="bar-should be bar" name="bar-should be bar" time="0.004">
</testcase>
</testsuite>
*/
for (const timing of timings) {
const timeInSeconds = timing.time / 1000
curTimings[timing.file] = timeInSeconds
// junitData += `
// <testsuite name="${timing.file}" file="${
// timing.file
// }" tests="1" errors="0" failures="0" skipped="0" timestamp="${new Date().toJSON()}" time="${timeInSeconds}">
// <testcase classname="tests suite should pass" name="${
// timing.file
// }" time="${timeInSeconds}"></testcase>
// </testsuite>
// `
}
// junitData += `</testsuites>`
// console.log('output timing data to junit.xml')
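    // Merge the fresh timings into the upstream data and PATCH it back; the
    // { files: { 'test-timings.json': { content } } } payload shape suggests
    // TIMINGS_API is a GitHub gist. Stale entries for test files that no
    // longer exist on disk are pruned first.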
if (prevTimings && process.env.TEST_TIMINGS_TOKEN) {
try {
const newTimings = {
...(await getTestTimings()),
...curTimings,
}
for (const test of Object.keys(newTimings)) {
if (!existsSync(path.join(__dirname, test))) {
console.log('removing stale timing', test)
delete newTimings[test]
}
}
const timingsRes = await fetch(TIMINGS_API, {
method: 'PATCH',
headers: {
...TIMINGS_API_HEADERS,
},
body: JSON.stringify({
files: {
'test-timings.json': {
content: JSON.stringify(newTimings),
},
},
}),
})
if (!timingsRes.ok) {
throw new Error(`request status: ${timingsRes.status}`)
}
const result = await timingsRes.json()
console.log(
`Sent updated timings successfully. API URL: "${result?.url}" HTML URL: "${result?.html_url}"`
)
} catch (err) {
console.log('Failed to update timings data', err)
}
}
}
}
main()
.then(() => cleanUpAndExit(0))
.catch((err) => {
console.error(err)
cleanUpAndExit(1)
})