test(integration): allow continuing when test suite fails (#45682)
<!-- Thanks for opening a PR! Your contribution is much appreciated. To make sure your PR is handled as smoothly as possible we request that you follow the checklist sections below. Choose the right checklist for the change(s) that you're making: --> Partially resolves WEB-544. This PR adds new detection for the external env variable `NEXT_TEST_CONTINUE_ON_ERROR`. If this variable is set, then even if a test suite fails the test runner will keep executing the remaining tests. Turbopack (vercel/turbo) runs Next.js integration tests to track its feature parity with the existing Next.js features, and it is currently expected to fail on certain tests; however, we still want to execute all of the tests to track how many tests are passing / failing. Once this change lands, Turbopack needs further verification to see if all of the tests are actually running. ## Bug - [ ] Related issues linked using `fixes #number` - [ ] Integration tests added - [ ] Errors have a helpful link attached, see [`contributing.md`](https://github.com/vercel/next.js/blob/canary/contributing.md) ## Feature - [ ] Implements an existing feature request or RFC. Make sure the feature request has been accepted for implementation before opening a PR. - [ ] Related issues linked using `fixes #number` - [ ] [e2e](https://github.com/vercel/next.js/blob/canary/contributing/core/testing.md#writing-tests-for-nextjs) tests added - [ ] Documentation added - [ ] Telemetry added. In case of a feature if it's used or not. - [ ] Errors have a helpful link attached, see [`contributing.md`](https://github.com/vercel/next.js/blob/canary/contributing.md) ## Documentation / Examples - [ ] Make sure the linting passes by running `pnpm build && pnpm lint` - [ ] The "examples guidelines" are followed from [our contributing doc](https://github.com/vercel/next.js/blob/canary/contributing/examples/adding-examples.md)
This commit is contained in:
parent
434a8cb7d0
commit
3d05c13ba4
1 changed files with 9 additions and 1 deletions
10
run-tests.js
10
run-tests.js
|
@ -17,6 +17,7 @@ const DEFAULT_NUM_RETRIES = os.platform() === 'win32' ? 2 : 1
|
|||
const DEFAULT_CONCURRENCY = 2
|
||||
const RESULTS_EXT = `.results.json`
|
||||
const isTestJob = !!process.env.NEXT_TEST_JOB
|
||||
const shouldContinueTestsOnError = !!process.env.NEXT_TEST_CONTINUE_ON_ERROR
|
||||
const TIMINGS_API = `https://api.github.com/gists/4500dd89ae2f5d70d9aaceb191f528d1`
|
||||
const TIMINGS_API_HEADERS = {
|
||||
Accept: 'application/vnd.github.v3+json',
|
||||
|
@ -418,7 +419,14 @@ async function main() {
|
|||
console.log(`Failed to load test output`, err)
|
||||
}
|
||||
}
|
||||
cleanUpAndExit(1)
|
||||
|
||||
if (!shouldContinueTestsOnError) {
|
||||
cleanUpAndExit(1)
|
||||
} else {
|
||||
console.log(
|
||||
`CONTINUE_ON_ERROR enabled, continuing tests after ${test} failed`
|
||||
)
|
||||
}
|
||||
}
|
||||
sema.release()
|
||||
dirSema.release()
|
||||
|
|
Loading…
Reference in a new issue