From a532e32ecaae506ddd67d976cc54110aaa90c49c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bal=C3=A1zs=20Orb=C3=A1n?= Date: Fri, 19 Apr 2024 11:02:43 +0200 Subject: [PATCH] Reapply "chore(test): run related E2E deploy tests on PRs" (#64682) (#64712) --- .github/workflows/test_e2e_deploy.yml | 72 --------------- .github/workflows/test_e2e_deploy_related.yml | 81 +++++++++++++++++ .../workflows/test_e2e_deploy_scheduled.yml | 87 +++++++++++++++++++ packages/next/src/server/base-server.ts | 8 +- packages/next/src/server/lib/trace/tracer.ts | 19 +++- packages/next/src/server/next-server.ts | 7 +- run-tests.js | 36 ++++++-- scripts/run-related-test.mjs | 4 +- test/e2e/app-dir/ppr-full/ppr-full.test.ts | 10 ++- test/related-tests-manifest.json | 2 +- 10 files changed, 232 insertions(+), 94 deletions(-) delete mode 100644 .github/workflows/test_e2e_deploy.yml create mode 100644 .github/workflows/test_e2e_deploy_related.yml create mode 100644 .github/workflows/test_e2e_deploy_scheduled.yml diff --git a/.github/workflows/test_e2e_deploy.yml b/.github/workflows/test_e2e_deploy.yml deleted file mode 100644 index feb37aec4c..0000000000 --- a/.github/workflows/test_e2e_deploy.yml +++ /dev/null @@ -1,72 +0,0 @@ -name: test-e2e-deploy - -on: - schedule: - # run every day at midnight - - cron: '0 0 * * *' - # allow triggering manually as well - workflow_dispatch: - -jobs: - build: - if: github.repository_owner == 'vercel' - runs-on: ubuntu-latest - - env: - VERCEL_TEST_TOKEN: ${{ secrets.VERCEL_TEST_TOKEN }} - VERCEL_TEST_TEAM: vtest314-next-e2e-tests - DATADOG_API_KEY: ${{ secrets.DATA_DOG_API_KEY }} - NAPI_CLI_VERSION: 2.16.2 - TURBO_VERSION: 1.12.5 - NODE_LTS_VERSION: 20 - CARGO_PROFILE_RELEASE_LTO: 'true' - TURBO_TEAM: 'vercel' - TURBO_REMOTE_ONLY: 'true' - TEST_TIMINGS_TOKEN: ${{ secrets.TEST_TIMINGS_TOKEN }} - NEXT_TELEMETRY_DISABLED: 1 - - strategy: - fail-fast: false - matrix: - group: [1, 2] - - steps: - - name: Setup node - uses: actions/setup-node@v4 - with: - node-version: ${{ env.NODE_LTS_VERSION }} - check-latest: true - - run: corepack enable - - - uses: actions/checkout@v4 - with: - fetch-depth: 25 - - - run: pnpm install - - - run: pnpm run build - - - run: npm i -g vercel@latest - - - run: node scripts/run-e2e-test-project-reset.mjs - name: Reset test project - - - run: docker run --rm -v $(pwd):/work mcr.microsoft.com/playwright:v1.35.1-jammy /bin/bash -c "cd /work && NODE_VERSION=${{ env.NODE_LTS_VERSION }} ./scripts/setup-node.sh && corepack enable > /dev/null && NEXT_JUNIT_TEST_REPORT=true DATADOG_API_KEY=${DATADOG_API_KEY} DD_ENV=ci VERCEL_TEST_TOKEN=${{ secrets.VERCEL_TEST_TOKEN }} VERCEL_TEST_TEAM=vtest314-next-e2e-tests NEXT_TEST_JOB=1 NEXT_TEST_MODE=deploy TEST_TIMINGS_TOKEN=${{ secrets.TEST_TIMINGS_TOKEN }} xvfb-run node run-tests.js --type e2e --timings -g ${{ matrix.group }}/2 -c 1 >> /proc/1/fd/1" - name: Run test/e2e (deploy) - - - name: Upload test report - if: always() - uses: actions/upload-artifact@v4 - with: - name: test-reports - if-no-files-found: ignore - retention-days: 2 - path: | - test/test-junit-report - - - name: Upload test report to datadog - continue-on-error: true - run: | - ls -al ./test/*junit - - DD_ENV=ci npx @datadog/datadog-ci@2.23.1 junit upload --tags test.type:nextjs_deploy_e2e --service nextjs ./test/test-junit-report diff --git a/.github/workflows/test_e2e_deploy_related.yml b/.github/workflows/test_e2e_deploy_related.yml new file mode 100644 index 0000000000..f0ad0c51e8 --- /dev/null +++ b/.github/workflows/test_e2e_deploy_related.yml @@ -0,0 +1,81 
@@ +name: Test E2E (Vercel Deploy), related + +on: + pull_request: + types: [opened, synchronize] + +jobs: + test: + if: github.repository_owner == 'vercel' + runs-on: ubuntu-latest + + env: + CARGO_PROFILE_RELEASE_LTO: 'true' + DATADOG_API_KEY: ${{ secrets.DATA_DOG_API_KEY }} + DD_ENV: 'ci' + NAPI_CLI_VERSION: 2.16.2 + NEXT_JUNIT_TEST_REPORT: 'true' + NEXT_TELEMETRY_DISABLED: 1 + NEXT_TEST_JOB: 1 + NEXT_TEST_MODE: 'deploy' + NODE_LTS_VERSION: 20 + TEST_TIMINGS_TOKEN: ${{ secrets.TEST_TIMINGS_TOKEN }} + TURBO_REMOTE_ONLY: 'true' + TURBO_TEAM: 'vercel' + TURBO_VERSION: 1.12.5 + VERCEL_TEST_TEAM: vtest314-next-e2e-tests + VERCEL_TEST_TOKEN: ${{ secrets.VERCEL_TEST_TOKEN }} + + strategy: + fail-fast: false + + steps: + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_LTS_VERSION }} + check-latest: true + + - name: Setup pnpm + run: corepack enable + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 25 + + - name: Setup tests + run: | + pnpm install + pnpm run build + npm i -g vercel@latest + node scripts/run-e2e-test-project-reset.mjs + + - name: Run tests + run: | + docker run --rm -v $(pwd):/work mcr.microsoft.com/playwright:v1.41.2-jammy /bin/bash -c "cd /work && \ + NODE_VERSION=${{ env.NODE_LTS_VERSION }} ./scripts/setup-node.sh && \ + corepack enable > /dev/null && \ + NEXT_JUNIT_TEST_REPORT=${{ env.NEXT_JUNIT_TEST_REPORT }} \ + DATADOG_API_KEY=${{ env.DATADOG_API_KEY }} \ + DD_ENV=${{ env.DD_ENV }} \ + VERCEL_TEST_TOKEN=${{ env.VERCEL_TEST_TOKEN }} \ + VERCEL_TEST_TEAM=${{ env.VERCEL_TEST_TEAM }} \ + NEXT_TEST_JOB=${{ env.NEXT_TEST_JOB }} \ + NEXT_TEST_MODE=${{ env.NEXT_TEST_MODE }} \ + TEST_TIMINGS_TOKEN=${{ env.TEST_TIMINGS_TOKEN }} \ + xvfb-run node run-tests.js --related --timings -c 1 >> /proc/1/fd/1" + + - name: Save test report as artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: test-reports + if-no-files-found: ignore + retention-days: 2 + path: test/test-junit-report + + - name: Upload test report to Datadog + continue-on-error: true + run: | + pnpx @datadog/datadog-ci@2.23.1 junit upload --tags test.type:nextjs_deploy_e2e --service nextjs ./test/test-junit-report diff --git a/.github/workflows/test_e2e_deploy_scheduled.yml b/.github/workflows/test_e2e_deploy_scheduled.yml new file mode 100644 index 0000000000..c0690525f2 --- /dev/null +++ b/.github/workflows/test_e2e_deploy_scheduled.yml @@ -0,0 +1,87 @@ +name: Test E2E (Vercel Deploy), scheduled + +on: + schedule: + # run every day at midnight + - cron: '0 0 * * *' + # allow triggering manually as well + workflow_dispatch: + +jobs: + test: + if: github.repository_owner == 'vercel' + runs-on: ubuntu-latest + + env: + CARGO_PROFILE_RELEASE_LTO: 'true' + DATADOG_API_KEY: ${{ secrets.DATA_DOG_API_KEY }} + DD_ENV: 'ci' + NAPI_CLI_VERSION: 2.16.2 + NEXT_JUNIT_TEST_REPORT: 'true' + NEXT_TELEMETRY_DISABLED: 1 + NEXT_TEST_CONTINUE_ON_ERROR: 1 + NEXT_TEST_JOB: 1 + NEXT_TEST_MODE: 'deploy' + NODE_LTS_VERSION: 20 + TEST_TIMINGS_TOKEN: ${{ secrets.TEST_TIMINGS_TOKEN }} + TURBO_REMOTE_ONLY: 'true' + TURBO_TEAM: 'vercel' + TURBO_VERSION: 1.12.5 + VERCEL_TEST_TEAM: vtest314-next-e2e-tests + VERCEL_TEST_TOKEN: ${{ secrets.VERCEL_TEST_TOKEN }} + + strategy: + fail-fast: false + matrix: + group: [1, 2] + + steps: + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_LTS_VERSION }} + check-latest: true + + - name: Setup pnpm + run: corepack enable + + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 25 + 
+ - name: Setup tests + run: | + pnpm install + pnpm run build + npm i -g vercel@latest + node scripts/run-e2e-test-project-reset.mjs + + - name: Run tests + run: | + docker run --rm -v $(pwd):/work mcr.microsoft.com/playwright:v1.41.2-jammy /bin/bash -c "cd /work && \ + NODE_VERSION=${{ env.NODE_LTS_VERSION }} ./scripts/setup-node.sh && \ + corepack enable > /dev/null && \ + NEXT_JUNIT_TEST_REPORT=${{ env.NEXT_JUNIT_TEST_REPORT }} \ + DATADOG_API_KEY=${{ env.DATADOG_API_KEY }} \ + DD_ENV=${{ env.DD_ENV }} \ + VERCEL_TEST_TOKEN=${{ env.VERCEL_TEST_TOKEN }} \ + VERCEL_TEST_TEAM=${{ env.VERCEL_TEST_TEAM }} \ + NEXT_TEST_JOB=${{ env.NEXT_TEST_JOB }} \ + NEXT_TEST_MODE=${{ env.NEXT_TEST_MODE }} \ + TEST_TIMINGS_TOKEN=${{ env.TEST_TIMINGS_TOKEN }} \ + xvfb-run node run-tests.js --type e2e --timings -g ${{ matrix.group }}/2 -c 1 >> /proc/1/fd/1" + + - name: Save test report as artifacts + if: always() + uses: actions/upload-artifact@v4 + with: + name: test-reports + if-no-files-found: ignore + retention-days: 2 + path: test/test-junit-report + + - name: Upload test report to Datadog + continue-on-error: true + run: | + pnpx @datadog/datadog-ci@2.23.1 junit upload --tags test.type:nextjs_deploy_e2e --service nextjs ./test/test-junit-report diff --git a/packages/next/src/server/base-server.ts b/packages/next/src/server/base-server.ts index 270b7c87f5..0854bac176 100644 --- a/packages/next/src/server/base-server.ts +++ b/packages/next/src/server/base-server.ts @@ -102,7 +102,7 @@ import { AppRouteRouteMatcherProvider } from './future/route-matcher-providers/a import { PagesAPIRouteMatcherProvider } from './future/route-matcher-providers/pages-api-route-matcher-provider' import { PagesRouteMatcherProvider } from './future/route-matcher-providers/pages-route-matcher-provider' import { ServerManifestLoader } from './future/route-matcher-providers/helpers/manifest-loaders/server-manifest-loader' -import { getTracer, SpanKind } from './lib/trace/tracer' +import { getTracer, isBubbledError, SpanKind } from './lib/trace/tracer' import { BaseServerSpan } from './lib/trace/constants' import { I18NProvider } from './future/helpers/i18n-provider' import { sendResponse } from './send-response' @@ -1396,7 +1396,11 @@ export default abstract class Server< return this.renderError(null, req, res, '/_error', {}) } - if (this.minimalMode || this.renderOpts.dev || (err as any).bubble) { + if ( + this.minimalMode || + this.renderOpts.dev || + (isBubbledError(err) && err.bubble) + ) { throw err } this.logError(getProperError(err)) diff --git a/packages/next/src/server/lib/trace/tracer.ts b/packages/next/src/server/lib/trace/tracer.ts index 663721d963..fe47d88d24 100644 --- a/packages/next/src/server/lib/trace/tracer.ts +++ b/packages/next/src/server/lib/trace/tracer.ts @@ -1,3 +1,4 @@ +import type { FetchEventResult } from '../../web/types' import type { SpanTypes } from './constants' import { LogSpanAllowList, NextVanillaSpanAllowlist } from './constants' @@ -36,10 +37,22 @@ const isPromise = (p: any): p is Promise => { return p !== null && typeof p === 'object' && typeof p.then === 'function' } -type BubbledError = Error & { bubble?: boolean } +export class BubbledError extends Error { + constructor( + public readonly bubble?: boolean, + public readonly result?: FetchEventResult + ) { + super() + } +} + +export function isBubbledError(error: unknown): error is BubbledError { + if (typeof error !== 'object' || error === null) return false + return error instanceof BubbledError +} const closeSpanWithError = (span: 
Span, error?: Error) => { - if ((error as BubbledError | undefined)?.bubble === true) { + if (isBubbledError(error) && error.bubble) { span.setAttribute('next.bubble', true) } else { if (error) { @@ -307,7 +320,7 @@ class NextTracerImpl implements NextTracer { } try { if (fn.length > 1) { - return fn(span, (err?: Error) => closeSpanWithError(span, err)) + return fn(span, (err) => closeSpanWithError(span, err)) } const result = fn(span) diff --git a/packages/next/src/server/next-server.ts b/packages/next/src/server/next-server.ts index 3e93a22431..36c6c68312 100644 --- a/packages/next/src/server/next-server.ts +++ b/packages/next/src/server/next-server.ts @@ -88,7 +88,7 @@ import { INSTRUMENTATION_HOOK_FILENAME, RSC_PREFETCH_SUFFIX, } from '../lib/constants' -import { getTracer } from './lib/trace/tracer' +import { BubbledError, getTracer } from './lib/trace/tracer' import { NextNodeServerSpan } from './lib/trace/constants' import { nodeFs } from './lib/node-fs-methods' import { getRouteRegex } from '../shared/lib/router/utils/route-regex' @@ -1676,10 +1676,7 @@ export default class NextNodeServer extends BaseServer< if ('response' in result) { if (isMiddlewareInvoke) { bubblingResult = true - const err = new Error() - ;(err as any).result = result - ;(err as any).bubble = true - throw err + throw new BubbledError(true, result) } for (const [key, value] of Object.entries( diff --git a/run-tests.js b/run-tests.js index 32e0a5a441..4aa0796e98 100644 --- a/run-tests.js +++ b/run-tests.js @@ -1,3 +1,5 @@ +//@ts-check + const os = require('os') const path = require('path') const _glob = require('glob') @@ -5,6 +7,7 @@ const { existsSync } = require('fs') const fsp = require('fs/promises') const nodeFetch = require('node-fetch') const vercelFetch = require('@vercel/fetch') +// @ts-expect-error const fetch = vercelFetch(nodeFetch) const { promisify } = require('util') const { Sema } = require('async-sema') @@ -25,6 +28,8 @@ let argv = require('yargs/yargs')(process.argv.slice(2)) .string('g') .alias('g', 'group') .number('c') + .boolean('related') + .alias('r', 'related') .alias('c', 'concurrency').argv function escapeRegexp(str) { @@ -197,6 +202,7 @@ async function main() { group: argv.group ?? false, testPattern: argv.testPattern ?? false, type: argv.type ?? false, + related: argv.related ?? false, retries: argv.retries ?? 
DEFAULT_NUM_RETRIES, } let numRetries = options.retries @@ -223,21 +229,32 @@ async function main() { console.log('Running tests with concurrency:', options.concurrency) /** @type TestFile[] */ - let tests = argv._.filter((arg) => arg.match(/\.test\.(js|ts|tsx)/)).map( - (file) => ({ - file, - excludedCases: [], - }) - ) + let tests = argv._.filter((arg) => + arg.toString().match(/\.test\.(js|ts|tsx)/) + ).map((file) => ({ file: file.toString(), excludedCases: [] })) let prevTimings if (tests.length === 0) { + /** @type {RegExp | undefined} */ let testPatternRegex - if (options.testPattern) { + if (options.testPattern && typeof options.testPattern === 'string') { testPatternRegex = new RegExp(options.testPattern) } + if (options.related) { + const { getRelatedTests } = await import('./scripts/run-related-test.mjs') + const tests = await getRelatedTests() + if (tests.length) + testPatternRegex = new RegExp(tests.map(escapeRegexp).join('|')) + + if (testPatternRegex) { + console.log('Running related tests:', testPatternRegex.toString()) + } else { + console.log('No matching related tests.') + } + } + tests = ( await glob('**/*.test.{js,ts,tsx}', { nodir: true, @@ -311,12 +328,13 @@ async function main() { return true }) - if (options.group) { + if (options.group && typeof options.group === 'string') { const groupParts = options.group.split('/') const groupPos = parseInt(groupParts[0], 10) const groupTotal = parseInt(groupParts[1], 10) if (prevTimings) { + /** @type {TestFile[][]} */ const groups = [[]] const groupTimes = [0] @@ -463,6 +481,7 @@ ${ENDGROUP}`) // Format the output of junit report to include the test name // For the debugging purpose to compare actual run list to the generated reports // [NOTE]: This won't affect if junit reporter is not enabled + // @ts-expect-error .replaceAll() does exist. Follow-up why TS is not recognizing it JEST_JUNIT_OUTPUT_NAME: test.file.replaceAll('/', '_'), // Specify suite name for the test to avoid unexpected merging across different env / grouped tests // This is not individual suites name (corresponding 'describe'), top level suite name which have redundant names by default @@ -553,6 +572,7 @@ ${ENDGROUP}`) const err = new Error( code ? `failed with code: ${code}` : `failed with signal: ${signal}` ) + // @ts-expect-error err.output = outputChunks .map(({ chunk }) => chunk.toString()) .join('') diff --git a/scripts/run-related-test.mjs b/scripts/run-related-test.mjs index 4dddae9b0e..2a92a6e9f8 100644 --- a/scripts/run-related-test.mjs +++ b/scripts/run-related-test.mjs @@ -19,7 +19,9 @@ async function getChangedFilesFromPackages(baseBranch = 'canary') { await exec('git config --global --add safe.directory /work') await exec(`git remote set-branches --add origin ${baseBranch}`) await exec(`git fetch origin ${baseBranch} --depth=20`) - const { stdout } = await exec(`git diff --name-only ${baseBranch}`) + const { stdout } = await exec( + `git diff 'origin/${baseBranch}...' 
--name-only` + ) return stdout .trim() .split('\n') diff --git a/test/e2e/app-dir/ppr-full/ppr-full.test.ts b/test/e2e/app-dir/ppr-full/ppr-full.test.ts index c5ad98cf45..f6e7fe3b83 100644 --- a/test/e2e/app-dir/ppr-full/ppr-full.test.ts +++ b/test/e2e/app-dir/ppr-full/ppr-full.test.ts @@ -1,4 +1,4 @@ -import { createNextDescribe } from 'e2e-utils' +import { createNextDescribe, isNextStart } from 'e2e-utils' import { links } from './components/links' async function measure(stream: NodeJS.ReadableStream) { @@ -304,12 +304,18 @@ createNextDescribe( 'text/html; charset=utf-8' ) - if (!isNextDev) { + if (isNextStart) { expect(res.headers.get('cache-control')).toEqual( 's-maxage=31536000, stale-while-revalidate' ) } + if (isNextDeploy) { + expect(res.headers.get('cache-control')).toEqual( + 'public, max-age=0, must-revalidate' + ) + } + if (signal === 'redirect()') { const location = res.headers.get('location') expect(typeof location).toEqual('string') diff --git a/test/related-tests-manifest.json b/test/related-tests-manifest.json index 3803280f17..ba50869d72 100644 --- a/test/related-tests-manifest.json +++ b/test/related-tests-manifest.json @@ -1,3 +1,3 @@ { - "packages/next/src/server": ["e2e/app-dir/ppr-*"] + "packages/next/src/server": ["e2e/app-dir/ppr-full", "e2e/app-dir/ppr-errors"] }
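--
Notes on the patch:

The runtime-facing change here replaces the ad-hoc `(err as any).bubble` / `(err as any).result` pattern with a dedicated `BubbledError` class and an `isBubbledError` type guard exported from packages/next/src/server/lib/trace/tracer.ts. Below is a minimal standalone sketch of that pattern, using the names from the patch; `FetchEventResult` is stubbed here for illustration (in Next.js it is imported from '../../web/types'):

    // Sketch of the BubbledError pattern introduced by this patch.
    // FetchEventResult is a stand-in stub, not the real Next.js type.
    type FetchEventResult = { response: Response; waitUntil: Promise<any> }

    class BubbledError extends Error {
      constructor(
        public readonly bubble?: boolean,
        public readonly result?: FetchEventResult
      ) {
        super()
      }
    }

    function isBubbledError(error: unknown): error is BubbledError {
      return (
        typeof error === 'object' &&
        error !== null &&
        error instanceof BubbledError
      )
    }

    // Throwing side (next-server.ts): a middleware response is signalled by
    // throwing instead of returning, so the caller can "bubble" it up:
    //   throw new BubbledError(true, result)
    //
    // Catching side (base-server.ts / tracer.ts): bubbled errors are rethrown
    // or tagged on the span rather than logged as failures:
    //   if (isBubbledError(err) && err.bubble) throw err

On the CI side, the single scheduled deploy workflow is split in two: test_e2e_deploy_scheduled.yml keeps the nightly cron run over the full two-group matrix, while test_e2e_deploy_related.yml runs on pull_request events and invokes `node run-tests.js --related --timings -c 1`, so PRs only exercise the deploy tests related to the files they touch. The changed-files detection lives in scripts/run-related-test.mjs, with the mapping from source paths to test suites kept in test/related-tests-manifest.json. The switch to `git diff 'origin/canary...' --name-only` uses git's three-dot range, i.e. it diffs against the merge base with canary, so only the PR's own changes (not unrelated commits that have since landed on canary) select related tests.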