Remove next-dev and its test suite and benchmarks (#55983)

We've rearchitected Next.js + Turbopack so that Turbopack no longer reimplements pieces of Next.js in its own dev server. This:

- Removes the `next-dev` binary, which is no longer reachable through
`next --turbo`.
- Removes its test suite, since much of it is covered (and often more
thoroughly) by the Next.js test suite.
- Removes its benchmark suite, which should be covered by `turbopack-bench`
(https://github.com/vercel/turbo/tree/main/crates/turbopack-bench).

Test Plan: CI


Closes WEB-1652
Will Binns-Smith, 2023-09-25 20:22:39 -07:00, committed by GitHub
parent c56f9f4ff9
commit 9ac463b218
372 changed files with 119 additions and 12156 deletions


@@ -1,9 +0,0 @@
[[profile.default.overrides]]
filter = "package(next-dev-tests)"
# Default is 100ms. Extending this addresses false positives in the
# next-dev integration tests.
leak-timeout = "500ms"
retries = 2
slow-timeout = "60s"
threads-required = 4
failure-output = "immediate-final"


@@ -22,9 +22,6 @@ packages/next/src/bundles/webpack/packages/lazy-compilation-*.js
packages/next-swc/crates/**/tests/**/output*
packages/next-swc/crates/core/tests/loader/issue-32553/input.js
packages/next-swc/crates/next-dev-tests/tests/integration/turbopack/basic/error/input/broken.js
packages/next-swc/crates/next-dev-tests/tests/integration/next/webpack-loaders/no-options/input/pages/hello.raw.js
packages/next-swc/crates/next-dev-tests/tests/integration/next/webpack-loaders/no-options/input/raw/hello.js
packages/next-swc/native/**/*
packages/next-codemod/transforms/__testfixtures__/**

Cargo.lock (generated), 992 changes

File diff suppressed because it is too large.


@@ -9,8 +9,6 @@ members = [
"packages/next-swc/crates/next-api",
"packages/next-swc/crates/next-build",
"packages/next-swc/crates/next-core",
"packages/next-swc/crates/next-dev",
"packages/next-swc/crates/next-dev-tests",
"packages/next-swc/crates/next-transform-font",
"packages/next-swc/crates/next-transform-dynamic",
"packages/next-swc/crates/next-transform-strip-page-exports",
@@ -29,10 +27,6 @@ opt-level = 3
next-api = { path = "packages/next-swc/crates/next-api", default-features = false }
next-build = { path = "packages/next-swc/crates/next-build", default-features = false }
next-core = { path = "packages/next-swc/crates/next-core", default-features = false }
next-dev = { path = "packages/next-swc/crates/next-dev", default-features = false, features = [
"serializable",
] }
next-dev-tests = { path = "packages/next-swc/crates/next-dev-tests" }
next-transform-font = { path = "packages/next-swc/crates/next-transform-font" }
next-transform-dynamic = { path = "packages/next-swc/crates/next-transform-dynamic" }
next-transform-strip-page-exports = { path = "packages/next-swc/crates/next-transform-strip-page-exports" }


@@ -17,9 +17,8 @@ plugin = ["turbopack-binding/__swc_core_binding_napi_plugin", "turbopack-binding
sentry_native_tls = ["sentry", "sentry/native-tls", "native-tls"]
sentry_rustls = ["sentry", "sentry/rustls", "rustls-tls"]
native-tls = ["next-dev/native-tls"]
rustls-tls = ["next-dev/rustls-tls"]
serializable = ["next-dev/serializable"]
native-tls = ["next-core/native-tls"]
rustls-tls = ["next-core/rustls-tls"]
image-webp = ["next-core/image-webp"]
image-avif = ["next-core/image-avif"]
# Enable all the available image codec support.
@@ -49,7 +48,6 @@ napi = { version = "2", default-features = false, features = [
] }
napi-derive = "2"
next-swc = { version = "0.0.0", path = "../core" }
next-dev = { workspace = true }
next-api = { workspace = true }
next-build = { workspace = true }
next-core = { workspace = true }


@@ -10,10 +10,8 @@ autobenches = false
bench = false
[features]
default = ["custom_allocator", "native-tls"]
default = ["custom_allocator"]
custom_allocator = ["turbopack-binding/__turbo_tasks_malloc", "turbopack-binding/__turbo_tasks_malloc_custom_allocator"]
native-tls = ["next-core/native-tls"]
rustls-tls = ["next-core/rustls-tls"]
[dependencies]
anyhow = { workspace = true, features = ["backtrace"] }
@@ -47,4 +45,4 @@ tracing-subscriber = { workspace = true, features = ["env-filter", "json"] }
shadow-rs = { workspace = true }
turbopack-binding = { workspace = true, features = [
"__turbo_tasks_build"
]}
]}

File diff suppressed because it is too large.


@@ -1,255 +0,0 @@
use anyhow::{bail, Context, Result};
use indexmap::IndexMap;
use mime::{APPLICATION_JAVASCRIPT_UTF_8, APPLICATION_JSON};
use serde::Serialize;
use turbo_tasks::{
graph::{GraphTraversal, NonDeterministic},
ReadRef, Vc,
};
use turbopack_binding::{
turbo::{tasks::TryJoinIterExt, tasks_fs::File},
turbopack::{
core::{asset::AssetContent, introspect::Introspectable, version::VersionedContentExt},
dev_server::source::{
route_tree::{BaseSegment, RouteTree, RouteTrees, RouteType},
ContentSource, ContentSourceContent, ContentSourceData, GetContentSourceContent,
},
node::render::{
node_api_source::NodeApiContentSource, rendered_source::NodeRenderContentSource,
},
},
};
use crate::{embed_js::next_js_file, next_config::Rewrites, util::get_asset_path_from_pathname};
/// A content source which creates the next.js `_devPagesManifest.json` and
/// `_devMiddlewareManifest.json` which are used for client side navigation.
#[turbo_tasks::value(shared)]
pub struct DevManifestContentSource {
pub page_roots: Vec<Vc<Box<dyn ContentSource>>>,
pub rewrites: Vc<Rewrites>,
}
#[turbo_tasks::value_impl]
impl DevManifestContentSource {
/// Recursively find all routes in the `page_roots` content sources.
#[turbo_tasks::function]
async fn find_routes(self: Vc<Self>) -> Result<Vc<Vec<String>>> {
let this = &*self.await?;
async fn content_source_to_pathname(
content_source: Vc<Box<dyn ContentSource>>,
) -> Result<Option<ReadRef<String>>> {
// TODO This shouldn't use casts but a public API instead
if let Some(api_source) =
Vc::try_resolve_downcast_type::<NodeApiContentSource>(content_source).await?
{
return Ok(Some(api_source.get_pathname().await?));
}
if let Some(page_source) =
Vc::try_resolve_downcast_type::<NodeRenderContentSource>(content_source).await?
{
return Ok(Some(page_source.get_pathname().await?));
}
Ok(None)
}
async fn get_content_source_children(
content_source: Vc<Box<dyn ContentSource>>,
) -> Result<Vec<Vc<Box<dyn ContentSource>>>> {
Ok(content_source.get_children().await?.clone_value())
}
let routes = NonDeterministic::new()
.visit(this.page_roots.iter().copied(), get_content_source_children)
.await
.completed()?
.into_iter()
.map(content_source_to_pathname)
.try_join()
.await?;
let mut routes = routes
.into_iter()
.flatten()
.map(|route| route.clone_value())
.collect::<Vec<_>>();
routes.sort_by_cached_key(|s| s.split('/').map(PageSortKey::from).collect::<Vec<_>>());
routes.dedup();
Ok(Vc::cell(routes))
}
/// Recursively find all pages in the `page_roots` content sources
/// (excluding api routes).
#[turbo_tasks::function]
async fn find_pages(self: Vc<Self>) -> Result<Vc<Vec<String>>> {
let routes = &*self.find_routes().await?;
// we don't need to sort as it's already sorted by `find_routes`
let pages = routes
.iter()
.filter(|s| !s.starts_with("/api"))
.cloned()
.collect();
Ok(Vc::cell(pages))
}
/// Create a build manifest with all pages.
#[turbo_tasks::function]
async fn create_build_manifest(self: Vc<Self>) -> Result<Vc<String>> {
let this = &*self.await?;
let sorted_pages = &*self.find_pages().await?;
let routes = sorted_pages
.iter()
.map(|pathname| {
(
pathname,
vec![format!(
"_next/static/chunks/pages{}",
get_asset_path_from_pathname(pathname, ".js")
)],
)
})
.collect();
let manifest = BuildManifest {
rewrites: this.rewrites.await?,
sorted_pages,
routes,
};
let manifest = next_js_file("entry/manifest/buildManifest.js".to_string())
.await?
.as_content()
.context("embedded buildManifest file missing")?
.content()
.to_str()?
.replace("$$MANIFEST$$", &serde_json::to_string(&manifest)?);
Ok(Vc::cell(manifest))
}
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct BuildManifest<'a> {
#[serde(rename = "__rewrites")]
rewrites: ReadRef<Rewrites>,
sorted_pages: &'a Vec<String>,
#[serde(flatten)]
routes: IndexMap<&'a String, Vec<String>>,
}
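For illustration (not part of this diff): a struct shaped like `BuildManifest` above serializes its per-page chunk lists as top-level JSON keys thanks to `#[serde(flatten)]`, roughly the shape of `_buildManifest`. A minimal standalone sketch, assuming `serde`, `serde_json`, and `indexmap` (with its `serde` feature) as dependencies, with `serde_json::Value` standing in for `Rewrites` and made-up page data:

```rust
use indexmap::IndexMap;
use serde::Serialize;

// Standalone analogue of the BuildManifest struct above (hypothetical values).
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct ManifestSketch<'a> {
    #[serde(rename = "__rewrites")]
    rewrites: serde_json::Value,
    sorted_pages: &'a Vec<String>,
    // `flatten` lifts each page -> chunk-list entry to a top-level JSON key.
    #[serde(flatten)]
    routes: IndexMap<&'a String, Vec<String>>,
}

fn main() {
    let pages = vec!["/".to_string(), "/about".to_string()];
    let routes: IndexMap<_, _> = pages
        .iter()
        .map(|p| {
            let asset = if p.as_str() == "/" { "/index" } else { p.as_str() };
            (p, vec![format!("_next/static/chunks/pages{}.js", asset)])
        })
        .collect();
    let manifest = ManifestSketch {
        rewrites: serde_json::json!({ "beforeFiles": [], "afterFiles": [], "fallback": [] }),
        sorted_pages: &pages,
        routes,
    };
    // Prints "__rewrites", "sortedPages", and one top-level key per page.
    println!("{}", serde_json::to_string_pretty(&manifest).unwrap());
}
```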
const DEV_MANIFEST_PATHNAME: &str = "_next/static/development/_devPagesManifest.json";
const BUILD_MANIFEST_PATHNAME: &str = "_next/static/development/_buildManifest.js";
const DEV_MIDDLEWARE_MANIFEST_PATHNAME: &str =
"_next/static/development/_devMiddlewareManifest.json";
#[turbo_tasks::value_impl]
impl ContentSource for DevManifestContentSource {
#[turbo_tasks::function]
fn get_routes(self: Vc<Self>) -> Vc<RouteTree> {
Vc::<RouteTrees>::cell(vec![
RouteTree::new_route(
BaseSegment::from_static_pathname(DEV_MANIFEST_PATHNAME).collect(),
RouteType::Exact,
Vc::upcast(self),
),
RouteTree::new_route(
BaseSegment::from_static_pathname(BUILD_MANIFEST_PATHNAME).collect(),
RouteType::Exact,
Vc::upcast(self),
),
RouteTree::new_route(
BaseSegment::from_static_pathname(DEV_MIDDLEWARE_MANIFEST_PATHNAME).collect(),
RouteType::Exact,
Vc::upcast(self),
),
])
.merge()
}
}
#[turbo_tasks::value_impl]
impl GetContentSourceContent for DevManifestContentSource {
#[turbo_tasks::function]
async fn get(
self: Vc<Self>,
path: String,
_data: turbo_tasks::Value<ContentSourceData>,
) -> Result<Vc<ContentSourceContent>> {
let manifest_file = match path.as_str() {
DEV_MANIFEST_PATHNAME => {
let pages = &*self.find_routes().await?;
File::from(serde_json::to_string(&serde_json::json!({
"pages": pages,
}))?)
.with_content_type(APPLICATION_JSON)
}
BUILD_MANIFEST_PATHNAME => {
let build_manifest = &*self.create_build_manifest().await?;
File::from(build_manifest.as_str()).with_content_type(APPLICATION_JAVASCRIPT_UTF_8)
}
DEV_MIDDLEWARE_MANIFEST_PATHNAME => {
// If there is actual middleware, this request will have been handled by the
// node router in next-core/js/src/entry/router.ts and
// next/src/server/lib/route-resolver.ts.
// If we've reached this point, then there is no middleware and we need to
// respond with an empty `MiddlewareMatcher[]`.
File::from("[]").with_content_type(APPLICATION_JSON)
}
_ => bail!("unknown path: {}", path),
};
Ok(ContentSourceContent::static_content(
AssetContent::file(manifest_file.into()).versioned(),
))
}
}
#[turbo_tasks::value_impl]
impl Introspectable for DevManifestContentSource {
#[turbo_tasks::function]
fn ty(&self) -> Vc<String> {
Vc::cell("dev manifest source".to_string())
}
#[turbo_tasks::function]
fn details(&self) -> Vc<String> {
Vc::cell(
"provides _devPagesManifest.json, _buildManifest.js and _devMiddlewareManifest.json."
.to_string(),
)
}
}
/// PageSortKey is necessary because the next.js client code looks for matches
/// in the order the pages are sent in the manifest, if they're sorted
/// alphabetically this means \[slug] and \[\[catchall]] routes are prioritized
/// over fixed paths, so we have to override the ordering with this.
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum PageSortKey {
Static(String),
Slug,
CatchAll,
}
impl From<&str> for PageSortKey {
fn from(value: &str) -> Self {
if value.starts_with("[[") && value.ends_with("]]") {
PageSortKey::CatchAll
} else if value.starts_with('[') && value.ends_with(']') {
PageSortKey::Slug
} else {
PageSortKey::Static(value.to_string())
}
}
}
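For illustration (not part of this diff): the ordering described in the `PageSortKey` doc comment can be reproduced in isolation. A minimal standalone sketch using only the standard library and hypothetical routes; deriving `Ord` makes declaration order the sort order, so static segments sort before `[slug]`, which sorts before `[[...catchall]]`:

```rust
// Standalone analogue of PageSortKey (hypothetical routes, std only).
#[derive(Ord, PartialOrd, Eq, PartialEq)]
enum SortKey {
    Static(String), // fixed segment
    Slug,           // [param]
    CatchAll,       // [[...param]]
}

fn key(segment: &str) -> SortKey {
    if segment.starts_with("[[") && segment.ends_with("]]") {
        SortKey::CatchAll
    } else if segment.starts_with('[') && segment.ends_with(']') {
        SortKey::Slug
    } else {
        SortKey::Static(segment.to_string())
    }
}

fn main() {
    let mut routes = vec!["/blog/[slug]", "/blog/new", "/[[...catchall]]", "/about"];
    routes.sort_by_cached_key(|r| r.split('/').map(key).collect::<Vec<_>>());
    // Fixed paths come first at each depth, then [slug], then [[...catchall]].
    assert_eq!(routes, vec!["/about", "/blog/new", "/blog/[slug]", "/[[...catchall]]"]);
}
```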


@@ -8,54 +8,46 @@
mod app_render;
mod app_segment_config;
mod app_source;
pub mod app_structure;
mod babel;
mod bootstrap;
pub mod dev_manifest;
mod embed_js;
mod emit;
pub mod env;
mod env;
mod fallback;
pub mod loader_tree;
mod loader_tree;
pub mod middleware;
pub mod mode;
pub mod next_app;
mod next_build;
pub mod next_client;
pub mod next_client_chunks;
mod next_client_component;
pub mod next_client_reference;
pub mod next_config;
pub mod next_dynamic;
pub mod next_edge;
mod next_font;
pub mod next_image;
mod next_image;
mod next_import_map;
pub mod next_manifests;
pub mod next_pages;
mod next_route_matcher;
pub mod next_server;
pub mod next_server_component;
pub mod next_shared;
mod next_server_component;
mod next_shared;
pub mod next_telemetry;
mod page_loader;
mod page_source;
pub mod pages_structure;
pub mod router;
pub mod router_source;
mod runtime;
mod sass;
pub mod tracing_presets;
mod transform_options;
pub mod url_node;
pub mod util;
mod web_entry_source;
pub use app_segment_config::{
parse_segment_config_from_loader_tree, parse_segment_config_from_source,
};
pub use app_source::create_app_source;
pub use emit::{
all_assets_from_entries, all_server_paths, emit_all_assets, emit_assets, emit_client_assets,
};
@@ -63,10 +55,8 @@ pub use next_edge::context::{
get_edge_chunking_context, get_edge_compile_time_info, get_edge_resolve_options_context,
};
pub use page_loader::{create_page_loader_entry_module, PageLoaderAsset};
pub use page_source::create_page_source;
pub use turbopack_binding::{turbopack::node::source_map, *};
pub use util::{get_asset_path_from_pathname, pathname_for_path, PathType};
pub use web_entry_source::create_web_entry_source;
pub fn register() {
turbo_tasks::register();


@@ -37,16 +37,10 @@ pub struct LoaderTreeBuilder {
loader_tree_code: String,
context: Vc<ModuleAssetContext>,
mode: NextMode,
server_component_transition: ServerComponentTransition,
server_component_transition: Vc<Box<dyn Transition>>,
pages: Vec<Vc<FileSystemPath>>,
}
#[derive(Clone, Debug)]
pub enum ServerComponentTransition {
Transition(Vc<Box<dyn Transition>>),
TransitionName(String),
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ComponentType {
Page,
@@ -75,7 +69,7 @@ impl ComponentType {
impl LoaderTreeBuilder {
fn new(
context: Vc<ModuleAssetContext>,
server_component_transition: ServerComponentTransition,
server_component_transition: Vc<Box<dyn Transition>>,
mode: NextMode,
) -> Self {
LoaderTreeBuilder {
@@ -153,15 +147,9 @@
EcmaScriptModulesReferenceSubType::Undefined,
));
let module = match &self.server_component_transition {
ServerComponentTransition::Transition(transition) => {
transition.process(source, self.context, reference_ty)
}
ServerComponentTransition::TransitionName(transition_name) => self
.context
.with_transition(transition_name.clone())
.process(source, reference_ty),
};
let module =
self.server_component_transition
.process(source, self.context, reference_ty);
self.inner_assets.insert(format!("COMPONENT_{i}"), module);
}
@@ -434,7 +422,7 @@ impl LoaderTreeModule {
pub async fn build(
loader_tree: Vc<LoaderTree>,
context: Vc<ModuleAssetContext>,
server_component_transition: ServerComponentTransition,
server_component_transition: Vc<Box<dyn Transition>>,
mode: NextMode,
) -> Result<Self> {
LoaderTreeBuilder::new(context, server_component_transition, mode)


@@ -17,7 +17,7 @@ use turbopack_binding::{
use super::app_entry::AppEntry;
use crate::{
app_structure::LoaderTree,
loader_tree::{LoaderTreeModule, ServerComponentTransition},
loader_tree::LoaderTreeModule,
mode::NextMode,
next_app::{AppPage, AppPath},
next_server_component::NextServerComponentTransition,
@@ -47,7 +47,7 @@ pub async fn get_app_page_entry(
let loader_tree = LoaderTreeModule::build(
loader_tree,
context,
ServerComponentTransition::Transition(server_component_transition),
server_component_transition,
NextMode::Build,
)
.await?;


@@ -1,115 +0,0 @@
use anyhow::Result;
use turbo_tasks::{Value, Vc};
use turbopack_binding::{
turbo::tasks_fs::FileSystemPath,
turbopack::{
core::{compile_time_info::CompileTimeInfo, module::Module},
ecmascript::chunk::EcmascriptChunkingContext,
node::execution_context::ExecutionContext,
turbopack::{
ecmascript::chunk::EcmascriptChunkPlaceable, module_options::ModuleOptionsContext,
resolve_options_context::ResolveOptionsContext, transition::Transition,
ModuleAssetContext,
},
},
};
use super::with_chunks::WithChunksAsset;
use crate::{
mode::NextMode,
next_client::context::{
get_client_module_options_context, get_client_resolve_options_context, ClientContextType,
},
next_config::NextConfig,
};
#[turbo_tasks::value(shared)]
pub struct NextClientChunksTransition {
pub client_compile_time_info: Vc<CompileTimeInfo>,
pub client_module_options_context: Vc<ModuleOptionsContext>,
pub client_resolve_options_context: Vc<ResolveOptionsContext>,
pub client_chunking_context: Vc<Box<dyn EcmascriptChunkingContext>>,
}
#[turbo_tasks::value_impl]
impl NextClientChunksTransition {
#[turbo_tasks::function]
pub fn new(
project_path: Vc<FileSystemPath>,
execution_context: Vc<ExecutionContext>,
ty: Value<ClientContextType>,
mode: NextMode,
client_chunking_context: Vc<Box<dyn EcmascriptChunkingContext>>,
client_compile_time_info: Vc<CompileTimeInfo>,
next_config: Vc<NextConfig>,
) -> Vc<NextClientChunksTransition> {
let client_module_options_context = get_client_module_options_context(
project_path,
execution_context,
client_compile_time_info.environment(),
ty,
mode,
next_config,
);
NextClientChunksTransition {
client_chunking_context,
client_module_options_context,
client_resolve_options_context: get_client_resolve_options_context(
project_path,
ty,
mode,
next_config,
execution_context,
),
client_compile_time_info,
}
.cell()
}
}
#[turbo_tasks::value_impl]
impl Transition for NextClientChunksTransition {
#[turbo_tasks::function]
fn process_compile_time_info(
&self,
_compile_time_info: Vc<CompileTimeInfo>,
) -> Vc<CompileTimeInfo> {
self.client_compile_time_info
}
#[turbo_tasks::function]
fn process_module_options_context(
&self,
_context: Vc<ModuleOptionsContext>,
) -> Vc<ModuleOptionsContext> {
self.client_module_options_context
}
#[turbo_tasks::function]
fn process_resolve_options_context(
&self,
_context: Vc<ResolveOptionsContext>,
) -> Vc<ResolveOptionsContext> {
self.client_resolve_options_context
}
#[turbo_tasks::function]
async fn process_module(
&self,
asset: Vc<Box<dyn Module>>,
_context: Vc<ModuleAssetContext>,
) -> Result<Vc<Box<dyn Module>>> {
Ok(
if let Some(placeable) =
Vc::try_resolve_sidecast::<Box<dyn EcmascriptChunkPlaceable>>(asset).await?
{
Vc::upcast(WithChunksAsset::new(
placeable,
self.client_chunking_context,
))
} else {
asset
},
)
}
}


@@ -1,85 +0,0 @@
use anyhow::{bail, Result};
use turbo_tasks::{Value, Vc};
use turbopack_binding::turbopack::{
core::{
asset::{Asset, AssetContent},
chunk::{availability_info::AvailabilityInfo, Chunk, ChunkableModule, ChunkingContext},
ident::AssetIdent,
module::Module,
reference::ModuleReferences,
},
ecmascript::chunk::EcmascriptChunkingContext,
turbopack::ecmascript::chunk::{
EcmascriptChunk, EcmascriptChunkItem, EcmascriptChunkPlaceable, EcmascriptExports,
},
};
#[turbo_tasks::function]
fn modifier() -> Vc<String> {
Vc::cell("in chunking context".to_string())
}
#[turbo_tasks::value(shared)]
pub struct InChunkingContextAsset {
pub asset: Vc<Box<dyn EcmascriptChunkPlaceable>>,
pub chunking_context: Vc<Box<dyn ChunkingContext>>,
}
#[turbo_tasks::value_impl]
impl Module for InChunkingContextAsset {
#[turbo_tasks::function]
fn ident(&self) -> Vc<AssetIdent> {
self.asset.ident().with_modifier(modifier())
}
#[turbo_tasks::function]
fn references(&self) -> Vc<ModuleReferences> {
self.asset.references()
}
}
#[turbo_tasks::value_impl]
impl Asset for InChunkingContextAsset {
#[turbo_tasks::function]
fn content(&self) -> Vc<AssetContent> {
self.asset.content()
}
}
#[turbo_tasks::value_impl]
impl ChunkableModule for InChunkingContextAsset {
#[turbo_tasks::function]
fn as_chunk(
&self,
_context: Vc<Box<dyn ChunkingContext>>,
availability_info: Value<AvailabilityInfo>,
) -> Vc<Box<dyn Chunk>> {
Vc::upcast(EcmascriptChunk::new(
self.chunking_context,
self.asset,
availability_info,
))
}
}
#[turbo_tasks::value_impl]
impl EcmascriptChunkPlaceable for InChunkingContextAsset {
#[turbo_tasks::function]
async fn as_chunk_item(
&self,
_context: Vc<Box<dyn EcmascriptChunkingContext>>,
) -> Result<Vc<Box<dyn EcmascriptChunkItem>>> {
let Some(chunking_context) =
Vc::try_resolve_sidecast::<Box<dyn EcmascriptChunkingContext>>(self.chunking_context)
.await?
else {
bail!("chunking context is not an EcmascriptChunkingContext")
};
Ok(self.asset.as_chunk_item(chunking_context))
}
#[turbo_tasks::function]
fn get_exports(&self) -> Vc<EcmascriptExports> {
self.asset.get_exports()
}
}


@@ -1,5 +0,0 @@
pub(crate) mod client_chunks_transition;
pub(crate) mod in_chunking_context_asset;
pub(crate) mod with_chunks;
pub use client_chunks_transition::NextClientChunksTransition;


@@ -1,233 +0,0 @@
use std::io::Write;
use anyhow::Result;
use indoc::writedoc;
use turbo_tasks::Vc;
use turbopack_binding::{
turbo::{
tasks::{TryJoinIterExt, Value},
tasks_fs::rope::RopeBuilder,
},
turbopack::{
core::{
asset::{Asset, AssetContent},
chunk::{
availability_info::AvailabilityInfo, Chunk, ChunkData, ChunkGroupReference,
ChunkItem, ChunkableModule, ChunkingContext, ChunksData,
},
ident::AssetIdent,
module::Module,
output::OutputAssets,
reference::{ModuleReferences, SingleOutputAssetReference},
},
ecmascript::{
chunk::{
EcmascriptChunk, EcmascriptChunkData, EcmascriptChunkItem,
EcmascriptChunkItemContent, EcmascriptChunkItemExt, EcmascriptChunkPlaceable,
EcmascriptChunkingContext, EcmascriptExports,
},
utils::StringifyJs,
},
},
};
#[turbo_tasks::function]
fn modifier() -> Vc<String> {
Vc::cell("chunks".to_string())
}
#[turbo_tasks::value]
pub struct WithChunksAsset {
asset: Vc<Box<dyn EcmascriptChunkPlaceable>>,
chunking_context: Vc<Box<dyn EcmascriptChunkingContext>>,
}
#[turbo_tasks::value_impl]
impl WithChunksAsset {
/// Create a new [`WithChunksAsset`].
///
/// # Arguments
///
/// * `asset` - The asset to wrap.
/// * `chunking_context` - The chunking context of the asset.
#[turbo_tasks::function]
pub fn new(
asset: Vc<Box<dyn EcmascriptChunkPlaceable>>,
chunking_context: Vc<Box<dyn EcmascriptChunkingContext>>,
) -> Vc<WithChunksAsset> {
WithChunksAsset::cell(WithChunksAsset {
asset,
chunking_context,
})
}
#[turbo_tasks::function]
async fn entry_chunk(self: Vc<Self>) -> Result<Vc<Box<dyn Chunk>>> {
let this = self.await?;
Ok(this.asset.as_root_chunk(Vc::upcast(this.chunking_context)))
}
#[turbo_tasks::function]
async fn chunks(self: Vc<Self>) -> Result<Vc<OutputAssets>> {
let this = self.await?;
Ok(this.chunking_context.chunk_group(self.entry_chunk()))
}
}
#[turbo_tasks::value_impl]
impl Module for WithChunksAsset {
#[turbo_tasks::function]
fn ident(&self) -> Vc<AssetIdent> {
self.asset.ident().with_modifier(modifier())
}
#[turbo_tasks::function]
async fn references(self: Vc<Self>) -> Result<Vc<ModuleReferences>> {
let this = self.await?;
let entry_chunk = self.entry_chunk();
Ok(Vc::cell(vec![Vc::upcast(ChunkGroupReference::new(
Vc::upcast(this.chunking_context),
entry_chunk,
))]))
}
}
#[turbo_tasks::value_impl]
impl Asset for WithChunksAsset {
#[turbo_tasks::function]
fn content(&self) -> Vc<AssetContent> {
unimplemented!()
}
}
#[turbo_tasks::value_impl]
impl ChunkableModule for WithChunksAsset {
#[turbo_tasks::function]
fn as_chunk(
self: Vc<Self>,
context: Vc<Box<dyn ChunkingContext>>,
availability_info: Value<AvailabilityInfo>,
) -> Vc<Box<dyn Chunk>> {
Vc::upcast(EcmascriptChunk::new(
context,
Vc::upcast(self),
availability_info,
))
}
}
#[turbo_tasks::value_impl]
impl EcmascriptChunkPlaceable for WithChunksAsset {
#[turbo_tasks::function]
async fn as_chunk_item(
self: Vc<Self>,
context: Vc<Box<dyn EcmascriptChunkingContext>>,
) -> Result<Vc<Box<dyn EcmascriptChunkItem>>> {
Ok(Vc::upcast(
WithChunksChunkItem {
context,
inner: self,
}
.cell(),
))
}
#[turbo_tasks::function]
fn get_exports(&self) -> Vc<EcmascriptExports> {
// TODO This should be EsmExports
EcmascriptExports::Value.cell()
}
}
#[turbo_tasks::value]
struct WithChunksChunkItem {
context: Vc<Box<dyn EcmascriptChunkingContext>>,
inner: Vc<WithChunksAsset>,
}
#[turbo_tasks::value_impl]
impl WithChunksChunkItem {
#[turbo_tasks::function]
async fn chunks_data(self: Vc<Self>) -> Result<Vc<ChunksData>> {
let this = self.await?;
let inner = this.inner.await?;
Ok(ChunkData::from_assets(
inner.chunking_context.output_root(),
this.inner.chunks(),
))
}
}
#[turbo_tasks::value_impl]
impl EcmascriptChunkItem for WithChunksChunkItem {
#[turbo_tasks::function]
fn chunking_context(&self) -> Vc<Box<dyn EcmascriptChunkingContext>> {
self.context
}
#[turbo_tasks::function]
async fn content(self: Vc<Self>) -> Result<Vc<EcmascriptChunkItemContent>> {
let this = self.await?;
let inner = this.inner.await?;
let chunks_data = self.chunks_data().await?;
let chunks_data = chunks_data.iter().try_join().await?;
let chunks_data: Vec<_> = chunks_data
.iter()
.map(|chunk_data| EcmascriptChunkData::new(chunk_data))
.collect();
let module_id = &*inner
.asset
.as_chunk_item(inner.chunking_context)
.id()
.await?;
let mut code = RopeBuilder::default();
writedoc!(
code,
r#"
__turbopack_esm__({{
default: () => {},
chunks: () => chunks,
}});
const chunks = {:#};
"#,
StringifyJs(&module_id),
StringifyJs(&chunks_data),
)?;
Ok(EcmascriptChunkItemContent {
inner_code: code.build(),
..Default::default()
}
.cell())
}
}
#[turbo_tasks::value_impl]
impl ChunkItem for WithChunksChunkItem {
#[turbo_tasks::function]
fn asset_ident(&self) -> Vc<AssetIdent> {
self.inner.ident()
}
#[turbo_tasks::function]
async fn references(self: Vc<Self>) -> Result<Vc<ModuleReferences>> {
let mut references = self.await?.inner.references().await?.clone_value();
let chunk_data_key = Vc::cell("chunk data".to_string());
for chunk_data in &*self.chunks_data().await? {
references.extend(chunk_data.references().await?.iter().map(|&output_asset| {
Vc::upcast(SingleOutputAssetReference::new(
output_asset,
chunk_data_key,
))
}));
}
Ok(Vc::cell(references))
}
}


@@ -1,895 +0,0 @@
use anyhow::{bail, Result};
use indexmap::{indexmap, IndexMap};
use serde::{Deserialize, Serialize};
use serde_json::Value as JsonValue;
use turbo_tasks::{trace::TraceRawVcs, Value, Vc};
use turbo_tasks_fs::FileSystemPathOption;
use turbopack_binding::{
turbo::{
tasks_env::{CustomProcessEnv, EnvMap, ProcessEnv},
tasks_fs::{FileContent, FileSystemPath},
},
turbopack::{
core::{
chunk::{ChunkingContext, EvaluatableAsset, EvaluatableAssetExt},
compile_time_info::CompileTimeInfo,
context::AssetContext,
environment::ServerAddr,
file_source::FileSource,
module::Module,
reference_type::{EntryReferenceSubType, ReferenceType},
source::{Source, Sources},
},
dev::DevChunkingContext,
dev_server::{
html::DevHtmlAsset,
source::{
asset_graph::AssetGraphContentSource,
combined::CombinedContentSource,
route_tree::{BaseSegment, RouteType},
ContentSource, ContentSourceData, ContentSourceExt,
},
},
ecmascript::chunk::EcmascriptChunkingContext,
env::ProcessEnvAsset,
node::{
debug::should_debug,
execution_context::ExecutionContext,
render::{
node_api_source::create_node_api_source,
rendered_source::create_node_rendered_source,
},
route_matcher::RouteMatcher,
NodeEntry, NodeRenderingEntry,
},
turbopack::ModuleAssetContext,
},
};
use crate::{
embed_js::next_asset,
env::env_for_js,
fallback::get_fallback_page,
mode::NextMode,
next_client::{
context::{
get_client_assets_path, get_client_chunking_context, get_client_module_options_context,
get_client_resolve_options_context, get_client_runtime_entries, ClientContextType,
},
transition::NextClientTransition,
},
next_client_chunks::client_chunks_transition::NextClientChunksTransition,
next_config::NextConfig,
next_edge::{
context::{get_edge_compile_time_info, get_edge_resolve_options_context},
route_transition::NextEdgeRouteTransition,
},
next_route_matcher::{
NextExactMatcher, NextFallbackMatcher, NextParamsMatcher, NextPrefixSuffixParamsMatcher,
},
next_server::context::{
get_server_compile_time_info, get_server_module_options_context,
get_server_resolve_options_context, ServerContextType,
},
page_loader::create_page_loader,
pages_structure::{PagesDirectoryStructure, PagesStructure, PagesStructureItem},
util::{parse_config_from_source, pathname_for_path, render_data, NextRuntime, PathType},
};
/// Create a content source serving the `pages` or `src/pages` directory as
/// Next.js pages folder.
#[turbo_tasks::function]
pub async fn create_page_source(
pages_structure: Vc<PagesStructure>,
project_root: Vc<FileSystemPath>,
dist_root: Vc<String>,
execution_context: Vc<ExecutionContext>,
node_root: Vc<FileSystemPath>,
client_root: Vc<FileSystemPath>,
env: Vc<Box<dyn ProcessEnv>>,
client_chunking_context: Vc<Box<dyn EcmascriptChunkingContext>>,
client_compile_time_info: Vc<CompileTimeInfo>,
next_config: Vc<NextConfig>,
server_addr: Vc<ServerAddr>,
) -> Result<Vc<Box<dyn ContentSource>>> {
let pages_dir = if let Some(pages) = pages_structure.await?.pages {
pages.project_path().resolve().await?
} else {
project_root.join("pages".to_string())
};
let mode = NextMode::DevServer;
let client_ty = Value::new(ClientContextType::Pages { pages_dir });
let server_ty = Value::new(ServerContextType::Pages { pages_dir });
let server_data_ty = Value::new(ServerContextType::PagesData { pages_dir });
let client_module_options_context = get_client_module_options_context(
project_root,
execution_context,
client_compile_time_info.environment(),
client_ty,
mode,
next_config,
);
let client_resolve_options_context = get_client_resolve_options_context(
project_root,
client_ty,
mode,
next_config,
execution_context,
);
let client_runtime_entries = get_client_runtime_entries(
project_root,
env,
client_ty,
mode,
next_config,
execution_context,
);
let next_client_transition = Vc::upcast(
NextClientTransition {
is_app: false,
client_chunking_context: Vc::upcast(client_chunking_context),
client_module_options_context,
client_resolve_options_context,
client_compile_time_info,
runtime_entries: client_runtime_entries,
}
.cell(),
);
let edge_compile_time_info = get_edge_compile_time_info(project_root, server_addr, dist_root);
let edge_chunking_context = Vc::upcast(
DevChunkingContext::builder(
project_root,
node_root.join("edge".to_string()),
node_root.join("edge/chunks".to_string()),
get_client_assets_path(client_root),
edge_compile_time_info.environment(),
)
.reference_chunk_source_maps(should_debug("page_source"))
.build(),
);
let edge_resolve_options_context = get_edge_resolve_options_context(
project_root,
server_ty,
mode,
next_config,
execution_context,
);
let next_edge_transition = Vc::upcast(
NextEdgeRouteTransition {
edge_compile_time_info,
edge_chunking_context,
edge_module_options_context: None,
edge_resolve_options_context,
output_path: node_root,
base_path: project_root,
bootstrap_asset: next_asset("entry/edge-bootstrap.ts".to_string()),
entry_name: "edge".to_string(),
}
.cell(),
);
let server_compile_time_info = get_server_compile_time_info(mode, env, server_addr);
let server_resolve_options_context = get_server_resolve_options_context(
project_root,
server_ty,
mode,
next_config,
execution_context,
);
let server_module_options_context = get_server_module_options_context(
project_root,
execution_context,
server_ty,
mode,
next_config,
);
let server_data_module_options_context = get_server_module_options_context(
project_root,
execution_context,
server_data_ty,
mode,
next_config,
);
let transitions = Vc::cell(
[
("next-edge".to_string(), next_edge_transition),
("next-client".to_string(), next_client_transition),
(
"next-client-chunks".to_string(),
Vc::upcast(NextClientChunksTransition::new(
project_root,
execution_context,
client_ty,
mode,
client_chunking_context,
client_compile_time_info,
next_config,
)),
),
]
.into_iter()
.collect(),
);
let client_context: Vc<Box<dyn AssetContext>> = Vc::upcast(ModuleAssetContext::new(
transitions,
client_compile_time_info,
client_module_options_context,
client_resolve_options_context,
));
let server_context: Vc<Box<dyn AssetContext>> = Vc::upcast(ModuleAssetContext::new(
transitions,
server_compile_time_info,
server_module_options_context,
server_resolve_options_context,
));
let server_data_context: Vc<Box<dyn AssetContext>> = Vc::upcast(ModuleAssetContext::new(
transitions,
server_compile_time_info,
server_data_module_options_context,
server_resolve_options_context,
));
let injected_env = env_for_js(Vc::upcast(EnvMap::empty()), false, next_config);
let env = Vc::upcast(CustomProcessEnv::new(env, next_config.env()));
let server_runtime_entries = Vc::cell(vec![Vc::upcast(ProcessEnvAsset::new(
project_root,
injected_env,
))]);
let fallback_runtime_entries = Vc::cell(vec![]);
let fallback_page = get_fallback_page(
project_root,
execution_context,
client_root,
env,
client_compile_time_info,
next_config,
);
let render_data = render_data(next_config, server_addr);
let page_extensions = next_config.page_extensions();
let sources = vec![
// Match _next/404 first to ensure rewrites work properly.
create_not_found_page_source(
project_root,
env,
server_context,
client_context,
Vc::upcast(client_chunking_context),
pages_dir,
page_extensions,
fallback_runtime_entries,
fallback_page,
client_root,
node_root.join("force_not_found".to_string()),
BaseSegment::from_static_pathname("_next/404").collect(),
RouteType::Exact,
Vc::upcast(NextExactMatcher::new(Vc::cell("_next/404".to_string()))),
render_data,
)
.issue_file_path(pages_dir, "Next.js pages directory not found".to_string()),
create_page_source_for_root_directory(
pages_structure,
project_root,
env,
server_context,
server_data_context,
client_context,
pages_dir,
server_runtime_entries,
fallback_page,
client_root,
node_root,
render_data,
),
Vc::upcast::<Box<dyn ContentSource>>(AssetGraphContentSource::new_eager(
client_root,
Vc::upcast(fallback_page),
))
.issue_file_path(pages_dir, "Next.js pages directory fallback".to_string()),
create_not_found_page_source(
project_root,
env,
server_context,
client_context,
Vc::upcast(client_chunking_context),
pages_dir,
page_extensions,
fallback_runtime_entries,
fallback_page,
client_root,
node_root.join("fallback_not_found".to_string()),
Vec::new(),
RouteType::NotFound,
Vc::upcast(NextFallbackMatcher::new()),
render_data,
)
.issue_file_path(
pages_dir,
"Next.js pages directory not found fallback".to_string(),
),
];
let source = Vc::upcast(CombinedContentSource { sources }.cell());
Ok(source)
}
/// Handles a single page file in the pages directory
#[turbo_tasks::function]
async fn create_page_source_for_file(
project_path: Vc<FileSystemPath>,
env: Vc<Box<dyn ProcessEnv>>,
server_context: Vc<Box<dyn AssetContext>>,
server_data_context: Vc<Box<dyn AssetContext>>,
client_context: Vc<Box<dyn AssetContext>>,
_pages_dir: Vc<FileSystemPath>,
page_asset: Vc<Box<dyn Source>>,
runtime_entries: Vc<Sources>,
fallback_page: Vc<DevHtmlAsset>,
client_root: Vc<FileSystemPath>,
client_path: Vc<FileSystemPath>,
is_api_path: bool,
node_path: Vc<FileSystemPath>,
node_root: Vc<FileSystemPath>,
render_data: Vc<JsonValue>,
) -> Result<Vc<Box<dyn ContentSource>>> {
let mode = NextMode::DevServer;
let server_chunking_context = Vc::upcast(
DevChunkingContext::builder(
project_path,
node_path,
node_path.join("chunks".to_string()),
get_client_assets_path(client_root),
server_context.compile_time_info().environment(),
)
.reference_chunk_source_maps(should_debug("page_source"))
.build(),
);
let data_node_path = node_path.join("data".to_string());
let server_data_chunking_context = Vc::upcast(
DevChunkingContext::builder(
project_path,
data_node_path,
data_node_path.join("chunks".to_string()),
get_client_assets_path(client_root),
server_context.compile_time_info().environment(),
)
.reference_chunk_source_maps(should_debug("page_source"))
.build(),
);
let client_chunking_context = get_client_chunking_context(
project_path,
client_root,
client_context.compile_time_info().environment(),
mode,
);
let pathname = pathname_for_path(client_root, client_path, PathType::PagesPage);
let route_matcher = NextParamsMatcher::new(pathname);
let (base_segments, route_type) = pathname_to_segments(&pathname.await?, "")?;
Ok(if is_api_path {
create_node_api_source(
project_path,
env,
base_segments,
route_type,
client_root,
Vc::upcast(route_matcher),
pathname,
Vc::upcast(
SsrEntry {
runtime_entries,
context: server_context,
entry_asset: page_asset,
ty: SsrType::AutoApi,
chunking_context: server_chunking_context,
node_path,
node_root,
project_path,
}
.cell(),
),
render_data,
should_debug("page_source"),
)
} else {
let data_pathname = pathname_for_path(client_root, client_path, PathType::Data);
let data_route_matcher = NextPrefixSuffixParamsMatcher::new(
data_pathname,
"_next/data/development/".to_string(),
".json".to_string(),
);
let (data_base_segments, data_route_type) = pathname_to_segments(
&format!("_next/data/development/{}", data_pathname.await?),
".json",
)?;
let ssr_entry = Vc::upcast(
SsrEntry {
runtime_entries,
context: server_context,
entry_asset: page_asset,
ty: SsrType::Html,
chunking_context: server_chunking_context,
node_path,
node_root,
project_path,
}
.cell(),
);
let ssr_data_entry = Vc::upcast(
SsrEntry {
runtime_entries,
context: server_data_context,
entry_asset: page_asset,
ty: SsrType::Data,
chunking_context: server_data_chunking_context,
node_path: data_node_path,
node_root,
project_path,
}
.cell(),
);
Vc::upcast(CombinedContentSource::new(vec![
create_node_rendered_source(
project_path,
env,
base_segments.clone(),
route_type.clone(),
client_root,
Vc::upcast(route_matcher),
pathname,
ssr_entry,
fallback_page,
render_data,
should_debug("page_source"),
),
create_node_rendered_source(
project_path,
env,
data_base_segments,
data_route_type,
client_root,
Vc::upcast(data_route_matcher),
pathname,
ssr_data_entry,
fallback_page,
render_data,
should_debug("page_source"),
),
create_page_loader(
client_root,
client_context,
Vc::upcast(client_chunking_context),
page_asset,
pathname,
FileSystemPathOption::none(),
),
]))
})
}
async fn get_not_found_page(
pages_dir: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>,
) -> Result<Option<Vc<Box<dyn Source>>>> {
for ext in page_extensions.await?.iter() {
let not_found_path = pages_dir.join(format!("404.{ext}"));
let content = not_found_path.read();
if let FileContent::Content(_) = &*content.await? {
return Ok(Some(Vc::upcast(FileSource::new(not_found_path))));
}
}
Ok(None)
}
/// Handles a single page file in the pages directory
#[turbo_tasks::function]
async fn create_not_found_page_source(
project_path: Vc<FileSystemPath>,
env: Vc<Box<dyn ProcessEnv>>,
server_context: Vc<Box<dyn AssetContext>>,
client_context: Vc<Box<dyn AssetContext>>,
client_chunking_context: Vc<Box<dyn ChunkingContext>>,
pages_dir: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>,
runtime_entries: Vc<Sources>,
fallback_page: Vc<DevHtmlAsset>,
client_root: Vc<FileSystemPath>,
node_path: Vc<FileSystemPath>,
base_segments: Vec<BaseSegment>,
route_type: RouteType,
route_matcher: Vc<Box<dyn RouteMatcher>>,
render_data: Vc<JsonValue>,
) -> Result<Vc<Box<dyn ContentSource>>> {
let server_chunking_context = Vc::upcast(
DevChunkingContext::builder(
project_path,
node_path,
node_path.join("chunks".to_string()),
get_client_assets_path(client_root),
server_context.compile_time_info().environment(),
)
.reference_chunk_source_maps(should_debug("page_source"))
.build(),
);
let (page_asset, pathname) =
if let Some(not_found_page_asset) = get_not_found_page(pages_dir, page_extensions).await? {
// If a 404 page is defined, the pathname should be 404.
(not_found_page_asset, Vc::cell("/404".to_string()))
} else {
(
// The error page asset must be within the context path so it can depend on the
// Next.js module.
next_asset("entry/error.tsx".to_string()),
// If no 404 page is defined, the pathname should be _error.
Vc::cell("/_error".to_string()),
)
};
let ssr_entry = Vc::upcast(
SsrEntry {
runtime_entries,
context: server_context,
entry_asset: page_asset,
ty: SsrType::Html,
chunking_context: server_chunking_context,
node_path,
node_root: node_path,
project_path,
}
.cell(),
);
let page_loader = create_page_loader(
client_root,
client_context,
client_chunking_context,
page_asset,
pathname,
FileSystemPathOption::none(),
);
Ok(Vc::upcast(CombinedContentSource::new(vec![
create_node_rendered_source(
project_path,
env,
base_segments,
route_type,
client_root,
route_matcher,
pathname,
ssr_entry,
fallback_page,
render_data,
should_debug("page_source"),
),
page_loader,
])))
}
/// Handles a directory in the pages directory (or the pages directory itself).
/// Calls itself recursively for sub directories or the
/// [create_page_source_for_file] method for files.
#[turbo_tasks::function]
async fn create_page_source_for_root_directory(
pages_structure: Vc<PagesStructure>,
project_root: Vc<FileSystemPath>,
env: Vc<Box<dyn ProcessEnv>>,
server_context: Vc<Box<dyn AssetContext>>,
server_data_context: Vc<Box<dyn AssetContext>>,
client_context: Vc<Box<dyn AssetContext>>,
pages_dir: Vc<FileSystemPath>,
runtime_entries: Vc<Sources>,
fallback_page: Vc<DevHtmlAsset>,
client_root: Vc<FileSystemPath>,
node_root: Vc<FileSystemPath>,
render_data: Vc<JsonValue>,
) -> Result<Vc<Box<dyn ContentSource>>> {
let PagesStructure {
app: _,
document: _,
error: _,
ref api,
ref pages,
} = *pages_structure.await?;
let mut sources = vec![];
if let Some(pages) = pages {
sources.push(create_page_source_for_directory(
*pages,
project_root,
env,
server_context,
server_data_context,
client_context,
pages_dir,
runtime_entries,
fallback_page,
client_root,
false,
node_root,
render_data,
));
}
if let Some(api) = api {
sources.push(create_page_source_for_directory(
*api,
project_root,
env,
server_context,
server_data_context,
client_context,
pages_dir,
runtime_entries,
fallback_page,
client_root,
true,
node_root,
render_data,
));
}
Ok(Vc::upcast(CombinedContentSource { sources }.cell()))
}
/// Handles a directory in the pages directory (or the pages directory itself).
/// Calls itself recursively for sub directories or the
/// [create_page_source_for_file] method for files.
#[turbo_tasks::function]
async fn create_page_source_for_directory(
pages_structure: Vc<PagesDirectoryStructure>,
project_root: Vc<FileSystemPath>,
env: Vc<Box<dyn ProcessEnv>>,
server_context: Vc<Box<dyn AssetContext>>,
server_data_context: Vc<Box<dyn AssetContext>>,
client_context: Vc<Box<dyn AssetContext>>,
pages_dir: Vc<FileSystemPath>,
runtime_entries: Vc<Sources>,
fallback_page: Vc<DevHtmlAsset>,
client_root: Vc<FileSystemPath>,
is_api_path: bool,
node_root: Vc<FileSystemPath>,
render_data: Vc<JsonValue>,
) -> Result<Vc<Box<dyn ContentSource>>> {
let PagesDirectoryStructure {
ref items,
ref children,
..
} = *pages_structure.await?;
let mut sources = vec![];
for item in items.iter() {
let PagesStructureItem {
project_path,
next_router_path,
original_path: _,
} = *item.await?;
let source = create_page_source_for_file(
project_root,
env,
server_context,
server_data_context,
client_context,
pages_dir,
Vc::upcast(FileSource::new(project_path)),
runtime_entries,
fallback_page,
client_root,
next_router_path,
is_api_path,
node_root,
node_root,
render_data,
)
.issue_file_path(
project_path,
if is_api_path {
"Next.js page API file"
} else {
"Next.js page file"
}
.to_string(),
);
sources.push(source);
}
for child in children.iter() {
sources.push(create_page_source_for_directory(
*child,
project_root,
env,
server_context,
server_data_context,
client_context,
pages_dir,
runtime_entries,
fallback_page,
client_root,
is_api_path,
node_root,
render_data,
))
}
Ok(Vc::upcast(CombinedContentSource { sources }.cell()))
}
fn pathname_to_segments(pathname: &str, extension: &str) -> Result<(Vec<BaseSegment>, RouteType)> {
let mut segments = Vec::new();
let mut split = pathname.split('/');
while let Some(segment) = split.next() {
if segment.is_empty() {
// ignore
} else if segment.starts_with("[[...") && segment.ends_with("]]")
|| segment.starts_with("[...") && segment.ends_with(']')
{
// (optional) catch all segment
if split.remainder().is_some() {
bail!(
"Invalid route {}, catch all segment must be the last segment",
pathname
)
}
return Ok((segments, RouteType::CatchAll));
} else if segment.starts_with('[') || segment.ends_with(']') {
// dynamic segment
segments.push(BaseSegment::Dynamic);
} else {
// normal segment
segments.push(BaseSegment::Static(segment.to_string()));
}
}
if let Some(BaseSegment::Static(s)) = segments.last_mut() {
s.push_str(extension);
}
Ok((segments, RouteType::Exact))
}
/// The node.js renderer for SSR of pages.
#[turbo_tasks::value]
pub struct SsrEntry {
runtime_entries: Vc<Sources>,
context: Vc<Box<dyn AssetContext>>,
entry_asset: Vc<Box<dyn Source>>,
ty: SsrType,
chunking_context: Vc<Box<dyn ChunkingContext>>,
node_path: Vc<FileSystemPath>,
node_root: Vc<FileSystemPath>,
project_path: Vc<FileSystemPath>,
}
#[derive(
Clone, Copy, Debug, Eq, PartialEq, Hash, Serialize, Deserialize, PartialOrd, Ord, TraceRawVcs,
)]
pub enum SsrType {
Api,
EdgeApi,
AutoApi,
Html,
Data,
}
#[turbo_tasks::value_impl]
impl SsrEntry {
#[turbo_tasks::function]
pub async fn entry(self: Vc<Self>) -> Result<Vc<NodeRenderingEntry>> {
let this = self.await?;
let entry_asset_page = this.context.process(
this.entry_asset,
Value::new(ReferenceType::Entry(EntryReferenceSubType::Page)),
);
let ty = if this.ty == SsrType::AutoApi {
let page_config = parse_config_from_source(entry_asset_page);
if page_config.await?.runtime == NextRuntime::Edge {
SsrType::EdgeApi
} else {
SsrType::Api
}
} else {
this.ty
};
let (internal_asset, inner_assets): (_, IndexMap<_, Vc<Box<dyn Module>>>) = match ty {
SsrType::AutoApi => unreachable!(),
SsrType::Api => (
next_asset("entry/server-api.tsx".to_string()),
indexmap! {
"INNER".to_string() => entry_asset_page,
},
),
SsrType::EdgeApi => {
let entry_asset_edge_chunk_group = this
.context
.with_transition("next-edge".to_string())
.process(
this.entry_asset,
Value::new(ReferenceType::Entry(EntryReferenceSubType::PagesApi)),
);
(
next_asset("entry/server-edge-api.tsx".to_string()),
indexmap! {
"INNER_EDGE_CHUNK_GROUP".to_string() => entry_asset_edge_chunk_group,
},
)
}
SsrType::Data => (
next_asset("entry/server-data.tsx".to_string()),
indexmap! {
"INNER".to_string() => entry_asset_page,
},
),
SsrType::Html => {
let entry_asset_client_chunk_group = this
.context
.with_transition("next-client".to_string())
.process(
this.entry_asset,
Value::new(ReferenceType::Entry(EntryReferenceSubType::Page)),
);
(
next_asset("entry/server-renderer.tsx".to_string()),
indexmap! {
"INNER".to_string() => entry_asset_page,
"INNER_CLIENT_CHUNK_GROUP".to_string() => entry_asset_client_chunk_group,
},
)
}
};
let module = this.context.process(
internal_asset,
Value::new(ReferenceType::Internal(Vc::cell(inner_assets))),
);
let Some(module) = Vc::try_resolve_sidecast::<Box<dyn EvaluatableAsset>>(module).await?
else {
bail!("internal module must be evaluatable");
};
Ok(NodeRenderingEntry {
runtime_entries: Vc::cell(
this.runtime_entries
.await?
.iter()
.map(|entry| entry.to_evaluatable(this.context))
.collect(),
),
module,
chunking_context: this.chunking_context,
intermediate_output_path: this.node_path,
output_root: this.node_root,
project_dir: this.project_path,
}
.cell())
}
}
#[turbo_tasks::value_impl]
impl NodeEntry for SsrEntry {
#[turbo_tasks::function]
fn entry(self: Vc<Self>, _data: Value<ContentSourceData>) -> Vc<NodeRenderingEntry> {
// Call without being keyed by data
self.entry()
}
}


@@ -1,468 +0,0 @@
use anyhow::{anyhow, bail, Context, Result};
use futures::StreamExt;
use indexmap::indexmap;
use serde::Deserialize;
use serde_json::json;
use turbo_tasks::{util::SharedError, Completion, Completions, Value, Vc};
use turbo_tasks_fs::json::parse_json_with_source_context;
use turbopack_binding::{
turbo::{
tasks_bytes::{Bytes, Stream},
tasks_fs::{to_sys_path, File, FileSystemPath},
},
turbopack::{
core::{
asset::AssetContent,
changed::any_content_changed_of_module,
chunk::ChunkingContext,
context::AssetContext,
environment::{ServerAddr, ServerInfo},
file_source::FileSource,
ident::AssetIdent,
issue::IssueDescriptionExt,
module::Module,
reference_type::{EcmaScriptModulesReferenceSubType, InnerAssets, ReferenceType},
resolve::{find_context_file, FindContextFileResult},
virtual_source::VirtualSource,
},
dev::DevChunkingContext,
node::{
debug::should_debug,
evaluate::{evaluate, get_evaluate_pool},
execution_context::ExecutionContext,
source_map::{trace_stack_with_source_mapping_assets, StructuredError},
},
turbopack::{evaluate_context::node_evaluate_asset_context, transition::TransitionsByName},
},
};
use crate::{
embed_js::next_asset,
middleware::middleware_files,
mode::NextMode,
next_config::NextConfig,
next_edge::{
context::{get_edge_compile_time_info, get_edge_resolve_options_context},
route_transition::NextEdgeRouteTransition,
},
next_import_map::get_next_build_import_map,
next_server::context::{get_server_module_options_context, ServerContextType},
util::parse_config_from_source,
};
#[turbo_tasks::function]
fn next_configs() -> Vc<Vec<String>> {
Vc::cell(
["next.config.mjs", "next.config.js"]
.into_iter()
.map(ToOwned::to_owned)
.collect(),
)
}
#[turbo_tasks::value(shared)]
#[derive(Debug, Clone, Default)]
#[serde(rename_all = "camelCase")]
pub struct RouterRequest {
pub method: String,
pub pathname: String,
pub raw_query: String,
pub raw_headers: Vec<(String, String)>,
pub body: Vec<Bytes>,
}
#[turbo_tasks::value(shared)]
#[derive(Debug, Clone, Default)]
#[serde(rename_all = "camelCase")]
pub struct RewriteResponse {
pub url: String,
pub headers: Vec<(String, String)>,
}
#[turbo_tasks::value(shared)]
#[derive(Debug, Clone, Default)]
#[serde(rename_all = "camelCase")]
pub struct MiddlewareHeadersResponse {
pub status_code: u16,
pub headers: Vec<(String, String)>,
}
#[turbo_tasks::value(shared)]
#[derive(Debug, Clone, Default)]
pub struct MiddlewareBodyResponse(Bytes);
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "kebab-case")]
enum RouterIncomingMessage {
Rewrite { data: RewriteResponse },
MiddlewareHeaders { data: MiddlewareHeadersResponse },
MiddlewareBody { data: Vec<u8> },
None,
Error { error: StructuredError },
}
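For illustration (not part of this diff): with the internally tagged representation above, each message from the router arrives as JSON carrying a kebab-case `type` field. A minimal standalone sketch of decoding one such message, assuming `serde` and `serde_json` and using made-up header values:

```rust
use serde::Deserialize;

// Trimmed-down analogues of the message types above (hypothetical example data).
#[derive(Deserialize, Debug, PartialEq)]
#[serde(rename_all = "camelCase")]
struct HeadersData {
    status_code: u16,
    headers: Vec<(String, String)>,
}

#[derive(Deserialize, Debug, PartialEq)]
#[serde(tag = "type", rename_all = "kebab-case")]
enum Msg {
    MiddlewareHeaders { data: HeadersData },
    MiddlewareBody { data: Vec<u8> },
    None,
}

fn main() {
    // The "type" field selects the variant; the rest is the variant's payload.
    let raw = r#"{"type":"middleware-headers","data":{"statusCode":200,"headers":[["x-from","middleware"]]}}"#;
    let msg: Msg = serde_json::from_str(raw).unwrap();
    assert_eq!(
        msg,
        Msg::MiddlewareHeaders {
            data: HeadersData {
                status_code: 200,
                headers: vec![("x-from".to_string(), "middleware".to_string())],
            }
        }
    );
}
```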
#[turbo_tasks::value]
#[derive(Debug, Clone, Default)]
pub struct MiddlewareResponse {
pub status_code: u16,
pub headers: Vec<(String, String)>,
#[turbo_tasks(trace_ignore)]
pub body: Stream<Result<Bytes, SharedError>>,
}
#[turbo_tasks::value]
#[derive(Debug)]
pub enum RouterResult {
Rewrite(RewriteResponse),
Middleware(MiddlewareResponse),
None,
Error(#[turbo_tasks(trace_ignore)] SharedError),
}
#[turbo_tasks::function]
async fn next_config_changed(
context: Vc<Box<dyn AssetContext>>,
project_path: Vc<FileSystemPath>,
) -> Result<Vc<Completion>> {
let find_config_result = find_context_file(project_path, next_configs());
Ok(match *find_config_result.await? {
FindContextFileResult::Found(config_path, _) => {
let module = context.process(
Vc::upcast(FileSource::new(config_path)),
Value::new(ReferenceType::Internal(InnerAssets::empty())),
);
any_content_changed_of_module(module)
}
FindContextFileResult::NotFound(_) => Completion::immutable(),
})
}
#[turbo_tasks::function]
async fn config_assets(
context: Vc<Box<dyn AssetContext>>,
project_path: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>,
) -> Result<Vc<InnerAssets>> {
let find_config_result = find_context_file(project_path, middleware_files(page_extensions));
// The router.ts file expects a manifest of chunks for the middleware. If there
// is no middleware file, then we need to generate a default empty manifest
// and we cannot process it with the next-edge transition because it
// requires a real file for some reason.
let (manifest, config) = match *find_config_result.await? {
FindContextFileResult::Found(config_path, _) => {
let config = context.process(
Vc::upcast(FileSource::new(config_path)),
Value::new(ReferenceType::EcmaScriptModules(
EcmaScriptModulesReferenceSubType::Undefined,
)),
);
let config = parse_config_from_source(config);
let manifest = context.with_transition("next-edge".to_string()).process(
Vc::upcast(FileSource::new(config_path)),
Value::new(ReferenceType::EcmaScriptModules(
EcmaScriptModulesReferenceSubType::Undefined,
)),
);
(manifest, config)
}
FindContextFileResult::NotFound(_) => {
let manifest = context.process(
Vc::upcast(VirtualSource::new(
project_path.join("middleware.js".to_string()),
AssetContent::file(File::from("export default [];").into()),
)),
Value::new(ReferenceType::Internal(InnerAssets::empty())),
);
let config = Default::default();
(manifest, config)
}
};
let config_asset = context.process(
Vc::upcast(VirtualSource::new(
project_path.join("middleware_config.js".to_string()),
AssetContent::file(
File::from(format!(
"export default {};",
json!({ "matcher": &config.await?.matcher })
))
.into(),
),
)),
Value::new(ReferenceType::Internal(InnerAssets::empty())),
);
Ok(Vc::cell(indexmap! {
"MIDDLEWARE_CHUNK_GROUP".to_string() => manifest,
"MIDDLEWARE_CONFIG".to_string() => config_asset,
}))
}
#[turbo_tasks::function]
fn route_executor(
context: Vc<Box<dyn AssetContext>>,
configs: Vc<InnerAssets>,
) -> Vc<Box<dyn Module>> {
context.process(
next_asset("entry/router.ts".to_string()),
Value::new(ReferenceType::Internal(configs)),
)
}
#[turbo_tasks::function]
fn edge_transition_map(
server_addr: Vc<ServerAddr>,
dist_root: Vc<String>,
project_path: Vc<FileSystemPath>,
output_path: Vc<FileSystemPath>,
next_config: Vc<NextConfig>,
execution_context: Vc<ExecutionContext>,
) -> Vc<TransitionsByName> {
let mode = NextMode::DevServer;
let edge_compile_time_info = get_edge_compile_time_info(project_path, server_addr, dist_root);
let edge_chunking_context = Vc::upcast(
DevChunkingContext::builder(
project_path,
output_path.join("edge".to_string()),
output_path.join("edge/chunks".to_string()),
output_path.join("edge/assets".to_string()),
edge_compile_time_info.environment(),
)
.reference_chunk_source_maps(should_debug("router"))
.build(),
);
let edge_resolve_options_context = get_edge_resolve_options_context(
project_path,
Value::new(ServerContextType::Middleware),
mode,
next_config,
execution_context,
);
let server_module_options_context = get_server_module_options_context(
project_path,
execution_context,
Value::new(ServerContextType::Middleware),
mode,
next_config,
);
let next_edge_transition = Vc::upcast(
NextEdgeRouteTransition {
edge_compile_time_info,
edge_chunking_context,
edge_module_options_context: Some(server_module_options_context),
edge_resolve_options_context,
output_path: output_path.root(),
base_path: project_path,
bootstrap_asset: next_asset("entry/edge-bootstrap.ts".to_string()),
entry_name: "middleware".to_string(),
}
.cell(),
);
Vc::cell(
[("next-edge".to_string(), next_edge_transition)]
.into_iter()
.collect(),
)
}
#[turbo_tasks::function]
pub async fn route(
execution_context: Vc<ExecutionContext>,
request: Vc<RouterRequest>,
dist_root: Vc<String>,
next_config: Vc<NextConfig>,
server_addr: Vc<ServerAddr>,
routes_changed: Vc<Completion>,
) -> Result<Vc<RouterResult>> {
let RouterRequest {
ref method,
ref pathname,
..
} = *request.await?;
route_internal(
execution_context,
request,
dist_root,
next_config,
server_addr,
routes_changed,
)
.attach_description(format!("Next.js Routing for {} {}", method, pathname))
.await
}
macro_rules! shared_anyhow {
($msg:literal $(,)?) => {
turbo_tasks::util::SharedError::new(anyhow::anyhow!($msg))
};
($err:expr $(,)?) => {
turbo_tasks::util::SharedError::new(anyhow::anyhow!($err))
};
($fmt:expr, $($arg:tt)*) => {
turbo_tasks::util::SharedError::new(anyhow::anyhow!($fmt, $($arg)*))
};
}
#[turbo_tasks::function]
async fn route_internal(
execution_context: Vc<ExecutionContext>,
request: Vc<RouterRequest>,
dist_root: Vc<String>,
next_config: Vc<NextConfig>,
server_addr: Vc<ServerAddr>,
routes_changed: Vc<Completion>,
) -> Result<Vc<RouterResult>> {
let ExecutionContext {
project_path,
chunking_context,
env,
} = *execution_context.await?;
let context = node_evaluate_asset_context(
execution_context,
Some(get_next_build_import_map()),
Some(edge_transition_map(
server_addr,
dist_root,
project_path,
chunking_context.output_root(),
next_config,
execution_context,
)),
);
let configs = config_assets(context, project_path, next_config.page_extensions());
let router_asset = route_executor(context, configs);
// This invalidates the router when the next config changes
let next_config_changed = next_config_changed(context, project_path);
let request = serde_json::value::to_value(&*request.await?)?;
let Some(dir) = to_sys_path(project_path).await? else {
bail!("Next.js requires a disk path to check for valid routes");
};
let chunking_context = chunking_context.with_layer("router".to_string());
let server_addr = server_addr.await?;
let invalidation = Completions::all(vec![next_config_changed, routes_changed]);
let debug = should_debug("router");
let result = evaluate(
router_asset,
project_path,
env,
AssetIdent::from_path(project_path),
context,
chunking_context.with_layer("router".to_string()),
None,
vec![
Vc::cell(request),
Vc::cell(dir.to_string_lossy().into()),
Vc::cell(serde_json::to_value(ServerInfo::try_from(&*server_addr)?)?),
],
invalidation,
debug,
)
.await?;
let mut read = result.read();
let first = match read.next().await {
Some(Ok(first)) => first,
Some(Err(e)) => {
return Ok(RouterResult::Error(SharedError::new(
anyhow!(e)
.context("router evaluation failed: received error from javascript stream"),
))
.cell())
}
None => {
return Ok(RouterResult::Error(shared_anyhow!(
"router evaluation failed: no message received from javascript stream"
))
.cell())
}
};
let first = first.to_str()?;
let first: RouterIncomingMessage = parse_json_with_source_context(first)
.with_context(|| format!("parsing incoming message ({})", first))?;
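// The first message decides the routing outcome: `Rewrite` and `None` are
// terminal, `MiddlewareHeaders` is followed by a stream of `MiddlewareBody`
// chunks, and `Error` carries a failure reported by the JavaScript side.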
let (res, read) = match first {
RouterIncomingMessage::Rewrite { data } => (RouterResult::Rewrite(data), Some(read)),
RouterIncomingMessage::MiddlewareHeaders { data } => {
// The double encoding here is annoying. It'd be a lot nicer if we could embed
// a buffer directly into the IPC message without having to wrap it in an
// object.
let body = read.map(|data| {
let chunk: RouterIncomingMessage = data?
.to_str()
.context("error decoding string")
.and_then(parse_json_with_source_context)?;
match chunk {
RouterIncomingMessage::MiddlewareBody { data } => Ok(Bytes::from(data)),
m => Err(shared_anyhow!("unexpected message type: {:#?}", m)),
}
});
let middleware = MiddlewareResponse {
status_code: data.status_code,
headers: data.headers,
body: Stream::from(body),
};
(RouterResult::Middleware(middleware), None)
}
RouterIncomingMessage::None => (RouterResult::None, Some(read)),
RouterIncomingMessage::Error { error } => {
// Must be the same pool as above
let pool = get_evaluate_pool(
router_asset,
project_path,
env,
context,
chunking_context,
None,
invalidation,
debug,
)
.await?;
(
RouterResult::Error(shared_anyhow!(
trace_stack_with_source_mapping_assets(
error,
pool.assets_for_source_mapping,
chunking_context.output_root(),
project_path
)
.await?
)),
Some(read),
)
}
RouterIncomingMessage::MiddlewareBody { .. } => (
RouterResult::Error(shared_anyhow!(
"unexpected incoming middleware body without middleware headers"
)),
Some(read),
),
};
// Middleware will naturally drain the full stream, but the rest only take a
// single item. In order to free the NodeJsOperation, we must pull another
// value out of the stream.
if let Some(mut read) = read {
if let Some(v) = read.next().await {
bail!("unexpected message type: {:#?}", v);
}
}
Ok(res.cell())
}

View file

@ -1,222 +0,0 @@
use anyhow::{anyhow, bail, Context, Result};
use futures::{Stream, TryStreamExt};
use indexmap::IndexSet;
use turbo_tasks::{Completion, Completions, Value, Vc};
use turbopack_binding::turbopack::{
core::{
environment::ServerAddr,
introspect::{Introspectable, IntrospectableChildren},
},
dev_server::source::{
route_tree::{RouteTree, RouteType},
Body, ContentSource, ContentSourceContent, ContentSourceData, ContentSourceDataVary,
GetContentSourceContent, HeaderList, ProxyResult, RewriteBuilder,
},
node::execution_context::ExecutionContext,
};
use crate::{
app_structure::OptionAppDir,
next_config::NextConfig,
pages_structure::PagesStructure,
router::{route, RouterRequest, RouterResult},
};
#[turbo_tasks::value(shared)]
pub struct NextRouterContentSource {
/// A wrapped content source from which we will fetch assets.
inner: Vc<Box<dyn ContentSource>>,
execution_context: Vc<ExecutionContext>,
next_config: Vc<NextConfig>,
server_addr: Vc<ServerAddr>,
app_dir: Vc<OptionAppDir>,
pages_structure: Vc<PagesStructure>,
dist_root: Vc<String>,
}
#[turbo_tasks::value_impl]
impl NextRouterContentSource {
#[turbo_tasks::function]
pub fn new(
inner: Vc<Box<dyn ContentSource>>,
execution_context: Vc<ExecutionContext>,
next_config: Vc<NextConfig>,
server_addr: Vc<ServerAddr>,
app_dir: Vc<OptionAppDir>,
pages_structure: Vc<PagesStructure>,
dist_root: Vc<String>,
) -> Vc<NextRouterContentSource> {
NextRouterContentSource {
inner,
execution_context,
next_config,
server_addr,
app_dir,
pages_structure,
dist_root,
}
.cell()
}
}
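/// Completes whenever the app or pages directory structure changes; used to
/// invalidate the router.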
#[turbo_tasks::function]
fn routes_changed(
app_dir: Vc<OptionAppDir>,
pages_structure: Vc<PagesStructure>,
next_config: Vc<NextConfig>,
) -> Vc<Completion> {
Completions::all(vec![
app_dir.routes_changed(next_config),
pages_structure.routes_changed(),
])
}
#[turbo_tasks::value_impl]
impl ContentSource for NextRouterContentSource {
#[turbo_tasks::function]
async fn get_routes(self: Vc<Self>) -> Result<Vc<RouteTree>> {
let this = self.await?;
// The next-dev server can currently run against projects as simple as
// `index.js`. If this isn't a Next.js project, don't try to use the Next.js
// router.
if this.app_dir.await?.is_none() && this.pages_structure.await?.pages.is_none() {
return Ok(this.inner.get_routes());
}
// Prefetch get_routes from inner
let _ = this.inner.get_routes();
Ok(RouteTree::new_route(
Vec::new(),
RouteType::CatchAll,
Vc::upcast(self),
))
}
}
#[turbo_tasks::value_impl]
impl GetContentSourceContent for NextRouterContentSource {
#[turbo_tasks::function]
fn vary(&self) -> Vc<ContentSourceDataVary> {
ContentSourceDataVary {
method: true,
raw_headers: true,
raw_query: true,
body: true,
..Default::default()
}
.cell()
}
#[turbo_tasks::function]
async fn get(
self: Vc<Self>,
path: String,
data: Value<ContentSourceData>,
) -> Result<Vc<ContentSourceContent>> {
let this = self.await?;
let ContentSourceData {
method: Some(method),
raw_headers: Some(raw_headers),
raw_query: Some(raw_query),
body: Some(body),
..
} = &*data
else {
bail!("missing data for router");
};
// TODO: change router so we can stream the request body to it
let mut body_stream = body.await?.read();
let mut body = Vec::with_capacity(body_stream.size_hint().0);
while let Some(data) = body_stream.try_next().await? {
body.push(data);
}
let request = RouterRequest {
pathname: format!("/{path}"),
method: method.clone(),
raw_headers: raw_headers.clone(),
raw_query: raw_query.clone(),
body,
}
.cell();
let res = route(
this.execution_context,
request,
this.dist_root,
this.next_config,
this.server_addr,
routes_changed(this.app_dir, this.pages_structure, this.next_config),
);
let res = res
.await
.with_context(|| format!("failed to fetch /{path}{}", formated_query(raw_query)))?;
Ok(match &*res {
RouterResult::Error(e) => {
return Err(anyhow!(e.clone()).context(format!(
"error during Next.js routing for /{path}{}",
formated_query(raw_query)
)))
}
RouterResult::None => {
let rewrite =
RewriteBuilder::new_source_with_path_and_query(this.inner, format!("/{path}"));
ContentSourceContent::Rewrite(rewrite.build()).cell()
}
RouterResult::Rewrite(data) => {
let mut rewrite =
RewriteBuilder::new_source_with_path_and_query(this.inner, data.url.clone());
if !data.headers.is_empty() {
rewrite = rewrite.response_headers(HeaderList::new(data.headers.clone()));
}
ContentSourceContent::Rewrite(rewrite.build()).cell()
}
RouterResult::Middleware(data) => ContentSourceContent::HttpProxy(
ProxyResult {
status: data.status_code,
headers: data.headers.clone(),
body: Body::from_stream(data.body.read()),
}
.cell(),
)
.cell(),
})
}
}
fn formated_query(query: &str) -> String {
if query.is_empty() {
"".to_string()
} else {
format!("?{query}")
}
}
#[turbo_tasks::value_impl]
impl Introspectable for NextRouterContentSource {
#[turbo_tasks::function]
fn ty(&self) -> Vc<String> {
Vc::cell("next router source".to_string())
}
#[turbo_tasks::function]
fn details(&self) -> Vc<String> {
Vc::cell("handles routing by letting Next.js handle the routing.".to_string())
}
#[turbo_tasks::function]
async fn children(&self) -> Result<Vc<IntrospectableChildren>> {
let mut children = IndexSet::new();
if let Some(inner) = Vc::try_resolve_sidecast::<Box<dyn Introspectable>>(self.inner).await?
{
children.insert((Vc::cell("inner".to_string()), inner));
}
Ok(Vc::cell(children))
}
}

View file

@ -1,252 +0,0 @@
use std::collections::HashMap;
use anyhow::{anyhow, Result};
use turbo_tasks::Vc;
use turbopack_binding::{
turbo::{
tasks::{TryJoinIterExt, Value},
tasks_fs::FileSystemPath,
},
turbopack::{
core::{
chunk::{ChunkableModule, ChunkingContext},
compile_time_defines,
compile_time_info::{CompileTimeDefines, CompileTimeInfo, FreeVarReferences},
context::AssetContext,
environment::{BrowserEnvironment, Environment, ExecutionEnvironment},
file_source::FileSource,
free_var_references,
reference_type::{EntryReferenceSubType, ReferenceType},
resolve::{
origin::{PlainResolveOrigin, ResolveOrigin, ResolveOriginExt},
parse::Request,
},
},
dev::{react_refresh::assert_can_resolve_react_refresh, DevChunkingContext},
dev_server::{
html::DevHtmlAsset,
source::{asset_graph::AssetGraphContentSource, ContentSource},
},
node::execution_context::ExecutionContext,
turbopack::{ecmascript::EcmascriptModuleAsset, ModuleAssetContext},
},
};
use crate::{
embed_js::next_js_file_path,
mode::NextMode,
next_client::{
context::{get_client_resolve_options_context, ClientContextType},
get_client_module_options_context, RuntimeEntries, RuntimeEntry,
},
next_config::NextConfig,
};
fn defines() -> CompileTimeDefines {
compile_time_defines!(
process.turbopack = true,
process.env.NODE_ENV = "development",
)
}
#[turbo_tasks::function]
fn web_defines() -> Vc<CompileTimeDefines> {
defines().cell()
}
#[turbo_tasks::function]
async fn web_free_vars() -> Result<Vc<FreeVarReferences>> {
Ok(free_var_references!(..defines().into_iter()).cell())
}
#[turbo_tasks::function]
pub fn get_compile_time_info(browserslist_query: String) -> Vc<CompileTimeInfo> {
CompileTimeInfo::builder(Environment::new(Value::new(ExecutionEnvironment::Browser(
BrowserEnvironment {
dom: true,
web_worker: false,
service_worker: false,
browserslist_query: browserslist_query.to_owned(),
}
.into(),
))))
.defines(web_defines())
.free_var_references(web_free_vars())
.cell()
}
#[turbo_tasks::function]
async fn get_web_runtime_entries(
project_root: Vc<FileSystemPath>,
ty: Value<ClientContextType>,
mode: NextMode,
next_config: Vc<NextConfig>,
execution_context: Vc<ExecutionContext>,
) -> Result<Vc<RuntimeEntries>> {
let mut runtime_entries = vec![];
let resolve_options_context =
get_client_resolve_options_context(project_root, ty, mode, next_config, execution_context);
let enable_react_refresh =
assert_can_resolve_react_refresh(project_root, resolve_options_context)
.await?
.as_request();
// It's important that React Refresh come before the regular bootstrap file,
// because the bootstrap contains JSX which requires Refresh's global
// functions to be available.
if let Some(request) = enable_react_refresh {
runtime_entries
.push(RuntimeEntry::Request(request, project_root.join("_".to_string())).cell())
};
runtime_entries.push(
RuntimeEntry::Source(Vc::upcast(FileSource::new(next_js_file_path(
"dev/bootstrap.ts".to_string(),
))))
.cell(),
);
Ok(Vc::cell(runtime_entries))
}
// This is different from `get_client_chunking_context` as we need the assets
// to be available under a different root, otherwise we can run into conflicts.
// We don't want to have `get_client_chunking_context` depend on the
// `ClientContextType` as it's only relevant in this case, and would otherwise
// create new dev chunking contexts for no reason.
#[turbo_tasks::function]
fn get_web_client_chunking_context(
project_path: Vc<FileSystemPath>,
client_root: Vc<FileSystemPath>,
environment: Vc<Environment>,
) -> Vc<Box<dyn ChunkingContext>> {
Vc::upcast(
DevChunkingContext::builder(
project_path,
client_root,
client_root.join("_chunks".to_string()),
client_root.join("_media".to_string()),
environment,
)
.hot_module_replacement()
.build(),
)
}
#[turbo_tasks::function]
fn get_web_client_asset_context(
project_path: Vc<FileSystemPath>,
execution_context: Vc<ExecutionContext>,
compile_time_info: Vc<CompileTimeInfo>,
ty: Value<ClientContextType>,
mode: NextMode,
next_config: Vc<NextConfig>,
) -> Vc<Box<dyn AssetContext>> {
let resolve_options_context =
get_client_resolve_options_context(project_path, ty, mode, next_config, execution_context);
let module_options_context = get_client_module_options_context(
project_path,
execution_context,
compile_time_info.environment(),
ty,
mode,
next_config,
);
let context: Vc<Box<dyn AssetContext>> = Vc::upcast(ModuleAssetContext::new(
Vc::cell(HashMap::new()),
compile_time_info,
module_options_context,
resolve_options_context,
));
context
}
#[turbo_tasks::function]
pub async fn create_web_entry_source(
project_root: Vc<FileSystemPath>,
execution_context: Vc<ExecutionContext>,
entry_requests: Vec<Vc<Request>>,
client_root: Vc<FileSystemPath>,
eager_compile: bool,
browserslist_query: String,
next_config: Vc<NextConfig>,
) -> Result<Vc<Box<dyn ContentSource>>> {
let ty = Value::new(ClientContextType::Other);
let mode = NextMode::DevServer;
let compile_time_info = get_compile_time_info(browserslist_query);
let context = get_web_client_asset_context(
project_root,
execution_context,
compile_time_info,
ty,
mode,
next_config,
);
let chunking_context =
get_web_client_chunking_context(project_root, client_root, compile_time_info.environment());
let entries = get_web_runtime_entries(project_root, ty, mode, next_config, execution_context);
let runtime_entries = entries.resolve_entries(context);
let origin = Vc::upcast::<Box<dyn ResolveOrigin>>(PlainResolveOrigin::new(
context,
project_root.join("_".to_string()),
));
let entries = entry_requests
.into_iter()
.map(|request| async move {
let ty = Value::new(ReferenceType::Entry(EntryReferenceSubType::Web));
Ok(origin
.resolve_asset(request, origin.resolve_options(ty.clone()), ty)
.primary_modules()
.await?
.first()
.copied())
})
.try_join()
.await?;
let entries: Vec<_> = entries
.into_iter()
.flatten()
.map(|module| async move {
if let Some(ecmascript) =
Vc::try_resolve_downcast_type::<EcmascriptModuleAsset>(module).await?
{
Ok((
Vc::upcast(ecmascript),
chunking_context,
Some(runtime_entries.with_entry(Vc::upcast(ecmascript))),
))
} else if let Some(chunkable) =
Vc::try_resolve_sidecast::<Box<dyn ChunkableModule>>(module).await?
{
// TODO this is missing runtime code, so it's probably broken and we should also
// add an ecmascript chunk with the runtime code
Ok((chunkable, chunking_context, None))
} else {
// TODO convert into a serve-able asset
Err(anyhow!(
"Entry module is not chunkable, so it can't be used to bootstrap the \
application"
))
}
})
.try_join()
.await?;
let entry_asset = Vc::upcast(DevHtmlAsset::new(
client_root.join("index.html".to_string()),
entries,
));
let graph = Vc::upcast(if eager_compile {
AssetGraphContentSource::new_eager(client_root, entry_asset)
} else {
AssetGraphContentSource::new_lazy(client_root, entry_asset)
});
Ok(graph)
}

View file

@ -1 +0,0 @@
tests/temp

View file

@ -1,67 +0,0 @@
[package]
name = "next-dev-tests"
version = "0.1.0"
description = "TBD"
license = "MPL-2.0"
edition = "2021"
autobenches = false
# don't publish this crate
publish = false
[features]
tokio_console = [
"dep:console-subscriber",
"tokio/tracing",
"turbopack-binding/__turbo_tasks_tokio_tracing",
]
[dependencies]
console-subscriber = { workspace = true, optional = true }
[dev-dependencies]
anyhow = { workspace = true }
chromiumoxide = { workspace = true, features = [
"tokio-runtime",
], default-features = false }
dunce = { workspace = true }
futures = { workspace = true }
httpmock = { workspace = true, default-features = false, features = ["standalone"] }
lazy_static = { workspace = true }
mime = { workspace = true }
next-core = { workspace = true }
next-dev = { path = "../next-dev" }
owo-colors = { workspace = true }
parking_lot = { workspace = true }
rand = { workspace = true }
regex = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tempdir = "0.3.7"
testing = { workspace = true }
tokio = { workspace = true, features = ["full"] }
# For matching on errors from chromiumoxide. Keep in
# sync with chromiumoxide's tungstenite requirement.
tungstenite = { workspace = true }
turbopack-binding = { workspace = true, features = [
"__turbo_tasks_malloc",
"__turbo_tasks_memory",
"__turbo_tasks",
"__turbo_tasks_fs",
"__turbo_tasks_testing",
"__turbopack_cli_utils",
"__turbopack_test_utils",
"__turbopack_core",
"__turbopack_core_issue_path",
"__turbopack_node",
"__turbopack_dev_server",
"__swc_transform_relay"
]}
turbo-tasks = { workspace = true }
url = { workspace = true }
webbrowser = { workspace = true }
[build-dependencies]
turbopack-binding = { workspace = true, features = [
"__turbo_tasks_build"
]}

View file

@ -1,9 +0,0 @@
use turbopack_binding::turbo::tasks_build::{generate_register, rerun_if_glob};
fn main() {
generate_register();
// The test/integration crate need to be rebuilt if any test input is changed.
// Unfortunately, we can't have the build.rs file operate differently on
// each file, so the entire next-dev crate needs to be rebuilt.
rerun_if_glob("tests/integration/*/*", "tests/integration");
}

View file

@ -1,275 +0,0 @@
import * as jest from 'jest-circus-browser/dist/umd/jest-circus'
import expectMod from 'expect/build-es5/index'
declare global {
var __jest__: typeof jest
var expect: typeof expectMod
// We need to extract only the call signature as `autoReady(jest.describe)` drops all the other properties
var describe: AutoReady<typeof jest.describe>
var it: AutoReady<typeof jest.it>
var TURBOPACK_READY: (arg: string) => void
var TURBOPACK_CHANGE_FILE: (arg: string) => void
var nsObj: (obj: any) => any
var __turbopackFileChanged: (id: string, error: Error) => void
interface Window {
NEXT_HYDRATED?: boolean
onNextHydrated?: () => void
}
}
globalThis.__jest__ = jest
globalThis.expect = expectMod
globalThis.describe = autoReady(jest.describe, markReady)
globalThis.it = autoReady(jest.it, markReady)
// From https://github.com/webpack/webpack/blob/9fcaa243573005d6fdece9a3f8d89a0e8b399613/test/TestCases.template.js#L422
globalThis.nsObj = function nsObj(obj) {
Object.defineProperty(obj, Symbol.toStringTag, {
value: 'Module',
})
return obj
}
type AnyFunction = (...args: any[]) => any
type AutoReady<T extends AnyFunction> = T & {
[K in keyof T]: T[K] extends AnyFunction ? AutoReady<T[K]> : T[K]
}
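/**
 * Wraps `fn` so that `callback` runs before every invocation, recursively
 * wrapping any function-valued properties so they behave the same way.
 */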
function autoReady<T extends AnyFunction, F extends () => void>(
fn: T,
callback: F
): AutoReady<T> {
const wrappedFn = ((...args: Parameters<T>): ReturnType<T> => {
callback()
return fn(...args)
}) as AutoReady<T>
for (const key in fn) {
if (typeof fn[key] === 'function') {
;(wrappedFn as any)[key] = autoReady(fn[key] as AnyFunction, callback)
} else {
;(wrappedFn as any)[key] = fn[key]
}
}
return wrappedFn
}
let isReady = false
function markReady() {
if (!isReady) {
isReady = true
requestIdleCallback(
() => {
if (typeof TURBOPACK_READY === 'function') {
TURBOPACK_READY('')
} else {
console.info(
'%cTurbopack tests:',
'font-weight: bold;',
'Entering debug mode. Run `await __jest__.run()` in the browser console to run tests.'
)
}
},
{ timeout: 20000 }
)
}
}
export function wait(ms: number): Promise<void> {
return new Promise((resolve) => {
setTimeout(resolve, ms)
})
}
export async function waitForCondition(
predicate: () => boolean,
timeout: number | null = null
): Promise<void> {
const start = Date.now()
while (true) {
if (predicate()) {
break
}
await wait(1)
if (timeout != null && Date.now() - start > timeout) {
throw new Error('Timed out waiting for condition')
}
}
}
async function waitForPath(contentWindow: Window, path: string): Promise<void> {
return waitForCondition(() => contentWindow.location.pathname === path)
}
/**
* Loads a new page in an iframe and waits for it to load.
*/
export function load(iframe: HTMLIFrameElement, path: string): Promise<void> {
iframe.src = path
return new Promise((resolve) => {
let eventListener = () => {
iframe.removeEventListener('load', eventListener)
resolve()
}
iframe.addEventListener('load', eventListener)
})
}
/**
* Waits for the currently loading page in an iframe to finish loading.
*
* If the iframe is already loaded, this function will return immediately.
*
* Note: if you've just changed the iframe's `src` attribute, you should use `load` instead.
*/
export function waitForLoaded(iframe: HTMLIFrameElement): Promise<void> {
return new Promise((resolve) => {
if (
iframe.contentDocument != null &&
iframe.contentDocument.readyState === 'complete' &&
iframe.contentDocument.documentURI !== 'about:blank'
) {
resolve()
} else {
let eventListener = () => {
iframe.removeEventListener('load', eventListener)
resolve()
}
iframe.addEventListener('load', eventListener)
}
})
}
export function waitForSelector(
node: HTMLIFrameElement | ShadowRoot,
selector: string
): Promise<Element> {
return new Promise((resolve, reject) => {
const document =
'contentDocument' in node ? node.contentDocument!.documentElement : node
const timeout = 30000
let element = document.querySelector(selector)
if (element) {
return resolve(element)
}
const observer = new MutationObserver(async () => {
let el = document.querySelector(selector)
if (el) {
resolve(el)
observer.disconnect()
}
})
observer.observe(document, { childList: true, subtree: true })
if (timeout) {
setTimeout(() => {
observer.disconnect()
reject(
new Error(
`Timed out waiting for selector "${selector}" in "${document}"\n\nNode content: "${
'innerHTML' in document ? document.innerHTML : 'no innerHTML'
}"`
)
)
}, timeout)
}
})
}
export async function waitForErrorOverlay(
node: HTMLIFrameElement
): Promise<ShadowRoot> {
let element = await waitForSelector(node, 'nextjs-portal')
return element.shadowRoot!
}
export function waitForHydration(
iframe: HTMLIFrameElement,
path: string
): Promise<void> {
return new Promise((resolve) => {
if (
iframe.contentDocument != null &&
iframe.contentDocument.readyState === 'complete'
) {
waitForHydrationAndResolve(iframe.contentWindow!, path).then(resolve)
} else {
const eventListener = () => {
waitForHydrationAndResolve(iframe.contentWindow!, path).then(resolve)
iframe.removeEventListener('load', eventListener)
}
iframe.addEventListener('load', eventListener)
}
})
}
async function waitForHydrationAndResolve(
contentWindow: Window,
path: string
): Promise<void> {
await waitForPath(contentWindow, path)
return await new Promise((resolve) => {
if (contentWindow.NEXT_HYDRATED) {
resolve()
} else {
contentWindow.onNextHydrated = () => {
resolve()
}
}
})
}
export function markAsHydrated() {
window.NEXT_HYDRATED = true
if (typeof window.onNextHydrated === 'function') {
window.onNextHydrated()
}
}
const fileChangedResolvers: Map<
string,
{ resolve: (value: unknown) => void; reject: (error: Error) => void }
> = new Map()
globalThis.__turbopackFileChanged = (id: string, error?: Error) => {
const resolver = fileChangedResolvers.get(id)
if (resolver == null) {
throw new Error(`No resolver found for id ${id}`)
} else if (error != null) {
resolver.reject(error)
} else {
resolver.resolve(null)
}
}
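// Generates a short random identifier (not cryptographically secure) used to
// correlate a file-change request with its resolver.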
function unsafeUniqueId(): string {
const LENGTH = 10
const BASE = 16
return Math.floor(Math.random() * Math.pow(BASE, LENGTH))
.toString(BASE)
.slice(0, LENGTH)
}
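/**
 * Asks the test runner (via the TURBOPACK_CHANGE_FILE binding) to replace
 * `find` with `replaceWith` in `path`, resolving once the runner reports the
 * change back through `__turbopackFileChanged`.
 */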
export async function changeFile(
path: string,
find: string,
replaceWith: string
) {
return new Promise((resolve, reject) => {
let id
while ((id = unsafeUniqueId())) {
if (!fileChangedResolvers.has(id)) break
}
fileChangedResolvers.set(id, { resolve, reject })
TURBOPACK_CHANGE_FILE(JSON.stringify({ path, id, find, replaceWith }))
})
}

View file

@ -1,20 +0,0 @@
import { useEffect } from 'react'
export type Harness = typeof import('./harness')
let ranOnce = false
/**
* Run a callback once the test harness is loaded.
*/
export function useTestHarness<T extends (harness: Harness) => void>(
callback: T
) {
useEffect(() => {
if (ranOnce) {
return
}
ranOnce = true
import('./harness').then(callback)
})
}
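// Typical usage from a client component (this mirrors the integration tests in
// this suite):
//
//   useTestHarness(() => {
//     it('should run', () => {})
//   })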

View file

@ -1,11 +0,0 @@
{
"name": "@turbo/pack-test-harness",
"private": true,
"version": "0.0.1",
"main": "./hooks.ts",
"dependencies": {
"expect": "24.5.0",
"jest-circus": "27.5.1",
"jest-circus-browser": "^1.0.7"
}
}

View file

@ -1,7 +0,0 @@
declare module 'jest-circus-browser/dist/umd/jest-circus' {
export * from 'jest-circus'
}
declare module 'expect/build-es5/index' {
export { default } from 'expect'
}

View file

@ -1,768 +0,0 @@
#![feature(arbitrary_self_types)]
#![feature(async_fn_in_trait)]
#![cfg(test)]
use std::{
collections::{hash_map::Entry, HashMap},
env,
fmt::Write,
future::{pending, Future},
net::{IpAddr, Ipv4Addr, SocketAddr},
panic::{catch_unwind, resume_unwind, AssertUnwindSafe},
path::{Path, PathBuf},
time::Duration,
};
use anyhow::{anyhow, Context, Result};
use chromiumoxide::{
browser::{Browser, BrowserConfig},
cdp::{
browser_protocol::network::EventResponseReceived,
js_protocol::runtime::{
AddBindingParams, EventBindingCalled, EventConsoleApiCalled, EventExceptionThrown,
PropertyPreview, RemoteObject,
},
},
error::CdpError::Ws,
Page,
};
use dunce::canonicalize;
use futures::StreamExt;
use lazy_static::lazy_static;
use next_core::turbopack::{
cli_utils::issue::{format_issue, LogOptions},
core::issue::IssueSeverity,
};
use next_dev::{EntryRequest, NextDevServerBuilder};
use owo_colors::OwoColorize;
use parking_lot::Mutex;
use regex::{Captures, Regex, Replacer};
use serde::Deserialize;
use tempdir::TempDir;
use tokio::{
net::TcpSocket,
sync::mpsc::{unbounded_channel, UnboundedSender},
task::JoinSet,
};
use tungstenite::{error::ProtocolError::ResetWithoutClosingHandshake, Error::Protocol};
use turbo_tasks::{ReadRef, Vc};
use turbopack_binding::{
turbo::{
tasks::{RawVc, State, TransientInstance, TransientValue, TurboTasks},
tasks_fs::{DiskFileSystem, FileSystem, FileSystemPath},
tasks_memory::MemoryBackend,
tasks_testing::retry::{retry, retry_async},
},
turbopack::{
core::issue::{
CapturedIssues, Issue, IssueReporter, Issues, OptionIssueSource, PlainIssue,
},
test_utils::snapshot::snapshot_issues,
},
};
fn register() {
next_dev::register();
include!(concat!(env!("OUT_DIR"), "/register_test_integration.rs"));
}
#[derive(Debug)]
struct JsResult {
uncaught_exceptions: Vec<String>,
run_result: JestRunResult,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct JestRunResult {
test_results: Vec<JestTestResult>,
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct JestTestResult {
test_path: Vec<String>,
errors: Vec<String>,
}
lazy_static! {
/// Allows for interactive manual debugging of a test case in a browser with:
/// `TURBOPACK_DEBUG_BROWSER=1 cargo test -p next-dev-tests -- test_my_pattern --nocapture`
static ref DEBUG_BROWSER: bool = env::var("TURBOPACK_DEBUG_BROWSER").is_ok();
/// Only starts the dev server on port 3000, but doesn't spawn a browser or run any tests.
static ref DEBUG_START: bool = env::var("TURBOPACK_DEBUG_START").is_ok();
/// When using TURBOPACK_DEBUG_START, this will open the browser to the dev server.
static ref DEBUG_OPEN: bool = env::var("TURBOPACK_DEBUG_OPEN").is_ok();
}
fn run_async_test<'a, T>(future: impl Future<Output = T> + Send + 'a) -> T {
let runtime = tokio::runtime::Builder::new_multi_thread()
.worker_threads(1)
.enable_all()
.build()
.unwrap();
let result = catch_unwind(AssertUnwindSafe(|| runtime.block_on(future)));
println!("Stutting down runtime...");
runtime.shutdown_timeout(Duration::from_secs(5));
println!("Stut down runtime");
match result {
Ok(result) => result,
Err(err) => resume_unwind(err),
}
}
#[testing::fixture("tests/integration/*/*/*/input")]
fn test(resource: PathBuf) {
let resource = resource.parent().unwrap().to_path_buf();
if resource.ends_with("__flakey__") {
// "Skip" directories named `__skipped__`, which include test directories to
// skip. These tests are not considered truly skipped by `cargo test`, but they
// are not run.
//
// All current `__flakey__` tests need longer timeouts, but the current
// build of `jest-circus-browser` does not support configuring this.
//
// TODO(WEB-319): Update the version of `jest-circus` in `jest-circus-browser`,
// which supports configuring this. Or explore an alternative.
return;
}
let JsResult {
uncaught_exceptions,
run_result,
} = run_async_test(run_test(resource));
let mut messages = vec![];
if run_result.test_results.is_empty() {
messages.push("No tests were run.".to_string());
}
for test_result in run_result.test_results {
// It's possible to fail multiple tests across these tests,
// so collect them and fail the respective test in Rust with
// an aggregate message.
if !test_result.errors.is_empty() {
messages.push(format!(
"\"{}\":\n{}",
test_result.test_path[1..].join(" > "),
test_result.errors.join("\n")
));
}
}
for uncaught_exception in uncaught_exceptions {
messages.push(format!("Uncaught exception: {}", uncaught_exception));
}
if !messages.is_empty() {
panic!(
"Failed with error(s) in the following test(s):\n\n{}",
messages.join("\n\n--\n")
)
};
}
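/// Recursively copies the contents of `from` into `to`. Both top-level
/// directories must already exist; nested directories are created as needed.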
fn copy_recursive(from: &Path, to: &Path) -> std::io::Result<()> {
let from = canonicalize(from)?;
let to = canonicalize(to)?;
let mut entries = vec![];
for entry in from.read_dir()? {
let entry = entry?;
let path = entry.path();
let to_path = to.join(path.file_name().unwrap());
if path.is_dir() {
std::fs::create_dir_all(&to_path)?;
entries.push((path, to_path));
} else {
std::fs::copy(&path, &to_path)?;
}
}
for (from, to) in entries {
copy_recursive(&from, &to)?;
}
Ok(())
}
async fn run_test(resource: PathBuf) -> JsResult {
register();
let resource = canonicalize(resource).unwrap();
assert!(resource.exists(), "{} does not exist", resource.display());
assert!(
resource.is_dir(),
"{} is not a directory. Integration tests must be directories.",
resource.to_str().unwrap()
);
let package_root = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let tests_dir = package_root.join("tests");
let integration_tests_dir = tests_dir.join("integration");
// We run tests from a temporary directory because tests can modify files in the
// test directory when testing the file watcher/HMR, and we have no reliable way
// to ensure that we can restore the original state of the test directory after
// running the test.
let resource_temp: PathBuf = tests_dir.join("temp").join(
resource
.strip_prefix(integration_tests_dir)
.expect("resource path must be within the integration tests directory"),
);
// We don't care about errors when removing the previous temp directory.
// It can still exist if we crashed during a previous test run.
let _ = std::fs::remove_dir_all(&resource_temp);
std::fs::create_dir_all(&resource_temp).expect("failed to create temporary directory");
copy_recursive(&resource, &resource_temp)
.expect("failed to copy test files to temporary directory");
let cargo_workspace_root = canonicalize(package_root)
.unwrap()
.parent()
.unwrap()
.parent()
.unwrap()
.to_path_buf();
let test_dir = resource_temp.to_path_buf();
let workspace_root = cargo_workspace_root.parent().unwrap().parent().unwrap();
let project_dir = test_dir.join("input");
let requested_addr = if *DEBUG_START {
"127.0.0.1:3000".parse().unwrap()
} else {
get_free_local_addr().unwrap()
};
let mock_dir = resource_temp.join("__httpmock__");
let mock_server_future = get_mock_server_future(&mock_dir);
let (issue_tx, mut issue_rx) = unbounded_channel();
let issue_tx = TransientInstance::new(issue_tx);
let result;
{
let tt = TurboTasks::new(MemoryBackend::default());
let server = NextDevServerBuilder::new(
tt.clone(),
project_dir.to_string_lossy().to_string(),
workspace_root.to_string_lossy().to_string(),
)
.entry_request(EntryRequest::Module(
"@turbo/pack-test-harness".to_string(),
"/harness".to_string(),
))
.entry_request(EntryRequest::Relative("index.js".to_owned()))
.eager_compile(false)
.hostname(requested_addr.ip())
.port(requested_addr.port())
.log_level(turbopack_binding::turbopack::core::issue::IssueSeverity::Warning)
.log_detail(true)
.issue_reporter(Box::new(move || {
Vc::upcast(TestIssueReporter::new(issue_tx.clone()))
}))
.show_all(true)
.build()
.await
.unwrap();
let local_addr =
SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), server.addr.port());
println!(
"{event_type} - server started at http://{address}",
event_type = "ready".green(),
address = server.addr
);
if *DEBUG_START {
if *DEBUG_OPEN {
webbrowser::open(&format!("http://{}", local_addr)).unwrap();
}
tokio::select! {
_ = mock_server_future => {},
_ = pending() => {},
_ = server.future => {},
};
panic!("Never resolves")
}
result = tokio::select! {
// Poll the mock_server first to add the env var
_ = mock_server_future => panic!("Never resolves"),
r = run_browser(local_addr, &project_dir) => r.expect("error while running browser"),
_ = server.future => panic!("Never resolves"),
};
env::remove_var("TURBOPACK_TEST_ONLY_MOCK_SERVER");
let task = tt.spawn_once_task(async move {
let issues_fs = Vc::upcast::<Box<dyn FileSystem>>(DiskFileSystem::new(
"issues".to_string(),
resource.join("issues").to_string_lossy().to_string(),
));
let mut issues = vec![];
while let Ok(issue) = issue_rx.try_recv() {
issues.push(issue);
}
snapshot_issues(
issues.iter().cloned(),
issues_fs.root(),
&cargo_workspace_root.to_string_lossy(),
)
.await?;
Ok::<Vc<()>, _>(Default::default())
});
tt.wait_task_completion(task, true).await.unwrap();
}
if let Err(err) = retry(
(),
|()| std::fs::remove_dir_all(&resource_temp),
3,
Duration::from_millis(100),
) {
eprintln!("Failed to remove temporary directory: {}", err);
}
result
}
async fn create_browser(is_debugging: bool) -> Result<(Browser, TempDir, JoinSet<()>)> {
let mut config_builder = BrowserConfig::builder();
config_builder = config_builder.no_sandbox();
let tmp = TempDir::new("chromiumoxid").unwrap();
config_builder = config_builder.user_data_dir(&tmp);
if is_debugging {
config_builder = config_builder
.with_head()
.args(vec!["--auto-open-devtools-for-tabs"]);
}
let (browser, mut handler) = retry_async(
config_builder.build().map_err(|s| anyhow!(s))?,
|c| {
let c = c.clone();
Browser::launch(c)
},
3,
Duration::from_millis(100),
)
.await
.context("Launching browser failed")?;
// For windows it's important that the browser is dropped so that the test can
// complete. To do that we need to cancel the spawned task below (which will
// drop the browser). For this we are using a JoinSet which cancels all tasks
// when dropped.
let mut set = JoinSet::new();
// See https://crates.io/crates/chromiumoxide
set.spawn(async move {
loop {
if let Err(Ws(Protocol(ResetWithoutClosingHandshake))) = handler.next().await.unwrap() {
// The user has most likely closed the browser. End gracefully.
break;
}
}
});
Ok((browser, tmp, set))
}
const TURBOPACK_READY_BINDING: &str = "TURBOPACK_READY";
const TURBOPACK_DONE_BINDING: &str = "TURBOPACK_DONE";
const TURBOPACK_CHANGE_FILE_BINDING: &str = "TURBOPACK_CHANGE_FILE";
const BINDINGS: [&str; 3] = [
TURBOPACK_READY_BINDING,
TURBOPACK_DONE_BINDING,
TURBOPACK_CHANGE_FILE_BINDING,
];
async fn run_browser(addr: SocketAddr, project_dir: &Path) -> Result<JsResult> {
let is_debugging = *DEBUG_BROWSER;
println!("starting browser...");
let (browser, _tmp, mut handle) = create_browser(is_debugging).await?;
println!("open about:blank...");
// `browser.new_page()` opens a tab, navigates to the destination, and waits for
// the page to load. chromiumoxide/Chrome DevTools Protocol has been flakey,
// returning `ChannelSendError`s (WEB-259). Retry if necessary.
let page = retry_async(
(),
|_| browser.new_page("about:blank"),
5,
Duration::from_millis(100),
)
.await
.context("Failed to create new browser page")?;
for binding in BINDINGS {
page.execute(AddBindingParams::new(binding)).await?;
}
let mut errors = page
.event_listener::<EventExceptionThrown>()
.await
.context("Unable to listen to exception events")?;
let mut binding_events = page
.event_listener::<EventBindingCalled>()
.await
.context("Unable to listen to binding events")?;
let mut console_events = page
.event_listener::<EventConsoleApiCalled>()
.await
.context("Unable to listen to console events")?;
let mut network_response_events = page
.event_listener::<EventResponseReceived>()
.await
.context("Unable to listen to response received events")?;
println!("start navigating to http://{addr}...");
page.evaluate_expression(format!("window.location='http://{addr}'"))
.await
.context("Unable to evaluate javascript to naviagate to target page")?;
println!("waiting for navigation...");
// Wait for the next network response event
// This is the HTML page that we're testing
network_response_events.next().await.context(
"Network events channel ended unexpectedly while waiting on the network response",
)?;
if is_debugging {
let _ = page.evaluate(
r#"console.info("%cTurbopack tests:", "font-weight: bold;", "Waiting for TURBOPACK_READY to be signaled by page...");"#,
)
.await;
}
println!("finished navigation to http://{addr}");
let mut errors_next = errors.next();
let mut bindings_next = binding_events.next();
let mut console_next = console_events.next();
let mut network_next = network_response_events.next();
let mut uncaught_exceptions = vec![];
loop {
tokio::select! {
event = &mut console_next => {
if let Some(event) = event {
println!(
"console {:?}: {}",
event.r#type,
event
.args
.iter()
.filter_map(|a| a.value.as_ref().map(|v| format!("{:?}", v)))
.collect::<Vec<_>>()
.join(", ")
);
} else {
return Err(anyhow!("Console events channel ended unexpectedly"));
}
console_next = console_events.next();
}
event = &mut errors_next => {
if let Some(event) = event {
let mut message = String::new();
let d = &event.exception_details;
writeln!(message, "{}", d.text)?;
if let Some(RemoteObject { preview: Some(ref exception), .. }) = d.exception {
if let Some(PropertyPreview{ value: Some(ref exception_message), .. }) = exception.properties.iter().find(|p| p.name == "message") {
writeln!(message, "{}", exception_message)?;
}
}
if let Some(stack_trace) = &d.stack_trace {
for frame in &stack_trace.call_frames {
writeln!(message, " at {} ({}:{}:{})", frame.function_name, frame.url, frame.line_number, frame.column_number)?;
}
}
let expected_error = message.contains("(expected error)");
let message = message.trim_end();
if !is_debugging {
if !expected_error {
uncaught_exceptions.push(message.to_string());
}
} else if expected_error {
println!("Exception throw in page:\n{}", message);
} else {
println!("Exception throw in page (this would fail the test case without TURBOPACK_DEBUG_BROWSER):\n{}", message);
}
} else {
return Err(anyhow!("Error events channel ended unexpectedly"));
}
errors_next = errors.next();
}
event = &mut bindings_next => {
if let Some(event) = event {
if let Some(run_result) = handle_binding(&page, &event, project_dir, is_debugging).await? {
return Ok(JsResult {
uncaught_exceptions,
run_result,
});
}
} else {
return Err(anyhow!("Binding events channel ended unexpectedly"));
}
bindings_next = binding_events.next();
}
event = &mut network_next => {
if let Some(event) = event {
println!("network {} [{}]", event.response.url, event.response.status);
} else {
return Err(anyhow!("Network events channel ended unexpectedly"));
}
network_next = network_response_events.next();
}
result = handle.join_next() => {
if let Some(result) = result {
result?;
} else {
return Err(anyhow!("Browser closed"));
}
}
() = tokio::time::sleep(Duration::from_secs(60)) => {
if !is_debugging {
return Err(anyhow!("Test timeout while waiting for TURBOPACK_READY"));
}
}
};
}
}
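/// Binds an ephemeral port on the IPv6 wildcard address and returns the
/// resulting local socket address.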
fn get_free_local_addr() -> Result<SocketAddr, std::io::Error> {
let socket = TcpSocket::new_v6()?;
socket.bind("[::]:0".parse().unwrap())?;
socket.local_addr()
}
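/// If the test provides a `__httpmock__` directory, starts a standalone
/// httpmock server on a free port and exports its URL via
/// `TURBOPACK_TEST_ONLY_MOCK_SERVER`; otherwise pends forever.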
async fn get_mock_server_future(mock_dir: &Path) -> Result<(), String> {
if mock_dir.exists() {
let port = get_free_local_addr().unwrap().port();
env::set_var(
"TURBOPACK_TEST_ONLY_MOCK_SERVER",
format!("http://127.0.0.1:{}", port),
);
httpmock::standalone::start_standalone_server(
port,
false,
Some(mock_dir.to_path_buf()),
false,
0,
std::future::pending(),
)
.await
} else {
std::future::pending::<Result<(), String>>().await
}
}
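/// Handles a binding call from the test page. Returns `Ok(Some(run_result))`
/// once the page reports completion via `TURBOPACK_DONE`; all other bindings
/// return `Ok(None)`.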
async fn handle_binding(
page: &Page,
event: &EventBindingCalled,
project_dir: &Path,
is_debugging: bool,
) -> Result<Option<JestRunResult>, anyhow::Error> {
match event.name.as_str() {
TURBOPACK_READY_BINDING => {
if is_debugging {
let run_tests_msg = "Entering debug mode. Run `await __jest__.run()` in the \
browser console to run tests.";
println!("\n\n{}", run_tests_msg);
page.evaluate(format!(
r#"console.info("%cTurbopack tests:", "font-weight: bold;", "{}");"#,
run_tests_msg
))
.await?;
} else {
page.evaluate_expression(
"(() => { __jest__.run().then((runResult) => \
TURBOPACK_DONE(JSON.stringify(runResult))) })()",
)
.await?;
}
}
TURBOPACK_DONE_BINDING => {
let run_result: JestRunResult = serde_json::from_str(&event.payload)?;
return Ok(Some(run_result));
}
TURBOPACK_CHANGE_FILE_BINDING => {
let change_file: ChangeFileCommand = serde_json::from_str(&event.payload)?;
let path = Path::new(&change_file.path);
// Ensure `change_file.path` can't escape the project directory.
let path = path
.components()
.filter(|c| matches!(c, std::path::Component::Normal(_)))
.collect::<std::path::PathBuf>();
let path: PathBuf = project_dir.join(path);
let mut file_contents = std::fs::read_to_string(&path)?;
if !file_contents.contains(&change_file.find) {
page.evaluate(format!(
"__turbopackFileChanged({}, new Error({}));",
serde_json::to_string(&change_file.id)?,
serde_json::to_string(&format!(
"TURBOPACK_CHANGE_FILE: file {} does not contain {}",
path.display(),
&change_file.find
))?
))
.await?;
} else {
file_contents = file_contents.replace(&change_file.find, &change_file.replace_with);
std::fs::write(&path, file_contents)?;
page.evaluate(format!(
"__turbopackFileChanged({});",
serde_json::to_string(&change_file.id)?
))
.await?;
}
}
_ => {}
};
Ok(None)
}
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ChangeFileCommand {
path: String,
id: String,
find: String,
replace_with: String,
}
#[turbo_tasks::value(shared, serialization = "none", eq = "manual", cell = "new")]
struct TestIssueReporter {
#[turbo_tasks(trace_ignore, debug_ignore)]
pub issue_tx: State<UnboundedSender<ReadRef<PlainIssue>>>,
#[turbo_tasks(trace_ignore, debug_ignore)]
pub already_printed: Mutex<HashMap<String, ()>>,
}
#[turbo_tasks::value_impl]
impl TestIssueReporter {
#[turbo_tasks::function]
fn new(issue_tx: TransientInstance<UnboundedSender<ReadRef<PlainIssue>>>) -> Vc<Self> {
TestIssueReporter {
issue_tx: State::new((*issue_tx).clone()),
already_printed: Default::default(),
}
.cell()
}
}
#[turbo_tasks::value_impl]
impl IssueReporter for TestIssueReporter {
#[turbo_tasks::function]
async fn report_issues(
&self,
captured_issues: TransientInstance<ReadRef<CapturedIssues>>,
_source: TransientValue<RawVc>,
_min_failing_severity: Vc<IssueSeverity>,
) -> Result<Vc<bool>> {
let log_options = LogOptions {
current_dir: PathBuf::new(),
project_dir: PathBuf::new(),
show_all: true,
log_detail: true,
log_level: IssueSeverity::Info,
};
let issue_tx = self.issue_tx.get_untracked().clone();
for (issue, path) in captured_issues.iter_with_shortest_path() {
let plain = NormalizedIssue(issue).cell().into_plain(path);
issue_tx.send(plain.await?)?;
let str = format_issue(&*plain.await?, None, &log_options);
if let Entry::Vacant(e) = self.already_printed.lock().entry(str) {
println!("{}", e.key());
e.insert(());
}
}
Ok(Vc::cell(false))
}
}
struct StackTraceReplacer;
impl Replacer for StackTraceReplacer {
fn replace_append(&mut self, caps: &Captures<'_>, dst: &mut String) {
let code = caps.get(2).map_or("", |m| m.as_str());
if code.starts_with("node:") {
return;
}
let mut name = caps.get(1).map_or("", |m| m.as_str());
name = name.strip_prefix("Object.").unwrap_or(name);
write!(dst, "\n at {} ({})", name, code).unwrap();
}
}
#[turbo_tasks::value(transparent)]
struct NormalizedIssue(Vc<Box<dyn Issue>>);
#[turbo_tasks::value_impl]
impl Issue for NormalizedIssue {
#[turbo_tasks::function]
fn severity(&self) -> Vc<IssueSeverity> {
self.0.severity()
}
#[turbo_tasks::function]
fn file_path(&self) -> Vc<FileSystemPath> {
self.0.file_path()
}
#[turbo_tasks::function]
fn category(&self) -> Vc<String> {
self.0.category()
}
#[turbo_tasks::function]
fn title(&self) -> Vc<String> {
self.0.title()
}
#[turbo_tasks::function]
async fn description(&self) -> Result<Vc<String>> {
let str = self.0.description().await?;
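// Strip `node:` frames, `Object.` prefixes, and bracketed suffixes from stack
// traces in the description before it is snapshotted.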
let regex1 = Regex::new(r"\n +at (.+) \((.+)\)(?: \[.+\])?").unwrap();
let regex2 = Regex::new(r"\n +at ()(.+) \[.+\]").unwrap();
let regex3 = Regex::new(r"\n +\[at .+\]").unwrap();
Ok(Vc::cell(
regex3
.replace_all(
&regex2.replace_all(
&regex1.replace_all(&str, StackTraceReplacer),
StackTraceReplacer,
),
"",
)
.to_string(),
))
}
#[turbo_tasks::function]
fn detail(&self) -> Vc<String> {
self.0.detail()
}
#[turbo_tasks::function]
fn documentation_link(&self) -> Vc<String> {
self.0.documentation_link()
}
#[turbo_tasks::function]
fn source(&self) -> Vc<OptionIssueSource> {
self.0.source()
}
#[turbo_tasks::function]
fn sub_issues(&self) -> Vc<Issues> {
self.0.sub_issues()
}
}

View file

@ -1 +0,0 @@
!node_modules

View file

@ -1,19 +0,0 @@
import type React from 'react'
import Test from '../test'
export default function Page(): React.ReactElement {
return (
<div>
<Test />
</div>
)
}
export async function generateMetadata({ params }) {
return {
title: `Page(${params.slug})`,
openGraph: {
images: new URL('../triangle-black.png', import.meta.url).pathname,
},
}
}

View file

@ -1,17 +0,0 @@
export default function RootLayout({ children }: { children: any }) {
return (
<html>
<body>{children}</body>
</html>
)
}
export const metadata = {
icons: {
icon: new URL('./triangle-black.png', import.meta.url).pathname,
},
title: {
absolute: 'RootLayout absolute',
template: '%s - RootLayout',
},
}

View file

@ -1,5 +0,0 @@
import { redirect } from 'next/navigation'
export default function Page() {
redirect('/slug_name')
}

View file

@ -1,19 +0,0 @@
'use client'
import type React from 'react'
import { useTestHarness } from '@turbo/pack-test-harness'
export default function Test(): React.ReactElement | null {
useTestHarness(() => {
it('should have the correct title set', () => {
expect(document.title).toBe('Page(slug_name) - RootLayout')
let iconMeta = document.querySelector('link[rel=icon]')
expect(iconMeta).toHaveProperty('href')
expect(iconMeta.href).toMatch(/\/_next\/static\/assets/)
let ogImageMeta = document.querySelector("meta[property='og:image']")
expect(ogImageMeta).toHaveProperty('content')
expect(ogImageMeta.content).toMatch(/\/_next\/static\/assets/)
})
})
return null
}

View file

@ -1,12 +0,0 @@
import { cookies } from 'next/headers'
export default function RootLayout({ children }: { children: any }) {
return (
<html>
<body>
{JSON.stringify(cookies(), null, 2)}
{children}
</body>
</html>
)
}

View file

@ -1,9 +0,0 @@
import Test from './test'
export default function Page() {
return (
<div>
<Test />
</div>
)
}

View file

@ -1,10 +0,0 @@
'use client'
import { useEffect } from 'react'
import { useTestHarness } from '@turbo/pack-test-harness'
export default function Test() {
useTestHarness(() => {
it('should run', () => {})
})
}

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value

View file

@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value

View file

@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value

View file

@ -1,7 +0,0 @@
export default function RootLayout({ children }: { children: any }) {
return (
<html>
<body>{children}</body>
</html>
)
}

View file

@ -1,3 +0,0 @@
export default function Loading() {
return <>Loading</>
}

View file

@ -1,9 +0,0 @@
import Test from './test'
export default function Page() {
return (
<div>
<Test />
</div>
)
}

View file

@ -1,9 +0,0 @@
'use client'
import { useTestHarness } from '@turbo/pack-test-harness'
export default function Test() {
useTestHarness(() => {
it('should run', () => {})
})
}

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value

View file

@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value

View file

@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value

View file

@ -1,7 +0,0 @@
export default function RootLayout({ children }: { children: any }) {
return (
<html>
<body>{children}</body>
</html>
)
}

View file

@ -1,16 +0,0 @@
import Test from './test'
import { unstable_cache } from 'next/cache'
const getValue = unstable_cache(async () => Math.random(), [], {
revalidate: 60,
})
export default async function Page() {
return (
<div>
<div id="value1">{await getValue()}</div>
<div id="value2">{await getValue()}</div>
<Test />
</div>
)
}

View file

@ -1,14 +0,0 @@
'use client'
import { useTestHarness } from '@turbo/pack-test-harness'
export default function Test() {
useTestHarness(() => {
it('should cache with unstable_cache', () => {
let value1 = document.getElementById('value1').textContent
let value2 = document.getElementById('value2').textContent
expect(value1).toBe(value2)
expect(value1).not.toBe('')
})
})
}

View file

@ -1,5 +0,0 @@
module.exports = {
experimental: {
appDir: true,
},
}

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value

View file

@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value

View file

@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value

View file

@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value


@ -1,7 +0,0 @@
export default function RootLayout({ children }: { children: any }) {
  return (
    <html>
      <body>{children}</body>
    </html>
  )
}


@ -1,12 +0,0 @@
import Test from './test'

export const dynamic = 'force-dynamic'

export default function Page({ searchParams }) {
  return (
    <div>
      <h1>{JSON.stringify(searchParams)}</h1>
      <Test />
    </div>
  )
}


@ -1,9 +0,0 @@
'use client'

import { useTestHarness } from '@turbo/pack-test-harness'

export default function Test() {
  useTestHarness(() => {
    it('should run', () => {})
  })
}


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value


@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value


@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value


@ -1,7 +0,0 @@
export default function RootLayout({ children }: { children: any }) {
  return (
    <html>
      <body>{children}</body>
    </html>
  )
}


@ -1,9 +0,0 @@
{
  "name": "Next.js Static Manifest",
  "short_name": "Next.js App",
  "description": "Next.js App",
  "start_url": "/",
  "display": "standalone",
  "background_color": "#fff",
  "theme_color": "#fff"
}


@ -1,9 +0,0 @@
import Test from './test'

export default function Page() {
  return (
    <div>
      <Test />
    </div>
  )
}


@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  <url>
    <loc>https://vercel.com/</loc>
    <lastmod>2023-03-06T18:04:14.008Z</lastmod>
  </url>
</urlset>


@ -1,84 +0,0 @@
'use client'

import { useTestHarness } from '@turbo/pack-test-harness'

export default function Test() {
  useTestHarness(() => {
    it('should have the correct link tags', () => {
      const links = Array.from(document.querySelectorAll('link')).filter(
        (l) => l.rel !== 'preload'
      )
      expect(
        links.map((l) => ({
          href: l.getAttribute('href'),
          rel: l.getAttribute('rel'),
          sizes: l.getAttribute('sizes'),
        }))
      ).toEqual([
        expect.objectContaining({
          rel: 'manifest',
          href: expect.stringMatching(/^\/manifest\.webmanifest$/),
          sizes: null,
        }),
        expect.objectContaining({
          rel: 'icon',
          href: expect.stringMatching(/^\/icon\d+\.png\?.+$/),
          sizes: '32x32',
        }),
        expect.objectContaining({
          rel: 'icon',
          href: expect.stringMatching(/^\/icon\d+\.png\?.+$/),
          sizes: '64x64',
        }),
        expect.objectContaining({
          rel: 'apple-touch-icon',
          href: expect.stringMatching(/^\/apple-icon\.png\?.+$/),
          sizes: '114x114',
        }),
      ])
    })

    it('should have the correct meta tags', () => {
      const meta = Array.from(document.querySelectorAll('meta'))
      const metaObject = Object.fromEntries(
        meta
          .filter((l) => l.getAttribute('property'))
          .map((l) => [l.getAttribute('property'), l.getAttribute('content')])
      )
      expect(metaObject).toEqual({
        'og:image': expect.stringMatching(/^.+\/opengraph-image\.png\?.+$/),
        'og:image:width': '114',
        'og:image:height': '114',
        'og:image:type': 'image/png',
        'og:image:alt': 'This is an alt text.',
      })
    })

    it('should provide a robots.txt', async () => {
      const res = await fetch('/robots.txt')
      expect(res.status).toBe(200)
      expect(await res.text()).toBe('User-Agent: *\nDisallow:\n')
    })

    it('should provide a sitemap.xml', async () => {
      const res = await fetch('/sitemap.xml')
      expect(res.status).toBe(200)
      expect(await res.text()).toBe(
        `<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  <url>
    <loc>https://vercel.com/</loc>
    <lastmod>2023-03-06T18:04:14.008Z</lastmod>
  </url>
</urlset>
`
      )
    })

    it('should provide a favicon.ico', async () => {
      const res = await fetch('/favicon.ico')
      expect(res.status).toBe(200)
      expect(res.headers.get('content-type')).toBe('image/x-icon')
    })
  })
}


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value


@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value


@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value


@ -1,7 +0,0 @@
export default function RootLayout({ children }: { children: any }) {
  return (
    <html>
      <body>{children}</body>
    </html>
  )
}


@ -1,9 +0,0 @@
import { Test } from './test'

export default function Page() {
  return (
    <div>
      <Test />
    </div>
  )
}


@ -1,9 +0,0 @@
'use client'

import { useTestHarness } from '@turbo/pack-test-harness'

export function Test() {
  useTestHarness(() => {
    it('should allow to import a named export from a client component', () => {})
  }, [])
}


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:18:19 lint TP1004 fs.readFile(???*0*) is very dynamic
14 | default: obj
15 | };
16 | }
17 | const nodeFs = {
+ v
18 + readFile: (f)=>_fs.default.promises.readFile(f),
+ ^
19 | readFileSync: (f)=>_fs.default.readFileSync(f),
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value


@ -1,15 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:19:23 lint TP1004 fs.readFileSync(???*0*) is very dynamic
15 | };
16 | }
17 | const nodeFs = {
18 | readFile: (f)=>_fs.default.promises.readFile(f),
+ v
19 + readFileSync: (f)=>_fs.default.readFileSync(f),
+ ^
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
- *0* f
⚠️ pattern without value


@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value


@ -1,14 +0,0 @@
warning - [parse] [project]/packages/next/dist/server/lib/node-fs-methods.js /packages/next/dist/server/lib/node-fs-methods.js:24:15 lint TP1004 fs.stat(???*0*) is very dynamic
20 | writeFile: (f, d)=>_fs.default.promises.writeFile(f, d),
21 | mkdir: (dir)=>_fs.default.promises.mkdir(dir, {
22 | recursive: true
23 | }),
+ v
24 + stat: (f)=>_fs.default.promises.stat(f)
+ ^
25 | };
26 |
27 | //# sourceMappingURL=node-fs-methods.js.map
- *0* f
⚠️ pattern without value


@ -1,11 +0,0 @@
import { cookies } from 'next/headers'

export async function GET(request: Request) {
  const cookieStore = cookies()
  const token = cookieStore.get('token')?.value ?? Math.random()

  return new Response(String(token), {
    status: 200,
    headers: { 'Set-Cookie': `token=${token}` },
  })
}


@ -1,7 +0,0 @@
export default function RootLayout({ children }: { children: any }) {
  return (
    <html>
      <body>{children}</body>
    </html>
  )
}

Some files were not shown because too many files have changed in this diff.