Revert "Next Build Turbo POC (#49942)" (#51538)

This reverts commit 7d0bdab83e.

This is failing all builds, blocking releases, so it is reverted for now
to allow further investigation asynchronously.
This commit is contained in:
JJ Kasper 2023-06-20 00:54:04 -04:00 committed by GitHub
parent 21af14dc11
commit 41ce805de5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
27 changed files with 135 additions and 1844 deletions

View file

@@ -5,10 +5,6 @@ CARGO_WORKSPACE_DIR = { value = "", relative = true }
rustdocflags = []
[target.x86_64-unknown-linux-gnu]
# Should be kept in sync with turbopack's linker
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
[target.x86_64-pc-windows-msvc]
linker = "rust-lld"

View file

@@ -52,12 +52,6 @@ runs:
echo CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse >> $GITHUB_ENV
fi
- shell: bash
run: |
: install mold linker
apt update
apt install -y mold
- name: 'Setup Rust toolchain'
uses: dtolnay/rust-toolchain@master
if: ${{ !inputs.skip-install }}

30
Cargo.lock generated
View file

@@ -3260,16 +3260,7 @@ name = "next-build"
version = "0.1.0"
dependencies = [
"anyhow",
"clap 4.1.11",
"console-subscriber",
"dunce",
"next-core",
"serde",
"serde_json",
"tokio",
"tracing",
"tracing-subscriber",
"turbo-tasks",
"turbopack-binding",
"vergen",
]
@@ -7385,7 +7376,6 @@ dependencies = [
"turbo-tasks-testing",
"turbopack",
"turbopack-bench",
"turbopack-build",
"turbopack-cli-utils",
"turbopack-core",
"turbopack-dev",
@@ -7400,26 +7390,6 @@ dependencies = [
"turbopack-test-utils",
]
[[package]]
name = "turbopack-build"
version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-230615.1#1ff1956dc18ff1805b2ac87f21f79e1abea75fc8"
dependencies = [
"anyhow",
"indexmap",
"indoc",
"serde",
"serde_json",
"serde_qs",
"turbo-tasks",
"turbo-tasks-build",
"turbo-tasks-fs",
"turbopack-core",
"turbopack-css",
"turbopack-ecmascript",
"turbopack-ecmascript-runtime",
]
[[package]]
name = "turbopack-cli-utils"
version = "0.1.0"

View file

@@ -48,9 +48,9 @@ turbo-tasks = { workspace = true }
once_cell = { workspace = true }
serde = "1"
serde_json = "1"
tracing = { workspace = true }
tracing = { version = "0.1.37" }
tracing-futures = "0.2.5"
tracing-subscriber = { workspace = true }
tracing-subscriber = "0.3.9"
tracing-chrome = "0.5.0"
turbopack-binding = { workspace = true, features = [
"__swc_core_binding_napi",

View file

@@ -1,14 +1,7 @@
use std::{
convert::{TryFrom, TryInto},
path::PathBuf,
};
use std::convert::TryFrom;
use anyhow::Context;
use napi::bindgen_prelude::*;
use next_build::{
build as turbo_next_build, build_options::BuildContext, BuildOptions as NextBuildOptions,
};
use next_core::next_config::{Rewrite, Rewrites, RouteHas};
use next_build::{next_build as turbo_next_build, NextBuildOptions};
use next_dev::{devserver_options::DevServerOptions, start_server};
use crate::util::MapErr;
@@ -22,162 +15,95 @@ pub async fn start_turbo_dev(options: Buffer) -> napi::Result<()> {
#[napi(object, object_to_js = false)]
#[derive(Debug)]
pub struct NextBuildContext {
// Added by Next.js for next build --turbo specifically.
/// The root directory of the workspace.
pub root: Option<String>,
/// The project's directory.
pub dir: Option<String>,
/// The build ID.
pub build_id: Option<String>,
/// The rewrites, as computed by Next.js.
pub rewrites: Option<NapiRewrites>,
// TODO(alexkirsz) These are detected directly by Turbopack for now.
// pub app_dir: Option<String>,
// pub pages_dir: Option<String>,
// TODO(alexkirsz) These are used to generate route types.
// pub original_rewrites: Option<Rewrites>,
// pub original_redirects: Option<Vec<Redirect>>,
pub app_dir: Option<String>,
pub pages_dir: Option<String>,
pub rewrites: Option<Rewrites>,
pub original_rewrites: Option<Rewrites>,
pub original_redirects: Option<Vec<Redirect>>,
}
impl TryFrom<NextBuildContext> for NextBuildOptions {
type Error = napi::Error;
fn try_from(value: NextBuildContext) -> Result<Self> {
Ok(Self {
dir: value.dir.map(PathBuf::try_from).transpose()?,
root: value.root.map(PathBuf::try_from).transpose()?,
log_level: None,
show_all: true,
log_detail: true,
full_stats: true,
memory_limit: None,
build_context: Some(BuildContext {
build_id: value
.build_id
.context("NextBuildContext must provide a build ID")?,
rewrites: value
.rewrites
.context("NextBuildContext must provide rewrites")?
.into(),
}),
})
}
}
/// Keep in sync with [`next_core::next_config::Rewrites`]
#[napi(object, object_to_js = false)]
#[derive(Debug)]
pub struct NapiRewrites {
pub fallback: Vec<NapiRewrite>,
pub after_files: Vec<NapiRewrite>,
pub before_files: Vec<NapiRewrite>,
pub struct Rewrites {
pub fallback: Vec<Rewrite>,
pub after_files: Vec<Rewrite>,
pub before_files: Vec<Rewrite>,
}
impl From<NapiRewrites> for Rewrites {
fn from(val: NapiRewrites) -> Self {
Rewrites {
fallback: val
.fallback
.into_iter()
.map(|rewrite| rewrite.into())
.collect(),
after_files: val
.after_files
.into_iter()
.map(|rewrite| rewrite.into())
.collect(),
before_files: val
.before_files
.into_iter()
.map(|rewrite| rewrite.into())
.collect(),
}
}
}
/// Keep in sync with [`next_core::next_config::Rewrite`]
#[napi(object, object_to_js = false)]
#[derive(Debug)]
pub struct NapiRewrite {
pub struct Rewrite {
pub source: String,
pub destination: String,
pub base_path: Option<bool>,
pub locale: Option<bool>,
pub has: Option<Vec<NapiRouteHas>>,
pub missing: Option<Vec<NapiRouteHas>>,
}
impl From<NapiRewrite> for Rewrite {
fn from(val: NapiRewrite) -> Self {
Rewrite {
source: val.source,
destination: val.destination,
base_path: val.base_path,
locale: val.locale,
has: val
.has
.map(|has| has.into_iter().map(|has| has.into()).collect()),
missing: val
.missing
.map(|missing| missing.into_iter().map(|missing| missing.into()).collect()),
}
}
}
/// Keep in sync with [`next_core::next_config::RouteHas`]
#[napi(object, object_to_js = false)]
#[derive(Debug)]
pub enum NapiRouteHas {
Header { key: String, value: Option<String> },
Query { key: String, value: Option<String> },
Cookie { key: String, value: Option<String> },
Host { value: String },
pub struct Redirect {
pub source: String,
pub destination: String,
pub permanent: Option<bool>,
pub status_code: Option<u32>,
pub has: Option<RouteHas>,
pub missing: Option<RouteHas>,
}
impl FromNapiValue for NapiRouteHas {
#[derive(Debug)]
pub struct RouteHas {
pub r#type: RouteType,
pub key: Option<String>,
pub value: Option<String>,
}
#[derive(Debug)]
pub enum RouteType {
Header,
Query,
Cookie,
Host,
}
impl TryFrom<String> for RouteType {
type Error = napi::Error;
fn try_from(value: String) -> Result<Self> {
match value.as_str() {
"header" => Ok(RouteType::Header),
"query" => Ok(RouteType::Query),
"cookie" => Ok(RouteType::Cookie),
"host" => Ok(RouteType::Host),
_ => Err(napi::Error::new(
napi::Status::InvalidArg,
"Invalid route type",
)),
}
}
}
impl FromNapiValue for RouteHas {
unsafe fn from_napi_value(env: sys::napi_env, napi_val: sys::napi_value) -> Result<Self> {
let object = Object::from_napi_value(env, napi_val)?;
let type_ = object.get_named_property::<String>("type")?;
Ok(match type_.as_str() {
"header" => NapiRouteHas::Header {
key: object.get_named_property("key")?,
value: object.get_named_property("value")?,
},
"query" => NapiRouteHas::Query {
key: object.get_named_property("key")?,
value: object.get_named_property("value")?,
},
"cookie" => NapiRouteHas::Cookie {
key: object.get_named_property("key")?,
value: object.get_named_property("value")?,
},
"host" => NapiRouteHas::Host {
value: object.get_named_property("value")?,
},
_ => {
return Err(napi::Error::new(
Status::GenericFailure,
format!("invalid type for RouteHas: {}", type_),
))
}
let r#type = object.get_named_property::<String>("type")?;
Ok(RouteHas {
r#type: RouteType::try_from(r#type)?,
key: object.get("key")?,
value: object.get("value")?,
})
}
}
impl From<NapiRouteHas> for RouteHas {
fn from(val: NapiRouteHas) -> Self {
match val {
NapiRouteHas::Header { key, value } => RouteHas::Header { key, value },
NapiRouteHas::Query { key, value } => RouteHas::Query { key, value },
NapiRouteHas::Cookie { key, value } => RouteHas::Cookie { key, value },
NapiRouteHas::Host { value } => RouteHas::Host { value },
impl From<NextBuildContext> for NextBuildOptions {
fn from(value: NextBuildContext) -> Self {
Self {
dir: value.dir,
memory_limit: None,
full_stats: None,
}
}
}
#[napi]
pub async fn next_build(ctx: NextBuildContext) -> napi::Result<()> {
turbo_next_build(ctx.try_into()?).await.convert_err()
turbo_next_build(ctx.into()).await.convert_err()
}

View file

@@ -6,67 +6,16 @@ license = "MPL-2.0"
edition = "2021"
autobenches = false
[[bin]]
name = "next-build"
path = "src/main.rs"
bench = false
required-features = ["cli"]
[lib]
bench = false
[features]
# By default, we enable native-tls for reqwest via downstream transitive features.
# This is for the convenience of running daily dev workflows, i.e running
# `cargo xxx` without explicitly specifying features, not that we want to
# promote this as default backend. Actual configuration is done when building next-swc,
# and also turbopack standalone when we have it.
default = ["cli", "custom_allocator", "native-tls"]
cli = ["clap"]
tokio_console = [
"dep:console-subscriber",
"tokio/tracing",
"turbo-tasks/tokio_tracing",
]
native-tls = ["next-core/native-tls"]
rustls-tls = ["next-core/rustls-tls"]
custom_allocator = [
"turbopack-binding/__turbo_tasks_malloc",
"turbopack-binding/__turbo_tasks_malloc_custom_allocator",
]
serializable = []
profile = []
custom_allocator = ["turbopack-binding/__turbo_tasks_malloc", "turbopack-binding/__turbo_tasks_malloc_custom_allocator"]
[dependencies]
anyhow = { workspace = true }
clap = { workspace = true, features = ["derive", "env"], optional = true }
console-subscriber = { workspace = true, optional = true }
dunce = { workspace = true }
anyhow = "1.0.47"
next-core = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
turbopack-binding = { workspace = true, features = [
"__turbo_tasks",
"__turbo_tasks_malloc",
"__turbo_tasks_memory",
"__turbo_tasks_env",
"__turbo_tasks_fs",
"__turbo_tasks_memory",
"__turbopack",
"__turbopack_build",
"__turbopack_cli_utils",
"__turbopack_core",
"__turbopack_dev",
"__turbopack_ecmascript",
"__turbopack_ecmascript_runtime",
"__turbopack_env",
"__turbopack_node",
] }
turbo-tasks = { workspace = true }
turbopack-binding = { workspace = true, features = ["__turbo_tasks", "__turbo_tasks_memory"] }
[build-dependencies]
turbopack-binding = { workspace = true, features = ["__turbo_tasks_build"] }

View file

@@ -1,39 +0,0 @@
use std::path::PathBuf;
use next_core::{next_config::Rewrites, turbopack::core::issue::IssueSeverity};
/// Options controlling a Turbopack-powered `next build` run.
///
/// Built either from CLI arguments (see the `next-build` binary's
/// `main_inner`) or from the napi `NextBuildContext` bridge.
#[derive(Clone, Debug)]
pub struct BuildOptions {
    /// The root directory of the workspace.
    pub root: Option<PathBuf>,
    /// The project's directory.
    pub dir: Option<PathBuf>,
    /// The maximum memory to use for the build.
    // NOTE(review): interpreted as MiB by the build entry point — confirm.
    pub memory_limit: Option<usize>,
    /// The log level to use for the build.
    pub log_level: Option<IssueSeverity>,
    /// Whether to show all logs.
    pub show_all: bool,
    /// Whether to show detailed logs.
    pub log_detail: bool,
    /// Whether to compute full stats.
    pub full_stats: bool,
    /// The Next.js build context.
    pub build_context: Option<BuildContext>,
}
/// Next.js-specific context forwarded from `next build --turbo`; used when
/// emitting the `_ssgManifest.js` / `_buildManifest.js` client files.
#[derive(Clone, Debug)]
pub struct BuildContext {
    /// The build id.
    pub build_id: String,
    /// Next.js config rewrites.
    pub rewrites: Rewrites,
}

View file

@@ -1,66 +1,35 @@
use turbopack_binding::turbo::{
tasks::{run_once, TransientInstance, TurboTasks},
tasks::{NothingVc, StatsType, TurboTasks, TurboTasksBackendApi},
tasks_memory::MemoryBackend,
};
pub mod build_options;
pub mod manifests;
pub(crate) mod next_build;
pub(crate) mod next_pages;
pub fn register() {
turbopack_binding::turbo::tasks::register();
include!(concat!(env!("OUT_DIR"), "/register.rs"));
}
use anyhow::Result;
use turbo_tasks::{StatsType, TurboTasksBackendApi};
pub struct NextBuildOptions {
pub dir: Option<String>,
pub memory_limit: Option<usize>,
pub full_stats: Option<bool>,
}
pub use self::build_options::BuildOptions;
pub async fn build(options: BuildOptions) -> Result<()> {
#[cfg(feature = "tokio_console")]
console_subscriber::init();
pub async fn next_build(options: NextBuildOptions) -> anyhow::Result<()> {
register();
setup_tracing();
let tt = TurboTasks::new(MemoryBackend::new(
options.memory_limit.map_or(usize::MAX, |l| l * 1024 * 1024),
));
let stats_type = match options.full_stats {
true => StatsType::Full,
false => StatsType::Essential,
Some(true) => StatsType::Full,
_ => StatsType::Essential,
};
tt.set_stats_type(stats_type);
run_once(tt, async move {
next_build::next_build(TransientInstance::new(options)).await?;
Ok(())
let task = tt.spawn_root_task(move || {
Box::pin(async move {
// run next build here
Ok(NothingVc::new().into())
})
.await?;
});
tt.wait_task_completion(task, true).await?;
Ok(())
}
fn setup_tracing() {
use tracing_subscriber::{prelude::*, EnvFilter, Registry};
let subscriber = Registry::default();
let stdout_log = tracing_subscriber::fmt::layer().pretty();
let subscriber = subscriber.with(stdout_log);
let subscriber = subscriber.with(EnvFilter::from_default_env());
subscriber.init();
}
pub fn register() {
turbopack_binding::turbo::tasks::register();
turbopack_binding::turbo::tasks_fs::register();
turbopack_binding::turbopack::turbopack::register();
turbopack_binding::turbopack::core::register();
turbopack_binding::turbopack::node::register();
turbopack_binding::turbopack::dev::register();
turbopack_binding::turbopack::build::register();
next_core::register();
include!(concat!(env!("OUT_DIR"), "/register.rs"));
}

View file

@@ -1,102 +0,0 @@
use std::path::PathBuf;
use anyhow::Result;
use clap::Parser;
use next_build::BuildOptions;
use turbopack_binding::turbopack::cli_utils::issue::IssueSeverityCliOption;
// Install Turbopack's custom allocator as the process-wide global allocator.
#[global_allocator]
static ALLOC: turbopack_binding::turbo::malloc::TurboMalloc =
    turbopack_binding::turbo::malloc::TurboMalloc;
/// Command-line arguments for the standalone `next-build` binary.
#[derive(Debug, Parser)]
#[clap(author, version, about, long_about = None)]
pub struct BuildCliArgs {
    /// The directory of the Next.js application.
    /// If no directory is provided, the current directory will be used.
    #[clap(value_parser)]
    pub dir: Option<PathBuf>,

    /// The root directory of the project. Nothing outside of this directory can
    /// be accessed. e. g. the monorepo root.
    /// If no directory is provided, `dir` will be used.
    #[clap(long, value_parser)]
    pub root: Option<PathBuf>,

    /// Display version of the binary. Noop if used in library mode.
    #[clap(long)]
    pub display_version: bool,

    /// Filter by issue severity.
    #[clap(short, long)]
    pub log_level: Option<IssueSeverityCliOption>,

    /// Show all log messages without limit.
    #[clap(long)]
    pub show_all: bool,

    /// Expand the log details.
    #[clap(long)]
    pub log_detail: bool,

    /// Whether to enable full task stats recording in Turbo Engine.
    #[clap(long)]
    pub full_stats: bool,

    /// Enable experimental garbage collection with the provided memory limit in
    /// MB.
    #[clap(long)]
    pub memory_limit: Option<usize>,
}
/// Binary entry point: constructs a multi-threaded tokio runtime — notifying
/// `TurboMalloc` whenever a worker thread stops — and drives `main_inner`
/// to completion, panicking on failure.
fn main() {
    use turbopack_binding::turbo::malloc::TurboMalloc;

    let runtime = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .on_thread_stop(|| {
            TurboMalloc::thread_stop();
        })
        .build()
        .unwrap();

    runtime.block_on(main_inner()).unwrap()
}
/// Parses CLI arguments and runs the build.
///
/// When `--display-version` is passed, prints the vergen build metadata
/// (compiled in via `option_env!`) and returns without building.
async fn main_inner() -> Result<()> {
    let args = BuildCliArgs::parse();

    if args.display_version {
        // Note: enabling git causes trouble with aarch64 linux builds with libz-sys
        println!(
            "Build Timestamp\t\t{:#?}",
            option_env!("VERGEN_BUILD_TIMESTAMP").unwrap_or_else(|| "N/A")
        );
        println!(
            "Build Version\t\t{:#?}",
            option_env!("VERGEN_BUILD_SEMVER").unwrap_or_else(|| "N/A")
        );
        println!(
            "Cargo Target Triple\t{:#?}",
            option_env!("VERGEN_CARGO_TARGET_TRIPLE").unwrap_or_else(|| "N/A")
        );
        println!(
            "Cargo Profile\t\t{:#?}",
            option_env!("VERGEN_CARGO_PROFILE").unwrap_or_else(|| "N/A")
        );

        return Ok(());
    }

    next_build::build(BuildOptions {
        dir: args.dir,
        root: args.root,
        memory_limit: args.memory_limit,
        log_level: args.log_level.map(|l| l.0),
        show_all: args.show_all,
        log_detail: args.log_detail,
        full_stats: args.full_stats,
        // Only populated via the napi bridge (`next build --turbo`), never
        // from the CLI.
        build_context: None,
    })
    .await
}

View file

@@ -1,179 +0,0 @@
//! Type definitions for the Next.js manifest formats.
use std::collections::HashMap;
use next_core::next_config::Rewrites;
use serde::Serialize;
/// `server/pages-manifest.json`: maps page pathnames to server chunk paths.
#[derive(Serialize, Default, Debug)]
pub struct PagesManifest {
    #[serde(flatten)]
    pub pages: HashMap<String, String>,
}
/// `build-manifest.json`: per-page client asset lists plus shared file groups.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct BuildManifest {
    pub dev_files: Vec<String>,
    pub amp_dev_files: Vec<String>,
    pub polyfill_files: Vec<String>,
    pub low_priority_files: Vec<String>,
    pub root_main_files: Vec<String>,
    pub pages: HashMap<String, Vec<String>>,
    pub amp_first_pages: Vec<String>,
}
/// `server/middleware-manifest.json`, versioned via the serialized
/// `version` tag field.
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase", tag = "version")]
pub enum MiddlewaresManifest {
    #[serde(rename = "2")]
    MiddlewaresManifestV2(MiddlewaresManifestV2),
    #[serde(other)]
    Unsupported,
}
impl Default for MiddlewaresManifest {
    // Default to an empty v2 manifest rather than `Unsupported`.
    fn default() -> Self {
        Self::MiddlewaresManifestV2(Default::default())
    }
}
/// Version 2 payload of the middlewares manifest. All fields use unit
/// value types here, so it can only serialize as an empty placeholder.
#[derive(Serialize, Default, Debug)]
pub struct MiddlewaresManifestV2 {
    pub sorted_middleware: Vec<()>,
    pub middleware: HashMap<String, ()>,
    pub functions: HashMap<String, ()>,
}
/// `react-loadable-manifest.json`: keyed map of dynamic-import entries.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ReactLoadableManifest {
    #[serde(flatten)]
    pub manifest: HashMap<String, ReactLoadableManifestEntry>,
}
/// One react-loadable entry: a numeric id plus its associated files.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ReactLoadableManifestEntry {
    pub id: u32,
    pub files: Vec<String>,
}
/// `server/next-font-manifest.json`: font files per pages/app route.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct NextFontManifest {
    pub pages: HashMap<String, Vec<String>>,
    pub app: HashMap<String, Vec<String>>,
    pub app_using_size_adjust: bool,
    pub pages_using_size_adjust: bool,
}
/// `server/app-paths-manifest.json`: app-dir route-to-chunk mappings.
// NOTE(review): both fields are `#[serde(flatten)]`ed map-backed structs,
// so their keys merge into one JSON object — confirm edge/node paths can
// never collide.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct AppPathsManifest {
    #[serde(flatten)]
    pub edge_server_app_paths: PagesManifest,
    #[serde(flatten)]
    pub node_server_app_paths: PagesManifest,
}
/// `server/server-reference-manifest.json`: server-action references.
// NOTE(review): two flattened `ActionManifest`s merge into one JSON
// object — confirm node/edge action ids cannot collide.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ServerReferenceManifest {
    #[serde(flatten)]
    pub server_actions: ActionManifest,
    #[serde(flatten)]
    pub edge_server_actions: ActionManifest,
}
/// Keyed map of server-action entries, flattened into the parent object.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ActionManifest {
    #[serde(flatten)]
    pub actions: HashMap<String, ActionManifestEntry>,
}
/// One server action: the workers that can execute it, keyed by name.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ActionManifestEntry {
    pub workers: HashMap<String, ActionManifestWorkerEntry>,
}
/// Worker reference serialized untagged: either a string or a number.
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum ActionManifestWorkerEntry {
    String(String),
    Number(f64),
}
/// `server/client-reference-manifest.json`: client-component module maps.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ClientReferenceManifest {
    pub client_modules: ManifestNode,
    pub ssr_module_mapping: HashMap<String, ManifestNode>,
    // camelCase would yield "edgeSsrModuleMapping"; Next.js expects "SSR".
    #[serde(rename = "edgeSSRModuleMapping")]
    pub edge_ssr_module_mapping: HashMap<String, ManifestNode>,
    pub css_files: HashMap<String, Vec<String>>,
}
/// `server/flight-server-css-manifest.json`: CSS imports/modules per key.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ClientCssReferenceManifest {
    pub css_imports: HashMap<String, Vec<String>>,
    pub css_modules: HashMap<String, Vec<String>>,
}
/// Map of module exports, flattened into the enclosing JSON object.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ManifestNode {
    #[serde(flatten)]
    pub module_exports: HashMap<String, ManifestNodeEntry>,
}
/// One exported binding: module id, export name, and its chunk files.
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ManifestNodeEntry {
    pub id: ModuleId,
    pub name: String,
    pub chunks: Vec<String>,
    // `async` is a Rust keyword, hence the raw identifier.
    pub r#async: bool,
}
/// Module id serialized untagged: either a string or a number.
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum ModuleId {
    String(String),
    Number(f64),
}
/// `server/font-manifest.json`: newtype over a list of font entries.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FontManifest(pub Vec<FontManifestEntry>);
/// One font record: its URL and inlined content.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FontManifestEntry {
    pub url: String,
    pub content: String,
}
/// `app-build-manifest.json`: client files per app-dir route.
#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
pub struct AppBuildManifest {
    pub pages: HashMap<String, Vec<String>>,
}
// TODO(alexkirsz) Unify with the one for dev.
/// Payload embedded in `static/{build_id}/_buildManifest.js`; borrows the
/// rewrites and sorted page list from the caller to avoid copies.
#[derive(Serialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct ClientBuildManifest<'a> {
    #[serde(rename = "__rewrites")]
    pub rewrites: &'a Rewrites,
    pub sorted_pages: &'a [String],
    #[serde(flatten)]
    pub pages: HashMap<String, Vec<&'a str>>,
}

View file

@@ -1,579 +0,0 @@
use std::{
collections::{HashMap, HashSet},
env::current_dir,
path::{PathBuf, MAIN_SEPARATOR},
};
use anyhow::{anyhow, Context, Result};
use dunce::canonicalize;
use next_core::{
self, next_config::load_next_config, pages_structure::find_pages_structure,
turbopack::ecmascript::utils::StringifyJs, url_node::get_sorted_routes,
};
use serde::Serialize;
use turbo_tasks::{
graph::{GraphTraversal, ReverseTopological},
CollectiblesSource, CompletionVc, RawVc, TransientInstance, TransientValue, TryJoinIterExt,
ValueToString,
};
use turbopack_binding::{
turbo::tasks_fs::{DiskFileSystemVc, FileContent, FileSystem, FileSystemPathVc, FileSystemVc},
turbopack::{
cli_utils::issue::{ConsoleUiVc, LogOptions},
core::{
asset::{Asset, AssetVc, AssetsVc},
environment::ServerAddrVc,
issue::{IssueReporter, IssueReporterVc, IssueSeverity, IssueVc},
reference::AssetReference,
virtual_fs::VirtualFileSystemVc,
},
dev::DevChunkingContextVc,
env::dotenv::load_env,
node::execution_context::ExecutionContextVc,
turbopack::evaluate_context::node_build_environment,
},
};
use crate::{
build_options::{BuildContext, BuildOptions},
manifests::{
AppBuildManifest, AppPathsManifest, BuildManifest, ClientBuildManifest,
ClientCssReferenceManifest, ClientReferenceManifest, FontManifest, MiddlewaresManifest,
NextFontManifest, PagesManifest, ReactLoadableManifest, ServerReferenceManifest,
},
next_pages::page_chunks::get_page_chunks,
};
#[turbo_tasks::function]
pub(crate) async fn next_build(options: TransientInstance<BuildOptions>) -> Result<CompletionVc> {
let project_root = options
.dir
.as_ref()
.map(canonicalize)
.unwrap_or_else(current_dir)
.context("project directory can't be found")?
.to_str()
.context("project directory contains invalid characters")?
.to_string();
let workspace_root = if let Some(root) = options.root.as_ref() {
canonicalize(root)
.context("root directory can't be found")?
.to_str()
.context("root directory contains invalid characters")?
.to_string()
} else {
project_root.clone()
};
let browserslist_query = "last 1 Chrome versions, last 1 Firefox versions, last 1 Safari \
versions, last 1 Edge versions";
let log_options = LogOptions {
project_dir: PathBuf::from(project_root.clone()),
current_dir: current_dir().unwrap(),
show_all: options.show_all,
log_detail: options.log_detail,
log_level: options.log_level.unwrap_or(IssueSeverity::Warning),
};
let issue_reporter: IssueReporterVc =
ConsoleUiVc::new(TransientInstance::new(log_options)).into();
let node_fs = node_fs(&project_root, issue_reporter);
let node_root = node_fs.root().join(".next");
let client_fs = client_fs(&project_root, issue_reporter);
let client_root = client_fs.root().join(".next");
// TODO(alexkirsz) This should accept a URL for assetPrefix.
// let client_public_fs = VirtualFileSystemVc::new();
// let client_public_root = client_public_fs.root();
let workspace_fs = workspace_fs(&workspace_root, issue_reporter);
let project_relative = project_root.strip_prefix(&workspace_root).unwrap();
let project_relative = project_relative
.strip_prefix(MAIN_SEPARATOR)
.unwrap_or(project_relative)
.replace(MAIN_SEPARATOR, "/");
let project_root = workspace_fs.root().join(&project_relative);
let next_router_fs = VirtualFileSystemVc::new().as_file_system();
let next_router_root = next_router_fs.root();
let build_chunking_context = DevChunkingContextVc::builder(
project_root,
node_root,
node_root.join("chunks"),
node_root.join("assets"),
node_build_environment(),
)
.build();
let env = load_env(project_root);
// TODO(alexkirsz) Should this accept `node_root` at all?
let execution_context = ExecutionContextVc::new(project_root, build_chunking_context, env);
let next_config = load_next_config(execution_context.with_layer("next_config"));
let pages_structure = find_pages_structure(project_root, next_router_root, next_config);
let page_chunks = get_page_chunks(
pages_structure,
project_root,
execution_context,
node_root,
client_root,
env,
browserslist_query,
next_config,
ServerAddrVc::empty(),
);
handle_issues(page_chunks, issue_reporter).await?;
let filter_pages = std::env::var("NEXT_TURBO_FILTER_PAGES");
let filter_pages = filter_pages
.as_ref()
.ok()
.map(|filter| filter.split(',').collect::<HashSet<_>>());
let filter_pages = filter_pages.as_ref();
{
// Client manifest.
let mut build_manifest: BuildManifest = Default::default();
// Server manifest.
let mut pages_manifest: PagesManifest = Default::default();
let build_manifest_path = client_root.join("build-manifest.json");
let pages_manifest_path = node_root.join("server/pages-manifest.json");
let page_chunks_and_url = page_chunks
.await?
.iter()
.map(|page_chunk| async move {
let page_chunk = page_chunk.await?;
let pathname = page_chunk.pathname.await?;
if let Some(filter_pages) = &filter_pages {
if !filter_pages.contains(pathname.as_str()) {
return Ok(None);
}
}
// We can't use partitioning for client assets as client assets might be created
// by non-client assets referred from client assets.
// Although this should perhaps be enforced by Turbopack semantics.
let all_node_assets: Vec<_> = all_assets_from_entry(page_chunk.node_chunk)
.await?
.iter()
.map(|asset| async move {
Ok((
asset.ident().path().await?.is_inside(&*node_root.await?),
asset,
))
})
.try_join()
.await?
.into_iter()
.filter_map(|(is_inside, asset)| if is_inside { Some(*asset) } else { None })
.collect();
let client_chunks = page_chunk.client_chunks;
// We can't use partitioning for client assets as client assets might be created
// by non-client assets referred from client assets.
// Although this should perhaps be enforced by Turbopack semantics.
let all_client_assets: Vec<_> = all_assets_from_entries(client_chunks)
.await?
.iter()
.map(|asset| async move {
Ok((
asset.ident().path().await?.is_inside(&*client_root.await?),
asset,
))
})
.try_join()
.await?
.into_iter()
.filter_map(|(is_inside, asset)| if is_inside { Some(*asset) } else { None })
.collect();
Ok(Some((
pathname,
page_chunk.node_chunk,
all_node_assets,
client_chunks,
all_client_assets,
)))
})
.try_join()
.await?
.into_iter()
.flatten()
.collect::<Vec<_>>();
{
let build_manifest_dir_path = build_manifest_path.parent().await?;
let pages_manifest_dir_path = pages_manifest_path.parent().await?;
let mut deduplicated_node_assets = HashMap::new();
let mut deduplicated_client_assets = HashMap::new();
// TODO(alexkirsz) We want all assets to emit them to the output directory, but
// we only want runtime assets in the manifest. Furthermore, the pages
// manifest (server) only wants a single runtime asset, so we need to
// bundle node assets somewhat.
for (pathname, node_chunk, all_node_assets, client_chunks, all_client_assets) in
page_chunks_and_url
{
tracing::debug!("pathname: {}", pathname.to_string(),);
tracing::debug!(
"node chunk: {}",
node_chunk.ident().path().to_string().await?
);
tracing::debug!(
"client_chunks:\n{}",
client_chunks
.await?
.iter()
.map(|chunk| async move {
Ok(format!(" - {}", chunk.ident().path().to_string().await?))
})
.try_join()
.await?
.join("\n")
);
// TODO(alexkirsz) Deduplication should not happen at this level, but
// right now we have chunks with the same path being generated
// from different entrypoints, and writing them multiple times causes
// an infinite invalidation loop.
deduplicated_node_assets.extend(
all_node_assets
.into_iter()
.map(|asset| async move { Ok((asset.ident().path().to_string().await?, asset)) })
.try_join()
.await?,
);
deduplicated_client_assets.extend(
all_client_assets
.into_iter()
.map(|asset| async move { Ok((asset.ident().path().to_string().await?, asset)) })
.try_join()
.await?
);
let build_manifest_pages_entry = build_manifest
.pages
.entry(pathname.clone_value())
.or_default();
for chunk in client_chunks.await?.iter() {
let chunk_path = chunk.ident().path().await?;
if let Some(asset_path) = build_manifest_dir_path.get_path_to(&chunk_path) {
build_manifest_pages_entry.push(asset_path.to_string());
}
}
let chunk_path = node_chunk.ident().path().await?;
if let Some(asset_path) = pages_manifest_dir_path.get_path_to(&chunk_path) {
pages_manifest
.pages
.insert(pathname.clone_value(), asset_path.to_string());
}
}
tracing::debug!(
"all node assets: {}",
deduplicated_node_assets
.values()
.map(|asset| async move {
Ok(format!(" - {}", asset.ident().path().to_string().await?))
})
.try_join()
.await?
.join("\n")
);
deduplicated_node_assets
.into_values()
.map(|asset| async move {
emit(asset).await?;
Ok(())
})
.try_join()
.await?;
tracing::debug!(
"all client assets: {}",
deduplicated_client_assets
.values()
.map(|asset| async move {
Ok(format!(" - {}", asset.ident().path().to_string().await?))
})
.try_join()
.await?
.join("\n")
);
deduplicated_client_assets
.into_values()
.map(|asset| async move {
emit(asset).await?;
Ok(())
})
.try_join()
.await?;
}
write_placeholder_manifest(
&MiddlewaresManifest::default(),
node_root,
"server/middleware-manifest.json",
)
.await?;
write_placeholder_manifest(
&NextFontManifest::default(),
node_root,
"server/next-font-manifest.json",
)
.await?;
write_placeholder_manifest(
&FontManifest::default(),
node_root,
"server/font-manifest.json",
)
.await?;
write_placeholder_manifest(
&AppPathsManifest::default(),
node_root,
"server/app-paths-manifest.json",
)
.await?;
write_placeholder_manifest(
&ServerReferenceManifest::default(),
node_root,
"server/server-reference-manifest.json",
)
.await?;
write_placeholder_manifest(
&ClientReferenceManifest::default(),
node_root,
"server/client-reference-manifest.json",
)
.await?;
write_placeholder_manifest(
&ClientCssReferenceManifest::default(),
node_root,
"server/flight-server-css-manifest.json",
)
.await?;
write_placeholder_manifest(
&ReactLoadableManifest::default(),
node_root,
"react-loadable-manifest.json",
)
.await?;
write_placeholder_manifest(
&AppBuildManifest::default(),
node_root,
"app-build-manifest.json",
)
.await?;
if let Some(build_context) = &options.build_context {
let BuildContext { build_id, rewrites } = build_context;
tracing::debug!("writing _ssgManifest.js for build id: {}", build_id);
let ssg_manifest_path = format!("static/{build_id}/_ssgManifest.js");
let ssg_manifest_fs_path = node_root.join(&ssg_manifest_path);
ssg_manifest_fs_path
.write(
FileContent::Content(
"self.__SSG_MANIFEST=new \
Set;self.__SSG_MANIFEST_CB&&self.__SSG_MANIFEST_CB()"
.into(),
)
.cell(),
)
.await?;
build_manifest.low_priority_files.push(ssg_manifest_path);
let sorted_pages =
get_sorted_routes(&pages_manifest.pages.keys().cloned().collect::<Vec<_>>())?;
let app_dependencies: HashSet<&str> = pages_manifest
.pages
.get("/_app")
.iter()
.map(|s| s.as_str())
.collect();
let mut pages = HashMap::new();
for page in &sorted_pages {
if page == "_app" {
continue;
}
let dependencies = pages_manifest
.pages
.get(page)
.iter()
.map(|dep| dep.as_str())
.filter(|dep| !app_dependencies.contains(*dep))
.collect::<Vec<_>>();
if !dependencies.is_empty() {
pages.insert(page.to_string(), dependencies);
}
}
let client_manifest = ClientBuildManifest {
rewrites,
sorted_pages: &sorted_pages,
pages,
};
let client_manifest_path = format!("static/{build_id}/_buildManifest.js");
let client_manifest_fs_path = node_root.join(&client_manifest_path);
client_manifest_fs_path
.write(
FileContent::Content(
format!(
"self.__BUILD_MANIFEST={};self.__BUILD_MANIFEST_CB && \
self.__BUILD_MANIFEST_CB()",
StringifyJs(&client_manifest)
)
.into(),
)
.cell(),
)
.await?;
build_manifest.low_priority_files.push(client_manifest_path);
}
// TODO(alexkirsz) These manifests should be assets.
let build_manifest_contents = serde_json::to_string_pretty(&build_manifest)?;
let pages_manifest_contents = serde_json::to_string_pretty(&pages_manifest)?;
build_manifest_path
.write(FileContent::Content(build_manifest_contents.into()).cell())
.await?;
pages_manifest_path
.write(FileContent::Content(pages_manifest_contents.into()).cell())
.await?;
}
Ok(CompletionVc::immutable())
}
#[turbo_tasks::function]
fn emit(asset: AssetVc) -> CompletionVc {
asset.content().write(asset.ident().path())
}
#[turbo_tasks::function]
async fn workspace_fs(
workspace_root: &str,
issue_reporter: IssueReporterVc,
) -> Result<FileSystemVc> {
let disk_fs = DiskFileSystemVc::new("workspace".to_string(), workspace_root.to_string());
handle_issues(disk_fs, issue_reporter).await?;
Ok(disk_fs.into())
}
#[turbo_tasks::function]
async fn node_fs(node_root: &str, issue_reporter: IssueReporterVc) -> Result<FileSystemVc> {
let disk_fs = DiskFileSystemVc::new("node".to_string(), node_root.to_string());
handle_issues(disk_fs, issue_reporter).await?;
Ok(disk_fs.into())
}
#[turbo_tasks::function]
async fn client_fs(client_root: &str, issue_reporter: IssueReporterVc) -> Result<FileSystemVc> {
let disk_fs = DiskFileSystemVc::new("client".to_string(), client_root.to_string());
handle_issues(disk_fs, issue_reporter).await?;
Ok(disk_fs.into())
}
async fn handle_issues<T: Into<RawVc> + CollectiblesSource + Copy>(
source: T,
issue_reporter: IssueReporterVc,
) -> Result<()> {
let issues = IssueVc::peek_issues_with_path(source)
.await?
.strongly_consistent()
.await?;
let has_fatal = issue_reporter.report_issues(
TransientInstance::new(issues.clone()),
TransientValue::new(source.into()),
);
if *has_fatal.await? {
Err(anyhow!("Fatal issue(s) occurred"))
} else {
Ok(())
}
}
/// Walks the asset graph from a single asset and collect all referenced assets.
#[turbo_tasks::function]
async fn all_assets_from_entry(entry: AssetVc) -> Result<AssetsVc> {
Ok(AssetsVc::cell(
ReverseTopological::new()
.skip_duplicates()
.visit([entry], get_referenced_assets)
.await
.completed()?
.into_inner()
.into_iter()
.collect(),
))
}
/// Walks the asset graph from multiple assets and collect all referenced
/// assets.
#[turbo_tasks::function]
async fn all_assets_from_entries(entries: AssetsVc) -> Result<AssetsVc> {
Ok(AssetsVc::cell(
ReverseTopological::new()
.skip_duplicates()
.visit(entries.await?.iter().copied(), get_referenced_assets)
.await
.completed()?
.into_inner()
.into_iter()
.collect(),
))
}
/// Computes the list of all chunk children of a given chunk.
async fn get_referenced_assets(asset: AssetVc) -> Result<impl Iterator<Item = AssetVc> + Send> {
Ok(asset
.references()
.await?
.iter()
.map(|reference| async move {
let primary_assets = reference.resolve_reference().primary_assets().await?;
Ok(primary_assets.clone_value())
})
.try_join()
.await?
.into_iter()
.flatten())
}
async fn write_placeholder_manifest<T>(
manifest: &T,
node_root: FileSystemPathVc,
path: &str,
) -> Result<()>
where
T: Serialize,
{
let json = serde_json::to_string_pretty(manifest)?;
let node_path = node_root.join(path);
node_path
.write(FileContent::Content(json.into()).cell())
.await?;
Ok(())
}

View file

@ -1,88 +0,0 @@
use anyhow::{bail, Result};
use next_core::{
create_page_loader_entry_asset,
turbopack::core::{asset::AssetsVc, chunk::EvaluatableAssetsVc},
};
use turbopack_binding::{
turbo::{
tasks::{primitives::StringVc, Value},
tasks_fs::FileSystemPathVc,
},
turbopack::{
core::{
asset::AssetVc,
chunk::{ChunkableAsset, ChunkingContext, ChunkingContextVc},
context::{AssetContext, AssetContextVc},
reference_type::ReferenceType,
},
dev::DevChunkingContextVc,
ecmascript::EcmascriptModuleAssetVc,
},
};
#[turbo_tasks::value]
pub(crate) struct PagesBuildClientContext {
project_root: FileSystemPathVc,
client_root: FileSystemPathVc,
client_asset_context: AssetContextVc,
client_runtime_entries: EvaluatableAssetsVc,
}
#[turbo_tasks::value_impl]
impl PagesBuildClientContextVc {
#[turbo_tasks::function]
pub fn new(
project_root: FileSystemPathVc,
client_root: FileSystemPathVc,
client_asset_context: AssetContextVc,
client_runtime_entries: EvaluatableAssetsVc,
) -> PagesBuildClientContextVc {
PagesBuildClientContext {
project_root,
client_root,
client_asset_context,
client_runtime_entries,
}
.cell()
}
#[turbo_tasks::function]
async fn client_chunking_context(self) -> Result<ChunkingContextVc> {
let this = self.await?;
Ok(DevChunkingContextVc::builder(
this.project_root,
this.client_root,
this.client_root.join("static/chunks"),
this.client_root.join("static/media"),
this.client_asset_context.compile_time_info().environment(),
)
.build())
}
#[turbo_tasks::function]
pub async fn client_chunk(
self,
asset: AssetVc,
pathname: StringVc,
reference_type: Value<ReferenceType>,
) -> Result<AssetsVc> {
let this = self.await?;
let client_asset_page = this.client_asset_context.process(asset, reference_type);
let client_asset_page =
create_page_loader_entry_asset(this.client_asset_context, client_asset_page, pathname);
let Some(client_module_asset) = EcmascriptModuleAssetVc::resolve_from(client_asset_page).await? else {
bail!("Expected an EcmaScript module asset");
};
let client_chunking_context = self.client_chunking_context();
Ok(client_chunking_context.evaluated_chunk_group(
client_module_asset.as_root_chunk(client_chunking_context),
this.client_runtime_entries
.with_entry(client_module_asset.into()),
))
}
}

View file

@ -1,3 +0,0 @@
pub(crate) mod client_context;
pub(crate) mod node_context;
pub(crate) mod page_chunks;

View file

@ -1,100 +0,0 @@
use anyhow::{bail, Result};
use next_core::{next_client::RuntimeEntriesVc, turbopack::core::chunk::EvaluatableAssetsVc};
use turbopack_binding::{
turbo::{tasks::Value, tasks_fs::FileSystemPathVc},
turbopack::{
build::BuildChunkingContextVc,
core::{
asset::AssetVc,
context::{AssetContext, AssetContextVc},
reference_type::{EntryReferenceSubType, ReferenceType},
resolve::{parse::RequestVc, pattern::QueryMapVc},
},
ecmascript::EcmascriptModuleAssetVc,
},
};
#[turbo_tasks::value]
pub(crate) struct PagesBuildNodeContext {
project_root: FileSystemPathVc,
node_root: FileSystemPathVc,
node_asset_context: AssetContextVc,
node_runtime_entries: EvaluatableAssetsVc,
}
#[turbo_tasks::value_impl]
impl PagesBuildNodeContextVc {
#[turbo_tasks::function]
pub fn new(
project_root: FileSystemPathVc,
node_root: FileSystemPathVc,
node_asset_context: AssetContextVc,
node_runtime_entries: RuntimeEntriesVc,
) -> PagesBuildNodeContextVc {
PagesBuildNodeContext {
project_root,
node_root,
node_asset_context,
node_runtime_entries: node_runtime_entries.resolve_entries(node_asset_context),
}
.cell()
}
#[turbo_tasks::function]
pub async fn resolve_module(
self,
origin: FileSystemPathVc,
package: String,
path: String,
) -> Result<AssetVc> {
let this = self.await?;
let Some(asset) = this
.node_asset_context
.resolve_asset(
origin,
RequestVc::module(package.clone(), Value::new(path.clone().into()), QueryMapVc::none()),
this.node_asset_context.resolve_options(origin, Value::new(ReferenceType::Entry(EntryReferenceSubType::Page))),
Value::new(ReferenceType::Entry(EntryReferenceSubType::Page))
)
.primary_assets()
.await?
.first()
.copied()
else {
bail!("module {}/{} not found in {}", package, path, origin.await?);
};
Ok(asset)
}
#[turbo_tasks::function]
async fn node_chunking_context(self) -> Result<BuildChunkingContextVc> {
let this = self.await?;
Ok(BuildChunkingContextVc::builder(
this.project_root,
this.node_root,
this.node_root.join("server/pages"),
this.node_root.join("server/assets"),
this.node_asset_context.compile_time_info().environment(),
)
.build())
}
#[turbo_tasks::function]
pub async fn node_chunk(
self,
asset: AssetVc,
reference_type: Value<ReferenceType>,
) -> Result<AssetVc> {
let this = self.await?;
let node_asset_page = this.node_asset_context.process(asset, reference_type);
let Some(node_module_asset) = EcmascriptModuleAssetVc::resolve_from(node_asset_page).await? else {
bail!("Expected an EcmaScript module asset");
};
let chunking_context = self.node_chunking_context();
Ok(chunking_context.generate_exported_chunk(node_module_asset, this.node_runtime_entries))
}
}

View file

@ -1,355 +0,0 @@
use anyhow::Result;
use next_core::{
env::env_for_js,
mode::NextMode,
next_client::{
get_client_compile_time_info, get_client_module_options_context,
get_client_resolve_options_context, get_client_runtime_entries, ClientContextType,
RuntimeEntriesVc, RuntimeEntry,
},
next_client_chunks::NextClientChunksTransitionVc,
next_config::NextConfigVc,
next_server::{
get_server_compile_time_info, get_server_module_options_context,
get_server_resolve_options_context, ServerContextType,
},
pages_structure::{
OptionPagesStructureVc, PagesDirectoryStructure, PagesDirectoryStructureVc, PagesStructure,
PagesStructureItem, PagesStructureVc,
},
pathname_for_path,
turbopack::core::asset::AssetsVc,
PathType,
};
use turbopack_binding::{
turbo::{
tasks::{primitives::StringVc, Value},
tasks_env::ProcessEnvVc,
tasks_fs::FileSystemPathVc,
},
turbopack::{
core::{
asset::AssetVc,
context::AssetContextVc,
environment::ServerAddrVc,
reference_type::{EntryReferenceSubType, ReferenceType},
source_asset::SourceAssetVc,
},
env::ProcessEnvAssetVc,
node::execution_context::ExecutionContextVc,
turbopack::{transition::TransitionsByNameVc, ModuleAssetContextVc},
},
};
use super::{client_context::PagesBuildClientContextVc, node_context::PagesBuildNodeContextVc};
#[turbo_tasks::value(transparent)]
pub struct PageChunks(Vec<PageChunkVc>);
#[turbo_tasks::value_impl]
impl PageChunksVc {
#[turbo_tasks::function]
pub fn empty() -> Self {
PageChunks(vec![]).cell()
}
}
/// Returns a list of page chunks.
#[turbo_tasks::function]
pub async fn get_page_chunks(
pages_structure: OptionPagesStructureVc,
project_root: FileSystemPathVc,
execution_context: ExecutionContextVc,
node_root: FileSystemPathVc,
client_root: FileSystemPathVc,
env: ProcessEnvVc,
browserslist_query: &str,
next_config: NextConfigVc,
node_addr: ServerAddrVc,
) -> Result<PageChunksVc> {
let Some(pages_structure) = *pages_structure.await? else {
return Ok(PageChunksVc::empty());
};
let pages_dir = pages_structure.project_path().resolve().await?;
let mode = NextMode::Build;
let client_ty = Value::new(ClientContextType::Pages { pages_dir });
let node_ty = Value::new(ServerContextType::Pages { pages_dir });
let client_compile_time_info = get_client_compile_time_info(mode, browserslist_query);
let transitions = TransitionsByNameVc::cell(
[(
// This is necessary for the next dynamic transform to work.
// TODO(alexkirsz) Should accept client chunking context? But how do we get this?
"next-client-chunks".to_string(),
NextClientChunksTransitionVc::new(
project_root,
execution_context,
client_ty,
mode,
client_root,
client_compile_time_info,
next_config,
)
.into(),
)]
.into_iter()
.collect(),
);
let client_module_options_context = get_client_module_options_context(
project_root,
execution_context,
client_compile_time_info.environment(),
client_ty,
mode,
next_config,
);
let client_resolve_options_context = get_client_resolve_options_context(
project_root,
client_ty,
mode,
next_config,
execution_context,
);
let client_asset_context: AssetContextVc = ModuleAssetContextVc::new(
transitions,
client_compile_time_info,
client_module_options_context,
client_resolve_options_context,
)
.into();
let node_compile_time_info = get_server_compile_time_info(node_ty, mode, env, node_addr);
let node_resolve_options_context = get_server_resolve_options_context(
project_root,
node_ty,
mode,
next_config,
execution_context,
);
let node_module_options_context = get_server_module_options_context(
project_root,
execution_context,
node_ty,
mode,
next_config,
);
let node_asset_context = ModuleAssetContextVc::new(
transitions,
node_compile_time_info,
node_module_options_context,
node_resolve_options_context,
)
.into();
let node_runtime_entries = get_node_runtime_entries(project_root, env, next_config);
let client_runtime_entries = get_client_runtime_entries(
project_root,
env,
client_ty,
mode,
next_config,
execution_context,
);
let client_runtime_entries = client_runtime_entries.resolve_entries(client_asset_context);
let node_build_context = PagesBuildNodeContextVc::new(
project_root,
node_root,
node_asset_context,
node_runtime_entries,
);
let client_build_context = PagesBuildClientContextVc::new(
project_root,
client_root,
client_asset_context,
client_runtime_entries,
);
Ok(get_page_chunks_for_root_directory(
node_build_context,
client_build_context,
pages_structure,
))
}
#[turbo_tasks::function]
async fn get_page_chunks_for_root_directory(
node_build_context: PagesBuildNodeContextVc,
client_build_context: PagesBuildClientContextVc,
pages_structure: PagesStructureVc,
) -> Result<PageChunksVc> {
let PagesStructure {
app,
document,
error,
api,
pages,
} = *pages_structure.await?;
let mut chunks = vec![];
let next_router_root = pages.next_router_path();
// This only makes sense on both the client and the server, but they should map
// to different assets (server can be an external module).
let app = app.await?;
chunks.push(get_page_chunk_for_file(
node_build_context,
client_build_context,
SourceAssetVc::new(app.project_path).into(),
next_router_root,
app.next_router_path,
));
// This only makes sense on the server.
let document = document.await?;
chunks.push(get_page_chunk_for_file(
node_build_context,
client_build_context,
SourceAssetVc::new(document.project_path).into(),
next_router_root,
document.next_router_path,
));
// This only makes sense on both the client and the server, but they should map
// to different assets (server can be an external module).
let error = error.await?;
chunks.push(get_page_chunk_for_file(
node_build_context,
client_build_context,
SourceAssetVc::new(error.project_path).into(),
next_router_root,
error.next_router_path,
));
if let Some(api) = api {
chunks.extend(
get_page_chunks_for_directory(
node_build_context,
client_build_context,
api,
next_router_root,
)
.await?
.iter()
.copied(),
);
}
chunks.extend(
get_page_chunks_for_directory(
node_build_context,
client_build_context,
pages,
next_router_root,
)
.await?
.iter()
.copied(),
);
Ok(PageChunksVc::cell(chunks))
}
#[turbo_tasks::function]
async fn get_page_chunks_for_directory(
node_build_context: PagesBuildNodeContextVc,
client_build_context: PagesBuildClientContextVc,
pages_structure: PagesDirectoryStructureVc,
next_router_root: FileSystemPathVc,
) -> Result<PageChunksVc> {
let PagesDirectoryStructure {
ref items,
ref children,
..
} = *pages_structure.await?;
let mut chunks = vec![];
for item in items.iter() {
let PagesStructureItem {
project_path,
next_router_path,
specificity: _,
} = *item.await?;
chunks.push(get_page_chunk_for_file(
node_build_context,
client_build_context,
SourceAssetVc::new(project_path).into(),
next_router_root,
next_router_path,
));
}
for child in children.iter() {
chunks.extend(
// TODO(alexkirsz) This should be a tree structure instead of a flattened list.
get_page_chunks_for_directory(
node_build_context,
client_build_context,
*child,
next_router_root,
)
.await?
.iter()
.copied(),
)
}
Ok(PageChunksVc::cell(chunks))
}
/// A page chunk corresponding to some route.
#[turbo_tasks::value]
pub struct PageChunk {
/// The pathname of the page.
pub pathname: StringVc,
/// The Node.js chunk.
pub node_chunk: AssetVc,
/// The client chunks.
pub client_chunks: AssetsVc,
}
#[turbo_tasks::function]
async fn get_page_chunk_for_file(
node_build_context: PagesBuildNodeContextVc,
client_build_context: PagesBuildClientContextVc,
page_asset: AssetVc,
next_router_root: FileSystemPathVc,
next_router_path: FileSystemPathVc,
) -> Result<PageChunkVc> {
let reference_type = Value::new(ReferenceType::Entry(EntryReferenceSubType::Page));
let pathname = pathname_for_path(next_router_root, next_router_path, PathType::Page);
Ok(PageChunk {
pathname,
node_chunk: node_build_context.node_chunk(page_asset, reference_type.clone()),
client_chunks: client_build_context.client_chunk(page_asset, pathname, reference_type),
}
.cell())
}
#[turbo_tasks::function]
async fn pathname_from_path(next_router_path: FileSystemPathVc) -> Result<StringVc> {
let pathname = next_router_path.await?;
Ok(StringVc::cell(pathname.path.clone()))
}
#[turbo_tasks::function]
fn get_node_runtime_entries(
project_root: FileSystemPathVc,
env: ProcessEnvVc,
next_config: NextConfigVc,
) -> RuntimeEntriesVc {
let node_runtime_entries = vec![RuntimeEntry::Source(
ProcessEnvAssetVc::new(project_root, env_for_js(env, false, next_config)).into(),
)
.cell()];
RuntimeEntriesVc::cell(node_runtime_entries)
}

View file

@ -15,7 +15,7 @@ pub mod manifest;
pub mod mode;
mod next_build;
pub mod next_client;
pub mod next_client_chunks;
mod next_client_chunks;
mod next_client_component;
pub mod next_config;
mod next_edge;
@ -38,10 +38,8 @@ mod util;
mod web_entry_source;
pub use app_source::create_app_source;
pub use page_loader::create_page_loader_entry_asset;
pub use page_source::create_page_source;
pub use turbopack_binding::{turbopack::node::source_map, *};
pub use util::{pathname_for_path, PathType};
pub use web_entry_source::create_web_entry_source;
pub fn register() {

View file

@ -337,14 +337,14 @@ pub fn get_client_chunking_context(
#[turbo_tasks::function]
pub fn get_client_assets_path(
client_root: FileSystemPathVc,
server_root: FileSystemPathVc,
ty: Value<ClientContextType>,
) -> FileSystemPathVc {
match ty.into_value() {
ClientContextType::Pages { .. }
| ClientContextType::App { .. }
| ClientContextType::Fallback => client_root.join("/_next/static/media"),
ClientContextType::Other => client_root.join("/_assets"),
| ClientContextType::Fallback => server_root.join("/_next/static/media"),
ClientContextType::Other => server_root.join("/_assets"),
}
}

View file

@ -2,10 +2,3 @@ pub(crate) mod context;
pub(crate) mod runtime_entry;
pub(crate) mod transforms;
pub(crate) mod transition;
pub use context::{
get_client_compile_time_info, get_client_module_options_context,
get_client_resolve_options_context, get_client_runtime_entries, ClientContextType,
};
pub use runtime_entry::{RuntimeEntries, RuntimeEntriesVc, RuntimeEntry, RuntimeEntryVc};
pub use transition::NextClientTransition;

View file

@ -1,5 +1,3 @@
pub(crate) mod client_chunks_transition;
pub(crate) mod in_chunking_context_asset;
pub(crate) mod with_chunks;
pub use client_chunks_transition::NextClientChunksTransitionVc;

View file

@ -678,6 +678,7 @@ pub async fn load_next_config_internal(
next_asset("entry/config/next.js"),
Value::new(ReferenceType::Entry(EntryReferenceSubType::Undefined)),
);
let config_value = evaluate(
load_next_config_asset,
project_path,

View file

@ -1,8 +1,3 @@
pub(crate) mod context;
pub(crate) mod resolve;
pub(crate) mod transforms;
pub use context::{
get_server_compile_time_info, get_server_module_options_context,
get_server_resolve_options_context, ServerContextType,
};

View file

@ -54,17 +54,17 @@ pub struct PageLoaderAsset {
pub pathname: StringVc,
}
#[turbo_tasks::function]
pub async fn create_page_loader_entry_asset(
client_context: AssetContextVc,
entry_asset: AssetVc,
pathname: StringVc,
) -> Result<AssetVc> {
#[turbo_tasks::value_impl]
impl PageLoaderAssetVc {
#[turbo_tasks::function]
async fn get_loader_entry_asset(self) -> Result<AssetVc> {
let this = &*self.await?;
let mut result = RopeBuilder::default();
writeln!(
result,
"const PAGE_PATH = {};\n",
StringifyJs(&*pathname.await?)
StringifyJs(&*this.pathname.await?)
)?;
let page_loader_path = next_js_file_path("entry/page-loader.ts");
@ -77,28 +77,25 @@ pub async fn create_page_loader_entry_asset(
let file = File::from(result.build());
let virtual_asset = VirtualAssetVc::new(page_loader_path, file.into()).into();
Ok(VirtualAssetVc::new(page_loader_path, file.into()).into())
}
Ok(client_context.process(
virtual_asset,
Value::new(ReferenceType::Internal(
InnerAssetsVc::cell(indexmap! {
"PAGE".to_string() => client_context.process(entry_asset, Value::new(ReferenceType::Entry(EntryReferenceSubType::Page)))
})
)))
)
}
#[turbo_tasks::value_impl]
impl PageLoaderAssetVc {
#[turbo_tasks::function]
async fn get_page_chunks(self) -> Result<AssetsVc> {
let this = &*self.await?;
let page_loader_entry_asset =
create_page_loader_entry_asset(this.client_context, this.entry_asset, this.pathname);
let loader_entry_asset = self.get_loader_entry_asset();
let Some(module) = EvaluatableAssetVc::resolve_from(page_loader_entry_asset).await? else {
let module = this.client_context.process(
loader_entry_asset,
Value::new(ReferenceType::Internal(
InnerAssetsVc::cell(indexmap! {
"PAGE".to_string() => this.client_context.process(this.entry_asset, Value::new(ReferenceType::Entry(EntryReferenceSubType::Page)))
})
)),
);
let Some(module) = EvaluatableAssetVc::resolve_from(module).await? else {
bail!("internal module must be evaluatable");
};

View file

@ -107,12 +107,6 @@ pub struct PagesDirectoryStructure {
#[turbo_tasks::value_impl]
impl PagesDirectoryStructureVc {
/// Returns the router path of this directory.
#[turbo_tasks::function]
pub async fn next_router_path(self) -> Result<FileSystemPathVc> {
Ok(self.await?.next_router_path)
}
/// Returns the path to the directory of this structure in the project file
/// system.
#[turbo_tasks::function]

View file

@ -38,7 +38,7 @@ use crate::{
get_client_asset_context, get_client_chunking_context,
get_client_resolve_options_context, ClientContextType,
},
RuntimeEntriesVc, RuntimeEntry,
runtime_entry::{RuntimeEntriesVc, RuntimeEntry},
},
next_config::NextConfigVc,
};

View file

@ -210,7 +210,6 @@ export default async function build(
noMangling = false,
appDirOnly = false,
turboNextBuild = false,
turboNextBuildRoot = null,
buildMode: 'default' | 'experimental-compile' | 'experimental-generate'
): Promise<void> {
const isCompile = buildMode === 'experimental-compile'
@ -551,23 +550,6 @@ export default async function build(
app: appPageKeys.length > 0 ? appPageKeys : undefined,
}
if (turboNextBuild) {
// TODO(WEB-397) This is a temporary workaround to allow for filtering a
// subset of pages when building with --experimental-turbo, until we
// have complete support for all pages.
if (process.env.NEXT_TURBO_FILTER_PAGES) {
const filterPages = process.env.NEXT_TURBO_FILTER_PAGES.split(',')
pageKeys.pages = pageKeys.pages.filter((page) => {
return filterPages.some((filterPage) => {
return isMatch(page, filterPage)
})
})
}
// TODO(alexkirsz) Filter out app pages entirely as they are not supported yet.
pageKeys.app = undefined
}
const numConflictingAppPaths = conflictingAppPagePaths.length
if (mappedAppPages && numConflictingAppPaths > 0) {
Log.error(
@ -950,23 +932,7 @@ export default async function build(
async function turbopackBuild() {
const turboNextBuildStart = process.hrtime()
const turboJson = findUp.sync('turbo.json', { cwd: dir })
// eslint-disable-next-line no-shadow
const packagePath = findUp.sync('package.json', { cwd: dir })
let root =
turboNextBuildRoot ??
(turboJson
? path.dirname(turboJson)
: packagePath
? path.dirname(packagePath)
: undefined)
await binding.turbo.nextBuild({
...NextBuildContext,
root,
})
await binding.turbo.nextBuild(NextBuildContext)
const [duration] = process.hrtime(turboNextBuildStart)
return { duration, turbotraceContext: null }
}

View file

@ -18,7 +18,6 @@ const nextBuild: CliCommand = (argv) => {
'--no-mangling': Boolean,
'--experimental-app-only': Boolean,
'--experimental-turbo': Boolean,
'--experimental-turbo-root': String,
'--build-mode': String,
// Aliases
'-h': '--help',
@ -83,7 +82,6 @@ const nextBuild: CliCommand = (argv) => {
args['--no-mangling'],
args['--experimental-app-only'],
args['--experimental-turbo'],
args['--experimental-turbo-root'],
args['--build-mode'] || 'default'
).catch((err) => {
console.error('')

View file

@ -38,18 +38,10 @@
"outputs": ["dist/**"]
},
"typescript": {},
"rust-check": {
"inputs": [".cargo/**", "**/*.rs", "**/Cargo.toml"]
},
"test-cargo-unit": {
"inputs": [".cargo/**", "**/*.rs", "**/Cargo.toml"]
},
"test-cargo-integration": {
"inputs": [".cargo/**", "**/*.rs", "**/Cargo.toml"]
},
"test-cargo-bench": {
"inputs": [".cargo/**", "**/*.rs", "**/Cargo.toml"]
},
"rust-check": {},
"test-cargo-unit": {},
"test-cargo-integration": {},
"test-cargo-bench": {},
"//#get-test-timings": {
"inputs": ["run-tests.js"],
"outputs": ["test-timings.json"]