feat(turbopack): Introduce RcStr (#66262)

# Turbopack

* https://github.com/vercel/turbo/pull/8272 <!-- Donny/강동윤 - feat:
Update `swc_core` to `v0.92.8` -->
* https://github.com/vercel/turbo/pull/8262 <!-- Alexander Lyon - add
crate to calculate prehashes -->
* https://github.com/vercel/turbo/pull/8174 <!-- Tobias Koppers - use
prehash to avoid rehashing the key in the task cache -->
* https://github.com/vercel/turbo/pull/7674 <!-- Alexander Lyon - [turbo
trace] add ability to filter by value and occurrences -->
* https://github.com/vercel/turbo/pull/8287 <!-- Donny/강동윤 - feat:
Update `swc_core` to `v0.92.10` -->
* https://github.com/vercel/turbo/pull/8037 <!-- Alexander Lyon - create
turbo-static for compile time graph analysis -->
* https://github.com/vercel/turbo/pull/8293 <!-- Will Binns-Smith - Sync
Cargo.lock with Next.js -->
* https://github.com/vercel/turbo/pull/8239 <!-- Benjamin Woodruff -
Reduce amount of code generated by ValueDebugFormat -->
* https://github.com/vercel/turbo/pull/8304 <!-- Benjamin Woodruff -
Minor optimizations to the codegen of TaskFnInputFunction -->
* https://github.com/vercel/turbo/pull/8221 <!-- Donny/강동윤 - perf:
Introduce `RcStr` -->


### What?

This PR introduces `RcStr`, a reference-counted string type, and switches
task inputs and value types from `String` to it. I originally tried using
`Arc<String>` directly in https://github.com/vercel/turbo/pull/7772, but a
team member suggested creating a dedicated newtype so we can replace the
underlying implementation easily in the future.
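
A minimal sketch of what such a newtype could look like, assuming it simply
wraps `Arc<String>` for now (the actual `RcStr` in `turbo-tasks` may differ in
its exact representation and trait implementations):

```rust
use std::{fmt, ops::Deref, sync::Arc};

/// A cheaply clonable, immutable string.
///
/// Keeping the backing store behind a newtype means call sites only depend on
/// `RcStr`, so the representation (assumed here to be `Arc<String>`) can be
/// swapped out later without another tree-wide refactor.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct RcStr(Arc<String>);

impl Deref for RcStr {
    type Target = str;

    fn deref(&self) -> &str {
        &self.0
    }
}

impl From<String> for RcStr {
    fn from(s: String) -> Self {
        RcStr(Arc::new(s))
    }
}

impl From<&str> for RcStr {
    fn from(s: &str) -> Self {
        RcStr(Arc::new(s.to_owned()))
    }
}

impl fmt::Display for RcStr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self)
    }
}
```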

### Why?

To reduce memory usage: cloning or storing an `RcStr` shares a single
allocation and only bumps a reference count, instead of copying the string
data the way `String` clones do.

### How?
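
Most of the diff below is the mechanical fallout of the type change: `String`
parameters and fields become `RcStr`, and call sites trade `.to_string()` for
`.into()` or a cheap `.clone()`. A hypothetical before/after of that pattern,
building on the sketch above (the functions are stand-ins, not real Turbopack
API):

```rust
// Before: every caller had to hand over a freshly allocated `String`.
fn project_fs_old(project_dir: String) -> usize {
    project_dir.len()
}

// After: `RcStr` arguments share one allocation across callers.
fn project_fs_new(project_dir: RcStr) -> usize {
    project_dir.len()
}

fn main() {
    let dir: RcStr = "apps/web".into();
    let _ = project_fs_old("apps/web".to_string());
    let _ = project_fs_new(dir.clone()); // bumps a refcount, no string copy
    let _ = project_fs_new(dir);
}
```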

Closes PACK-2776
Donny/강동윤 2024-06-05 15:09:28 +09:00 committed by GitHub
parent 750ae91e30
commit 3cf225c8ee
94 changed files with 1490 additions and 1483 deletions

Cargo.lock (generated)

@ -321,7 +321,7 @@ dependencies = [
[[package]] [[package]]
name = "auto-hash-map" name = "auto-hash-map"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"serde", "serde",
"smallvec", "smallvec",
@ -3092,7 +3092,7 @@ dependencies = [
[[package]] [[package]]
name = "node-file-trace" name = "node-file-trace"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"serde", "serde",
@ -6945,10 +6945,15 @@ dependencies = [
"utf-8", "utf-8",
] ]
[[package]]
name = "turbo-prehash"
version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
[[package]] [[package]]
name = "turbo-tasks" name = "turbo-tasks"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -6980,7 +6985,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-build" name = "turbo-tasks-build"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"cargo-lock", "cargo-lock",
@ -6992,7 +6997,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-bytes" name = "turbo-tasks-bytes"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"bytes", "bytes",
@ -7006,7 +7011,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-env" name = "turbo-tasks-env"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"dotenvs", "dotenvs",
@ -7020,7 +7025,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-fetch" name = "turbo-tasks-fetch"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"lazy_static", "lazy_static",
@ -7036,7 +7041,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-fs" name = "turbo-tasks-fs"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"auto-hash-map", "auto-hash-map",
@ -7068,7 +7073,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-hash" name = "turbo-tasks-hash"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"md4", "md4",
"turbo-tasks-macros", "turbo-tasks-macros",
@ -7078,7 +7083,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-macros" name = "turbo-tasks-macros"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"proc-macro-error", "proc-macro-error",
@ -7092,7 +7097,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-macros-shared" name = "turbo-tasks-macros-shared"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"proc-macro2", "proc-macro2",
"quote", "quote",
@ -7102,7 +7107,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-malloc" name = "turbo-tasks-malloc"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"mimalloc", "mimalloc",
] ]
@ -7110,7 +7115,7 @@ dependencies = [
[[package]] [[package]]
name = "turbo-tasks-memory" name = "turbo-tasks-memory"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"auto-hash-map", "auto-hash-map",
@ -7127,6 +7132,7 @@ dependencies = [
"smallvec", "smallvec",
"tokio", "tokio",
"tracing", "tracing",
"turbo-prehash",
"turbo-tasks", "turbo-tasks",
"turbo-tasks-build", "turbo-tasks-build",
"turbo-tasks-hash", "turbo-tasks-hash",
@ -7136,7 +7142,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack" name = "turbopack"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-recursion", "async-recursion",
@ -7166,7 +7172,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-binding" name = "turbopack-binding"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"auto-hash-map", "auto-hash-map",
"mdxjs", "mdxjs",
@ -7207,7 +7213,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-browser" name = "turbopack-browser"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"indexmap 1.9.3", "indexmap 1.9.3",
@ -7230,7 +7236,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-cli-utils" name = "turbopack-cli-utils"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"clap", "clap",
@ -7247,7 +7253,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-core" name = "turbopack-core"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-recursion", "async-recursion",
@ -7276,7 +7282,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-css" name = "turbopack-css"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"indexmap 1.9.3", "indexmap 1.9.3",
@ -7303,7 +7309,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-dev-server" name = "turbopack-dev-server"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-compression", "async-compression",
@ -7339,7 +7345,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-ecmascript" name = "turbopack-ecmascript"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -7374,7 +7380,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-ecmascript-hmr-protocol" name = "turbopack-ecmascript-hmr-protocol"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"serde", "serde",
"serde_json", "serde_json",
@ -7385,7 +7391,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-ecmascript-plugins" name = "turbopack-ecmascript-plugins"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-trait", "async-trait",
@ -7409,7 +7415,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-ecmascript-runtime" name = "turbopack-ecmascript-runtime"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"indoc", "indoc",
@ -7425,7 +7431,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-env" name = "turbopack-env"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"indexmap 1.9.3", "indexmap 1.9.3",
@ -7441,7 +7447,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-image" name = "turbopack-image"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"base64 0.21.4", "base64 0.21.4",
@ -7460,7 +7466,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-json" name = "turbopack-json"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"serde", "serde",
@ -7475,7 +7481,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-mdx" name = "turbopack-mdx"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"mdxjs", "mdxjs",
@ -7490,7 +7496,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-node" name = "turbopack-node"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-stream", "async-stream",
@ -7524,7 +7530,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-nodejs" name = "turbopack-nodejs"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"indexmap 1.9.3", "indexmap 1.9.3",
@ -7544,7 +7550,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-resolve" name = "turbopack-resolve"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"indexmap 1.9.3", "indexmap 1.9.3",
@ -7562,7 +7568,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-static" name = "turbopack-static"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"serde", "serde",
@ -7578,7 +7584,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-swc-utils" name = "turbopack-swc-utils"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"swc_core", "swc_core",
"turbo-tasks", "turbo-tasks",
@ -7589,7 +7595,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-trace-server" name = "turbopack-trace-server"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"either", "either",
@ -7609,7 +7615,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-trace-utils" name = "turbopack-trace-utils"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"crossbeam-channel", "crossbeam-channel",
@ -7625,7 +7631,7 @@ dependencies = [
[[package]] [[package]]
name = "turbopack-wasm" name = "turbopack-wasm"
version = "0.1.0" version = "0.1.0"
source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240531.2#d4fd4fbbffc829fb78b8cc57f2bbd036b869a5a4" source = "git+https://github.com/vercel/turbo.git?tag=turbopack-240605.1#6c244029821ba4b5ffe9c8d407830ec0c91d519d"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"indexmap 1.9.3", "indexmap 1.9.3",


@ -37,11 +37,11 @@ swc_core = { version = "0.92.10", features = [
testing = { version = "0.35.25" } testing = { version = "0.35.25" }
# Turbo crates # Turbo crates
turbopack-binding = { git = "https://github.com/vercel/turbo.git", tag = "turbopack-240531.2" } turbopack-binding = { git = "https://github.com/vercel/turbo.git", tag = "turbopack-240605.1" }
# [TODO]: need to refactor embed_directory! macro usages, as well as resolving turbo_tasks::function, macros.. # [TODO]: need to refactor embed_directory! macro usages, as well as resolving turbo_tasks::function, macros..
turbo-tasks = { git = "https://github.com/vercel/turbo.git", tag = "turbopack-240531.2" } turbo-tasks = { git = "https://github.com/vercel/turbo.git", tag = "turbopack-240605.1" }
# [TODO]: need to refactor embed_directory! macro usage in next-core # [TODO]: need to refactor embed_directory! macro usage in next-core
turbo-tasks-fs = { git = "https://github.com/vercel/turbo.git", tag = "turbopack-240531.2" } turbo-tasks-fs = { git = "https://github.com/vercel/turbo.git", tag = "turbopack-240605.1" }
# General Deps # General Deps


@ -12,7 +12,7 @@ use next_core::app_structure::{
LoaderTree, MetadataItem, MetadataWithAltItem, LoaderTree, MetadataItem, MetadataWithAltItem,
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::{ReadRef, Vc}; use turbo_tasks::{RcStr, ReadRef, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::{ turbo::{
tasks::{ tasks::{
@ -27,12 +27,8 @@ use turbopack_binding::{
use crate::register; use crate::register;
#[turbo_tasks::function] #[turbo_tasks::function]
async fn project_fs(project_dir: String, watching: bool) -> Result<Vc<Box<dyn FileSystem>>> { async fn project_fs(project_dir: RcStr, watching: bool) -> Result<Vc<Box<dyn FileSystem>>> {
let disk_fs = DiskFileSystem::new( let disk_fs = DiskFileSystem::new(PROJECT_FILESYSTEM_NAME.into(), project_dir, vec![]);
PROJECT_FILESYSTEM_NAME.to_string(),
project_dir.to_string(),
vec![],
);
if watching { if watching {
disk_fs.await?.start_watching_with_invalidation_reason()?; disk_fs.await?.start_watching_with_invalidation_reason()?;
} }
@ -42,8 +38,8 @@ async fn project_fs(project_dir: String, watching: bool) -> Result<Vc<Box<dyn Fi
#[turbo_tasks::value] #[turbo_tasks::value]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct LoaderTreeForJs { struct LoaderTreeForJs {
segment: String, segment: RcStr,
parallel_routes: IndexMap<String, ReadRef<LoaderTreeForJs>>, parallel_routes: IndexMap<RcStr, ReadRef<LoaderTreeForJs>>,
#[turbo_tasks(trace_ignore)] #[turbo_tasks(trace_ignore)]
components: ComponentsForJs, components: ComponentsForJs,
#[turbo_tasks(trace_ignore)] #[turbo_tasks(trace_ignore)]
@ -57,13 +53,13 @@ enum EntrypointForJs {
loader_tree: ReadRef<LoaderTreeForJs>, loader_tree: ReadRef<LoaderTreeForJs>,
}, },
AppRoute { AppRoute {
path: String, path: RcStr,
}, },
} }
#[turbo_tasks::value(transparent)] #[turbo_tasks::value(transparent)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct EntrypointsForJs(HashMap<String, EntrypointForJs>); struct EntrypointsForJs(HashMap<RcStr, EntrypointForJs>);
#[turbo_tasks::value(transparent)] #[turbo_tasks::value(transparent)]
struct OptionEntrypointsForJs(Option<Vc<EntrypointsForJs>>); struct OptionEntrypointsForJs(Option<Vc<EntrypointsForJs>>);
@ -71,14 +67,14 @@ struct OptionEntrypointsForJs(Option<Vc<EntrypointsForJs>>);
async fn fs_path_to_path( async fn fs_path_to_path(
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
path: Vc<FileSystemPath>, path: Vc<FileSystemPath>,
) -> Result<String> { ) -> Result<RcStr> {
match project_path.await?.get_path_to(&*path.await?) { match project_path.await?.get_path_to(&*path.await?) {
None => Err(anyhow!( None => Err(anyhow!(
"Path {} is not inside of the project path {}", "Path {} is not inside of the project path {}",
path.to_string().await?, path.to_string().await?,
project_path.to_string().await? project_path.to_string().await?
)), )),
Some(p) => Ok(p.to_string()), Some(p) => Ok(p.into()),
} }
} }
@ -86,21 +82,21 @@ async fn fs_path_to_path(
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
struct ComponentsForJs { struct ComponentsForJs {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
page: Option<String>, page: Option<RcStr>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
layout: Option<String>, layout: Option<RcStr>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
error: Option<String>, error: Option<RcStr>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
loading: Option<String>, loading: Option<RcStr>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
template: Option<String>, template: Option<RcStr>,
#[serde(skip_serializing_if = "Option::is_none", rename = "not-found")] #[serde(skip_serializing_if = "Option::is_none", rename = "not-found")]
not_found: Option<String>, not_found: Option<RcStr>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
default: Option<String>, default: Option<RcStr>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
route: Option<String>, route: Option<RcStr>,
metadata: MetadataForJs, metadata: MetadataForJs,
} }
@ -134,19 +130,19 @@ struct GlobalMetadataForJs {
#[serde(tag = "type", rename_all = "camelCase")] #[serde(tag = "type", rename_all = "camelCase")]
enum MetadataWithAltItemForJs { enum MetadataWithAltItemForJs {
Static { Static {
path: String, path: RcStr,
alt_path: Option<String>, alt_path: Option<RcStr>,
}, },
Dynamic { Dynamic {
path: String, path: RcStr,
}, },
} }
#[derive(Deserialize, Serialize, PartialEq, Eq, ValueDebugFormat)] #[derive(Deserialize, Serialize, PartialEq, Eq, ValueDebugFormat)]
#[serde(tag = "type", rename_all = "camelCase")] #[serde(tag = "type", rename_all = "camelCase")]
enum MetadataItemForJs { enum MetadataItemForJs {
Static { path: String }, Static { path: RcStr },
Dynamic { path: String }, Dynamic { path: RcStr },
} }
async fn prepare_components_for_js( async fn prepare_components_for_js(
@ -166,7 +162,7 @@ async fn prepare_components_for_js(
} = &*components.await?; } = &*components.await?;
let mut result = ComponentsForJs::default(); let mut result = ComponentsForJs::default();
async fn add( async fn add(
result: &mut Option<String>, result: &mut Option<RcStr>,
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
value: &Option<Vc<FileSystemPath>>, value: &Option<Vc<FileSystemPath>>,
) -> Result<()> { ) -> Result<()> {
@ -305,7 +301,7 @@ async fn prepare_entrypoints_for_js(
.await? .await?
.iter() .iter()
.map(|(key, value)| { .map(|(key, value)| {
let key = key.to_string(); let key = key.to_string().into();
async move { async move {
let value = match *value { let value = match *value {
Entrypoint::AppPage { loader_tree, .. } => EntrypointForJs::AppPage { Entrypoint::AppPage { loader_tree, .. } => EntrypointForJs::AppPage {
@ -330,19 +326,19 @@ async fn prepare_entrypoints_for_js(
#[turbo_tasks::function] #[turbo_tasks::function]
async fn get_value( async fn get_value(
root_dir: String, root_dir: RcStr,
project_dir: String, project_dir: RcStr,
page_extensions: Vec<String>, page_extensions: Vec<RcStr>,
watching: bool, watching: bool,
) -> Result<Vc<OptionEntrypointsForJs>> { ) -> Result<Vc<OptionEntrypointsForJs>> {
let page_extensions = Vc::cell(page_extensions); let page_extensions = Vc::cell(page_extensions);
let fs = project_fs(root_dir.clone(), watching); let fs = project_fs(root_dir.clone(), watching);
let project_relative = project_dir.strip_prefix(&root_dir).unwrap(); let project_relative = project_dir.strip_prefix(&*root_dir).unwrap();
let project_relative = project_relative let project_relative = project_relative
.strip_prefix(MAIN_SEPARATOR) .strip_prefix(MAIN_SEPARATOR)
.unwrap_or(project_relative) .unwrap_or(project_relative)
.replace(MAIN_SEPARATOR, "/"); .replace(MAIN_SEPARATOR, "/");
let project_path = fs.root().join(project_relative); let project_path = fs.root().join(project_relative.into());
let app_dir = find_app_dir(project_path); let app_dir = find_app_dir(project_path);
@ -373,8 +369,8 @@ pub fn stream_entrypoints(
let value = serde_json::to_value(value)?; let value = serde_json::to_value(value)?;
Ok(vec![value]) Ok(vec![value])
})?; })?;
let root_dir = Arc::new(root_dir); let root_dir = RcStr::from(root_dir);
let project_dir = Arc::new(project_dir); let project_dir = RcStr::from(project_dir);
let page_extensions = Arc::new(page_extensions); let page_extensions = Arc::new(page_extensions);
turbo_tasks.spawn_root_task(move || { turbo_tasks.spawn_root_task(move || {
let func: ThreadsafeFunction<Option<ReadRef<EntrypointsForJs>>> = func.clone(); let func: ThreadsafeFunction<Option<ReadRef<EntrypointsForJs>>> = func.clone();
@ -383,9 +379,9 @@ pub fn stream_entrypoints(
let page_extensions: Arc<Vec<String>> = page_extensions.clone(); let page_extensions: Arc<Vec<String>> = page_extensions.clone();
Box::pin(async move { Box::pin(async move {
if let Some(entrypoints) = &*get_value( if let Some(entrypoints) = &*get_value(
(*root_dir).clone(), root_dir.clone(),
(*project_dir).clone(), project_dir.clone(),
page_extensions.iter().map(|s| s.to_string()).collect(), page_extensions.iter().map(|s| s.as_str().into()).collect(),
true, true,
) )
.await? .await?
@ -415,9 +411,9 @@ pub async fn get_entrypoints(
let result = turbo_tasks let result = turbo_tasks
.run_once(async move { .run_once(async move {
let value = if let Some(entrypoints) = &*get_value( let value = if let Some(entrypoints) = &*get_value(
root_dir, root_dir.into(),
project_dir, project_dir.into(),
page_extensions.iter().map(|s| s.to_string()).collect(), page_extensions.iter().map(|s| s.as_str().into()).collect(),
false, false,
) )
.await? .await?


@ -57,7 +57,7 @@ impl From<WrittenEndpoint> for NapiWrittenEndpoint {
} => Self { } => Self {
r#type: "nodejs".to_string(), r#type: "nodejs".to_string(),
entry_path: Some(server_entry_path), entry_path: Some(server_entry_path),
client_paths, client_paths: client_paths.into_iter().map(From::from).collect(),
server_paths: server_paths.into_iter().map(From::from).collect(), server_paths: server_paths.into_iter().map(From::from).collect(),
..Default::default() ..Default::default()
}, },
@ -66,7 +66,7 @@ impl From<WrittenEndpoint> for NapiWrittenEndpoint {
client_paths, client_paths,
} => Self { } => Self {
r#type: "edge".to_string(), r#type: "edge".to_string(),
client_paths, client_paths: client_paths.into_iter().map(From::from).collect(),
server_paths: server_paths.into_iter().map(From::from).collect(), server_paths: server_paths.into_iter().map(From::from).collect(),
..Default::default() ..Default::default()
}, },


@ -22,7 +22,7 @@ use rand::Rng;
use tokio::{io::AsyncWriteExt, time::Instant}; use tokio::{io::AsyncWriteExt, time::Instant};
use tracing::Instrument; use tracing::Instrument;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry}; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry};
use turbo_tasks::{Completion, ReadRef, TransientInstance, TurboTasks, UpdateInfo, Vc}; use turbo_tasks::{Completion, RcStr, ReadRef, TransientInstance, TurboTasks, UpdateInfo, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::{ turbo::{
tasks_fs::{DiskFileSystem, FileContent, FileSystem, FileSystemPath}, tasks_fs::{DiskFileSystem, FileContent, FileSystem, FileSystemPath},
@ -77,9 +77,9 @@ pub struct NapiDraftModeOptions {
impl From<NapiDraftModeOptions> for DraftModeOptions { impl From<NapiDraftModeOptions> for DraftModeOptions {
fn from(val: NapiDraftModeOptions) -> Self { fn from(val: NapiDraftModeOptions) -> Self {
DraftModeOptions { DraftModeOptions {
preview_mode_id: val.preview_mode_id, preview_mode_id: val.preview_mode_id.into(),
preview_mode_encryption_key: val.preview_mode_encryption_key, preview_mode_encryption_key: val.preview_mode_encryption_key.into(),
preview_mode_signing_key: val.preview_mode_signing_key, preview_mode_signing_key: val.preview_mode_signing_key.into(),
} }
} }
} }
@ -186,20 +186,20 @@ pub struct NapiTurboEngineOptions {
impl From<NapiProjectOptions> for ProjectOptions { impl From<NapiProjectOptions> for ProjectOptions {
fn from(val: NapiProjectOptions) -> Self { fn from(val: NapiProjectOptions) -> Self {
ProjectOptions { ProjectOptions {
root_path: val.root_path, root_path: val.root_path.into(),
project_path: val.project_path, project_path: val.project_path.into(),
watch: val.watch, watch: val.watch,
next_config: val.next_config, next_config: val.next_config.into(),
js_config: val.js_config, js_config: val.js_config.into(),
env: val env: val
.env .env
.into_iter() .into_iter()
.map(|var| (var.name, var.value)) .map(|var| (var.name.into(), var.value.into()))
.collect(), .collect(),
define_env: val.define_env.into(), define_env: val.define_env.into(),
dev: val.dev, dev: val.dev,
encryption_key: val.encryption_key, encryption_key: val.encryption_key.into(),
build_id: val.build_id, build_id: val.build_id.into(),
preview_props: val.preview_props.into(), preview_props: val.preview_props.into(),
} }
} }
@ -208,18 +208,20 @@ impl From<NapiProjectOptions> for ProjectOptions {
impl From<NapiPartialProjectOptions> for PartialProjectOptions { impl From<NapiPartialProjectOptions> for PartialProjectOptions {
fn from(val: NapiPartialProjectOptions) -> Self { fn from(val: NapiPartialProjectOptions) -> Self {
PartialProjectOptions { PartialProjectOptions {
root_path: val.root_path, root_path: val.root_path.map(From::from),
project_path: val.project_path, project_path: val.project_path.map(From::from),
watch: val.watch, watch: val.watch,
next_config: val.next_config, next_config: val.next_config.map(From::from),
js_config: val.js_config, js_config: val.js_config.map(From::from),
env: val env: val.env.map(|env| {
.env env.into_iter()
.map(|env| env.into_iter().map(|var| (var.name, var.value)).collect()), .map(|var| (var.name.into(), var.value.into()))
.collect()
}),
define_env: val.define_env.map(|env| env.into()), define_env: val.define_env.map(|env| env.into()),
dev: val.dev, dev: val.dev,
encryption_key: val.encryption_key, encryption_key: val.encryption_key.map(From::from),
build_id: val.build_id, build_id: val.build_id.map(From::from),
preview_props: val.preview_props.map(|props| props.into()), preview_props: val.preview_props.map(|props| props.into()),
} }
} }
@ -231,17 +233,17 @@ impl From<NapiDefineEnv> for DefineEnv {
client: val client: val
.client .client
.into_iter() .into_iter()
.map(|var| (var.name, var.value)) .map(|var| (var.name.into(), var.value.into()))
.collect(), .collect(),
edge: val edge: val
.edge .edge
.into_iter() .into_iter()
.map(|var| (var.name, var.value)) .map(|var| (var.name.into(), var.value.into()))
.collect(), .collect(),
nodejs: val nodejs: val
.nodejs .nodejs
.into_iter() .into_iter()
.map(|var| (var.name, var.value)) .map(|var| (var.name.into(), var.value.into()))
.collect(), .collect(),
} }
} }
@ -357,10 +359,8 @@ pub async fn project_new(
/// - https://github.com/oven-sh/bun/blob/06a9aa80c38b08b3148bfeabe560/src/install/install.zig#L3038 /// - https://github.com/oven-sh/bun/blob/06a9aa80c38b08b3148bfeabe560/src/install/install.zig#L3038
#[tracing::instrument] #[tracing::instrument]
async fn benchmark_file_io(directory: Vc<FileSystemPath>) -> Result<Vc<Completion>> { async fn benchmark_file_io(directory: Vc<FileSystemPath>) -> Result<Vc<Completion>> {
let temp_path = directory.join(format!( let temp_path =
"tmp_file_io_benchmark_{:x}", directory.join(format!("tmp_file_io_benchmark_{:x}", rand::random::<u128>()).into());
rand::random::<u128>()
));
// try to get the real file path on disk so that we can use it with tokio // try to get the real file path on disk so that we can use it with tokio
let fs = Vc::try_resolve_downcast_type::<DiskFileSystem>(directory.fs()) let fs = Vc::try_resolve_downcast_type::<DiskFileSystem>(directory.fs())
@ -624,7 +624,11 @@ pub fn project_entrypoints_subscribe(
.routes .routes
.iter() .iter()
.map(|(pathname, route)| { .map(|(pathname, route)| {
NapiRoute::from_route(pathname.clone(), route.clone(), &turbo_tasks) NapiRoute::from_route(
pathname.clone().into(),
route.clone(),
&turbo_tasks,
)
}) })
.collect::<Vec<_>>(), .collect::<Vec<_>>(),
middleware: entrypoints middleware: entrypoints
@ -670,7 +674,7 @@ struct HmrUpdateWithIssues {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn hmr_update( async fn hmr_update(
project: Vc<Project>, project: Vc<Project>,
identifier: String, identifier: RcStr,
state: Vc<VersionState>, state: Vc<VersionState>,
) -> Result<Vc<HmrUpdateWithIssues>> { ) -> Result<Vc<HmrUpdateWithIssues>> {
let update_operation = project.hmr_update(identifier, state); let update_operation = project.hmr_update(identifier, state);
@ -701,7 +705,7 @@ pub fn project_hmr_events(
let outer_identifier = identifier.clone(); let outer_identifier = identifier.clone();
let session = session.clone(); let session = session.clone();
move || { move || {
let identifier = outer_identifier.clone(); let identifier: RcStr = outer_identifier.clone().into();
let session = session.clone(); let session = session.clone();
async move { async move {
let project = project.project().resolve().await?; let project = project.project().resolve().await?;
@ -773,7 +777,7 @@ struct HmrIdentifiers {
#[turbo_tasks::value(serialization = "none")] #[turbo_tasks::value(serialization = "none")]
struct HmrIdentifiersWithIssues { struct HmrIdentifiersWithIssues {
identifiers: ReadRef<Vec<String>>, identifiers: ReadRef<Vec<RcStr>>,
issues: Arc<Vec<ReadRef<PlainIssue>>>, issues: Arc<Vec<ReadRef<PlainIssue>>>,
diagnostics: Arc<Vec<ReadRef<PlainDiagnostic>>>, diagnostics: Arc<Vec<ReadRef<PlainDiagnostic>>>,
} }
@ -967,7 +971,9 @@ pub async fn project_trace_source(
( (
path, path,
match module { match module {
Some(module) => Some(urlencoding::decode(&module.1)?.into_owned()), Some(module) => {
Some(urlencoding::decode(&module.1)?.into_owned().into())
}
None => None, None => None,
}, },
) )
@ -992,13 +998,13 @@ pub async fn project_trace_source(
.container .container
.project() .project()
.node_root() .node_root()
.join(chunk_base.to_owned()); .join(chunk_base.into());
let client_path = project let client_path = project
.container .container
.project() .project()
.client_relative_path() .client_relative_path()
.join(chunk_base.to_owned()); .join(chunk_base.into());
let mut map_result = project let mut map_result = project
.container .container
@ -1053,7 +1059,7 @@ pub async fn project_trace_source(
Ok(Some(StackFrame { Ok(Some(StackFrame {
file: source_file.to_string(), file: source_file.to_string(),
method_name: name, method_name: name.as_ref().map(ToString::to_string),
line, line,
column, column,
is_server: frame.is_server, is_server: frame.is_server,
@ -1079,7 +1085,7 @@ pub async fn project_get_source_for_asset(
.project_path() .project_path()
.fs() .fs()
.root() .root()
.join(file_path.to_string()) .join(file_path.clone().into())
.read() .read()
.await?; .await?;


@ -120,12 +120,12 @@ impl From<&PlainIssue> for NapiIssue {
.as_ref() .as_ref()
.map(|styled| serde_json::to_value(StyledStringSerialize::from(styled)).unwrap()), .map(|styled| serde_json::to_value(StyledStringSerialize::from(styled)).unwrap()),
stage: issue.stage.to_string(), stage: issue.stage.to_string(),
file_path: issue.file_path.clone(), file_path: issue.file_path.to_string(),
detail: issue detail: issue
.detail .detail
.as_ref() .as_ref()
.map(|styled| serde_json::to_value(StyledStringSerialize::from(styled)).unwrap()), .map(|styled| serde_json::to_value(StyledStringSerialize::from(styled)).unwrap()),
documentation_link: issue.documentation_link.clone(), documentation_link: issue.documentation_link.to_string(),
severity: issue.severity.as_str().to_string(), severity: issue.severity.as_str().to_string(),
source: issue.source.as_deref().map(|source| source.into()), source: issue.source.as_deref().map(|source| source.into()),
title: serde_json::to_value(StyledStringSerialize::from(&issue.title)).unwrap(), title: serde_json::to_value(StyledStringSerialize::from(&issue.title)).unwrap(),
@ -255,9 +255,13 @@ pub struct NapiDiagnostic {
impl NapiDiagnostic { impl NapiDiagnostic {
pub fn from(diagnostic: &PlainDiagnostic) -> Self { pub fn from(diagnostic: &PlainDiagnostic) -> Self {
Self { Self {
category: diagnostic.category.clone(), category: diagnostic.category.to_string(),
name: diagnostic.name.clone(), name: diagnostic.name.to_string(),
payload: diagnostic.payload.clone(), payload: diagnostic
.payload
.iter()
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect(),
} }
} }
} }


@ -190,10 +190,21 @@ impl FromNapiValue for NapiRouteHas {
impl From<NapiRouteHas> for RouteHas { impl From<NapiRouteHas> for RouteHas {
fn from(val: NapiRouteHas) -> Self { fn from(val: NapiRouteHas) -> Self {
match val { match val {
NapiRouteHas::Header { key, value } => RouteHas::Header { key, value }, NapiRouteHas::Header { key, value } => RouteHas::Header {
NapiRouteHas::Query { key, value } => RouteHas::Query { key, value }, key: key.into(),
NapiRouteHas::Cookie { key, value } => RouteHas::Cookie { key, value }, value: value.map(From::from),
NapiRouteHas::Host { value } => RouteHas::Host { value }, },
NapiRouteHas::Query { key, value } => RouteHas::Query {
key: key.into(),
value: value.map(From::from),
},
NapiRouteHas::Cookie { key, value } => RouteHas::Cookie {
key: key.into(),
value: value.map(From::from),
},
NapiRouteHas::Host { value } => RouteHas::Host {
value: value.into(),
},
} }
} }
} }


@ -40,5 +40,5 @@ pub async fn run_turbo_tracing(
}), }),
) )
.await?; .await?;
Ok(files) Ok(files.into_iter().map(|f| f.to_string()).collect())
} }


@ -36,7 +36,7 @@ use next_core::{
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::Instrument; use tracing::Instrument;
use turbo_tasks::{trace::TraceRawVcs, Completion, TryJoinIterExt, Value, Vc}; use turbo_tasks::{trace::TraceRawVcs, Completion, RcStr, TryJoinIterExt, Value, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::{ turbo::{
tasks_env::{CustomProcessEnv, ProcessEnv}, tasks_env::{CustomProcessEnv, ProcessEnv},
@ -167,8 +167,8 @@ impl AppProject {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn client_transition_name(self: Vc<Self>) -> Vc<String> { fn client_transition_name(self: Vc<Self>) -> Vc<RcStr> {
Vc::cell(ECMASCRIPT_CLIENT_TRANSITION_NAME.to_string()) Vc::cell(ECMASCRIPT_CLIENT_TRANSITION_NAME.into())
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -275,23 +275,20 @@ impl AppProject {
fn rsc_module_context(self: Vc<Self>) -> Vc<ModuleAssetContext> { fn rsc_module_context(self: Vc<Self>) -> Vc<ModuleAssetContext> {
let transitions = [ let transitions = [
( (
ECMASCRIPT_CLIENT_TRANSITION_NAME.to_string(), ECMASCRIPT_CLIENT_TRANSITION_NAME.into(),
Vc::upcast(NextEcmascriptClientReferenceTransition::new( Vc::upcast(NextEcmascriptClientReferenceTransition::new(
Vc::upcast(self.client_transition()), Vc::upcast(self.client_transition()),
self.ssr_transition(), self.ssr_transition(),
)), )),
), ),
( (
"next-dynamic".to_string(), "next-dynamic".into(),
Vc::upcast(NextDynamicTransition::new(Vc::upcast( Vc::upcast(NextDynamicTransition::new(Vc::upcast(
self.client_transition(), self.client_transition(),
))), ))),
), ),
("next-ssr".to_string(), Vc::upcast(self.ssr_transition())), ("next-ssr".into(), Vc::upcast(self.ssr_transition())),
( ("next-shared".into(), Vc::upcast(self.shared_transition())),
"next-shared".to_string(),
Vc::upcast(self.shared_transition()),
),
] ]
.into_iter() .into_iter()
.collect(); .collect();
@ -300,7 +297,7 @@ impl AppProject {
self.project().server_compile_time_info(), self.project().server_compile_time_info(),
self.rsc_module_options_context(), self.rsc_module_options_context(),
self.rsc_resolve_options_context(), self.rsc_resolve_options_context(),
Vc::cell("app-rsc".to_string()), Vc::cell("app-rsc".into()),
) )
} }
@ -308,24 +305,21 @@ impl AppProject {
fn edge_rsc_module_context(self: Vc<Self>) -> Vc<ModuleAssetContext> { fn edge_rsc_module_context(self: Vc<Self>) -> Vc<ModuleAssetContext> {
let transitions = [ let transitions = [
( (
ECMASCRIPT_CLIENT_TRANSITION_NAME.to_string(), ECMASCRIPT_CLIENT_TRANSITION_NAME.into(),
Vc::upcast(NextEcmascriptClientReferenceTransition::new( Vc::upcast(NextEcmascriptClientReferenceTransition::new(
Vc::upcast(self.client_transition()), Vc::upcast(self.client_transition()),
self.edge_ssr_transition(), self.edge_ssr_transition(),
)), )),
), ),
( (
"next-dynamic".to_string(), "next-dynamic".into(),
Vc::upcast(NextDynamicTransition::new(Vc::upcast( Vc::upcast(NextDynamicTransition::new(Vc::upcast(
self.client_transition(), self.client_transition(),
))), ))),
), ),
("next-ssr".into(), Vc::upcast(self.edge_ssr_transition())),
( (
"next-ssr".to_string(), "next-shared".into(),
Vc::upcast(self.edge_ssr_transition()),
),
(
"next-shared".to_string(),
Vc::upcast(self.edge_shared_transition()), Vc::upcast(self.edge_shared_transition()),
), ),
] ]
@ -336,7 +330,7 @@ impl AppProject {
self.project().edge_compile_time_info(), self.project().edge_compile_time_info(),
self.edge_rsc_module_options_context(), self.edge_rsc_module_options_context(),
self.edge_rsc_resolve_options_context(), self.edge_rsc_resolve_options_context(),
Vc::cell("app-edge-rsc".to_string()), Vc::cell("app-edge-rsc".into()),
) )
} }
@ -344,23 +338,20 @@ impl AppProject {
fn route_module_context(self: Vc<Self>) -> Vc<ModuleAssetContext> { fn route_module_context(self: Vc<Self>) -> Vc<ModuleAssetContext> {
let transitions = [ let transitions = [
( (
ECMASCRIPT_CLIENT_TRANSITION_NAME.to_string(), ECMASCRIPT_CLIENT_TRANSITION_NAME.into(),
Vc::upcast(NextEcmascriptClientReferenceTransition::new( Vc::upcast(NextEcmascriptClientReferenceTransition::new(
Vc::upcast(self.client_transition()), Vc::upcast(self.client_transition()),
self.ssr_transition(), self.ssr_transition(),
)), )),
), ),
( (
"next-dynamic".to_string(), "next-dynamic".into(),
Vc::upcast(NextDynamicTransition::new(Vc::upcast( Vc::upcast(NextDynamicTransition::new(Vc::upcast(
self.client_transition(), self.client_transition(),
))), ))),
), ),
("next-ssr".to_string(), Vc::upcast(self.ssr_transition())), ("next-ssr".into(), Vc::upcast(self.ssr_transition())),
( ("next-shared".into(), Vc::upcast(self.shared_transition())),
"next-shared".to_string(),
Vc::upcast(self.shared_transition()),
),
] ]
.into_iter() .into_iter()
.collect(); .collect();
@ -370,7 +361,7 @@ impl AppProject {
self.project().server_compile_time_info(), self.project().server_compile_time_info(),
self.route_module_options_context(), self.route_module_options_context(),
self.route_resolve_options_context(), self.route_resolve_options_context(),
Vc::cell("app-route".to_string()), Vc::cell("app-route".into()),
) )
} }
@ -378,21 +369,21 @@ impl AppProject {
fn edge_route_module_context(self: Vc<Self>) -> Vc<ModuleAssetContext> { fn edge_route_module_context(self: Vc<Self>) -> Vc<ModuleAssetContext> {
let transitions = [ let transitions = [
( (
ECMASCRIPT_CLIENT_TRANSITION_NAME.to_string(), ECMASCRIPT_CLIENT_TRANSITION_NAME.into(),
Vc::upcast(NextEcmascriptClientReferenceTransition::new( Vc::upcast(NextEcmascriptClientReferenceTransition::new(
Vc::upcast(self.client_transition()), Vc::upcast(self.client_transition()),
self.edge_ssr_transition(), self.edge_ssr_transition(),
)), )),
), ),
( (
"next-dynamic".to_string(), "next-dynamic".into(),
Vc::upcast(NextDynamicTransition::new(Vc::upcast( Vc::upcast(NextDynamicTransition::new(Vc::upcast(
self.client_transition(), self.client_transition(),
))), ))),
), ),
("next-ssr".to_string(), Vc::upcast(self.ssr_transition())), ("next-ssr".into(), Vc::upcast(self.ssr_transition())),
( (
"next-shared".to_string(), "next-shared".into(),
Vc::upcast(self.edge_shared_transition()), Vc::upcast(self.edge_shared_transition()),
), ),
] ]
@ -403,7 +394,7 @@ impl AppProject {
self.project().edge_compile_time_info(), self.project().edge_compile_time_info(),
self.edge_route_module_options_context(), self.edge_route_module_options_context(),
self.edge_route_resolve_options_context(), self.edge_route_resolve_options_context(),
Vc::cell("app-edge-route".to_string()), Vc::cell("app-edge-route".into()),
) )
} }
@ -414,7 +405,7 @@ impl AppProject {
self.project().client_compile_time_info(), self.project().client_compile_time_info(),
self.client_module_options_context(), self.client_module_options_context(),
self.client_resolve_options_context(), self.client_resolve_options_context(),
Vc::cell("app-client".to_string()), Vc::cell("app-client".into()),
) )
} }
@ -470,7 +461,7 @@ impl AppProject {
self.project().server_compile_time_info(), self.project().server_compile_time_info(),
self.ssr_module_options_context(), self.ssr_module_options_context(),
self.ssr_resolve_options_context(), self.ssr_resolve_options_context(),
Vc::cell("app-ssr".to_string()), Vc::cell("app-ssr".into()),
) )
} }
@ -480,7 +471,7 @@ impl AppProject {
self.project().server_compile_time_info(), self.project().server_compile_time_info(),
self.ssr_module_options_context(), self.ssr_module_options_context(),
self.ssr_resolve_options_context(), self.ssr_resolve_options_context(),
Vc::cell("app-shared".to_string()), Vc::cell("app-shared".into()),
) )
} }
@ -490,7 +481,7 @@ impl AppProject {
self.project().edge_compile_time_info(), self.project().edge_compile_time_info(),
self.edge_ssr_module_options_context(), self.edge_ssr_module_options_context(),
self.edge_ssr_resolve_options_context(), self.edge_ssr_resolve_options_context(),
Vc::cell("app-edge-ssr".to_string()), Vc::cell("app-edge-ssr".into()),
) )
} }
@ -500,7 +491,7 @@ impl AppProject {
self.project().edge_compile_time_info(), self.project().edge_compile_time_info(),
self.edge_ssr_module_options_context(), self.edge_ssr_module_options_context(),
self.edge_ssr_resolve_options_context(), self.edge_ssr_resolve_options_context(),
Vc::cell("app-edge-shared".to_string()), Vc::cell("app-edge-shared".into()),
) )
} }
@ -553,7 +544,7 @@ impl AppProject {
.iter() .iter()
.map(|(pathname, app_entrypoint)| async { .map(|(pathname, app_entrypoint)| async {
Ok(( Ok((
pathname.to_string(), pathname.to_string().into(),
app_entry_point_to_route(self, app_entrypoint.clone()) app_entry_point_to_route(self, app_entrypoint.clone())
.await? .await?
.clone_value(), .clone_value(),
@ -759,7 +750,7 @@ impl AppEndpoint {
let client_relative_path = this.app_project.project().client_relative_path(); let client_relative_path = this.app_project.project().client_relative_path();
let client_relative_path_ref = client_relative_path.await?; let client_relative_path_ref = client_relative_path.await?;
let server_path = node_root.join("server".to_string()); let server_path = node_root.join("server".into());
let mut server_assets = vec![]; let mut server_assets = vec![];
let mut client_assets = vec![]; let mut client_assets = vec![];
@ -781,7 +772,7 @@ impl AppEndpoint {
app_entry app_entry
.rsc_entry .rsc_entry
.ident() .ident()
.with_modifier(Vc::cell("client_shared_chunks".to_string())), .with_modifier(Vc::cell("client_shared_chunks".into())),
this.app_project.client_runtime_entries(), this.app_project.client_runtime_entries(),
client_chunking_context, client_chunking_context,
) )
@ -794,7 +785,7 @@ impl AppEndpoint {
let chunk_path = chunk.ident().path().await?; let chunk_path = chunk.ident().path().await?;
if chunk_path.extension_ref() == Some("js") { if chunk_path.extension_ref() == Some("js") {
if let Some(chunk_path) = client_relative_path_ref.get_path_to(&chunk_path) { if let Some(chunk_path) = client_relative_path_ref.get_path_to(&chunk_path) {
client_shared_chunks_paths.push(chunk_path.to_string()); client_shared_chunks_paths.push(chunk_path.into());
} }
} }
} }
@ -878,7 +869,7 @@ impl AppEndpoint {
Ok(client_relative_path_ref Ok(client_relative_path_ref
.get_path_to(path) .get_path_to(path)
.context("asset path should be inside client root")? .context("asset path should be inside client root")?
.to_string()) .into())
}) })
.collect::<anyhow::Result<Vec<_>>>()?; .collect::<anyhow::Result<Vec<_>>>()?;
entry_client_chunks_paths.extend(client_shared_chunks_paths.iter().cloned()); entry_client_chunks_paths.extend(client_shared_chunks_paths.iter().cloned());
@ -890,9 +881,9 @@ impl AppEndpoint {
}; };
let manifest_path_prefix = &app_entry.original_name; let manifest_path_prefix = &app_entry.original_name;
let app_build_manifest_output = Vc::upcast(VirtualOutputAsset::new( let app_build_manifest_output = Vc::upcast(VirtualOutputAsset::new(
node_root.join(format!( node_root.join(
"server/app{manifest_path_prefix}/app-build-manifest.json", format!("server/app{manifest_path_prefix}/app-build-manifest.json",).into(),
)), ),
AssetContent::file( AssetContent::file(
File::from(serde_json::to_string_pretty(&app_build_manifest)?).into(), File::from(serde_json::to_string_pretty(&app_build_manifest)?).into(),
), ),
@ -903,16 +894,16 @@ impl AppEndpoint {
// load it as a RawModule. // load it as a RawModule.
let next_package = get_next_package(this.app_project.project().project_path()); let next_package = get_next_package(this.app_project.project().project_path());
let polyfill_source = FileSource::new( let polyfill_source = FileSource::new(
next_package.join("dist/build/polyfills/polyfill-nomodule.js".to_string()), next_package.join("dist/build/polyfills/polyfill-nomodule.js".into()),
); );
let polyfill_output_path = let polyfill_output_path =
client_chunking_context.chunk_path(polyfill_source.ident(), ".js".to_string()); client_chunking_context.chunk_path(polyfill_source.ident(), ".js".into());
let polyfill_output_asset = let polyfill_output_asset =
RawOutput::new(polyfill_output_path, Vc::upcast(polyfill_source)); RawOutput::new(polyfill_output_path, Vc::upcast(polyfill_source));
let polyfill_client_path = client_relative_path_ref let polyfill_client_path = client_relative_path_ref
.get_path_to(&*polyfill_output_path.await?) .get_path_to(&*polyfill_output_path.await?)
.context("failed to resolve client-relative path to polyfill")? .context("failed to resolve client-relative path to polyfill")?
.to_string(); .into();
let polyfill_client_paths = vec![polyfill_client_path]; let polyfill_client_paths = vec![polyfill_client_path];
client_assets.push(Vc::upcast(polyfill_output_asset)); client_assets.push(Vc::upcast(polyfill_output_asset));
@ -922,9 +913,8 @@ impl AppEndpoint {
..Default::default() ..Default::default()
}; };
let build_manifest_output = Vc::upcast(VirtualOutputAsset::new( let build_manifest_output = Vc::upcast(VirtualOutputAsset::new(
node_root.join(format!( node_root
"server/app{manifest_path_prefix}/build-manifest.json", .join(format!("server/app{manifest_path_prefix}/build-manifest.json",).into()),
)),
AssetContent::file( AssetContent::file(
File::from(serde_json::to_string_pretty(&build_manifest)?).into(), File::from(serde_json::to_string_pretty(&build_manifest)?).into(),
), ),
@ -973,17 +963,14 @@ impl AppEndpoint {
fn create_app_paths_manifest( fn create_app_paths_manifest(
node_root: Vc<FileSystemPath>, node_root: Vc<FileSystemPath>,
original_name: &str, original_name: &str,
filename: String, filename: RcStr,
) -> Result<Vc<Box<dyn OutputAsset>>> { ) -> Result<Vc<Box<dyn OutputAsset>>> {
let manifest_path_prefix = original_name; let manifest_path_prefix = original_name;
let path = node_root.join(format!( let path = node_root
"server/app{manifest_path_prefix}/app-paths-manifest.json", .join(format!("server/app{manifest_path_prefix}/app-paths-manifest.json",).into());
));
let app_paths_manifest = AppPathsManifest { let app_paths_manifest = AppPathsManifest {
node_server_app_paths: PagesManifest { node_server_app_paths: PagesManifest {
pages: [(original_name.to_string(), filename)] pages: [(original_name.into(), filename)].into_iter().collect(),
.into_iter()
.collect(),
}, },
..Default::default() ..Default::default()
}; };
@ -1057,11 +1044,11 @@ impl AppEndpoint {
// //
// they are created in `setup-dev-bundler.ts` // they are created in `setup-dev-bundler.ts`
let mut file_paths_from_root = vec![ let mut file_paths_from_root = vec![
"server/server-reference-manifest.js".to_string(), "server/server-reference-manifest.js".into(),
"server/middleware-build-manifest.js".to_string(), "server/middleware-build-manifest.js".into(),
"server/middleware-react-loadable-manifest.js".to_string(), "server/middleware-react-loadable-manifest.js".into(),
"server/next-font-manifest.js".to_string(), "server/next-font-manifest.js".into(),
"server/interception-route-rewrite-manifest.js".to_string(), "server/interception-route-rewrite-manifest.js".into(),
]; ];
let mut wasm_paths_from_root = vec![]; let mut wasm_paths_from_root = vec![];
@ -1079,20 +1066,20 @@ impl AppEndpoint {
wasm_paths_from_root wasm_paths_from_root
.extend(get_wasm_paths_from_root(&node_root_value, &all_output_assets).await?); .extend(get_wasm_paths_from_root(&node_root_value, &all_output_assets).await?);
let entry_file = "app-edge-has-no-entrypoint".to_string(); let entry_file = "app-edge-has-no-entrypoint".into();
// create middleware manifest // create middleware manifest
// TODO(alexkirsz) This should be shared with next build. // TODO(alexkirsz) This should be shared with next build.
let named_regex = get_named_middleware_regex(&app_entry.pathname); let named_regex = get_named_middleware_regex(&app_entry.pathname);
let matchers = MiddlewareMatcher { let matchers = MiddlewareMatcher {
regexp: Some(named_regex), regexp: Some(named_regex.into()),
original_source: app_entry.pathname.clone(), original_source: app_entry.pathname.clone(),
..Default::default() ..Default::default()
}; };
let edge_function_definition = EdgeFunctionDefinition { let edge_function_definition = EdgeFunctionDefinition {
files: file_paths_from_root, files: file_paths_from_root,
wasm: wasm_paths_to_bindings(wasm_paths_from_root), wasm: wasm_paths_to_bindings(wasm_paths_from_root),
name: app_entry.pathname.to_string(), name: app_entry.pathname.clone(),
page: app_entry.original_name.clone(), page: app_entry.original_name.clone(),
regions: app_entry regions: app_entry
.config .config
@ -1113,9 +1100,10 @@ impl AppEndpoint {
}; };
let manifest_path_prefix = &app_entry.original_name; let manifest_path_prefix = &app_entry.original_name;
let middleware_manifest_v2 = Vc::upcast(VirtualOutputAsset::new( let middleware_manifest_v2 = Vc::upcast(VirtualOutputAsset::new(
node_root.join(format!( node_root.join(
"server/app{manifest_path_prefix}/middleware-manifest.json", format!("server/app{manifest_path_prefix}/middleware-manifest.json",)
)), .into(),
),
AssetContent::file( AssetContent::file(
FileContent::Content(File::from(serde_json::to_string_pretty( FileContent::Content(File::from(serde_json::to_string_pretty(
&middleware_manifest_v2, &middleware_manifest_v2,
@ -1145,10 +1133,13 @@ impl AppEndpoint {
let loadable_manifest_output = create_react_loadable_manifest( let loadable_manifest_output = create_react_loadable_manifest(
dynamic_import_entries, dynamic_import_entries,
client_relative_path, client_relative_path,
node_root.join(format!( node_root.join(
"server/app{}/react-loadable-manifest.json", format!(
&app_entry.original_name "server/app{}/react-loadable-manifest.json",
)), &app_entry.original_name
)
.into(),
),
); );
server_assets.extend(loadable_manifest_output.await?.iter().copied()); server_assets.extend(loadable_manifest_output.await?.iter().copied());
@ -1188,10 +1179,13 @@ impl AppEndpoint {
} = *{ } = *{
let _span = tracing::trace_span!("server node entrypoint").entered(); let _span = tracing::trace_span!("server node entrypoint").entered();
chunking_context.entry_chunk_group( chunking_context.entry_chunk_group(
server_path.join(format!( server_path.join(
"app{original_name}.js", format!(
original_name = app_entry.original_name "app{original_name}.js",
)), original_name = app_entry.original_name
)
.into(),
),
app_entry.rsc_entry, app_entry.rsc_entry,
Vc::cell(evaluatable_assets), Vc::cell(evaluatable_assets),
Value::new(AvailabilityInfo::Root), Value::new(AvailabilityInfo::Root),
@ -1207,7 +1201,7 @@ impl AppEndpoint {
.await? .await?
.get_path_to(&*rsc_chunk.ident().path().await?) .get_path_to(&*rsc_chunk.ident().path().await?)
.context("RSC chunk path should be within app paths manifest directory")? .context("RSC chunk path should be within app paths manifest directory")?
.to_string(), .into(),
)?; )?;
server_assets.push(app_paths_manifest_output); server_assets.push(app_paths_manifest_output);
@ -1228,10 +1222,13 @@ impl AppEndpoint {
let loadable_manifest_output = create_react_loadable_manifest( let loadable_manifest_output = create_react_loadable_manifest(
dynamic_import_entries, dynamic_import_entries,
client_relative_path, client_relative_path,
node_root.join(format!( node_root.join(
"server/app{}/react-loadable-manifest.json", format!(
&app_entry.original_name "server/app{}/react-loadable-manifest.json",
)), &app_entry.original_name
)
.into(),
),
); );
server_assets.extend(loadable_manifest_output.await?.iter().copied()); server_assets.extend(loadable_manifest_output.await?.iter().copied());
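The pattern in the hunks above repeats across the rest of the diff: paths and names previously built with `.to_string()` are now built with `.into()`, and owned `String` parameters become `RcStr`. As a rough mental model only, an `RcStr`-like type could be a newtype over `Arc<str>` that is cheap to clone and converts from `String` and `&str`; this is an assumption for illustration, not the actual turbo-tasks implementation, which this diff does not show.

```rust
// Minimal sketch (assumption): an RcStr-like newtype over Arc<str>.
// Cloning bumps a reference count instead of copying the string data,
// and From<String>/From<&str> make `.into()` work at call sites like the ones above.
use std::{fmt, ops::Deref, sync::Arc};

#[derive(Clone, PartialEq, Eq, Hash)]
struct RcStr(Arc<str>);

impl From<String> for RcStr {
    fn from(s: String) -> Self {
        RcStr(Arc::from(s))
    }
}

impl From<&str> for RcStr {
    fn from(s: &str) -> Self {
        RcStr(Arc::from(s))
    }
}

impl Deref for RcStr {
    type Target = str;
    fn deref(&self) -> &str {
        &self.0
    }
}

impl fmt::Display for RcStr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

fn main() {
    // `.into()` replaces `.to_string()`, and cloning is cheap.
    let path: RcStr = format!("server/app{}/build-manifest.json", "/page").into();
    let alias = path.clone();
    assert_eq!(&*path, &*alias);
}
```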


@ -6,7 +6,7 @@ use indexmap::IndexMap;
use tracing::Level; use tracing::Level;
use turbo_tasks::{ use turbo_tasks::{
graph::{GraphTraversal, NonDeterministic, VisitControlFlow}, graph::{GraphTraversal, NonDeterministic, VisitControlFlow},
ReadRef, TryJoinIterExt, Value, ValueToString, Vc, RcStr, ReadRef, TryJoinIterExt, Value, ValueToString, Vc,
}; };
use turbopack_binding::{ use turbopack_binding::{
swc::core::ecma::{ swc::core::ecma::{
@ -39,7 +39,7 @@ where
F: FnMut(Vc<Box<dyn ChunkableModule>>) -> Fu, F: FnMut(Vc<Box<dyn ChunkableModule>>) -> Fu,
Fu: Future<Output = Result<Vc<OutputAssets>>> + Send, Fu: Future<Output = Result<Vc<OutputAssets>>> + Send,
{ {
let mut chunks_hash: HashMap<String, Vc<OutputAssets>> = HashMap::new(); let mut chunks_hash: HashMap<RcStr, Vc<OutputAssets>> = HashMap::new();
let mut dynamic_import_chunks = IndexMap::new(); let mut dynamic_import_chunks = IndexMap::new();
// Iterate over the collected import mappings, and create a chunk for each // Iterate over the collected import mappings, and create a chunk for each
@ -62,7 +62,7 @@ where
// chunks in case if there are same modules being imported in differnt // chunks in case if there are same modules being imported in differnt
// origins. // origins.
let chunk_group = build_chunk(module).await?; let chunk_group = build_chunk(module).await?;
chunks_hash.insert(imported_raw_str.to_string(), chunk_group); chunks_hash.insert(imported_raw_str.clone(), chunk_group);
chunk_group chunk_group
}; };
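In the hunk above, the `chunks_hash` key changes from `String` to `RcStr`, and the insert clones the key instead of allocating a new string. A small sketch of that map-key pattern, with `Arc<str>` standing in for `RcStr` (an assumption for illustration):

```rust
use std::{collections::HashMap, sync::Arc};

fn main() {
    // Keyed by a cheaply clonable string type instead of String.
    let mut chunks_hash: HashMap<Arc<str>, Vec<&'static str>> = HashMap::new();

    let imported_raw_str: Arc<str> = Arc::from("./dynamic-component");
    // Cloning the key only bumps a reference count.
    chunks_hash.insert(imported_raw_str.clone(), vec!["chunks/dynamic-component.js"]);

    // Lookups with a plain &str still work because Arc<str> borrows as str.
    assert!(chunks_hash.contains_key("./dynamic-component"));
}
```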
@ -178,22 +178,19 @@ pub(crate) async fn collect_next_dynamic_imports(
struct NextDynamicVisit; struct NextDynamicVisit;
impl turbo_tasks::graph::Visit<(Vc<Box<dyn Module>>, ReadRef<String>)> for NextDynamicVisit { impl turbo_tasks::graph::Visit<(Vc<Box<dyn Module>>, ReadRef<RcStr>)> for NextDynamicVisit {
type Edge = (Vc<Box<dyn Module>>, ReadRef<String>); type Edge = (Vc<Box<dyn Module>>, ReadRef<RcStr>);
type EdgesIntoIter = Vec<Self::Edge>; type EdgesIntoIter = Vec<Self::Edge>;
type EdgesFuture = impl Future<Output = Result<Self::EdgesIntoIter>>; type EdgesFuture = impl Future<Output = Result<Self::EdgesIntoIter>>;
fn visit( fn visit(
&mut self, &mut self,
edge: Self::Edge, edge: Self::Edge,
) -> VisitControlFlow<(Vc<Box<dyn Module>>, ReadRef<String>)> { ) -> VisitControlFlow<(Vc<Box<dyn Module>>, ReadRef<RcStr>)> {
VisitControlFlow::Continue(edge) VisitControlFlow::Continue(edge)
} }
fn edges( fn edges(&mut self, &(parent, _): &(Vc<Box<dyn Module>>, ReadRef<RcStr>)) -> Self::EdgesFuture {
&mut self,
&(parent, _): &(Vc<Box<dyn Module>>, ReadRef<String>),
) -> Self::EdgesFuture {
async move { async move {
primary_referenced_modules(parent) primary_referenced_modules(parent)
.await? .await?
@ -204,7 +201,7 @@ impl turbo_tasks::graph::Visit<(Vc<Box<dyn Module>>, ReadRef<String>)> for NextD
} }
} }
fn span(&mut self, (_, name): &(Vc<Box<dyn Module>>, ReadRef<String>)) -> tracing::Span { fn span(&mut self, (_, name): &(Vc<Box<dyn Module>>, ReadRef<RcStr>)) -> tracing::Span {
tracing::span!(Level::INFO, "next/dynamic visit", name = display(name)) tracing::span!(Level::INFO, "next/dynamic visit", name = display(name))
} }
} }
@ -244,7 +241,7 @@ async fn build_dynamic_imports_map_for_module(
client_asset_context, client_asset_context,
server_module.ident().path(), server_module.ident().path(),
)), )),
Request::parse(Value::new(Pattern::Constant(import.to_string()))), Request::parse(Value::new(Pattern::Constant(import.clone()))),
Value::new(EcmaScriptModulesReferenceSubType::DynamicImport), Value::new(EcmaScriptModulesReferenceSubType::DynamicImport),
IssueSeverity::Error.cell(), IssueSeverity::Error.cell(),
None, None,
@ -264,7 +261,7 @@ async fn build_dynamic_imports_map_for_module(
/// import wrapped with dynamic() via CollectImportSourceVisitor. /// import wrapped with dynamic() via CollectImportSourceVisitor.
struct DynamicImportVisitor { struct DynamicImportVisitor {
dynamic_ident: Option<Ident>, dynamic_ident: Option<Ident>,
pub import_sources: Vec<String>, pub import_sources: Vec<RcStr>,
} }
impl DynamicImportVisitor { impl DynamicImportVisitor {
@ -309,7 +306,7 @@ impl Visit for DynamicImportVisitor {
/// A visitor to collect import source string from import('path/to/module') /// A visitor to collect import source string from import('path/to/module')
struct CollectImportSourceVisitor { struct CollectImportSourceVisitor {
import_source: Option<String>, import_source: Option<RcStr>,
} }
impl CollectImportSourceVisitor { impl CollectImportSourceVisitor {
@ -329,7 +326,7 @@ impl Visit for CollectImportSourceVisitor {
if let Callee::Import(_import) = call_expr.callee { if let Callee::Import(_import) = call_expr.callee {
if let Some(arg) = call_expr.args.first() { if let Some(arg) = call_expr.args.first() {
if let Expr::Lit(Lit::Str(str_)) = &*arg.expr { if let Expr::Lit(Lit::Str(str_)) = &*arg.expr {
self.import_source = Some(str_.value.to_string()); self.import_source = Some(str_.value.as_str().into());
} }
} }
} }
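Note that `import_source` is filled with `str_.value.as_str().into()` rather than `str_.value.into()`. A hedged sketch of why, with `Arc<str>` standing in for `RcStr` and a hypothetical `Atom` wrapper in place of swc's string type: borrowing the third-party wrapper as `&str` first lets the ordinary `From<&str>` conversion take over.

```rust
use std::sync::Arc;

// Hypothetical stand-in for a third-party string wrapper (e.g. an swc atom).
struct Atom(String);

impl Atom {
    fn as_str(&self) -> &str {
        &self.0
    }
}

struct CollectImportSourceVisitor {
    // Stand-in for Option<RcStr>.
    import_source: Option<Arc<str>>,
}

fn main() {
    let value = Atom("path/to/module".to_string());
    let mut visitor = CollectImportSourceVisitor { import_source: None };
    // Borrow the wrapper as &str, then convert into the shared string type.
    visitor.import_source = Some(value.as_str().into());
    assert_eq!(visitor.import_source.as_deref(), Some("path/to/module"));
}
```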
@ -339,8 +336,8 @@ impl Visit for CollectImportSourceVisitor {
} }
} }
pub type DynamicImportedModules = Vec<(String, Vc<Box<dyn Module>>)>; pub type DynamicImportedModules = Vec<(RcStr, Vc<Box<dyn Module>>)>;
pub type DynamicImportedOutputAssets = Vec<(String, Vc<OutputAssets>)>; pub type DynamicImportedOutputAssets = Vec<(RcStr, Vc<OutputAssets>)>;
/// A struct contains mapping for the dynamic imports to construct chunk per /// A struct contains mapping for the dynamic imports to construct chunk per
/// each individual module (Origin Module, Vec<(ImportSourceString, Module)>) /// each individual module (Origin Module, Vec<(ImportSourceString, Module)>)


@ -1,5 +1,5 @@
use indexmap::IndexMap; use indexmap::IndexMap;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use crate::{ use crate::{
project::{Instrumentation, Middleware}, project::{Instrumentation, Middleware},
@ -8,7 +8,7 @@ use crate::{
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub struct Entrypoints { pub struct Entrypoints {
pub routes: IndexMap<String, Route>, pub routes: IndexMap<RcStr, Route>,
pub middleware: Option<Middleware>, pub middleware: Option<Middleware>,
pub instrumentation: Option<Instrumentation>, pub instrumentation: Option<Instrumentation>,
pub pages_document_endpoint: Vc<Box<dyn Endpoint>>, pub pages_document_endpoint: Vc<Box<dyn Endpoint>>,


@ -1,6 +1,6 @@
use anyhow::Result; use anyhow::Result;
use next_core::{all_assets_from_entries, next_manifests::NextFontManifest}; use next_core::{all_assets_from_entries, next_manifests::NextFontManifest};
use turbo_tasks::{ValueToString, Vc}; use turbo_tasks::{RcStr, ValueToString, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{File, FileSystemPath}, turbo::tasks_fs::{File, FileSystemPath},
turbopack::core::{ turbopack::core::{
@ -34,13 +34,9 @@ pub(crate) async fn create_font_manifest(
.collect(); .collect();
let path = if app_dir { let path = if app_dir {
node_root.join(format!( node_root.join(format!("server/app{manifest_path_prefix}/next-font-manifest.json",).into())
"server/app{manifest_path_prefix}/next-font-manifest.json",
))
} else { } else {
node_root.join(format!( node_root.join(format!("server/pages{manifest_path_prefix}/next-font-manifest.json").into())
"server/pages{manifest_path_prefix}/next-font-manifest.json"
))
}; };
let has_fonts = !font_paths.is_empty(); let has_fonts = !font_paths.is_empty();
@ -49,13 +45,14 @@ pub(crate) async fn create_font_manifest(
let font_paths = font_paths let font_paths = font_paths
.into_iter() .into_iter()
.filter(|path| path.contains(".p.")) .filter(|path| path.contains(".p."))
.map(RcStr::from)
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let next_font_manifest = if !has_fonts { let next_font_manifest = if !has_fonts {
Default::default() Default::default()
} else if app_dir { } else if app_dir {
let dir_str = dir.to_string().await?; let dir_str = dir.to_string().await?;
let page_path = format!("{}{}", dir_str, original_name); let page_path = format!("{}{}", dir_str, original_name).into();
NextFontManifest { NextFontManifest {
app: [(page_path, font_paths)].into_iter().collect(), app: [(page_path, font_paths)].into_iter().collect(),
@ -64,7 +61,7 @@ pub(crate) async fn create_font_manifest(
} }
} else { } else {
NextFontManifest { NextFontManifest {
pages: [(pathname.to_string(), font_paths)].into_iter().collect(), pages: [(pathname.into(), font_paths)].into_iter().collect(),
pages_using_size_adjust: using_size_adjust, pages_using_size_adjust: using_size_adjust,
..Default::default() ..Default::default()
} }


@ -73,7 +73,7 @@ impl InstrumentationEndpoint {
self.context, self.context,
self.project.project_path(), self.project.project_path(),
userland_module, userland_module,
"instrumentation".to_string(), "instrumentation".into(),
); );
let mut evaluatable_assets = get_server_runtime_entries( let mut evaluatable_assets = get_server_runtime_entries(
@ -126,7 +126,7 @@ impl InstrumentationEndpoint {
.entry_chunk_group( .entry_chunk_group(
self.project self.project
.node_root() .node_root()
.join("server/instrumentation.js".to_string()), .join("server/instrumentation.js".into()),
module, module,
get_server_runtime_entries( get_server_runtime_entries(
Value::new(ServerContextType::Instrumentation), Value::new(ServerContextType::Instrumentation),
@ -161,7 +161,7 @@ impl InstrumentationEndpoint {
let instrumentation_definition = InstrumentationDefinition { let instrumentation_definition = InstrumentationDefinition {
files: file_paths_from_root, files: file_paths_from_root,
wasm: wasm_paths_to_bindings(wasm_paths_from_root), wasm: wasm_paths_to_bindings(wasm_paths_from_root),
name: "instrumentation".to_string(), name: "instrumentation".into(),
..Default::default() ..Default::default()
}; };
let middleware_manifest_v2 = MiddlewaresManifestV2 { let middleware_manifest_v2 = MiddlewaresManifestV2 {
@ -169,7 +169,7 @@ impl InstrumentationEndpoint {
..Default::default() ..Default::default()
}; };
let middleware_manifest_v2 = Vc::upcast(VirtualOutputAsset::new( let middleware_manifest_v2 = Vc::upcast(VirtualOutputAsset::new(
node_root.join("server/instrumentation/middleware-manifest.json".to_string()), node_root.join("server/instrumentation/middleware-manifest.json".into()),
AssetContent::file( AssetContent::file(
FileContent::Content(File::from(serde_json::to_string_pretty( FileContent::Content(File::from(serde_json::to_string_pretty(
&middleware_manifest_v2, &middleware_manifest_v2,


@ -2,7 +2,7 @@ use std::collections::HashMap;
use anyhow::Result; use anyhow::Result;
use next_core::next_manifests::LoadableManifest; use next_core::next_manifests::LoadableManifest;
use turbo_tasks::{TryFlatJoinIterExt, Vc}; use turbo_tasks::{RcStr, TryFlatJoinIterExt, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{File, FileContent, FileSystemPath}, turbo::tasks_fs::{File, FileContent, FileSystemPath},
turbopack::core::{ turbopack::core::{
@ -24,7 +24,7 @@ pub async fn create_react_loadable_manifest(
let dynamic_import_entries = &*dynamic_import_entries.await?; let dynamic_import_entries = &*dynamic_import_entries.await?;
let mut output = vec![]; let mut output = vec![];
let mut loadable_manifest: HashMap<String, LoadableManifest> = Default::default(); let mut loadable_manifest: HashMap<RcStr, LoadableManifest> = Default::default();
for (origin, dynamic_imports) in dynamic_import_entries.into_iter() { for (origin, dynamic_imports) in dynamic_import_entries.into_iter() {
let origin_path = &*origin.ident().path().await?; let origin_path = &*origin.ident().path().await?;
@ -33,7 +33,7 @@ pub async fn create_react_loadable_manifest(
let chunk_output = chunk_output.await?; let chunk_output = chunk_output.await?;
output.extend(chunk_output.iter().copied()); output.extend(chunk_output.iter().copied());
let id = format!("{} -> {}", origin_path, import); let id: RcStr = format!("{} -> {}", origin_path, import).into();
let client_relative_path_value = client_relative_path.await?; let client_relative_path_value = client_relative_path.await?;
let files = chunk_output let files = chunk_output
@ -43,7 +43,7 @@ pub async fn create_react_loadable_manifest(
async move { async move {
Ok(client_relative_path_value Ok(client_relative_path_value
.get_path_to(&*file.ident().path().await?) .get_path_to(&*file.ident().path().await?)
.map(|path| path.to_string())) .map(|path| path.into()))
} }
}) })
.try_flat_join() .try_flat_join()
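Two conversion styles appear in this file: `let id: RcStr = format!(..).into()` carries an explicit annotation, while `node_root.join(format!(..).into())` does not. The difference is only type inference: `format!` returns a `String`, so `.into()` needs its target type either from an annotation or from the parameter it feeds. A sketch with `Arc<str>` standing in for `RcStr` (an assumption) and a hypothetical `join` helper:

```rust
use std::sync::Arc;

// Hypothetical helper with a typed parameter, loosely mirroring a join(path) call.
fn join(path: Arc<str>) -> Arc<str> {
    path
}

fn main() {
    let origin_path = "app/page.tsx";
    let import = "./client-component";

    // The annotation tells `.into()` what to convert the String into...
    let id: Arc<str> = format!("{} -> {}", origin_path, import).into();

    // ...or the parameter type does, so no annotation is needed here.
    let manifest = join(format!("server/app{}/react-loadable-manifest.json", "/page").into());

    assert!(id.contains(" -> "));
    assert!(manifest.ends_with(".json"));
}
```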


@ -75,7 +75,7 @@ impl MiddlewareEndpoint {
self.context, self.context,
self.project.project_path(), self.project.project_path(),
module, module,
"middleware".to_string(), "middleware".into(),
); );
let mut evaluatable_assets = get_server_runtime_entries( let mut evaluatable_assets = get_server_runtime_entries(
@ -140,7 +140,7 @@ impl MiddlewareEndpoint {
.iter() .iter()
.map(|matcher| match matcher { .map(|matcher| match matcher {
MiddlewareMatcherKind::Str(matchers) => MiddlewareMatcher { MiddlewareMatcherKind::Str(matchers) => MiddlewareMatcher {
original_source: matchers.to_string(), original_source: matchers.as_str().into(),
..Default::default() ..Default::default()
}, },
MiddlewareMatcherKind::Matcher(matcher) => matcher.clone(), MiddlewareMatcherKind::Matcher(matcher) => matcher.clone(),
@ -148,8 +148,8 @@ impl MiddlewareEndpoint {
.collect() .collect()
} else { } else {
vec![MiddlewareMatcher { vec![MiddlewareMatcher {
regexp: Some("^/.*$".to_string()), regexp: Some("^/.*$".into()),
original_source: "/:path*".to_string(), original_source: "/:path*".into(),
..Default::default() ..Default::default()
}] }]
}; };
@ -157,21 +157,21 @@ impl MiddlewareEndpoint {
let edge_function_definition = EdgeFunctionDefinition { let edge_function_definition = EdgeFunctionDefinition {
files: file_paths_from_root, files: file_paths_from_root,
wasm: wasm_paths_to_bindings(wasm_paths_from_root), wasm: wasm_paths_to_bindings(wasm_paths_from_root),
name: "middleware".to_string(), name: "middleware".into(),
page: "/".to_string(), page: "/".into(),
regions: None, regions: None,
matchers, matchers,
env: this.project.edge_env().await?.clone_value(), env: this.project.edge_env().await?.clone_value(),
..Default::default() ..Default::default()
}; };
let middleware_manifest_v2 = MiddlewaresManifestV2 { let middleware_manifest_v2 = MiddlewaresManifestV2 {
middleware: [("/".to_string(), edge_function_definition)] middleware: [("/".into(), edge_function_definition)]
.into_iter() .into_iter()
.collect(), .collect(),
..Default::default() ..Default::default()
}; };
let middleware_manifest_v2 = Vc::upcast(VirtualOutputAsset::new( let middleware_manifest_v2 = Vc::upcast(VirtualOutputAsset::new(
node_root.join("server/middleware/middleware-manifest.json".to_string()), node_root.join("server/middleware/middleware-manifest.json".into()),
AssetContent::file( AssetContent::file(
FileContent::Content(File::from(serde_json::to_string_pretty( FileContent::Content(File::from(serde_json::to_string_pretty(
&middleware_manifest_v2, &middleware_manifest_v2,


@ -27,7 +27,7 @@ use next_core::{
}; };
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::Instrument; use tracing::Instrument;
use turbo_tasks::{trace::TraceRawVcs, Completion, TaskInput, TryJoinIterExt, Value, Vc}; use turbo_tasks::{trace::TraceRawVcs, Completion, RcStr, TaskInput, TryJoinIterExt, Value, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{ turbo::tasks_fs::{
File, FileContent, FileSystem, FileSystemPath, FileSystemPathOption, VirtualFileSystem, File, FileContent, FileSystem, FileSystemPath, FileSystemPathOption, VirtualFileSystem,
@ -104,27 +104,27 @@ impl PagesProject {
let mut routes = IndexMap::new(); let mut routes = IndexMap::new();
async fn add_page_to_routes( async fn add_page_to_routes(
routes: &mut IndexMap<String, Route>, routes: &mut IndexMap<RcStr, Route>,
page: Vc<PagesStructureItem>, page: Vc<PagesStructureItem>,
make_route: impl Fn(Vc<String>, Vc<String>, Vc<FileSystemPath>) -> Route, make_route: impl Fn(Vc<RcStr>, Vc<RcStr>, Vc<FileSystemPath>) -> Route,
) -> Result<()> { ) -> Result<()> {
let PagesStructureItem { let PagesStructureItem {
next_router_path, next_router_path,
project_path, project_path,
original_path, original_path,
} = *page.await?; } = *page.await?;
let pathname = format!("/{}", next_router_path.await?.path); let pathname: RcStr = format!("/{}", next_router_path.await?.path).into();
let pathname_vc = Vc::cell(pathname.clone()); let pathname_vc = Vc::cell(pathname.clone());
let original_name = Vc::cell(format!("/{}", original_path.await?.path)); let original_name = Vc::cell(format!("/{}", original_path.await?.path).into());
let route = make_route(pathname_vc, original_name, project_path); let route = make_route(pathname_vc, original_name, project_path);
routes.insert(pathname, route); routes.insert(pathname, route);
Ok(()) Ok(())
} }
async fn add_dir_to_routes( async fn add_dir_to_routes(
routes: &mut IndexMap<String, Route>, routes: &mut IndexMap<RcStr, Route>,
dir: Vc<PagesDirectoryStructure>, dir: Vc<PagesDirectoryStructure>,
make_route: impl Fn(Vc<String>, Vc<String>, Vc<FileSystemPath>) -> Route, make_route: impl Fn(Vc<RcStr>, Vc<RcStr>, Vc<FileSystemPath>) -> Route,
) -> Result<()> { ) -> Result<()> {
let mut queue = vec![dir]; let mut queue = vec![dir];
while let Some(dir) = queue.pop() { while let Some(dir) = queue.pop() {
@ -201,9 +201,9 @@ impl PagesProject {
project_path, project_path,
original_path, original_path,
} = *item.await?; } = *item.await?;
let pathname = format!("/{}", next_router_path.await?.path); let pathname: RcStr = format!("/{}", next_router_path.await?.path).into();
let pathname_vc = Vc::cell(pathname.clone()); let pathname_vc = Vc::cell(pathname.clone());
let original_name = Vc::cell(format!("/{}", original_path.await?.path)); let original_name = Vc::cell(format!("/{}", original_path.await?.path).into());
let path = project_path; let path = project_path;
let endpoint = Vc::upcast(PageEndpoint::new( let endpoint = Vc::upcast(PageEndpoint::new(
ty, ty,
@ -255,7 +255,7 @@ impl PagesProject {
Ok(if let Some(pages) = self.pages_structure().await?.pages { Ok(if let Some(pages) = self.pages_structure().await?.pages {
pages.project_path() pages.project_path()
} else { } else {
self.project().project_path().join("pages".to_string()) self.project().project_path().join("pages".into())
}) })
} }
@ -263,7 +263,7 @@ impl PagesProject {
fn transitions(self: Vc<Self>) -> Vc<TransitionsByName> { fn transitions(self: Vc<Self>) -> Vc<TransitionsByName> {
Vc::cell( Vc::cell(
[( [(
"next-dynamic".to_string(), "next-dynamic".into(),
Vc::upcast(NextDynamicTransition::new(Vc::upcast( Vc::upcast(NextDynamicTransition::new(Vc::upcast(
self.client_transition(), self.client_transition(),
))), ))),
@ -279,7 +279,7 @@ impl PagesProject {
self.project().client_compile_time_info(), self.project().client_compile_time_info(),
self.client_module_options_context(), self.client_module_options_context(),
self.client_resolve_options_context(), self.client_resolve_options_context(),
Vc::cell("client".to_string()), Vc::cell("client".into()),
) )
} }
@ -317,7 +317,7 @@ impl PagesProject {
self.project().client_compile_time_info(), self.project().client_compile_time_info(),
self.client_module_options_context(), self.client_module_options_context(),
self.client_resolve_options_context(), self.client_resolve_options_context(),
Vc::cell("client".to_string()), Vc::cell("client".into()),
)) ))
} }
@ -328,7 +328,7 @@ impl PagesProject {
self.project().server_compile_time_info(), self.project().server_compile_time_info(),
self.ssr_module_options_context(), self.ssr_module_options_context(),
self.ssr_resolve_options_context(), self.ssr_resolve_options_context(),
Vc::cell("ssr".to_string()), Vc::cell("ssr".into()),
) )
} }
@ -341,7 +341,7 @@ impl PagesProject {
self.project().server_compile_time_info(), self.project().server_compile_time_info(),
self.api_module_options_context(), self.api_module_options_context(),
self.ssr_resolve_options_context(), self.ssr_resolve_options_context(),
Vc::cell("api".to_string()), Vc::cell("api".into()),
) )
} }
@ -352,7 +352,7 @@ impl PagesProject {
self.project().server_compile_time_info(), self.project().server_compile_time_info(),
self.ssr_data_module_options_context(), self.ssr_data_module_options_context(),
self.ssr_resolve_options_context(), self.ssr_resolve_options_context(),
Vc::cell("ssr-data".to_string()), Vc::cell("ssr-data".into()),
) )
} }
@ -363,7 +363,7 @@ impl PagesProject {
self.project().edge_compile_time_info(), self.project().edge_compile_time_info(),
self.edge_ssr_module_options_context(), self.edge_ssr_module_options_context(),
self.edge_ssr_resolve_options_context(), self.edge_ssr_resolve_options_context(),
Vc::cell("edge-ssr".to_string()), Vc::cell("edge-ssr".into()),
) )
} }
@ -374,7 +374,7 @@ impl PagesProject {
self.project().edge_compile_time_info(), self.project().edge_compile_time_info(),
self.edge_api_module_options_context(), self.edge_api_module_options_context(),
self.edge_ssr_resolve_options_context(), self.edge_ssr_resolve_options_context(),
Vc::cell("edge-api".to_string()), Vc::cell("edge-api".into()),
) )
} }
@ -385,7 +385,7 @@ impl PagesProject {
self.project().edge_compile_time_info(), self.project().edge_compile_time_info(),
self.edge_ssr_data_module_options_context(), self.edge_ssr_data_module_options_context(),
self.edge_ssr_resolve_options_context(), self.edge_ssr_resolve_options_context(),
Vc::cell("edge-ssr-data".to_string()), Vc::cell("edge-ssr-data".into()),
) )
} }
@ -570,8 +570,8 @@ impl PagesProject {
struct PageEndpoint { struct PageEndpoint {
ty: PageEndpointType, ty: PageEndpointType,
pages_project: Vc<PagesProject>, pages_project: Vc<PagesProject>,
pathname: Vc<String>, pathname: Vc<RcStr>,
original_name: Vc<String>, original_name: Vc<RcStr>,
path: Vc<FileSystemPath>, path: Vc<FileSystemPath>,
pages_structure: Vc<PagesStructure>, pages_structure: Vc<PagesStructure>,
} }
@ -597,8 +597,8 @@ impl PageEndpoint {
fn new( fn new(
ty: PageEndpointType, ty: PageEndpointType,
pages_project: Vc<PagesProject>, pages_project: Vc<PagesProject>,
pathname: Vc<String>, pathname: Vc<RcStr>,
original_name: Vc<String>, original_name: Vc<RcStr>,
path: Vc<FileSystemPath>, path: Vc<FileSystemPath>,
pages_structure: Vc<PagesStructure>, pages_structure: Vc<PagesStructure>,
) -> Vc<Self> { ) -> Vc<Self> {
@ -640,17 +640,14 @@ impl PageEndpoint {
let client_main_module = esm_resolve( let client_main_module = esm_resolve(
Vc::upcast(PlainResolveOrigin::new( Vc::upcast(PlainResolveOrigin::new(
client_module_context, client_module_context,
this.pages_project this.pages_project.project().project_path().join("_".into()),
.project()
.project_path()
.join("_".to_string()),
)), )),
Request::parse(Value::new(Pattern::Constant( Request::parse(Value::new(Pattern::Constant(
match *this.pages_project.project().next_mode().await? { match *this.pages_project.project().next_mode().await? {
NextMode::Development => "next/dist/client/next-dev-turbopack.js", NextMode::Development => "next/dist/client/next-dev-turbopack.js",
NextMode::Build => "next/dist/client/next-turbopack.js", NextMode::Build => "next/dist/client/next-turbopack.js",
} }
.to_string(), .into(),
))), ))),
Value::new(EcmaScriptModulesReferenceSubType::Undefined), Value::new(EcmaScriptModulesReferenceSubType::Undefined),
IssueSeverity::Error.cell(), IssueSeverity::Error.cell(),
@ -798,7 +795,7 @@ impl PageEndpoint {
let asset_path = get_asset_path_from_pathname(pathname, ".js"); let asset_path = get_asset_path_from_pathname(pathname, ".js");
let ssr_entry_chunk_path_string = format!("pages{asset_path}"); let ssr_entry_chunk_path_string: RcStr = format!("pages{asset_path}").into();
let ssr_entry_chunk_path = node_path.join(ssr_entry_chunk_path_string); let ssr_entry_chunk_path = node_path.join(ssr_entry_chunk_path_string);
let EntryChunkGroupResult { let EntryChunkGroupResult {
asset: ssr_entry_chunk, asset: ssr_entry_chunk,
@ -851,7 +848,7 @@ impl PageEndpoint {
this.pages_project this.pages_project
.project() .project()
.node_root() .node_root()
.join("server".to_string()), .join("server".into()),
this.pages_project.project().project_path(), this.pages_project.project().project_path(),
this.pages_project.ssr_module_context(), this.pages_project.ssr_module_context(),
this.pages_project.edge_ssr_module_context(), this.pages_project.edge_ssr_module_context(),
@ -871,7 +868,7 @@ impl PageEndpoint {
this.pages_project this.pages_project
.project() .project()
.node_root() .node_root()
.join("server/data".to_string()), .join("server/data".into()),
this.pages_project.project().project_path(), this.pages_project.project().project_path(),
this.pages_project.ssr_data_module_context(), this.pages_project.ssr_data_module_context(),
this.pages_project.edge_ssr_data_module_context(), this.pages_project.edge_ssr_data_module_context(),
@ -891,7 +888,7 @@ impl PageEndpoint {
this.pages_project this.pages_project
.project() .project()
.node_root() .node_root()
.join("server".to_string()), .join("server".into()),
this.pages_project.project().project_path(), this.pages_project.project().project_path(),
this.pages_project.api_module_context(), this.pages_project.api_module_context(),
this.pages_project.edge_api_module_context(), this.pages_project.edge_api_module_context(),
@ -912,21 +909,20 @@ impl PageEndpoint {
let chunk_path = entry_chunk.ident().path().await?; let chunk_path = entry_chunk.ident().path().await?;
let asset_path = node_root let asset_path = node_root
.join("server".to_string()) .join("server".into())
.await? .await?
.get_path_to(&chunk_path) .get_path_to(&chunk_path)
.context("ssr chunk entry path must be inside the node root")?; .context("ssr chunk entry path must be inside the node root")?;
let pages_manifest = PagesManifest { let pages_manifest = PagesManifest {
pages: [(this.pathname.await?.clone_value(), asset_path.to_string())] pages: [(this.pathname.await?.clone_value(), asset_path.into())]
.into_iter() .into_iter()
.collect(), .collect(),
}; };
let manifest_path_prefix = get_asset_prefix_from_pathname(&this.pathname.await?); let manifest_path_prefix = get_asset_prefix_from_pathname(&this.pathname.await?);
Ok(Vc::upcast(VirtualOutputAsset::new( Ok(Vc::upcast(VirtualOutputAsset::new(
node_root.join(format!( node_root
"server/pages{manifest_path_prefix}/pages-manifest.json", .join(format!("server/pages{manifest_path_prefix}/pages-manifest.json",).into()),
)),
AssetContent::file(File::from(serde_json::to_string_pretty(&pages_manifest)?).into()), AssetContent::file(File::from(serde_json::to_string_pretty(&pages_manifest)?).into()),
))) )))
} }
@ -942,9 +938,9 @@ impl PageEndpoint {
Ok(create_react_loadable_manifest( Ok(create_react_loadable_manifest(
dynamic_import_entries, dynamic_import_entries,
client_relative_path, client_relative_path,
node_root.join(format!( node_root.join(
"server/pages{loadable_path_prefix}/react-loadable-manifest.json" format!("server/pages{loadable_path_prefix}/react-loadable-manifest.json").into(),
)), ),
)) ))
} }
@ -971,7 +967,7 @@ impl PageEndpoint {
Ok(client_relative_path_ref Ok(client_relative_path_ref
.get_path_to(&chunk_path) .get_path_to(&chunk_path)
.context("client chunk entry path must be inside the client root")? .context("client chunk entry path must be inside the client root")?
.to_string()) .into())
} }
}) })
.try_join() .try_join()
@ -983,9 +979,8 @@ impl PageEndpoint {
}; };
let manifest_path_prefix = get_asset_prefix_from_pathname(&this.pathname.await?); let manifest_path_prefix = get_asset_prefix_from_pathname(&this.pathname.await?);
Ok(Vc::upcast(VirtualOutputAsset::new( Ok(Vc::upcast(VirtualOutputAsset::new(
node_root.join(format!( node_root
"server/pages{manifest_path_prefix}/build-manifest.json", .join(format!("server/pages{manifest_path_prefix}/build-manifest.json",).into()),
)),
AssetContent::file(File::from(serde_json::to_string_pretty(&build_manifest)?).into()), AssetContent::file(File::from(serde_json::to_string_pretty(&build_manifest)?).into()),
))) )))
} }
@ -1070,10 +1065,10 @@ impl PageEndpoint {
// //
// they are created in `setup-dev-bundler.ts` // they are created in `setup-dev-bundler.ts`
let mut file_paths_from_root = vec![ let mut file_paths_from_root = vec![
"server/server-reference-manifest.js".to_string(), "server/server-reference-manifest.js".into(),
"server/middleware-build-manifest.js".to_string(), "server/middleware-build-manifest.js".into(),
"server/middleware-react-loadable-manifest.js".to_string(), "server/middleware-react-loadable-manifest.js".into(),
"server/next-font-manifest.js".to_string(), "server/next-font-manifest.js".into(),
]; ];
let mut wasm_paths_from_root = vec![]; let mut wasm_paths_from_root = vec![];
@ -1087,35 +1082,36 @@ impl PageEndpoint {
wasm_paths_from_root wasm_paths_from_root
.extend(get_wasm_paths_from_root(&node_root_value, &all_output_assets).await?); .extend(get_wasm_paths_from_root(&node_root_value, &all_output_assets).await?);
let named_regex = get_named_middleware_regex(&pathname); let named_regex = get_named_middleware_regex(&pathname).into();
let matchers = MiddlewareMatcher { let matchers = MiddlewareMatcher {
regexp: Some(named_regex), regexp: Some(named_regex),
original_source: pathname.to_string(), original_source: pathname.clone_value(),
..Default::default() ..Default::default()
}; };
let original_name = this.original_name.await?; let original_name = this.original_name.await?;
let edge_function_definition = EdgeFunctionDefinition { let edge_function_definition = EdgeFunctionDefinition {
files: file_paths_from_root, files: file_paths_from_root,
wasm: wasm_paths_to_bindings(wasm_paths_from_root), wasm: wasm_paths_to_bindings(wasm_paths_from_root),
name: pathname.to_string(), name: pathname.clone_value(),
page: original_name.to_string(), page: original_name.clone_value(),
regions: None, regions: None,
matchers: vec![matchers], matchers: vec![matchers],
env: this.pages_project.project().edge_env().await?.clone_value(), env: this.pages_project.project().edge_env().await?.clone_value(),
..Default::default() ..Default::default()
}; };
let middleware_manifest_v2 = MiddlewaresManifestV2 { let middleware_manifest_v2 = MiddlewaresManifestV2 {
sorted_middleware: vec![pathname.to_string()], sorted_middleware: vec![pathname.clone_value()],
functions: [(pathname.to_string(), edge_function_definition)] functions: [(pathname.clone_value(), edge_function_definition)]
.into_iter() .into_iter()
.collect(), .collect(),
..Default::default() ..Default::default()
}; };
let manifest_path_prefix = get_asset_prefix_from_pathname(&this.pathname.await?); let manifest_path_prefix = get_asset_prefix_from_pathname(&this.pathname.await?);
let middleware_manifest_v2 = Vc::upcast(VirtualOutputAsset::new( let middleware_manifest_v2 = Vc::upcast(VirtualOutputAsset::new(
node_root.join(format!( node_root.join(
"server/pages{manifest_path_prefix}/middleware-manifest.json" format!("server/pages{manifest_path_prefix}/middleware-manifest.json")
)), .into(),
),
AssetContent::file( AssetContent::file(
FileContent::Content(File::from(serde_json::to_string_pretty( FileContent::Content(File::from(serde_json::to_string_pretty(
&middleware_manifest_v2, &middleware_manifest_v2,
@ -1154,16 +1150,16 @@ impl Endpoint for PageEndpoint {
let span = { let span = {
match this.ty { match this.ty {
PageEndpointType::Html => { PageEndpointType::Html => {
tracing::info_span!("page endpoint HTML", name = *original_name) tracing::info_span!("page endpoint HTML", name = original_name.to_string())
} }
PageEndpointType::Data => { PageEndpointType::Data => {
tracing::info_span!("page endpoint data", name = *original_name) tracing::info_span!("page endpoint data", name = original_name.to_string())
} }
PageEndpointType::Api => { PageEndpointType::Api => {
tracing::info_span!("page endpoint API", name = *original_name) tracing::info_span!("page endpoint API", name = original_name.to_string())
} }
PageEndpointType::SsrOnly => { PageEndpointType::SsrOnly => {
tracing::info_span!("page endpoint SSR", name = *original_name) tracing::info_span!("page endpoint SSR", name = original_name.to_string())
} }
} }
}; };


@ -1,7 +1,7 @@
use anyhow::Result; use anyhow::Result;
use next_core::{all_assets_from_entries, next_manifests::AssetBinding}; use next_core::{all_assets_from_entries, next_manifests::AssetBinding};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::{trace::TraceRawVcs, TryFlatJoinIterExt, Vc}; use turbo_tasks::{trace::TraceRawVcs, RcStr, TryFlatJoinIterExt, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::FileSystemPath, turbo::tasks_fs::FileSystemPath,
turbopack::core::{ turbopack::core::{
@ -62,7 +62,7 @@ pub async fn all_server_paths(
pub async fn all_paths_in_root( pub async fn all_paths_in_root(
assets: Vc<OutputAssets>, assets: Vc<OutputAssets>,
root: Vc<FileSystemPath>, root: Vc<FileSystemPath>,
) -> Result<Vc<Vec<String>>> { ) -> Result<Vc<Vec<RcStr>>> {
let all_assets = &*all_assets_from_entries(assets).await?; let all_assets = &*all_assets_from_entries(assets).await?;
let root = &*root.await?; let root = &*root.await?;
@ -75,7 +75,7 @@ pub(crate) async fn get_paths_from_root(
root: &FileSystemPath, root: &FileSystemPath,
output_assets: &[Vc<Box<dyn OutputAsset>>], output_assets: &[Vc<Box<dyn OutputAsset>>],
filter: impl FnOnce(&str) -> bool + Copy, filter: impl FnOnce(&str) -> bool + Copy,
) -> Result<Vec<String>> { ) -> Result<Vec<RcStr>> {
output_assets output_assets
.iter() .iter()
.map({ .map({
@ -86,7 +86,7 @@ pub(crate) async fn get_paths_from_root(
}; };
Ok(if filter(relative) { Ok(if filter(relative) {
Some(relative.to_string()) Some(relative.into())
} else { } else {
None None
}) })
@ -99,21 +99,21 @@ pub(crate) async fn get_paths_from_root(
pub(crate) async fn get_js_paths_from_root( pub(crate) async fn get_js_paths_from_root(
root: &FileSystemPath, root: &FileSystemPath,
output_assets: &[Vc<Box<dyn OutputAsset>>], output_assets: &[Vc<Box<dyn OutputAsset>>],
) -> Result<Vec<String>> { ) -> Result<Vec<RcStr>> {
get_paths_from_root(root, output_assets, |path| path.ends_with(".js")).await get_paths_from_root(root, output_assets, |path| path.ends_with(".js")).await
} }
pub(crate) async fn get_wasm_paths_from_root( pub(crate) async fn get_wasm_paths_from_root(
root: &FileSystemPath, root: &FileSystemPath,
output_assets: &[Vc<Box<dyn OutputAsset>>], output_assets: &[Vc<Box<dyn OutputAsset>>],
) -> Result<Vec<String>> { ) -> Result<Vec<RcStr>> {
get_paths_from_root(root, output_assets, |path| path.ends_with(".wasm")).await get_paths_from_root(root, output_assets, |path| path.ends_with(".wasm")).await
} }
pub(crate) async fn get_font_paths_from_root( pub(crate) async fn get_font_paths_from_root(
root: &FileSystemPath, root: &FileSystemPath,
output_assets: &[Vc<Box<dyn OutputAsset>>], output_assets: &[Vc<Box<dyn OutputAsset>>],
) -> Result<Vec<String>> { ) -> Result<Vec<RcStr>> {
get_paths_from_root(root, output_assets, |path| { get_paths_from_root(root, output_assets, |path| {
path.ends_with(".woff") path.ends_with(".woff")
|| path.ends_with(".woff2") || path.ends_with(".woff2")
@ -142,7 +142,7 @@ fn get_file_stem(path: &str) -> &str {
} }
} }
pub(crate) fn wasm_paths_to_bindings(paths: Vec<String>) -> Vec<AssetBinding> { pub(crate) fn wasm_paths_to_bindings(paths: Vec<RcStr>) -> Vec<AssetBinding> {
paths paths
.into_iter() .into_iter()
.map(|path| { .map(|path| {
@ -155,7 +155,7 @@ pub(crate) fn wasm_paths_to_bindings(paths: Vec<String>) -> Vec<AssetBinding> {
); );
AssetBinding { AssetBinding {
name: format!("wasm_{}", escaped), name: format!("wasm_{}", escaped).into(),
file_path: path, file_path: path,
} }
}) })


@ -26,7 +26,7 @@ use turbo_tasks::{
debug::ValueDebugFormat, debug::ValueDebugFormat,
graph::{AdjacencyMap, GraphTraversal}, graph::{AdjacencyMap, GraphTraversal},
trace::TraceRawVcs, trace::TraceRawVcs,
Completion, Completions, IntoTraitRef, State, TaskInput, TraitRef, TransientInstance, Completion, Completions, IntoTraitRef, RcStr, State, TaskInput, TraitRef, TransientInstance,
TryFlatJoinIterExt, Value, Vc, TryFlatJoinIterExt, Value, Vc,
}; };
use turbopack_binding::{ use turbopack_binding::{
@ -71,9 +71,9 @@ use crate::{
#[derive(Debug, Serialize, Deserialize, Clone, TaskInput, PartialEq, Eq, TraceRawVcs)] #[derive(Debug, Serialize, Deserialize, Clone, TaskInput, PartialEq, Eq, TraceRawVcs)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct DraftModeOptions { pub struct DraftModeOptions {
pub preview_mode_id: String, pub preview_mode_id: RcStr,
pub preview_mode_encryption_key: String, pub preview_mode_encryption_key: RcStr,
pub preview_mode_signing_key: String, pub preview_mode_signing_key: RcStr,
} }
#[derive(Debug, Serialize, Deserialize, Clone, TaskInput, PartialEq, Eq, TraceRawVcs)] #[derive(Debug, Serialize, Deserialize, Clone, TaskInput, PartialEq, Eq, TraceRawVcs)]
@ -81,19 +81,19 @@ pub struct DraftModeOptions {
pub struct ProjectOptions { pub struct ProjectOptions {
/// A root path from which all files must be nested under. Trying to access /// A root path from which all files must be nested under. Trying to access
/// a file outside this root will fail. Think of this as a chroot. /// a file outside this root will fail. Think of this as a chroot.
pub root_path: String, pub root_path: RcStr,
/// A path inside the root_path which contains the app/pages directories. /// A path inside the root_path which contains the app/pages directories.
pub project_path: String, pub project_path: RcStr,
/// The contents of next.config.js, serialized to JSON. /// The contents of next.config.js, serialized to JSON.
pub next_config: String, pub next_config: RcStr,
/// The contents of ts/config read by load-jsconfig, serialized to JSON. /// The contents of ts/config read by load-jsconfig, serialized to JSON.
pub js_config: String, pub js_config: RcStr,
/// A map of environment variables to use when compiling code. /// A map of environment variables to use when compiling code.
pub env: Vec<(String, String)>, pub env: Vec<(RcStr, RcStr)>,
/// A map of environment variables which should get injected at compile /// A map of environment variables which should get injected at compile
/// time. /// time.
@ -106,10 +106,10 @@ pub struct ProjectOptions {
pub dev: bool, pub dev: bool,
/// The server actions encryption key. /// The server actions encryption key.
pub encryption_key: String, pub encryption_key: RcStr,
/// The build id. /// The build id.
pub build_id: String, pub build_id: RcStr,
/// Options for draft mode. /// Options for draft mode.
pub preview_props: DraftModeOptions, pub preview_props: DraftModeOptions,
@ -120,19 +120,19 @@ pub struct ProjectOptions {
pub struct PartialProjectOptions { pub struct PartialProjectOptions {
/// A root path from which all files must be nested under. Trying to access /// A root path from which all files must be nested under. Trying to access
/// a file outside this root will fail. Think of this as a chroot. /// a file outside this root will fail. Think of this as a chroot.
pub root_path: Option<String>, pub root_path: Option<RcStr>,
/// A path inside the root_path which contains the app/pages directories. /// A path inside the root_path which contains the app/pages directories.
pub project_path: Option<String>, pub project_path: Option<RcStr>,
/// The contents of next.config.js, serialized to JSON. /// The contents of next.config.js, serialized to JSON.
pub next_config: Option<String>, pub next_config: Option<RcStr>,
/// The contents of ts/config read by load-jsconfig, serialized to JSON. /// The contents of ts/config read by load-jsconfig, serialized to JSON.
pub js_config: Option<String>, pub js_config: Option<RcStr>,
/// A map of environment variables to use when compiling code. /// A map of environment variables to use when compiling code.
pub env: Option<Vec<(String, String)>>, pub env: Option<Vec<(RcStr, RcStr)>>,
/// A map of environment variables which should get injected at compile /// A map of environment variables which should get injected at compile
/// time. /// time.
@ -145,10 +145,10 @@ pub struct PartialProjectOptions {
pub dev: Option<bool>, pub dev: Option<bool>,
/// The server actions encryption key. /// The server actions encryption key.
pub encryption_key: Option<String>, pub encryption_key: Option<RcStr>,
/// The build id. /// The build id.
pub build_id: Option<String>, pub build_id: Option<RcStr>,
/// Options for draft mode. /// Options for draft mode.
pub preview_props: Option<DraftModeOptions>, pub preview_props: Option<DraftModeOptions>,
@ -157,9 +157,9 @@ pub struct PartialProjectOptions {
#[derive(Debug, Serialize, Deserialize, Clone, TaskInput, PartialEq, Eq, TraceRawVcs)] #[derive(Debug, Serialize, Deserialize, Clone, TaskInput, PartialEq, Eq, TraceRawVcs)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct DefineEnv { pub struct DefineEnv {
pub client: Vec<(String, String)>, pub client: Vec<(RcStr, RcStr)>,
pub edge: Vec<(String, String)>, pub edge: Vec<(RcStr, RcStr)>,
pub nodejs: Vec<(String, String)>, pub nodejs: Vec<(RcStr, RcStr)>,
} }
#[derive(Serialize, Deserialize, TraceRawVcs, PartialEq, Eq, ValueDebugFormat)] #[derive(Serialize, Deserialize, TraceRawVcs, PartialEq, Eq, ValueDebugFormat)]
@ -288,7 +288,7 @@ impl ProjectContainer {
.await? .await?
.dist_dir .dist_dir
.as_ref() .as_ref()
.map_or_else(|| ".next".to_string(), |d| d.to_string()); .map_or_else(|| ".next".into(), |d| d.clone());
Ok(Project { Ok(Project {
root_path, root_path,
@ -301,7 +301,7 @@ impl ProjectContainer {
define_env, define_env,
browserslist_query: "last 1 Chrome versions, last 1 Firefox versions, last 1 Safari \ browserslist_query: "last 1 Chrome versions, last 1 Firefox versions, last 1 Safari \
versions, last 1 Edge versions" versions, last 1 Edge versions"
.to_string(), .into(),
mode: if dev { mode: if dev {
NextMode::Development.cell() NextMode::Development.cell()
} else { } else {
@ -323,7 +323,7 @@ impl ProjectContainer {
/// See [Project::hmr_identifiers]. /// See [Project::hmr_identifiers].
#[turbo_tasks::function] #[turbo_tasks::function]
pub fn hmr_identifiers(self: Vc<Self>) -> Vc<Vec<String>> { pub fn hmr_identifiers(self: Vc<Self>) -> Vc<Vec<RcStr>> {
self.project().hmr_identifiers() self.project().hmr_identifiers()
} }
@ -340,7 +340,7 @@ impl ProjectContainer {
pub async fn get_source_map( pub async fn get_source_map(
self: Vc<Self>, self: Vc<Self>,
file_path: Vc<FileSystemPath>, file_path: Vc<FileSystemPath>,
section: Option<String>, section: Option<RcStr>,
) -> Result<Vc<OptionSourceMap>> { ) -> Result<Vc<OptionSourceMap>> {
let this = self.await?; let this = self.await?;
Ok(this Ok(this
@ -353,13 +353,13 @@ impl ProjectContainer {
pub struct Project { pub struct Project {
/// A root path from which all files must be nested under. Trying to access /// A root path from which all files must be nested under. Trying to access
/// a file outside this root will fail. Think of this as a chroot. /// a file outside this root will fail. Think of this as a chroot.
root_path: String, root_path: RcStr,
/// A path where to emit the build outputs. next.config.js's distDir. /// A path where to emit the build outputs. next.config.js's distDir.
dist_dir: String, dist_dir: RcStr,
/// A path inside the root_path which contains the app/pages directories. /// A path inside the root_path which contains the app/pages directories.
pub project_path: String, pub project_path: RcStr,
/// Whether to watch the filesystem for file changes. /// Whether to watch the filesystem for file changes.
watch: bool, watch: bool,
@ -377,15 +377,15 @@ pub struct Project {
/// time. /// time.
define_env: Vc<ProjectDefineEnv>, define_env: Vc<ProjectDefineEnv>,
browserslist_query: String, browserslist_query: RcStr,
mode: Vc<NextMode>, mode: Vc<NextMode>,
versioned_content_map: Vc<VersionedContentMap>, versioned_content_map: Vc<VersionedContentMap>,
build_id: String, build_id: RcStr,
encryption_key: String, encryption_key: RcStr,
preview_props: DraftModeOptions, preview_props: DraftModeOptions,
} }
@ -471,8 +471,8 @@ impl Project {
async fn project_fs(self: Vc<Self>) -> Result<Vc<Box<dyn FileSystem>>> { async fn project_fs(self: Vc<Self>) -> Result<Vc<Box<dyn FileSystem>>> {
let this = self.await?; let this = self.await?;
let disk_fs = DiskFileSystem::new( let disk_fs = DiskFileSystem::new(
PROJECT_FILESYSTEM_NAME.to_string(), PROJECT_FILESYSTEM_NAME.into(),
this.root_path.to_string(), this.root_path.clone(),
vec![], vec![],
); );
if this.watch { if this.watch {
@ -490,19 +490,19 @@ impl Project {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn output_fs(self: Vc<Self>) -> Result<Vc<Box<dyn FileSystem>>> { pub async fn output_fs(self: Vc<Self>) -> Result<Vc<Box<dyn FileSystem>>> {
let this = self.await?; let this = self.await?;
let disk_fs = DiskFileSystem::new("output".to_string(), this.project_path.clone(), vec![]); let disk_fs = DiskFileSystem::new("output".into(), this.project_path.clone(), vec![]);
Ok(Vc::upcast(disk_fs)) Ok(Vc::upcast(disk_fs))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn dist_dir(self: Vc<Self>) -> Result<Vc<String>> { pub async fn dist_dir(self: Vc<Self>) -> Result<Vc<RcStr>> {
Ok(Vc::cell(self.await?.dist_dir.to_string())) Ok(Vc::cell(self.await?.dist_dir.clone()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn node_root(self: Vc<Self>) -> Result<Vc<FileSystemPath>> { pub async fn node_root(self: Vc<Self>) -> Result<Vc<FileSystemPath>> {
let this = self.await?; let this = self.await?;
Ok(self.output_fs().root().join(this.dist_dir.to_string())) Ok(self.output_fs().root().join(this.dist_dir.clone()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -518,25 +518,25 @@ impl Project {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn client_relative_path(self: Vc<Self>) -> Result<Vc<FileSystemPath>> { pub async fn client_relative_path(self: Vc<Self>) -> Result<Vc<FileSystemPath>> {
let next_config = self.next_config().await?; let next_config = self.next_config().await?;
Ok(self.client_root().join(format!( Ok(self.client_root().join(
"{}/_next", format!(
next_config "{}/_next",
.base_path next_config.base_path.clone().unwrap_or_else(|| "".into()),
.clone() )
.unwrap_or_else(|| "".to_string()), .into(),
))) ))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn project_path(self: Vc<Self>) -> Result<Vc<FileSystemPath>> { pub async fn project_path(self: Vc<Self>) -> Result<Vc<FileSystemPath>> {
let this = self.await?; let this = self.await?;
let root = self.project_root_path(); let root = self.project_root_path();
let project_relative = this.project_path.strip_prefix(&this.root_path).unwrap(); let project_relative = this.project_path.strip_prefix(&*this.root_path).unwrap();
let project_relative = project_relative let project_relative = project_relative
.strip_prefix(MAIN_SEPARATOR) .strip_prefix(MAIN_SEPARATOR)
.unwrap_or(project_relative) .unwrap_or(project_relative)
.replace(MAIN_SEPARATOR, "/"); .replace(MAIN_SEPARATOR, "/");
Ok(root.join(project_relative)) Ok(root.join(project_relative.into()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -569,8 +569,8 @@ impl Project {
self.project_path(), self.project_path(),
node_root, node_root,
node_root, node_root,
node_root.join("chunks".to_string()), node_root.join("chunks".into()),
node_root.join("assets".to_string()), node_root.join("assets".into()),
node_build_environment(), node_build_environment(),
next_mode.runtime_type(), next_mode.runtime_type(),
) )
@ -613,11 +613,11 @@ impl Project {
#[turbo_tasks::function] #[turbo_tasks::function]
pub(super) fn edge_env(&self) -> Vc<EnvMap> { pub(super) fn edge_env(&self) -> Vc<EnvMap> {
let edge_env = indexmap! { let edge_env = indexmap! {
"__NEXT_BUILD_ID".to_string() => self.build_id.clone(), "__NEXT_BUILD_ID".into() => self.build_id.clone(),
"NEXT_SERVER_ACTIONS_ENCRYPTION_KEY".to_string() => self.encryption_key.clone(), "NEXT_SERVER_ACTIONS_ENCRYPTION_KEY".into() => self.encryption_key.clone(),
"__NEXT_PREVIEW_MODE_ID".to_string() => self.preview_props.preview_mode_id.clone(), "__NEXT_PREVIEW_MODE_ID".into() => self.preview_props.preview_mode_id.clone(),
"__NEXT_PREVIEW_MODE_ENCRYPTION_KEY".to_string() => self.preview_props.preview_mode_encryption_key.clone(), "__NEXT_PREVIEW_MODE_ENCRYPTION_KEY".into() => self.preview_props.preview_mode_encryption_key.clone(),
"__NEXT_PREVIEW_MODE_SIGNING_KEY".to_string() => self.preview_props.preview_mode_signing_key.clone(), "__NEXT_PREVIEW_MODE_SIGNING_KEY".into() => self.preview_props.preview_mode_signing_key.clone(),
}; };
Vc::cell(edge_env) Vc::cell(edge_env)
} }
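The `edge_env` hunk above builds the map from `.into()` literals and `.clone()` of stored fields. A sketch of the same shape using std types, with `HashMap` and `Arc<str>` standing in for the `indexmap!` map and `RcStr` used in the diff (assumptions for illustration):

```rust
use std::{collections::HashMap, sync::Arc};

struct Project {
    // Stand-ins for the RcStr fields on Project.
    build_id: Arc<str>,
    encryption_key: Arc<str>,
}

impl Project {
    fn edge_env(&self) -> HashMap<Arc<str>, Arc<str>> {
        let mut env = HashMap::new();
        // Literal keys convert with `.into()`; cloning fields is a refcount bump.
        env.insert("__NEXT_BUILD_ID".into(), self.build_id.clone());
        env.insert(
            "NEXT_SERVER_ACTIONS_ENCRYPTION_KEY".into(),
            self.encryption_key.clone(),
        );
        env
    }
}

fn main() {
    let project = Project {
        build_id: Arc::from("dev-build"),
        encryption_key: Arc::from("secret"),
    };
    assert_eq!(project.edge_env().len(), 2);
}
```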
@ -688,7 +688,7 @@ impl Project {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn collect_project_feature_telemetry(self: Vc<Self>) -> Result<Vc<()>> { async fn collect_project_feature_telemetry(self: Vc<Self>) -> Result<Vc<()>> {
let emit_event = |feature_name: &str, enabled: bool| { let emit_event = |feature_name: &str, enabled: bool| {
NextFeatureTelemetry::new(feature_name.to_string(), enabled) NextFeatureTelemetry::new(feature_name.into(), enabled)
.cell() .cell()
.emit(); .emit();
}; };
@ -786,14 +786,14 @@ impl Project {
path: self.project_path(), path: self.project_path(),
title: StyledString::Text( title: StyledString::Text(
format!("App Router and Pages Router both match path: {}", pathname) format!("App Router and Pages Router both match path: {}", pathname)
.to_string(), .into(),
) )
.cell(), .cell(),
description: StyledString::Text( description: StyledString::Text(
"Next.js does not support having both App Router and Pages Router \ "Next.js does not support having both App Router and Pages Router \
routes matching the same path. Please remove one of the conflicting \ routes matching the same path. Please remove one of the conflicting \
routes." routes."
.to_string(), .into(),
) )
.cell(), .cell(),
severity: IssueSeverity::Error.cell(), severity: IssueSeverity::Error.cell(),
@ -895,7 +895,7 @@ impl Project {
self.next_config(), self.next_config(),
self.execution_context(), self.execution_context(),
), ),
Vc::cell("middleware".to_string()), Vc::cell("middleware".into()),
)) ))
} }
@ -929,7 +929,7 @@ impl Project {
self.next_config(), self.next_config(),
self.execution_context(), self.execution_context(),
), ),
Vc::cell("instrumentation".to_string()), Vc::cell("instrumentation".into()),
)) ))
} }
@ -979,7 +979,7 @@ impl Project {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn hmr_content( async fn hmr_content(
self: Vc<Self>, self: Vc<Self>,
identifier: String, identifier: RcStr,
) -> Result<Vc<Box<dyn VersionedContent>>> { ) -> Result<Vc<Box<dyn VersionedContent>>> {
Ok(self Ok(self
.await? .await?
@ -988,7 +988,7 @@ impl Project {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
async fn hmr_version(self: Vc<Self>, identifier: String) -> Result<Vc<Box<dyn Version>>> { async fn hmr_version(self: Vc<Self>, identifier: RcStr) -> Result<Vc<Box<dyn Version>>> {
let content = self.hmr_content(identifier); let content = self.hmr_content(identifier);
Ok(content.version()) Ok(content.version())
@ -999,7 +999,7 @@ impl Project {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn hmr_version_state( pub async fn hmr_version_state(
self: Vc<Self>, self: Vc<Self>,
identifier: String, identifier: RcStr,
session: TransientInstance<()>, session: TransientInstance<()>,
) -> Result<Vc<VersionState>> { ) -> Result<Vc<VersionState>> {
let version = self.hmr_version(identifier); let version = self.hmr_version(identifier);
@ -1019,7 +1019,7 @@ impl Project {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn hmr_update( pub async fn hmr_update(
self: Vc<Self>, self: Vc<Self>,
identifier: String, identifier: RcStr,
from: Vc<VersionState>, from: Vc<VersionState>,
) -> Result<Vc<Update>> { ) -> Result<Vc<Update>> {
let from = from.get(); let from = from.get();
@ -1029,7 +1029,7 @@ impl Project {
/// Gets a list of all HMR identifiers that can be subscribed to. This is /// Gets a list of all HMR identifiers that can be subscribed to. This is
/// only needed for testing purposes and isn't used in real apps. /// only needed for testing purposes and isn't used in real apps.
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn hmr_identifiers(self: Vc<Self>) -> Result<Vc<Vec<String>>> { pub async fn hmr_identifiers(self: Vc<Self>) -> Result<Vc<Vec<RcStr>>> {
Ok(self Ok(self
.await? .await?
.versioned_content_map .versioned_content_map


@ -1,7 +1,7 @@
use anyhow::Result; use anyhow::Result;
use indexmap::IndexMap; use indexmap::IndexMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::{debug::ValueDebugFormat, trace::TraceRawVcs, Completion, Vc}; use turbo_tasks::{debug::ValueDebugFormat, trace::TraceRawVcs, Completion, RcStr, Vc};
use crate::paths::ServerPath; use crate::paths::ServerPath;
@ -84,15 +84,15 @@ pub enum WrittenEndpoint {
/// Relative to the root_path /// Relative to the root_path
server_entry_path: String, server_entry_path: String,
server_paths: Vec<ServerPath>, server_paths: Vec<ServerPath>,
client_paths: Vec<String>, client_paths: Vec<RcStr>,
}, },
Edge { Edge {
server_paths: Vec<ServerPath>, server_paths: Vec<ServerPath>,
client_paths: Vec<String>, client_paths: Vec<RcStr>,
}, },
} }
/// The routes as map from pathname to route. (pathname includes the leading /// The routes as map from pathname to route. (pathname includes the leading
/// slash) /// slash)
#[turbo_tasks::value(transparent)] #[turbo_tasks::value(transparent)]
pub struct Routes(IndexMap<String, Route>); pub struct Routes(IndexMap<RcStr, Route>);
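
With map keys switched to the shared string type (as in `Routes(IndexMap<RcStr, Route>)` above), lookups by plain `&str` keep working because the key type borrows as `str`. A small self-contained sketch using `Arc<str>` as a stand-in for the key type:

```rust
use std::{collections::BTreeMap, sync::Arc};

fn main() {
    // `Arc<str>` stands in for the shared string key type used above.
    let mut subdirectories: BTreeMap<Arc<str>, u32> = BTreeMap::new();
    subdirectories.insert(Arc::from("dashboard"), 1);
    subdirectories.insert(Arc::from("@slot"), 2);

    // Lookups by plain `&str` still work because `Arc<str>: Borrow<str>`.
    assert_eq!(subdirectories.get("dashboard"), Some(&1));

    // Cloning a key shares the allocation rather than copying it.
    let first_key = subdirectories.keys().next().unwrap().clone();
    assert_eq!(&*first_key, "@slot"); // BTreeMap is ordered; '@' sorts before 'd'.
}
```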


@ -9,7 +9,7 @@ use next_core::{
use tracing::Instrument; use tracing::Instrument;
use turbo_tasks::{ use turbo_tasks::{
graph::{GraphTraversal, NonDeterministic}, graph::{GraphTraversal, NonDeterministic},
TryFlatJoinIterExt, Value, ValueToString, Vc, RcStr, TryFlatJoinIterExt, Value, ValueToString, Vc,
}; };
use turbopack_binding::{ use turbopack_binding::{
swc::core::{common::comments::Comments, ecma::ast::Program}, swc::core::{common::comments::Comments, ecma::ast::Program},
@ -54,7 +54,7 @@ pub(crate) async fn create_server_actions_manifest(
) -> Result<(Vc<Box<dyn EvaluatableAsset>>, Vc<Box<dyn OutputAsset>>)> { ) -> Result<(Vc<Box<dyn EvaluatableAsset>>, Vc<Box<dyn OutputAsset>>)> {
let actions = get_actions(rsc_entry, server_reference_modules, asset_context); let actions = get_actions(rsc_entry, server_reference_modules, asset_context);
let loader = let loader =
build_server_actions_loader(project_path, page_name.to_string(), actions, asset_context); build_server_actions_loader(project_path, page_name.into(), actions, asset_context);
let evaluable = Vc::try_resolve_sidecast::<Box<dyn EvaluatableAsset>>(loader) let evaluable = Vc::try_resolve_sidecast::<Box<dyn EvaluatableAsset>>(loader)
.await? .await?
.context("loader module must be evaluatable")?; .context("loader module must be evaluatable")?;
@ -76,7 +76,7 @@ pub(crate) async fn create_server_actions_manifest(
#[turbo_tasks::function] #[turbo_tasks::function]
async fn build_server_actions_loader( async fn build_server_actions_loader(
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
page_name: String, page_name: RcStr,
actions: Vc<AllActions>, actions: Vc<AllActions>,
asset_context: Vc<Box<dyn AssetContext>>, asset_context: Vc<Box<dyn AssetContext>>,
) -> Result<Vc<Box<dyn EcmascriptChunkPlaceable>>> { ) -> Result<Vc<Box<dyn EcmascriptChunkPlaceable>>> {
@ -92,7 +92,7 @@ async fn build_server_actions_loader(
let index = import_map.len(); let index = import_map.len();
let module_name = import_map let module_name = import_map
.entry(*module) .entry(*module)
.or_insert_with(|| format!("ACTIONS_MODULE{index}")); .or_insert_with(|| format!("ACTIONS_MODULE{index}").into());
writeln!( writeln!(
contents, contents,
" '{hash_id}': (...args) => Promise.resolve(require('{module_name}')).then(mod => \ " '{hash_id}': (...args) => Promise.resolve(require('{module_name}')).then(mod => \
@ -101,7 +101,8 @@ async fn build_server_actions_loader(
} }
write!(contents, "}});")?; write!(contents, "}});")?;
let output_path = project_path.join(format!(".next-internal/server/app{page_name}/actions.js")); let output_path =
project_path.join(format!(".next-internal/server/app{page_name}/actions.js").into());
let file = File::from(contents.build()); let file = File::from(contents.build());
let source = VirtualSource::new(output_path, AssetContent::file(file.into())); let source = VirtualSource::new(output_path, AssetContent::file(file.into()));
let import_map = import_map.into_iter().map(|(k, v)| (v, k)).collect(); let import_map = import_map.into_iter().map(|(k, v)| (v, k)).collect();
@ -128,12 +129,11 @@ async fn build_manifest(
page_name: &str, page_name: &str,
runtime: NextRuntime, runtime: NextRuntime,
actions: Vc<AllActions>, actions: Vc<AllActions>,
loader_id: Vc<String>, loader_id: Vc<RcStr>,
) -> Result<Vc<Box<dyn OutputAsset>>> { ) -> Result<Vc<Box<dyn OutputAsset>>> {
let manifest_path_prefix = page_name; let manifest_path_prefix = page_name;
let manifest_path = node_root.join(format!( let manifest_path = node_root
"server/app{manifest_path_prefix}/server-reference-manifest.json", .join(format!("server/app{manifest_path_prefix}/server-reference-manifest.json",).into());
));
let mut manifest = ServerReferenceManifest { let mut manifest = ServerReferenceManifest {
..Default::default() ..Default::default()
}; };
@ -163,8 +163,8 @@ async fn build_manifest(
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn action_modifier() -> Vc<String> { fn action_modifier() -> Vc<RcStr> {
Vc::cell("action".to_string()) Vc::cell("action".into())
} }
/// Traverses the entire module graph starting from [Module], looking for magic /// Traverses the entire module graph starting from [Module], looking for magic
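
At call sites the pattern in this file is mechanical: `"literal".to_string()` becomes `"literal".into()`, and `format!(..)` results gain a trailing `.into()`. A minimal sketch of that shape; the `cell` helper below is a hypothetical stand-in, not the real `Vc::cell`:

```rust
use std::sync::Arc;

/// Hypothetical stand-in for a function whose parameter changed from
/// `String` to a shared string type.
fn cell(value: Arc<str>) -> Arc<str> {
    value
}

fn main() {
    // Literal argument: `.to_string()` becomes `.into()`.
    let modifier = cell("action".into());

    // Built string: the `format!` result is converted with `.into()`.
    let index = 3;
    let module_name = cell(format!("ACTIONS_MODULE{index}").into());

    assert_eq!(&*modifier, "action");
    assert_eq!(&*module_name, "ACTIONS_MODULE3");
}
```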


@ -4,7 +4,7 @@ use anyhow::{bail, Result};
use next_core::emit_client_assets; use next_core::emit_client_assets;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::{ use turbo_tasks::{
debug::ValueDebugFormat, trace::TraceRawVcs, Completion, State, TryFlatJoinIterExt, debug::ValueDebugFormat, trace::TraceRawVcs, Completion, RcStr, State, TryFlatJoinIterExt,
TryJoinIterExt, ValueDefault, ValueToString, Vc, TryJoinIterExt, ValueDefault, ValueToString, Vc,
}; };
use turbopack_binding::{ use turbopack_binding::{
@ -124,7 +124,7 @@ impl VersionedContentMap {
pub async fn get_source_map( pub async fn get_source_map(
self: Vc<Self>, self: Vc<Self>,
path: Vc<FileSystemPath>, path: Vc<FileSystemPath>,
section: Option<String>, section: Option<RcStr>,
) -> Result<Vc<OptionSourceMap>> { ) -> Result<Vc<OptionSourceMap>> {
if let Some(generate_source_map) = if let Some(generate_source_map) =
Vc::try_resolve_sidecast::<Box<dyn GenerateSourceMap>>(self.get_asset(path)).await? Vc::try_resolve_sidecast::<Box<dyn GenerateSourceMap>>(self.get_asset(path)).await?
@ -169,7 +169,7 @@ impl VersionedContentMap {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn keys_in_path(&self, root: Vc<FileSystemPath>) -> Result<Vc<Vec<String>>> { pub async fn keys_in_path(&self, root: Vc<FileSystemPath>) -> Result<Vc<Vec<RcStr>>> {
let keys = { let keys = {
let map = self.map_path_to_op.get(); let map = self.map_path_to_op.get();
map.keys().copied().collect::<Vec<_>>() map.keys().copied().collect::<Vec<_>>()
@ -177,7 +177,7 @@ impl VersionedContentMap {
let root = &root.await?; let root = &root.await?;
let keys = keys let keys = keys
.into_iter() .into_iter()
.map(|path| async move { Ok(root.get_path_to(&*path.await?).map(|p| p.to_string())) }) .map(|path| async move { Ok(root.get_path_to(&*path.await?).map(RcStr::from)) })
.try_flat_join() .try_flat_join()
.await?; .await?;
Ok(Vc::cell(keys)) Ok(Vc::cell(keys))


@ -3,7 +3,7 @@ use std::ops::Deref;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::Value; use serde_json::Value;
use turbo_tasks::{trace::TraceRawVcs, TryJoinIterExt, ValueDefault, Vc}; use turbo_tasks::{trace::TraceRawVcs, RcStr, TryJoinIterExt, ValueDefault, Vc};
use turbo_tasks_fs::FileSystemPath; use turbo_tasks_fs::FileSystemPath;
use turbopack_binding::{ use turbopack_binding::{
swc::core::{ swc::core::{
@ -71,7 +71,7 @@ pub struct NextSegmentConfig {
pub revalidate: Option<NextRevalidate>, pub revalidate: Option<NextRevalidate>,
pub fetch_cache: Option<NextSegmentFetchCache>, pub fetch_cache: Option<NextSegmentFetchCache>,
pub runtime: Option<NextRuntime>, pub runtime: Option<NextRuntime>,
pub preferred_region: Option<Vec<String>>, pub preferred_region: Option<Vec<RcStr>>,
pub experimental_ppr: Option<bool>, pub experimental_ppr: Option<bool>,
} }
@ -178,7 +178,7 @@ impl Issue for NextSegmentConfigParsingIssue {
#[turbo_tasks::function] #[turbo_tasks::function]
fn title(&self) -> Vc<StyledString> { fn title(&self) -> Vc<StyledString> {
StyledString::Text("Unable to parse config export in source file".to_string()).cell() StyledString::Text("Unable to parse config export in source file".into()).cell()
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -197,7 +197,7 @@ impl Issue for NextSegmentConfigParsingIssue {
StyledString::Text( StyledString::Text(
"The exported configuration object in a source file need to have a very specific \ "The exported configuration object in a source file need to have a very specific \
format from which some properties can be statically parsed at compiled-time." format from which some properties can be statically parsed at compiled-time."
.to_string(), .into(),
) )
.cell(), .cell(),
)) ))
@ -209,10 +209,10 @@ impl Issue for NextSegmentConfigParsingIssue {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn documentation_link(&self) -> Vc<String> { fn documentation_link(&self) -> Vc<RcStr> {
Vc::cell( Vc::cell(
"https://nextjs.org/docs/app/api-reference/file-conventions/route-segment-config" "https://nextjs.org/docs/app/api-reference/file-conventions/route-segment-config"
.to_string(), .into(),
) )
} }
@ -312,7 +312,7 @@ fn parse_config_value(
let (explainer, hints) = value.explain(2, 0); let (explainer, hints) = value.explain(2, 0);
NextSegmentConfigParsingIssue { NextSegmentConfigParsingIssue {
ident: source.ident(), ident: source.ident(),
detail: StyledString::Text(format!("{detail} Got {explainer}.{hints}")).cell(), detail: StyledString::Text(format!("{detail} Got {explainer}.{hints}").into()).cell(),
source: issue_source(source, span), source: issue_source(source, span),
} }
.cell() .cell()
@ -402,14 +402,14 @@ fn parse_config_value(
let preferred_region = match value { let preferred_region = match value {
// Single value is turned into a single-element Vec. // Single value is turned into a single-element Vec.
JsValue::Constant(ConstantValue::Str(str)) => vec![str.to_string()], JsValue::Constant(ConstantValue::Str(str)) => vec![str.to_string().into()],
// Array of strings is turned into a Vec. If one of the values in not a String it // Array of strings is turned into a Vec. If one of the values in not a String it
// will error. // will error.
JsValue::Array { items, .. } => { JsValue::Array { items, .. } => {
let mut regions = Vec::new(); let mut regions = Vec::new();
for item in items { for item in items {
if let JsValue::Constant(ConstantValue::Str(str)) = item { if let JsValue::Constant(ConstantValue::Str(str)) = item {
regions.push(str.to_string()); regions.push(str.to_string().into());
} else { } else {
invalid_config( invalid_config(
"Values of the `preferredRegion` array need to static strings", "Values of the `preferredRegion` array need to static strings",


@ -11,7 +11,7 @@ use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::Instrument; use tracing::Instrument;
use turbo_tasks::{ use turbo_tasks::{
debug::ValueDebugFormat, trace::TraceRawVcs, Completion, Completions, TaskInput, debug::ValueDebugFormat, trace::TraceRawVcs, Completion, Completions, RcStr, TaskInput,
TryJoinIterExt, ValueToString, Vc, TryJoinIterExt, ValueToString, Vc,
}; };
use turbopack_binding::{ use turbopack_binding::{
@ -104,11 +104,11 @@ pub enum MetadataItem {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn get_metadata_route_name(meta: MetadataItem) -> Result<Vc<String>> { pub async fn get_metadata_route_name(meta: MetadataItem) -> Result<Vc<RcStr>> {
Ok(match meta { Ok(match meta {
MetadataItem::Static { path } => { MetadataItem::Static { path } => {
let path_value = path.await?; let path_value = path.await?;
Vc::cell(path_value.file_name().to_string()) Vc::cell(path_value.file_name().into())
} }
MetadataItem::Dynamic { path } => { MetadataItem::Dynamic { path } => {
let Some(stem) = &*path.file_stem().await? else { let Some(stem) = &*path.file_stem().await? else {
@ -119,7 +119,7 @@ pub async fn get_metadata_route_name(meta: MetadataItem) -> Result<Vc<String>> {
}; };
match stem.as_str() { match stem.as_str() {
"manifest" => Vc::cell("manifest.webmanifest".to_string()), "manifest" => Vc::cell("manifest.webmanifest".into()),
_ => Vc::cell(stem.clone()), _ => Vc::cell(stem.clone()),
} }
} }
@ -213,7 +213,7 @@ impl GlobalMetadata {
#[derive(Debug)] #[derive(Debug)]
pub struct DirectoryTree { pub struct DirectoryTree {
/// key is e.g. "dashboard", "(dashboard)", "@slot" /// key is e.g. "dashboard", "(dashboard)", "@slot"
pub subdirectories: BTreeMap<String, Vc<DirectoryTree>>, pub subdirectories: BTreeMap<RcStr, Vc<DirectoryTree>>,
pub components: Vc<Components>, pub components: Vc<Components>,
} }
@ -259,8 +259,8 @@ impl OptionAppDir {
/// Finds and returns the [DirectoryTree] of the app directory if existing. /// Finds and returns the [DirectoryTree] of the app directory if existing.
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn find_app_dir(project_path: Vc<FileSystemPath>) -> Result<Vc<OptionAppDir>> { pub async fn find_app_dir(project_path: Vc<FileSystemPath>) -> Result<Vc<OptionAppDir>> {
let app = project_path.join("app".to_string()); let app = project_path.join("app".into());
let src_app = project_path.join("src/app".to_string()); let src_app = project_path.join("src/app".into());
let app_dir = if *app.get_type().await? == FileSystemEntryType::Directory { let app_dir = if *app.get_type().await? == FileSystemEntryType::Directory {
app app
} else if *src_app.get_type().await? == FileSystemEntryType::Directory { } else if *src_app.get_type().await? == FileSystemEntryType::Directory {
@ -284,11 +284,11 @@ pub async fn find_app_dir_if_enabled(project_path: Vc<FileSystemPath>) -> Result
#[turbo_tasks::function] #[turbo_tasks::function]
async fn get_directory_tree( async fn get_directory_tree(
dir: Vc<FileSystemPath>, dir: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>, page_extensions: Vc<Vec<RcStr>>,
) -> Result<Vc<DirectoryTree>> { ) -> Result<Vc<DirectoryTree>> {
let span = { let span = {
let dir = dir.to_string().await?; let dir = dir.to_string().await?.to_string();
tracing::info_span!("read app directory tree", name = *dir) tracing::info_span!("read app directory tree", name = dir)
}; };
get_directory_tree_internal(dir, page_extensions) get_directory_tree_internal(dir, page_extensions)
.instrument(span) .instrument(span)
@ -297,7 +297,7 @@ async fn get_directory_tree(
async fn get_directory_tree_internal( async fn get_directory_tree_internal(
dir: Vc<FileSystemPath>, dir: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>, page_extensions: Vc<Vec<RcStr>>,
) -> Result<Vc<DirectoryTree>> { ) -> Result<Vc<DirectoryTree>> {
let DirectoryContent::Entries(entries) = &*dir.read_dir().await? else { let DirectoryContent::Entries(entries) = &*dir.read_dir().await? else {
// the file watcher might invalidate things in the wrong order, // the file watcher might invalidate things in the wrong order,
@ -375,7 +375,7 @@ async fn get_directory_tree_internal(
let basename = file_name let basename = file_name
.rsplit_once('.') .rsplit_once('.')
.map_or(file_name, |(basename, _)| basename); .map_or(file_name, |(basename, _)| basename);
let alt_path = file.parent().join(format!("{}.alt.txt", basename)); let alt_path = file.parent().join(format!("{}.alt.txt", basename).into());
let alt_path = matches!(&*alt_path.get_type().await?, FileSystemEntryType::File) let alt_path = matches!(&*alt_path.get_type().await?, FileSystemEntryType::File)
.then_some(alt_path); .then_some(alt_path);
@ -392,7 +392,7 @@ async fn get_directory_tree_internal(
// appDir ignores paths starting with an underscore // appDir ignores paths starting with an underscore
if !basename.starts_with('_') { if !basename.starts_with('_') {
let result = get_directory_tree(dir, page_extensions); let result = get_directory_tree(dir, page_extensions);
subdirectories.insert(basename.to_string(), result); subdirectories.insert(basename.clone(), result);
} }
} }
// TODO(WEB-952) handle symlinks in app dir // TODO(WEB-952) handle symlinks in app dir
@ -421,8 +421,8 @@ async fn get_directory_tree_internal(
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct LoaderTree { pub struct LoaderTree {
pub page: AppPage, pub page: AppPage,
pub segment: String, pub segment: RcStr,
pub parallel_routes: IndexMap<String, Vc<LoaderTree>>, pub parallel_routes: IndexMap<RcStr, Vc<LoaderTree>>,
pub components: Vc<Components>, pub components: Vc<Components>,
pub global_metadata: Vc<GlobalMetadata>, pub global_metadata: Vc<GlobalMetadata>,
} }
@ -432,7 +432,7 @@ impl LoaderTree {
/// Returns true if there's a page match in this loader tree. /// Returns true if there's a page match in this loader tree.
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn has_page(&self) -> Result<Vc<bool>> { pub async fn has_page(&self) -> Result<Vc<bool>> {
if self.segment == "__PAGE__" { if &*self.segment == "__PAGE__" {
return Ok(Vc::cell(true)); return Ok(Vc::cell(true));
} }
@ -449,7 +449,7 @@ impl LoaderTree {
/// route. /// route.
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn has_only_catchall(&self) -> Result<Vc<bool>> { pub async fn has_only_catchall(&self) -> Result<Vc<bool>> {
if self.segment == "__PAGE__" && !self.page.is_catchall() { if &*self.segment == "__PAGE__" && !self.page.is_catchall() {
return Ok(Vc::cell(false)); return Ok(Vc::cell(false));
} }
@ -527,11 +527,14 @@ fn conflict_issue(
DirectoryTreeIssue { DirectoryTreeIssue {
app_dir, app_dir,
message: StyledString::Text(format!( message: StyledString::Text(
"Conflicting {} at {}: {a} at {value_a} and {b} at {value_b}", format!(
item_names, "Conflicting {} at {}: {a} at {value_a} and {b} at {value_b}",
e.key(), item_names,
)) e.key(),
)
.into(),
)
.cell(), .cell(),
severity: IssueSeverity::Error.cell(), severity: IssueSeverity::Error.cell(),
} }
@ -684,7 +687,7 @@ fn add_app_metadata_route(
#[turbo_tasks::function] #[turbo_tasks::function]
pub fn get_entrypoints( pub fn get_entrypoints(
app_dir: Vc<FileSystemPath>, app_dir: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>, page_extensions: Vc<Vec<RcStr>>,
) -> Vc<Entrypoints> { ) -> Vc<Entrypoints> {
directory_tree_to_entrypoints( directory_tree_to_entrypoints(
app_dir, app_dir,
@ -704,7 +707,7 @@ fn directory_tree_to_entrypoints(
directory_tree_to_entrypoints_internal( directory_tree_to_entrypoints_internal(
app_dir, app_dir,
global_metadata, global_metadata,
"".to_string(), "".into(),
directory_tree, directory_tree,
AppPage::new(), AppPage::new(),
root_layouts, root_layouts,
@ -722,7 +725,7 @@ impl Issue for DuplicateParallelRouteIssue {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn file_path(self: Vc<Self>) -> Result<Vc<FileSystemPath>> { async fn file_path(self: Vc<Self>) -> Result<Vc<FileSystemPath>> {
let this = self.await?; let this = self.await?;
Ok(this.app_dir.join(this.page.to_string())) Ok(this.app_dir.join(this.page.to_string().into()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -733,7 +736,7 @@ impl Issue for DuplicateParallelRouteIssue {
#[turbo_tasks::function] #[turbo_tasks::function]
fn title(self: Vc<Self>) -> Vc<StyledString> { fn title(self: Vc<Self>) -> Vc<StyledString> {
StyledString::Text( StyledString::Text(
"You cannot have two parallel pages that resolve to the same path.".to_string(), "You cannot have two parallel pages that resolve to the same path.".into(),
) )
.cell() .cell()
} }
@ -795,7 +798,7 @@ async fn check_duplicate(
async fn directory_tree_to_loader_tree( async fn directory_tree_to_loader_tree(
app_dir: Vc<FileSystemPath>, app_dir: Vc<FileSystemPath>,
global_metadata: Vc<GlobalMetadata>, global_metadata: Vc<GlobalMetadata>,
directory_name: String, directory_name: RcStr,
directory_tree: Vc<DirectoryTree>, directory_tree: Vc<DirectoryTree>,
app_page: AppPage, app_page: AppPage,
// the page this loader tree is constructed for // the page this loader tree is constructed for
@ -828,7 +831,7 @@ async fn directory_tree_to_loader_tree(
if (is_root_directory || is_root_layout) && components.not_found.is_none() { if (is_root_directory || is_root_layout) && components.not_found.is_none() {
components.not_found = Some( components.not_found = Some(
get_next_package(app_dir).join("dist/client/components/not-found-error.js".to_string()), get_next_package(app_dir).join("dist/client/components/not-found-error.js".into()),
); );
} }
@ -843,7 +846,7 @@ async fn directory_tree_to_loader_tree(
let current_level_is_parallel_route = is_parallel_route(&directory_name); let current_level_is_parallel_route = is_parallel_route(&directory_name);
if current_level_is_parallel_route { if current_level_is_parallel_route {
tree.segment = "children".to_string(); tree.segment = "children".into();
} }
if let Some(page) = (app_path == for_app_path || app_path.is_catchall()) if let Some(page) = (app_path == for_app_path || app_path.is_catchall())
@ -862,10 +865,10 @@ async fn directory_tree_to_loader_tree(
}; };
tree.parallel_routes.insert( tree.parallel_routes.insert(
"children".to_string(), "children".into(),
LoaderTree { LoaderTree {
page: app_page.clone(), page: app_page.clone(),
segment: "__PAGE__".to_string(), segment: "__PAGE__".into(),
parallel_routes: IndexMap::new(), parallel_routes: IndexMap::new(),
components: Components { components: Components {
page: Some(page), page: Some(page),
@ -879,7 +882,7 @@ async fn directory_tree_to_loader_tree(
); );
if current_level_is_parallel_route { if current_level_is_parallel_route {
tree.segment = "page$".to_string(); tree.segment = "page$".into();
} }
} }
@ -915,7 +918,7 @@ async fn directory_tree_to_loader_tree(
if let Some(subtree) = subtree { if let Some(subtree) = subtree {
if let Some(key) = parallel_route_key { if let Some(key) = parallel_route_key {
tree.parallel_routes.insert(key.to_string(), subtree); tree.parallel_routes.insert(key.into(), subtree);
continue; continue;
} }
@ -933,7 +936,7 @@ async fn directory_tree_to_loader_tree(
// there's probably already a more specific page in the // there's probably already a more specific page in the
// slot. // slot.
} else if *current_tree.has_only_catchall().await? { } else if *current_tree.has_only_catchall().await? {
tree.parallel_routes.insert("children".to_string(), subtree); tree.parallel_routes.insert("children".into(), subtree);
} else { } else {
// TODO: Investigate if this is still needed. Emitting the // TODO: Investigate if this is still needed. Emitting the
// error causes the test "should // error causes the test "should
@ -955,7 +958,7 @@ async fn directory_tree_to_loader_tree(
// .emit(); // .emit();
} }
} else { } else {
tree.parallel_routes.insert("children".to_string(), subtree); tree.parallel_routes.insert("children".into(), subtree);
} }
} else if let Some(key) = parallel_route_key { } else if let Some(key) = parallel_route_key {
bail!( bail!(
@ -967,7 +970,7 @@ async fn directory_tree_to_loader_tree(
} }
if tree.parallel_routes.is_empty() { if tree.parallel_routes.is_empty() {
tree.segment = "__DEFAULT__".to_string(); tree.segment = "__DEFAULT__".into();
if let Some(default) = components.default { if let Some(default) = components.default {
tree.components = Components { tree.components = Components {
default: Some(default), default: Some(default),
@ -979,7 +982,7 @@ async fn directory_tree_to_loader_tree(
tree.components = Components { tree.components = Components {
default: Some( default: Some(
get_next_package(app_dir) get_next_package(app_dir)
.join("dist/client/components/parallel-route-default.js".to_string()), .join("dist/client/components/parallel-route-default.js".into()),
), ),
..Default::default() ..Default::default()
} }
@ -989,10 +992,10 @@ async fn directory_tree_to_loader_tree(
} }
} else if tree.parallel_routes.get("children").is_none() { } else if tree.parallel_routes.get("children").is_none() {
tree.parallel_routes.insert( tree.parallel_routes.insert(
"children".to_string(), "children".into(),
LoaderTree { LoaderTree {
page: app_page.clone(), page: app_page.clone(),
segment: "__DEFAULT__".to_string(), segment: "__DEFAULT__".into(),
parallel_routes: IndexMap::new(), parallel_routes: IndexMap::new(),
components: if let Some(default) = components.default { components: if let Some(default) = components.default {
Components { Components {
@ -1004,9 +1007,8 @@ async fn directory_tree_to_loader_tree(
// default fallback component // default fallback component
Components { Components {
default: Some( default: Some(
get_next_package(app_dir).join( get_next_package(app_dir)
"dist/client/components/parallel-route-default.js".to_string(), .join("dist/client/components/parallel-route-default.js".into()),
),
), ),
..Default::default() ..Default::default()
} }
@ -1033,7 +1035,7 @@ async fn directory_tree_to_loader_tree(
async fn directory_tree_to_entrypoints_internal( async fn directory_tree_to_entrypoints_internal(
app_dir: Vc<FileSystemPath>, app_dir: Vc<FileSystemPath>,
global_metadata: Vc<GlobalMetadata>, global_metadata: Vc<GlobalMetadata>,
directory_name: String, directory_name: RcStr,
directory_tree: Vc<DirectoryTree>, directory_tree: Vc<DirectoryTree>,
app_page: AppPage, app_page: AppPage,
root_layouts: Vc<Vec<Vc<FileSystemPath>>>, root_layouts: Vc<Vec<Vc<FileSystemPath>>>,
@ -1054,7 +1056,7 @@ async fn directory_tree_to_entrypoints_internal(
async fn directory_tree_to_entrypoints_internal_untraced( async fn directory_tree_to_entrypoints_internal_untraced(
app_dir: Vc<FileSystemPath>, app_dir: Vc<FileSystemPath>,
global_metadata: Vc<GlobalMetadata>, global_metadata: Vc<GlobalMetadata>,
directory_name: String, directory_name: RcStr,
directory_tree: Vc<DirectoryTree>, directory_tree: Vc<DirectoryTree>,
app_page: AppPage, app_page: AppPage,
root_layouts: Vc<Vec<Vc<FileSystemPath>>>, root_layouts: Vc<Vec<Vc<FileSystemPath>>>,
@ -1161,16 +1163,16 @@ async fn directory_tree_to_entrypoints_internal_untraced(
page: app_page.clone(), page: app_page.clone(),
segment: directory_name.clone(), segment: directory_name.clone(),
parallel_routes: indexmap! { parallel_routes: indexmap! {
"children".to_string() => LoaderTree { "children".into() => LoaderTree {
page: app_page.clone(), page: app_page.clone(),
segment: "/_not-found".to_string(), segment: "/_not-found".into(),
parallel_routes: indexmap! { parallel_routes: indexmap! {
"children".to_string() => LoaderTree { "children".into() => LoaderTree {
page: app_page.clone(), page: app_page.clone(),
segment: "__PAGE__".to_string(), segment: "__PAGE__".into(),
parallel_routes: IndexMap::new(), parallel_routes: IndexMap::new(),
components: Components { components: Components {
page: components.not_found.or_else(|| Some(get_next_package(app_dir).join("dist/client/components/not-found-error.js".to_string()))), page: components.not_found.or_else(|| Some(get_next_package(app_dir).join("dist/client/components/not-found-error.js".into()))),
..Default::default() ..Default::default()
}.cell(), }.cell(),
global_metadata global_metadata
@ -1211,7 +1213,7 @@ async fn directory_tree_to_entrypoints_internal_untraced(
let map = directory_tree_to_entrypoints_internal( let map = directory_tree_to_entrypoints_internal(
app_dir, app_dir,
global_metadata, global_metadata,
subdir_name.to_string(), subdir_name.clone(),
subdirectory, subdirectory,
child_app_page.clone(), child_app_page.clone(),
root_layouts, root_layouts,
@ -1294,7 +1296,7 @@ async fn directory_tree_to_entrypoints_internal_untraced(
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn get_global_metadata( pub async fn get_global_metadata(
app_dir: Vc<FileSystemPath>, app_dir: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>, page_extensions: Vc<Vec<RcStr>>,
) -> Result<Vc<GlobalMetadata>> { ) -> Result<Vc<GlobalMetadata>> {
let DirectoryContent::Entries(entries) = &*app_dir.read_dir().await? else { let DirectoryContent::Entries(entries) = &*app_dir.read_dir().await? else {
bail!("app_dir must be a directory") bail!("app_dir must be a directory")
@ -1348,10 +1350,7 @@ impl Issue for DirectoryTreeIssue {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn title(&self) -> Result<Vc<StyledString>> { async fn title(&self) -> Result<Vc<StyledString>> {
Ok( Ok(StyledString::Text("An issue occurred while preparing your Next.js app".into()).cell())
StyledString::Text("An issue occurred while preparing your Next.js app".to_string())
.cell(),
)
} }
#[turbo_tasks::function] #[turbo_tasks::function]


@ -80,7 +80,7 @@ pub async fn bootstrap(
let config_asset = context let config_asset = context
.process( .process(
Vc::upcast(VirtualSource::new( Vc::upcast(VirtualSource::new(
asset.ident().path().join("bootstrap-config.ts".to_string()), asset.ident().path().join("bootstrap-config.ts".into()),
AssetContent::file( AssetContent::file(
File::from( File::from(
config config
@ -97,8 +97,8 @@ pub async fn bootstrap(
.module(); .module();
let mut inner_assets = inner_assets.await?.clone_value(); let mut inner_assets = inner_assets.await?.clone_value();
inner_assets.insert("ENTRY".to_string(), asset); inner_assets.insert("ENTRY".into(), asset);
inner_assets.insert("BOOTSTRAP_CONFIG".to_string(), config_asset); inner_assets.insert("BOOTSTRAP_CONFIG".into(), config_asset);
let asset = context let asset = context
.process( .process(


@ -1,4 +1,4 @@
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{FileContent, FileSystem, FileSystemPath}, turbo::tasks_fs::{FileContent, FileSystem, FileSystemPath},
turbopack::core::{file_source::FileSource, source::Source}, turbopack::core::{file_source::FileSource, source::Source},
@ -13,16 +13,16 @@ pub(crate) fn next_js_fs() -> Vc<Box<dyn FileSystem>> {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub(crate) fn next_js_file(path: String) -> Vc<FileContent> { pub(crate) fn next_js_file(path: RcStr) -> Vc<FileContent> {
next_js_fs().root().join(path).read() next_js_fs().root().join(path).read()
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub(crate) fn next_js_file_path(path: String) -> Vc<FileSystemPath> { pub(crate) fn next_js_file_path(path: RcStr) -> Vc<FileSystemPath> {
next_js_fs().root().join(path) next_js_fs().root().join(path)
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub(crate) fn next_asset(path: String) -> Vc<Box<dyn Source>> { pub(crate) fn next_asset(path: RcStr) -> Vc<Box<dyn Source>> {
Vc::upcast(FileSource::new(next_js_file_path(path))) Vc::upcast(FileSource::new(next_js_file_path(path)))
} }


@ -1,8 +1,8 @@
use anyhow::Result; use anyhow::Result;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn instrumentation_files(page_extensions: Vc<Vec<String>>) -> Result<Vc<Vec<String>>> { pub async fn instrumentation_files(page_extensions: Vc<Vec<RcStr>>) -> Result<Vc<Vec<RcStr>>> {
let extensions = page_extensions.await?; let extensions = page_extensions.await?;
let files = ["instrumentation.", "src/instrumentation."] let files = ["instrumentation.", "src/instrumentation."]
.into_iter() .into_iter()
@ -10,6 +10,7 @@ pub async fn instrumentation_files(page_extensions: Vc<Vec<String>>) -> Result<V
extensions extensions
.iter() .iter()
.map(move |ext| String::from(f) + ext.as_str()) .map(move |ext| String::from(f) + ext.as_str())
.map(RcStr::from)
}) })
.collect(); .collect();
Ok(Vc::cell(files)) Ok(Vc::cell(files))
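
The extension expansion above now maps each built `String` through `RcStr::from` before collecting. A self-contained sketch of the same shape, with `Arc<str>` standing in for `RcStr`:

```rust
use std::sync::Arc;

fn main() {
    // `Arc<str>` stands in for the shared string type collected above.
    let page_extensions = ["ts", "js"];
    let files: Vec<Arc<str>> = ["instrumentation.", "src/instrumentation."]
        .into_iter()
        .flat_map(|f| {
            page_extensions
                .iter()
                .map(move |ext| format!("{f}{ext}"))
                .map(Arc::<str>::from)
        })
        .collect();

    assert_eq!(&*files[0], "instrumentation.ts");
    assert_eq!(files.len(), 4);
}
```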


@ -7,7 +7,7 @@ use anyhow::Result;
use async_recursion::async_recursion; use async_recursion::async_recursion;
use indexmap::IndexMap; use indexmap::IndexMap;
use indoc::formatdoc; use indoc::formatdoc;
use turbo_tasks::{Value, ValueToString, Vc}; use turbo_tasks::{RcStr, Value, ValueToString, Vc};
use turbo_tasks_fs::FileSystemPath; use turbo_tasks_fs::FileSystemPath;
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
@ -33,15 +33,15 @@ use crate::{
}; };
pub struct LoaderTreeBuilder { pub struct LoaderTreeBuilder {
inner_assets: IndexMap<String, Vc<Box<dyn Module>>>, inner_assets: IndexMap<RcStr, Vc<Box<dyn Module>>>,
counter: usize, counter: usize,
imports: Vec<String>, imports: Vec<RcStr>,
loader_tree_code: String, loader_tree_code: String,
context: Vc<ModuleAssetContext>, context: Vc<ModuleAssetContext>,
server_component_transition: Vc<Box<dyn Transition>>, server_component_transition: Vc<Box<dyn Transition>>,
pages: Vec<Vc<FileSystemPath>>, pages: Vec<Vc<FileSystemPath>>,
/// next.config.js' basePath option to construct og metadata. /// next.config.js' basePath option to construct og metadata.
base_path: Option<String>, base_path: Option<RcStr>,
} }
#[derive(Clone, Copy, Debug, PartialEq, Eq)] #[derive(Clone, Copy, Debug, PartialEq, Eq)]
@ -73,7 +73,7 @@ impl LoaderTreeBuilder {
fn new( fn new(
context: Vc<ModuleAssetContext>, context: Vc<ModuleAssetContext>,
server_component_transition: Vc<Box<dyn Transition>>, server_component_transition: Vc<Box<dyn Transition>>,
base_path: Option<String>, base_path: Option<RcStr>,
) -> Self { ) -> Self {
LoaderTreeBuilder { LoaderTreeBuilder {
inner_assets: IndexMap::new(), inner_assets: IndexMap::new(),
@ -123,15 +123,19 @@ impl LoaderTreeBuilder {
path = StringifyJs(&module.ident().path().to_string().await?) path = StringifyJs(&module.ident().path().to_string().await?)
)?; )?;
self.imports.push(formatdoc!( self.imports.push(
r#" formatdoc!(
r#"
import {} from "COMPONENT_{}"; import {} from "COMPONENT_{}";
"#, "#,
identifier, identifier,
i i
)); )
.into(),
);
self.inner_assets.insert(format!("COMPONENT_{i}"), module); self.inner_assets
.insert(format!("COMPONENT_{i}").into(), module);
} }
Ok(()) Ok(())
} }
@ -240,12 +244,12 @@ impl LoaderTreeBuilder {
let inner_module_id = format!("METADATA_{i}"); let inner_module_id = format!("METADATA_{i}");
self.imports self.imports
.push(format!("import {identifier} from \"{inner_module_id}\";")); .push(format!("import {identifier} from \"{inner_module_id}\";").into());
let source = dynamic_image_metadata_source( let source = dynamic_image_metadata_source(
Vc::upcast(self.context), Vc::upcast(self.context),
*path, *path,
name.to_string(), name.into(),
app_page.clone(), app_page.clone(),
); );
@ -258,7 +262,7 @@ impl LoaderTreeBuilder {
)), )),
) )
.module(); .module();
self.inner_assets.insert(inner_module_id, module); self.inner_assets.insert(inner_module_id.into(), module);
let s = " "; let s = " ";
writeln!(self.loader_tree_code, "{s}{identifier},")?; writeln!(self.loader_tree_code, "{s}{identifier},")?;
@ -279,18 +283,18 @@ impl LoaderTreeBuilder {
let identifier = magic_identifier::mangle(&format!("{name} #{i}")); let identifier = magic_identifier::mangle(&format!("{name} #{i}"));
let inner_module_id = format!("METADATA_{i}"); let inner_module_id = format!("METADATA_{i}");
let helper_import = "import { fillMetadataSegment } from \ let helper_import: RcStr = "import { fillMetadataSegment } from \
\"next/dist/lib/metadata/get-metadata-route\"" \"next/dist/lib/metadata/get-metadata-route\""
.to_string(); .into();
if !self.imports.contains(&helper_import) { if !self.imports.contains(&helper_import) {
self.imports.push(helper_import); self.imports.push(helper_import);
} }
self.imports self.imports
.push(format!("import {identifier} from \"{inner_module_id}\";")); .push(format!("import {identifier} from \"{inner_module_id}\";").into());
self.inner_assets.insert( self.inner_assets.insert(
inner_module_id, inner_module_id.into(),
Vc::upcast(StructuredImageModuleType::create_module( Vc::upcast(StructuredImageModuleType::create_module(
Vc::upcast(FileSource::new(path)), Vc::upcast(FileSource::new(path)),
BlurPlaceholderMode::None, BlurPlaceholderMode::None,
@ -332,7 +336,7 @@ impl LoaderTreeBuilder {
let identifier = magic_identifier::mangle(&format!("{name} alt text #{i}")); let identifier = magic_identifier::mangle(&format!("{name} alt text #{i}"));
let inner_module_id = format!("METADATA_ALT_{i}"); let inner_module_id = format!("METADATA_ALT_{i}");
self.imports self.imports
.push(format!("import {identifier} from \"{inner_module_id}\";")); .push(format!("import {identifier} from \"{inner_module_id}\";").into());
let module = self let module = self
.context .context
.process( .process(
@ -342,7 +346,7 @@ impl LoaderTreeBuilder {
Value::new(ReferenceType::Internal(InnerAssets::empty())), Value::new(ReferenceType::Internal(InnerAssets::empty())),
) )
.module(); .module();
self.inner_assets.insert(inner_module_id, module); self.inner_assets.insert(inner_module_id.into(), module);
writeln!(self.loader_tree_code, "{s} alt: {identifier},")?; writeln!(self.loader_tree_code, "{s} alt: {identifier},")?;
} }
@ -425,7 +429,7 @@ impl LoaderTreeBuilder {
self.walk_tree(loader_tree, true).await?; self.walk_tree(loader_tree, true).await?;
Ok(LoaderTreeModule { Ok(LoaderTreeModule {
imports: self.imports, imports: self.imports,
loader_tree_code: self.loader_tree_code, loader_tree_code: self.loader_tree_code.into(),
inner_assets: self.inner_assets, inner_assets: self.inner_assets,
pages: self.pages, pages: self.pages,
}) })
@ -433,9 +437,9 @@ impl LoaderTreeBuilder {
} }
pub struct LoaderTreeModule { pub struct LoaderTreeModule {
pub imports: Vec<String>, pub imports: Vec<RcStr>,
pub loader_tree_code: String, pub loader_tree_code: RcStr,
pub inner_assets: IndexMap<String, Vc<Box<dyn Module>>>, pub inner_assets: IndexMap<RcStr, Vc<Box<dyn Module>>>,
pub pages: Vec<Vc<FileSystemPath>>, pub pages: Vec<Vc<FileSystemPath>>,
} }
@ -444,7 +448,7 @@ impl LoaderTreeModule {
loader_tree: Vc<LoaderTree>, loader_tree: Vc<LoaderTree>,
context: Vc<ModuleAssetContext>, context: Vc<ModuleAssetContext>,
server_component_transition: Vc<Box<dyn Transition>>, server_component_transition: Vc<Box<dyn Transition>>,
base_path: Option<String>, base_path: Option<RcStr>,
) -> Result<Self> { ) -> Result<Self> {
LoaderTreeBuilder::new(context, server_component_transition, base_path) LoaderTreeBuilder::new(context, server_component_transition, base_path)
.build(loader_tree) .build(loader_tree)
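
One detail from the import handling above: `Vec::contains` takes a reference to the element type, so the helper import is materialized as an `RcStr` before the dedup check rather than compared as a `&str`. A sketch of the same pattern, again with `Arc<str>` as the stand-in:

```rust
use std::sync::Arc;

fn main() {
    // The needle is built as a shared string up front so it can be
    // passed to `contains` and pushed without re-allocating.
    let mut imports: Vec<Arc<str>> = Vec::new();

    let helper_import: Arc<str> =
        "import { fillMetadataSegment } from \"next/dist/lib/metadata/get-metadata-route\""
            .into();

    for _ in 0..2 {
        if !imports.contains(&helper_import) {
            imports.push(helper_import.clone());
        }
    }

    assert_eq!(imports.len(), 1); // pushed only once despite two attempts
}
```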


@ -1,6 +1,6 @@
use anyhow::Result; use anyhow::Result;
use indexmap::indexmap; use indexmap::indexmap;
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbo_tasks_fs::FileSystemPath; use turbo_tasks_fs::FileSystemPath;
use turbopack_binding::turbopack::core::{ use turbopack_binding::turbopack::core::{
context::AssetContext, module::Module, reference_type::ReferenceType, context::AssetContext, module::Module, reference_type::ReferenceType,
@ -9,7 +9,7 @@ use turbopack_binding::turbopack::core::{
use crate::util::load_next_js_template; use crate::util::load_next_js_template;
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn middleware_files(page_extensions: Vc<Vec<String>>) -> Result<Vc<Vec<String>>> { pub async fn middleware_files(page_extensions: Vc<Vec<RcStr>>) -> Result<Vc<Vec<RcStr>>> {
let extensions = page_extensions.await?; let extensions = page_extensions.await?;
let files = ["middleware.", "src/middleware."] let files = ["middleware.", "src/middleware."]
.into_iter() .into_iter()
@ -17,6 +17,7 @@ pub async fn middleware_files(page_extensions: Vc<Vec<String>>) -> Result<Vc<Vec
extensions extensions
.iter() .iter()
.map(move |ext| String::from(f) + ext.as_str()) .map(move |ext| String::from(f) + ext.as_str())
.map(RcStr::from)
}) })
.collect(); .collect();
Ok(Vc::cell(files)) Ok(Vc::cell(files))
@ -35,8 +36,8 @@ pub async fn get_middleware_module(
"middleware.js", "middleware.js",
project_root, project_root,
indexmap! { indexmap! {
"VAR_USERLAND" => INNER.to_string(), "VAR_USERLAND" => INNER.into(),
"VAR_DEFINITION_PAGE" => "/middleware".to_string(), "VAR_DEFINITION_PAGE" => "/middleware".into(),
}, },
indexmap! {}, indexmap! {},
indexmap! {}, indexmap! {},
@ -44,7 +45,7 @@ pub async fn get_middleware_module(
.await?; .await?;
let inner_assets = indexmap! { let inner_assets = indexmap! {
INNER.to_string() => userland_module INNER.into() => userland_module
}; };
let module = context let module = context

View file

@ -1,7 +1,7 @@
use anyhow::Result; use anyhow::Result;
use indexmap::IndexMap; use indexmap::IndexMap;
use tracing::Instrument; use tracing::Instrument;
use turbo_tasks::{TryFlatJoinIterExt, TryJoinIterExt, Value, ValueToString, Vc}; use turbo_tasks::{RcStr, TryFlatJoinIterExt, TryJoinIterExt, Value, ValueToString, Vc};
use turbopack_binding::turbopack::core::{ use turbopack_binding::turbopack::core::{
chunk::{availability_info::AvailabilityInfo, ChunkingContext, ChunkingContextExt}, chunk::{availability_info::AvailabilityInfo, ChunkingContext, ChunkingContextExt},
module::Module, module::Module,
@ -18,13 +18,13 @@ use crate::{
}; };
#[turbo_tasks::function] #[turbo_tasks::function]
fn client_modules_modifier() -> Vc<String> { fn client_modules_modifier() -> Vc<RcStr> {
Vc::cell("client modules".to_string()) Vc::cell("client modules".into())
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn client_modules_ssr_modifier() -> Vc<String> { fn client_modules_ssr_modifier() -> Vc<RcStr> {
Vc::cell("client modules ssr".to_string()) Vc::cell("client modules ssr".into())
} }
#[turbo_tasks::value] #[turbo_tasks::value]


@ -1,4 +1,4 @@
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbopack::ecmascript::chunk::EcmascriptChunkPlaceable; use turbopack_binding::turbopack::ecmascript::chunk::EcmascriptChunkPlaceable;
use crate::app_segment_config::NextSegmentConfig; use crate::app_segment_config::NextSegmentConfig;
@ -7,10 +7,10 @@ use crate::app_segment_config::NextSegmentConfig;
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub struct AppEntry { pub struct AppEntry {
/// The pathname of the route or page. /// The pathname of the route or page.
pub pathname: String, pub pathname: RcStr,
/// The original Next.js name of the route or page. This is used instead of /// The original Next.js name of the route or page. This is used instead of
/// the pathname to refer to this entry. /// the pathname to refer to this entry.
pub original_name: String, pub original_name: RcStr,
/// The RSC module asset for the route or page. /// The RSC module asset for the route or page.
pub rsc_entry: Vc<Box<dyn EcmascriptChunkPlaceable>>, pub rsc_entry: Vc<Box<dyn EcmascriptChunkPlaceable>>,
/// The source code config for this entry. /// The source code config for this entry.


@ -2,7 +2,7 @@ use std::io::Write;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use indexmap::indexmap; use indexmap::indexmap;
use turbo_tasks::{TryJoinIterExt, Value, ValueToString, Vc}; use turbo_tasks::{RcStr, TryJoinIterExt, Value, ValueToString, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{rope::RopeBuilder, File, FileSystemPath}, turbo::tasks_fs::{rope::RopeBuilder, File, FileSystemPath},
turbopack::{ turbopack::{
@ -71,25 +71,25 @@ pub async fn get_app_page_entry(
let pages = pages.iter().map(|page| page.to_string()).try_join().await?; let pages = pages.iter().map(|page| page.to_string()).try_join().await?;
let original_name = page.to_string(); let original_name: RcStr = page.to_string().into();
let pathname = AppPath::from(page.clone()).to_string(); let pathname: RcStr = AppPath::from(page.clone()).to_string().into();
// Load the file from the next.js codebase. // Load the file from the next.js codebase.
let source = load_next_js_template( let source = load_next_js_template(
"app-page.js", "app-page.js",
project_root, project_root,
indexmap! { indexmap! {
"VAR_DEFINITION_PAGE" => page.to_string(), "VAR_DEFINITION_PAGE" => page.to_string().into(),
"VAR_DEFINITION_PATHNAME" => pathname.clone(), "VAR_DEFINITION_PATHNAME" => pathname.clone(),
"VAR_ORIGINAL_PATHNAME" => original_name.clone(), "VAR_ORIGINAL_PATHNAME" => original_name.clone(),
// TODO(alexkirsz) Support custom global error. // TODO(alexkirsz) Support custom global error.
"VAR_MODULE_GLOBAL_ERROR" => "next/dist/client/components/error-boundary".to_string(), "VAR_MODULE_GLOBAL_ERROR" => "next/dist/client/components/error-boundary".into(),
}, },
indexmap! { indexmap! {
"tree" => loader_tree_code, "tree" => loader_tree_code,
"pages" => StringifyJs(&pages).to_string(), "pages" => StringifyJs(&pages).to_string().into(),
"__next_app_require__" => "__turbopack_require__".to_string(), "__next_app_require__" => "__turbopack_require__".into(),
"__next_app_load_chunk__" => " __turbopack_load__".to_string(), "__next_app_load_chunk__" => " __turbopack_load__".into(),
}, },
indexmap! {}, indexmap! {},
) )
@ -103,7 +103,9 @@ pub async fn get_app_page_entry(
let file = File::from(result.build()); let file = File::from(result.build());
let source = VirtualSource::new_with_ident( let source = VirtualSource::new_with_ident(
source.ident().with_query(Vc::cell(query.to_string())), source
.ident()
.with_query(Vc::cell(query.to_string().into())),
AssetContent::file(file.into()), AssetContent::file(file.into()),
); );
@ -171,15 +173,15 @@ async fn wrap_edge_page(
"edge-ssr-app.js", "edge-ssr-app.js",
project_root, project_root,
indexmap! { indexmap! {
"VAR_USERLAND" => INNER.to_string(), "VAR_USERLAND" => INNER.into(),
"VAR_PAGE" => page.to_string(), "VAR_PAGE" => page.to_string().into(),
}, },
indexmap! { indexmap! {
"sriEnabled" => serde_json::Value::Bool(sri_enabled).to_string(), "sriEnabled" => serde_json::Value::Bool(sri_enabled).to_string().into(),
"nextConfig" => serde_json::to_string(next_config)?, "nextConfig" => serde_json::to_string(next_config)?.into(),
"isServerComponent" => serde_json::Value::Bool(is_server_component).to_string(), "isServerComponent" => serde_json::Value::Bool(is_server_component).to_string().into(),
"dev" => serde_json::Value::Bool(dev).to_string(), "dev" => serde_json::Value::Bool(dev).to_string().into(),
"serverActions" => serde_json::to_string(&server_actions)? "serverActions" => serde_json::to_string(&server_actions)?.into(),
}, },
indexmap! { indexmap! {
"incrementalCacheHandler" => None, "incrementalCacheHandler" => None,
@ -188,7 +190,7 @@ async fn wrap_edge_page(
.await?; .await?;
let inner_assets = indexmap! { let inner_assets = indexmap! {
INNER.to_string() => entry INNER.into() => entry
}; };
let wrapped = context let wrapped = context
@ -202,6 +204,6 @@ async fn wrap_edge_page(
context, context,
project_root, project_root,
wrapped, wrapped,
AppPath::from(page).to_string(), AppPath::from(page).to_string().into(),
)) ))
} }


@ -1,6 +1,6 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use indexmap::indexmap; use indexmap::indexmap;
use turbo_tasks::{Value, ValueToString, Vc}; use turbo_tasks::{RcStr, Value, ValueToString, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::FileSystemPath, turbo::tasks_fs::FileSystemPath,
turbopack::{ turbopack::{
@ -57,14 +57,14 @@ pub async fn get_app_route_entry(
nodejs_context nodejs_context
}; };
let original_name = page.to_string(); let original_name: RcStr = page.to_string().into();
let pathname = AppPath::from(page.clone()).to_string(); let pathname: RcStr = AppPath::from(page.clone()).to_string().into();
let path = source.ident().path(); let path = source.ident().path();
const INNER: &str = "INNER_APP_ROUTE"; const INNER: &str = "INNER_APP_ROUTE";
let output_type = next_config let output_type: RcStr = next_config
.await? .await?
.output .output
.as_ref() .as_ref()
@ -72,21 +72,22 @@ pub async fn get_app_route_entry(
OutputType::Standalone => "\"standalone\"".to_string(), OutputType::Standalone => "\"standalone\"".to_string(),
OutputType::Export => "\"export\"".to_string(), OutputType::Export => "\"export\"".to_string(),
}) })
.unwrap_or_else(|| "\"\"".to_string()); .map(RcStr::from)
.unwrap_or_else(|| "\"\"".into());
// Load the file from the next.js codebase. // Load the file from the next.js codebase.
let virtual_source = load_next_js_template( let virtual_source = load_next_js_template(
"app-route.js", "app-route.js",
project_root, project_root,
indexmap! { indexmap! {
"VAR_DEFINITION_PAGE" => page.to_string(), "VAR_DEFINITION_PAGE" => page.to_string().into(),
"VAR_DEFINITION_PATHNAME" => pathname.clone(), "VAR_DEFINITION_PATHNAME" => pathname.clone(),
"VAR_DEFINITION_FILENAME" => path.file_stem().await?.as_ref().unwrap().clone(), "VAR_DEFINITION_FILENAME" => path.file_stem().await?.as_ref().unwrap().as_str().into(),
// TODO(alexkirsz) Is this necessary? // TODO(alexkirsz) Is this necessary?
"VAR_DEFINITION_BUNDLE_PATH" => "".to_string(), "VAR_DEFINITION_BUNDLE_PATH" => "".into(),
"VAR_ORIGINAL_PATHNAME" => original_name.clone(), "VAR_ORIGINAL_PATHNAME" => original_name.clone(),
"VAR_RESOLVED_PAGE_PATH" => path.to_string().await?.clone_value(), "VAR_RESOLVED_PAGE_PATH" => path.to_string().await?.clone_value(),
"VAR_USERLAND" => INNER.to_string(), "VAR_USERLAND" => INNER.into(),
}, },
indexmap! { indexmap! {
"nextConfigOutput" => output_type "nextConfigOutput" => output_type
@ -103,7 +104,7 @@ pub async fn get_app_route_entry(
.module(); .module();
let inner_assets = indexmap! { let inner_assets = indexmap! {
INNER.to_string() => userland_module INNER.into() => userland_module
}; };
let mut rsc_entry = context let mut rsc_entry = context
@ -142,7 +143,7 @@ async fn wrap_edge_route(
context: Vc<Box<dyn AssetContext>>, context: Vc<Box<dyn AssetContext>>,
project_root: Vc<FileSystemPath>, project_root: Vc<FileSystemPath>,
entry: Vc<Box<dyn Module>>, entry: Vc<Box<dyn Module>>,
pathname: String, pathname: RcStr,
) -> Result<Vc<Box<dyn Module>>> { ) -> Result<Vc<Box<dyn Module>>> {
const INNER: &str = "INNER_ROUTE_ENTRY"; const INNER: &str = "INNER_ROUTE_ENTRY";
@ -150,7 +151,7 @@ async fn wrap_edge_route(
"edge-app-route.js", "edge-app-route.js",
project_root, project_root,
indexmap! { indexmap! {
"VAR_USERLAND" => INNER.to_string(), "VAR_USERLAND" => INNER.into(),
}, },
indexmap! {}, indexmap! {},
indexmap! {}, indexmap! {},
@ -158,7 +159,7 @@ async fn wrap_edge_route(
.await?; .await?;
let inner_assets = indexmap! { let inner_assets = indexmap! {
INNER.to_string() => entry INNER.into() => entry
}; };
let wrapped = context let wrapped = context


@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use turbo_tasks::{TryJoinIterExt, ValueToString, Vc}; use turbo_tasks::{RcStr, TryJoinIterExt, ValueToString, Vc};
use turbo_tasks_fs::glob::Glob; use turbo_tasks_fs::glob::Glob;
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
@ -159,8 +159,8 @@ impl IncludedModuleReference {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl ValueToString for IncludedModuleReference { impl ValueToString for IncludedModuleReference {
#[turbo_tasks::function] #[turbo_tasks::function]
fn to_string(&self) -> Vc<String> { fn to_string(&self) -> Vc<RcStr> {
Vc::cell("module".to_string()) Vc::cell("module".into())
} }
} }


@ -4,7 +4,7 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use indoc::formatdoc; use indoc::formatdoc;
use turbo_tasks::{ValueToString, Vc}; use turbo_tasks::{RcStr, ValueToString, Vc};
use turbo_tasks_fs::{File, FileContent, FileSystemPath}; use turbo_tasks_fs::{File, FileContent, FileSystemPath};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_hash::hash_xxh3_hash64, turbo::tasks_hash::hash_xxh3_hash64,
@ -46,7 +46,7 @@ async fn hash_file_content(path: Vc<FileSystemPath>) -> Result<u64> {
pub async fn dynamic_image_metadata_source( pub async fn dynamic_image_metadata_source(
asset_context: Vc<Box<dyn AssetContext>>, asset_context: Vc<Box<dyn AssetContext>>,
path: Vc<FileSystemPath>, path: Vc<FileSystemPath>,
ty: String, ty: RcStr,
page: AppPage, page: AppPage,
) -> Result<Vc<Box<dyn Source>>> { ) -> Result<Vc<Box<dyn Source>>> {
let stem = path.file_stem().await?; let stem = path.file_stem().await?;
@ -126,7 +126,7 @@ pub async fn dynamic_image_metadata_source(
let file = File::from(code); let file = File::from(code);
let source = VirtualSource::new( let source = VirtualSource::new(
path.parent().join(format!("{stem}--metadata.js")), path.parent().join(format!("{stem}--metadata.js").into()),
AssetContent::file(file.into()), AssetContent::file(file.into()),
); );
@ -134,7 +134,7 @@ pub async fn dynamic_image_metadata_source(
} }
#[turbo_tasks::function] #[turbo_tasks::function]
async fn collect_direct_exports(module: Vc<Box<dyn Module>>) -> Result<Vc<Vec<String>>> { async fn collect_direct_exports(module: Vc<Box<dyn Module>>) -> Result<Vc<Vec<RcStr>>> {
let Some(ecmascript_asset) = let Some(ecmascript_asset) =
Vc::try_resolve_downcast_type::<EcmascriptModuleAsset>(module).await? Vc::try_resolve_downcast_type::<EcmascriptModuleAsset>(module).await?
else { else {


@ -2,7 +2,7 @@ use std::{collections::HashMap, ops::Deref};
use anyhow::Result; use anyhow::Result;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbo_tasks_fs::FileSystemPath; use turbo_tasks_fs::FileSystemPath;
use crate::next_app::{AppPage, PageSegment, PageType}; use crate::next_app::{AppPage, PageSegment, PageType};
@ -50,7 +50,7 @@ fn match_numbered_metadata(stem: &str) -> Option<(&str, &str)> {
fn match_metadata_file<'a>( fn match_metadata_file<'a>(
filename: &'a str, filename: &'a str,
page_extensions: &[String], page_extensions: &[RcStr],
metadata: &HashMap<&str, &[&str]>, metadata: &HashMap<&str, &[&str]>,
) -> Option<MetadataFileMatch<'a>> { ) -> Option<MetadataFileMatch<'a>> {
let (stem, ext) = filename.split_once('.')?; let (stem, ext) = filename.split_once('.')?;
@ -115,7 +115,7 @@ pub(crate) async fn get_content_type(path: Vc<FileSystemPath>) -> Result<String>
pub fn match_local_metadata_file<'a>( pub fn match_local_metadata_file<'a>(
basename: &'a str, basename: &'a str,
page_extensions: &[String], page_extensions: &[RcStr],
) -> Option<MetadataFileMatch<'a>> { ) -> Option<MetadataFileMatch<'a>> {
match_metadata_file(basename, page_extensions, STATIC_LOCAL_METADATA.deref()) match_metadata_file(basename, page_extensions, STATIC_LOCAL_METADATA.deref())
} }
@ -127,7 +127,7 @@ pub struct GlobalMetadataFileMatch<'a> {
pub fn match_global_metadata_file<'a>( pub fn match_global_metadata_file<'a>(
basename: &'a str, basename: &'a str,
page_extensions: &[String], page_extensions: &[RcStr],
) -> Option<GlobalMetadataFileMatch<'a>> { ) -> Option<GlobalMetadataFileMatch<'a>> {
match_metadata_file(basename, page_extensions, STATIC_GLOBAL_METADATA.deref()).map(|m| { match_metadata_file(basename, page_extensions, STATIC_GLOBAL_METADATA.deref()).map(|m| {
GlobalMetadataFileMatch { GlobalMetadataFileMatch {
@ -183,7 +183,7 @@ fn file_stem(path: &str) -> &str {
/// /favicon, /manifest, use to match dynamic API routes like app/robots.ts. /// /favicon, /manifest, use to match dynamic API routes like app/robots.ts.
pub fn is_metadata_route_file( pub fn is_metadata_route_file(
app_dir_relative_path: &str, app_dir_relative_path: &str,
page_extensions: &[String], page_extensions: &[RcStr],
with_extension: bool, with_extension: bool,
) -> bool { ) -> bool {
let (dir, filename) = split_directory(app_dir_relative_path); let (dir, filename) = split_directory(app_dir_relative_path);
@ -326,17 +326,20 @@ pub fn normalize_metadata_route(mut page: AppPage) -> Result<AppPage> {
page.0.pop(); page.0.pop();
page.push(PageSegment::Static(format!( page.push(PageSegment::Static(
"{}{}{}", format!(
base_name, "{}{}{}",
suffix base_name,
.map(|suffix| format!("-{suffix}")) suffix
.unwrap_or_default(), .map(|suffix| format!("-{suffix}"))
ext.map(|ext| format!(".{ext}")).unwrap_or_default(), .unwrap_or_default(),
)))?; ext.map(|ext| format!(".{ext}")).unwrap_or_default(),
)
.into(),
))?;
if !is_static_route { if !is_static_route {
page.push(PageSegment::OptionalCatchAll("__metadata_id__".to_string()))?; page.push(PageSegment::OptionalCatchAll("__metadata_id__".into()))?;
} }
page.push(PageSegment::PageType(PageType::Route))?; page.push(PageSegment::PageType(PageType::Route))?;


@ -145,7 +145,7 @@ async fn static_route_source(
let file = File::from(code); let file = File::from(code);
let source = VirtualSource::new( let source = VirtualSource::new(
path.parent().join(format!("{stem}--route-entry.js")), path.parent().join(format!("{stem}--route-entry.js").into()),
AssetContent::file(file.into()), AssetContent::file(file.into()),
); );
@ -197,7 +197,7 @@ async fn dynamic_text_route_source(path: Vc<FileSystemPath>) -> Result<Vc<Box<dy
let file = File::from(code); let file = File::from(code);
let source = VirtualSource::new( let source = VirtualSource::new(
path.parent().join(format!("{stem}--route-entry.js")), path.parent().join(format!("{stem}--route-entry.js").into()),
AssetContent::file(file.into()), AssetContent::file(file.into()),
); );
@ -218,8 +218,7 @@ async fn dynamic_site_map_route_source(
let mut static_generation_code = ""; let mut static_generation_code = "";
if mode.is_production() && page.contains(&PageSegment::Dynamic("[__metadata_id__]".to_string())) if mode.is_production() && page.contains(&PageSegment::Dynamic("[__metadata_id__]".into())) {
{
static_generation_code = indoc! { static_generation_code = indoc! {
r#" r#"
export async function generateStaticParams() { export async function generateStaticParams() {
@ -297,7 +296,7 @@ async fn dynamic_site_map_route_source(
let file = File::from(code); let file = File::from(code);
let source = VirtualSource::new( let source = VirtualSource::new(
path.parent().join(format!("{stem}--route-entry.js")), path.parent().join(format!("{stem}--route-entry.js").into()),
AssetContent::file(file.into()), AssetContent::file(file.into()),
); );
@ -355,7 +354,7 @@ async fn dynamic_image_route_source(path: Vc<FileSystemPath>) -> Result<Vc<Box<d
let file = File::from(code); let file = File::from(code);
let source = VirtualSource::new( let source = VirtualSource::new(
path.parent().join(format!("{stem}--route-entry.js")), path.parent().join(format!("{stem}--route-entry.js").into()),
AssetContent::file(file.into()), AssetContent::file(file.into()),
); );
@ -14,7 +14,7 @@ use std::{
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::{trace::TraceRawVcs, TaskInput}; use turbo_tasks::{trace::TraceRawVcs, RcStr, TaskInput};
pub use crate::next_app::{ pub use crate::next_app::{
app_client_references_chunks::{get_app_client_references_chunks, ClientReferencesChunks}, app_client_references_chunks::{get_app_client_references_chunks, ClientReferencesChunks},
@ -40,17 +40,17 @@ pub use crate::next_app::{
)] )]
pub enum PageSegment { pub enum PageSegment {
/// e.g. `/dashboard` /// e.g. `/dashboard`
Static(String), Static(RcStr),
/// e.g. `/[id]` /// e.g. `/[id]`
Dynamic(String), Dynamic(RcStr),
/// e.g. `/[...slug]` /// e.g. `/[...slug]`
CatchAll(String), CatchAll(RcStr),
/// e.g. `/[[...slug]]` /// e.g. `/[[...slug]]`
OptionalCatchAll(String), OptionalCatchAll(RcStr),
/// e.g. `/(shop)` /// e.g. `/(shop)`
Group(String), Group(RcStr),
/// e.g. `/@auth` /// e.g. `/@auth`
Parallel(String), Parallel(RcStr),
/// The final page type appended. (e.g. `/dashboard/page`, /// The final page type appended. (e.g. `/dashboard/page`,
/// `/api/hello/route`) /// `/api/hello/route`)
PageType(PageType), PageType(PageType),
@ -67,32 +67,32 @@ impl PageSegment {
} }
if let Some(s) = segment.strip_prefix('(').and_then(|s| s.strip_suffix(')')) { if let Some(s) = segment.strip_prefix('(').and_then(|s| s.strip_suffix(')')) {
return Ok(PageSegment::Group(s.to_string())); return Ok(PageSegment::Group(s.into()));
} }
if let Some(s) = segment.strip_prefix('@') { if let Some(s) = segment.strip_prefix('@') {
return Ok(PageSegment::Parallel(s.to_string())); return Ok(PageSegment::Parallel(s.into()));
} }
if let Some(s) = segment if let Some(s) = segment
.strip_prefix("[[...") .strip_prefix("[[...")
.and_then(|s| s.strip_suffix("]]")) .and_then(|s| s.strip_suffix("]]"))
{ {
return Ok(PageSegment::OptionalCatchAll(s.to_string())); return Ok(PageSegment::OptionalCatchAll(s.into()));
} }
if let Some(s) = segment if let Some(s) = segment
.strip_prefix("[...") .strip_prefix("[...")
.and_then(|s| s.strip_suffix(']')) .and_then(|s| s.strip_suffix(']'))
{ {
return Ok(PageSegment::CatchAll(s.to_string())); return Ok(PageSegment::CatchAll(s.into()));
} }
if let Some(s) = segment.strip_prefix('[').and_then(|s| s.strip_suffix(']')) { if let Some(s) = segment.strip_prefix('[').and_then(|s| s.strip_suffix(']')) {
return Ok(PageSegment::Dynamic(s.to_string())); return Ok(PageSegment::Dynamic(s.into()));
} }
Ok(PageSegment::Static(segment.to_string())) Ok(PageSegment::Static(segment.into()))
} }
} }
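Most of the mechanical churn in this hunk (and throughout the diff) is `.to_string()` becoming `.into()`. That only works because the new string type implements `From<&str>` and `From<String>`, so existing call sites keep compiling while cloning becomes cheap. A minimal sketch of such a type, assuming an `Arc<str>`-style backing purely for illustration (the actual `RcStr` may be backed differently):

```rust
use std::{fmt, ops::Deref, sync::Arc};

/// Illustrative stand-in for an `RcStr`-like shared string; not the real type.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct SharedStr(Arc<str>);

impl From<&str> for SharedStr {
    fn from(s: &str) -> Self {
        SharedStr(Arc::from(s))
    }
}

impl From<String> for SharedStr {
    fn from(s: String) -> Self {
        SharedStr(Arc::from(s))
    }
}

impl Deref for SharedStr {
    type Target = str;
    fn deref(&self) -> &str {
        &self.0
    }
}

impl fmt::Display for SharedStr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

// With these impls in place, `segment.to_string()` can become `segment.into()`
// and cloning the result is a reference-count bump rather than a heap copy.
fn parse_group(segment: &str) -> Option<SharedStr> {
    segment
        .strip_prefix('(')
        .and_then(|s| s.strip_suffix(')'))
        .map(|s| s.into())
}
```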
@ -308,13 +308,13 @@ impl PartialOrd for AppPage {
)] )]
pub enum PathSegment { pub enum PathSegment {
/// e.g. `/dashboard` /// e.g. `/dashboard`
Static(String), Static(RcStr),
/// e.g. `/[id]` /// e.g. `/[id]`
Dynamic(String), Dynamic(RcStr),
/// e.g. `/[...slug]` /// e.g. `/[...slug]`
CatchAll(String), CatchAll(RcStr),
/// e.g. `/[[...slug]]` /// e.g. `/[[...slug]]`
OptionalCatchAll(String), OptionalCatchAll(RcStr),
} }
impl Display for PathSegment { impl Display for PathSegment {
@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::FileSystemPath, turbo::tasks_fs::FileSystemPath,
turbopack::core::resolve::{options::ImportMapping, ExternalType}, turbopack::core::resolve::{options::ImportMapping, ExternalType},
@ -13,22 +13,19 @@ pub async fn get_postcss_package_mapping(
) -> Result<Vc<ImportMapping>> { ) -> Result<Vc<ImportMapping>> {
Ok(ImportMapping::Alternatives(vec![ Ok(ImportMapping::Alternatives(vec![
// Prefer the local installed version over the next.js version // Prefer the local installed version over the next.js version
ImportMapping::PrimaryAlternative("postcss".to_string(), Some(project_path)).cell(), ImportMapping::PrimaryAlternative("postcss".into(), Some(project_path)).cell(),
ImportMapping::PrimaryAlternative( ImportMapping::PrimaryAlternative("postcss".into(), Some(get_next_package(project_path)))
"postcss".to_string(), .cell(),
Some(get_next_package(project_path)),
)
.cell(),
]) ])
.cell()) .cell())
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn get_external_next_compiled_package_mapping( pub async fn get_external_next_compiled_package_mapping(
package_name: Vc<String>, package_name: Vc<RcStr>,
) -> Result<Vc<ImportMapping>> { ) -> Result<Vc<ImportMapping>> {
Ok(ImportMapping::Alternatives(vec![ImportMapping::External( Ok(ImportMapping::Alternatives(vec![ImportMapping::External(
Some(format!("next/dist/compiled/{}", &*package_name.await?)), Some(format!("next/dist/compiled/{}", &*package_name.await?).into()),
ExternalType::CommonJs, ExternalType::CommonJs,
) )
.into()]) .into()])
@ -2,7 +2,7 @@ use std::iter::once;
use anyhow::Result; use anyhow::Result;
use indexmap::IndexMap; use indexmap::IndexMap;
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbo_tasks_fs::FileSystem; use turbo_tasks_fs::FileSystem;
use turbopack_binding::{ use turbopack_binding::{
turbo::{tasks_env::EnvMap, tasks_fs::FileSystemPath}, turbo::{tasks_env::EnvMap, tasks_fs::FileSystemPath},
@ -66,17 +66,17 @@ use crate::{
util::foreign_code_context_condition, util::foreign_code_context_condition,
}; };
fn defines(define_env: &IndexMap<String, String>) -> CompileTimeDefines { fn defines(define_env: &IndexMap<RcStr, RcStr>) -> CompileTimeDefines {
let mut defines = IndexMap::new(); let mut defines = IndexMap::new();
for (k, v) in define_env { for (k, v) in define_env {
defines defines
.entry(k.split('.').map(|s| s.to_string()).collect::<Vec<String>>()) .entry(k.split('.').map(|s| s.into()).collect::<Vec<RcStr>>())
.or_insert_with(|| { .or_insert_with(|| {
let val = serde_json::from_str(v); let val = serde_json::from_str(v);
match val { match val {
Ok(serde_json::Value::Bool(v)) => CompileTimeDefineValue::Bool(v), Ok(serde_json::Value::Bool(v)) => CompileTimeDefineValue::Bool(v),
Ok(serde_json::Value::String(v)) => CompileTimeDefineValue::String(v), Ok(serde_json::Value::String(v)) => CompileTimeDefineValue::String(v.into()),
_ => CompileTimeDefineValue::JSON(v.clone()), _ => CompileTimeDefineValue::JSON(v.clone()),
} }
}); });
@ -95,14 +95,14 @@ async fn next_client_free_vars(define_env: Vc<EnvMap>) -> Result<Vc<FreeVarRefer
Ok(free_var_references!( Ok(free_var_references!(
..defines(&*define_env.await?).into_iter(), ..defines(&*define_env.await?).into_iter(),
Buffer = FreeVarReference::EcmaScriptModule { Buffer = FreeVarReference::EcmaScriptModule {
request: "node:buffer".to_string(), request: "node:buffer".into(),
lookup_path: None, lookup_path: None,
export: Some("Buffer".to_string()), export: Some("Buffer".into()),
}, },
process = FreeVarReference::EcmaScriptModule { process = FreeVarReference::EcmaScriptModule {
request: "node:process".to_string(), request: "node:process".into(),
lookup_path: None, lookup_path: None,
export: Some("default".to_string()), export: Some("default".into()),
} }
) )
.cell()) .cell())
@ -110,7 +110,7 @@ async fn next_client_free_vars(define_env: Vc<EnvMap>) -> Result<Vc<FreeVarRefer
#[turbo_tasks::function] #[turbo_tasks::function]
pub fn get_client_compile_time_info( pub fn get_client_compile_time_info(
browserslist_query: String, browserslist_query: RcStr,
define_env: Vc<EnvMap>, define_env: Vc<EnvMap>,
) -> Vc<CompileTimeInfo> { ) -> Vc<CompileTimeInfo> {
CompileTimeInfo::builder(Environment::new(Value::new(ExecutionEnvironment::Browser( CompileTimeInfo::builder(Environment::new(Value::new(ExecutionEnvironment::Browser(
@ -149,7 +149,7 @@ pub async fn get_client_resolve_options_context(
let next_client_fallback_import_map = get_next_client_fallback_import_map(ty); let next_client_fallback_import_map = get_next_client_fallback_import_map(ty);
let next_client_resolved_map = let next_client_resolved_map =
get_next_client_resolved_map(project_path, project_path, *mode.await?); get_next_client_resolved_map(project_path, project_path, *mode.await?);
let custom_conditions = vec![mode.await?.condition().to_string()]; let custom_conditions = vec![mode.await?.condition().into()];
let module_options_context = ResolveOptionsContext { let module_options_context = ResolveOptionsContext {
enable_node_modules: Some(project_path.root().resolve().await?), enable_node_modules: Some(project_path.root().resolve().await?),
custom_conditions, custom_conditions,
@ -218,7 +218,7 @@ pub async fn get_client_module_options_context(
// foreign_code_context_condition. This allows to import codes from // foreign_code_context_condition. This allows to import codes from
// node_modules that requires webpack loaders, which next-dev implicitly // node_modules that requires webpack loaders, which next-dev implicitly
// does by default. // does by default.
let conditions = vec!["browser".to_string(), mode.await?.condition().to_string()]; let conditions = vec!["browser".into(), mode.await?.condition().into()];
let foreign_enable_webpack_loaders = webpack_loader_options( let foreign_enable_webpack_loaders = webpack_loader_options(
project_path, project_path,
next_config, next_config,
@ -226,7 +226,7 @@ pub async fn get_client_module_options_context(
conditions conditions
.iter() .iter()
.cloned() .cloned()
.chain(once("foreign".to_string())) .chain(once("foreign".into()))
.collect(), .collect(),
) )
.await?; .await?;
@ -326,7 +326,7 @@ pub async fn get_client_module_options_context(
pub async fn get_client_chunking_context( pub async fn get_client_chunking_context(
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
client_root: Vc<FileSystemPath>, client_root: Vc<FileSystemPath>,
asset_prefix: Vc<Option<String>>, asset_prefix: Vc<Option<RcStr>>,
environment: Vc<Environment>, environment: Vc<Environment>,
mode: Vc<NextMode>, mode: Vc<NextMode>,
) -> Result<Vc<Box<dyn ChunkingContext>>> { ) -> Result<Vc<Box<dyn ChunkingContext>>> {
@ -335,7 +335,7 @@ pub async fn get_client_chunking_context(
project_path, project_path,
client_root, client_root,
client_root, client_root,
client_root.join("static/chunks".to_string()), client_root.join("static/chunks".into()),
get_client_assets_path(client_root), get_client_assets_path(client_root),
environment, environment,
next_mode.runtime_type(), next_mode.runtime_type(),
@ -353,7 +353,7 @@ pub async fn get_client_chunking_context(
#[turbo_tasks::function] #[turbo_tasks::function]
pub fn get_client_assets_path(client_root: Vc<FileSystemPath>) -> Vc<FileSystemPath> { pub fn get_client_assets_path(client_root: Vc<FileSystemPath>) -> Vc<FileSystemPath> {
client_root.join("static/media".to_string()) client_root.join("static/media".into())
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -379,7 +379,7 @@ pub async fn get_client_runtime_entries(
// functions to be available. // functions to be available.
if let Some(request) = enable_react_refresh { if let Some(request) = enable_react_refresh {
runtime_entries runtime_entries
.push(RuntimeEntry::Request(request, project_root.join("_".to_string())).cell()) .push(RuntimeEntry::Request(request, project_root.join("_".into())).cell())
}; };
} }
@ -387,9 +387,9 @@ pub async fn get_client_runtime_entries(
runtime_entries.push( runtime_entries.push(
RuntimeEntry::Request( RuntimeEntry::Request(
Request::parse(Value::new(Pattern::Constant( Request::parse(Value::new(Pattern::Constant(
"next/dist/client/app-next-turbopack.js".to_string(), "next/dist/client/app-next-turbopack.js".into(),
))), ))),
project_root.join("_".to_string()), project_root.join("_".into()),
) )
.cell(), .cell(),
); );
@ -1,5 +1,5 @@
use anyhow::{bail, Context, Result}; use anyhow::{bail, Context, Result};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
asset::{Asset, AssetContent}, asset::{Asset, AssetContent},
@ -31,8 +31,8 @@ impl CssClientReferenceModule {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn css_client_reference_modifier() -> Vc<String> { fn css_client_reference_modifier() -> Vc<RcStr> {
Vc::cell("css client reference".to_string()) Vc::cell("css client reference".into())
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -1,6 +1,6 @@
#![allow(rustdoc::private_intra_doc_links)] #![allow(rustdoc::private_intra_doc_links)]
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
asset::{Asset, AssetContent}, asset::{Asset, AssetContent},
@ -46,8 +46,8 @@ impl EcmascriptClientReferenceModule {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn ecmascript_client_reference_modifier() -> Vc<String> { fn ecmascript_client_reference_modifier() -> Vc<RcStr> {
Vc::cell("ecmascript client reference".to_string()) Vc::cell("ecmascript client reference".into())
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -2,7 +2,7 @@ use std::{io::Write, iter::once};
use anyhow::{bail, Context, Result}; use anyhow::{bail, Context, Result};
use indoc::writedoc; use indoc::writedoc;
use turbo_tasks::{Value, ValueToString, Vc}; use turbo_tasks::{RcStr, Value, ValueToString, Vc};
use turbo_tasks_fs::File; use turbo_tasks_fs::File;
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
@ -143,7 +143,7 @@ impl EcmascriptClientReferenceProxyModule {
AssetContent::file(File::from(code.source_code().clone()).into()); AssetContent::file(File::from(code.source_code().clone()).into());
let proxy_source = VirtualSource::new( let proxy_source = VirtualSource::new(
self.server_module_ident.path().join("proxy.js".to_string()), self.server_module_ident.path().join("proxy.js".into()),
proxy_module_content, proxy_module_content,
); );
@ -253,13 +253,13 @@ struct ProxyModuleChunkItem {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn client_proxy_modifier() -> Vc<String> { fn client_proxy_modifier() -> Vc<RcStr> {
Vc::cell("client proxy".to_string()) Vc::cell("client proxy".into())
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn client_reference_description() -> Vc<String> { fn client_reference_description() -> Vc<RcStr> {
Vc::cell("client references".to_string()) Vc::cell("client references".into())
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -1,5 +1,5 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
context::ProcessResult, context::ProcessResult,
@ -40,7 +40,7 @@ impl NextEcmascriptClientReferenceTransition {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl Transition for NextEcmascriptClientReferenceTransition { impl Transition for NextEcmascriptClientReferenceTransition {
#[turbo_tasks::function] #[turbo_tasks::function]
fn process_layer(self: Vc<Self>, layer: Vc<String>) -> Vc<String> { fn process_layer(self: Vc<Self>, layer: Vc<RcStr>) -> Vc<RcStr> {
layer layer
} }
@ -58,10 +58,12 @@ impl Transition for NextEcmascriptClientReferenceTransition {
let ident = source.ident().await?; let ident = source.ident().await?;
let ident_path = ident.path.await?; let ident_path = ident.path.await?;
let client_source = if ident_path.path.contains("next/dist/esm/") { let client_source = if ident_path.path.contains("next/dist/esm/") {
let path = ident let path = ident.path.root().join(
.path ident_path
.root() .path
.join(ident_path.path.replace("next/dist/esm/", "next/dist/")); .replace("next/dist/esm/", "next/dist/")
.into(),
);
Vc::upcast(FileSource::new_with_query(path, ident.query)) Vc::upcast(FileSource::new_with_query(path, ident.query))
} else { } else {
source source
@ -8,7 +8,7 @@ use turbo_tasks::{
debug::ValueDebugFormat, debug::ValueDebugFormat,
graph::{AdjacencyMap, GraphTraversal, Visit, VisitControlFlow}, graph::{AdjacencyMap, GraphTraversal, Visit, VisitControlFlow},
trace::TraceRawVcs, trace::TraceRawVcs,
ReadRef, TryJoinIterExt, ValueToString, Vc, RcStr, ReadRef, TryJoinIterExt, ValueToString, Vc,
}; };
use turbopack_binding::turbopack::core::{ use turbopack_binding::turbopack::core::{
module::{Module, Modules}, module::{Module, Modules},
@ -170,9 +170,9 @@ struct VisitClientReferenceNode {
Clone, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, ValueDebugFormat, TraceRawVcs, Clone, Eq, PartialEq, Hash, Serialize, Deserialize, Debug, ValueDebugFormat, TraceRawVcs,
)] )]
enum VisitClientReferenceNodeType { enum VisitClientReferenceNodeType {
ClientReference(ClientReference, ReadRef<String>), ClientReference(ClientReference, ReadRef<RcStr>),
ServerComponentEntry(Vc<NextServerComponentModule>, ReadRef<String>), ServerComponentEntry(Vc<NextServerComponentModule>, ReadRef<RcStr>),
Internal(Vc<Box<dyn Module>>, ReadRef<String>), Internal(Vc<Box<dyn Module>>, ReadRef<RcStr>),
} }
impl Visit<VisitClientReferenceNode> for VisitClientReference { impl Visit<VisitClientReferenceNode> for VisitClientReference {
@ -269,13 +269,13 @@ impl Visit<VisitClientReferenceNode> for VisitClientReference {
fn span(&mut self, node: &VisitClientReferenceNode) -> tracing::Span { fn span(&mut self, node: &VisitClientReferenceNode) -> tracing::Span {
match &node.ty { match &node.ty {
VisitClientReferenceNodeType::ClientReference(_, name) => { VisitClientReferenceNodeType::ClientReference(_, name) => {
tracing::info_span!("client reference", name = **name) tracing::info_span!("client reference", name = name.to_string())
} }
VisitClientReferenceNodeType::Internal(_, name) => { VisitClientReferenceNodeType::Internal(_, name) => {
tracing::info_span!("module", name = **name) tracing::info_span!("module", name = name.to_string())
} }
VisitClientReferenceNodeType::ServerComponentEntry(_, name) => { VisitClientReferenceNodeType::ServerComponentEntry(_, name) => {
tracing::info_span!("layout segment", name = **name) tracing::info_span!("layout segment", name = name.to_string())
} }
} }
} }
@ -4,7 +4,7 @@ use anyhow::{bail, Context, Result};
use indexmap::IndexMap; use indexmap::IndexMap;
use serde::{Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use turbo_tasks::{trace::TraceRawVcs, TaskInput, Vc}; use turbo_tasks::{trace::TraceRawVcs, RcStr, TaskInput, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::{tasks_env::EnvMap, tasks_fs::FileSystemPath}, turbo::{tasks_env::EnvMap, tasks_fs::FileSystemPath},
turbopack::{ turbopack::{
@ -59,28 +59,28 @@ struct CustomRoutes {
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct NextConfig { pub struct NextConfig {
pub config_file: Option<String>, pub config_file: Option<RcStr>,
pub config_file_name: String, pub config_file_name: RcStr,
/// In-memory cache size in bytes. /// In-memory cache size in bytes.
/// ///
/// If `cache_max_memory_size: 0` disables in-memory caching. /// If `cache_max_memory_size: 0` disables in-memory caching.
pub cache_max_memory_size: Option<f64>, pub cache_max_memory_size: Option<f64>,
/// custom path to a cache handler to use /// custom path to a cache handler to use
pub cache_handler: Option<String>, pub cache_handler: Option<RcStr>,
pub env: IndexMap<String, JsonValue>, pub env: IndexMap<String, JsonValue>,
pub experimental: ExperimentalConfig, pub experimental: ExperimentalConfig,
pub images: ImageConfig, pub images: ImageConfig,
pub page_extensions: Vec<String>, pub page_extensions: Vec<RcStr>,
pub react_strict_mode: Option<bool>, pub react_strict_mode: Option<bool>,
pub transpile_packages: Option<Vec<String>>, pub transpile_packages: Option<Vec<RcStr>>,
pub modularize_imports: Option<IndexMap<String, ModularizeImportPackageConfig>>, pub modularize_imports: Option<IndexMap<String, ModularizeImportPackageConfig>>,
pub dist_dir: Option<String>, pub dist_dir: Option<RcStr>,
sass_options: Option<serde_json::Value>, sass_options: Option<serde_json::Value>,
pub trailing_slash: Option<bool>, pub trailing_slash: Option<bool>,
pub asset_prefix: Option<String>, pub asset_prefix: Option<RcStr>,
pub base_path: Option<String>, pub base_path: Option<RcStr>,
pub skip_middleware_url_normalize: Option<bool>, pub skip_middleware_url_normalize: Option<bool>,
pub skip_trailing_slash_redirect: Option<bool>, pub skip_trailing_slash_redirect: Option<bool>,
pub i18n: Option<I18NConfig>, pub i18n: Option<I18NConfig>,
@ -98,7 +98,7 @@ pub struct NextConfig {
/// build. /// build.
/// ///
/// [API Reference](https://nextjs.org/docs/app/api-reference/next-config-js/serverExternalPackages) /// [API Reference](https://nextjs.org/docs/app/api-reference/next-config-js/serverExternalPackages)
pub server_external_packages: Option<Vec<String>>, pub server_external_packages: Option<Vec<RcStr>>,
#[serde(rename = "_originalRedirects")] #[serde(rename = "_originalRedirects")]
pub original_redirects: Option<Vec<Redirect>>, pub original_redirects: Option<Vec<Redirect>>,
@ -223,30 +223,30 @@ pub enum OutputType {
#[serde(tag = "type", rename_all = "kebab-case")] #[serde(tag = "type", rename_all = "kebab-case")]
pub enum RouteHas { pub enum RouteHas {
Header { Header {
key: String, key: RcStr,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
value: Option<String>, value: Option<RcStr>,
}, },
Cookie { Cookie {
key: String, key: RcStr,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
value: Option<String>, value: Option<RcStr>,
}, },
Query { Query {
key: String, key: RcStr,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
value: Option<String>, value: Option<RcStr>,
}, },
Host { Host {
value: String, value: RcStr,
}, },
} }
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, TraceRawVcs)] #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, TraceRawVcs)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct HeaderValue { pub struct HeaderValue {
pub key: String, pub key: RcStr,
pub value: String, pub value: RcStr,
} }
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, TraceRawVcs)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, TraceRawVcs)]
@ -413,9 +413,9 @@ pub enum RemotePatternProtocal {
pub struct ExperimentalTurboConfig { pub struct ExperimentalTurboConfig {
/// This option has been replaced by `rules`. /// This option has been replaced by `rules`.
pub loaders: Option<JsonValue>, pub loaders: Option<JsonValue>,
pub rules: Option<IndexMap<String, RuleConfigItemOrShortcut>>, pub rules: Option<IndexMap<RcStr, RuleConfigItemOrShortcut>>,
pub resolve_alias: Option<IndexMap<String, JsonValue>>, pub resolve_alias: Option<IndexMap<RcStr, JsonValue>>,
pub resolve_extensions: Option<Vec<String>>, pub resolve_extensions: Option<Vec<RcStr>>,
pub use_swc_css: Option<bool>, pub use_swc_css: Option<bool>,
} }
@ -424,7 +424,7 @@ pub struct ExperimentalTurboConfig {
pub struct RuleConfigItemOptions { pub struct RuleConfigItemOptions {
pub loaders: Vec<LoaderItem>, pub loaders: Vec<LoaderItem>,
#[serde(default, alias = "as")] #[serde(default, alias = "as")]
pub rename_as: Option<String>, pub rename_as: Option<RcStr>,
} }
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, TraceRawVcs)] #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, TraceRawVcs)]
@ -438,14 +438,14 @@ pub enum RuleConfigItemOrShortcut {
#[serde(rename_all = "camelCase", untagged)] #[serde(rename_all = "camelCase", untagged)]
pub enum RuleConfigItem { pub enum RuleConfigItem {
Options(RuleConfigItemOptions), Options(RuleConfigItemOptions),
Conditional(IndexMap<String, RuleConfigItem>), Conditional(IndexMap<RcStr, RuleConfigItem>),
Boolean(bool), Boolean(bool),
} }
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, TraceRawVcs)] #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, TraceRawVcs)]
#[serde(untagged)] #[serde(untagged)]
pub enum LoaderItem { pub enum LoaderItem {
LoaderName(String), LoaderName(RcStr),
LoaderOptions(WebpackLoaderItem), LoaderOptions(WebpackLoaderItem),
} }
@ -473,7 +473,7 @@ pub struct ReactCompilerOptions {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub compilation_mode: Option<ReactCompilerMode>, pub compilation_mode: Option<ReactCompilerMode>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub panic_threshold: Option<String>, pub panic_threshold: Option<RcStr>,
} }
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, TraceRawVcs)] #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, TraceRawVcs)]
@ -489,26 +489,26 @@ pub struct OptionalReactCompilerOptions(Option<Vc<ReactCompilerOptions>>);
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, TraceRawVcs)] #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize, TraceRawVcs)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ExperimentalConfig { pub struct ExperimentalConfig {
pub allowed_revalidate_header_keys: Option<Vec<String>>, pub allowed_revalidate_header_keys: Option<Vec<RcStr>>,
pub client_router_filter: Option<bool>, pub client_router_filter: Option<bool>,
/// decimal for percent for possible false positives e.g. 0.01 for 10% /// decimal for percent for possible false positives e.g. 0.01 for 10%
/// potential false matches lower percent increases size of the filter /// potential false matches lower percent increases size of the filter
pub client_router_filter_allowed_rate: Option<f64>, pub client_router_filter_allowed_rate: Option<f64>,
pub client_router_filter_redirects: Option<bool>, pub client_router_filter_redirects: Option<bool>,
pub fetch_cache_key_prefix: Option<String>, pub fetch_cache_key_prefix: Option<RcStr>,
pub isr_flush_to_disk: Option<bool>, pub isr_flush_to_disk: Option<bool>,
/// For use with `@next/mdx`. Compile MDX files using the new Rust compiler. /// For use with `@next/mdx`. Compile MDX files using the new Rust compiler.
/// @see [api reference](https://nextjs.org/docs/app/api-reference/next-config-js/mdxRs) /// @see [api reference](https://nextjs.org/docs/app/api-reference/next-config-js/mdxRs)
mdx_rs: Option<MdxRsOptions>, mdx_rs: Option<MdxRsOptions>,
pub strict_next_head: Option<bool>, pub strict_next_head: Option<bool>,
pub swc_plugins: Option<Vec<(String, serde_json::Value)>>, pub swc_plugins: Option<Vec<(RcStr, serde_json::Value)>>,
pub turbo: Option<ExperimentalTurboConfig>, pub turbo: Option<ExperimentalTurboConfig>,
pub turbotrace: Option<serde_json::Value>, pub turbotrace: Option<serde_json::Value>,
pub external_middleware_rewrites_resolve: Option<bool>, pub external_middleware_rewrites_resolve: Option<bool>,
pub scroll_restoration: Option<bool>, pub scroll_restoration: Option<bool>,
pub use_deployment_id: Option<bool>, pub use_deployment_id: Option<bool>,
pub use_deployment_id_server_actions: Option<bool>, pub use_deployment_id_server_actions: Option<bool>,
pub deployment_id: Option<String>, pub deployment_id: Option<RcStr>,
pub manual_client_base_path: Option<bool>, pub manual_client_base_path: Option<bool>,
pub optimistic_client_cache: Option<bool>, pub optimistic_client_cache: Option<bool>,
pub middleware_prefetch: Option<MiddlewarePrefetchType>, pub middleware_prefetch: Option<MiddlewarePrefetchType>,
@ -516,7 +516,7 @@ pub struct ExperimentalConfig {
/// Use Record<string, unknown> as critters doesn't export its Option type ([link](https://github.com/GoogleChromeLabs/critters/blob/a590c05f9197b656d2aeaae9369df2483c26b072/packages/critters/src/index.d.ts)) /// Use Record<string, unknown> as critters doesn't export its Option type ([link](https://github.com/GoogleChromeLabs/critters/blob/a590c05f9197b656d2aeaae9369df2483c26b072/packages/critters/src/index.d.ts))
pub optimize_css: Option<serde_json::Value>, pub optimize_css: Option<serde_json::Value>,
pub next_script_workers: Option<bool>, pub next_script_workers: Option<bool>,
pub web_vitals_attribution: Option<Vec<String>>, pub web_vitals_attribution: Option<Vec<RcStr>>,
pub server_actions: Option<ServerActionsOrLegacyBool>, pub server_actions: Option<ServerActionsOrLegacyBool>,
pub sri: Option<SubResourceIntegrity>, pub sri: Option<SubResourceIntegrity>,
react_compiler: Option<ReactCompilerOptionsOrBoolean>, react_compiler: Option<ReactCompilerOptionsOrBoolean>,
@ -554,10 +554,10 @@ pub struct ExperimentalConfig {
optimize_server_react: Option<bool>, optimize_server_react: Option<bool>,
/// Automatically apply the "modularize_imports" optimization to imports of /// Automatically apply the "modularize_imports" optimization to imports of
/// the specified packages. /// the specified packages.
optimize_package_imports: Option<Vec<String>>, optimize_package_imports: Option<Vec<RcStr>>,
output_file_tracing_ignores: Option<Vec<String>>, output_file_tracing_ignores: Option<Vec<RcStr>>,
output_file_tracing_includes: Option<serde_json::Value>, output_file_tracing_includes: Option<serde_json::Value>,
output_file_tracing_root: Option<String>, output_file_tracing_root: Option<RcStr>,
/// Using this feature will enable the `react@experimental` for the `app` /// Using this feature will enable the `react@experimental` for the `app`
/// directory. /// directory.
ppr: Option<ExperimentalPartialPrerendering>, ppr: Option<ExperimentalPartialPrerendering>,
@ -760,7 +760,7 @@ impl RemoveConsoleConfig {
} }
#[turbo_tasks::value(transparent)] #[turbo_tasks::value(transparent)]
pub struct ResolveExtensions(Option<Vec<String>>); pub struct ResolveExtensions(Option<Vec<RcStr>>);
#[turbo_tasks::value(transparent)] #[turbo_tasks::value(transparent)]
pub struct OptionalMdxTransformOptions(Option<Vc<MdxTransformOptions>>); pub struct OptionalMdxTransformOptions(Option<Vc<MdxTransformOptions>>);
@ -768,7 +768,7 @@ pub struct OptionalMdxTransformOptions(Option<Vc<MdxTransformOptions>>);
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl NextConfig { impl NextConfig {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn from_string(string: Vc<String>) -> Result<Vc<Self>> { pub async fn from_string(string: Vc<RcStr>) -> Result<Vc<Self>> {
let string = string.await?; let string = string.await?;
let config: NextConfig = serde_json::from_str(&string) let config: NextConfig = serde_json::from_str(&string)
.with_context(|| format!("failed to parse next.config.js: {}", string))?; .with_context(|| format!("failed to parse next.config.js: {}", string))?;
@ -781,7 +781,7 @@ impl NextConfig {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn server_external_packages(self: Vc<Self>) -> Result<Vc<Vec<String>>> { pub async fn server_external_packages(self: Vc<Self>) -> Result<Vc<Vec<RcStr>>> {
Ok(Vc::cell( Ok(Vc::cell(
self.await? self.await?
.server_external_packages .server_external_packages
@ -802,12 +802,12 @@ impl NextConfig {
.iter() .iter()
.map(|(k, v)| { .map(|(k, v)| {
( (
k.clone(), k.as_str().into(),
if let JsonValue::String(s) = v { if let JsonValue::String(s) = v {
// A string value is kept, calling `to_string` would wrap in to quotes. // A string value is kept, calling `to_string` would wrap in to quotes.
s.clone() s.as_str().into()
} else { } else {
v.to_string() v.to_string().into()
}, },
) )
}) })
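The inline comment about `to_string` is easy to miss: `serde_json::Value` re-serializes when displayed, so a JSON string would come back with its quotes, which is why the code above keeps the string via `as_str()` and only falls back to `to_string()` for non-string values. A small illustration with made-up values:

```rust
use serde_json::Value;

fn main() {
    let v = Value::String("production".to_string());

    // Display/to_string re-serializes to JSON, quotes included.
    assert_eq!(v.to_string(), "\"production\"");

    // as_str returns the unquoted contents, which is what the env map wants.
    assert_eq!(v.as_str(), Some("production"));

    // Non-string values still stringify as expected through to_string.
    assert_eq!(Value::from(3000).to_string(), "3000");
}
```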
@ -822,12 +822,12 @@ impl NextConfig {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn page_extensions(self: Vc<Self>) -> Result<Vc<Vec<String>>> { pub async fn page_extensions(self: Vc<Self>) -> Result<Vc<Vec<RcStr>>> {
Ok(Vc::cell(self.await?.page_extensions.clone())) Ok(Vc::cell(self.await?.page_extensions.clone()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn transpile_packages(self: Vc<Self>) -> Result<Vc<Vec<String>>> { pub async fn transpile_packages(self: Vc<Self>) -> Result<Vc<Vec<RcStr>>> {
Ok(Vc::cell( Ok(Vc::cell(
self.await?.transpile_packages.clone().unwrap_or_default(), self.await?.transpile_packages.clone().unwrap_or_default(),
)) ))
@ -836,7 +836,7 @@ impl NextConfig {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn webpack_rules( pub async fn webpack_rules(
self: Vc<Self>, self: Vc<Self>,
active_conditions: Vec<String>, active_conditions: Vec<RcStr>,
) -> Result<Vc<OptionWebpackRules>> { ) -> Result<Vc<OptionWebpackRules>> {
let this = self.await?; let this = self.await?;
let Some(turbo_rules) = this let Some(turbo_rules) = this
@ -874,7 +874,7 @@ impl NextConfig {
} }
fn find_rule<'a>( fn find_rule<'a>(
rule: &'a RuleConfigItem, rule: &'a RuleConfigItem,
active_conditions: &HashSet<String>, active_conditions: &HashSet<RcStr>,
) -> FindRuleResult<'a> { ) -> FindRuleResult<'a> {
match rule { match rule {
RuleConfigItem::Options(rule) => FindRuleResult::Found(rule), RuleConfigItem::Options(rule) => FindRuleResult::Found(rule),
@ -900,7 +900,7 @@ impl NextConfig {
match rule { match rule {
RuleConfigItemOrShortcut::Loaders(loaders) => { RuleConfigItemOrShortcut::Loaders(loaders) => {
rules.insert( rules.insert(
ext.to_string(), ext.clone(),
LoaderRuleItem { LoaderRuleItem {
loaders: transform_loaders(loaders), loaders: transform_loaders(loaders),
rename_as: None, rename_as: None,
@ -912,7 +912,7 @@ impl NextConfig {
find_rule(rule, &active_conditions) find_rule(rule, &active_conditions)
{ {
rules.insert( rules.insert(
ext.to_string(), ext.clone(),
LoaderRuleItem { LoaderRuleItem {
loaders: transform_loaders(loaders), loaders: transform_loaders(loaders),
rename_as: rename_as.clone(), rename_as: rename_as.clone(),
@ -980,8 +980,7 @@ impl NextConfig {
provider_import_source: Some( provider_import_source: Some(
options options
.provider_import_source .provider_import_source
.as_ref() .clone()
.map(|s| s.to_string())
.unwrap_or(mdx_import_source_file()), .unwrap_or(mdx_import_source_file()),
), ),
..options.clone() ..options.clone()
@ -1041,18 +1040,21 @@ impl NextConfig {
/// Returns the final asset prefix. If an assetPrefix is set, it's used. /// Returns the final asset prefix. If an assetPrefix is set, it's used.
/// Otherwise, the basePath is used. /// Otherwise, the basePath is used.
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn computed_asset_prefix(self: Vc<Self>) -> Result<Vc<Option<String>>> { pub async fn computed_asset_prefix(self: Vc<Self>) -> Result<Vc<Option<RcStr>>> {
let this = self.await?; let this = self.await?;
Ok(Vc::cell(Some(format!( Ok(Vc::cell(Some(
"{}/_next/", format!(
if let Some(asset_prefix) = &this.asset_prefix { "{}/_next/",
asset_prefix if let Some(asset_prefix) = &this.asset_prefix {
} else { asset_prefix
this.base_path.as_ref().map_or("", |b| b.as_str()) } else {
} this.base_path.as_ref().map_or("", |b| b.as_str())
.trim_end_matches('/') }
)))) .trim_end_matches('/')
)
.into(),
)))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
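The behavior documented above (prefer `assetPrefix`, otherwise fall back to `basePath`, trim any trailing slash, then append `/_next/`) is easier to see outside the turbo-tasks plumbing. A standalone sketch with hypothetical inputs, not the actual function:

```rust
fn computed_asset_prefix(asset_prefix: Option<&str>, base_path: Option<&str>) -> String {
    format!(
        "{}/_next/",
        asset_prefix
            .or(base_path)
            .unwrap_or("")
            .trim_end_matches('/')
    )
}

fn main() {
    // assetPrefix wins when both are set; the trailing slash is trimmed first.
    assert_eq!(
        computed_asset_prefix(Some("https://cdn.example.com/"), Some("/docs")),
        "https://cdn.example.com/_next/"
    );
    // Otherwise basePath is used.
    assert_eq!(computed_asset_prefix(None, Some("/docs")), "/docs/_next/");
    // With neither set the prefix is just "/_next/".
    assert_eq!(computed_asset_prefix(None, None), "/_next/");
}
```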
@ -1090,7 +1092,7 @@ impl NextConfig {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn optimize_package_imports(self: Vc<Self>) -> Result<Vc<Vec<String>>> { pub async fn optimize_package_imports(self: Vc<Self>) -> Result<Vc<Vec<RcStr>>> {
Ok(Vc::cell( Ok(Vc::cell(
self.await? self.await?
.experimental .experimental
@ -1113,7 +1115,7 @@ pub struct JsConfig {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl JsConfig { impl JsConfig {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn from_string(string: Vc<String>) -> Result<Vc<Self>> { pub async fn from_string(string: Vc<RcStr>) -> Result<Vc<Self>> {
let string = string.await?; let string = string.await?;
let config: JsConfig = serde_json::from_str(&string) let config: JsConfig = serde_json::from_str(&string)
.with_context(|| format!("failed to parse next.config.js: {}", string))?; .with_context(|| format!("failed to parse next.config.js: {}", string))?;
@ -1132,9 +1134,9 @@ impl JsConfig {
#[turbo_tasks::value] #[turbo_tasks::value]
struct OutdatedConfigIssue { struct OutdatedConfigIssue {
path: Vc<FileSystemPath>, path: Vc<FileSystemPath>,
old_name: String, old_name: RcStr,
new_name: String, new_name: RcStr,
description: String, description: RcStr,
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -1158,7 +1160,7 @@ impl Issue for OutdatedConfigIssue {
fn title(&self) -> Vc<StyledString> { fn title(&self) -> Vc<StyledString> {
StyledString::Line(vec![ StyledString::Line(vec![
StyledString::Code(self.old_name.clone()), StyledString::Code(self.old_name.clone()),
StyledString::Text(" has been replaced by ".to_string()), StyledString::Text(" has been replaced by ".into()),
StyledString::Code(self.new_name.clone()), StyledString::Code(self.new_name.clone()),
]) ])
.cell() .cell()
@ -1166,8 +1168,6 @@ impl Issue for OutdatedConfigIssue {
#[turbo_tasks::function] #[turbo_tasks::function]
fn description(&self) -> Vc<OptionStyledString> { fn description(&self) -> Vc<OptionStyledString> {
Vc::cell(Some( Vc::cell(Some(StyledString::Text(self.description.clone()).cell()))
StyledString::Text(self.description.to_string()).cell(),
))
} }
} }
@ -1,5 +1,5 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbopack::core::{ use turbopack_binding::turbopack::core::{
asset::{Asset, AssetContent}, asset::{Asset, AssetContent},
chunk::{ChunkableModule, ChunkingContext, ChunkingContextExt}, chunk::{ChunkableModule, ChunkingContext, ChunkingContextExt},
@ -46,8 +46,8 @@ impl NextDynamicEntryModule {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn dynamic_modifier() -> Vc<String> { fn dynamic_modifier() -> Vc<RcStr> {
Vc::cell("dynamic".to_string()) Vc::cell("dynamic".into())
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{context::ProcessResult, reference_type::ReferenceType, source::Source}, core::{context::ProcessResult, reference_type::ReferenceType, source::Source},
turbopack::{transition::Transition, ModuleAssetContext}, turbopack::{transition::Transition, ModuleAssetContext},
@ -26,7 +26,7 @@ impl NextDynamicTransition {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl Transition for NextDynamicTransition { impl Transition for NextDynamicTransition {
#[turbo_tasks::function] #[turbo_tasks::function]
fn process_layer(self: Vc<Self>, layer: Vc<String>) -> Vc<String> { fn process_layer(self: Vc<Self>, layer: Vc<RcStr>) -> Vc<RcStr> {
layer layer
} }
@ -4,7 +4,7 @@ use anyhow::Result;
use tracing::Instrument; use tracing::Instrument;
use turbo_tasks::{ use turbo_tasks::{
graph::{AdjacencyMap, GraphTraversal, Visit, VisitControlFlow}, graph::{AdjacencyMap, GraphTraversal, Visit, VisitControlFlow},
ReadRef, TryJoinIterExt, ValueToString, Vc, RcStr, ReadRef, TryJoinIterExt, ValueToString, Vc,
}; };
use turbopack_binding::turbopack::core::{ use turbopack_binding::turbopack::core::{
module::{Module, Modules}, module::{Module, Modules},
@ -66,8 +66,8 @@ struct VisitDynamic;
#[derive(Clone, Eq, PartialEq, Hash)] #[derive(Clone, Eq, PartialEq, Hash)]
enum VisitDynamicNode { enum VisitDynamicNode {
Dynamic(Vc<NextDynamicEntryModule>, ReadRef<String>), Dynamic(Vc<NextDynamicEntryModule>, ReadRef<RcStr>),
Internal(Vc<Box<dyn Module>>, ReadRef<String>), Internal(Vc<Box<dyn Module>>, ReadRef<RcStr>),
} }
impl Visit<VisitDynamicNode> for VisitDynamic { impl Visit<VisitDynamicNode> for VisitDynamic {
@ -117,10 +117,10 @@ impl Visit<VisitDynamicNode> for VisitDynamic {
fn span(&mut self, node: &VisitDynamicNode) -> tracing::Span { fn span(&mut self, node: &VisitDynamicNode) -> tracing::Span {
match node { match node {
VisitDynamicNode::Dynamic(_, name) => { VisitDynamicNode::Dynamic(_, name) => {
tracing::info_span!("dynamic module", name = **name) tracing::info_span!("dynamic module", name = name.to_string())
} }
VisitDynamicNode::Internal(_, name) => { VisitDynamicNode::Internal(_, name) => {
tracing::info_span!("module", name = **name) tracing::info_span!("module", name = name.to_string())
} }
} }
} }
@ -1,6 +1,6 @@
use anyhow::Result; use anyhow::Result;
use indexmap::IndexMap; use indexmap::IndexMap;
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::{tasks_env::EnvMap, tasks_fs::FileSystemPath}, turbo::{tasks_env::EnvMap, tasks_fs::FileSystemPath},
turbopack::{ turbopack::{
@ -33,17 +33,17 @@ use crate::{
util::{foreign_code_context_condition, NextRuntime}, util::{foreign_code_context_condition, NextRuntime},
}; };
fn defines(define_env: &IndexMap<String, String>) -> CompileTimeDefines { fn defines(define_env: &IndexMap<RcStr, RcStr>) -> CompileTimeDefines {
let mut defines = IndexMap::new(); let mut defines = IndexMap::new();
for (k, v) in define_env { for (k, v) in define_env {
defines defines
.entry(k.split('.').map(|s| s.to_string()).collect::<Vec<String>>()) .entry(k.split('.').map(|s| s.into()).collect::<Vec<RcStr>>())
.or_insert_with(|| { .or_insert_with(|| {
let val = serde_json::from_str(v); let val = serde_json::from_str(v);
match val { match val {
Ok(serde_json::Value::Bool(v)) => CompileTimeDefineValue::Bool(v), Ok(serde_json::Value::Bool(v)) => CompileTimeDefineValue::Bool(v),
Ok(serde_json::Value::String(v)) => CompileTimeDefineValue::String(v), Ok(serde_json::Value::String(v)) => CompileTimeDefineValue::String(v.into()),
_ => CompileTimeDefineValue::JSON(v.clone()), _ => CompileTimeDefineValue::JSON(v.clone()),
} }
}); });
@ -67,9 +67,9 @@ async fn next_edge_free_vars(
Ok(free_var_references!( Ok(free_var_references!(
..defines(&*define_env.await?).into_iter(), ..defines(&*define_env.await?).into_iter(),
Buffer = FreeVarReference::EcmaScriptModule { Buffer = FreeVarReference::EcmaScriptModule {
request: "buffer".to_string(), request: "buffer".into(),
lookup_path: Some(project_path), lookup_path: Some(project_path),
export: Some("Buffer".to_string()), export: Some("Buffer".into()),
}, },
) )
.cell()) .cell())
@ -141,16 +141,17 @@ pub async fn get_edge_resolve_options_context(
after_resolve_plugins.extend_from_slice(&base_plugins); after_resolve_plugins.extend_from_slice(&base_plugins);
// https://github.com/vercel/next.js/blob/bf52c254973d99fed9d71507a2e818af80b8ade7/packages/next/src/build/webpack-config.ts#L96-L102 // https://github.com/vercel/next.js/blob/bf52c254973d99fed9d71507a2e818af80b8ade7/packages/next/src/build/webpack-config.ts#L96-L102
let mut custom_conditions = vec![mode.await?.condition().to_string()]; let mut custom_conditions = vec![mode.await?.condition().into()];
custom_conditions.extend( custom_conditions.extend(
NextRuntime::Edge NextRuntime::Edge
.conditions() .conditions()
.iter() .iter()
.map(ToString::to_string), .map(ToString::to_string)
.map(RcStr::from),
); );
if ty.supports_react_server() { if ty.supports_react_server() {
custom_conditions.push("react-server".to_string()); custom_conditions.push("react-server".into());
}; };
let resolve_options_context = ResolveOptionsContext { let resolve_options_context = ResolveOptionsContext {
@ -186,18 +187,18 @@ pub async fn get_edge_chunking_context_with_client_assets(
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
node_root: Vc<FileSystemPath>, node_root: Vc<FileSystemPath>,
client_root: Vc<FileSystemPath>, client_root: Vc<FileSystemPath>,
asset_prefix: Vc<Option<String>>, asset_prefix: Vc<Option<RcStr>>,
environment: Vc<Environment>, environment: Vc<Environment>,
) -> Result<Vc<Box<dyn ChunkingContext>>> { ) -> Result<Vc<Box<dyn ChunkingContext>>> {
let output_root = node_root.join("server/edge".to_string()); let output_root = node_root.join("server/edge".into());
let next_mode = mode.await?; let next_mode = mode.await?;
Ok(Vc::upcast( Ok(Vc::upcast(
BrowserChunkingContext::builder( BrowserChunkingContext::builder(
project_path, project_path,
output_root, output_root,
client_root, client_root,
output_root.join("chunks/ssr".to_string()), output_root.join("chunks/ssr".into()),
client_root.join("static/media".to_string()), client_root.join("static/media".into()),
environment, environment,
next_mode.runtime_type(), next_mode.runtime_type(),
) )
@ -214,15 +215,15 @@ pub async fn get_edge_chunking_context(
node_root: Vc<FileSystemPath>, node_root: Vc<FileSystemPath>,
environment: Vc<Environment>, environment: Vc<Environment>,
) -> Result<Vc<Box<dyn ChunkingContext>>> { ) -> Result<Vc<Box<dyn ChunkingContext>>> {
let output_root = node_root.join("server/edge".to_string()); let output_root = node_root.join("server/edge".into());
let next_mode = mode.await?; let next_mode = mode.await?;
Ok(Vc::upcast( Ok(Vc::upcast(
BrowserChunkingContext::builder( BrowserChunkingContext::builder(
project_path, project_path,
output_root, output_root,
output_root, output_root,
output_root.join("chunks".to_string()), output_root.join("chunks".into()),
output_root.join("assets".to_string()), output_root.join("assets".into()),
environment, environment,
next_mode.runtime_type(), next_mode.runtime_type(),
) )
@ -230,7 +231,7 @@ pub async fn get_edge_chunking_context(
// instead. This special blob url is handled by the custom fetch // instead. This special blob url is handled by the custom fetch
// implementation in the edge sandbox. It will respond with the // implementation in the edge sandbox. It will respond with the
// asset from the output directory. // asset from the output directory.
.asset_base_path(Vc::cell(Some("blob:server/edge/".to_string()))) .asset_base_path(Vc::cell(Some("blob:server/edge/".into())))
.minify_type(next_mode.minify_type()) .minify_type(next_mode.minify_type())
.build(), .build(),
)) ))
@ -1,7 +1,7 @@
use anyhow::Result; use anyhow::Result;
use indexmap::indexmap; use indexmap::indexmap;
use indoc::formatdoc; use indoc::formatdoc;
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbo_tasks_fs::{File, FileSystemPath}; use turbo_tasks_fs::{File, FileSystemPath};
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
@ -16,7 +16,7 @@ pub async fn wrap_edge_entry(
context: Vc<Box<dyn AssetContext>>, context: Vc<Box<dyn AssetContext>>,
project_root: Vc<FileSystemPath>, project_root: Vc<FileSystemPath>,
entry: Vc<Box<dyn Module>>, entry: Vc<Box<dyn Module>>,
pathname: String, pathname: RcStr,
) -> Result<Vc<Box<dyn Module>>> { ) -> Result<Vc<Box<dyn Module>>> {
// The wrapped module could be an async module, we handle that with the proxy // The wrapped module could be an async module, we handle that with the proxy
// here. The comma expression makes sure we don't call the function with the // here. The comma expression makes sure we don't call the function with the
@ -32,12 +32,12 @@ pub async fn wrap_edge_entry(
// TODO(alexkirsz) Figure out how to name this virtual asset. // TODO(alexkirsz) Figure out how to name this virtual asset.
let virtual_source = VirtualSource::new( let virtual_source = VirtualSource::new(
project_root.join("edge-wrapper.js".to_string()), project_root.join("edge-wrapper.js".into()),
AssetContent::file(file.into()), AssetContent::file(file.into()),
); );
let inner_assets = indexmap! { let inner_assets = indexmap! {
"MODULE".to_string() => entry "MODULE".into() => entry
}; };
let module = context let module = context
@ -1,6 +1,6 @@
use anyhow::Result; use anyhow::Result;
use indoc::formatdoc; use indoc::formatdoc;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbo_tasks_fs::File; use turbo_tasks_fs::File;
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::FileSystemPath, turbo::tasks_fs::FileSystemPath,
@ -46,7 +46,7 @@ impl NextEdgeUnsupportedModuleReplacer {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl ImportMappingReplacement for NextEdgeUnsupportedModuleReplacer { impl ImportMappingReplacement for NextEdgeUnsupportedModuleReplacer {
#[turbo_tasks::function] #[turbo_tasks::function]
fn replace(&self, _capture: String) -> Vc<ImportMapping> { fn replace(&self, _capture: RcStr) -> Vc<ImportMapping> {
ImportMapping::Ignore.into() ImportMapping::Ignore.into()
} }
@ -1,12 +1,12 @@
use anyhow::Result; use anyhow::Result;
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbo::tasks::trace::TraceRawVcs; use turbopack_binding::turbo::tasks::trace::TraceRawVcs;
pub(crate) struct DefaultFallbackFont { pub(crate) struct DefaultFallbackFont {
pub name: String, pub name: RcStr,
pub capsize_key: String, pub capsize_key: RcStr,
pub az_avg_width: f64, pub az_avg_width: f64,
pub units_per_em: u32, pub units_per_em: u32,
} }
@ -14,16 +14,16 @@ pub(crate) struct DefaultFallbackFont {
// From https://github.com/vercel/next.js/blob/a3893bf69c83fb08e88c87bf8a21d987a0448c8e/packages/font/src/utils.ts#L4 // From https://github.com/vercel/next.js/blob/a3893bf69c83fb08e88c87bf8a21d987a0448c8e/packages/font/src/utils.ts#L4
pub(crate) static DEFAULT_SANS_SERIF_FONT: Lazy<DefaultFallbackFont> = pub(crate) static DEFAULT_SANS_SERIF_FONT: Lazy<DefaultFallbackFont> =
Lazy::new(|| DefaultFallbackFont { Lazy::new(|| DefaultFallbackFont {
name: "Arial".to_owned(), name: "Arial".into(),
capsize_key: "arial".to_owned(), capsize_key: "arial".into(),
az_avg_width: 934.5116279069767, az_avg_width: 934.5116279069767,
units_per_em: 2048, units_per_em: 2048,
}); });
pub(crate) static DEFAULT_SERIF_FONT: Lazy<DefaultFallbackFont> = pub(crate) static DEFAULT_SERIF_FONT: Lazy<DefaultFallbackFont> =
Lazy::new(|| DefaultFallbackFont { Lazy::new(|| DefaultFallbackFont {
name: "Times New Roman".to_owned(), name: "Times New Roman".into(),
capsize_key: "timesNewRoman".to_owned(), capsize_key: "timesNewRoman".into(),
az_avg_width: 854.3953488372093, az_avg_width: 854.3953488372093,
units_per_em: 2048, units_per_em: 2048,
}); });
@ -32,9 +32,9 @@ pub(crate) static DEFAULT_SERIF_FONT: Lazy<DefaultFallbackFont> =
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub(crate) struct AutomaticFontFallback { pub(crate) struct AutomaticFontFallback {
/// e.g. `__Roboto_Fallback_c123b8` /// e.g. `__Roboto_Fallback_c123b8`
pub scoped_font_family: Vc<String>, pub scoped_font_family: Vc<RcStr>,
/// The name of font locally, used in `src: local("{}")` /// The name of font locally, used in `src: local("{}")`
pub local_font_family: Vc<String>, pub local_font_family: Vc<RcStr>,
pub adjustment: Option<FontAdjustment>, pub adjustment: Option<FontAdjustment>,
} }
@ -48,7 +48,7 @@ pub(crate) enum FontFallback {
/// return this and omit fallback information instead. /// return this and omit fallback information instead.
Error, Error,
/// A list of manually provided font names to use a fallback, as-is. /// A list of manually provided font names to use a fallback, as-is.
Manual(Vec<String>), Manual(Vec<RcStr>),
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -4,7 +4,7 @@ use anyhow::{Context, Result};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use regex::Regex; use regex::Regex;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::{trace::TraceRawVcs, Vc}; use turbo_tasks::{trace::TraceRawVcs, RcStr, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::FileSystemPath, turbo::tasks_fs::FileSystemPath,
turbopack::core::issue::{IssueExt, IssueSeverity, StyledString}, turbopack::core::issue::{IssueExt, IssueSeverity, StyledString},
@ -27,7 +27,7 @@ use crate::{
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub(super) struct FontMetricsMapEntry { pub(super) struct FontMetricsMapEntry {
category: String, category: RcStr,
ascent: i32, ascent: i32,
descent: i32, descent: i32,
line_gap: u32, line_gap: u32,
@ -36,11 +36,11 @@ pub(super) struct FontMetricsMapEntry {
} }
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
pub(super) struct FontMetricsMap(pub HashMap<String, FontMetricsMapEntry>); pub(super) struct FontMetricsMap(pub HashMap<RcStr, FontMetricsMapEntry>);
#[derive(Debug, PartialEq, Serialize, Deserialize, TraceRawVcs)] #[derive(Debug, PartialEq, Serialize, Deserialize, TraceRawVcs)]
struct Fallback { struct Fallback {
pub font_family: String, pub font_family: RcStr,
pub adjustment: Option<FontAdjustment>, pub adjustment: Option<FontAdjustment>,
} }
@ -53,11 +53,9 @@ pub(super) async fn get_font_fallback(
Ok(match &options.fallback { Ok(match &options.fallback {
Some(fallback) => FontFallback::Manual(fallback.clone()).cell(), Some(fallback) => FontFallback::Manual(fallback.clone()).cell(),
None => { None => {
let metrics_json = load_next_js_templateon( let metrics_json =
context, load_next_js_templateon(context, "dist/server/capsize-font-metrics.json".into())
"dist/server/capsize-font-metrics.json".to_string(), .await?;
)
.await?;
let fallback = lookup_fallback( let fallback = lookup_fallback(
&options.font_family, &options.font_family,
metrics_json, metrics_json,
@ -77,13 +75,16 @@ pub(super) async fn get_font_fallback(
Err(_) => { Err(_) => {
NextFontIssue { NextFontIssue {
path: context, path: context,
title: StyledString::Text(format!( title: StyledString::Text(
"Failed to find font override values for font `{}`", format!(
&options.font_family, "Failed to find font override values for font `{}`",
)) &options.font_family,
)
.into(),
)
.cell(), .cell(),
description: StyledString::Text( description: StyledString::Text(
"Skipping generating a fallback font.".to_owned(), "Skipping generating a fallback font.".into(),
) )
.cell(), .cell(),
severity: IssueSeverity::Warning.cell(), severity: IssueSeverity::Warning.cell(),
@ -100,7 +101,7 @@ pub(super) async fn get_font_fallback(
static FALLBACK_FONT_NAME: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?:^\w|[A-Z]|\b\w)").unwrap()); static FALLBACK_FONT_NAME: Lazy<Regex> = Lazy::new(|| Regex::new(r"(?:^\w|[A-Z]|\b\w)").unwrap());
// From https://github.com/vercel/next.js/blob/1628260b88ce3052ac307a1607b6e8470188ab83/packages/next/src/server/font-utils.ts#L101 // From https://github.com/vercel/next.js/blob/1628260b88ce3052ac307a1607b6e8470188ab83/packages/next/src/server/font-utils.ts#L101
fn format_fallback_font_name(font_family: &str) -> String { fn format_fallback_font_name(font_family: &str) -> RcStr {
let mut fallback_name = FALLBACK_FONT_NAME let mut fallback_name = FALLBACK_FONT_NAME
.replace(font_family, |caps: &regex::Captures| { .replace(font_family, |caps: &regex::Captures| {
caps.iter() caps.iter()
@ -118,7 +119,7 @@ fn format_fallback_font_name(font_family: &str) -> String {
}) })
.to_string(); .to_string();
fallback_name.retain(|c| !c.is_whitespace()); fallback_name.retain(|c| !c.is_whitespace());
fallback_name fallback_name.into()
} }
fn lookup_fallback( fn lookup_fallback(
@ -208,7 +209,7 @@ mod tests {
assert_eq!( assert_eq!(
lookup_fallback("Inter", font_metrics, true)?, lookup_fallback("Inter", font_metrics, true)?,
Fallback { Fallback {
font_family: "Arial".to_owned(), font_family: "Arial".into(),
adjustment: Some(FontAdjustment { adjustment: Some(FontAdjustment {
ascent: 0.901_989_700_374_532, ascent: 0.901_989_700_374_532,
descent: -0.224_836_142_322_097_4, descent: -0.224_836_142_322_097_4,
@ -254,7 +255,7 @@ mod tests {
assert_eq!( assert_eq!(
lookup_fallback("Roboto Slab", font_metrics, true)?, lookup_fallback("Roboto Slab", font_metrics, true)?,
Fallback { Fallback {
font_family: "Times New Roman".to_owned(), font_family: "Times New Roman".into(),
adjustment: Some(FontAdjustment { adjustment: Some(FontAdjustment {
ascent: 0.885_645_438_273_993_8, ascent: 0.885_645_438_273_993_8,
descent: -0.229_046_234_036_377_7, descent: -0.229_046_234_036_377_7,
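
To make the String-to-RcStr changes above easier to follow, here is a minimal sketch of what a reference-counted string newtype can look like. This is an assumption for illustration only: the actual RcStr ships with turbo-tasks and its internals are not part of this diff; the sketch only mirrors the conversion surface implied by call sites such as `"Arial".into()` and `HashMap<RcStr, FontMetricsMapEntry>`.

    use std::{fmt, ops::Deref, sync::Arc};

    /// Hypothetical stand-in for turbo-tasks' RcStr: a cheaply clonable,
    /// immutable string backed by a reference count, so clones do not
    /// re-allocate the underlying bytes.
    #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
    pub struct RcStr(Arc<str>);

    impl From<&str> for RcStr {
        fn from(s: &str) -> Self {
            RcStr(Arc::from(s))
        }
    }

    impl From<String> for RcStr {
        fn from(s: String) -> Self {
            RcStr(Arc::from(s))
        }
    }

    impl Deref for RcStr {
        type Target = str;
        fn deref(&self) -> &str {
            &self.0
        }
    }

    impl fmt::Display for RcStr {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.write_str(&self.0)
        }
    }

    impl fmt::Debug for RcStr {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            fmt::Debug::fmt(&self.0, f)
        }
    }

    fn main() {
        // `.into()` replaces `.to_owned()` / `.to_string()` at the call sites above.
        let family: RcStr = "Arial".into();
        // Cloning bumps a reference count; the string data is shared, not copied.
        let alias = family.clone();
        assert_eq!(&*family, &*alias);
        println!("{family}");
    }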


@ -5,7 +5,7 @@ use futures::FutureExt;
use indexmap::IndexMap; use indexmap::IndexMap;
use indoc::formatdoc; use indoc::formatdoc;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::{ turbo::{
tasks::{Completion, Value}, tasks::{Completion, Value},
@ -68,7 +68,7 @@ pub const USER_AGENT_FOR_GOOGLE_FONTS: &str = "Mozilla/5.0 (Macintosh; Intel Mac
Chrome/104.0.0.0 Safari/537.36"; Chrome/104.0.0.0 Safari/537.36";
#[turbo_tasks::value(transparent)] #[turbo_tasks::value(transparent)]
struct FontData(IndexMap<String, FontDataEntry>); struct FontData(IndexMap<RcStr, FontDataEntry>);
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub(crate) struct NextFontGoogleReplacer { pub(crate) struct NextFontGoogleReplacer {
@ -83,7 +83,7 @@ impl NextFontGoogleReplacer {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
async fn import_map_result(&self, query: String) -> Result<Vc<ImportMapResult>> { async fn import_map_result(&self, query: RcStr) -> Result<Vc<ImportMapResult>> {
let request_hash = get_request_hash(&query).await?; let request_hash = get_request_hash(&query).await?;
let qstr = qstring::QString::from(query.as_str()); let qstr = qstring::QString::from(query.as_str());
@ -95,8 +95,8 @@ impl NextFontGoogleReplacer {
let fallback = get_font_fallback(self.project_path, options); let fallback = get_font_fallback(self.project_path, options);
let properties = get_font_css_properties(options, fallback).await?; let properties = get_font_css_properties(options, fallback).await?;
let js_asset = VirtualSource::new( let js_asset = VirtualSource::new(
next_js_file_path("internal/font/google".to_string()) next_js_file_path("internal/font/google".into())
.join(format!("{}.js", get_request_id(options.font_family(), request_hash).await?)), .join(format!("{}.js", get_request_id(options.font_family(), request_hash).await?).into()),
AssetContent::file(FileContent::Content( AssetContent::file(FileContent::Content(
formatdoc!( formatdoc!(
r#" r#"
@ -142,7 +142,7 @@ impl NextFontGoogleReplacer {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl ImportMappingReplacement for NextFontGoogleReplacer { impl ImportMappingReplacement for NextFontGoogleReplacer {
#[turbo_tasks::function] #[turbo_tasks::function]
fn replace(&self, _capture: String) -> Vc<ImportMapping> { fn replace(&self, _capture: RcStr) -> Vc<ImportMapping> {
ImportMapping::Ignore.into() ImportMapping::Ignore.into()
} }
@ -168,7 +168,7 @@ impl ImportMappingReplacement for NextFontGoogleReplacer {
let this = &*self.await?; let this = &*self.await?;
if can_use_next_font(this.project_path, *query).await? { if can_use_next_font(this.project_path, *query).await? {
Ok(self.import_map_result(query.await?.to_string())) Ok(self.import_map_result(query.await?.as_str().into()))
} else { } else {
Ok(ImportMapResult::NoEntry.into()) Ok(ImportMapResult::NoEntry.into())
} }
@ -195,7 +195,7 @@ impl NextFontGoogleCssModuleReplacer {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
async fn import_map_result(&self, query: String) -> Result<Vc<ImportMapResult>> { async fn import_map_result(&self, query: RcStr) -> Result<Vc<ImportMapResult>> {
let request_hash = get_request_hash(&query).await?; let request_hash = get_request_hash(&query).await?;
let query_vc = Vc::cell(query); let query_vc = Vc::cell(query);
let font_data = load_font_data(self.project_path); let font_data = load_font_data(self.project_path);
@ -203,18 +203,19 @@ impl NextFontGoogleCssModuleReplacer {
let stylesheet_url = get_stylesheet_url_from_options(options, font_data); let stylesheet_url = get_stylesheet_url_from_options(options, font_data);
let scoped_font_family = let scoped_font_family =
get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family()); get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family());
-let css_virtual_path = next_js_file_path("internal/font/google".to_string()).join(format!(
-    "/{}.module.css",
-    get_request_id(options.font_family(), request_hash).await?
-));
+let css_virtual_path = next_js_file_path("internal/font/google".into()).join(
+    format!(
+        "/{}.module.css",
+        get_request_id(options.font_family(), request_hash).await?
+    )
+    .into(),
+);
// When running Next.js integration tests, use the mock data available in // When running Next.js integration tests, use the mock data available in
// process.env.NEXT_FONT_GOOGLE_MOCKED_RESPONSES instead of making real // process.env.NEXT_FONT_GOOGLE_MOCKED_RESPONSES instead of making real
// requests to Google Fonts. // requests to Google Fonts.
let env = Vc::upcast::<Box<dyn ProcessEnv>>(CommandLineProcessEnv::new()); let env = Vc::upcast::<Box<dyn ProcessEnv>>(CommandLineProcessEnv::new());
-let mocked_responses_path = &*env
-    .read("NEXT_FONT_GOOGLE_MOCKED_RESPONSES".to_string())
-    .await?;
+let mocked_responses_path = &*env.read("NEXT_FONT_GOOGLE_MOCKED_RESPONSES".into()).await?;
let stylesheet_str = mocked_responses_path let stylesheet_str = mocked_responses_path
.as_ref() .as_ref()
.map_or_else( .map_or_else(
@ -270,7 +271,7 @@ impl NextFontGoogleCssModuleReplacer {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl ImportMappingReplacement for NextFontGoogleCssModuleReplacer { impl ImportMappingReplacement for NextFontGoogleCssModuleReplacer {
#[turbo_tasks::function] #[turbo_tasks::function]
fn replace(&self, _capture: String) -> Vc<ImportMapping> { fn replace(&self, _capture: RcStr) -> Vc<ImportMapping> {
ImportMapping::Ignore.into() ImportMapping::Ignore.into()
} }
@ -295,7 +296,7 @@ impl ImportMappingReplacement for NextFontGoogleCssModuleReplacer {
return Ok(ImportMapResult::NoEntry.into()); return Ok(ImportMapResult::NoEntry.into());
}; };
Ok(self.import_map_result(query_vc.await?.to_string())) Ok(self.import_map_result(query_vc.await?.to_string().into()))
} }
} }
@ -322,7 +323,7 @@ impl NextFontGoogleFontFileReplacer {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl ImportMappingReplacement for NextFontGoogleFontFileReplacer { impl ImportMappingReplacement for NextFontGoogleFontFileReplacer {
#[turbo_tasks::function] #[turbo_tasks::function]
fn replace(&self, _capture: String) -> Vc<ImportMapping> { fn replace(&self, _capture: RcStr) -> Vc<ImportMapping> {
ImportMapping::Ignore.into() ImportMapping::Ignore.into()
} }
@ -364,12 +365,13 @@ impl ImportMappingReplacement for NextFontGoogleFontFileReplacer {
name.push_str(".p") name.push_str(".p")
} }
let font_virtual_path = next_js_file_path("internal/font/google".to_string()) let font_virtual_path = next_js_file_path("internal/font/google".into())
.join(format!("/{}.{}", name, ext)); .join(format!("/{}.{}", name, ext).into());
// doesn't seem ideal to download the font into a string, but probably doesn't // doesn't seem ideal to download the font into a string, but probably doesn't
// really matter either. // really matter either.
-let Some(font) = fetch_from_google_fonts(Vc::cell(url), font_virtual_path).await? else {
+let Some(font) = fetch_from_google_fonts(Vc::cell(url.into()), font_virtual_path).await?
+else {
return Ok(ImportMapResult::Result(ResolveResult::unresolveable().into()).into()); return Ok(ImportMapResult::Result(ResolveResult::unresolveable().into()).into());
}; };
@ -386,7 +388,7 @@ impl ImportMappingReplacement for NextFontGoogleFontFileReplacer {
async fn load_font_data(project_root: Vc<FileSystemPath>) -> Result<Vc<FontData>> { async fn load_font_data(project_root: Vc<FileSystemPath>) -> Result<Vc<FontData>> {
let data: FontData = load_next_js_templateon( let data: FontData = load_next_js_templateon(
project_root, project_root,
"dist/compiled/@next/font/dist/google/font-data.json".to_string(), "dist/compiled/@next/font/dist/google/font-data.json".into(),
) )
.await?; .await?;
@ -397,11 +399,11 @@ async fn load_font_data(project_root: Vc<FileSystemPath>) -> Result<Vc<FontData>
/// font family names. /// font family names.
#[turbo_tasks::function] #[turbo_tasks::function]
async fn update_google_stylesheet( async fn update_google_stylesheet(
stylesheet: Vc<String>, stylesheet: Vc<RcStr>,
options: Vc<NextFontGoogleOptions>, options: Vc<NextFontGoogleOptions>,
scoped_font_family: Vc<String>, scoped_font_family: Vc<RcStr>,
has_size_adjust: Vc<bool>, has_size_adjust: Vc<bool>,
) -> Result<Vc<String>> { ) -> Result<Vc<RcStr>> {
let options = &*options.await?; let options = &*options.await?;
// Update font-family definitions to the scoped name // Update font-family definitions to the scoped name
@ -439,7 +441,7 @@ async fn update_google_stylesheet(
) )
} }
Ok(Vc::cell(stylesheet.to_string())) Ok(Vc::cell(stylesheet.into()))
} }
#[derive(Debug)] #[derive(Debug)]
@ -449,7 +451,7 @@ struct FontFile {
} }
// https://github.com/vercel/next.js/blob/b95e45a5112e9f65e939eac9445ef550db072ea7/packages/font/src/google/find-font-files-in-css.ts // https://github.com/vercel/next.js/blob/b95e45a5112e9f65e939eac9445ef550db072ea7/packages/font/src/google/find-font-files-in-css.ts
fn find_font_files_in_css(css: &str, subsets_to_preload: &[String]) -> Vec<FontFile> { fn find_font_files_in_css(css: &str, subsets_to_preload: &[RcStr]) -> Vec<FontFile> {
let mut font_files: Vec<FontFile> = Vec::new(); let mut font_files: Vec<FontFile> = Vec::new();
let mut current_subset = ""; let mut current_subset = "";
@ -482,7 +484,7 @@ fn find_font_files_in_css(css: &str, subsets_to_preload: &[String]) -> Vec<FontF
async fn get_stylesheet_url_from_options( async fn get_stylesheet_url_from_options(
options: Vc<NextFontGoogleOptions>, options: Vc<NextFontGoogleOptions>,
font_data: Vc<FontData>, font_data: Vc<FontData>,
) -> Result<Vc<String>> { ) -> Result<Vc<RcStr>> {
#[allow(unused_mut, unused_assignments)] // This is used in test environments #[allow(unused_mut, unused_assignments)] // This is used in test environments
let mut css_url: Option<String> = None; let mut css_url: Option<String> = None;
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
@ -490,27 +492,27 @@ async fn get_stylesheet_url_from_options(
use turbopack_binding::turbo::tasks_env::{CommandLineProcessEnv, ProcessEnv}; use turbopack_binding::turbo::tasks_env::{CommandLineProcessEnv, ProcessEnv};
let env = CommandLineProcessEnv::new(); let env = CommandLineProcessEnv::new();
-if let Some(url) = &*env
-    .read("TURBOPACK_TEST_ONLY_MOCK_SERVER".to_string())
-    .await?
-{
+if let Some(url) = &*env.read("TURBOPACK_TEST_ONLY_MOCK_SERVER".into()).await? {
css_url = Some(format!("{}/css2", url)); css_url = Some(format!("{}/css2", url));
} }
} }
let options = options.await?; let options = options.await?;
-Ok(Vc::cell(get_stylesheet_url(
-    css_url.as_deref().unwrap_or(GOOGLE_FONTS_STYLESHEET_URL),
-    &options.font_family,
-    &get_font_axes(
-        &*font_data.await?,
-        &options.font_family,
-        &options.weights,
-        &options.styles,
-        &options.selected_variable_axes,
-    )?,
-    &options.display,
-)?))
+Ok(Vc::cell(
+    get_stylesheet_url(
+        css_url.as_deref().unwrap_or(GOOGLE_FONTS_STYLESHEET_URL),
+        &options.font_family,
+        &get_font_axes(
+            &*font_data.await?,
+            &options.font_family,
+            &options.weights,
+            &options.styles,
+            &options.selected_variable_axes,
+        )?,
+        &options.display,
+    )?
+    .into(),
+))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -522,20 +524,20 @@ async fn get_font_css_properties(
let scoped_font_family = let scoped_font_family =
&*get_scoped_font_family(FontFamilyType::WebFont.cell(), options_vc.font_family()).await?; &*get_scoped_font_family(FontFamilyType::WebFont.cell(), options_vc.font_family()).await?;
let mut font_families = vec![format!("'{}'", scoped_font_family.clone())]; let mut font_families = vec![format!("'{}'", scoped_font_family.clone()).into()];
let font_fallback = &*font_fallback.await?; let font_fallback = &*font_fallback.await?;
match font_fallback { match font_fallback {
FontFallback::Manual(fonts) => { FontFallback::Manual(fonts) => {
font_families.extend_from_slice(fonts); font_families.extend_from_slice(fonts);
} }
FontFallback::Automatic(fallback) => { FontFallback::Automatic(fallback) => {
font_families.push(format!("'{}'", *fallback.scoped_font_family.await?)); font_families.push(format!("'{}'", *fallback.scoped_font_family.await?).into());
} }
FontFallback::Error => {} FontFallback::Error => {}
} }
Ok(FontCssProperties::cell(FontCssProperties { Ok(FontCssProperties::cell(FontCssProperties {
font_family: Vc::cell(font_families.join(", ")), font_family: Vc::cell(font_families.join(", ").into()),
weight: Vc::cell(match &options.weights { weight: Vc::cell(match &options.weights {
FontWeights::Variable => None, FontWeights::Variable => None,
FontWeights::Fixed(weights) => { FontWeights::Fixed(weights) => {
@ -543,7 +545,7 @@ async fn get_font_css_properties(
// Don't set a rule for weight if multiple are requested // Don't set a rule for weight if multiple are requested
None None
} else { } else {
weights.first().map(|w| w.to_string()) weights.first().map(|w| w.to_string().into())
} }
} }
}), }),
@ -559,7 +561,7 @@ async fn get_font_css_properties(
#[turbo_tasks::function] #[turbo_tasks::function]
async fn font_options_from_query_map( async fn font_options_from_query_map(
query: Vc<String>, query: Vc<RcStr>,
font_data: Vc<FontData>, font_data: Vc<FontData>,
) -> Result<Vc<NextFontGoogleOptions>> { ) -> Result<Vc<NextFontGoogleOptions>> {
let query_map = qstring::QString::from(&**query.await?); let query_map = qstring::QString::from(&**query.await?);
@ -578,7 +580,7 @@ async fn font_options_from_query_map(
} }
async fn font_file_options_from_query_map( async fn font_file_options_from_query_map(
query: Vc<String>, query: Vc<RcStr>,
) -> Result<NextFontGoogleFontFileOptions> { ) -> Result<NextFontGoogleFontFileOptions> {
let query_map = qstring::QString::from(&**query.await?); let query_map = qstring::QString::from(&**query.await?);
@ -594,21 +596,21 @@ async fn font_file_options_from_query_map(
} }
async fn fetch_real_stylesheet( async fn fetch_real_stylesheet(
stylesheet_url: Vc<String>, stylesheet_url: Vc<RcStr>,
css_virtual_path: Vc<FileSystemPath>, css_virtual_path: Vc<FileSystemPath>,
) -> Result<Option<Vc<String>>> { ) -> Result<Option<Vc<RcStr>>> {
let body = fetch_from_google_fonts(stylesheet_url, css_virtual_path).await?; let body = fetch_from_google_fonts(stylesheet_url, css_virtual_path).await?;
Ok(body.map(|body| body.to_string())) Ok(body.map(|body| body.to_string()))
} }
async fn fetch_from_google_fonts( async fn fetch_from_google_fonts(
url: Vc<String>, url: Vc<RcStr>,
virtual_path: Vc<FileSystemPath>, virtual_path: Vc<FileSystemPath>,
) -> Result<Option<Vc<HttpResponseBody>>> { ) -> Result<Option<Vc<HttpResponseBody>>> {
let result = fetch( let result = fetch(
url, url,
Vc::cell(Some(USER_AGENT_FOR_GOOGLE_FONTS.to_owned())), Vc::cell(Some(USER_AGENT_FOR_GOOGLE_FONTS.into())),
Vc::cell(None), Vc::cell(None),
) )
.await?; .await?;
@ -632,19 +634,19 @@ async fn fetch_from_google_fonts(
} }
async fn get_mock_stylesheet( async fn get_mock_stylesheet(
stylesheet_url: Vc<String>, stylesheet_url: Vc<RcStr>,
mocked_responses_path: &str, mocked_responses_path: &str,
execution_context: Vc<ExecutionContext>, execution_context: Vc<ExecutionContext>,
) -> Result<Option<Vc<String>>> { ) -> Result<Option<Vc<RcStr>>> {
let response_path = Path::new(&mocked_responses_path); let response_path = Path::new(&mocked_responses_path);
let mock_fs = Vc::upcast::<Box<dyn FileSystem>>(DiskFileSystem::new( let mock_fs = Vc::upcast::<Box<dyn FileSystem>>(DiskFileSystem::new(
"mock".to_string(), "mock".into(),
response_path response_path
.parent() .parent()
.context("Must be valid path")? .context("Must be valid path")?
.to_str() .to_str()
.context("Must exist")? .context("Must exist")?
.to_string(), .into(),
vec![], vec![],
)); ));
@ -653,9 +655,8 @@ async fn get_mock_stylesheet(
project_path: _, project_path: _,
chunking_context, chunking_context,
} = *execution_context.await?; } = *execution_context.await?;
-let context =
-    node_evaluate_asset_context(execution_context, None, None, "next_font".to_string());
-let loader_path = mock_fs.root().join("loader.js".to_string());
+let context = node_evaluate_asset_context(execution_context, None, None, "next_font".into());
+let loader_path = mock_fs.root().join("loader.js".into());
let mocked_response_asset = context let mocked_response_asset = context
.process( .process(
Vc::upcast(VirtualSource::new( Vc::upcast(VirtualSource::new(
@ -693,8 +694,7 @@ async fn get_mock_stylesheet(
match &val.try_into_single().await? { match &val.try_into_single().await? {
SingleValue::Single(val) => { SingleValue::Single(val) => {
-let val: HashMap<String, Option<String>> =
-    parse_json_with_source_context(val.to_str()?)?;
+let val: HashMap<RcStr, Option<RcStr>> = parse_json_with_source_context(val.to_str()?)?;
Ok(val Ok(val
.get(&*stylesheet_url.await?) .get(&*stylesheet_url.await?)
.context("url not found")? .context("url not found")?


@ -1,31 +1,31 @@
use anyhow::{anyhow, Context, Result}; use anyhow::{anyhow, Context, Result};
use indexmap::{indexset, IndexMap, IndexSet}; use indexmap::{indexset, IndexMap, IndexSet};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbo::tasks::{trace::TraceRawVcs, Value}; use turbopack_binding::turbo::tasks::{trace::TraceRawVcs, Value};
use super::request::{NextFontRequest, OneOrManyStrings}; use super::request::{NextFontRequest, OneOrManyStrings};
const ALLOWED_DISPLAY_VALUES: &[&str] = &["auto", "block", "swap", "fallback", "optional"]; const ALLOWED_DISPLAY_VALUES: &[&str] = &["auto", "block", "swap", "fallback", "optional"];
pub(super) type FontData = IndexMap<String, FontDataEntry>; pub(super) type FontData = IndexMap<RcStr, FontDataEntry>;
#[turbo_tasks::value(serialization = "auto_for_input")] #[turbo_tasks::value(serialization = "auto_for_input")]
#[derive(Clone, Debug, PartialOrd, Ord, Hash)] #[derive(Clone, Debug, PartialOrd, Ord, Hash)]
pub(super) struct NextFontGoogleOptions { pub(super) struct NextFontGoogleOptions {
/// Name of the requested font from Google. Contains literal spaces. /// Name of the requested font from Google. Contains literal spaces.
pub font_family: String, pub font_family: RcStr,
pub weights: FontWeights, pub weights: FontWeights,
pub styles: Vec<String>, pub styles: Vec<RcStr>,
pub display: String, pub display: RcStr,
pub preload: bool, pub preload: bool,
pub selected_variable_axes: Option<Vec<String>>, pub selected_variable_axes: Option<Vec<RcStr>>,
pub fallback: Option<Vec<String>>, pub fallback: Option<Vec<RcStr>>,
pub adjust_font_fallback: bool, pub adjust_font_fallback: bool,
/// An optional name for a css custom property (css variable) that applies /// An optional name for a css custom property (css variable) that applies
/// the font family when used. /// the font family when used.
pub variable: Option<String>, pub variable: Option<RcStr>,
pub subsets: Option<Vec<String>>, pub subsets: Option<Vec<RcStr>>,
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -36,8 +36,8 @@ impl NextFontGoogleOptions {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn font_family(self: Vc<Self>) -> Result<Vc<String>> { pub async fn font_family(self: Vc<Self>) -> Result<Vc<RcStr>> {
Ok(Vc::cell((*self.await?.font_family).to_owned())) Ok(Vc::cell((*self.await?.font_family).into()))
} }
} }
@ -51,15 +51,15 @@ pub(super) enum FontWeights {
#[derive(Debug, PartialEq, Eq, Deserialize, Serialize, TraceRawVcs)] #[derive(Debug, PartialEq, Eq, Deserialize, Serialize, TraceRawVcs)]
pub(super) struct FontDataEntry { pub(super) struct FontDataEntry {
pub weights: Vec<String>, pub weights: Vec<RcStr>,
pub styles: Vec<String>, pub styles: Vec<RcStr>,
pub axes: Option<Vec<Axis>>, pub axes: Option<Vec<Axis>>,
} }
#[derive(Debug, PartialEq, Deserialize, Serialize, TraceRawVcs)] #[derive(Debug, PartialEq, Deserialize, Serialize, TraceRawVcs)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub(super) struct Axis { pub(super) struct Axis {
pub tag: String, pub tag: RcStr,
pub min: f64, pub min: f64,
pub max: f64, pub max: f64,
} }
@ -71,7 +71,7 @@ impl Eq for Axis {}
// https://github.com/vercel/next.js/blob/28454c6ddbc310419467e5415aee26e48d079b46/packages/font/src/google/utils.ts#L22 // https://github.com/vercel/next.js/blob/28454c6ddbc310419467e5415aee26e48d079b46/packages/font/src/google/utils.ts#L22
pub(super) fn options_from_request( pub(super) fn options_from_request(
request: &NextFontRequest, request: &NextFontRequest,
data: &IndexMap<String, FontDataEntry>, data: &IndexMap<RcStr, FontDataEntry>,
) -> Result<NextFontGoogleOptions> { ) -> Result<NextFontGoogleOptions> {
if request.arguments.len() > 1 { if request.arguments.len() > 1 {
return Err(anyhow!( return Err(anyhow!(
@ -82,10 +82,10 @@ pub(super) fn options_from_request(
let argument = request.arguments.last().cloned().unwrap_or_default(); let argument = request.arguments.last().cloned().unwrap_or_default();
// `import` comes from the imported symbol in JS, which separates with _ // `import` comes from the imported symbol in JS, which separates with _
let font_family = request.import.replace('_', " "); let font_family: RcStr = request.import.replace('_', " ").into();
let font_data = data.get(&font_family).context("Unknown font")?; let font_data = data.get(&font_family).context("Unknown font")?;
let requested_weights: IndexSet<String> = argument let requested_weights: IndexSet<RcStr> = argument
.weight .weight
.map(|w| match w { .map(|w| match w {
OneOrManyStrings::One(one) => indexset! {one}, OneOrManyStrings::One(one) => indexset! {one},
@ -102,7 +102,7 @@ pub(super) fn options_from_request(
.unwrap_or_default(); .unwrap_or_default();
let weights = if requested_weights.is_empty() { let weights = if requested_weights.is_empty() {
if !font_data.weights.contains(&"variable".to_owned()) { if !font_data.weights.contains(&"variable".into()) {
return Err(anyhow!( return Err(anyhow!(
"Missing weight for {}. Available weights: {}", "Missing weight for {}. Available weights: {}",
font_family, font_family,
@ -145,7 +145,7 @@ pub(super) fn options_from_request(
if font_data.styles.len() == 1 { if font_data.styles.len() == 1 {
styles.push(font_data.styles[0].clone()); styles.push(font_data.styles[0].clone());
} else { } else {
styles.push("normal".to_owned()); styles.push("normal".into());
} }
} }
@ -160,9 +160,9 @@ pub(super) fn options_from_request(
} }
} }
let display = argument.display.unwrap_or_else(|| "swap".to_owned()); let display = argument.display.unwrap_or_else(|| "swap".into());
if !ALLOWED_DISPLAY_VALUES.contains(&display.as_ref()) { if !ALLOWED_DISPLAY_VALUES.contains(&display.as_str()) {
return Err(anyhow!( return Err(anyhow!(
"Invalid display value {} for font {}.\nAvailable display values: {}", "Invalid display value {} for font {}.\nAvailable display values: {}",
display, display,
@ -195,6 +195,7 @@ pub(super) fn options_from_request(
mod tests { mod tests {
use anyhow::Result; use anyhow::Result;
use indexmap::IndexMap; use indexmap::IndexMap;
use turbo_tasks::RcStr;
use turbopack_binding::turbo::tasks_fs::json::parse_json_with_source_context; use turbopack_binding::turbo::tasks_fs::json::parse_json_with_source_context;
use super::{options_from_request, FontDataEntry, NextFontGoogleOptions}; use super::{options_from_request, FontDataEntry, NextFontGoogleOptions};
@ -202,7 +203,7 @@ mod tests {
#[test] #[test]
fn test_errors_on_unknown_font() -> Result<()> { fn test_errors_on_unknown_font() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -235,7 +236,7 @@ mod tests {
#[test] #[test]
fn test_default_values_when_no_arguments() -> Result<()> { fn test_default_values_when_no_arguments() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -260,10 +261,10 @@ mod tests {
assert_eq!( assert_eq!(
options_from_request(&request, &data)?, options_from_request(&request, &data)?,
NextFontGoogleOptions { NextFontGoogleOptions {
font_family: "ABeeZee".to_owned(), font_family: "ABeeZee".into(),
weights: FontWeights::Variable, weights: FontWeights::Variable,
styles: vec!["normal".to_owned()], styles: vec!["normal".into()],
display: "swap".to_owned(), display: "swap".into(),
preload: true, preload: true,
selected_variable_axes: None, selected_variable_axes: None,
fallback: None, fallback: None,
@ -278,7 +279,7 @@ mod tests {
#[test] #[test]
fn test_errors_when_no_weights_chosen_no_variable() -> Result<()> { fn test_errors_when_no_weights_chosen_no_variable() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -314,7 +315,7 @@ mod tests {
#[test] #[test]
fn test_errors_on_unnecessary_weights() -> Result<()> { fn test_errors_on_unnecessary_weights() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -353,7 +354,7 @@ mod tests {
#[test] #[test]
fn test_errors_on_unvavailable_weights() -> Result<()> { fn test_errors_on_unvavailable_weights() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -391,7 +392,7 @@ mod tests {
#[test] #[test]
fn test_defaults_to_only_style_when_one_available() -> Result<()> { fn test_defaults_to_only_style_when_one_available() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -416,14 +417,14 @@ mod tests {
)?; )?;
let options = options_from_request(&request, &data)?; let options = options_from_request(&request, &data)?;
assert_eq!(options.styles, vec!["italic".to_owned()]); assert_eq!(options.styles, vec![RcStr::from("italic")]);
Ok(()) Ok(())
} }
#[test] #[test]
fn test_defaults_to_normal_style_when_multiple() -> Result<()> { fn test_defaults_to_normal_style_when_multiple() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -448,14 +449,14 @@ mod tests {
)?; )?;
let options = options_from_request(&request, &data)?; let options = options_from_request(&request, &data)?;
assert_eq!(options.styles, vec!["normal".to_owned()]); assert_eq!(options.styles, vec![RcStr::from("normal")]);
Ok(()) Ok(())
} }
#[test] #[test]
fn test_errors_on_unknown_styles() -> Result<()> { fn test_errors_on_unknown_styles() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -495,7 +496,7 @@ mod tests {
#[test] #[test]
fn test_errors_on_unknown_display() -> Result<()> { fn test_errors_on_unknown_display() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {
@ -536,7 +537,7 @@ mod tests {
#[test] #[test]
fn test_errors_on_axes_without_variable() -> Result<()> { fn test_errors_on_axes_without_variable() -> Result<()> {
let data: IndexMap<String, FontDataEntry> = parse_json_with_source_context( let data: IndexMap<RcStr, FontDataEntry> = parse_json_with_source_context(
r#" r#"
{ {
"ABeeZee": { "ABeeZee": {


@ -1,4 +1,5 @@
use serde::Deserialize; use serde::Deserialize;
use turbo_tasks::RcStr;
/// The top-most structure encoded into the query param in requests to /// The top-most structure encoded into the query param in requests to
/// `next/font/google` generated by the next/font swc transform. e.g. /// `next/font/google` generated by the next/font swc transform. e.g.
@ -6,26 +7,26 @@ use serde::Deserialize;
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub(super) struct NextFontRequest { pub(super) struct NextFontRequest {
pub import: String, pub import: RcStr,
pub arguments: Vec<NextFontRequestArguments>, pub arguments: Vec<NextFontRequestArguments>,
} }
#[derive(Clone, Debug, Default, Deserialize)] #[derive(Clone, Debug, Default, Deserialize)]
pub(super) struct NextFontRequestArguments { pub(super) struct NextFontRequestArguments {
pub weight: Option<OneOrManyStrings>, pub weight: Option<OneOrManyStrings>,
pub subsets: Option<Vec<String>>, pub subsets: Option<Vec<RcStr>>,
pub style: Option<OneOrManyStrings>, pub style: Option<OneOrManyStrings>,
pub display: Option<String>, pub display: Option<RcStr>,
pub preload: Option<bool>, pub preload: Option<bool>,
pub axes: Option<Vec<String>>, pub axes: Option<Vec<RcStr>>,
pub fallback: Option<Vec<String>>, pub fallback: Option<Vec<RcStr>>,
pub adjust_font_fallback: Option<bool>, pub adjust_font_fallback: Option<bool>,
pub variable: Option<String>, pub variable: Option<RcStr>,
} }
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
#[serde(untagged)] #[serde(untagged)]
pub(super) enum OneOrManyStrings { pub(super) enum OneOrManyStrings {
One(String), One(RcStr),
Many(Vec<String>), Many(Vec<RcStr>),
} }
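
The options.rs and request.rs hunks above key the font data map by the shared string type and build the lookup key from the imported symbol via `request.import.replace('_', " ").into()`. Below is a self-contained sketch of that lookup using std types only; `HashMap` and `Arc<str>` stand in for `IndexMap` and `RcStr`, and the struct is a pared-down illustration rather than the crate's actual FontDataEntry.

    use std::{collections::HashMap, sync::Arc};

    // Stand-in for the diff's RcStr.
    type SharedStr = Arc<str>;

    // Pared-down illustration of a font data entry.
    #[derive(Debug)]
    struct FontDataEntry {
        weights: Vec<SharedStr>,
        styles: Vec<SharedStr>,
    }

    fn main() {
        // Font data keyed by family name, as in `IndexMap<RcStr, FontDataEntry>`.
        let mut data: HashMap<SharedStr, FontDataEntry> = HashMap::new();
        data.insert(
            "Roboto Mono".into(),
            FontDataEntry {
                weights: vec!["variable".into()],
                styles: vec!["normal".into(), "italic".into()],
            },
        );

        // The imported JS symbol uses `_` for spaces, e.g. `Roboto_Mono`,
        // mirroring how options_from_request derives the family name.
        let import = "Roboto_Mono";
        let font_family: SharedStr = import.replace('_', " ").into();

        match data.get(&font_family) {
            Some(entry) => println!("{font_family}: {entry:?}"),
            None => println!("Unknown font: {font_family}"),
        }
    }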


@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use super::FontCssProperties; use super::FontCssProperties;
use crate::next_font::{ use crate::next_font::{
@ -9,16 +9,16 @@ use crate::next_font::{
#[turbo_tasks::function] #[turbo_tasks::function]
pub(super) async fn build_stylesheet( pub(super) async fn build_stylesheet(
base_stylesheet: Vc<Option<String>>, base_stylesheet: Vc<Option<RcStr>>,
font_css_properties: Vc<FontCssProperties>, font_css_properties: Vc<FontCssProperties>,
font_fallback: Vc<FontFallback>, font_fallback: Vc<FontFallback>,
) -> Result<Vc<String>> { ) -> Result<Vc<RcStr>> {
let base_stylesheet = &*base_stylesheet.await?; let base_stylesheet = &*base_stylesheet.await?;
let mut stylesheet = base_stylesheet let mut stylesheet = base_stylesheet
.as_ref() .as_ref()
.map_or_else(|| "".to_owned(), |s| s.to_owned()); .map_or_else(|| "".to_owned(), |s| s.to_string());
stylesheet.push_str(&build_fallback_definition(Vc::cell(vec![font_fallback])).await?); stylesheet.push_str(&build_fallback_definition(Vc::cell(vec![font_fallback])).await?);
stylesheet.push_str(&build_font_class_rules(font_css_properties).await?); stylesheet.push_str(&build_font_class_rules(font_css_properties).await?);
Ok(Vc::cell(stylesheet)) Ok(Vc::cell(stylesheet.into()))
} }


@ -2,14 +2,15 @@ use std::cmp::Ordering;
use anyhow::{anyhow, bail, Context, Result}; use anyhow::{anyhow, bail, Context, Result};
use indexmap::{indexset, IndexSet}; use indexmap::{indexset, IndexSet};
use turbo_tasks::RcStr;
use super::options::{FontData, FontWeights}; use super::options::{FontData, FontWeights};
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub(super) struct FontAxes { pub(super) struct FontAxes {
pub(super) wght: IndexSet<String>, pub(super) wght: IndexSet<RcStr>,
pub(super) ital: IndexSet<FontStyle>, pub(super) ital: IndexSet<FontStyle>,
pub(super) variable_axes: Option<Vec<(String, String)>>, pub(super) variable_axes: Option<Vec<(RcStr, RcStr)>>,
} }
#[derive(Debug, PartialEq, Eq, Hash)] #[derive(Debug, PartialEq, Eq, Hash)]
@ -23,8 +24,8 @@ pub(super) fn get_font_axes(
font_data: &FontData, font_data: &FontData,
font_family: &str, font_family: &str,
weights: &FontWeights, weights: &FontWeights,
styles: &[String], styles: &[RcStr],
selected_variable_axes: &Option<Vec<String>>, selected_variable_axes: &Option<Vec<RcStr>>,
) -> Result<FontAxes> { ) -> Result<FontAxes> {
let all_axes = &font_data let all_axes = &font_data
.get(font_family) .get(font_family)
@ -32,8 +33,8 @@ pub(super) fn get_font_axes(
.axes; .axes;
let ital = { let ital = {
let has_italic = styles.contains(&"italic".to_owned()); let has_italic = styles.contains(&"italic".into());
let has_normal = styles.contains(&"normal".to_owned()); let has_normal = styles.contains(&"normal".into());
let mut set = IndexSet::new(); let mut set = IndexSet::new();
if has_normal { if has_normal {
set.insert(FontStyle::Normal); set.insert(FontStyle::Normal);
@ -54,7 +55,7 @@ pub(super) fn get_font_axes(
let definable_axes_tags = defineable_axes let definable_axes_tags = defineable_axes
.iter() .iter()
.map(|axis| axis.tag.to_owned()) .map(|axis| axis.tag.to_owned())
.collect::<Vec<String>>(); .collect::<Vec<RcStr>>();
for tag in selected_variable_axes { for tag in selected_variable_axes {
if !definable_axes_tags.contains(tag) { if !definable_axes_tags.contains(tag) {
@ -72,11 +73,13 @@ pub(super) fn get_font_axes(
let mut variable_axes = vec![]; let mut variable_axes = vec![];
for axis in defineable_axes { for axis in defineable_axes {
if axis.tag == "wght" { if axis.tag == "wght" {
weight_axis = Some(format!("{}..{}", axis.min, axis.max)); weight_axis = Some(format!("{}..{}", axis.min, axis.max).into());
} else if let Some(selected_variable_axes) = selected_variable_axes { } else if let Some(selected_variable_axes) = selected_variable_axes {
if selected_variable_axes.contains(&axis.tag) { if selected_variable_axes.contains(&axis.tag) {
-variable_axes
-    .push((axis.tag.clone(), format!("{}..{}", axis.min, axis.max)));
+variable_axes.push((
+    axis.tag.clone(),
+    format!("{}..{}", axis.min, axis.max).into(),
+));
} }
} }
} }
@ -96,7 +99,7 @@ pub(super) fn get_font_axes(
} }
FontWeights::Fixed(weights) => Ok(FontAxes { FontWeights::Fixed(weights) => Ok(FontAxes {
wght: IndexSet::from_iter(weights.iter().map(|w| w.to_string())), wght: IndexSet::from_iter(weights.iter().map(|w| w.to_string().into())),
ital, ital,
variable_axes: None, variable_axes: None,
}), }),
@ -320,12 +323,12 @@ mod tests {
"Inter", "Inter",
&FontWeights::Variable, &FontWeights::Variable,
&[], &[],
&Some(vec!["slnt".to_owned()]), &Some(vec!["slnt".into()]),
)?, )?,
FontAxes { FontAxes {
wght: indexset! {"100..900".to_owned()}, wght: indexset! {"100..900".into()},
ital: indexset! {}, ital: indexset! {},
variable_axes: Some(vec![("slnt".to_owned(), "-10..0".to_owned())]) variable_axes: Some(vec![("slnt".into(), "-10..0".into())])
} }
); );
Ok(()) Ok(())
@ -361,12 +364,12 @@ mod tests {
"Inter", "Inter",
&FontWeights::Variable, &FontWeights::Variable,
&[], &[],
&Some(vec!["slnt".to_owned()]), &Some(vec!["slnt".into()]),
)?, )?,
FontAxes { FontAxes {
wght: indexset! {}, wght: indexset! {},
ital: indexset! {}, ital: indexset! {},
variable_axes: Some(vec![("slnt".to_owned(), "-10..0".to_owned())]) variable_axes: Some(vec![("slnt".into(), "-10..0".into())])
} }
); );
Ok(()) Ok(())
@ -396,7 +399,7 @@ mod tests {
assert_eq!( assert_eq!(
get_font_axes(&data, "Hind", &FontWeights::Fixed(vec![500]), &[], &None)?, get_font_axes(&data, "Hind", &FontWeights::Fixed(vec![500]), &[], &None)?,
FontAxes { FontAxes {
wght: indexset! {"500".to_owned()}, wght: indexset! {"500".into()},
ital: indexset! {}, ital: indexset! {},
variable_axes: None variable_axes: None
} }
@ -411,7 +414,7 @@ mod tests {
GOOGLE_FONTS_STYLESHEET_URL, GOOGLE_FONTS_STYLESHEET_URL,
"Roboto Mono", "Roboto Mono",
&FontAxes { &FontAxes {
wght: indexset! {"500".to_owned()}, wght: indexset! {"500".into()},
ital: indexset! {FontStyle::Normal}, ital: indexset! {FontStyle::Normal},
variable_axes: None variable_axes: None
}, },
@ -430,12 +433,12 @@ mod tests {
GOOGLE_FONTS_STYLESHEET_URL, GOOGLE_FONTS_STYLESHEET_URL,
"Roboto Serif", "Roboto Serif",
&FontAxes { &FontAxes {
wght: indexset! {"500".to_owned()}, wght: indexset! {"500".into()},
ital: indexset! {FontStyle::Normal}, ital: indexset! {FontStyle::Normal},
variable_axes: Some(vec![ variable_axes: Some(vec![
("GRAD".to_owned(), "-50..100".to_owned()), ("GRAD".into(), "-50..100".into()),
("opsz".to_owned(), "8..144".to_owned()), ("opsz".into(), "8..144".into()),
("wdth".to_owned(), "50..150".to_owned()), ("wdth".into(), "50..150".into()),
]) ])
}, },
"optional" "optional"
@ -453,12 +456,12 @@ mod tests {
GOOGLE_FONTS_STYLESHEET_URL, GOOGLE_FONTS_STYLESHEET_URL,
"Roboto Serif", "Roboto Serif",
&FontAxes { &FontAxes {
wght: indexset! {"500".to_owned(), "300".to_owned()}, wght: indexset! {"500".into(), "300".into()},
ital: indexset! {FontStyle::Normal, FontStyle::Italic}, ital: indexset! {FontStyle::Normal, FontStyle::Italic},
variable_axes: Some(vec![ variable_axes: Some(vec![
("GRAD".to_owned(), "-50..100".to_owned()), ("GRAD".into(), "-50..100".into()),
("opsz".to_owned(), "8..144".to_owned()), ("opsz".into(), "8..144".into()),
("wdth".to_owned(), "50..150".to_owned()), ("wdth".into(), "50..150".into()),
]) ])
}, },
"optional" "optional"
@ -480,8 +483,8 @@ mod tests {
wght: indexset! {}, wght: indexset! {},
ital: indexset! {}, ital: indexset! {},
variable_axes: Some(vec![ variable_axes: Some(vec![
("EDPT".to_owned(), "0..200".to_owned()), ("EDPT".into(), "0..200".into()),
("EHLT".to_owned(), "0..24".to_owned()), ("EHLT".into(), "0..24".into()),
]) ])
}, },
"optional" "optional"
@ -537,7 +540,7 @@ mod tests {
GOOGLE_FONTS_STYLESHEET_URL, GOOGLE_FONTS_STYLESHEET_URL,
"Hind", "Hind",
&FontAxes { &FontAxes {
wght: indexset! {"500".to_owned()}, wght: indexset! {"500".into()},
ital: indexset! {}, ital: indexset! {},
variable_axes: None variable_axes: None
}, },

View file

@ -1,7 +1,8 @@
use thiserror::Error; use thiserror::Error;
use turbo_tasks::RcStr;
#[derive(Debug, Error)] #[derive(Debug, Error)]
pub enum FontError { pub enum FontError {
#[error("could not find font file")] #[error("could not find font file")]
FontFileNotFound(String), FontFileNotFound(RcStr),
} }


@ -39,7 +39,7 @@ pub(super) async fn get_font_fallbacks(
AdjustFontFallback::Arial => font_fallbacks.push( AdjustFontFallback::Arial => font_fallbacks.push(
FontFallback::Automatic(AutomaticFontFallback { FontFallback::Automatic(AutomaticFontFallback {
scoped_font_family, scoped_font_family,
local_font_family: Vc::cell("Arial".to_owned()), local_font_family: Vc::cell("Arial".into()),
adjustment: Some( adjustment: Some(
get_font_adjustment(context, options_vc, &DEFAULT_SANS_SERIF_FONT).await?, get_font_adjustment(context, options_vc, &DEFAULT_SANS_SERIF_FONT).await?,
), ),
@ -49,7 +49,7 @@ pub(super) async fn get_font_fallbacks(
AdjustFontFallback::TimesNewRoman => font_fallbacks.push( AdjustFontFallback::TimesNewRoman => font_fallbacks.push(
FontFallback::Automatic(AutomaticFontFallback { FontFallback::Automatic(AutomaticFontFallback {
scoped_font_family, scoped_font_family,
local_font_family: Vc::cell("Times New Roman".to_owned()), local_font_family: Vc::cell("Times New Roman".into()),
adjustment: Some( adjustment: Some(
get_font_adjustment(context, options_vc, &DEFAULT_SERIF_FONT).await?, get_font_adjustment(context, options_vc, &DEFAULT_SERIF_FONT).await?,
), ),
@ -164,7 +164,7 @@ fn pick_font_for_fallback_generation(
// Prefer normal style if they have the same weight // Prefer normal style if they have the same weight
if used_font_distance == current_font_distance if used_font_distance == current_font_distance
&& current_descriptor.style != Some("italic".to_owned()) && current_descriptor.style != Some("italic".into())
{ {
used_descriptor = current_descriptor; used_descriptor = current_descriptor;
continue; continue;
@ -251,6 +251,7 @@ fn parse_weight_string(weight_str: &str) -> Result<f64> {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use anyhow::Result; use anyhow::Result;
use turbo_tasks::RcStr;
use crate::next_font::local::{ use crate::next_font::local::{
font_fallback::pick_font_for_fallback_generation, font_fallback::pick_font_for_fallback_generation,
@ -259,9 +260,9 @@ mod tests {
fn generate_font_descriptor(weight: &FontWeight, style: &Option<String>) -> FontDescriptor { fn generate_font_descriptor(weight: &FontWeight, style: &Option<String>) -> FontDescriptor {
FontDescriptor { FontDescriptor {
ext: "ttf".to_owned(), ext: "ttf".into(),
path: "foo.ttf".to_owned(), path: "foo.ttf".into(),
style: style.clone(), style: style.clone().map(RcStr::from),
weight: Some(weight.clone()), weight: Some(weight.clone()),
} }
} }
@ -270,34 +271,34 @@ mod tests {
fn test_picks_weight_closest_to_400() -> Result<()> { fn test_picks_weight_closest_to_400() -> Result<()> {
assert_eq!( assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
generate_font_descriptor(&FontWeight::Fixed("300".to_owned()), &None), generate_font_descriptor(&FontWeight::Fixed("300".into()), &None),
generate_font_descriptor(&FontWeight::Fixed("600".to_owned()), &None) generate_font_descriptor(&FontWeight::Fixed("600".into()), &None)
]))?, ]))?,
&generate_font_descriptor(&FontWeight::Fixed("300".to_owned()), &None) &generate_font_descriptor(&FontWeight::Fixed("300".into()), &None)
); );
assert_eq!( assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
generate_font_descriptor(&FontWeight::Fixed("200".to_owned()), &None), generate_font_descriptor(&FontWeight::Fixed("200".into()), &None),
generate_font_descriptor(&FontWeight::Fixed("500".to_owned()), &None) generate_font_descriptor(&FontWeight::Fixed("500".into()), &None)
]))?, ]))?,
&generate_font_descriptor(&FontWeight::Fixed("500".to_owned()), &None) &generate_font_descriptor(&FontWeight::Fixed("500".into()), &None)
); );
assert_eq!( assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
generate_font_descriptor(&FontWeight::Fixed("normal".to_owned()), &None), generate_font_descriptor(&FontWeight::Fixed("normal".into()), &None),
generate_font_descriptor(&FontWeight::Fixed("700".to_owned()), &None) generate_font_descriptor(&FontWeight::Fixed("700".into()), &None)
]))?, ]))?,
&generate_font_descriptor(&FontWeight::Fixed("normal".to_owned()), &None) &generate_font_descriptor(&FontWeight::Fixed("normal".into()), &None)
); );
assert_eq!( assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
generate_font_descriptor(&FontWeight::Fixed("bold".to_owned()), &None), generate_font_descriptor(&FontWeight::Fixed("bold".into()), &None),
generate_font_descriptor(&FontWeight::Fixed("900".to_owned()), &None) generate_font_descriptor(&FontWeight::Fixed("900".into()), &None)
]))?, ]))?,
&generate_font_descriptor(&FontWeight::Fixed("bold".to_owned()), &None) &generate_font_descriptor(&FontWeight::Fixed("bold".into()), &None)
); );
Ok(()) Ok(())
@ -307,10 +308,10 @@ mod tests {
fn test_picks_thinner_weight_if_same_distance_to_400() -> Result<()> { fn test_picks_thinner_weight_if_same_distance_to_400() -> Result<()> {
assert_eq!( assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
generate_font_descriptor(&FontWeight::Fixed("300".to_owned()), &None), generate_font_descriptor(&FontWeight::Fixed("300".into()), &None),
generate_font_descriptor(&FontWeight::Fixed("500".to_owned()), &None) generate_font_descriptor(&FontWeight::Fixed("500".into()), &None)
]))?, ]))?,
&generate_font_descriptor(&FontWeight::Fixed("300".to_owned()), &None) &generate_font_descriptor(&FontWeight::Fixed("300".into()), &None)
); );
Ok(()) Ok(())
@ -320,53 +321,26 @@ mod tests {
fn test_picks_variable_closest_to_400() -> Result<()> { fn test_picks_variable_closest_to_400() -> Result<()> {
assert_eq!( assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
-generate_font_descriptor(
-    &FontWeight::Variable("100".to_owned(), "300".to_owned()),
-    &None
-),
-generate_font_descriptor(
-    &FontWeight::Variable("600".to_owned(), "900".to_owned()),
-    &None
-)
+generate_font_descriptor(&FontWeight::Variable("100".into(), "300".into()), &None),
+generate_font_descriptor(&FontWeight::Variable("600".into(), "900".into()), &None)
]))?,
-&generate_font_descriptor(
-    &FontWeight::Variable("100".to_owned(), "300".to_owned()),
-    &None
-)
+&generate_font_descriptor(&FontWeight::Variable("100".into(), "300".into()), &None)
);
assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
-generate_font_descriptor(
-    &FontWeight::Variable("100".to_owned(), "200".to_owned()),
-    &None
-),
-generate_font_descriptor(
-    &FontWeight::Variable("500".to_owned(), "800".to_owned()),
-    &None
-)
+generate_font_descriptor(&FontWeight::Variable("100".into(), "200".into()), &None),
+generate_font_descriptor(&FontWeight::Variable("500".into(), "800".into()), &None)
]))?,
-&generate_font_descriptor(
-    &FontWeight::Variable("500".to_owned(), "800".to_owned()),
-    &None
-)
+&generate_font_descriptor(&FontWeight::Variable("500".into(), "800".into()), &None)
);
assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
-generate_font_descriptor(
-    &FontWeight::Variable("100".to_owned(), "900".to_owned()),
-    &None
-),
-generate_font_descriptor(
-    &FontWeight::Variable("300".to_owned(), "399".to_owned()),
-    &None
-)
+generate_font_descriptor(&FontWeight::Variable("100".into(), "900".into()), &None),
+generate_font_descriptor(&FontWeight::Variable("300".into(), "399".into()), &None)
]))?,
-&generate_font_descriptor(
-    &FontWeight::Variable("100".to_owned(), "900".to_owned()),
-    &None
-)
+&generate_font_descriptor(&FontWeight::Variable("100".into(), "900".into()), &None)
);
Ok(()) Ok(())
@ -376,19 +350,10 @@ mod tests {
fn test_prefer_normal_over_italic() -> Result<()> { fn test_prefer_normal_over_italic() -> Result<()> {
assert_eq!( assert_eq!(
pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
-generate_font_descriptor(
-    &FontWeight::Fixed("400".to_owned()),
-    &Some("normal".to_owned())
-),
-generate_font_descriptor(
-    &FontWeight::Fixed("400".to_owned()),
-    &Some("italic".to_owned())
-)
+generate_font_descriptor(&FontWeight::Fixed("400".into()), &Some("normal".into())),
+generate_font_descriptor(&FontWeight::Fixed("400".into()), &Some("italic".into()))
]))?,
-&generate_font_descriptor(
-    &FontWeight::Fixed("400".to_owned()),
-    &Some("normal".to_owned())
-)
+&generate_font_descriptor(&FontWeight::Fixed("400".into()), &Some("normal".into()))
); );
Ok(()) Ok(())
@ -397,22 +362,10 @@ mod tests {
#[test] #[test]
fn test_errors_on_invalid_weight() -> Result<()> { fn test_errors_on_invalid_weight() -> Result<()> {
match pick_font_for_fallback_generation(&FontDescriptors::Many(vec![ match pick_font_for_fallback_generation(&FontDescriptors::Many(vec![
-generate_font_descriptor(
-    &FontWeight::Variable("normal".to_owned(), "bold".to_owned()),
-    &None,
-),
-generate_font_descriptor(
-    &FontWeight::Variable("400".to_owned(), "bold".to_owned()),
-    &None,
-),
-generate_font_descriptor(
-    &FontWeight::Variable("normal".to_owned(), "700".to_owned()),
-    &None,
-),
-generate_font_descriptor(
-    &FontWeight::Variable("100".to_owned(), "abc".to_owned()),
-    &None,
-),
+generate_font_descriptor(&FontWeight::Variable("normal".into(), "bold".into()), &None),
+generate_font_descriptor(&FontWeight::Variable("400".into(), "bold".into()), &None),
+generate_font_descriptor(&FontWeight::Variable("normal".into(), "700".into()), &None),
+generate_font_descriptor(&FontWeight::Variable("100".into(), "abc".into()), &None),
])) { ])) {
Ok(_) => panic!(), Ok(_) => panic!(),
Err(err) => { Err(err) => {


@ -1,7 +1,7 @@
use anyhow::{bail, Context, Result}; use anyhow::{bail, Context, Result};
use indoc::formatdoc; use indoc::formatdoc;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbo_tasks_fs::glob::Glob; use turbo_tasks_fs::glob::Glob;
use turbopack_binding::{ use turbopack_binding::{
turbo::{ turbo::{
@ -48,7 +48,7 @@ pub mod util;
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize)]
struct NextFontLocalFontFileOptions { struct NextFontLocalFontFileOptions {
pub path: String, pub path: RcStr,
pub preload: bool, pub preload: bool,
pub has_size_adjust: bool, pub has_size_adjust: bool,
} }
@ -71,7 +71,7 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn before_resolve_condition(&self) -> Vc<BeforeResolvePluginCondition> { async fn before_resolve_condition(&self) -> Vc<BeforeResolvePluginCondition> {
BeforeResolvePluginCondition::new(Glob::new( BeforeResolvePluginCondition::new(Glob::new(
"{next,@vercel/turbopack-next/internal}/font/local/*".to_string(), "{next,@vercel/turbopack-next/internal}/font/local/*".into(),
)) ))
} }
@ -119,18 +119,21 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin {
{ {
FontResolvingIssue { FontResolvingIssue {
origin_path: lookup_path, origin_path: lookup_path,
font_path: Vc::cell(font_path.to_string()), font_path: Vc::cell(font_path.clone()),
} }
.cell() .cell()
.emit(); .emit();
return Ok(ResolveResultOption::some( return Ok(ResolveResultOption::some(
ResolveResult::primary_with_key( ResolveResult::primary_with_key(
RequestKey::new(font_path.to_string()), RequestKey::new(font_path.clone()),
-ResolveResultItem::Error(Vc::cell(format!(
-    "Font file not found: Can't resolve {}'",
-    font_path
-))),
+ResolveResultItem::Error(Vc::cell(
+    format!(
+        "Font file not found: Can't resolve {}'",
+        font_path
+    )
+    .into(),
+)),
) )
.into(), .into(),
)); ));
@ -173,10 +176,13 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin {
.unwrap_or_else(|| "".to_owned()), .unwrap_or_else(|| "".to_owned()),
); );
let js_asset = VirtualSource::new( let js_asset = VirtualSource::new(
-lookup_path.join(format!(
-    "{}.js",
-    get_request_id(options_vc.font_family(), request_hash).await?
-)),
+lookup_path.join(
+    format!(
+        "{}.js",
+        get_request_id(options_vc.font_family(), request_hash).await?
+    )
+    .into(),
+),
AssetContent::file(FileContent::Content(file_content.into()).into()), AssetContent::file(FileContent::Content(file_content.into()).into()),
); );
@ -188,10 +194,13 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin {
let query = query_vc.await?.to_string(); let query = query_vc.await?.to_string();
let request_hash = get_request_hash(&query).await?; let request_hash = get_request_hash(&query).await?;
let options = font_options_from_query_map(*query_vc); let options = font_options_from_query_map(*query_vc);
-let css_virtual_path = lookup_path.join(format!(
-    "/{}.module.css",
-    get_request_id(options.font_family(), request_hash).await?
-));
+let css_virtual_path = lookup_path.join(
+    format!(
+        "/{}.module.css",
+        get_request_id(options.font_family(), request_hash).await?
+    )
+    .into(),
+);
let fallback = get_font_fallbacks(lookup_path, options); let fallback = get_font_fallbacks(lookup_path, options);
let stylesheet = build_stylesheet( let stylesheet = build_stylesheet(
@ -229,7 +238,7 @@ impl BeforeResolvePlugin for NextFontLocalResolvePlugin {
name.push_str(".p") name.push_str(".p")
} }
let font_virtual_path = lookup_path.join(format!("/{}.{}", name, ext)); let font_virtual_path = lookup_path.join(format!("/{}.{}", name, ext).into());
let font_file = lookup_path.join(path.clone()).read(); let font_file = lookup_path.join(path.clone()).read();
@ -264,7 +273,7 @@ async fn get_font_css_properties(
// Don't include values for variable fonts. These are included in font-face // Don't include values for variable fonts. These are included in font-face
// definitions only. // definitions only.
.filter(|w| !matches!(w, FontWeight::Variable(_, _))) .filter(|w| !matches!(w, FontWeight::Variable(_, _)))
.map(|w| w.to_string()), .map(|w| w.to_string().into()),
}), }),
style: Vc::cell(match &options.fonts { style: Vc::cell(match &options.fonts {
FontDescriptors::Many(_) => None, FontDescriptors::Many(_) => None,
@ -277,7 +286,7 @@ async fn get_font_css_properties(
} }
#[turbo_tasks::function] #[turbo_tasks::function]
async fn font_options_from_query_map(query: Vc<String>) -> Result<Vc<NextFontLocalOptions>> { async fn font_options_from_query_map(query: Vc<RcStr>) -> Result<Vc<NextFontLocalOptions>> {
let query_map = qstring::QString::from(&**query.await?); let query_map = qstring::QString::from(&**query.await?);
if query_map.len() != 1 { if query_map.len() != 1 {
@ -293,7 +302,7 @@ async fn font_options_from_query_map(query: Vc<String>) -> Result<Vc<NextFontLoc
} }
async fn font_file_options_from_query_map( async fn font_file_options_from_query_map(
query: Vc<String>, query: Vc<RcStr>,
) -> Result<NextFontLocalFontFileOptions> { ) -> Result<NextFontLocalFontFileOptions> {
let query_map = qstring::QString::from(&**query.await?); let query_map = qstring::QString::from(&**query.await?);
@ -310,7 +319,7 @@ async fn font_file_options_from_query_map(
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
struct FontResolvingIssue { struct FontResolvingIssue {
font_path: Vc<String>, font_path: Vc<RcStr>,
origin_path: Vc<FileSystemPath>, origin_path: Vc<FileSystemPath>,
} }
@ -335,9 +344,9 @@ impl Issue for FontResolvingIssue {
async fn title(self: Vc<Self>) -> Result<Vc<StyledString>> { async fn title(self: Vc<Self>) -> Result<Vc<StyledString>> {
let this = self.await?; let this = self.await?;
Ok(StyledString::Line(vec![ Ok(StyledString::Line(vec![
StyledString::Text("Font file not found: Can't resolve '".to_string()), StyledString::Text("Font file not found: Can't resolve '".into()),
StyledString::Code(this.font_path.await?.to_string()), StyledString::Code(this.font_path.await?.clone_value()),
StyledString::Text("'".to_string()), StyledString::Text("'".into()),
]) ])
.cell()) .cell())
} }


@ -2,7 +2,7 @@ use std::{fmt::Display, str::FromStr};
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbo::tasks::{trace::TraceRawVcs, Value}; use turbopack_binding::turbo::tasks::{trace::TraceRawVcs, Value};
use super::request::{ use super::request::{
@ -17,21 +17,21 @@ use super::request::{
pub(super) struct NextFontLocalOptions { pub(super) struct NextFontLocalOptions {
pub fonts: FontDescriptors, pub fonts: FontDescriptors,
pub default_weight: Option<FontWeight>, pub default_weight: Option<FontWeight>,
pub default_style: Option<String>, pub default_style: Option<RcStr>,
/// The desired css `font-display` property /// The desired css `font-display` property
pub display: String, pub display: RcStr,
pub preload: bool, pub preload: bool,
/// A list of manually-provided fallback fonts to be included in the /// A list of manually-provided fallback fonts to be included in the
/// font-family string as-is. /// font-family string as-is.
pub fallback: Option<Vec<String>>, pub fallback: Option<Vec<RcStr>>,
/// The user's desired fallback font /// The user's desired fallback font
pub adjust_font_fallback: AdjustFontFallback, pub adjust_font_fallback: AdjustFontFallback,
/// An optional name for a css custom property (css variable) that applies /// An optional name for a css custom property (css variable) that applies
/// the font family when used. /// the font family when used.
pub variable: Option<String>, pub variable: Option<RcStr>,
/// The name of the variable assigned to the results of calling the /// The name of the variable assigned to the results of calling the
/// `localFont` function. This is used as the font family's base name. /// `localFont` function. This is used as the font family's base name.
pub variable_name: String, pub variable_name: RcStr,
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -42,8 +42,8 @@ impl NextFontLocalOptions {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn font_family(self: Vc<Self>) -> Result<Vc<String>> { pub async fn font_family(self: Vc<Self>) -> Result<Vc<RcStr>> {
Ok(Vc::cell((*self.await?.variable_name).to_owned())) Ok(Vc::cell(self.await?.variable_name.clone()))
} }
} }
@ -54,9 +54,9 @@ impl NextFontLocalOptions {
)] )]
pub(super) struct FontDescriptor { pub(super) struct FontDescriptor {
pub weight: Option<FontWeight>, pub weight: Option<FontWeight>,
pub style: Option<String>, pub style: Option<RcStr>,
pub path: String, pub path: RcStr,
pub ext: String, pub ext: RcStr,
} }
impl FontDescriptor { impl FontDescriptor {
@ -66,10 +66,10 @@ impl FontDescriptor {
.rsplit('.') .rsplit('.')
.next() .next()
.context("Extension required")? .context("Extension required")?
.to_owned(); .into();
Ok(Self { Ok(Self {
path: src_descriptor.path.to_owned(), path: src_descriptor.path.clone(),
weight: src_descriptor weight: src_descriptor
.weight .weight
.as_ref() .as_ref()
@ -96,8 +96,8 @@ pub(super) enum FontDescriptors {
Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Hash, TraceRawVcs, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Hash, TraceRawVcs,
)] )]
pub(super) enum FontWeight { pub(super) enum FontWeight {
Variable(String, String), Variable(RcStr, RcStr),
Fixed(String), Fixed(RcStr),
} }
pub struct ParseFontWeightErr; pub struct ParseFontWeightErr;
@ -106,9 +106,9 @@ impl FromStr for FontWeight {
fn from_str(weight_str: &str) -> std::result::Result<Self, Self::Err> { fn from_str(weight_str: &str) -> std::result::Result<Self, Self::Err> {
if let Some((start, end)) = weight_str.split_once(' ') { if let Some((start, end)) = weight_str.split_once(' ') {
Ok(FontWeight::Variable(start.to_owned(), end.to_owned())) Ok(FontWeight::Variable(start.into(), end.into()))
} else { } else {
Ok(FontWeight::Fixed(weight_str.to_owned())) Ok(FontWeight::Fixed(weight_str.into()))
} }
} }
} }
@ -120,7 +120,7 @@ impl Display for FontWeight {
"{}", "{}",
match self { match self {
Self::Variable(start, end) => format!("{} {}", start, end), Self::Variable(start, end) => format!("{} {}", start, end),
Self::Fixed(val) => val.to_owned(), Self::Fixed(val) => val.to_string(),
} }
) )
} }
@ -151,16 +151,16 @@ pub(super) fn options_from_request(request: &NextFontLocalRequest) -> Result<Nex
), ),
SrcRequest::One(path) => { SrcRequest::One(path) => {
FontDescriptors::One(FontDescriptor::from_src_request(&SrcDescriptor { FontDescriptors::One(FontDescriptor::from_src_request(&SrcDescriptor {
path: path.to_owned(), path: path.as_str().into(),
weight: weight.to_owned(), weight: weight.as_deref().map(RcStr::from),
style: style.to_owned(), style: style.as_deref().map(RcStr::from),
})?) })?)
} }
}; };
Ok(NextFontLocalOptions { Ok(NextFontLocalOptions {
fonts, fonts,
display: display.to_owned(), display: display.as_str().into(),
preload: preload.to_owned(), preload: preload.to_owned(),
fallback: fallback.to_owned(), fallback: fallback.to_owned(),
adjust_font_fallback: adjust_font_fallback.to_owned(), adjust_font_fallback: adjust_font_fallback.to_owned(),
@ -201,19 +201,19 @@ mod tests {
options_from_request(&request)?, options_from_request(&request)?,
NextFontLocalOptions { NextFontLocalOptions {
fonts: FontDescriptors::One(FontDescriptor { fonts: FontDescriptors::One(FontDescriptor {
path: "./Roboto-Regular.ttf".to_owned(), path: "./Roboto-Regular.ttf".into(),
weight: None, weight: None,
style: None, style: None,
ext: "ttf".to_owned(), ext: "ttf".into(),
}), }),
default_style: None, default_style: None,
default_weight: None, default_weight: None,
display: "swap".to_owned(), display: "swap".into(),
preload: true, preload: true,
fallback: None, fallback: None,
adjust_font_fallback: AdjustFontFallback::Arial, adjust_font_fallback: AdjustFontFallback::Arial,
variable: None, variable: None,
variable_name: "myFont".to_owned() variable_name: "myFont".into()
}, },
); );
@ -249,26 +249,26 @@ mod tests {
NextFontLocalOptions { NextFontLocalOptions {
fonts: FontDescriptors::Many(vec![ fonts: FontDescriptors::Many(vec![
FontDescriptor { FontDescriptor {
path: "./Roboto-Regular.ttf".to_owned(), path: "./Roboto-Regular.ttf".into(),
weight: Some(FontWeight::Fixed("400".to_owned())), weight: Some(FontWeight::Fixed("400".into())),
style: Some("normal".to_owned()), style: Some("normal".into()),
ext: "ttf".to_owned(), ext: "ttf".into(),
}, },
FontDescriptor { FontDescriptor {
path: "./Roboto-Italic.ttf".to_owned(), path: "./Roboto-Italic.ttf".into(),
weight: Some(FontWeight::Fixed("400".to_owned())), weight: Some(FontWeight::Fixed("400".into())),
style: None, style: None,
ext: "ttf".to_owned(), ext: "ttf".into(),
} }
]), ]),
default_weight: Some(FontWeight::Fixed("300".to_owned())), default_weight: Some(FontWeight::Fixed("300".into())),
default_style: Some("italic".to_owned()), default_style: Some("italic".into()),
display: "swap".to_owned(), display: "swap".into(),
preload: true, preload: true,
fallback: None, fallback: None,
adjust_font_fallback: AdjustFontFallback::Arial, adjust_font_fallback: AdjustFontFallback::Arial,
variable: None, variable: None,
variable_name: "myFont".to_owned() variable_name: "myFont".into()
}, },
); );
@ -329,19 +329,19 @@ mod tests {
options_from_request(&request)?, options_from_request(&request)?,
NextFontLocalOptions { NextFontLocalOptions {
fonts: FontDescriptors::One(FontDescriptor { fonts: FontDescriptors::One(FontDescriptor {
path: "./Roboto-Regular.woff".to_owned(), path: "./Roboto-Regular.woff".into(),
weight: Some(FontWeight::Fixed("500".to_owned())), weight: Some(FontWeight::Fixed("500".into())),
style: Some("italic".to_owned()), style: Some("italic".into()),
ext: "woff".to_owned(), ext: "woff".into(),
}), }),
default_style: Some("italic".to_owned()), default_style: Some("italic".into()),
default_weight: Some(FontWeight::Fixed("500".to_owned())), default_weight: Some(FontWeight::Fixed("500".into())),
display: "optional".to_owned(), display: "optional".into(),
preload: false, preload: false,
fallback: Some(vec!["Fallback".to_owned()]), fallback: Some(vec!["Fallback".into()]),
adjust_font_fallback: AdjustFontFallback::TimesNewRoman, adjust_font_fallback: AdjustFontFallback::TimesNewRoman,
variable: Some("myvar".to_owned()), variable: Some("myvar".into()),
variable_name: "myFont".to_owned() variable_name: "myFont".into()
}, },
); );

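The hunks above replace `String` fields and `.to_string()`/`.to_owned()` call sites with `RcStr` and `.into()`. A minimal sketch of why those call sites keep compiling, assuming an `RcStr`-like type that exposes `From<&str>`, `From<String>`, and `Deref<Target = str>` (a hypothetical stand-in, not the actual `turbo_tasks::RcStr` implementation):

```rust
use std::{ops::Deref, sync::Arc};

/// Hypothetical stand-in for `turbo_tasks::RcStr`: an immutable,
/// reference-counted string. The real type may be implemented differently.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct RcStr(Arc<str>);

impl From<&str> for RcStr {
    fn from(s: &str) -> Self {
        RcStr(Arc::from(s))
    }
}

impl From<String> for RcStr {
    fn from(s: String) -> Self {
        RcStr(Arc::from(s))
    }
}

impl Deref for RcStr {
    type Target = str;
    fn deref(&self) -> &str {
        &self.0
    }
}

fn main() {
    // `.to_string()` / `.to_owned()` call sites become `.into()`, so string
    // literals and `format!` results now produce `RcStr` values.
    let display: RcStr = "swap".into();
    let family: RcStr = format!("{} Fallback", "Roboto").into();

    // `Deref<Target = str>` keeps `&str` methods available on the new type.
    assert!(family.ends_with("Fallback"));
    assert_eq!(&*display, "swap");
}
```

With `Deref`, existing `&str`-based helpers such as the `split_once` in `FontWeight::from_str` and the `rsplit('.')` in `FontDescriptor` keep working unchanged.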

@ -1,4 +1,5 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::RcStr;
use turbopack_binding::turbo::tasks::trace::TraceRawVcs; use turbopack_binding::turbo::tasks::trace::TraceRawVcs;
/// The top-most structure encoded into the query param in requests to /// The top-most structure encoded into the query param in requests to
@ -8,40 +9,40 @@ use turbopack_binding::turbo::tasks::trace::TraceRawVcs;
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub(super) struct NextFontLocalRequest { pub(super) struct NextFontLocalRequest {
pub arguments: (NextFontLocalRequestArguments,), pub arguments: (NextFontLocalRequestArguments,),
pub variable_name: String, pub variable_name: RcStr,
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub(super) struct NextFontLocalRequestArguments { pub(super) struct NextFontLocalRequestArguments {
pub src: SrcRequest, pub src: SrcRequest,
pub weight: Option<String>, pub weight: Option<RcStr>,
pub style: Option<String>, pub style: Option<RcStr>,
#[serde(default = "default_display")] #[serde(default = "default_display")]
pub display: String, pub display: RcStr,
#[serde(default = "default_preload")] #[serde(default = "default_preload")]
pub preload: bool, pub preload: bool,
pub fallback: Option<Vec<String>>, pub fallback: Option<Vec<RcStr>>,
#[serde( #[serde(
default = "default_adjust_font_fallback", default = "default_adjust_font_fallback",
deserialize_with = "deserialize_adjust_font_fallback" deserialize_with = "deserialize_adjust_font_fallback"
)] )]
pub adjust_font_fallback: AdjustFontFallback, pub adjust_font_fallback: AdjustFontFallback,
pub variable: Option<String>, pub variable: Option<RcStr>,
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
#[serde(untagged)] #[serde(untagged)]
pub(super) enum SrcRequest { pub(super) enum SrcRequest {
One(String), One(RcStr),
Many(Vec<SrcDescriptor>), Many(Vec<SrcDescriptor>),
} }
#[derive(Clone, Debug, Deserialize)] #[derive(Clone, Debug, Deserialize)]
pub(super) struct SrcDescriptor { pub(super) struct SrcDescriptor {
pub path: String, pub path: RcStr,
pub weight: Option<String>, pub weight: Option<RcStr>,
pub style: Option<String>, pub style: Option<RcStr>,
} }
/// The user's desired fallback font /// The user's desired fallback font
@ -99,8 +100,8 @@ fn default_preload() -> bool {
true true
} }
fn default_display() -> String { fn default_display() -> RcStr {
"swap".to_owned() "swap".into()
} }
#[cfg(test)] #[cfg(test)]


@ -1,6 +1,6 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use indoc::formatdoc; use indoc::formatdoc;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use super::options::{FontDescriptors, NextFontLocalOptions}; use super::options::{FontDescriptors, NextFontLocalOptions};
use crate::next_font::{ use crate::next_font::{
@ -15,30 +15,33 @@ pub(super) async fn build_stylesheet(
options: Vc<NextFontLocalOptions>, options: Vc<NextFontLocalOptions>,
fallbacks: Vc<FontFallbacks>, fallbacks: Vc<FontFallbacks>,
css_properties: Vc<FontCssProperties>, css_properties: Vc<FontCssProperties>,
) -> Result<Vc<String>> { ) -> Result<Vc<RcStr>> {
let scoped_font_family = let scoped_font_family =
get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family()); get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family());
Ok(Vc::cell(formatdoc!( Ok(Vc::cell(
r#" formatdoc!(
r#"
{} {}
{} {}
{} {}
"#, "#,
*build_font_face_definitions(scoped_font_family, options, fallbacks.has_size_adjust()) *build_font_face_definitions(scoped_font_family, options, fallbacks.has_size_adjust())
.await?, .await?,
(*build_fallback_definition(fallbacks).await?), (*build_fallback_definition(fallbacks).await?),
*build_font_class_rules(css_properties).await? *build_font_class_rules(css_properties).await?
))) )
.into(),
))
} }
/// Builds a string of `@font-face` definitions for each local font file /// Builds a string of `@font-face` definitions for each local font file
#[turbo_tasks::function] #[turbo_tasks::function]
pub(super) async fn build_font_face_definitions( pub(super) async fn build_font_face_definitions(
scoped_font_family: Vc<String>, scoped_font_family: Vc<RcStr>,
options: Vc<NextFontLocalOptions>, options: Vc<NextFontLocalOptions>,
has_size_adjust: Vc<bool>, has_size_adjust: Vc<bool>,
) -> Result<Vc<String>> { ) -> Result<Vc<RcStr>> {
let options = &*options.await?; let options = &*options.await?;
let mut definitions = String::new(); let mut definitions = String::new();
@ -83,7 +86,7 @@ pub(super) async fn build_font_face_definitions(
)); ));
} }
Ok(Vc::cell(definitions)) Ok(Vc::cell(definitions.into()))
} }
/// Used as e.g. `format('woff')` in `src` properties in `@font-face` /// Used as e.g. `format('woff')` in `src` properties in `@font-face`


@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use super::options::NextFontLocalOptions; use super::options::NextFontLocalOptions;
use crate::next_font::{ use crate::next_font::{
@ -12,16 +12,17 @@ use crate::next_font::{
pub(super) async fn build_font_family_string( pub(super) async fn build_font_family_string(
options: Vc<NextFontLocalOptions>, options: Vc<NextFontLocalOptions>,
font_fallbacks: Vc<FontFallbacks>, font_fallbacks: Vc<FontFallbacks>,
) -> Result<Vc<String>> { ) -> Result<Vc<RcStr>> {
let mut font_families = vec![format!( let mut font_families = vec![format!(
"'{}'", "'{}'",
*get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family(),).await? *get_scoped_font_family(FontFamilyType::WebFont.cell(), options.font_family(),).await?
)]; )
.into()];
for font_fallback in &*font_fallbacks.await? { for font_fallback in &*font_fallbacks.await? {
match &*font_fallback.await? { match &*font_fallback.await? {
FontFallback::Automatic(fallback) => { FontFallback::Automatic(fallback) => {
font_families.push(format!("'{}'", *fallback.scoped_font_family.await?)); font_families.push(format!("'{}'", *fallback.scoped_font_family.await?).into());
} }
FontFallback::Manual(fallbacks) => { FontFallback::Manual(fallbacks) => {
font_families.extend_from_slice(fallbacks); font_families.extend_from_slice(fallbacks);
@ -30,5 +31,5 @@ pub(super) async fn build_font_family_string(
} }
} }
Ok(Vc::cell(font_families.join(", "))) Ok(Vc::cell(font_families.join(", ").into()))
} }


@ -1,6 +1,6 @@
use anyhow::Result; use anyhow::Result;
use indoc::formatdoc; use indoc::formatdoc;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use super::{ use super::{
font_fallback::{FontFallback, FontFallbacks}, font_fallback::{FontFallback, FontFallbacks},
@ -9,7 +9,7 @@ use super::{
/// Builds `@font-face` stylesheet definition for a given FontFallback /// Builds `@font-face` stylesheet definition for a given FontFallback
#[turbo_tasks::function] #[turbo_tasks::function]
pub(crate) async fn build_fallback_definition(fallbacks: Vc<FontFallbacks>) -> Result<Vc<String>> { pub(crate) async fn build_fallback_definition(fallbacks: Vc<FontFallbacks>) -> Result<Vc<RcStr>> {
let mut res = "".to_owned(); let mut res = "".to_owned();
for fallback_vc in &*fallbacks.await? { for fallback_vc in &*fallbacks.await? {
if let FontFallback::Automatic(fallback) = &*fallback_vc.await? { if let FontFallback::Automatic(fallback) = &*fallback_vc.await? {
@ -44,13 +44,13 @@ pub(crate) async fn build_fallback_definition(fallbacks: Vc<FontFallbacks>) -> R
} }
} }
Ok(Vc::cell(res)) Ok(Vc::cell(res.into()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
pub(super) async fn build_font_class_rules( pub(super) async fn build_font_class_rules(
css_properties: Vc<FontCssProperties>, css_properties: Vc<FontCssProperties>,
) -> Result<Vc<String>> { ) -> Result<Vc<RcStr>> {
let css_properties = &*css_properties.await?; let css_properties = &*css_properties.await?;
let font_family_string = &*css_properties.font_family.await?; let font_family_string = &*css_properties.font_family.await?;
@ -88,7 +88,7 @@ pub(super) async fn build_font_class_rules(
)) ))
} }
Ok(Vc::cell(rules)) Ok(Vc::cell(rules.into()))
} }
fn format_fixed_percentage(value: f64) -> String { fn format_fixed_percentage(value: f64) -> String {


@ -1,6 +1,6 @@
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use serde::Deserialize; use serde::Deserialize;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbo_tasks_fs::{json::parse_json_with_source_context, FileSystemPath}; use turbo_tasks_fs::{json::parse_json_with_source_context, FileSystemPath};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_hash::hash_xxh3_hash64, turbo::tasks_hash::hash_xxh3_hash64,
@ -14,10 +14,10 @@ use super::issue::NextFontIssue;
/// module. /// module.
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub(crate) struct FontCssProperties { pub(crate) struct FontCssProperties {
pub font_family: Vc<String>, pub font_family: Vc<RcStr>,
pub weight: Vc<Option<String>>, pub weight: Vc<Option<RcStr>>,
pub style: Vc<Option<String>>, pub style: Vc<Option<RcStr>>,
pub variable: Vc<Option<String>>, pub variable: Vc<Option<RcStr>>,
} }
/// A hash of the requested querymap derived from how the user invoked /// A hash of the requested querymap derived from how the user invoked
@ -52,35 +52,38 @@ pub(crate) enum FontFamilyType {
#[turbo_tasks::function] #[turbo_tasks::function]
pub(crate) async fn get_scoped_font_family( pub(crate) async fn get_scoped_font_family(
ty: Vc<FontFamilyType>, ty: Vc<FontFamilyType>,
font_family_name: Vc<String>, font_family_name: Vc<RcStr>,
) -> Result<Vc<String>> { ) -> Result<Vc<RcStr>> {
let font_family_base = font_family_name.await?.to_string(); let font_family_base = font_family_name.await?.to_string();
let font_family_name = match &*ty.await? { let font_family_name = match &*ty.await? {
FontFamilyType::WebFont => font_family_base, FontFamilyType::WebFont => font_family_base,
FontFamilyType::Fallback => format!("{} Fallback", font_family_base), FontFamilyType::Fallback => format!("{} Fallback", font_family_base),
}; };
Ok(Vc::cell(font_family_name)) Ok(Vc::cell(font_family_name.into()))
} }
/// Returns a [Vc] for [String] uniquely identifying the request for the font. /// Returns a [Vc] for [String] uniquely identifying the request for the font.
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn get_request_id(font_family: Vc<String>, request_hash: u32) -> Result<Vc<String>> { pub async fn get_request_id(font_family: Vc<RcStr>, request_hash: u32) -> Result<Vc<RcStr>> {
Ok(Vc::cell(format!( Ok(Vc::cell(
"{}_{:x?}", format!(
font_family.await?.to_lowercase().replace(' ', "_"), "{}_{:x?}",
request_hash font_family.await?.to_lowercase().replace(' ', "_"),
))) request_hash
)
.into(),
))
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
struct HasPath { struct HasPath {
path: String, path: RcStr,
} }
pub(crate) async fn can_use_next_font( pub(crate) async fn can_use_next_font(
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
query: Vc<String>, query: Vc<RcStr>,
) -> Result<bool> { ) -> Result<bool> {
let query_map = qstring::QString::from(&**query.await?); let query_map = qstring::QString::from(&**query.await?);
let request: HasPath = parse_json_with_source_context( let request: HasPath = parse_json_with_source_context(
@ -98,12 +101,12 @@ pub(crate) async fn can_use_next_font(
NextFontIssue { NextFontIssue {
path, path,
title: StyledString::Line(vec![ title: StyledString::Line(vec![
StyledString::Code("next/font:".to_string()), StyledString::Code("next/font:".into()),
StyledString::Text(" error:".to_string()), StyledString::Text(" error:".into()),
]) ])
.cell(), .cell(),
description: StyledString::Line(vec![ description: StyledString::Line(vec![
StyledString::Text("Cannot be used within ".to_string()), StyledString::Text("Cannot be used within ".into()),
StyledString::Code(request.path), StyledString::Code(request.path),
]) ])
.cell(), .cell(),

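`get_scoped_font_family` and `get_request_id` above now return `Vc<RcStr>` built with `Vc::cell(... .into())`. The practical difference from a plain `String` is that every consumer of the resolved value can share one allocation instead of copying the bytes; a rough illustration using `Arc<str>` as a stand-in for the reference-counted string:

```rust
use std::sync::Arc;

fn main() {
    // `Arc<str>` as a stand-in for an RcStr-like reference-counted string.
    // The request id value is illustrative only.
    let request_id: Arc<str> = Arc::from("my_font_abc123");

    // Every consumer that stores the value shares the same allocation.
    let stored_in_manifest = Arc::clone(&request_id);
    let stored_in_css_rule = Arc::clone(&request_id);

    // Three handles, one backing buffer.
    assert_eq!(Arc::strong_count(&request_id), 3);
    assert!(Arc::ptr_eq(&request_id, &stored_in_manifest));

    // Cloning a plain `String` would instead copy the bytes every time.
    let owned: String = request_id.to_string();
    let copied = owned.clone();
    assert_eq!(copied.as_str(), &*stored_in_css_rule);
}
```

Whether `turbo_tasks::RcStr` is literally an `Arc<str>` is an implementation detail; the sketch only assumes it is reference counted.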

@ -57,7 +57,7 @@ impl StructuredImageModuleType {
.cell(), .cell(),
), ),
Value::new(ReferenceType::Internal(Vc::cell(indexmap!( Value::new(ReferenceType::Internal(Vc::cell(indexmap!(
"IMAGE".to_string() => Vc::upcast(static_asset) "IMAGE".into() => Vc::upcast(static_asset)
)))), )))),
) )
.module(); .module();


@ -1,7 +1,7 @@
use std::io::Write; use std::io::Write;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{rope::RopeBuilder, FileContent}, turbo::tasks_fs::{rope::RopeBuilder, FileContent},
turbopack::{ turbopack::{
@ -17,8 +17,8 @@ use turbopack_binding::{
use super::module::BlurPlaceholderMode; use super::module::BlurPlaceholderMode;
fn modifier() -> Vc<String> { fn modifier() -> Vc<RcStr> {
Vc::cell("structured image object".to_string()) Vc::cell("structured image object".into())
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -45,7 +45,7 @@ impl Source for StructuredImageFileSource {
self.image self.image
.ident() .ident()
.with_modifier(modifier()) .with_modifier(modifier())
.rename_as("*.mjs".to_string()) .rename_as("*.mjs".into())
} }
} }


@ -2,7 +2,7 @@ use std::collections::{BTreeMap, HashMap};
use anyhow::{Context, Result}; use anyhow::{Context, Result};
use indexmap::{indexmap, IndexMap}; use indexmap::{indexmap, IndexMap};
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{FileSystem, FileSystemPath}, turbo::tasks_fs::{FileSystem, FileSystemPath},
turbopack::{ turbopack::{
@ -249,11 +249,7 @@ pub fn get_next_build_import_map() -> Vc<ImportMap> {
import_map.insert_exact_alias("styled-jsx", external); import_map.insert_exact_alias("styled-jsx", external);
import_map.insert_exact_alias( import_map.insert_exact_alias(
"styled-jsx/style", "styled-jsx/style",
ImportMapping::External( ImportMapping::External(Some("styled-jsx/style.js".into()), ExternalType::CommonJs).cell(),
Some("styled-jsx/style.js".to_string()),
ExternalType::CommonJs,
)
.cell(),
); );
import_map.insert_wildcard_alias("styled-jsx/", external); import_map.insert_wildcard_alias("styled-jsx/", external);
@ -330,11 +326,8 @@ pub async fn get_next_server_import_map(
import_map.insert_exact_alias("styled-jsx", external); import_map.insert_exact_alias("styled-jsx", external);
import_map.insert_exact_alias( import_map.insert_exact_alias(
"styled-jsx/style", "styled-jsx/style",
ImportMapping::External( ImportMapping::External(Some("styled-jsx/style.js".into()), ExternalType::CommonJs)
Some("styled-jsx/style.js".to_string()), .cell(),
ExternalType::CommonJs,
)
.cell(),
); );
import_map.insert_wildcard_alias("styled-jsx/", external); import_map.insert_wildcard_alias("styled-jsx/", external);
// TODO: we should not bundle next/dist/build/utils in the pages renderer at all // TODO: we should not bundle next/dist/build/utils in the pages renderer at all
@ -739,8 +732,8 @@ async fn rsc_aliases(
Ok(()) Ok(())
} }
pub fn mdx_import_source_file() -> String { pub fn mdx_import_source_file() -> RcStr {
format!("{VIRTUAL_PACKAGE_NAME}/mdx-import-source") format!("{VIRTUAL_PACKAGE_NAME}/mdx-import-source").into()
} }
// Insert aliases for Next.js stubs of fetch, object-assign, and url // Insert aliases for Next.js stubs of fetch, object-assign, and url
@ -905,9 +898,7 @@ pub async fn get_next_package(context_directory: Vc<FileSystemPath>) -> Result<V
let result = resolve( let result = resolve(
context_directory, context_directory,
Value::new(ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined)), Value::new(ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined)),
Request::parse(Value::new(Pattern::Constant( Request::parse(Value::new(Pattern::Constant("next/package.json".into()))),
"next/package.json".to_string(),
))),
node_cjs_resolve_options(context_directory.root()), node_cjs_resolve_options(context_directory.root()),
); );
let source = result let source = result
@ -923,7 +914,7 @@ pub async fn insert_alias_option<const N: usize>(
alias_options: Vc<ResolveAliasMap>, alias_options: Vc<ResolveAliasMap>,
conditions: [&'static str; N], conditions: [&'static str; N],
) -> Result<()> { ) -> Result<()> {
let conditions = BTreeMap::from(conditions.map(|c| (c.to_string(), ConditionValue::Set))); let conditions = BTreeMap::from(conditions.map(|c| (c.into(), ConditionValue::Set)));
for (alias, value) in &alias_options.await? { for (alias, value) in &alias_options.await? {
if let Some(mapping) = export_value_to_import_mapping(value, &conditions, project_path) { if let Some(mapping) = export_value_to_import_mapping(value, &conditions, project_path) {
import_map.insert_alias(alias, mapping); import_map.insert_alias(alias, mapping);
@ -934,7 +925,7 @@ pub async fn insert_alias_option<const N: usize>(
fn export_value_to_import_mapping( fn export_value_to_import_mapping(
value: &SubpathValue, value: &SubpathValue,
conditions: &BTreeMap<String, ConditionValue>, conditions: &BTreeMap<RcStr, ConditionValue>,
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
) -> Option<Vc<ImportMapping>> { ) -> Option<Vc<ImportMapping>> {
let mut result = Vec::new(); let mut result = Vec::new();
@ -948,13 +939,13 @@ fn export_value_to_import_mapping(
None None
} else { } else {
Some(if result.len() == 1 { Some(if result.len() == 1 {
ImportMapping::PrimaryAlternative(result[0].0.to_string(), Some(project_path)).cell() ImportMapping::PrimaryAlternative(result[0].0.into(), Some(project_path)).cell()
} else { } else {
ImportMapping::Alternatives( ImportMapping::Alternatives(
result result
.iter() .iter()
.map(|(m, _)| { .map(|(m, _)| {
ImportMapping::PrimaryAlternative(m.to_string(), Some(project_path)).cell() ImportMapping::PrimaryAlternative((*m).into(), Some(project_path)).cell()
}) })
.collect(), .collect(),
) )
@ -990,7 +981,10 @@ fn insert_alias_to_alternatives<'a>(
alias: impl Into<String> + 'a, alias: impl Into<String> + 'a,
alternatives: Vec<Vc<ImportMapping>>, alternatives: Vec<Vc<ImportMapping>>,
) { ) {
import_map.insert_exact_alias(alias, ImportMapping::Alternatives(alternatives).into()); import_map.insert_exact_alias(
alias.into(),
ImportMapping::Alternatives(alternatives).into(),
);
} }
/// Inserts an alias to an import mapping into an import map. /// Inserts an alias to an import mapping into an import map.
@ -1001,7 +995,7 @@ fn insert_package_alias(
) { ) {
import_map.insert_wildcard_alias( import_map.insert_wildcard_alias(
prefix, prefix,
ImportMapping::PrimaryAlternative("./*".to_string(), Some(package_root)).cell(), ImportMapping::PrimaryAlternative("./*".into(), Some(package_root)).cell(),
); );
} }
@ -1017,11 +1011,11 @@ fn insert_turbopack_dev_alias(import_map: &mut ImportMap) {
/// Creates a direct import mapping to the result of resolving a request /// Creates a direct import mapping to the result of resolving a request
/// in a context. /// in a context.
fn request_to_import_mapping(context_path: Vc<FileSystemPath>, request: &str) -> Vc<ImportMapping> { fn request_to_import_mapping(context_path: Vc<FileSystemPath>, request: &str) -> Vc<ImportMapping> {
ImportMapping::PrimaryAlternative(request.to_string(), Some(context_path)).cell() ImportMapping::PrimaryAlternative(request.into(), Some(context_path)).cell()
} }
/// Creates a direct import mapping to the result of resolving an external /// Creates a direct import mapping to the result of resolving an external
/// request. /// request.
fn external_request_to_import_mapping(request: &str) -> Vc<ImportMapping> { fn external_request_to_import_mapping(request: &str) -> Vc<ImportMapping> {
ImportMapping::External(Some(request.to_string()), ExternalType::CommonJs).into() ImportMapping::External(Some(request.into()), ExternalType::CommonJs).into()
} }


@ -1,6 +1,6 @@
use anyhow::Result; use anyhow::Result;
use indoc::formatdoc; use indoc::formatdoc;
use turbo_tasks::{TryJoinIterExt, ValueToString, Vc}; use turbo_tasks::{RcStr, TryJoinIterExt, ValueToString, Vc};
use turbo_tasks_fs::{File, FileSystemPath}; use turbo_tasks_fs::{File, FileSystemPath};
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
@ -26,7 +26,7 @@ impl ClientReferenceManifest {
pub async fn build_output( pub async fn build_output(
node_root: Vc<FileSystemPath>, node_root: Vc<FileSystemPath>,
client_relative_path: Vc<FileSystemPath>, client_relative_path: Vc<FileSystemPath>,
entry_name: String, entry_name: RcStr,
client_references: Vc<ClientReferenceGraphResult>, client_references: Vc<ClientReferenceGraphResult>,
client_references_chunks: Vc<ClientReferencesChunks>, client_references_chunks: Vc<ClientReferencesChunks>,
client_chunking_context: Vc<Box<dyn ChunkingContext>>, client_chunking_context: Vc<Box<dyn ChunkingContext>>,
@ -39,7 +39,7 @@ impl ClientReferenceManifest {
.computed_asset_prefix() .computed_asset_prefix()
.await? .await?
.as_ref() .as_ref()
.map(|p| p.to_owned()) .map(|p| p.clone())
.unwrap_or_default(); .unwrap_or_default();
entry_manifest.module_loading.cross_origin = next_config entry_manifest.module_loading.cross_origin = next_config
@ -90,6 +90,7 @@ impl ClientReferenceManifest {
// It's possible that a chunk also emits CSS files, that will // It's possible that a chunk also emits CSS files, that will
// be handled separatedly. // be handled separatedly.
.filter(|path| path.ends_with(".js")) .filter(|path| path.ends_with(".js"))
.map(RcStr::from)
.collect::<Vec<_>>() .collect::<Vec<_>>()
} else { } else {
Vec::new() Vec::new()
@ -97,7 +98,7 @@ impl ClientReferenceManifest {
entry_manifest.client_modules.module_exports.insert( entry_manifest.client_modules.module_exports.insert(
get_client_reference_module_key(&server_path, "*"), get_client_reference_module_key(&server_path, "*"),
ManifestNodeEntry { ManifestNodeEntry {
name: "*".to_string(), name: "*".into(),
id: (&*client_module_id).into(), id: (&*client_module_id).into(),
chunks: client_chunks_paths, chunks: client_chunks_paths,
// TODO(WEB-434) // TODO(WEB-434)
@ -134,15 +135,16 @@ impl ClientReferenceManifest {
.iter() .iter()
.filter_map(|chunk_path| node_root_ref.get_path_to(chunk_path)) .filter_map(|chunk_path| node_root_ref.get_path_to(chunk_path))
.map(ToString::to_string) .map(ToString::to_string)
.map(RcStr::from)
.collect::<Vec<_>>() .collect::<Vec<_>>()
} else { } else {
Vec::new() Vec::new()
}; };
let mut ssr_manifest_node = ManifestNode::default(); let mut ssr_manifest_node = ManifestNode::default();
ssr_manifest_node.module_exports.insert( ssr_manifest_node.module_exports.insert(
"*".to_string(), "*".into(),
ManifestNodeEntry { ManifestNodeEntry {
name: "*".to_string(), name: "*".into(),
id: (&*ssr_module_id).into(), id: (&*ssr_module_id).into(),
chunks: ssr_chunks_paths, chunks: ssr_chunks_paths,
// TODO(WEB-434) // TODO(WEB-434)
@ -180,7 +182,7 @@ impl ClientReferenceManifest {
let server_component_name = server_component let server_component_name = server_component
.server_path() .server_path()
.with_extension("".to_string()) .with_extension("".into())
.to_string() .to_string()
.await?; .await?;
@ -196,7 +198,7 @@ impl ClientReferenceManifest {
for chunk_path in client_chunks_paths { for chunk_path in client_chunks_paths {
if let Some(path) = client_relative_path.get_path_to(&chunk_path) { if let Some(path) = client_relative_path.get_path_to(&chunk_path) {
let path = path.to_string(); let path = path.into();
if chunk_path.extension_ref() == Some("css") { if chunk_path.extension_ref() == Some("css") {
entry_css_files.insert(path); entry_css_files.insert(path);
} else { } else {
@ -215,9 +217,10 @@ impl ClientReferenceManifest {
// path still (same as webpack does) // path still (same as webpack does)
let normalized_manifest_entry = entry_name.replace("%5F", "_"); let normalized_manifest_entry = entry_name.replace("%5F", "_");
Ok(Vc::upcast(VirtualOutputAsset::new( Ok(Vc::upcast(VirtualOutputAsset::new(
node_root.join(format!( node_root.join(
"server/app{normalized_manifest_entry}_client-reference-manifest.js", format!("server/app{normalized_manifest_entry}_client-reference-manifest.js",)
)), .into(),
),
AssetContent::file( AssetContent::file(
File::from(formatdoc! { File::from(formatdoc! {
r#" r#"
@ -243,10 +246,10 @@ impl From<&TurbopackModuleId> for ModuleId {
} }
/// See next.js/packages/next/src/lib/client-reference.ts /// See next.js/packages/next/src/lib/client-reference.ts
pub fn get_client_reference_module_key(server_path: &str, export_name: &str) -> String { pub fn get_client_reference_module_key(server_path: &str, export_name: &str) -> RcStr {
if export_name == "*" { if export_name == "*" {
server_path.to_string() server_path.into()
} else { } else {
format!("{}#{}", server_path, export_name) format!("{}#{}", server_path, export_name).into()
} }
} }


@ -6,26 +6,26 @@ use std::collections::HashMap;
use indexmap::{IndexMap, IndexSet}; use indexmap::{IndexMap, IndexSet};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::{trace::TraceRawVcs, TaskInput}; use turbo_tasks::{trace::TraceRawVcs, RcStr, TaskInput};
use crate::next_config::{CrossOriginConfig, Rewrites, RouteHas}; use crate::next_config::{CrossOriginConfig, Rewrites, RouteHas};
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
pub struct PagesManifest { pub struct PagesManifest {
#[serde(flatten)] #[serde(flatten)]
pub pages: HashMap<String, String>, pub pages: HashMap<RcStr, RcStr>,
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct BuildManifest { pub struct BuildManifest {
pub dev_files: Vec<String>, pub dev_files: Vec<RcStr>,
pub amp_dev_files: Vec<String>, pub amp_dev_files: Vec<RcStr>,
pub polyfill_files: Vec<String>, pub polyfill_files: Vec<RcStr>,
pub low_priority_files: Vec<String>, pub low_priority_files: Vec<RcStr>,
pub root_main_files: Vec<String>, pub root_main_files: Vec<RcStr>,
pub pages: HashMap<String, Vec<String>>, pub pages: HashMap<RcStr, Vec<RcStr>>,
pub amp_first_pages: Vec<String>, pub amp_first_pages: Vec<RcStr>,
} }
#[derive(Serialize, Debug)] #[derive(Serialize, Debug)]
@ -62,14 +62,14 @@ impl Default for MiddlewaresManifest {
pub struct MiddlewareMatcher { pub struct MiddlewareMatcher {
// When skipped next.js with fill that during merging. // When skipped next.js with fill that during merging.
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub regexp: Option<String>, pub regexp: Option<RcStr>,
#[serde(skip_serializing_if = "bool_is_true")] #[serde(skip_serializing_if = "bool_is_true")]
pub locale: bool, pub locale: bool,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub has: Option<Vec<RouteHas>>, pub has: Option<Vec<RouteHas>>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub missing: Option<Vec<RouteHas>>, pub missing: Option<Vec<RouteHas>>,
pub original_source: String, pub original_source: RcStr,
} }
fn bool_is_true(b: &bool) -> bool { fn bool_is_true(b: &bool) -> bool {
@ -78,9 +78,9 @@ fn bool_is_true(b: &bool) -> bool {
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
pub struct EdgeFunctionDefinition { pub struct EdgeFunctionDefinition {
pub files: Vec<String>, pub files: Vec<RcStr>,
pub name: String, pub name: RcStr,
pub page: String, pub page: RcStr,
pub matchers: Vec<MiddlewareMatcher>, pub matchers: Vec<MiddlewareMatcher>,
#[serde(skip_serializing_if = "Vec::is_empty")] #[serde(skip_serializing_if = "Vec::is_empty")]
pub wasm: Vec<AssetBinding>, pub wasm: Vec<AssetBinding>,
@ -88,13 +88,13 @@ pub struct EdgeFunctionDefinition {
pub assets: Vec<AssetBinding>, pub assets: Vec<AssetBinding>,
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub regions: Option<Regions>, pub regions: Option<Regions>,
pub env: IndexMap<String, String>, pub env: IndexMap<RcStr, RcStr>,
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
pub struct InstrumentationDefinition { pub struct InstrumentationDefinition {
pub files: Vec<String>, pub files: Vec<RcStr>,
pub name: String, pub name: RcStr,
#[serde(skip_serializing_if = "Vec::is_empty")] #[serde(skip_serializing_if = "Vec::is_empty")]
pub wasm: Vec<AssetBinding>, pub wasm: Vec<AssetBinding>,
#[serde(skip_serializing_if = "Vec::is_empty")] #[serde(skip_serializing_if = "Vec::is_empty")]
@ -104,44 +104,44 @@ pub struct InstrumentationDefinition {
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct AssetBinding { pub struct AssetBinding {
pub name: String, pub name: RcStr,
pub file_path: String, pub file_path: RcStr,
} }
#[derive(Serialize, Debug)] #[derive(Serialize, Debug)]
#[serde(untagged)] #[serde(untagged)]
pub enum Regions { pub enum Regions {
Multiple(Vec<String>), Multiple(Vec<RcStr>),
Single(String), Single(RcStr),
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
pub struct MiddlewaresManifestV2 { pub struct MiddlewaresManifestV2 {
pub sorted_middleware: Vec<String>, pub sorted_middleware: Vec<RcStr>,
pub middleware: HashMap<String, EdgeFunctionDefinition>, pub middleware: HashMap<RcStr, EdgeFunctionDefinition>,
pub instrumentation: Option<InstrumentationDefinition>, pub instrumentation: Option<InstrumentationDefinition>,
pub functions: HashMap<String, EdgeFunctionDefinition>, pub functions: HashMap<RcStr, EdgeFunctionDefinition>,
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ReactLoadableManifest { pub struct ReactLoadableManifest {
#[serde(flatten)] #[serde(flatten)]
pub manifest: HashMap<String, ReactLoadableManifestEntry>, pub manifest: HashMap<RcStr, ReactLoadableManifestEntry>,
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ReactLoadableManifestEntry { pub struct ReactLoadableManifestEntry {
pub id: u32, pub id: u32,
pub files: Vec<String>, pub files: Vec<RcStr>,
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct NextFontManifest { pub struct NextFontManifest {
pub pages: HashMap<String, Vec<String>>, pub pages: HashMap<RcStr, Vec<RcStr>>,
pub app: HashMap<String, Vec<String>>, pub app: HashMap<RcStr, Vec<RcStr>>,
pub app_using_size_adjust: bool, pub app_using_size_adjust: bool,
pub pages_using_size_adjust: bool, pub pages_using_size_adjust: bool,
} }
@ -162,8 +162,8 @@ pub struct AppPathsManifest {
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct LoadableManifest { pub struct LoadableManifest {
pub id: String, pub id: RcStr,
pub files: Vec<String>, pub files: Vec<RcStr>,
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
@ -228,16 +228,16 @@ pub struct ClientReferenceManifest {
pub edge_ssr_module_mapping: HashMap<ModuleId, ManifestNode>, pub edge_ssr_module_mapping: HashMap<ModuleId, ManifestNode>,
/// Mapping of server component path to required CSS client chunks. /// Mapping of server component path to required CSS client chunks.
#[serde(rename = "entryCSSFiles")] #[serde(rename = "entryCSSFiles")]
pub entry_css_files: HashMap<String, IndexSet<String>>, pub entry_css_files: HashMap<RcStr, IndexSet<RcStr>>,
/// Mapping of server component path to required JS client chunks. /// Mapping of server component path to required JS client chunks.
#[serde(rename = "entryJSFiles")] #[serde(rename = "entryJSFiles")]
pub entry_js_files: HashMap<String, IndexSet<String>>, pub entry_js_files: HashMap<RcStr, IndexSet<RcStr>>,
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ModuleLoading { pub struct ModuleLoading {
pub prefix: String, pub prefix: RcStr,
pub cross_origin: Option<CrossOriginConfig>, pub cross_origin: Option<CrossOriginConfig>,
} }
@ -246,7 +246,7 @@ pub struct ModuleLoading {
pub struct ManifestNode { pub struct ManifestNode {
/// Mapping of export name to manifest node entry. /// Mapping of export name to manifest node entry.
#[serde(flatten)] #[serde(flatten)]
pub module_exports: HashMap<String, ManifestNodeEntry>, pub module_exports: HashMap<RcStr, ManifestNodeEntry>,
} }
#[derive(Serialize, Debug, Clone)] #[derive(Serialize, Debug, Clone)]
@ -255,9 +255,9 @@ pub struct ManifestNodeEntry {
/// Turbopack module ID. /// Turbopack module ID.
pub id: ModuleId, pub id: ModuleId,
/// Export name. /// Export name.
pub name: String, pub name: RcStr,
/// Chunks for the module. JS and CSS. /// Chunks for the module. JS and CSS.
pub chunks: Vec<String>, pub chunks: Vec<RcStr>,
// TODO(WEB-434) // TODO(WEB-434)
pub r#async: bool, pub r#async: bool,
} }
@ -266,7 +266,7 @@ pub struct ManifestNodeEntry {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[serde(untagged)] #[serde(untagged)]
pub enum ModuleId { pub enum ModuleId {
String(String), String(RcStr),
Number(u64), Number(u64),
} }
@ -277,14 +277,14 @@ pub struct FontManifest(pub Vec<FontManifestEntry>);
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct FontManifestEntry { pub struct FontManifestEntry {
pub url: String, pub url: RcStr,
pub content: String, pub content: RcStr,
} }
#[derive(Serialize, Default, Debug)] #[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct AppBuildManifest { pub struct AppBuildManifest {
pub pages: HashMap<String, Vec<String>>, pub pages: HashMap<RcStr, Vec<RcStr>>,
} }
// TODO(alexkirsz) Unify with the one for dev. // TODO(alexkirsz) Unify with the one for dev.
@ -294,8 +294,8 @@ pub struct ClientBuildManifest<'a> {
#[serde(rename = "__rewrites")] #[serde(rename = "__rewrites")]
pub rewrites: &'a Rewrites, pub rewrites: &'a Rewrites,
pub sorted_pages: &'a [String], pub sorted_pages: &'a [RcStr],
#[serde(flatten)] #[serde(flatten)]
pub pages: HashMap<String, Vec<&'a str>>, pub pages: HashMap<RcStr, Vec<&'a str>>,
} }

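The manifest structs above keep their `Serialize`/`Deserialize` derives while swapping `String` for `RcStr` inside maps and vectors, which requires the new type to (de)serialize as a plain string. A sketch of how such a newtype can delegate to `str`/`String` (hypothetical, not the actual `turbo_tasks` implementation):

```rust
use std::{collections::HashMap, sync::Arc};

use serde::{Deserialize, Deserializer, Serialize, Serializer};

/// Hypothetical RcStr-like newtype that serializes exactly like a string.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct RcStr(Arc<str>);

impl Serialize for RcStr {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        serializer.serialize_str(&self.0)
    }
}

impl<'de> Deserialize<'de> for RcStr {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let s = String::deserialize(deserializer)?;
        Ok(RcStr(Arc::from(s)))
    }
}

#[derive(Serialize, Default, Debug)]
#[serde(rename_all = "camelCase")]
struct AppBuildManifest {
    pages: HashMap<RcStr, Vec<RcStr>>,
}

fn main() -> serde_json::Result<()> {
    let mut manifest = AppBuildManifest::default();
    manifest.pages.insert(
        RcStr(Arc::from("/page")),
        vec![RcStr(Arc::from("static/chunks/app/page.js"))],
    );

    // The JSON output is indistinguishable from the old String-based structs.
    println!("{}", serde_json::to_string_pretty(&manifest)?);
    Ok(())
}
```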

@ -3,7 +3,7 @@ use std::io::Write;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use indexmap::indexmap; use indexmap::indexmap;
use serde::Serialize; use serde::Serialize;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbo_tasks_fs::FileSystemPath; use turbo_tasks_fs::FileSystemPath;
use turbopack_binding::{ use turbopack_binding::{
turbo::{ turbo::{
@ -33,12 +33,12 @@ use crate::{
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn create_page_ssr_entry_module( pub async fn create_page_ssr_entry_module(
pathname: Vc<String>, pathname: Vc<RcStr>,
reference_type: Value<ReferenceType>, reference_type: Value<ReferenceType>,
project_root: Vc<FileSystemPath>, project_root: Vc<FileSystemPath>,
ssr_module_context: Vc<Box<dyn AssetContext>>, ssr_module_context: Vc<Box<dyn AssetContext>>,
source: Vc<Box<dyn Source>>, source: Vc<Box<dyn Source>>,
next_original_name: Vc<String>, next_original_name: Vc<RcStr>,
pages_structure: Vc<PagesStructure>, pages_structure: Vc<PagesStructure>,
runtime: NextRuntime, runtime: NextRuntime,
next_config: Vc<NextConfig>, next_config: Vc<NextConfig>,
@ -76,12 +76,12 @@ pub async fn create_page_ssr_entry_module(
let mut replacements = indexmap! { let mut replacements = indexmap! {
"VAR_DEFINITION_PAGE" => definition_page.clone(), "VAR_DEFINITION_PAGE" => definition_page.clone(),
"VAR_DEFINITION_PATHNAME" => definition_pathname.clone(), "VAR_DEFINITION_PATHNAME" => definition_pathname.clone(),
"VAR_USERLAND" => INNER.to_string(), "VAR_USERLAND" => INNER.into(),
}; };
if reference_type == ReferenceType::Entry(EntryReferenceSubType::Page) { if reference_type == ReferenceType::Entry(EntryReferenceSubType::Page) {
replacements.insert("VAR_MODULE_DOCUMENT", INNER_DOCUMENT.to_string()); replacements.insert("VAR_MODULE_DOCUMENT", INNER_DOCUMENT.into());
replacements.insert("VAR_MODULE_APP", INNER_APP.to_string()); replacements.insert("VAR_MODULE_APP", INNER_APP.into());
} }
// Load the file from the next.js codebase. // Load the file from the next.js codebase.
@ -119,12 +119,12 @@ pub async fn create_page_ssr_entry_module(
} }
let mut inner_assets = indexmap! { let mut inner_assets = indexmap! {
INNER.to_string() => ssr_module, INNER.into() => ssr_module,
}; };
if reference_type == ReferenceType::Entry(EntryReferenceSubType::Page) { if reference_type == ReferenceType::Entry(EntryReferenceSubType::Page) {
inner_assets.insert( inner_assets.insert(
INNER_DOCUMENT.to_string(), INNER_DOCUMENT.into(),
process_global_item( process_global_item(
pages_structure.document(), pages_structure.document(),
Value::new(reference_type.clone()), Value::new(reference_type.clone()),
@ -132,7 +132,7 @@ pub async fn create_page_ssr_entry_module(
), ),
); );
inner_assets.insert( inner_assets.insert(
INNER_APP.to_string(), INNER_APP.into(),
process_global_item( process_global_item(
pages_structure.app(), pages_structure.app(),
Value::new(reference_type.clone()), Value::new(reference_type.clone()),
@ -165,7 +165,7 @@ pub async fn create_page_ssr_entry_module(
ssr_module_context, ssr_module_context,
project_root, project_root,
ssr_module, ssr_module,
definition_pathname.to_string(), definition_pathname.clone(),
); );
} }
} }
@ -197,8 +197,8 @@ async fn wrap_edge_page(
context: Vc<Box<dyn AssetContext>>, context: Vc<Box<dyn AssetContext>>,
project_root: Vc<FileSystemPath>, project_root: Vc<FileSystemPath>,
entry: Vc<Box<dyn Module>>, entry: Vc<Box<dyn Module>>,
page: String, page: RcStr,
pathname: String, pathname: RcStr,
reference_type: Value<ReferenceType>, reference_type: Value<ReferenceType>,
pages_structure: Vc<PagesStructure>, pages_structure: Vc<PagesStructure>,
next_config: Vc<NextConfig>, next_config: Vc<NextConfig>,
@ -226,20 +226,20 @@ async fn wrap_edge_page(
"edge-ssr.js", "edge-ssr.js",
project_root, project_root,
indexmap! { indexmap! {
"VAR_USERLAND" => INNER.to_string(), "VAR_USERLAND" => INNER.into(),
"VAR_PAGE" => pathname.clone(), "VAR_PAGE" => pathname.clone(),
"VAR_MODULE_DOCUMENT" => INNER_DOCUMENT.to_string(), "VAR_MODULE_DOCUMENT" => INNER_DOCUMENT.into(),
"VAR_MODULE_APP" => INNER_APP.to_string(), "VAR_MODULE_APP" => INNER_APP.into(),
"VAR_MODULE_GLOBAL_ERROR" => INNER_ERROR.to_string(), "VAR_MODULE_GLOBAL_ERROR" => INNER_ERROR.into(),
}, },
indexmap! { indexmap! {
"pagesType" => StringifyJs("pages").to_string(), "pagesType" => StringifyJs("pages").to_string().into(),
"sriEnabled" => serde_json::Value::Bool(sri_enabled).to_string(), "sriEnabled" => serde_json::Value::Bool(sri_enabled).to_string().into(),
"nextConfig" => serde_json::to_string(next_config)?, "nextConfig" => serde_json::to_string(next_config)?.into(),
"dev" => serde_json::Value::Bool(dev).to_string(), "dev" => serde_json::Value::Bool(dev).to_string().into(),
"pageRouteModuleOptions" => serde_json::to_string(&get_route_module_options(page.clone(), pathname.clone()))?, "pageRouteModuleOptions" => serde_json::to_string(&get_route_module_options(page.clone(), pathname.clone()))?.into(),
"errorRouteModuleOptions" => serde_json::to_string(&get_route_module_options("/_error".to_string(), "/_error".to_string()))?, "errorRouteModuleOptions" => serde_json::to_string(&get_route_module_options("/_error".into(), "/_error".into()))?.into(),
"user500RouteModuleOptions" => serde_json::to_string(&get_route_module_options("/500".to_string(), "/500".to_string()))?, "user500RouteModuleOptions" => serde_json::to_string(&get_route_module_options("/500".into(), "/500".into()))?.into(),
}, },
indexmap! { indexmap! {
// TODO // TODO
@ -250,10 +250,10 @@ async fn wrap_edge_page(
.await?; .await?;
let inner_assets = indexmap! { let inner_assets = indexmap! {
INNER.to_string() => entry, INNER.into() => entry,
INNER_DOCUMENT.to_string() => process_global_item(pages_structure.document(), reference_type.clone(), context), INNER_DOCUMENT.into() => process_global_item(pages_structure.document(), reference_type.clone(), context),
INNER_APP.to_string() => process_global_item(pages_structure.app(), reference_type.clone(), context), INNER_APP.into() => process_global_item(pages_structure.app(), reference_type.clone(), context),
INNER_ERROR.to_string() => process_global_item(pages_structure.error(), reference_type.clone(), context), INNER_ERROR.into() => process_global_item(pages_structure.error(), reference_type.clone(), context),
}; };
let wrapped = context let wrapped = context
@ -278,27 +278,27 @@ struct PartialRouteModuleOptions {
#[derive(Serialize)] #[derive(Serialize)]
struct RouteDefinition { struct RouteDefinition {
kind: String, kind: RcStr,
bundle_path: String, bundle_path: RcStr,
filename: String, filename: RcStr,
/// Describes the pathname including all internal modifiers such as /// Describes the pathname including all internal modifiers such as
/// intercepting routes, parallel routes and route/page suffixes that are /// intercepting routes, parallel routes and route/page suffixes that are
/// not part of the pathname. /// not part of the pathname.
page: String, page: RcStr,
/// The pathname (including dynamic placeholders) for a route to resolve. /// The pathname (including dynamic placeholders) for a route to resolve.
pathname: String, pathname: RcStr,
} }
fn get_route_module_options(page: String, pathname: String) -> PartialRouteModuleOptions { fn get_route_module_options(page: RcStr, pathname: RcStr) -> PartialRouteModuleOptions {
PartialRouteModuleOptions { PartialRouteModuleOptions {
definition: RouteDefinition { definition: RouteDefinition {
kind: "PAGES".to_string(), kind: "PAGES".into(),
page, page,
pathname, pathname,
// The following aren't used in production. // The following aren't used in production.
bundle_path: "".to_string(), bundle_path: "".into(),
filename: "".to_string(), filename: "".into(),
}, },
} }
} }


@ -1,5 +1,5 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbopack::node::route_matcher::{Params, RouteMatcher, RouteMatcherRef}; use turbopack_binding::turbopack::node::route_matcher::{Params, RouteMatcher, RouteMatcherRef};
use self::{ use self::{
@ -15,13 +15,13 @@ mod prefix_suffix;
/// A route matcher that matches a path against an exact route. /// A route matcher that matches a path against an exact route.
#[turbo_tasks::value] #[turbo_tasks::value]
pub(crate) struct NextExactMatcher { pub(crate) struct NextExactMatcher {
path: Vc<String>, path: Vc<RcStr>,
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl NextExactMatcher { impl NextExactMatcher {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn new(path: Vc<String>) -> Result<Vc<Self>> { pub async fn new(path: Vc<RcStr>) -> Result<Vc<Self>> {
Ok(Self::cell(NextExactMatcher { path })) Ok(Self::cell(NextExactMatcher { path }))
} }
} }
@ -29,12 +29,12 @@ impl NextExactMatcher {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl RouteMatcher for NextExactMatcher { impl RouteMatcher for NextExactMatcher {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn matches(&self, path: String) -> Result<Vc<bool>> { async fn matches(&self, path: RcStr) -> Result<Vc<bool>> {
Ok(Vc::cell(path == *self.path.await?)) Ok(Vc::cell(path == *self.path.await?))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
async fn params(&self, path: String) -> Result<Vc<Params>> { async fn params(&self, path: RcStr) -> Result<Vc<Params>> {
Ok(Vc::cell(if path == *self.path.await? { Ok(Vc::cell(if path == *self.path.await? {
Some(Default::default()) Some(Default::default())
} else { } else {
@ -53,7 +53,7 @@ pub(crate) struct NextParamsMatcher {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl NextParamsMatcher { impl NextParamsMatcher {
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn new(path: Vc<String>) -> Result<Vc<Self>> { pub async fn new(path: Vc<RcStr>) -> Result<Vc<Self>> {
Ok(Self::cell(NextParamsMatcher { Ok(Self::cell(NextParamsMatcher {
matcher: build_path_regex(path.await?.as_str())?, matcher: build_path_regex(path.await?.as_str())?,
})) }))
@ -63,12 +63,12 @@ impl NextParamsMatcher {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl RouteMatcher for NextParamsMatcher { impl RouteMatcher for NextParamsMatcher {
#[turbo_tasks::function] #[turbo_tasks::function]
fn matches(&self, path: String) -> Vc<bool> { fn matches(&self, path: RcStr) -> Vc<bool> {
Vc::cell(self.matcher.matches(&path)) Vc::cell(self.matcher.matches(&path))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn params(&self, path: String) -> Vc<Params> { fn params(&self, path: RcStr) -> Vc<Params> {
Params::cell(self.matcher.params(&path)) Params::cell(self.matcher.params(&path))
} }
} }
@ -86,7 +86,7 @@ impl NextPrefixSuffixParamsMatcher {
/// Converts a filename within the server root into a regular expression /// Converts a filename within the server root into a regular expression
/// with named capture groups for every dynamic segment. /// with named capture groups for every dynamic segment.
#[turbo_tasks::function] #[turbo_tasks::function]
pub async fn new(path: Vc<String>, prefix: String, suffix: String) -> Result<Vc<Self>> { pub async fn new(path: Vc<RcStr>, prefix: RcStr, suffix: RcStr) -> Result<Vc<Self>> {
Ok(Self::cell(NextPrefixSuffixParamsMatcher { Ok(Self::cell(NextPrefixSuffixParamsMatcher {
matcher: PrefixSuffixMatcher::new( matcher: PrefixSuffixMatcher::new(
prefix.to_string(), prefix.to_string(),
@ -100,12 +100,12 @@ impl NextPrefixSuffixParamsMatcher {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl RouteMatcher for NextPrefixSuffixParamsMatcher { impl RouteMatcher for NextPrefixSuffixParamsMatcher {
#[turbo_tasks::function] #[turbo_tasks::function]
fn matches(&self, path: String) -> Vc<bool> { fn matches(&self, path: RcStr) -> Vc<bool> {
Vc::cell(self.matcher.matches(&path)) Vc::cell(self.matcher.matches(&path))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn params(&self, path: String) -> Vc<Params> { fn params(&self, path: RcStr) -> Vc<Params> {
Params::cell(self.matcher.params(&path)) Params::cell(self.matcher.params(&path))
} }
} }
@ -128,12 +128,12 @@ impl NextFallbackMatcher {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl RouteMatcher for NextFallbackMatcher { impl RouteMatcher for NextFallbackMatcher {
#[turbo_tasks::function] #[turbo_tasks::function]
fn matches(&self, path: String) -> Vc<bool> { fn matches(&self, path: RcStr) -> Vc<bool> {
Vc::cell(self.matcher.matches(&path)) Vc::cell(self.matcher.matches(&path))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn params(&self, path: String) -> Vc<Params> { fn params(&self, path: RcStr) -> Vc<Params> {
Params::cell(self.matcher.params(&path)) Params::cell(self.matcher.params(&path))
} }
} }


@ -47,14 +47,14 @@ impl RouteMatcherRef for PathRegex {
} }
let value = capture.get(idx + 1)?; let value = capture.get(idx + 1)?;
Some(( Some((
param.name.to_string(), param.name.as_str().into(),
match param.kind { match param.kind {
NamedParamKind::Single => Param::Single(value.as_str().to_string()), NamedParamKind::Single => Param::Single(value.as_str().into()),
NamedParamKind::Multi => Param::Multi( NamedParamKind::Multi => Param::Multi(
value value
.as_str() .as_str()
.split('/') .split('/')
.map(|segment| segment.to_string()) .map(|segment| segment.into())
.collect(), .collect(),
), ),
}, },


@ -2,7 +2,7 @@ use std::iter::once;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use indexmap::IndexMap; use indexmap::IndexMap;
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbo_tasks_fs::FileSystem; use turbo_tasks_fs::FileSystem;
use turbopack_binding::{ use turbopack_binding::{
turbo::{ turbo::{
@ -89,12 +89,12 @@ pub enum ServerContextType {
}, },
AppRSC { AppRSC {
app_dir: Vc<FileSystemPath>, app_dir: Vc<FileSystemPath>,
ecmascript_client_reference_transition_name: Option<Vc<String>>, ecmascript_client_reference_transition_name: Option<Vc<RcStr>>,
client_transition: Option<Vc<Box<dyn Transition>>>, client_transition: Option<Vc<Box<dyn Transition>>>,
}, },
AppRoute { AppRoute {
app_dir: Vc<FileSystemPath>, app_dir: Vc<FileSystemPath>,
ecmascript_client_reference_transition_name: Option<Vc<String>>, ecmascript_client_reference_transition_name: Option<Vc<RcStr>>,
}, },
Middleware, Middleware,
Instrumentation, Instrumentation,
@ -139,9 +139,9 @@ pub async fn get_server_resolve_options_context(
); );
// Always load these predefined packages as external. // Always load these predefined packages as external.
let mut external_packages: Vec<String> = load_next_js_templateon( let mut external_packages: Vec<RcStr> = load_next_js_templateon(
project_path, project_path,
"dist/lib/server-external-packages.json".to_string(), "dist/lib/server-external-packages.json".into(),
) )
.await?; .await?;
@ -174,16 +174,17 @@ pub async fn get_server_resolve_options_context(
); );
let ty = ty.into_value(); let ty = ty.into_value();
let mut custom_conditions = vec![mode.await?.condition().to_string()]; let mut custom_conditions = vec![mode.await?.condition().to_string().into()];
custom_conditions.extend( custom_conditions.extend(
NextRuntime::NodeJs NextRuntime::NodeJs
.conditions() .conditions()
.iter() .iter()
.map(ToString::to_string), .map(ToString::to_string)
.map(RcStr::from),
); );
if ty.supports_react_server() { if ty.supports_react_server() {
custom_conditions.push("react-server".to_string()); custom_conditions.push("react-server".into());
}; };
let external_cjs_modules_plugin = if *next_config.bundle_pages_router_dependencies().await? { let external_cjs_modules_plugin = if *next_config.bundle_pages_router_dependencies().await? {
@ -305,17 +306,17 @@ pub async fn get_server_resolve_options_context(
.cell()) .cell())
} }
fn defines(define_env: &IndexMap<String, String>) -> CompileTimeDefines { fn defines(define_env: &IndexMap<RcStr, RcStr>) -> CompileTimeDefines {
let mut defines = IndexMap::new(); let mut defines = IndexMap::new();
for (k, v) in define_env { for (k, v) in define_env {
defines defines
.entry(k.split('.').map(|s| s.to_string()).collect::<Vec<String>>()) .entry(k.split('.').map(|s| s.into()).collect::<Vec<RcStr>>())
.or_insert_with(|| { .or_insert_with(|| {
let val = serde_json::from_str(v); let val = serde_json::from_str(v);
match val { match val {
Ok(serde_json::Value::Bool(v)) => CompileTimeDefineValue::Bool(v), Ok(serde_json::Value::Bool(v)) => CompileTimeDefineValue::Bool(v),
Ok(serde_json::Value::String(v)) => CompileTimeDefineValue::String(v), Ok(serde_json::Value::String(v)) => CompileTimeDefineValue::String(v.into()),
_ => CompileTimeDefineValue::JSON(v.clone()), _ => CompileTimeDefineValue::JSON(v.clone()),
} }
}); });
@ -402,8 +403,14 @@ pub async fn get_server_module_options_context(
let enable_postcss_transform = Some(postcss_transform_options.cell()); let enable_postcss_transform = Some(postcss_transform_options.cell());
let enable_foreign_postcss_transform = Some(postcss_foreign_transform_options.cell()); let enable_foreign_postcss_transform = Some(postcss_foreign_transform_options.cell());
let mut conditions = vec![mode.await?.condition().to_string()]; let mut conditions = vec![mode.await?.condition().into()];
conditions.extend(next_runtime.conditions().iter().map(ToString::to_string)); conditions.extend(
next_runtime
.conditions()
.iter()
.map(ToString::to_string)
.map(RcStr::from),
);
// A separate webpack rules will be applied to codes matching // A separate webpack rules will be applied to codes matching
// foreign_code_context_condition. This allows to import codes from // foreign_code_context_condition. This allows to import codes from
@ -416,7 +423,7 @@ pub async fn get_server_module_options_context(
conditions conditions
.iter() .iter()
.cloned() .cloned()
.chain(once("foreign".to_string())) .chain(once("foreign".into()))
.collect(), .collect(),
) )
.await?; .await?;
@ -809,7 +816,7 @@ pub async fn get_server_chunking_context_with_client_assets(
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
node_root: Vc<FileSystemPath>, node_root: Vc<FileSystemPath>,
client_root: Vc<FileSystemPath>, client_root: Vc<FileSystemPath>,
asset_prefix: Vc<Option<String>>, asset_prefix: Vc<Option<RcStr>>,
environment: Vc<Environment>, environment: Vc<Environment>,
) -> Result<Vc<NodeJsChunkingContext>> { ) -> Result<Vc<NodeJsChunkingContext>> {
let next_mode = mode.await?; let next_mode = mode.await?;
@ -820,8 +827,8 @@ pub async fn get_server_chunking_context_with_client_assets(
project_path, project_path,
node_root, node_root,
client_root, client_root,
node_root.join("server/chunks/ssr".to_string()), node_root.join("server/chunks/ssr".into()),
client_root.join("static/media".to_string()), client_root.join("static/media".into()),
environment, environment,
next_mode.runtime_type(), next_mode.runtime_type(),
) )
@ -845,8 +852,8 @@ pub async fn get_server_chunking_context(
project_path, project_path,
node_root, node_root,
node_root, node_root,
node_root.join("server/chunks".to_string()), node_root.join("server/chunks".into()),
node_root.join("server/assets".to_string()), node_root.join("server/assets".into()),
environment, environment,
next_mode.runtime_type(), next_mode.runtime_type(),
) )
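Several hunks above build `Vec<RcStr>` condition lists by converting each element at the boundary: one owned value via `.into()`, the rest through `.map(ToString::to_string).map(RcStr::from)`. A sketch of that pattern, with the condition sources passed in as plain `&str` slices purely for illustration:

```rust
use turbo_tasks::RcStr;

// Sketch of the condition-list pattern above; the inputs are hypothetical
// parameters, not the real `mode` / `NextRuntime` APIs.
fn build_conditions(mode_condition: &str, runtime_conditions: &[&str]) -> Vec<RcStr> {
    let mut conditions: Vec<RcStr> = vec![mode_condition.into()];
    conditions.extend(
        runtime_conditions
            .iter()
            .map(ToString::to_string)
            .map(RcStr::from),
    );
    conditions.push("react-server".into());
    conditions
}
```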

View file

@ -1,6 +1,6 @@
use anyhow::Result; use anyhow::Result;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use turbo_tasks::{trace::TraceRawVcs, Value, Vc}; use turbo_tasks::{trace::TraceRawVcs, RcStr, Value, Vc};
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{glob::Glob, FileJsonContent, FileSystemPath}, turbo::tasks_fs::{glob::Glob, FileJsonContent, FileSystemPath},
turbopack::core::{ turbopack::core::{
@ -24,9 +24,9 @@ use turbopack_binding::{
#[turbo_tasks::value(into = "shared")] #[turbo_tasks::value(into = "shared")]
pub enum ExternalPredicate { pub enum ExternalPredicate {
/// Mark all modules as external if they're not listed in the list. /// Mark all modules as external if they're not listed in the list.
AllExcept(Vc<Vec<String>>), AllExcept(Vc<Vec<RcStr>>),
/// Only mark modules listed as external. /// Only mark modules listed as external.
Only(Vc<Vec<String>>), Only(Vc<Vec<RcStr>>),
} }
/// Mark modules as external, so they're resolved at runtime instead of bundled. /// Mark modules as external, so they're resolved at runtime instead of bundled.
@ -62,7 +62,7 @@ impl ExternalCjsModulesResolvePlugin {
#[turbo_tasks::function] #[turbo_tasks::function]
fn condition(root: Vc<FileSystemPath>) -> Vc<AfterResolvePluginCondition> { fn condition(root: Vc<FileSystemPath>) -> Vc<AfterResolvePluginCondition> {
AfterResolvePluginCondition::new(root, Glob::new("**/node_modules/**".to_string())) AfterResolvePluginCondition::new(root, Glob::new("**/node_modules/**".into()))
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -91,7 +91,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
// from https://github.com/vercel/next.js/blob/8d1c619ad650f5d147207f267441caf12acd91d1/packages/next/src/build/handle-externals.ts#L188 // from https://github.com/vercel/next.js/blob/8d1c619ad650f5d147207f267441caf12acd91d1/packages/next/src/build/handle-externals.ts#L188
let never_external_regex = lazy_regex::regex!("^(?:private-next-pages\\/|next\\/(?:dist\\/pages\\/|(?:app|document|link|image|legacy\\/image|constants|dynamic|script|navigation|headers|router)$)|string-hash|private-next-rsc-action-validate|private-next-rsc-action-client-wrapper|private-next-rsc-server-reference$)"); let never_external_regex = lazy_regex::regex!("^(?:private-next-pages\\/|next\\/(?:dist\\/pages\\/|(?:app|document|link|image|legacy\\/image|constants|dynamic|script|navigation|headers|router)$)|string-hash|private-next-rsc-action-validate|private-next-rsc-action-client-wrapper|private-next-rsc-server-reference$)");
let request_str = request_value.request(); let request_str = request_value.request().map(|v| v.into_owned());
let Some(mut request_str) = request_str else { let Some(mut request_str) = request_str else {
return Ok(ResolveResultOption::none()); return Ok(ResolveResultOption::none());
}; };
@ -189,12 +189,12 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
Ok(FileType::UnsupportedExtension) Ok(FileType::UnsupportedExtension)
} }
let unable_to_externalize = |request_str: String, reason: &str| { let unable_to_externalize = |request_str: RcStr, reason: &str| {
if must_be_external { if must_be_external {
UnableToExternalize { UnableToExternalize {
file_path: fs_path, file_path: fs_path,
request: request_str, request: request_str,
reason: reason.to_string(), reason: reason.into(),
} }
.cell() .cell()
.emit(); .emit();
@ -224,12 +224,12 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
// have an extension in the request we try to append ".js" // have an extension in the request we try to append ".js"
// automatically // automatically
request_str.push_str(".js"); request_str.push_str(".js");
request = request.append_path(".js".to_string()).resolve().await?; request = request.append_path(".js".into()).resolve().await?;
continue; continue;
} }
// this can't resolve with node.js from the original location, so bundle it // this can't resolve with node.js from the original location, so bundle it
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The request could not be resolved by Node.js from the importing module. The \ "The request could not be resolved by Node.js from the importing module. The \
way Node.js resolves modules is slightly different from the way Next.js \ way Node.js resolves modules is slightly different from the way Next.js \
resolves modules. Next.js was able to resolve it, while Node.js would not be \ resolves modules. Next.js was able to resolve it, while Node.js would not be \
@ -249,7 +249,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
let Some(result) = *node_resolved.first_source().await? else { let Some(result) = *node_resolved.first_source().await? else {
// this can't resolve with node.js from the project directory, so bundle it // this can't resolve with node.js from the project directory, so bundle it
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The request could not be resolved by Node.js from the project \ "The request could not be resolved by Node.js from the project \
directory.\nPackages that should be external need to be installed in the project \ directory.\nPackages that should be external need to be installed in the project \
directory, so they can be resolved from the output files.\nTry to install the \ directory, so they can be resolved from the output files.\nTry to install the \
@ -276,7 +276,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
let FindContextFileResult::Found(package_json_file, _) = *package_json_file.await? let FindContextFileResult::Found(package_json_file, _) = *package_json_file.await?
else { else {
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The package.json of the package resolved from the project directory can't be \ "The package.json of the package resolved from the project directory can't be \
found.", found.",
); );
@ -285,7 +285,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
*package_json_from_original_location.await? *package_json_from_original_location.await?
else { else {
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The package.json of the package can't be found.", "The package.json of the package can't be found.",
); );
}; };
@ -293,7 +293,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
&*package_json_file.read_json().await? &*package_json_file.read_json().await?
else { else {
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The package.json of the package resolved from project directory can't be \ "The package.json of the package resolved from project directory can't be \
parsed.", parsed.",
); );
@ -302,7 +302,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
&*package_json_from_original_location.read_json().await? &*package_json_from_original_location.read_json().await?
else { else {
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The package.json of the package can't be parsed.", "The package.json of the package can't be parsed.",
); );
}; };
@ -311,7 +311,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
package_json_file.get("version"), package_json_file.get("version"),
) else { ) else {
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The package.json of the package has not name or version.", "The package.json of the package has not name or version.",
); );
}; };
@ -320,7 +320,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
package_json_from_original_location.get("version"), package_json_from_original_location.get("version"),
) else { ) else {
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The package.json of the package resolved from project directory has not name \ "The package.json of the package resolved from project directory has not name \
or version.", or version.",
); );
@ -328,7 +328,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
if (name, version) != (name2, version2) { if (name, version) != (name2, version2) {
// this can't resolve with node.js from the original location, so bundle it // this can't resolve with node.js from the original location, so bundle it
return unable_to_externalize( return unable_to_externalize(
request_str, request_str.into(),
"The package resolves to a different version when requested from the project \ "The package resolves to a different version when requested from the project \
directory compared to the package requested from the importing module.\nMake \ directory compared to the package requested from the importing module.\nMake \
sure to install the same version of the package in both locations.", sure to install the same version of the package in both locations.",
@ -342,19 +342,22 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
(FileType::UnsupportedExtension, _) => { (FileType::UnsupportedExtension, _) => {
// unsupported file type, bundle it // unsupported file type, bundle it
unable_to_externalize( unable_to_externalize(
request_str, request_str.into(),
"Only .mjs, .cjs, .js, .json, or .node can be handled by Node.js.", "Only .mjs, .cjs, .js, .json, or .node can be handled by Node.js.",
) )
} }
(FileType::InvalidPackageJson, _) => { (FileType::InvalidPackageJson, _) => {
// invalid package.json, bundle it // invalid package.json, bundle it
unable_to_externalize(request_str, "The package.json can't be found or parsed.") unable_to_externalize(
request_str.into(),
"The package.json can't be found or parsed.",
)
} }
(FileType::CommonJs, false) => { (FileType::CommonJs, false) => {
// mark as external // mark as external
Ok(ResolveResultOption::some( Ok(ResolveResultOption::some(
ResolveResult::primary(ResolveResultItem::External( ResolveResult::primary(ResolveResultItem::External(
request_str, request_str.into(),
ExternalType::CommonJs, ExternalType::CommonJs,
)) ))
.cell(), .cell(),
@ -391,7 +394,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
// mark as external // mark as external
Ok(ResolveResultOption::some( Ok(ResolveResultOption::some(
ResolveResult::primary(ResolveResultItem::External( ResolveResult::primary(ResolveResultItem::External(
request_str, request_str.into(),
if resolves_equal { if resolves_equal {
ExternalType::CommonJs ExternalType::CommonJs
} else { } else {
@ -406,7 +409,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
// mark as external // mark as external
Ok(ResolveResultOption::some( Ok(ResolveResultOption::some(
ResolveResult::primary(ResolveResultItem::External( ResolveResult::primary(ResolveResultItem::External(
request_str, request_str.into(),
ExternalType::EcmaScriptModule, ExternalType::EcmaScriptModule,
)) ))
.cell(), .cell(),
@ -416,7 +419,7 @@ impl AfterResolvePlugin for ExternalCjsModulesResolvePlugin {
// even with require() this resolves to a ESM, // even with require() this resolves to a ESM,
// which would break node.js, bundle it // which would break node.js, bundle it
unable_to_externalize( unable_to_externalize(
request_str, request_str.into(),
"The package seems invalid. require() resolves to a EcmaScript module, which \ "The package seems invalid. require() resolves to a EcmaScript module, which \
would result in an error in Node.js.", would result in an error in Node.js.",
) )
@ -436,17 +439,14 @@ pub struct PackagesGlobs {
pub struct OptionPackagesGlobs(Option<PackagesGlobs>); pub struct OptionPackagesGlobs(Option<PackagesGlobs>);
#[turbo_tasks::function] #[turbo_tasks::function]
async fn packages_glob(packages: Vc<Vec<String>>) -> Result<Vc<OptionPackagesGlobs>> { async fn packages_glob(packages: Vc<Vec<RcStr>>) -> Result<Vc<OptionPackagesGlobs>> {
let packages = packages.await?; let packages = packages.await?;
if packages.is_empty() { if packages.is_empty() {
return Ok(Vc::cell(None)); return Ok(Vc::cell(None));
} }
let path_glob = Glob::new(format!("**/node_modules/{{{}}}/**", packages.join(","))); let path_glob = Glob::new(format!("**/node_modules/{{{}}}/**", packages.join(",")).into());
let request_glob = Glob::new(format!( let request_glob =
"{{{},{}/**}}", Glob::new(format!("{{{},{}/**}}", packages.join(","), packages.join("/**,")).into());
packages.join(","),
packages.join("/**,")
));
Ok(Vc::cell(Some(PackagesGlobs { Ok(Vc::cell(Some(PackagesGlobs {
path_glob: path_glob.resolve().await?, path_glob: path_glob.resolve().await?,
request_glob: request_glob.resolve().await?, request_glob: request_glob.resolve().await?,
@ -456,8 +456,8 @@ async fn packages_glob(packages: Vc<Vec<String>>) -> Result<Vc<OptionPackagesGlo
#[turbo_tasks::value] #[turbo_tasks::value]
struct UnableToExternalize { struct UnableToExternalize {
file_path: Vc<FileSystemPath>, file_path: Vc<FileSystemPath>,
request: String, request: RcStr,
reason: String, reason: RcStr,
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -476,17 +476,18 @@ impl Issue for UnableToExternalize {
.take(2) .take(2)
.intersperse("/") .intersperse("/")
.collect::<String>() .collect::<String>()
.into()
} else if let Some((package, _)) = request.split_once('/') { } else if let Some((package, _)) = request.split_once('/') {
package.to_string() package.into()
} else { } else {
request.to_string() request.clone()
}; };
Ok(StyledString::Line(vec![ Ok(StyledString::Line(vec![
StyledString::Text("Package ".to_string()), StyledString::Text("Package ".into()),
StyledString::Code(package), StyledString::Code(package),
StyledString::Text(" (".to_string()), StyledString::Text(" (".into()),
StyledString::Code("serverExternalPackages".to_string()), StyledString::Code("serverExternalPackages".into()),
StyledString::Text(" or default list) can't be external".to_string()), StyledString::Text(" or default list) can't be external".into()),
]) ])
.cell()) .cell())
} }
@ -506,15 +507,13 @@ impl Issue for UnableToExternalize {
Vc::cell(Some( Vc::cell(Some(
StyledString::Stack(vec![ StyledString::Stack(vec![
StyledString::Line(vec![ StyledString::Line(vec![
StyledString::Text("The request ".to_string()), StyledString::Text("The request ".into()),
StyledString::Code(self.request.to_string()), StyledString::Code(self.request.clone()),
StyledString::Text(" matches ".to_string()), StyledString::Text(" matches ".into()),
StyledString::Code("serverExternalPackages".to_string()), StyledString::Code("serverExternalPackages".into()),
StyledString::Text( StyledString::Text(" (or the default list), but it can't be external:".into()),
" (or the default list), but it can't be external:".to_string(),
),
]), ]),
StyledString::Line(vec![StyledString::Text(self.reason.to_string())]), StyledString::Line(vec![StyledString::Text(self.reason.clone())]),
]) ])
.cell(), .cell(),
)) ))
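In the issue type above, `request` and `reason` become `RcStr`, so building the styled output only needs cheap clones instead of fresh `to_string()` allocations. A reduced sketch of that shape (struct name and import path are assumed here, not taken from the diff):

```rust
use turbo_tasks::RcStr;
// Import path assumed for this sketch; it is not visible in the hunk above.
use turbopack_binding::turbopack::core::issue::StyledString;

// Reduced stand-in for the issue type above: `RcStr` fields are cloned
// cheaply into the styled output instead of being re-allocated.
struct ExternalizeFailure {
    request: RcStr,
    reason: RcStr,
}

impl ExternalizeFailure {
    fn description(&self) -> StyledString {
        StyledString::Line(vec![
            StyledString::Text("The request ".into()),
            StyledString::Code(self.request.clone()),
            StyledString::Text(" can't be external: ".into()),
            StyledString::Text(self.reason.clone()),
        ])
    }
}
```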

View file

@ -2,7 +2,7 @@ use std::collections::BTreeMap;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use indoc::formatdoc; use indoc::formatdoc;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbo_tasks_fs::FileSystemPath; use turbo_tasks_fs::FileSystemPath;
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::{ core::{
@ -28,8 +28,8 @@ use turbopack_binding::turbopack::{
use super::server_component_reference::NextServerComponentModuleReference; use super::server_component_reference::NextServerComponentModuleReference;
#[turbo_tasks::function] #[turbo_tasks::function]
fn modifier() -> Vc<String> { fn modifier() -> Vc<RcStr> {
Vc::cell("Next.js server component".to_string()) Vc::cell("Next.js server component".into())
} }
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
@ -96,7 +96,7 @@ impl EcmascriptChunkPlaceable for NextServerComponentModule {
#[turbo_tasks::function] #[turbo_tasks::function]
fn get_exports(&self) -> Vc<EcmascriptExports> { fn get_exports(&self) -> Vc<EcmascriptExports> {
let exports = BTreeMap::from([( let exports = BTreeMap::from([(
"default".to_string(), "default".into(),
EsmExport::ImportedNamespace(Vc::upcast(NextServerComponentModuleReference::new( EsmExport::ImportedNamespace(Vc::upcast(NextServerComponentModuleReference::new(
Vc::upcast(self.module), Vc::upcast(self.module),
))), ))),

View file

@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use turbo_tasks::{ValueToString, Vc}; use turbo_tasks::{RcStr, ValueToString, Vc};
use turbopack_binding::turbopack::core::{ use turbopack_binding::turbopack::core::{
chunk::ChunkableModuleReference, module::Module, reference::ModuleReference, chunk::ChunkableModuleReference, module::Module, reference::ModuleReference,
resolve::ModuleResolveResult, resolve::ModuleResolveResult,
@ -21,11 +21,14 @@ impl NextServerComponentModuleReference {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl ValueToString for NextServerComponentModuleReference { impl ValueToString for NextServerComponentModuleReference {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn to_string(&self) -> Result<Vc<String>> { async fn to_string(&self) -> Result<Vc<RcStr>> {
Ok(Vc::cell(format!( Ok(Vc::cell(
"Next.js server component {}", format!(
self.asset.ident().to_string().await? "Next.js server component {}",
))) self.asset.ident().to_string().await?
)
.into(),
))
} }
} }
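These two files show the return-type side of the change: task functions that used to produce `Vc<String>` now produce `Vc<RcStr>`, with constants and `format!` output converted via `.into()`. A sketch mirroring both cases (`describe` is a hypothetical name, not a function from the diff):

```rust
use anyhow::Result;
use turbo_tasks::{RcStr, Vc};

// `modifier` mirrors the hunk above; `describe` shows the
// `format!(..).into()` case for a formatted string.
#[turbo_tasks::function]
fn modifier() -> Vc<RcStr> {
    Vc::cell("Next.js server component".into())
}

#[turbo_tasks::function]
async fn describe(name: RcStr) -> Result<Vc<RcStr>> {
    Ok(Vc::cell(
        format!("Next.js server component {}", name).into(),
    ))
}
```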

View file

@ -1,5 +1,5 @@
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::module::Module, core::module::Module,
ecmascript::chunk::EcmascriptChunkPlaceable, ecmascript::chunk::EcmascriptChunkPlaceable,
@ -29,7 +29,7 @@ impl NextServerComponentTransition {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl Transition for NextServerComponentTransition { impl Transition for NextServerComponentTransition {
#[turbo_tasks::function] #[turbo_tasks::function]
fn process_layer(self: Vc<Self>, layer: Vc<String>) -> Vc<String> { fn process_layer(self: Vc<Self>, layer: Vc<RcStr>) -> Vc<RcStr> {
layer layer
} }

View file

@ -2,7 +2,7 @@ use std::collections::{HashMap, HashSet};
use anyhow::Result; use anyhow::Result;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use turbo_tasks::{Value, Vc}; use turbo_tasks::{RcStr, Value, Vc};
use turbo_tasks_fs::glob::Glob; use turbo_tasks_fs::glob::Glob;
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::FileSystemPath, turbo::tasks_fs::FileSystemPath,
@ -63,7 +63,7 @@ impl UnsupportedModulesResolvePlugin {
impl AfterResolvePlugin for UnsupportedModulesResolvePlugin { impl AfterResolvePlugin for UnsupportedModulesResolvePlugin {
#[turbo_tasks::function] #[turbo_tasks::function]
fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> { fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> {
AfterResolvePluginCondition::new(self.root.root(), Glob::new("**".to_string())) AfterResolvePluginCondition::new(self.root.root(), Glob::new("**".into()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -85,7 +85,7 @@ impl AfterResolvePlugin for UnsupportedModulesResolvePlugin {
if UNSUPPORTED_PACKAGES.contains(module.as_str()) { if UNSUPPORTED_PACKAGES.contains(module.as_str()) {
UnsupportedModuleIssue { UnsupportedModuleIssue {
file_path, file_path,
package: module.into(), package: module.clone(),
package_path: None, package_path: None,
} }
.cell() .cell()
@ -96,7 +96,7 @@ impl AfterResolvePlugin for UnsupportedModulesResolvePlugin {
if UNSUPPORTED_PACKAGE_PATHS.contains(&(module, path)) { if UNSUPPORTED_PACKAGE_PATHS.contains(&(module, path)) {
UnsupportedModuleIssue { UnsupportedModuleIssue {
file_path, file_path,
package: module.into(), package: module.clone(),
package_path: Some(path.to_owned()), package_path: Some(path.to_owned()),
} }
.cell() .cell()
@ -112,7 +112,7 @@ impl AfterResolvePlugin for UnsupportedModulesResolvePlugin {
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub struct InvalidImportModuleIssue { pub struct InvalidImportModuleIssue {
pub file_path: Vc<FileSystemPath>, pub file_path: Vc<FileSystemPath>,
pub messages: Vec<String>, pub messages: Vec<RcStr>,
pub skip_context_message: bool, pub skip_context_message: bool,
} }
@ -146,17 +146,15 @@ impl Issue for InvalidImportModuleIssue {
if !self.skip_context_message { if !self.skip_context_message {
//[TODO]: how do we get the import trace? //[TODO]: how do we get the import trace?
messages.push(format!( messages
"The error was caused by importing '{}'", .push(format!("The error was caused by importing '{}'", raw_context.path).into());
raw_context.path
));
} }
Ok(Vc::cell(Some( Ok(Vc::cell(Some(
StyledString::Line( StyledString::Line(
messages messages
.iter() .iter()
.map(|v| StyledString::Text(format!("{}\n", v))) .map(|v| StyledString::Text(format!("{}\n", v).into()))
.collect::<Vec<StyledString>>(), .collect::<Vec<StyledString>>(),
) )
.cell(), .cell(),
@ -171,14 +169,14 @@ impl Issue for InvalidImportModuleIssue {
#[turbo_tasks::value] #[turbo_tasks::value]
pub(crate) struct InvalidImportResolvePlugin { pub(crate) struct InvalidImportResolvePlugin {
root: Vc<FileSystemPath>, root: Vc<FileSystemPath>,
invalid_import: String, invalid_import: RcStr,
message: Vec<String>, message: Vec<RcStr>,
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl InvalidImportResolvePlugin { impl InvalidImportResolvePlugin {
#[turbo_tasks::function] #[turbo_tasks::function]
pub fn new(root: Vc<FileSystemPath>, invalid_import: String, message: Vec<String>) -> Vc<Self> { pub fn new(root: Vc<FileSystemPath>, invalid_import: RcStr, message: Vec<RcStr>) -> Vc<Self> {
InvalidImportResolvePlugin { InvalidImportResolvePlugin {
root, root,
invalid_import, invalid_import,
@ -192,7 +190,7 @@ impl InvalidImportResolvePlugin {
impl AfterResolvePlugin for InvalidImportResolvePlugin { impl AfterResolvePlugin for InvalidImportResolvePlugin {
#[turbo_tasks::function] #[turbo_tasks::function]
fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> { fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> {
AfterResolvePluginCondition::new(self.root.root(), Glob::new("**".to_string())) AfterResolvePluginCondition::new(self.root.root(), Glob::new("**".into()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -228,11 +226,11 @@ pub(crate) fn get_invalid_client_only_resolve_plugin(
) -> Vc<InvalidImportResolvePlugin> { ) -> Vc<InvalidImportResolvePlugin> {
InvalidImportResolvePlugin::new( InvalidImportResolvePlugin::new(
root, root,
"client-only".to_string(), "client-only".into(),
vec![ vec![
"'client-only' cannot be imported from a Server Component module. It should only be \ "'client-only' cannot be imported from a Server Component module. It should only be \
used from a Client Component." used from a Client Component."
.to_string(), .into(),
], ],
) )
} }
@ -245,11 +243,11 @@ pub(crate) fn get_invalid_server_only_resolve_plugin(
) -> Vc<InvalidImportResolvePlugin> { ) -> Vc<InvalidImportResolvePlugin> {
InvalidImportResolvePlugin::new( InvalidImportResolvePlugin::new(
root, root,
"server-only".to_string(), "server-only".into(),
vec![ vec![
"'server-only' cannot be imported from a Client Component module. It should only be \ "'server-only' cannot be imported from a Client Component module. It should only be \
used from a Server Component." used from a Server Component."
.to_string(), .into(),
], ],
) )
} }
@ -260,15 +258,15 @@ pub(crate) fn get_invalid_styled_jsx_resolve_plugin(
) -> Vc<InvalidImportResolvePlugin> { ) -> Vc<InvalidImportResolvePlugin> {
InvalidImportResolvePlugin::new( InvalidImportResolvePlugin::new(
root, root,
"styled-jsx".to_string(), "styled-jsx".into(),
vec![ vec![
"'client-only' cannot be imported from a Server Component module. It should only be \ "'client-only' cannot be imported from a Server Component module. It should only be \
used from a Client Component." used from a Client Component."
.to_string(), .into(),
"The error was caused by using 'styled-jsx'. It only works in a Client Component but \ "The error was caused by using 'styled-jsx'. It only works in a Client Component but \
none of its parents are marked with \"use client\", so they're Server Components by \ none of its parents are marked with \"use client\", so they're Server Components by \
default." default."
.to_string(), .into(),
], ],
) )
} }
@ -292,7 +290,7 @@ impl AfterResolvePlugin for NextExternalResolvePlugin {
fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> { fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> {
AfterResolvePluginCondition::new( AfterResolvePluginCondition::new(
self.root.root(), self.root.root(),
Glob::new("**/next/dist/**/*.{external,runtime.dev,runtime.prod}.js".to_string()), Glob::new("**/next/dist/**/*.{external,runtime.dev,runtime.prod}.js".into()),
) )
} }
@ -310,10 +308,10 @@ impl AfterResolvePlugin for NextExternalResolvePlugin {
// always be found since the glob pattern above is specific enough. // always be found since the glob pattern above is specific enough.
let starting_index = path.find("next/dist").unwrap(); let starting_index = path.find("next/dist").unwrap();
// Replace '/esm/' with '/' to match the CJS version of the file. // Replace '/esm/' with '/' to match the CJS version of the file.
let modified_path = &path[starting_index..].replace("/esm/", "/"); let modified_path = path[starting_index..].replace("/esm/", "/");
Ok(Vc::cell(Some( Ok(Vc::cell(Some(
ResolveResult::primary(ResolveResultItem::External( ResolveResult::primary(ResolveResultItem::External(
modified_path.to_string(), modified_path.into(),
ExternalType::CommonJs, ExternalType::CommonJs,
)) ))
.into(), .into(),
@ -342,7 +340,7 @@ impl AfterResolvePlugin for NextNodeSharedRuntimeResolvePlugin {
fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> { fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> {
AfterResolvePluginCondition::new( AfterResolvePluginCondition::new(
self.root.root(), self.root.root(),
Glob::new("**/next/dist/**/*.shared-runtime.js".to_string()), Glob::new("**/next/dist/**/*.shared-runtime.js".into()),
) )
} }
@ -378,7 +376,9 @@ impl AfterResolvePlugin for NextNodeSharedRuntimeResolvePlugin {
let (base, _) = path.split_at(starting_index); let (base, _) = path.split_at(starting_index);
let new_path = fs_path.root().join(format!("{base}/{resource_request}")); let new_path = fs_path
.root()
.join(format!("{base}/{resource_request}").into());
Ok(Vc::cell(Some( Ok(Vc::cell(Some(
ResolveResult::source(Vc::upcast(FileSource::new(new_path))).into(), ResolveResult::source(Vc::upcast(FileSource::new(new_path))).into(),
@ -405,7 +405,7 @@ impl ModuleFeatureReportResolvePlugin {
impl AfterResolvePlugin for ModuleFeatureReportResolvePlugin { impl AfterResolvePlugin for ModuleFeatureReportResolvePlugin {
#[turbo_tasks::function] #[turbo_tasks::function]
fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> { fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> {
AfterResolvePluginCondition::new(self.root.root(), Glob::new("**".to_string())) AfterResolvePluginCondition::new(self.root.root(), Glob::new("**".into()))
} }
#[turbo_tasks::function] #[turbo_tasks::function]
@ -430,7 +430,7 @@ impl AfterResolvePlugin for ModuleFeatureReportResolvePlugin {
.find(|sub_path| path.is_match(sub_path)); .find(|sub_path| path.is_match(sub_path));
if let Some(sub_path) = sub_path { if let Some(sub_path) = sub_path {
ModuleFeatureTelemetry::new(format!("{}{}", module, sub_path), 1) ModuleFeatureTelemetry::new(format!("{}{}", module, sub_path).into(), 1)
.cell() .cell()
.emit(); .emit();
} }
@ -460,7 +460,7 @@ impl AfterResolvePlugin for NextSharedRuntimeResolvePlugin {
fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> { fn after_resolve_condition(&self) -> Vc<AfterResolvePluginCondition> {
AfterResolvePluginCondition::new( AfterResolvePluginCondition::new(
self.root.root(), self.root.root(),
Glob::new("**/next/dist/esm/**/*.shared-runtime.js".to_string()), Glob::new("**/next/dist/esm/**/*.shared-runtime.js".into()),
) )
} }
@ -474,7 +474,7 @@ impl AfterResolvePlugin for NextSharedRuntimeResolvePlugin {
) -> Result<Vc<ResolveResultOption>> { ) -> Result<Vc<ResolveResultOption>> {
let raw_fs_path = &*fs_path.await?; let raw_fs_path = &*fs_path.await?;
let modified_path = raw_fs_path.path.replace("next/dist/esm/", "next/dist/"); let modified_path = raw_fs_path.path.replace("next/dist/esm/", "next/dist/");
let new_path = fs_path.root().join(modified_path); let new_path = fs_path.root().join(modified_path.into());
Ok(Vc::cell(Some( Ok(Vc::cell(Some(
ResolveResult::source(Vc::upcast(FileSource::new(new_path))).into(), ResolveResult::source(Vc::upcast(FileSource::new(new_path))).into(),
))) )))
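A recurring pattern in the resolve plugins above: paths computed as `String` (for example with `replace`) are converted with `.into()` at the `FileSystemPath::join` call site, which now takes `RcStr`. A minimal sketch of that call shape (the function itself is hypothetical):

```rust
use turbo_tasks::{RcStr, Vc};
use turbo_tasks_fs::FileSystemPath;

// A path computed as `String` is converted at the `join` boundary; `join`
// accepts an `RcStr` as shown in the hunks above.
fn rebased_path(root: Vc<FileSystemPath>, path: &str) -> Vc<FileSystemPath> {
    let modified: String = path.replace("next/dist/esm/", "next/dist/");
    root.join(modified.into())
}
```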

View file

@ -166,7 +166,7 @@ impl Issue for PageStaticInfoIssue {
StyledString::Line( StyledString::Line(
self.messages self.messages
.iter() .iter()
.map(|v| StyledString::Text(format!("{}\n", v))) .map(|v| StyledString::Text(format!("{}\n", v).into()))
.collect::<Vec<StyledString>>(), .collect::<Vec<StyledString>>(),
) )
.cell(), .cell(),

View file

@ -36,21 +36,21 @@ pub async fn get_next_pages_transforms_rule(
ModuleRuleCondition::all(vec![ ModuleRuleCondition::all(vec![
ModuleRuleCondition::ResourcePathInExactDirectory(pages_dir.await?), ModuleRuleCondition::ResourcePathInExactDirectory(pages_dir.await?),
ModuleRuleCondition::not(ModuleRuleCondition::ResourcePathInExactDirectory( ModuleRuleCondition::not(ModuleRuleCondition::ResourcePathInExactDirectory(
pages_dir.join("api".to_string()).await?, pages_dir.join("api".into()).await?,
)), )),
ModuleRuleCondition::not(ModuleRuleCondition::any(vec![ ModuleRuleCondition::not(ModuleRuleCondition::any(vec![
// TODO(alexkirsz): Possibly ignore _app as well? // TODO(alexkirsz): Possibly ignore _app as well?
ModuleRuleCondition::ResourcePathEquals( ModuleRuleCondition::ResourcePathEquals(
pages_dir.join("_document.js".to_string()).await?, pages_dir.join("_document.js".into()).await?,
), ),
ModuleRuleCondition::ResourcePathEquals( ModuleRuleCondition::ResourcePathEquals(
pages_dir.join("_document.jsx".to_string()).await?, pages_dir.join("_document.jsx".into()).await?,
), ),
ModuleRuleCondition::ResourcePathEquals( ModuleRuleCondition::ResourcePathEquals(
pages_dir.join("_document.ts".to_string()).await?, pages_dir.join("_document.ts".into()).await?,
), ),
ModuleRuleCondition::ResourcePathEquals( ModuleRuleCondition::ResourcePathEquals(
pages_dir.join("_document.tsx".to_string()).await?, pages_dir.join("_document.tsx".into()).await?,
), ),
])), ])),
]), ]),

View file

@ -1,4 +1,6 @@
use anyhow::Result; use anyhow::Result;
#[allow(unused_imports)]
use turbo_tasks::RcStr;
use turbo_tasks::Vc; use turbo_tasks::Vc;
use turbo_tasks_fs::FileSystemPath; use turbo_tasks_fs::FileSystemPath;
use turbopack_binding::turbopack::turbopack::module_options::ModuleRule; use turbopack_binding::turbopack::turbopack::module_options::ModuleRule;
@ -30,7 +32,7 @@ pub async fn get_swc_ecma_transform_plugin_rule(
#[cfg(feature = "plugin")] #[cfg(feature = "plugin")]
pub async fn get_swc_ecma_transform_rule_impl( pub async fn get_swc_ecma_transform_rule_impl(
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
plugin_configs: &[(String, serde_json::Value)], plugin_configs: &[(RcStr, serde_json::Value)],
enable_mdx_rs: bool, enable_mdx_rs: bool,
) -> Result<Option<ModuleRule>> { ) -> Result<Option<ModuleRule>> {
use anyhow::{bail, Context}; use anyhow::{bail, Context};
@ -58,7 +60,7 @@ pub async fn get_swc_ecma_transform_rule_impl(
// one for implicit package name resolves to node_modules, // one for implicit package name resolves to node_modules,
// and one for explicit path to a .wasm binary. // and one for explicit path to a .wasm binary.
// Current resolve will fail with latter. // Current resolve will fail with latter.
let request = Request::parse(Value::new(Pattern::Constant(name.to_string()))); let request = Request::parse(Value::new(Pattern::Constant(name.as_str().into())));
let resolve_options = resolve_options( let resolve_options = resolve_options(
project_path, project_path,
ResolveOptionsContext { ResolveOptionsContext {

View file

@ -35,8 +35,8 @@ pub async fn maybe_add_babel_loader(
) -> Result<Vc<OptionWebpackRules>> { ) -> Result<Vc<OptionWebpackRules>> {
let has_babel_config = { let has_babel_config = {
let mut has_babel_config = false; let mut has_babel_config = false;
for filename in BABEL_CONFIG_FILES { for &filename in BABEL_CONFIG_FILES {
let filetype = *project_root.join(filename.to_string()).get_type().await?; let filetype = *project_root.join(filename.into()).get_type().await?;
if matches!(filetype, FileSystemEntryType::File) { if matches!(filetype, FileSystemEntryType::File) {
has_babel_config = true; has_babel_config = true;
break; break;
@ -71,13 +71,11 @@ pub async fn maybe_add_babel_loader(
BabelIssue { BabelIssue {
path: project_root, path: project_root,
title: StyledString::Text( title: StyledString::Text(
"Unable to resolve babel-loader, but a babel config is present" "Unable to resolve babel-loader, but a babel config is present".into(),
.to_owned(),
) )
.cell(), .cell(),
description: StyledString::Text( description: StyledString::Text(
"Make sure babel-loader is installed via your package manager." "Make sure babel-loader is installed via your package manager.".into(),
.to_owned(),
) )
.cell(), .cell(),
severity: IssueSeverity::Fatal.cell(), severity: IssueSeverity::Fatal.cell(),
@ -89,7 +87,7 @@ pub async fn maybe_add_babel_loader(
} }
let loader = WebpackLoaderItem { let loader = WebpackLoaderItem {
loader: "babel-loader".to_string(), loader: "babel-loader".into(),
options: Default::default(), options: Default::default(),
}; };
if let Some(rule) = rule { if let Some(rule) = rule {
@ -98,10 +96,10 @@ pub async fn maybe_add_babel_loader(
rule.loaders = Vc::cell(loaders); rule.loaders = Vc::cell(loaders);
} else { } else {
rules.insert( rules.insert(
pattern.to_string(), pattern.into(),
LoaderRuleItem { LoaderRuleItem {
loaders: Vc::cell(vec![loader]), loaders: Vc::cell(vec![loader]),
rename_as: Some("*".to_string()), rename_as: Some("*".into()),
}, },
); );
} }
@ -122,7 +120,7 @@ pub async fn is_babel_loader_available(project_path: Vc<FileSystemPath>) -> Resu
project_path, project_path,
Value::new(ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined)), Value::new(ReferenceType::CommonJs(CommonJsReferenceSubType::Undefined)),
Request::parse(Value::new(Pattern::Constant( Request::parse(Value::new(Pattern::Constant(
"babel-loader/package.json".to_string(), "babel-loader/package.json".into(),
))), ))),
node_cjs_resolve_options(project_path), node_cjs_resolve_options(project_path),
); );

View file

@ -1,5 +1,5 @@
use anyhow::Result; use anyhow::Result;
use turbo_tasks::Vc; use turbo_tasks::{RcStr, Vc};
use turbo_tasks_fs::FileSystemPath; use turbo_tasks_fs::FileSystemPath;
use turbopack_binding::turbopack::{ use turbopack_binding::turbopack::{
core::resolve::options::ImportMapping, turbopack::module_options::WebpackLoadersOptions, core::resolve::options::ImportMapping, turbopack::module_options::WebpackLoadersOptions,
@ -15,7 +15,7 @@ pub async fn webpack_loader_options(
project_path: Vc<FileSystemPath>, project_path: Vc<FileSystemPath>,
next_config: Vc<NextConfig>, next_config: Vc<NextConfig>,
foreign: bool, foreign: bool,
conditions: Vec<String>, conditions: Vec<RcStr>,
) -> Result<Option<Vc<WebpackLoadersOptions>>> { ) -> Result<Option<Vc<WebpackLoadersOptions>>> {
let rules = *next_config.webpack_rules(conditions).await?; let rules = *next_config.webpack_rules(conditions).await?;
let rules = *maybe_add_sass_loader(next_config.sass_config(), rules).await?; let rules = *maybe_add_sass_loader(next_config.sass_config(), rules).await?;
@ -35,5 +35,5 @@ pub async fn webpack_loader_options(
#[turbo_tasks::function] #[turbo_tasks::function]
fn loader_runner_package_mapping() -> Vc<ImportMapping> { fn loader_runner_package_mapping() -> Vc<ImportMapping> {
get_external_next_compiled_package_mapping(Vc::cell("loader-runner".to_owned())) get_external_next_compiled_package_mapping(Vc::cell("loader-runner".into()))
} }

View file

@ -35,7 +35,7 @@ pub async fn maybe_add_sass_loader(
.or(sass_options.get("additionalData")); .or(sass_options.get("additionalData"));
let rule = rules.get_mut(pattern); let rule = rules.get_mut(pattern);
let sass_loader = WebpackLoaderItem { let sass_loader = WebpackLoaderItem {
loader: "next/dist/compiled/sass-loader".to_string(), loader: "next/dist/compiled/sass-loader".into(),
options: take( options: take(
serde_json::json!({ serde_json::json!({
"sourceMap": true, "sourceMap": true,
@ -47,7 +47,7 @@ pub async fn maybe_add_sass_loader(
), ),
}; };
let resolve_url_loader = WebpackLoaderItem { let resolve_url_loader = WebpackLoaderItem {
loader: "next/dist/build/webpack/loaders/resolve-url-loader/index".to_string(), loader: "next/dist/build/webpack/loaders/resolve-url-loader/index".into(),
options: take( options: take(
serde_json::json!({ serde_json::json!({
//https://github.com/vercel/turbo/blob/d527eb54be384a4658243304cecd547d09c05c6b/crates/turbopack-node/src/transforms/webpack.rs#L191 //https://github.com/vercel/turbo/blob/d527eb54be384a4658243304cecd547d09c05c6b/crates/turbopack-node/src/transforms/webpack.rs#L191
@ -75,10 +75,10 @@ pub async fn maybe_add_sass_loader(
rule.loaders = Vc::cell(loaders); rule.loaders = Vc::cell(loaders);
} else { } else {
rules.insert( rules.insert(
pattern.to_string(), pattern.into(),
LoaderRuleItem { LoaderRuleItem {
loaders: Vc::cell(vec![resolve_url_loader, sass_loader]), loaders: Vc::cell(vec![resolve_url_loader, sass_loader]),
rename_as: Some(format!("*{rename}")), rename_as: Some(format!("*{rename}").into()),
}, },
); );
} }

View file

@ -1,5 +1,6 @@
use std::collections::HashMap; use std::collections::HashMap;
use turbo_tasks::RcStr;
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks::Vc, turbo::tasks::Vc,
turbopack::core::diagnostics::{Diagnostic, DiagnosticPayload}, turbopack::core::diagnostics::{Diagnostic, DiagnosticPayload},
@ -9,15 +10,15 @@ use turbopack_binding::{
/// enabled for the telemetry. The original implementation code can be found at the following [link](https://github.com/vercel/next.js/blob/9da305fe320b89ee2f8c3cfb7ecbf48856368913/packages/next/src/build/webpack-config.ts#L2516). /// enabled for the telemetry. The original implementation code can be found at the following [link](https://github.com/vercel/next.js/blob/9da305fe320b89ee2f8c3cfb7ecbf48856368913/packages/next/src/build/webpack-config.ts#L2516).
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub struct NextFeatureTelemetry { pub struct NextFeatureTelemetry {
pub event_name: String, pub event_name: RcStr,
pub feature_name: String, pub feature_name: RcStr,
pub enabled: bool, pub enabled: bool,
} }
impl NextFeatureTelemetry { impl NextFeatureTelemetry {
pub fn new(feature_name: String, enabled: bool) -> Self { pub fn new(feature_name: RcStr, enabled: bool) -> Self {
NextFeatureTelemetry { NextFeatureTelemetry {
event_name: "EVENT_BUILD_FEATURE_USAGE".to_string(), event_name: "EVENT_BUILD_FEATURE_USAGE".into(),
feature_name, feature_name,
enabled, enabled,
} }
@ -27,12 +28,12 @@ impl NextFeatureTelemetry {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl Diagnostic for NextFeatureTelemetry { impl Diagnostic for NextFeatureTelemetry {
#[turbo_tasks::function] #[turbo_tasks::function]
fn category(&self) -> Vc<String> { fn category(&self) -> Vc<RcStr> {
Vc::cell("NextFeatureTelemetry_category_tbd".to_string()) Vc::cell("NextFeatureTelemetry_category_tbd".into())
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn name(&self) -> Vc<String> { fn name(&self) -> Vc<RcStr> {
Vc::cell(self.event_name.clone()) Vc::cell(self.event_name.clone())
} }
@ -40,7 +41,7 @@ impl Diagnostic for NextFeatureTelemetry {
fn payload(&self) -> Vc<DiagnosticPayload> { fn payload(&self) -> Vc<DiagnosticPayload> {
Vc::cell(HashMap::from([( Vc::cell(HashMap::from([(
self.feature_name.clone(), self.feature_name.clone(),
self.enabled.to_string(), self.enabled.to_string().into(),
)])) )]))
} }
} }
@ -49,15 +50,15 @@ impl Diagnostic for NextFeatureTelemetry {
/// referred as `importing` a certain module. (i.e importing @next/image) /// referred as `importing` a certain module. (i.e importing @next/image)
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub struct ModuleFeatureTelemetry { pub struct ModuleFeatureTelemetry {
pub event_name: String, pub event_name: RcStr,
pub feature_name: String, pub feature_name: RcStr,
pub invocation_count: usize, pub invocation_count: usize,
} }
impl ModuleFeatureTelemetry { impl ModuleFeatureTelemetry {
pub fn new(feature_name: String, invocation_count: usize) -> Self { pub fn new(feature_name: RcStr, invocation_count: usize) -> Self {
ModuleFeatureTelemetry { ModuleFeatureTelemetry {
event_name: "EVENT_BUILD_FEATURE_USAGE".to_string(), event_name: "EVENT_BUILD_FEATURE_USAGE".into(),
feature_name, feature_name,
invocation_count, invocation_count,
} }
@ -67,12 +68,12 @@ impl ModuleFeatureTelemetry {
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
impl Diagnostic for ModuleFeatureTelemetry { impl Diagnostic for ModuleFeatureTelemetry {
#[turbo_tasks::function] #[turbo_tasks::function]
fn category(&self) -> Vc<String> { fn category(&self) -> Vc<RcStr> {
Vc::cell("ModuleFeatureTelemetry_category_tbd".to_string()) Vc::cell("ModuleFeatureTelemetry_category_tbd".into())
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn name(&self) -> Vc<String> { fn name(&self) -> Vc<RcStr> {
Vc::cell(self.event_name.clone()) Vc::cell(self.event_name.clone())
} }
@ -80,7 +81,7 @@ impl Diagnostic for ModuleFeatureTelemetry {
fn payload(&self) -> Vc<DiagnosticPayload> { fn payload(&self) -> Vc<DiagnosticPayload> {
Vc::cell(HashMap::from([( Vc::cell(HashMap::from([(
self.feature_name.clone(), self.feature_name.clone(),
self.invocation_count.to_string(), self.invocation_count.to_string().into(),
)])) )]))
} }
} }
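The telemetry structs above store their event and feature names as `RcStr`, and the diagnostic payload becomes a map of `RcStr` to `RcStr`, so non-string values are stringified first and then converted. A compact sketch of the payload construction (the free function is illustrative only):

```rust
use std::collections::HashMap;
use turbo_tasks::RcStr;

// Keys and values are both `RcStr`; numbers and booleans go through
// `to_string()` and then `.into()`, as in the hunk above.
fn payload(feature_name: &RcStr, invocation_count: usize) -> HashMap<RcStr, RcStr> {
    HashMap::from([(feature_name.clone(), invocation_count.to_string().into())])
}
```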

View file

@ -2,7 +2,7 @@ use std::io::Write;
use anyhow::{bail, Result}; use anyhow::{bail, Result};
use indexmap::indexmap; use indexmap::indexmap;
use turbo_tasks::{TryJoinIterExt, Value, Vc}; use turbo_tasks::{RcStr, TryJoinIterExt, Value, Vc};
use turbo_tasks_fs::FileSystemPathOption; use turbo_tasks_fs::FileSystemPathOption;
use turbopack_binding::{ use turbopack_binding::{
turbo::tasks_fs::{rope::RopeBuilder, File, FileContent, FileSystemPath}, turbo::tasks_fs::{rope::RopeBuilder, File, FileContent, FileSystemPath},
@ -29,7 +29,7 @@ use crate::{embed_js::next_js_file_path, util::get_asset_path_from_pathname};
pub async fn create_page_loader_entry_module( pub async fn create_page_loader_entry_module(
client_context: Vc<Box<dyn AssetContext>>, client_context: Vc<Box<dyn AssetContext>>,
entry_asset: Vc<Box<dyn Source>>, entry_asset: Vc<Box<dyn Source>>,
pathname: Vc<String>, pathname: Vc<RcStr>,
) -> Result<Vc<Box<dyn Module>>> { ) -> Result<Vc<Box<dyn Module>>> {
let mut result = RopeBuilder::default(); let mut result = RopeBuilder::default();
writeln!( writeln!(
@ -38,7 +38,7 @@ pub async fn create_page_loader_entry_module(
StringifyJs(&*pathname.await?) StringifyJs(&*pathname.await?)
)?; )?;
let page_loader_path = next_js_file_path("entry/page-loader.ts".to_string()); let page_loader_path = next_js_file_path("entry/page-loader.ts".into());
let base_code = page_loader_path.read(); let base_code = page_loader_path.read();
if let FileContent::Content(base_file) = &*base_code.await? { if let FileContent::Content(base_file) = &*base_code.await? {
result += base_file.content() result += base_file.content()
@ -64,7 +64,7 @@ pub async fn create_page_loader_entry_module(
.process( .process(
virtual_source, virtual_source,
Value::new(ReferenceType::Internal(Vc::cell(indexmap! { Value::new(ReferenceType::Internal(Vc::cell(indexmap! {
"PAGE".to_string() => module, "PAGE".into() => module,
}))), }))),
) )
.module(); .module();
@ -74,7 +74,7 @@ pub async fn create_page_loader_entry_module(
#[turbo_tasks::value(shared)] #[turbo_tasks::value(shared)]
pub struct PageLoaderAsset { pub struct PageLoaderAsset {
pub server_root: Vc<FileSystemPath>, pub server_root: Vc<FileSystemPath>,
pub pathname: Vc<String>, pub pathname: Vc<RcStr>,
pub rebase_prefix_path: Vc<FileSystemPathOption>, pub rebase_prefix_path: Vc<FileSystemPathOption>,
pub page_chunks: Vc<OutputAssets>, pub page_chunks: Vc<OutputAssets>,
} }
@ -84,7 +84,7 @@ impl PageLoaderAsset {
#[turbo_tasks::function] #[turbo_tasks::function]
pub fn new( pub fn new(
server_root: Vc<FileSystemPath>, server_root: Vc<FileSystemPath>,
pathname: Vc<String>, pathname: Vc<RcStr>,
rebase_prefix_path: Vc<FileSystemPathOption>, rebase_prefix_path: Vc<FileSystemPathOption>,
page_chunks: Vc<OutputAssets>, page_chunks: Vc<OutputAssets>,
) -> Vc<Self> { ) -> Vc<Self> {
@ -127,8 +127,8 @@ impl PageLoaderAsset {
} }
#[turbo_tasks::function] #[turbo_tasks::function]
fn page_loader_chunk_reference_description() -> Vc<String> { fn page_loader_chunk_reference_description() -> Vc<RcStr> {
Vc::cell("page loader chunk".to_string()) Vc::cell("page loader chunk".into())
} }
#[turbo_tasks::value_impl] #[turbo_tasks::value_impl]
@ -136,10 +136,15 @@ impl OutputAsset for PageLoaderAsset {
#[turbo_tasks::function] #[turbo_tasks::function]
async fn ident(&self) -> Result<Vc<AssetIdent>> { async fn ident(&self) -> Result<Vc<AssetIdent>> {
let root = self.rebase_prefix_path.await?.unwrap_or(self.server_root); let root = self.rebase_prefix_path.await?.unwrap_or(self.server_root);
Ok(AssetIdent::from_path(root.join(format!( Ok(AssetIdent::from_path(
"static/chunks/pages{}", root.join(
get_asset_path_from_pathname(&self.pathname.await?, ".js") format!(
)))) "static/chunks/pages{}",
get_asset_path_from_pathname(&self.pathname.await?, ".js")
)
.into(),
),
))
} }
#[turbo_tasks::function] #[turbo_tasks::function]

View file

@ -1,6 +1,6 @@
use anyhow::Result; use anyhow::Result;
use tracing::Instrument; use tracing::Instrument;
use turbo_tasks::{Completion, ValueToString, Vc}; use turbo_tasks::{Completion, RcStr, ValueToString, Vc};
use turbo_tasks_fs::FileSystemPathOption; use turbo_tasks_fs::FileSystemPathOption;
use turbopack_binding::turbo::tasks_fs::{ use turbopack_binding::turbo::tasks_fs::{
DirectoryContent, DirectoryEntry, FileSystemEntryType, FileSystemPath, DirectoryContent, DirectoryEntry, FileSystemEntryType, FileSystemPath,
@ -146,14 +146,14 @@ impl PagesDirectoryStructure {
pub async fn find_pages_structure( pub async fn find_pages_structure(
project_root: Vc<FileSystemPath>, project_root: Vc<FileSystemPath>,
next_router_root: Vc<FileSystemPath>, next_router_root: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>, page_extensions: Vc<Vec<RcStr>>,
) -> Result<Vc<PagesStructure>> { ) -> Result<Vc<PagesStructure>> {
let pages_root = project_root.join("pages".to_string()); let pages_root = project_root.join("pages".into());
let pages_root = Vc::<FileSystemPathOption>::cell( let pages_root = Vc::<FileSystemPathOption>::cell(
if *pages_root.get_type().await? == FileSystemEntryType::Directory { if *pages_root.get_type().await? == FileSystemEntryType::Directory {
Some(pages_root) Some(pages_root)
} else { } else {
let src_pages_root = project_root.join("src/pages".to_string()); let src_pages_root = project_root.join("src/pages".into());
if *src_pages_root.get_type().await? == FileSystemEntryType::Directory { if *src_pages_root.get_type().await? == FileSystemEntryType::Directory {
Some(src_pages_root) Some(src_pages_root)
} else { } else {
@ -181,7 +181,7 @@ async fn get_pages_structure_for_root_directory(
project_root: Vc<FileSystemPath>, project_root: Vc<FileSystemPath>,
project_path: Vc<FileSystemPathOption>, project_path: Vc<FileSystemPathOption>,
next_router_path: Vc<FileSystemPath>, next_router_path: Vc<FileSystemPath>,
page_extensions: Vc<Vec<String>>, page_extensions: Vc<Vec<RcStr>>,
) -> Result<Vc<PagesStructure>> { ) -> Result<Vc<PagesStructure>> {
let page_extensions_raw = &*page_extensions.await?; let page_extensions_raw = &*page_extensions.await?;
@ -204,8 +204,7 @@ async fn get_pages_structure_for_root_directory(
}; };
match basename { match basename {
"_app" => { "_app" => {
let item_next_router_path = let item_next_router_path = next_router_path.join("_app".into());
next_router_path.join("_app".to_string());
app_item = Some(PagesStructureItem::new( app_item = Some(PagesStructureItem::new(
*file_project_path, *file_project_path,
item_next_router_path, item_next_router_path,
@ -214,7 +213,7 @@ async fn get_pages_structure_for_root_directory(
} }
"_document" => { "_document" => {
let item_next_router_path = let item_next_router_path =
next_router_path.join("_document".to_string()); next_router_path.join("_document".into());
document_item = Some(PagesStructureItem::new( document_item = Some(PagesStructureItem::new(
*file_project_path, *file_project_path,
item_next_router_path, item_next_router_path,
@ -222,8 +221,7 @@ async fn get_pages_structure_for_root_directory(
)); ));
} }
"_error" => { "_error" => {
let item_next_router_path = let item_next_router_path = next_router_path.join("_error".into());
next_router_path.join("_error".to_string());
error_item = Some(PagesStructureItem::new( error_item = Some(PagesStructureItem::new(
*file_project_path, *file_project_path,
item_next_router_path, item_next_router_path,
@ -233,8 +231,7 @@ async fn get_pages_structure_for_root_directory(
basename => { basename => {
let item_next_router_path = let item_next_router_path =
next_router_path_for_basename(next_router_path, basename); next_router_path_for_basename(next_router_path, basename);
let item_original_path = let item_original_path = next_router_path.join(basename.into());
next_router_path.join(basename.to_string());
items.push(( items.push((
basename, basename,
PagesStructureItem::new( PagesStructureItem::new(
@@ -246,7 +243,7 @@ async fn get_pages_structure_for_root_directory(
                 }
             }
         }
-        DirectoryEntry::Directory(dir_project_path) => match name.as_ref() {
+        DirectoryEntry::Directory(dir_project_path) => match name.as_str() {
            "api" => {
                api_directory = Some(get_pages_structure_for_directory(
                    *dir_project_path,
@@ -292,9 +289,9 @@ async fn get_pages_structure_for_root_directory(
     let app_item = if let Some(app_item) = app_item {
         app_item
     } else {
-        let app_router_path = next_router_path.join("_app".to_string());
+        let app_router_path = next_router_path.join("_app".into());
         PagesStructureItem::new(
-            get_next_package(project_root).join("app.js".to_string()),
+            get_next_package(project_root).join("app.js".into()),
             app_router_path,
             app_router_path,
         )
@@ -303,9 +300,9 @@ async fn get_pages_structure_for_root_directory(
     let document_item = if let Some(document_item) = document_item {
         document_item
     } else {
-        let document_router_path = next_router_path.join("_document".to_string());
+        let document_router_path = next_router_path.join("_document".into());
         PagesStructureItem::new(
-            get_next_package(project_root).join("document.js".to_string()),
+            get_next_package(project_root).join("document.js".into()),
             document_router_path,
             document_router_path,
         )
@@ -314,9 +311,9 @@ async fn get_pages_structure_for_root_directory(
     let error_item = if let Some(error_item) = error_item {
         error_item
     } else {
-        let error_router_path = next_router_path.join("_error".to_string());
+        let error_router_path = next_router_path.join("_error".into());
         PagesStructureItem::new(
-            get_next_package(project_root).join("error.js".to_string()),
+            get_next_package(project_root).join("error.js".into()),
             error_router_path,
             error_router_path,
         )
@@ -339,11 +336,11 @@ async fn get_pages_structure_for_directory(
     project_path: Vc<FileSystemPath>,
     next_router_path: Vc<FileSystemPath>,
     position: u32,
-    page_extensions: Vc<Vec<String>>,
+    page_extensions: Vc<Vec<RcStr>>,
 ) -> Result<Vc<PagesDirectoryStructure>> {
     let span = {
-        let path = project_path.to_string().await?;
-        tracing::info_span!("analyse pages structure", name = *path)
+        let path = project_path.to_string().await?.to_string();
+        tracing::info_span!("analyse pages structure", name = path)
     };
     async move {
         let page_extensions_raw = &*page_extensions.await?;
@@ -360,9 +357,9 @@ async fn get_pages_structure_for_directory(
             };
             let item_next_router_path = match basename {
                 "index" => next_router_path,
-                _ => next_router_path.join(basename.to_string()),
+                _ => next_router_path.join(basename.into()),
             };
-            let item_original_name = next_router_path.join(basename.to_string());
+            let item_original_name = next_router_path.join(basename.into());
             items.push((
                 basename,
                 PagesStructureItem::new(
@@ -406,10 +403,10 @@ async fn get_pages_structure_for_directory(
     .await
 }

-fn page_basename<'a>(name: &'a str, page_extensions: &'a [String]) -> Option<&'a str> {
+fn page_basename<'a>(name: &'a str, page_extensions: &'a [RcStr]) -> Option<&'a str> {
     page_extensions
         .iter()
-        .find_map(|allowed| name.strip_suffix(allowed)?.strip_suffix('.'))
+        .find_map(|allowed| name.strip_suffix(&**allowed)?.strip_suffix('.'))
 }

 fn next_router_path_for_basename(
@@ -419,6 +416,6 @@ fn next_router_path_for_basename(
     if basename == "index" {
         next_router_path
     } else {
-        next_router_path.join(basename.to_string())
+        next_router_path.join(basename.into())
     }
 }
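Note on the conversions above: the `.to_string()` → `.into()` swaps only compile if the new string type converts from both `&str` and `String` and dereferences to `str` (see `&**allowed` in `page_basename`). The real `RcStr` lives in `turbo-tasks` and is not defined in this diff; the sketch below uses a made-up `RcStrSketch` over `Arc<str>` purely to illustrate those assumed traits and why clones become cheap.

```rust
use std::{fmt, ops::Deref, sync::Arc};

/// Minimal stand-in for an `RcStr`-like type: a cheaply clonable, immutable
/// string backed by `Arc<str>`. This only models the conversions the call
/// sites in this diff rely on; it is not the actual turbo-tasks type.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct RcStrSketch(Arc<str>);

impl From<&str> for RcStrSketch {
    fn from(s: &str) -> Self {
        Self(Arc::from(s))
    }
}

impl From<String> for RcStrSketch {
    fn from(s: String) -> Self {
        Self(Arc::from(s))
    }
}

impl Deref for RcStrSketch {
    type Target = str;
    fn deref(&self) -> &str {
        &self.0
    }
}

impl fmt::Display for RcStrSketch {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.0)
    }
}

fn main() {
    // `"_app".into()` works through `From<&str>`; `format!(..).into()` through `From<String>`.
    let app: RcStrSketch = "_app".into();
    let error: RcStrSketch = format!("{}.js", "error").into();

    // Cloning bumps a reference count instead of copying the bytes,
    // which is the memory win the PR is after.
    let alias = app.clone();
    assert_eq!(&*alias, "_app");

    // `Deref<Target = str>` keeps `str` APIs available, e.g. the
    // `name.strip_suffix(&**allowed)` pattern in `page_basename`.
    assert!(error.strip_suffix(".js").is_some());
    println!("{app} {error}");
}
```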


@@ -156,11 +156,11 @@ pub async fn get_jsx_transform_options(
         // https://github.com/vercel/next.js/blob/3dc2c1c7f8441cdee31da9f7e0986d654c7fd2e7/packages/next/src/build/swc/options.ts#L112
         // This'll be ignored if ts|jsconfig explicitly specifies importSource
         import_source: if is_emotion_enabled && !is_rsc_context {
-            Some("@emotion/react".to_string())
+            Some("@emotion/react".into())
         } else {
             None
         },
-        runtime: Some("automatic".to_string()),
+        runtime: Some("automatic".into()),
         react_refresh: enable_react_refresh,
     };
@@ -168,7 +168,7 @@ pub async fn get_jsx_transform_options(
     read_from_tsconfigs(&tsconfig, |json, _| {
         let jsx_import_source = json["compilerOptions"]["jsxImportSource"]
             .as_str()
-            .map(|s| s.to_string());
+            .map(|s| s.into());
         Some(JsxTransformOptions {
             import_source: if jsx_import_source.is_some() {
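A minimal sketch of the import-source selection encoded in the hunk above: the emotion JSX import source is only set outside RSC contexts, and the literal is converted once into the shared string type. `Arc<str>` stands in for `RcStr` (not defined in this diff), and the helper name `emotion_import_source` is made up for illustration.

```rust
use std::sync::Arc;

// Sketch of the conditional from `get_jsx_transform_options`, using
// `Arc<str>` in place of `RcStr`.
fn emotion_import_source(is_emotion_enabled: bool, is_rsc_context: bool) -> Option<Arc<str>> {
    if is_emotion_enabled && !is_rsc_context {
        Some("@emotion/react".into())
    } else {
        None
    }
}

fn main() {
    assert_eq!(
        emotion_import_source(true, false).as_deref(),
        Some("@emotion/react")
    );
    assert_eq!(emotion_import_source(true, true), None);
    assert_eq!(emotion_import_source(false, false), None);
}
```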


@@ -1,7 +1,7 @@
 use anyhow::{bail, Context, Result};
 use indexmap::{IndexMap, IndexSet};
 use serde::{de::DeserializeOwned, Deserialize, Serialize};
-use turbo_tasks::{trace::TraceRawVcs, TaskInput, ValueDefault, ValueToString, Vc};
+use turbo_tasks::{trace::TraceRawVcs, RcStr, TaskInput, ValueDefault, ValueToString, Vc};
 use turbo_tasks_fs::{rope::Rope, util::join_path, File};
 use turbopack_binding::{
     swc::core::{
@@ -49,7 +49,7 @@ pub async fn pathname_for_path(
     server_root: Vc<FileSystemPath>,
     server_path: Vc<FileSystemPath>,
     path_ty: PathType,
-) -> Result<Vc<String>> {
+) -> Result<Vc<RcStr>> {
     let server_path_value = &*server_path.await?;
     let path = if let Some(path) = server_root.await?.get_path_to(server_path_value) {
         path
@@ -62,10 +62,10 @@ pub async fn pathname_for_path(
     };
     let path = match (path_ty, path) {
         // "/" is special-cased to "/index" for data routes.
-        (PathType::Data, "") => "/index".to_string(),
+        (PathType::Data, "") => "/index".into(),
         // `get_path_to` always strips the leading `/` from the path, so we need to add
         // it back here.
-        (_, path) => format!("/{}", path),
+        (_, path) => format!("/{}", path).into(),
     };
     Ok(Vc::cell(path))
@@ -100,7 +100,7 @@ pub async fn foreign_code_context_condition(
     // of the `node_modules` specific resolve options (the template files are
     // technically node module files).
     let not_next_template_dir = ContextCondition::not(ContextCondition::InPath(
-        get_next_package(project_path).join(NEXT_TEMPLATE_PATH.to_string()),
+        get_next_package(project_path).join(NEXT_TEMPLATE_PATH.into()),
     ));
     let result = if transpile_packages.is_empty() {
@@ -195,7 +195,7 @@ impl Issue for NextSourceConfigParsingIssue {
     #[turbo_tasks::function]
     fn title(&self) -> Vc<StyledString> {
-        StyledString::Text("Unable to parse config export in source file".to_string()).cell()
+        StyledString::Text("Unable to parse config export in source file".into()).cell()
     }

     #[turbo_tasks::function]
@@ -214,7 +214,7 @@ impl Issue for NextSourceConfigParsingIssue {
             StyledString::Text(
                 "The exported configuration object in a source file need to have a very specific \
                  format from which some properties can be statically parsed at compiled-time."
-                    .to_string(),
+                    .into(),
             )
             .cell(),
         ))
@@ -230,7 +230,7 @@ fn emit_invalid_config_warning(ident: Vc<AssetIdent>, detail: &str, value: &JsVa
     let (explainer, hints) = value.explain(2, 0);
     NextSourceConfigParsingIssue {
         ident,
-        detail: StyledString::Text(format!("{detail} Got {explainer}.{hints}")).cell(),
+        detail: StyledString::Text(format!("{detail} Got {explainer}.{hints}").into()).cell(),
     }
     .cell()
     .emit()
@@ -267,20 +267,20 @@ fn parse_route_matcher_from_js_value(
             }
             let r = match route_type.as_deref() {
                 Some("header") => route_key.map(|route_key| RouteHas::Header {
-                    key: route_key,
-                    value: route_value,
+                    key: route_key.into(),
+                    value: route_value.map(From::from),
                 }),
                 Some("cookie") => route_key.map(|route_key| RouteHas::Cookie {
-                    key: route_key,
-                    value: route_value,
+                    key: route_key.into(),
+                    value: route_value.map(From::from),
                 }),
                 Some("query") => route_key.map(|route_key| RouteHas::Query {
-                    key: route_key,
-                    value: route_value,
+                    key: route_key.into(),
+                    value: route_value.map(From::from),
                 }),
-                Some("host") => {
-                    route_value.map(|route_value| RouteHas::Host { value: route_value })
-                }
+                Some("host") => route_value.map(|route_value| RouteHas::Host {
+                    value: route_value.into(),
+                }),
                 _ => None,
             };
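The `RouteHas` fields now hold the shared string type, so the hunk above converts `Option<String>` values with `.map(From::from)`. A small sketch of that pattern, assuming `RcStr: From<String>` as the diff implies, again with `Arc<str>` as a stand-in:

```rust
use std::sync::Arc;

// `Option::map(From::from)` converts an `Option<String>` into an
// `Option<Arc<str>>` without spelling out a closure; the closure form
// (`.map(|v| v.into())`) is equivalent and used elsewhere in the PR.
fn main() {
    let route_key: Option<String> = Some("x-preview".to_string());
    let route_value: Option<String> = None;

    // Point-free form used in the diff.
    let key: Option<Arc<str>> = route_key.map(From::from);
    // Equivalent closure form.
    let value: Option<Arc<str>> = route_value.map(|v| v.into());

    assert_eq!(key.as_deref(), Some("x-preview"));
    assert_eq!(value, None);
}
```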
@@ -319,7 +319,7 @@ fn parse_route_matcher_from_js_value(
                     match key.as_str() {
                         Some("source") => {
                             if let Some(value) = value.as_str() {
-                                matcher.original_source = value.to_string();
+                                matcher.original_source = value.into();
                             }
                         }
                         Some("missing") => {
@@ -397,7 +397,7 @@ pub async fn parse_config_from_source(module: Vc<Box<dyn Module>>) -> Result<Vc<
                         detail: StyledString::Text(
                             "The exported config object must contain an variable \
                              initializer."
-                                .to_string(),
+                                .into(),
                         )
                         .cell(),
                     }
@@ -415,7 +415,7 @@ pub async fn parse_config_from_source(module: Vc<Box<dyn Module>>) -> Result<Vc<
                             ident: module.ident(),
                             detail: StyledString::Text(
                                 "The runtime property must be either \"nodejs\" or \"edge\"."
-                                    .to_string(),
+                                    .into(),
                             )
                             .cell(),
                         }
@@ -449,7 +449,7 @@ pub async fn parse_config_from_source(module: Vc<Box<dyn Module>>) -> Result<Vc<
                         detail: StyledString::Text(
                             "The exported segment runtime option must contain an \
                              variable initializer."
-                                .to_string(),
+                                .into(),
                         )
                         .cell(),
                     }
@@ -536,9 +536,9 @@ fn parse_config_from_js_value(module: Vc<Box<dyn Module>>, value: &JsValue) -> N
 pub async fn load_next_js_template(
     path: &str,
     project_path: Vc<FileSystemPath>,
-    replacements: IndexMap<&'static str, String>,
-    injections: IndexMap<&'static str, String>,
-    imports: IndexMap<&'static str, Option<String>>,
+    replacements: IndexMap<&'static str, RcStr>,
+    injections: IndexMap<&'static str, RcStr>,
+    imports: IndexMap<&'static str, Option<RcStr>>,
 ) -> Result<Vc<Box<dyn Source>>> {
     let path = virtual_next_js_template_path(project_path, path.to_string());
@@ -591,7 +591,8 @@ pub async fn load_next_js_template(
                     import_request
                 },
             )
-            .context("path should not leave the fs")?,
+            .context("path should not leave the fs")?
+            .into(),
         };

         let relative = package_root_value
@@ -803,12 +804,12 @@ pub fn virtual_next_js_template_path(
     file: String,
 ) -> Vc<FileSystemPath> {
     debug_assert!(!file.contains('/'));
-    get_next_package(project_path).join(format!("{NEXT_TEMPLATE_PATH}/{file}"))
+    get_next_package(project_path).join(format!("{NEXT_TEMPLATE_PATH}/{file}").into())
 }

 pub async fn load_next_js_templateon<T: DeserializeOwned>(
     project_path: Vc<FileSystemPath>,
-    path: String,
+    path: RcStr,
 ) -> Result<T> {
     let file_path = get_next_package(project_path).join(path.clone());
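`load_next_js_template` and `load_next_js_templateon` now take `RcStr`-based parameters, so call sites build their maps with `.into()` conversions. Below is a sketch of that calling convention only, using `BTreeMap` and `Arc<str>` as stand-ins for `IndexMap` and `RcStr`; the placeholder keys are made up, not actual template placeholders.

```rust
use std::collections::BTreeMap;
use std::sync::Arc;

fn main() {
    // Replacement values converted from `&str` and `String` with `.into()`.
    let mut replacements: BTreeMap<&'static str, Arc<str>> = BTreeMap::new();
    replacements.insert("PLACEHOLDER_PAGE", "/index".into());
    replacements.insert("PLACEHOLDER_PATHNAME", format!("/{}", "index").into());

    // Optional imports mirror the `Option<RcStr>` parameter.
    let mut imports: BTreeMap<&'static str, Option<Arc<str>>> = BTreeMap::new();
    imports.insert("PLACEHOLDER_USERLAND", Some("./page.js".into()));
    imports.insert("PLACEHOLDER_NOT_FOUND", None);

    for (key, value) in &replacements {
        println!("{key} => {value}");
    }
}
```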


@@ -206,7 +206,7 @@
     "@types/ws": "8.2.0",
     "@vercel/ncc": "0.34.0",
     "@vercel/nft": "0.27.1",
-    "@vercel/turbopack-ecmascript-runtime": "https://gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240526.2",
+    "@vercel/turbopack-ecmascript-runtime": "https://gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240605.1",
     "acorn": "8.11.3",
     "amphtml-validator": "1.0.35",
     "anser": "1.4.9",


@@ -1090,8 +1090,8 @@ importers:
         specifier: 0.27.1
         version: 0.27.1
       '@vercel/turbopack-ecmascript-runtime':
-        specifier: https://gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240526.2
-        version: '@gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240526.2'
+        specifier: https://gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240605.1
+        version: '@gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240605.1'
       acorn:
         specifier: 8.11.3
         version: 8.11.3
@@ -25838,8 +25838,8 @@ packages:
   /zwitch@2.0.4:
     resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==}

-  '@gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240526.2':
-    resolution: {tarball: https://gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240526.2}
+  '@gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240605.1':
+    resolution: {tarball: https://gitpkg-fork.vercel.sh/vercel/turbo/crates/turbopack-ecmascript-runtime/js?turbopack-240605.1}
     name: '@vercel/turbopack-ecmascript-runtime'
     version: 0.0.0
     dependencies: