diff --git a/.github/workflows/ci.generate.ts b/.github/workflows/ci.generate.ts index d302f92c988344..dc847a2dd7305c 100755 --- a/.github/workflows/ci.generate.ts +++ b/.github/workflows/ci.generate.ts @@ -5,7 +5,7 @@ import { stringify } from "jsr:@std/yaml@^0.221/stringify"; // Bump this number when you want to purge the cache. // Note: the tools/release/01_bump_crate_versions.ts script will update this version // automatically via regex, so ensure that this line maintains this format. -const cacheVersion = 36; +const cacheVersion = 37; const ubuntuX86Runner = "ubuntu-24.04"; const ubuntuX86XlRunner = "ubuntu-24.04-xl"; @@ -46,9 +46,9 @@ const Runners = { macosArmSelfHosted: { os: "macos", arch: "aarch64", - // Actually use self-hosted runner only in denoland/deno on `main` branch. + // Actually use self-hosted runner only in denoland/deno on `main` branch and for tags (release) builds. runner: - `\${{ github.repository == 'denoland/deno' && github.ref == 'refs/heads/main' && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`, + `\${{ github.repository == 'denoland/deno' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) && '${selfHostedMacosArmRunner}' || '${macosArmRunner}' }}`, }, windowsX86: { os: "windows", @@ -130,9 +130,7 @@ cat /sysroot/.env # to build because the object formats are not compatible. 
echo " CARGO_PROFILE_BENCH_INCREMENTAL=false -CARGO_PROFILE_BENCH_LTO=false CARGO_PROFILE_RELEASE_INCREMENTAL=false -CARGO_PROFILE_RELEASE_LTO=false RUSTFLAGS<<__1 -C linker-plugin-lto=true -C linker=clang-${llvmVersion} @@ -156,7 +154,7 @@ RUSTDOCFLAGS<<__1 $RUSTFLAGS __1 CC=/usr/bin/clang-${llvmVersion} -CFLAGS=-flto=thin $CFLAGS +CFLAGS=$CFLAGS " > $GITHUB_ENV`, }; @@ -654,6 +652,14 @@ const ci = { "cache-path": "./target", }, }, + { + name: "Set up playwright cache", + uses: "actions/cache@v4", + with: { + path: "./.ms-playwright", + key: "playwright-${{ runner.os }}-${{ runner.arch }}", + }, + }, { name: "test_format.js", if: "matrix.job == 'lint' && matrix.os == 'linux'", @@ -1099,6 +1105,26 @@ const ci = { }, ]), }, + wasm: { + name: "build wasm32", + needs: ["pre_build"], + if: "${{ needs.pre_build.outputs.skip_build != 'true' }}", + "runs-on": ubuntuX86Runner, + "timeout-minutes": 30, + steps: skipJobsIfPrAndMarkedSkip([ + ...cloneRepoStep, + installRustStep, + { + name: "Install wasm target", + run: "rustup target add wasm32-unknown-unknown", + }, + { + name: "Cargo build", + // we want this crate to be wasm compatible + run: "cargo build --target wasm32-unknown-unknown -p deno_resolver", + }, + ]), + }, "publish-canary": { name: "publish canary", "runs-on": ubuntuX86Runner, diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 501c23212a87b0..d51e75ff869614 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -73,7 +73,7 @@ jobs: profile: debug - os: macos arch: aarch64 - runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == ''denoland/deno'' && github.ref == ''refs/heads/main'' && ''ghcr.io/cirruslabs/macos-runner:sonoma'' || ''macos-14'' }}' + runner: '${{ (!contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'')) && ''ubuntu-24.04'' || github.repository == 
''denoland/deno'' && (github.ref == ''refs/heads/main'' || startsWith(github.ref, ''refs/tags/'')) && ''ghcr.io/cirruslabs/macos-runner:sonoma'' || ''macos-14'' }}' job: test profile: release skip: '${{ !contains(github.event.pull_request.labels.*.name, ''ci-full'') && (github.event_name == ''pull_request'') }}' @@ -184,8 +184,8 @@ jobs: ~/.cargo/registry/index ~/.cargo/registry/cache ~/.cargo/git/db - key: '36-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' - restore-keys: '36-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-' + key: '37-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-${{ hashFiles(''Cargo.lock'') }}' + restore-keys: '37-cargo-home-${{ matrix.os }}-${{ matrix.arch }}-' if: '!(matrix.skip)' - uses: dsherret/rust-toolchain-file@v1 if: '!(matrix.skip)' @@ -307,9 +307,7 @@ jobs: # to build because the object formats are not compatible. echo " CARGO_PROFILE_BENCH_INCREMENTAL=false - CARGO_PROFILE_BENCH_LTO=false CARGO_PROFILE_RELEASE_INCREMENTAL=false - CARGO_PROFILE_RELEASE_LTO=false RUSTFLAGS<<__1 -C linker-plugin-lto=true -C linker=clang-19 @@ -333,7 +331,7 @@ jobs: $RUSTFLAGS __1 CC=/usr/bin/clang-19 - CFLAGS=-flto=thin $CFLAGS + CFLAGS=$CFLAGS " > $GITHUB_ENV - name: Remove macOS cURL --ipv4 flag run: |- @@ -379,12 +377,18 @@ jobs: !./target/*/*.zip !./target/*/*.tar.gz key: never_saved - restore-keys: '36-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' + restore-keys: '37-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-' - name: Apply and update mtime cache if: '!(matrix.skip) && (!startsWith(github.ref, ''refs/tags/''))' uses: ./.github/mtime_cache with: cache-path: ./target + - name: Set up playwright cache + uses: actions/cache@v4 + with: + path: ./.ms-playwright + key: 'playwright-${{ runner.os }}-${{ runner.arch }}' + if: '!(matrix.skip)' - name: test_format.js if: '!(matrix.skip) && (matrix.job == ''lint'' && matrix.os == 
''linux'')' run: deno run --allow-write --allow-read --allow-run --allow-net ./tools/format.js --check @@ -689,7 +693,34 @@ jobs: !./target/*/gn_root !./target/*/*.zip !./target/*/*.tar.gz - key: '36-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + key: '37-cargo-target-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.profile }}-${{ matrix.job }}-${{ github.sha }}' + wasm: + name: build wasm32 + needs: + - pre_build + if: '${{ needs.pre_build.outputs.skip_build != ''true'' }}' + runs-on: ubuntu-24.04 + timeout-minutes: 30 + steps: + - name: Configure git + run: |- + git config --global core.symlinks true + git config --global fetch.parallel 32 + if: '!(matrix.skip)' + - name: Clone repository + uses: actions/checkout@v4 + with: + fetch-depth: 5 + submodules: false + if: '!(matrix.skip)' + - uses: dsherret/rust-toolchain-file@v1 + if: '!(matrix.skip)' + - name: Install wasm target + run: rustup target add wasm32-unknown-unknown + if: '!(matrix.skip)' + - name: Cargo build + run: cargo build --target wasm32-unknown-unknown -p deno_resolver + if: '!(matrix.skip)' publish-canary: name: publish canary runs-on: ubuntu-24.04 diff --git a/.gitignore b/.gitignore index 8be7f979d1b691..a332a06bff2df6 100644 --- a/.gitignore +++ b/.gitignore @@ -35,4 +35,7 @@ junit.xml # Jupyter files .ipynb_checkpoints/ -Untitled*.ipynb \ No newline at end of file +Untitled*.ipynb + +# playwright browser binary cache +/.ms-playwright diff --git a/Cargo.lock b/Cargo.lock index 54e7cd851e4319..922f44279d9759 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -284,6 +284,12 @@ dependencies = [ "tokio", ] +[[package]] +name = "async-once-cell" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288f83726785267c6f2ef073a3d83dc3f9b81464e9f99898240cced85fce35a" + [[package]] name = "async-recursion" version = "1.1.1" @@ -341,6 +347,12 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = 
"atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + [[package]] name = "auto_impl" version = "1.2.0" @@ -646,9 +658,9 @@ dependencies = [ [[package]] name = "bytemuck" -version = "1.17.1" +version = "1.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773d90827bc3feecfb67fab12e24de0749aad83c74b9504ecde46237b5cd24e2" +checksum = "ef657dfab802224e671f5818e9a4935f9b1957ed18e58292690cc39e7a4092a3" [[package]] name = "byteorder" @@ -726,13 +738,13 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.106" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "066fce287b1d4eafef758e89e09d724a24808a9196fe9756b8ca90e86d0719a2" +checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" dependencies = [ "jobserver", "libc", - "once_cell", + "shlex", ] [[package]] @@ -844,18 +856,16 @@ checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" name = "cli_tests" version = "0.0.0" dependencies = [ + "anyhow", "bytes", "chrono", - "deno_ast", "deno_bench_util", "deno_cache_dir", - "deno_core", - "deno_fetch", "deno_lockfile", "deno_semver", "deno_terminal 0.2.0", - "deno_tls", "deno_tower_lsp", + "deno_unsync", "fastwebsockets", "file_test_runner", "flaky_test", @@ -864,7 +874,7 @@ dependencies = [ "hickory-server", "http 1.1.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "nix", "once_cell", @@ -872,12 +882,17 @@ dependencies = [ "pretty_assertions", "regex", "reqwest", + "rustls", + "rustls-pemfile", + "rustls-tokio-stream", "serde", + "serde_json", "sys_traits", "test_server", "tokio", "url", "uuid", + "walkdir", "zeromq", ] @@ -1250,7 +1265,7 @@ dependencies = [ [[package]] name = "deno" -version = "2.1.6" +version = "2.1.7" dependencies = [ "anstream", "async-trait", @@ -1279,6 +1294,7 @@ dependencies = [ 
"deno_lib", "deno_lint", "deno_lockfile", + "deno_media_type", "deno_npm", "deno_npm_cache", "deno_package_json", @@ -1286,6 +1302,7 @@ dependencies = [ "deno_resolver", "deno_runtime", "deno_semver", + "deno_snapshots", "deno_task_shell", "deno_telemetry", "deno_terminal 0.2.0", @@ -1297,7 +1314,6 @@ dependencies = [ "dprint-plugin-jupyter", "dprint-plugin-markdown", "dprint-plugin-typescript", - "env_logger", "fancy-regex", "faster-hex", "flate2", @@ -1428,7 +1444,7 @@ dependencies = [ [[package]] name = "deno_bench_util" -version = "0.180.0" +version = "0.181.0" dependencies = [ "bencher", "deno_core", @@ -1437,7 +1453,7 @@ dependencies = [ [[package]] name = "deno_broadcast_channel" -version = "0.180.0" +version = "0.181.0" dependencies = [ "async-trait", "deno_core", @@ -1449,23 +1465,37 @@ dependencies = [ [[package]] name = "deno_cache" -version = "0.118.0" +version = "0.119.0" dependencies = [ + "anyhow", + "async-stream", "async-trait", + "base64 0.21.7", + "bytes", + "chrono", "deno_core", "deno_error", + "futures", + "http 1.1.0", + "http-body 1.0.0", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "log", "rusqlite", "serde", "sha2", + "slab", "thiserror 2.0.3", "tokio", + "tokio-util", ] [[package]] name = "deno_cache_dir" -version = "0.16.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e73ed17f285731a23df9779ca1e0e721de866db6776ed919ebd9235e0a107c4c" +checksum = "27429da4d0e601baaa41415a43468d49a586645d13497f12e8a9346f9f6b1347" dependencies = [ "async-trait", "base32", @@ -1492,7 +1522,7 @@ dependencies = [ [[package]] name = "deno_canvas" -version = "0.55.0" +version = "0.56.0" dependencies = [ "bytemuck", "deno_core", @@ -1508,9 +1538,9 @@ dependencies = [ [[package]] name = "deno_config" -version = "0.45.0" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47a47412627aa0d08414eca0e8329128013ab70bdb2cdfdc5456c2214cf24c8f" +checksum = 
"08fe512a72c4300bd997c6849450a1f050da0c909a2a4fbdc44891647392bacf" dependencies = [ "boxed_error", "capacity_builder 0.5.0", @@ -1535,16 +1565,16 @@ dependencies = [ [[package]] name = "deno_console" -version = "0.186.0" +version = "0.187.0" dependencies = [ "deno_core", ] [[package]] name = "deno_core" -version = "0.330.0" +version = "0.333.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd38bbbd68ed873165ccb630322704b44140d3a8c8d50f898beac4d1a8a3358c" +checksum = "2e67f6f874401f3b8fb3e851743c40ee14014648db6685b233b34f783f6959e8" dependencies = [ "anyhow", "az", @@ -1557,6 +1587,7 @@ dependencies = [ "deno_core_icudata", "deno_error", "deno_ops", + "deno_path_util", "deno_unsync", "futures", "indexmap 2.3.0", @@ -1586,7 +1617,7 @@ checksum = "fe4dccb6147bb3f3ba0c7a48e993bfeb999d2c2e47a81badee80e2b370c8d695" [[package]] name = "deno_cron" -version = "0.66.0" +version = "0.67.0" dependencies = [ "anyhow", "async-trait", @@ -1600,7 +1631,7 @@ dependencies = [ [[package]] name = "deno_crypto" -version = "0.200.0" +version = "0.201.0" dependencies = [ "aes", "aes-gcm", @@ -1668,9 +1699,9 @@ dependencies = [ [[package]] name = "deno_error" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4da6a58de6932a96f84e133c072fd3b525966ee122a71f3efd48bbff2eed5ac" +checksum = "9c23dbc46d5804814b08b4675838f9884e3a52916987ec5105af36d42f9911b5" dependencies = [ "deno_error_macro", "libc", @@ -1682,9 +1713,9 @@ dependencies = [ [[package]] name = "deno_error_macro" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46351dff93aed2039407c91e2ded2a5591e42d2795ab3d111288625bb710d3d2" +checksum = "babccedee31ce7e57c3e6dff2cb3ab8d68c49d0df8222fe0d11d628e65192790" dependencies = [ "proc-macro2", "quote", @@ -1693,7 +1724,7 @@ dependencies = [ [[package]] name = "deno_fetch" -version = "0.210.0" +version = "0.211.0" dependencies = 
[ "base64 0.21.7", "bytes", @@ -1706,11 +1737,11 @@ dependencies = [ "dyn-clone", "error_reporter", "fast-socks5", - "h2 0.4.4", + "h2 0.4.7", "hickory-resolver", "http 1.1.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-rustls", "hyper-util", "ipnet", @@ -1730,7 +1761,7 @@ dependencies = [ [[package]] name = "deno_ffi" -version = "0.173.0" +version = "0.174.0" dependencies = [ "deno_core", "deno_error", @@ -1751,7 +1782,7 @@ dependencies = [ [[package]] name = "deno_fs" -version = "0.96.0" +version = "0.97.0" dependencies = [ "async-trait", "base32", @@ -1775,9 +1806,9 @@ dependencies = [ [[package]] name = "deno_graph" -version = "0.87.0" +version = "0.87.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f56d4eb4b7c81ae920b6d18c45a1866924f93110caee80bbbc362dc28143f2bb" +checksum = "e4766f426e4258c481c3af019fb4bba31e3108e80b8b2a48bbeb68bfadcc8c18" dependencies = [ "async-trait", "capacity_builder 0.5.0", @@ -1809,7 +1840,7 @@ dependencies = [ [[package]] name = "deno_http" -version = "0.184.0" +version = "0.185.0" dependencies = [ "async-compression", "async-trait", @@ -1828,7 +1859,7 @@ dependencies = [ "http-body-util", "httparse", "hyper 0.14.28", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "itertools 0.10.5", "memmem", @@ -1849,7 +1880,7 @@ dependencies = [ [[package]] name = "deno_io" -version = "0.96.0" +version = "0.97.0" dependencies = [ "async-trait", "deno_core", @@ -1871,7 +1902,7 @@ dependencies = [ [[package]] name = "deno_kv" -version = "0.94.0" +version = "0.95.0" dependencies = [ "anyhow", "async-trait", @@ -1904,34 +1935,43 @@ dependencies = [ [[package]] name = "deno_lib" -version = "0.2.0" +version = "0.3.0" dependencies = [ - "deno_cache_dir", + "capacity_builder 0.5.0", + "deno_config", "deno_error", "deno_fs", + "deno_media_type", "deno_node", + "deno_npm", "deno_path_util", "deno_resolver", "deno_runtime", + "deno_semver", "deno_terminal 0.2.0", + "env_logger", "faster-hex", + "indexmap 
2.3.0", + "libsui", "log", "node_resolver", "parking_lot", "ring", "serde", + "serde_json", "sys_traits", "test_server", "thiserror 2.0.3", "tokio", + "twox-hash", "url", ] [[package]] name = "deno_lint" -version = "0.68.2" +version = "0.70.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce713d564f76efd90535061113210bdc6b942ed6327b33eb1d5f76a5daf8e7a5" +checksum = "ac94db8d8597b96c92d30a68b11d4bec6822dcbb3e8675ab1e0136816a301a34" dependencies = [ "anyhow", "deno_ast", @@ -1959,18 +1999,19 @@ dependencies = [ [[package]] name = "deno_media_type" -version = "0.2.3" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a417f8bd3f1074185c4c8ccb6ea6261ae173781596cc358e68ad07aaac11009d" +checksum = "600222d059ab31ff31182b3e12615df2134a9e01605836b78ad8df91ba39eab3" dependencies = [ "data-url", + "encoding_rs", "serde", "url", ] [[package]] name = "deno_napi" -version = "0.117.0" +version = "0.118.0" dependencies = [ "deno_core", "deno_error", @@ -1999,7 +2040,7 @@ dependencies = [ [[package]] name = "deno_net" -version = "0.178.0" +version = "0.179.0" dependencies = [ "deno_core", "deno_error", @@ -2018,7 +2059,7 @@ dependencies = [ [[package]] name = "deno_node" -version = "0.124.0" +version = "0.125.0" dependencies = [ "aead-gcm-stream", "aes", @@ -2030,17 +2071,18 @@ dependencies = [ "bytes", "cbc", "const-oid", + "ctr", "data-encoding", "deno_core", "deno_error", "deno_fetch", "deno_fs", "deno_io", - "deno_media_type", "deno_net", "deno_package_json", "deno_path_util", "deno_permissions", + "deno_process", "deno_whoami", "der", "digest", @@ -2049,13 +2091,13 @@ dependencies = [ "ecdsa", "ed25519-dalek", "elliptic-curve", - "errno 0.2.8", + "errno", "faster-hex", - "h2 0.4.4", + "h2 0.4.7", "hkdf", "http 1.1.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "idna", "indexmap 2.3.0", @@ -2063,6 +2105,7 @@ dependencies = [ "k256", "lazy-regex", "libc", + "libsqlite3-sys", 
"libz-sys", "md-5", "md4", @@ -2078,13 +2121,13 @@ dependencies = [ "p384", "path-clean", "pbkdf2", - "pin-project-lite", "pkcs8", "rand", "regex", "ring", "ripemd", "rsa", + "rusqlite", "scrypt", "sec1", "serde", @@ -2131,7 +2174,7 @@ dependencies = [ [[package]] name = "deno_npm_cache" -version = "0.5.0" +version = "0.6.0" dependencies = [ "async-trait", "base64 0.21.7", @@ -2161,9 +2204,9 @@ dependencies = [ [[package]] name = "deno_ops" -version = "0.206.0" +version = "0.209.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c25ffa9d088ea00748dbef870bba110ac22ebf8cf7b2e9eb288409c5d852af3" +checksum = "fbda741ba267a252bad86153d16c859c3a8e1b219b300a1b4f0a2632c0a465cf" dependencies = [ "indexmap 2.3.0", "proc-macro-rules", @@ -2178,7 +2221,7 @@ dependencies = [ [[package]] name = "deno_os" -version = "0.3.0" +version = "0.4.0" dependencies = [ "deno_core", "deno_error", @@ -2199,9 +2242,9 @@ dependencies = [ [[package]] name = "deno_package_json" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d3c0f699ba2040669204ce24ab73720499fc290af843e4ce0fc8a9b3d67735" +checksum = "d07d26dbfcc01e636aef86f9baff7faf5338398e74d283d8fe01e39068f48049" dependencies = [ "boxed_error", "deno_error", @@ -2217,9 +2260,9 @@ dependencies = [ [[package]] name = "deno_path_util" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "420e8211aaba7fde83ccaa9a5dad855c3b940ed988d70c95159acd600a70dc87" +checksum = "c87b8996966ae1b13ee9c20219b1d10fc53905b9570faae6adfa34614fd15224" dependencies = [ "deno_error", "percent-encoding", @@ -2230,7 +2273,7 @@ dependencies = [ [[package]] name = "deno_permissions" -version = "0.45.0" +version = "0.46.0" dependencies = [ "capacity_builder 0.5.0", "deno_core", @@ -2248,11 +2291,39 @@ dependencies = [ "winapi", ] +[[package]] +name = "deno_process" +version = "0.2.0" +dependencies = [ + 
"deno_core", + "deno_error", + "deno_fs", + "deno_io", + "deno_os", + "deno_path_util", + "deno_permissions", + "libc", + "log", + "memchr", + "nix", + "pin-project-lite", + "rand", + "serde", + "simd-json", + "tempfile", + "thiserror 2.0.3", + "tokio", + "which", + "winapi", + "windows-sys 0.59.0", +] + [[package]] name = "deno_resolver" -version = "0.17.0" +version = "0.18.0" dependencies = [ "anyhow", + "async-once-cell", "async-trait", "base32", "boxed_error", @@ -2265,8 +2336,11 @@ dependencies = [ "deno_package_json", "deno_path_util", "deno_semver", + "deno_terminal 0.2.0", + "futures", "log", "node_resolver", + "once_cell", "parking_lot", "sys_traits", "test_server", @@ -2276,7 +2350,7 @@ dependencies = [ [[package]] name = "deno_runtime" -version = "0.194.0" +version = "0.195.0" dependencies = [ "color-print", "deno_ast", @@ -2300,6 +2374,7 @@ dependencies = [ "deno_os", "deno_path_util", "deno_permissions", + "deno_process", "deno_resolver", "deno_telemetry", "deno_terminal 0.2.0", @@ -2313,11 +2388,10 @@ dependencies = [ "dlopen2", "encoding_rs", "fastwebsockets", - "flate2", "http 1.1.0", "http-body-util", "hyper 0.14.28", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "libc", "log", @@ -2361,6 +2435,13 @@ dependencies = [ "url", ] +[[package]] +name = "deno_snapshots" +version = "0.2.0" +dependencies = [ + "deno_runtime", +] + [[package]] name = "deno_task_shell" version = "0.20.2" @@ -2381,13 +2462,15 @@ dependencies = [ [[package]] name = "deno_telemetry" -version = "0.8.0" +version = "0.9.0" dependencies = [ "async-trait", "deno_core", "deno_error", + "deno_tls", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", + "hyper-rustls", "hyper-util", "log", "once_cell", @@ -2424,7 +2507,7 @@ dependencies = [ [[package]] name = "deno_tls" -version = "0.173.0" +version = "0.174.0" dependencies = [ "deno_core", "deno_error", @@ -2475,7 +2558,7 @@ dependencies = [ [[package]] name = "deno_url" -version = "0.186.0" +version = "0.187.0" dependencies = [ 
"deno_bench_util", "deno_console", @@ -2488,7 +2571,7 @@ dependencies = [ [[package]] name = "deno_web" -version = "0.217.0" +version = "0.218.0" dependencies = [ "async-trait", "base64-simd 0.8.0", @@ -2511,7 +2594,7 @@ dependencies = [ [[package]] name = "deno_webgpu" -version = "0.153.0" +version = "0.154.0" dependencies = [ "deno_core", "deno_error", @@ -2525,7 +2608,7 @@ dependencies = [ [[package]] name = "deno_webidl" -version = "0.186.0" +version = "0.187.0" dependencies = [ "deno_bench_util", "deno_core", @@ -2533,7 +2616,7 @@ dependencies = [ [[package]] name = "deno_websocket" -version = "0.191.0" +version = "0.192.0" dependencies = [ "bytes", "deno_core", @@ -2542,10 +2625,10 @@ dependencies = [ "deno_permissions", "deno_tls", "fastwebsockets", - "h2 0.4.4", + "h2 0.4.7", "http 1.1.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "once_cell", "rustls-tokio-stream", @@ -2556,7 +2639,7 @@ dependencies = [ [[package]] name = "deno_webstorage" -version = "0.181.0" +version = "0.182.0" dependencies = [ "deno_core", "deno_error", @@ -2642,6 +2725,43 @@ dependencies = [ "v8_valueserializer", ] +[[package]] +name = "denort" +version = "2.1.7" +dependencies = [ + "async-trait", + "bincode", + "deno_cache_dir", + "deno_config", + "deno_core", + "deno_error", + "deno_lib", + "deno_media_type", + "deno_npm", + "deno_package_json", + "deno_path_util", + "deno_resolver", + "deno_runtime", + "deno_semver", + "deno_snapshots", + "deno_terminal 0.2.0", + "import_map", + "indexmap 2.3.0", + "libsui", + "log", + "node_resolver", + "pretty_assertions", + "serde", + "serde_json", + "sys_traits", + "test_server", + "thiserror 2.0.3", + "tokio", + "tokio-util", + "twox-hash", + "url", +] + [[package]] name = "der" version = "0.7.9" @@ -3177,17 +3297,6 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" -[[package]] -name = "errno" -version = 
"0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1" -dependencies = [ - "errno-dragonfly", - "libc", - "winapi", -] - [[package]] name = "errno" version = "0.3.8" @@ -3198,16 +3307,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "error-code" version = "3.2.0" @@ -3280,7 +3379,7 @@ dependencies = [ "base64 0.21.7", "bytes", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "pin-project", "rand", @@ -3527,9 +3626,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -3537,9 +3636,9 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" @@ -3554,15 +3653,15 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", @@ -3571,21 +3670,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", @@ -3802,15 +3901,15 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.4" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "816ec7294445779408f36fe57bc5b7fc1cf59664059096c65f905c1c61f58069" +checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" dependencies = [ + "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "futures-util", "http 1.1.0", "indexmap 2.3.0", "slab", @@ -4133,9 +4232,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.8.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = 
"f2d708df4e7140240a16cd6ab0ab65c972d7433ab77819ea693fde9c43811e2a" [[package]] name = "httpdate" @@ -4175,14 +4274,14 @@ dependencies = [ [[package]] name = "hyper" -version = "1.4.1" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.4", + "h2 0.4.7", "http 1.1.0", "http-body 1.0.0", "httparse", @@ -4202,7 +4301,7 @@ checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" dependencies = [ "futures-util", "http 1.1.0", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "rustls", "rustls-pki-types", @@ -4218,7 +4317,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "pin-project-lite", "tokio", @@ -4236,7 +4335,7 @@ dependencies = [ "futures-util", "http 1.1.0", "http-body 1.0.0", - "hyper 1.4.1", + "hyper 1.6.0", "pin-project-lite", "socket2", "tokio", @@ -4843,9 +4942,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.30.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b694a822684ddb75df4d657029161431bcb4a85c1856952f845b76912bc6fec" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ "cc", "pkg-config", @@ -5181,7 +5280,7 @@ dependencies = [ [[package]] name = "napi_sym" -version = "0.116.0" +version = "0.117.0" dependencies = [ "quote", "serde", @@ -5236,11 +5335,12 @@ dependencies = [ [[package]] name = "node_resolver" -version = "0.24.0" +version = "0.25.0" dependencies = [ "anyhow", "async-trait", "boxed_error", + "dashmap", "deno_error", "deno_media_type", "deno_package_json", @@ -5250,10 
+5350,10 @@ dependencies = [ "once_cell", "path-clean", "regex", + "serde", "serde_json", "sys_traits", "thiserror 2.0.3", - "tokio", "url", ] @@ -5716,20 +5816,20 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.9" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "311fb059dee1a7b802f036316d790138c613a4e8b180c822e3925a662e9f0c95" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror 1.0.64", + "thiserror 2.0.3", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.9" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73541b156d32197eecda1a4014d7f868fd2bcb3c550d5386087cfba442bf69c" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" dependencies = [ "pest", "pest_generator", @@ -5737,9 +5837,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.9" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c35eeed0a3fab112f75165fdc026b3913f4183133f19b49be773ac9ea966e8bd" +checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" dependencies = [ "pest", "pest_meta", @@ -5750,9 +5850,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.7.9" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2adbf29bb9776f28caece835398781ab24435585fe0d4dc1374a61db5accedca" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" dependencies = [ "once_cell", "pest", @@ -6437,11 +6537,11 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "h2 0.4.4", + "h2 0.4.7", "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-rustls", "hyper-util", "ipnet", @@ -6653,7 +6753,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" dependencies = [ "bitflags 2.6.0", - "errno 0.3.8", + "errno", "libc", "linux-raw-sys", "windows-sys 0.52.0", @@ -6988,9 +7088,9 @@ dependencies = [ [[package]] name = "serde_v8" -version = "0.239.0" +version = "0.242.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3caa6d882827148e5d9052d9d8d6d1c9d6ad426ed00cab46cafb8c07a0e7126a" +checksum = "aa9d64ec1fdc3316cb65ca60ccbb5a3a914f014ccb0b5f71fc9280506fd28247" dependencies = [ "deno_error", "num-bigint", @@ -7978,10 +8078,10 @@ dependencies = [ "flate2", "futures", "glob", - "h2 0.4.4", + "h2 0.4.7", "http 1.1.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-util", "jsonc-parser", "lazy-regex", @@ -8272,11 +8372,11 @@ dependencies = [ "axum", "base64 0.22.1", "bytes", - "h2 0.4.4", + "h2 0.4.7", "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.4.1", + "hyper 1.6.0", "hyper-timeout", "hyper-util", "percent-encoding", @@ -8584,9 +8684,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.3" +version = "2.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" dependencies = [ "form_urlencoded", "idna", diff --git a/Cargo.toml b/Cargo.toml index 46318bb82864dc..61367d98f44b6e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,8 @@ members = [ "bench_util", "cli", "cli/lib", + "cli/rt", + "cli/snapshot", "ext/broadcast_channel", "ext/cache", "ext/canvas", @@ -49,19 +51,19 @@ repository = "https://github.com/denoland/deno" [workspace.dependencies] deno_ast = { version = "=0.44.0", features = ["transpiling"] } -deno_core = { version = "0.330.0" } +deno_core = { version = "0.333.0" } 
-deno_bench_util = { version = "0.180.0", path = "./bench_util" } -deno_config = { version = "=0.45.0", features = ["workspace", "sync"] } +deno_bench_util = { version = "0.181.0", path = "./bench_util" } +deno_config = { version = "=0.46.0", features = ["workspace"] } deno_lockfile = "=0.24.0" -deno_media_type = { version = "0.2.3", features = ["module_specifier"] } +deno_media_type = { version = "=0.2.5", features = ["module_specifier"] } deno_npm = "=0.27.2" -deno_path_util = "=0.3.0" -deno_permissions = { version = "0.45.0", path = "./runtime/permissions" } -deno_runtime = { version = "0.194.0", path = "./runtime" } +deno_path_util = "=0.3.1" +deno_permissions = { version = "0.46.0", path = "./runtime/permissions" } +deno_runtime = { version = "0.195.0", path = "./runtime" } deno_semver = "=0.7.1" deno_terminal = "0.2.0" -napi_sym = { version = "0.116.0", path = "./ext/napi/sym" } +napi_sym = { version = "0.117.0", path = "./ext/napi/sym" } test_util = { package = "test_server", path = "./tests/util/server" } denokv_proto = "0.9.0" @@ -70,39 +72,43 @@ denokv_remote = "0.9.0" denokv_sqlite = { default-features = false, version = "0.9.0" } # exts -deno_broadcast_channel = { version = "0.180.0", path = "./ext/broadcast_channel" } -deno_cache = { version = "0.118.0", path = "./ext/cache" } -deno_canvas = { version = "0.55.0", path = "./ext/canvas" } -deno_console = { version = "0.186.0", path = "./ext/console" } -deno_cron = { version = "0.66.0", path = "./ext/cron" } -deno_crypto = { version = "0.200.0", path = "./ext/crypto" } -deno_fetch = { version = "0.210.0", path = "./ext/fetch" } -deno_ffi = { version = "0.173.0", path = "./ext/ffi" } -deno_fs = { version = "0.96.0", path = "./ext/fs" } -deno_http = { version = "0.184.0", path = "./ext/http" } -deno_io = { version = "0.96.0", path = "./ext/io" } -deno_kv = { version = "0.94.0", path = "./ext/kv" } -deno_napi = { version = "0.117.0", path = "./ext/napi" } -deno_net = { version = "0.178.0", path = "./ext/net" 
} -deno_node = { version = "0.124.0", path = "./ext/node" } -deno_os = { version = "0.3.0", path = "./ext/os" } -deno_telemetry = { version = "0.8.0", path = "./ext/telemetry" } -deno_tls = { version = "0.173.0", path = "./ext/tls" } -deno_url = { version = "0.186.0", path = "./ext/url" } -deno_web = { version = "0.217.0", path = "./ext/web" } -deno_webgpu = { version = "0.153.0", path = "./ext/webgpu" } -deno_webidl = { version = "0.186.0", path = "./ext/webidl" } -deno_websocket = { version = "0.191.0", path = "./ext/websocket" } -deno_webstorage = { version = "0.181.0", path = "./ext/webstorage" } +deno_broadcast_channel = { version = "0.181.0", path = "./ext/broadcast_channel" } +deno_cache = { version = "0.119.0", path = "./ext/cache" } +deno_canvas = { version = "0.56.0", path = "./ext/canvas" } +deno_console = { version = "0.187.0", path = "./ext/console" } +deno_cron = { version = "0.67.0", path = "./ext/cron" } +deno_crypto = { version = "0.201.0", path = "./ext/crypto" } +deno_fetch = { version = "0.211.0", path = "./ext/fetch" } +deno_ffi = { version = "0.174.0", path = "./ext/ffi" } +deno_fs = { version = "0.97.0", path = "./ext/fs" } +deno_http = { version = "0.185.0", path = "./ext/http" } +deno_io = { version = "0.97.0", path = "./ext/io" } +deno_kv = { version = "0.95.0", path = "./ext/kv" } +deno_napi = { version = "0.118.0", path = "./ext/napi" } +deno_net = { version = "0.179.0", path = "./ext/net" } +deno_node = { version = "0.125.0", path = "./ext/node" } +deno_os = { version = "0.4.0", path = "./ext/os" } +deno_process = { version = "0.2.0", path = "./ext/process" } +deno_telemetry = { version = "0.9.0", path = "./ext/telemetry" } +deno_tls = { version = "0.174.0", path = "./ext/tls" } +deno_url = { version = "0.187.0", path = "./ext/url" } +deno_web = { version = "0.218.0", path = "./ext/web" } +deno_webgpu = { version = "0.154.0", path = "./ext/webgpu" } +deno_webidl = { version = "0.187.0", path = "./ext/webidl" } +deno_websocket = { 
version = "0.192.0", path = "./ext/websocket" } +deno_webstorage = { version = "0.182.0", path = "./ext/webstorage" } # workspace libraries -deno_lib = { version = "0.2.0", path = "./cli/lib" } -deno_npm_cache = { version = "0.5.0", path = "./resolvers/npm_cache" } -deno_resolver = { version = "0.17.0", path = "./resolvers/deno" } -node_resolver = { version = "0.24.0", path = "./resolvers/node" } +deno_lib = { version = "0.3.0", path = "./cli/lib" } +deno_npm_cache = { version = "0.6.0", path = "./resolvers/npm_cache" } +deno_resolver = { version = "0.18.0", path = "./resolvers/deno" } +deno_snapshots = { version = "0.2.0", path = "./cli/snapshot" } +node_resolver = { version = "0.25.0", path = "./resolvers/node" } aes = "=0.8.3" anyhow = "1.0.57" +async-once-cell = "0.5.4" +async-stream = "0.3" async-trait = "0.1.73" base32 = "=0.5.1" base64 = "0.21.7" @@ -118,12 +124,13 @@ cbc = { version = "=0.1.2", features = ["alloc"] } chrono = { version = "0.4", default-features = false, features = ["std", "serde"] } color-print = "0.3.5" console_static_text = "=0.8.1" +ctr = { version = "0.9.2", features = ["alloc"] } dashmap = "5.5.3" data-encoding = "2.3.3" data-url = "=0.3.1" -deno_cache_dir = "=0.16.0" -deno_error = "=0.5.3" -deno_package_json = { version = "0.4.0", default-features = false } +deno_cache_dir = "=0.17.0" +deno_error = "=0.5.5" +deno_package_json = { version = "=0.4.2", default-features = false } deno_unsync = "0.4.2" dlopen2 = "0.6.1" ecb = "=0.1.2" @@ -137,14 +144,14 @@ flate2 = { version = "1.0.30", default-features = false } fs3 = "0.5.0" futures = "0.3.21" glob = "0.3.1" -h2 = "0.4.4" +h2 = "0.4.6" hickory-resolver = { version = "0.25.0-alpha.4", features = ["tokio-runtime", "serde"] } http = "1.0" http-body = "1.0" http-body-util = "0.1.2" http_v02 = { package = "http", version = "0.2.9" } httparse = "1.8.0" -hyper = { version = "1.4.1", features = ["full"] } +hyper = { version = "1.6.0", features = ["full"] } hyper-rustls = { version = "0.27.2", 
default-features = false, features = ["http1", "http2", "tls12", "ring"] } hyper-util = { version = "0.1.10", features = ["tokio", "client", "client-legacy", "server", "server-auto"] } hyper_v014 = { package = "hyper", version = "0.14.26", features = ["runtime", "http1"] } @@ -153,6 +160,7 @@ ipnet = "2.3" jsonc-parser = { version = "=0.26.2", features = ["serde"] } lazy-regex = "3" libc = "0.2.168" +libsui = "0.5.0" libz-sys = { version = "1.1.20", default-features = false } log = { version = "0.4.20", features = ["kv"] } lsp-types = "=0.97.0" # used by tower-lsp and "proposed" feature is unstable in patch releases @@ -213,6 +221,7 @@ tower-service = "0.3.2" twox-hash = "=1.6.3" url = { version = "2.5", features = ["serde", "expose_internals"] } uuid = { version = "1.3.0", features = ["v4"] } +walkdir = "=2.3.2" webpki-root-certs = "0.26.5" webpki-roots = "0.26" which = "6" diff --git a/README.md b/README.md index 19d4fa8a120129..ca71529e28a60a 100644 --- a/README.md +++ b/README.md @@ -6,8 +6,8 @@ the deno mascot dinosaur standing in the rain -[Deno](https://www.deno.com) -([/ˈdiːnoʊ/](http://ipa-reader.xyz/?text=%CB%88di%CB%90no%CA%8A), pronounced +[Deno](https://deno.com) +([/ˈdiːnoʊ/](https://ipa-reader.com/?text=%CB%88di%CB%90no%CA%8A), pronounced `dee-no`) is a JavaScript, TypeScript, and WebAssembly runtime with secure defaults and a great developer experience. It's built on [V8](https://v8.dev/), [Rust](https://www.rust-lang.org/), and [Tokio](https://tokio.rs/). 
diff --git a/Releases.md b/Releases.md index 1fc6ebd1df1039..d21aef3b281f15 100644 --- a/Releases.md +++ b/Releases.md @@ -6,6 +6,19 @@ https://github.com/denoland/deno/releases We also have one-line install commands at: https://github.com/denoland/deno_install +### 2.1.7 / 2025.01.21 + +- fix(deps): update yanked crates (#27512) +- fix(ext/node): GCM auth tag check on DechiperIv#final (#27733) +- fix(ext/node): add FileHandle#sync (#27677) +- fix(ext/node): propagate socket error to client request object (#27678) +- fix(ext/node): tls.connect regression (#27707) +- fix(ext/os): pass SignalState to web worker (#27741) +- fix(install/global): remove importMap field from specified config file + (#27744) +- fix: use 'getrandom' feature for 'sys_traits' crate +- perf(compile): remove swc from denort (#27721) + ### 2.1.6 / 2025.01.16 - fix(check/lsp): correctly resolve compilerOptions.types (#27686) diff --git a/bench_util/Cargo.toml b/bench_util/Cargo.toml index e2f1204eb26834..3d54b07d995320 100644 --- a/bench_util/Cargo.toml +++ b/bench_util/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_bench_util" -version = "0.180.0" +version = "0.181.0" authors.workspace = true edition.workspace = true license.workspace = true diff --git a/cli/Cargo.toml b/cli/Cargo.toml index d71047cc63e6f8..e8671d92111307 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno" -version = "2.1.6" +version = "2.1.7" authors.workspace = true default-run = "deno" edition.workspace = true @@ -16,11 +16,6 @@ name = "deno" path = "main.rs" doc = false -[[bin]] -name = "denort" -path = "mainrt.rs" -doc = false - [[test]] name = "integration" path = "integration_tests_runner.rs" @@ -49,7 +44,7 @@ dhat-heap = ["dhat"] upgrade = [] # A dev feature to disable creations and loading of snapshots in favor of # loading JS sources at runtime. 
-hmr = ["deno_runtime/hmr"] +hmr = ["deno_runtime/hmr", "deno_snapshots/disable"] # Vendor zlib as zlib-ng __vendored_zlib_ng = ["flate2/zlib-ng-compat", "libz-sys/zlib-ng"] @@ -60,36 +55,40 @@ lazy-regex.workspace = true serde.workspace = true serde_json.workspace = true zstd.workspace = true -glibc_version = "0.1.2" flate2 = { workspace = true, features = ["default"] } deno_error.workspace = true +[target.'cfg(unix)'.build-dependencies] +glibc_version = "0.1.2" + [target.'cfg(windows)'.build-dependencies] winapi.workspace = true winres.workspace = true [dependencies] deno_ast = { workspace = true, features = ["bundler", "cjs", "codegen", "proposal", "react", "sourcemap", "transforms", "typescript", "view", "visit"] } -deno_cache_dir.workspace = true -deno_config.workspace = true +deno_cache_dir = { workspace = true, features = ["sync"] } +deno_config = { workspace = true, features = ["sync", "workspace"] } deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_doc = { version = "=0.164.0", features = ["rust", "comrak"] } deno_error.workspace = true -deno_graph = { version = "=0.87.0" } +deno_graph = { version = "=0.87.2" } deno_lib.workspace = true -deno_lint = { version = "=0.68.2", features = ["docs"] } +deno_lint = { version = "0.70.0" } deno_lockfile.workspace = true +deno_media_type = { workspace = true, features = ["data_url", "decoding", "module_specifier"] } deno_npm.workspace = true deno_npm_cache.workspace = true -deno_package_json.workspace = true +deno_package_json = { workspace = true, features = ["sync"] } deno_path_util.workspace = true deno_resolver = { workspace = true, features = ["sync"] } deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] } deno_semver.workspace = true +deno_snapshots = { workspace = true } deno_task_shell = "=0.20.2" deno_telemetry.workspace = true deno_terminal.workspace = true -libsui = "0.5.0" +libsui.workspace = true node_resolver.workspace = true anstream 
= "0.6.14" @@ -115,7 +114,6 @@ dprint-plugin-json = "=0.19.4" dprint-plugin-jupyter = "=0.1.5" dprint-plugin-markdown = "=0.17.8" dprint-plugin-typescript = "=0.93.3" -env_logger = "=0.10.0" fancy-regex = "=0.10.0" faster-hex.workspace = true # If you disable the default __vendored_zlib_ng feature above, you _must_ be able to link against `-lz`. @@ -156,7 +154,6 @@ rustyline-derive = "=0.7.0" serde.workspace = true serde_repr.workspace = true sha2.workspace = true -shell-escape = "=0.1.5" spki = { version = "0.7", features = ["pem"] } sqlformat = "=0.3.2" strsim = "0.11.1" @@ -173,7 +170,7 @@ tracing = { version = "0.1", features = ["log", "default"] } twox-hash.workspace = true typed-arena = "=2.0.2" uuid = { workspace = true, features = ["serde"] } -walkdir = "=2.3.2" +walkdir.workspace = true which.workspace = true zeromq.workspace = true zip = { version = "2.1.6", default-features = false, features = ["deflate-flate2"] } @@ -185,6 +182,7 @@ winapi = { workspace = true, features = ["knownfolders", "mswsock", "objbase", " [target.'cfg(unix)'.dependencies] nix.workspace = true +shell-escape = "=0.1.5" [dev-dependencies] deno_bench_util.workspace = true diff --git a/cli/args/deno_json.rs b/cli/args/deno_json.rs index c27b1d392443e3..3121380251dbfa 100644 --- a/cli/args/deno_json.rs +++ b/cli/args/deno_json.rs @@ -1,12 +1,28 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
use std::collections::HashSet; +use std::sync::Arc; -use deno_config::deno_json::TsConfigForEmit; +use deno_ast::SourceMapOption; +use deno_config::deno_json::CompilerOptionsParseError; +use deno_config::deno_json::TsConfig; +use deno_config::deno_json::TsConfigType; +use deno_config::deno_json::TsConfigWithIgnoredOptions; +use deno_config::deno_json::TsTypeLib; +use deno_config::workspace::Workspace; +use deno_config::workspace::WorkspaceDirectory; +use deno_core::error::AnyError; use deno_core::serde_json; +use deno_core::unsync::sync::AtomicFlag; +use deno_core::url::Url; +use deno_lib::util::hash::FastInsecureHasher; +use deno_lint::linter::LintConfig as DenoLintConfig; use deno_semver::jsr::JsrDepPackageReq; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; +use once_cell::sync::OnceCell; + +use crate::util::collections::FolderScopedMap; pub fn import_map_deps( import_map: &serde_json::Value, @@ -102,17 +118,261 @@ fn value_to_dep_req(value: &str) -> Option { } } -pub fn check_warn_tsconfig(ts_config: &TsConfigForEmit) { - if let Some(ignored_options) = &ts_config.maybe_ignored_options { - log::warn!("{}", ignored_options); +fn check_warn_tsconfig( + ts_config: &TsConfigWithIgnoredOptions, + logged_warnings: &LoggedWarnings, +) { + for ignored_options in &ts_config.ignored_options { + if ignored_options + .maybe_specifier + .as_ref() + .map(|s| logged_warnings.folders.insert(s.clone())) + .unwrap_or(true) + { + log::warn!("{}", ignored_options); + } } let serde_json::Value::Object(obj) = &ts_config.ts_config.0 else { return; }; - if obj.get("experimentalDecorators") == Some(&serde_json::Value::Bool(true)) { + if obj.get("experimentalDecorators") == Some(&serde_json::Value::Bool(true)) + && logged_warnings.experimental_decorators.raise() + { log::warn!( - "{} experimentalDecorators compiler option is deprecated and may be removed at any time", - deno_runtime::colors::yellow("Warning"), - ); + "{} experimentalDecorators 
compiler option is deprecated and may be removed at any time", + deno_runtime::colors::yellow("Warning"), + ); } } + +#[derive(Debug)] +pub struct TranspileAndEmitOptions { + pub transpile: deno_ast::TranspileOptions, + pub emit: deno_ast::EmitOptions, + // stored ahead of time so we don't have to recompute this a lot + pub pre_computed_hash: u64, +} + +#[derive(Debug, Default)] +struct LoggedWarnings { + experimental_decorators: AtomicFlag, + folders: dashmap::DashSet, +} + +#[derive(Default, Debug)] +struct MemoizedValues { + deno_window_check_tsconfig: OnceCell>, + deno_worker_check_tsconfig: OnceCell>, + emit_tsconfig: OnceCell>, + transpile_options: OnceCell>, +} + +#[derive(Debug)] +pub struct TsConfigFolderInfo { + pub dir: WorkspaceDirectory, + logged_warnings: Arc, + memoized: MemoizedValues, +} + +impl TsConfigFolderInfo { + pub fn lib_tsconfig( + &self, + lib: TsTypeLib, + ) -> Result<&Arc, CompilerOptionsParseError> { + let cell = match lib { + TsTypeLib::DenoWindow => &self.memoized.deno_window_check_tsconfig, + TsTypeLib::DenoWorker => &self.memoized.deno_worker_check_tsconfig, + }; + + cell.get_or_try_init(|| { + let tsconfig_result = self + .dir + .to_resolved_ts_config(TsConfigType::Check { lib })?; + check_warn_tsconfig(&tsconfig_result, &self.logged_warnings); + Ok(Arc::new(tsconfig_result.ts_config)) + }) + } + + pub fn emit_tsconfig( + &self, + ) -> Result<&Arc, CompilerOptionsParseError> { + self.memoized.emit_tsconfig.get_or_try_init(|| { + let tsconfig_result = + self.dir.to_resolved_ts_config(TsConfigType::Emit)?; + check_warn_tsconfig(&tsconfig_result, &self.logged_warnings); + Ok(Arc::new(tsconfig_result.ts_config)) + }) + } + + pub fn transpile_options( + &self, + ) -> Result<&Arc, CompilerOptionsParseError> { + self.memoized.transpile_options.get_or_try_init(|| { + let ts_config = self.emit_tsconfig()?; + ts_config_to_transpile_and_emit_options(ts_config.as_ref().clone()) + .map(Arc::new) + .map_err(|source| CompilerOptionsParseError { 
+ specifier: self + .dir + .maybe_deno_json() + .map(|d| d.specifier.clone()) + .unwrap_or_else(|| { + // will never happen because each dir should have a + // deno.json if we got here + debug_assert!(false); + self.dir.dir_url().as_ref().clone() + }), + source, + }) + }) + } +} + +#[derive(Debug)] +pub struct TsConfigResolver { + map: FolderScopedMap, +} + +impl TsConfigResolver { + pub fn from_workspace(workspace: &Arc) -> Self { + // separate the workspace into directories that have a tsconfig + let root_dir = workspace.resolve_member_dir(workspace.root_dir()); + let logged_warnings = Arc::new(LoggedWarnings::default()); + let mut map = FolderScopedMap::new(TsConfigFolderInfo { + dir: root_dir, + logged_warnings: logged_warnings.clone(), + memoized: Default::default(), + }); + for (url, folder) in workspace.config_folders() { + let folder_has_compiler_options = folder + .deno_json + .as_ref() + .map(|d| d.json.compiler_options.is_some()) + .unwrap_or(false); + if url != workspace.root_dir() && folder_has_compiler_options { + let dir = workspace.resolve_member_dir(url); + map.insert( + url.clone(), + TsConfigFolderInfo { + dir, + logged_warnings: logged_warnings.clone(), + memoized: Default::default(), + }, + ); + } + } + Self { map } + } + + pub fn check_js_for_specifier(&self, specifier: &Url) -> bool { + self.folder_for_specifier(specifier).dir.check_js() + } + + pub fn deno_lint_config( + &self, + specifier: &Url, + ) -> Result { + let transpile_options = + &self.transpile_and_emit_options(specifier)?.transpile; + // don't bother storing this in a cell because deno_lint requires an owned value + Ok(DenoLintConfig { + default_jsx_factory: (!transpile_options.jsx_automatic) + .then(|| transpile_options.jsx_factory.clone()), + default_jsx_fragment_factory: (!transpile_options.jsx_automatic) + .then(|| transpile_options.jsx_fragment_factory.clone()), + }) + } + + pub fn transpile_and_emit_options( + &self, + specifier: &Url, + ) -> Result<&Arc, 
CompilerOptionsParseError> { + let value = self.map.get_for_specifier(specifier); + value.transpile_options() + } + + pub fn folder_for_specifier(&self, specifier: &Url) -> &TsConfigFolderInfo { + self.folder_for_specifier_str(specifier.as_str()) + } + + pub fn folder_for_specifier_str( + &self, + specifier: &str, + ) -> &TsConfigFolderInfo { + self.map.get_for_specifier_str(specifier) + } + + pub fn folder_count(&self) -> usize { + self.map.count() + } +} + +impl deno_graph::CheckJsResolver for TsConfigResolver { + fn resolve(&self, specifier: &deno_graph::ModuleSpecifier) -> bool { + self.check_js_for_specifier(specifier) + } +} + +fn ts_config_to_transpile_and_emit_options( + config: deno_config::deno_json::TsConfig, +) -> Result { + let options: deno_config::deno_json::EmitConfigOptions = + serde_json::from_value(config.0)?; + let imports_not_used_as_values = + match options.imports_not_used_as_values.as_str() { + "preserve" => deno_ast::ImportsNotUsedAsValues::Preserve, + "error" => deno_ast::ImportsNotUsedAsValues::Error, + _ => deno_ast::ImportsNotUsedAsValues::Remove, + }; + let (transform_jsx, jsx_automatic, jsx_development, precompile_jsx) = + match options.jsx.as_str() { + "react" => (true, false, false, false), + "react-jsx" => (true, true, false, false), + "react-jsxdev" => (true, true, true, false), + "precompile" => (false, false, false, true), + _ => (false, false, false, false), + }; + let source_map = if options.inline_source_map { + SourceMapOption::Inline + } else if options.source_map { + SourceMapOption::Separate + } else { + SourceMapOption::None + }; + let transpile = deno_ast::TranspileOptions { + use_ts_decorators: options.experimental_decorators, + use_decorators_proposal: !options.experimental_decorators, + emit_metadata: options.emit_decorator_metadata, + imports_not_used_as_values, + jsx_automatic, + jsx_development, + jsx_factory: options.jsx_factory, + jsx_fragment_factory: options.jsx_fragment_factory, + jsx_import_source: 
options.jsx_import_source, + precompile_jsx, + precompile_jsx_skip_elements: options.jsx_precompile_skip_elements, + precompile_jsx_dynamic_props: None, + transform_jsx, + var_decl_imports: false, + // todo(dsherret): support verbatim_module_syntax here properly + verbatim_module_syntax: false, + }; + let emit = deno_ast::EmitOptions { + inline_sources: options.inline_sources, + remove_comments: false, + source_map, + source_map_base: None, + source_map_file: None, + }; + let transpile_and_emit_options_hash = { + let mut hasher = FastInsecureHasher::new_without_deno_version(); + hasher.write_hashable(&transpile); + hasher.write_hashable(&emit); + hasher.finish() + }; + Ok(TranspileAndEmitOptions { + transpile, + emit, + pre_computed_hash: transpile_and_emit_options_hash, + }) +} diff --git a/cli/args/flags.rs b/cli/args/flags.rs index fb64b4eeaa9598..7b640a446037cf 100644 --- a/cli/args/flags.rs +++ b/cli/args/flags.rs @@ -31,6 +31,10 @@ use deno_core::error::AnyError; use deno_core::resolve_url_or_path; use deno_core::url::Url; use deno_graph::GraphKind; +use deno_lib::args::CaData; +use deno_lib::args::UnstableConfig; +use deno_lib::version::DENO_VERSION_INFO; +use deno_npm::NpmSystemInfo; use deno_path_util::normalize_path; use deno_path_util::url_to_file_path; use deno_runtime::deno_permissions::SysDescriptor; @@ -497,6 +501,52 @@ impl DenoSubcommand { | Self::Lsp ) } + + pub fn npm_system_info(&self) -> NpmSystemInfo { + match self { + DenoSubcommand::Compile(CompileFlags { + target: Some(target), + .. 
+ }) => { + // the values of NpmSystemInfo align with the possible values for the + // `arch` and `platform` fields of Node.js' `process` global: + // https://nodejs.org/api/process.html + match target.as_str() { + "aarch64-apple-darwin" => NpmSystemInfo { + os: "darwin".into(), + cpu: "arm64".into(), + }, + "aarch64-unknown-linux-gnu" => NpmSystemInfo { + os: "linux".into(), + cpu: "arm64".into(), + }, + "x86_64-apple-darwin" => NpmSystemInfo { + os: "darwin".into(), + cpu: "x64".into(), + }, + "x86_64-unknown-linux-gnu" => NpmSystemInfo { + os: "linux".into(), + cpu: "x64".into(), + }, + "x86_64-pc-windows-msvc" => NpmSystemInfo { + os: "win32".into(), + cpu: "x64".into(), + }, + value => { + log::warn!( + concat!( + "Not implemented npm system info for target '{}'. Using current ", + "system default. This may impact architecture specific dependencies." + ), + value, + ); + NpmSystemInfo::default() + } + } + } + _ => NpmSystemInfo::default(), + } + } } impl Default for DenoSubcommand { @@ -546,15 +596,6 @@ impl Default for TypeCheckMode { } } -#[derive(Clone, Debug, Eq, PartialEq)] -pub enum CaData { - /// The string is a file path - File(String), - /// This variant is not exposed as an option in the CLI, it is used internally - /// for standalone binaries. 
- Bytes(Vec), -} - // Info needed to run NPM lifecycle scripts #[derive(Clone, Debug, Eq, PartialEq, Default)] pub struct LifecycleScriptsConfig { @@ -582,19 +623,6 @@ fn parse_packages_allowed_scripts(s: &str) -> Result { } } -#[derive( - Clone, Default, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize, -)] -pub struct UnstableConfig { - // TODO(bartlomieju): remove in Deno 2.5 - pub legacy_flag_enabled: bool, // --unstable - pub bare_node_builtins: bool, - pub detect_cjs: bool, - pub sloppy_imports: bool, - pub npm_lazy_caching: bool, - pub features: Vec, // --unstabe-kv --unstable-cron -} - #[derive(Clone, Debug, Eq, PartialEq, Default)] pub struct InternalFlags { /// Used when the language server is configured with an @@ -1484,14 +1512,15 @@ fn handle_repl_flags(flags: &mut Flags, repl_flags: ReplFlags) { } pub fn clap_root() -> Command { + debug_assert_eq!(DENO_VERSION_INFO.typescript, deno_snapshots::TS_VERSION); let long_version = format!( "{} ({}, {}, {})\nv8 {}\ntypescript {}", - crate::version::DENO_VERSION_INFO.deno, - crate::version::DENO_VERSION_INFO.release_channel.name(), + DENO_VERSION_INFO.deno, + DENO_VERSION_INFO.release_channel.name(), env!("PROFILE"), env!("TARGET"), deno_core::v8::VERSION_STRING, - crate::version::DENO_VERSION_INFO.typescript + DENO_VERSION_INFO.typescript ); run_args(Command::new("deno"), true) @@ -1507,7 +1536,7 @@ pub fn clap_root() -> Command { ) .color(ColorChoice::Auto) .term_width(800) - .version(crate::version::DENO_VERSION_INFO.deno) + .version(DENO_VERSION_INFO.deno) .long_version(long_version) .disable_version_flag(true) .disable_help_flag(true) @@ -4274,7 +4303,7 @@ impl CommandExt for Command { let mut cmd = self.arg( Arg::new("unstable") .long("unstable") - .help(cstr!("Enable all unstable features and APIs. Instead of using this flag, consider enabling individual unstable features + .help(cstr!("The `--unstable` flag has been deprecated. 
Use granular `--unstable-*` flags instead To view the list of individual unstable feature flags, run this command again with --help=unstable")) .action(ArgAction::SetTrue) .hide(matches!(cfg, UnstableArgsConfig::None)) diff --git a/cli/args/import_map.rs b/cli/args/import_map.rs deleted file mode 100644 index ff7e42ef20d061..00000000000000 --- a/cli/args/import_map.rs +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -use deno_core::error::AnyError; -use deno_core::serde_json; -use deno_core::url::Url; - -use crate::file_fetcher::CliFileFetcher; -use crate::file_fetcher::TextDecodedFile; - -pub async fn resolve_import_map_value_from_specifier( - specifier: &Url, - file_fetcher: &CliFileFetcher, -) -> Result { - if specifier.scheme() == "data" { - let data_url_text = - deno_graph::source::RawDataUrl::parse(specifier)?.decode()?; - Ok(serde_json::from_str(&data_url_text)?) - } else { - let file = TextDecodedFile::decode( - file_fetcher.fetch_bypass_permissions(specifier).await?, - )?; - Ok(serde_json::from_str(&file.source)?) 
- } -} diff --git a/cli/args/lockfile.rs b/cli/args/lockfile.rs index 976992aac8287b..5fa6a49c43a6d2 100644 --- a/cli/args/lockfile.rs +++ b/cli/args/lockfile.rs @@ -61,11 +61,13 @@ impl<'a, T> std::ops::DerefMut for Guard<'a, T> { } #[derive(Debug, thiserror::Error, deno_error::JsError)] -#[error("Failed writing lockfile")] -#[class(inherit)] -struct AtomicWriteFileWithRetriesError { - #[source] - source: std::io::Error, +pub enum AtomicWriteFileWithRetriesError { + #[class(inherit)] + #[error(transparent)] + Changed(JsErrorBox), + #[class(inherit)] + #[error("Failed writing lockfile")] + Io(#[source] std::io::Error), } impl CliLockfile { @@ -87,12 +89,16 @@ impl CliLockfile { self.lockfile.lock().overwrite } - pub fn write_if_changed(&self) -> Result<(), JsErrorBox> { + pub fn write_if_changed( + &self, + ) -> Result<(), AtomicWriteFileWithRetriesError> { if self.skip_write { return Ok(()); } - self.error_if_changed()?; + self + .error_if_changed() + .map_err(AtomicWriteFileWithRetriesError::Changed)?; let mut lockfile = self.lockfile.lock(); let Some(bytes) = lockfile.resolve_write_bytes() else { return Ok(()); // nothing to do @@ -105,9 +111,7 @@ impl CliLockfile { &bytes, cache::CACHE_PERM, ) - .map_err(|source| { - JsErrorBox::from_err(AtomicWriteFileWithRetriesError { source }) - })?; + .map_err(AtomicWriteFileWithRetriesError::Io)?; lockfile.has_content_changed = false; Ok(()) } diff --git a/cli/args/mod.rs b/cli/args/mod.rs index 29b493046f14fb..03ca2814a7d091 100644 --- a/cli/args/mod.rs +++ b/cli/args/mod.rs @@ -3,17 +3,12 @@ pub mod deno_json; mod flags; mod flags_net; -mod import_map; mod lockfile; mod package_json; use std::borrow::Cow; use std::collections::HashMap; use std::env; -use std::io::BufReader; -use std::io::Cursor; -use std::io::Read; -use std::io::Seek; use std::net::SocketAddr; use std::num::NonZeroUsize; use std::path::Path; @@ -22,11 +17,9 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; -use 
deno_ast::SourceMapOption; use deno_cache_dir::file_fetcher::CacheSetting; pub use deno_config::deno_json::BenchConfig; pub use deno_config::deno_json::ConfigFile; -use deno_config::deno_json::ConfigFileError; use deno_config::deno_json::FmtConfig; pub use deno_config::deno_json::FmtOptionsConfig; use deno_config::deno_json::LintConfig; @@ -35,21 +28,11 @@ use deno_config::deno_json::NodeModulesDirMode; pub use deno_config::deno_json::ProseWrap; use deno_config::deno_json::TestConfig; pub use deno_config::deno_json::TsConfig; -pub use deno_config::deno_json::TsConfigForEmit; -pub use deno_config::deno_json::TsConfigType; pub use deno_config::deno_json::TsTypeLib; pub use deno_config::glob::FilePatterns; -use deno_config::workspace::CreateResolverOptions; -use deno_config::workspace::FolderConfigs; -use deno_config::workspace::PackageJsonDepResolution; -use deno_config::workspace::VendorEnablement; use deno_config::workspace::Workspace; use deno_config::workspace::WorkspaceDirectory; -use deno_config::workspace::WorkspaceDirectoryEmptyOptions; -use deno_config::workspace::WorkspaceDiscoverOptions; -use deno_config::workspace::WorkspaceDiscoverStart; use deno_config::workspace::WorkspaceLintConfig; -use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; @@ -57,71 +40,31 @@ use deno_core::resolve_url_or_path; use deno_core::serde_json; use deno_core::url::Url; use deno_graph::GraphKind; -pub use deno_json::check_warn_tsconfig; -use deno_lib::cache::DenoDirProvider; -use deno_lib::env::has_flag_env_var; +use deno_lib::args::has_flag_env_var; +use deno_lib::args::npm_pkg_req_ref_to_binary_command; +use deno_lib::args::CaData; +use deno_lib::args::NPM_PROCESS_STATE; +use deno_lib::version::DENO_VERSION_INFO; use deno_lib::worker::StorageKeyResolver; -use deno_lint::linter::LintConfig as DenoLintConfig; -use deno_npm::npm_rc::NpmRc; -use deno_npm::npm_rc::ResolvedNpmRc; -use 
deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_npm::NpmSystemInfo; -use deno_path_util::normalize_path; use deno_runtime::deno_permissions::PermissionsOptions; -use deno_runtime::deno_tls::deno_native_certs::load_native_certs; -use deno_runtime::deno_tls::rustls; -use deno_runtime::deno_tls::rustls::RootCertStore; -use deno_runtime::deno_tls::rustls_pemfile; -use deno_runtime::deno_tls::webpki_roots; use deno_runtime::inspector_server::InspectorServer; use deno_semver::npm::NpmPackageReqReference; use deno_semver::StackString; use deno_telemetry::OtelConfig; -use deno_telemetry::OtelRuntimeConfig; use deno_terminal::colors; use dotenvy::from_filename; pub use flags::*; -use import_map::resolve_import_map_value_from_specifier; +pub use lockfile::AtomicWriteFileWithRetriesError; pub use lockfile::CliLockfile; pub use lockfile::CliLockfileReadFromPathOptions; use once_cell::sync::Lazy; pub use package_json::NpmInstallDepsProvider; pub use package_json::PackageJsonDepValueParseWithLocationError; -use serde::Deserialize; -use serde::Serialize; -use sys_traits::EnvHomeDir; +use sys_traits::FsRead; use thiserror::Error; -use crate::file_fetcher::CliFileFetcher; use crate::sys::CliSys; -use crate::util::fs::canonicalize_path_maybe_not_exists; -use crate::version; - -pub fn npm_registry_url() -> &'static Url { - static NPM_REGISTRY_DEFAULT_URL: Lazy = Lazy::new(|| { - let env_var_name = "NPM_CONFIG_REGISTRY"; - if let Ok(registry_url) = std::env::var(env_var_name) { - // ensure there is a trailing slash for the directory - let registry_url = format!("{}/", registry_url.trim_end_matches('/')); - match Url::parse(®istry_url) { - Ok(url) => { - return url; - } - Err(err) => { - log::debug!( - "Invalid {} environment variable: {:#}", - env_var_name, - err, - ); - } - } - } - - Url::parse("https://registry.npmjs.org").unwrap() - }); - - &NPM_REGISTRY_DEFAULT_URL -} pub static DENO_DISABLE_PEDANTIC_NODE_WARNINGS: Lazy = Lazy::new(|| { 
std::env::var("DENO_DISABLE_PEDANTIC_NODE_WARNINGS") @@ -165,60 +108,51 @@ pub fn jsr_api_url() -> &'static Url { &JSR_API_URL } -pub fn ts_config_to_transpile_and_emit_options( - config: deno_config::deno_json::TsConfig, -) -> Result<(deno_ast::TranspileOptions, deno_ast::EmitOptions), AnyError> { - let options: deno_config::deno_json::EmitConfigOptions = - serde_json::from_value(config.0) - .context("Failed to parse compilerOptions")?; - let imports_not_used_as_values = - match options.imports_not_used_as_values.as_str() { - "preserve" => deno_ast::ImportsNotUsedAsValues::Preserve, - "error" => deno_ast::ImportsNotUsedAsValues::Error, - _ => deno_ast::ImportsNotUsedAsValues::Remove, - }; - let (transform_jsx, jsx_automatic, jsx_development, precompile_jsx) = - match options.jsx.as_str() { - "react" => (true, false, false, false), - "react-jsx" => (true, true, false, false), - "react-jsxdev" => (true, true, true, false), - "precompile" => (false, false, false, true), - _ => (false, false, false, false), - }; - let source_map = if options.inline_source_map { - SourceMapOption::Inline - } else if options.source_map { - SourceMapOption::Separate - } else { - SourceMapOption::None - }; - Ok(( - deno_ast::TranspileOptions { - use_ts_decorators: options.experimental_decorators, - use_decorators_proposal: !options.experimental_decorators, - emit_metadata: options.emit_decorator_metadata, - imports_not_used_as_values, - jsx_automatic, - jsx_development, - jsx_factory: options.jsx_factory, - jsx_fragment_factory: options.jsx_fragment_factory, - jsx_import_source: options.jsx_import_source, - precompile_jsx, - precompile_jsx_skip_elements: options.jsx_precompile_skip_elements, - precompile_jsx_dynamic_props: None, - transform_jsx, - var_decl_imports: false, - // todo(dsherret): support verbatim_module_syntax here properly - verbatim_module_syntax: false, - }, - deno_ast::EmitOptions { - inline_sources: options.inline_sources, - remove_comments: false, - source_map, - 
source_map_base: None, - source_map_file: None, - }, - )) +#[derive(Debug, Clone)] +pub struct ExternalImportMap { + pub path: PathBuf, + pub value: serde_json::Value, +} + +#[derive(Debug)] +pub struct WorkspaceExternalImportMapLoader { + sys: CliSys, + workspace: Arc, + maybe_external_import_map: + once_cell::sync::OnceCell>, +} + +impl WorkspaceExternalImportMapLoader { + pub fn new(sys: CliSys, workspace: Arc) -> Self { + Self { + sys, + workspace, + maybe_external_import_map: Default::default(), + } + } + + pub fn get_or_load(&self) -> Result, AnyError> { + self + .maybe_external_import_map + .get_or_try_init(|| { + let Some(deno_json) = self.workspace.root_deno_json() else { + return Ok(None); + }; + if deno_json.is_an_import_map() { + return Ok(None); + } + let Some(path) = deno_json.to_import_map_path()? else { + return Ok(None); + }; + let contents = + self.sys.fs_read_to_string(&path).with_context(|| { + format!("Unable to read import map at '{}'", path.display()) + })?; + let value = serde_json::from_str(&contents)?; + Ok(Some(ExternalImportMap { path, value })) + }) + .map(|v| v.as_ref()) + } } pub struct WorkspaceBenchOptions { @@ -487,303 +421,26 @@ fn resolve_lint_rules_options( } } -pub fn discover_npmrc_from_workspace( - workspace: &Workspace, -) -> Result<(Arc, Option), AnyError> { - let root_folder = workspace.root_folder_configs(); - discover_npmrc( - root_folder.pkg_json.as_ref().map(|p| p.path.clone()), - root_folder.deno_json.as_ref().and_then(|cf| { - if cf.specifier.scheme() == "file" { - Some(cf.specifier.to_file_path().unwrap()) - } else { - None - } - }), - ) -} - -/// Discover `.npmrc` file - currently we only support it next to `package.json` -/// or next to `deno.json`. -/// -/// In the future we will need to support it in user directory or global directory -/// as per https://docs.npmjs.com/cli/v10/configuring-npm/npmrc#files. 
-fn discover_npmrc( - maybe_package_json_path: Option, - maybe_deno_json_path: Option, -) -> Result<(Arc, Option), AnyError> { - const NPMRC_NAME: &str = ".npmrc"; - - fn get_env_var(var_name: &str) -> Option { - std::env::var(var_name).ok() - } - - #[derive(Debug, Error)] - #[error("Error loading .npmrc at {}.", path.display())] - struct NpmRcLoadError { - path: PathBuf, - #[source] - source: std::io::Error, - } - - fn try_to_read_npmrc( - dir: &Path, - ) -> Result, NpmRcLoadError> { - let path = dir.join(NPMRC_NAME); - let maybe_source = match std::fs::read_to_string(&path) { - Ok(source) => Some(source), - Err(err) if err.kind() == std::io::ErrorKind::NotFound => None, - Err(err) => return Err(NpmRcLoadError { path, source: err }), - }; - - Ok(maybe_source.map(|source| (source, path))) - } - - fn try_to_parse_npmrc( - source: String, - path: &Path, - ) -> Result, AnyError> { - let npmrc = NpmRc::parse(&source, &get_env_var).with_context(|| { - format!("Failed to parse .npmrc at {}", path.display()) - })?; - let resolved = npmrc - .as_resolved(npm_registry_url()) - .context("Failed to resolve .npmrc options")?; - log::debug!(".npmrc found at: '{}'", path.display()); - Ok(Arc::new(resolved)) - } - - // 1. Try `.npmrc` next to `package.json` - if let Some(package_json_path) = maybe_package_json_path { - if let Some(package_json_dir) = package_json_path.parent() { - if let Some((source, path)) = try_to_read_npmrc(package_json_dir)? { - return try_to_parse_npmrc(source, &path).map(|r| (r, Some(path))); - } - } - } - - // 2. Try `.npmrc` next to `deno.json(c)` - if let Some(deno_json_path) = maybe_deno_json_path { - if let Some(deno_json_dir) = deno_json_path.parent() { - if let Some((source, path)) = try_to_read_npmrc(deno_json_dir)? { - return try_to_parse_npmrc(source, &path).map(|r| (r, Some(path))); - } - } - } - - // TODO(bartlomieju): update to read both files - one in the project root and one and - // home dir and then merge them. - // 3. 
Try `.npmrc` in the user's home directory - if let Some(home_dir) = crate::sys::CliSys::default().env_home_dir() { - match try_to_read_npmrc(&home_dir) { - Ok(Some((source, path))) => { - return try_to_parse_npmrc(source, &path).map(|r| (r, Some(path))); - } - Ok(None) => {} - Err(err) if err.source.kind() == std::io::ErrorKind::PermissionDenied => { - log::debug!( - "Skipping .npmrc in home directory due to permission denied error. {:#}", - err - ); - } - Err(err) => { - return Err(err.into()); - } - } - } - - log::debug!("No .npmrc file found"); - Ok((create_default_npmrc(), None)) -} - -pub fn create_default_npmrc() -> Arc { - Arc::new(ResolvedNpmRc { - default_config: deno_npm::npm_rc::RegistryConfigWithUrl { - registry_url: npm_registry_url().clone(), - config: Default::default(), - }, - scopes: Default::default(), - registry_configs: Default::default(), - }) -} - -#[derive(Error, Debug, Clone, deno_error::JsError)] -#[class(generic)] -pub enum RootCertStoreLoadError { - #[error( - "Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")" - )] - UnknownStore(String), - #[error("Unable to add pem file to certificate store: {0}")] - FailedAddPemFile(String), - #[error("Failed opening CA file: {0}")] - CaFileOpenError(String), -} - -/// Create and populate a root cert store based on the passed options and -/// environment. 
-pub fn get_root_cert_store( - maybe_root_path: Option, - maybe_ca_stores: Option>, - maybe_ca_data: Option, -) -> Result { - let mut root_cert_store = RootCertStore::empty(); - let ca_stores: Vec = maybe_ca_stores - .or_else(|| { - let env_ca_store = env::var("DENO_TLS_CA_STORE").ok()?; - Some( - env_ca_store - .split(',') - .map(|s| s.trim().to_string()) - .filter(|s| !s.is_empty()) - .collect(), - ) - }) - .unwrap_or_else(|| vec!["mozilla".to_string()]); - - for store in ca_stores.iter() { - match store.as_str() { - "mozilla" => { - root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec()); - } - "system" => { - let roots = load_native_certs().expect("could not load platform certs"); - for root in roots { - if let Err(err) = root_cert_store - .add(rustls::pki_types::CertificateDer::from(root.0.clone())) - { - log::error!( - "{}", - colors::yellow(&format!( - "Unable to add system certificate to certificate store: {:?}", - err - )) - ); - let hex_encoded_root = faster_hex::hex_string(&root.0); - log::error!("{}", colors::gray(&hex_encoded_root)); - } - } - } - _ => { - return Err(RootCertStoreLoadError::UnknownStore(store.clone())); - } - } - } - - let ca_data = - maybe_ca_data.or_else(|| env::var("DENO_CERT").ok().map(CaData::File)); - if let Some(ca_data) = ca_data { - let result = match ca_data { - CaData::File(ca_file) => { - let ca_file = if let Some(root) = &maybe_root_path { - root.join(&ca_file) - } else { - PathBuf::from(ca_file) - }; - let certfile = std::fs::File::open(ca_file).map_err(|err| { - RootCertStoreLoadError::CaFileOpenError(err.to_string()) - })?; - let mut reader = BufReader::new(certfile); - rustls_pemfile::certs(&mut reader).collect::, _>>() - } - CaData::Bytes(data) => { - let mut reader = BufReader::new(Cursor::new(data)); - rustls_pemfile::certs(&mut reader).collect::, _>>() - } - }; - - match result { - Ok(certs) => { - root_cert_store.add_parsable_certificates(certs); - } - Err(e) => { - return 
Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string())); - } - } - } - - Ok(root_cert_store) -} - -/// State provided to the process via an environment variable. -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct NpmProcessState { - pub kind: NpmProcessStateKind, - pub local_node_modules_path: Option, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub enum NpmProcessStateKind { - Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot), - Byonm, -} - -static NPM_PROCESS_STATE: Lazy> = Lazy::new(|| { - use deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME; - let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?; - std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME); - let fd = fd.parse::().ok()?; - let mut file = { - use deno_runtime::deno_io::FromRawIoHandle; - unsafe { std::fs::File::from_raw_io_handle(fd as _) } - }; - let mut buf = Vec::new(); - // seek to beginning. after the file is written the position will be inherited by this subprocess, - // and also this file might have been read before - file.seek(std::io::SeekFrom::Start(0)).unwrap(); - file - .read_to_end(&mut buf) - .inspect_err(|e| { - log::error!("failed to read npm process state from fd {fd}: {e}"); - }) - .ok()?; - let state: NpmProcessState = serde_json::from_slice(&buf) - .inspect_err(|e| { - log::error!( - "failed to deserialize npm process state: {e} {}", - String::from_utf8_lossy(&buf) - ) - }) - .ok()?; - Some(state) -}); - -/// Overrides for the options below that when set will -/// use these values over the values derived from the -/// CLI flags or config file. -#[derive(Default, Clone)] -struct CliOptionOverrides { - import_map_specifier: Option>, -} - /// Holds the resolved options of many sources used by subcommands /// and provides some helper function for creating common objects. 
+#[derive(Debug)] pub struct CliOptions { // the source of the options is a detail the rest of the // application need not concern itself with, so keep these private flags: Arc, initial_cwd: PathBuf, main_module_cell: std::sync::OnceLock>, - maybe_node_modules_folder: Option, - npmrc: Arc, maybe_lockfile: Option>, - maybe_external_import_map: Option<(PathBuf, serde_json::Value)>, - overrides: CliOptionOverrides, pub start_dir: Arc, - pub deno_dir_provider: Arc>, } impl CliOptions { #[allow(clippy::too_many_arguments)] pub fn new( - sys: &CliSys, flags: Arc, initial_cwd: PathBuf, maybe_lockfile: Option>, - npmrc: Arc, start_dir: Arc, - force_global_cache: bool, - maybe_external_import_map: Option<(PathBuf, serde_json::Value)>, ) -> Result { if let Some(insecure_allowlist) = flags.unsafely_ignore_certificate_errors.as_ref() @@ -800,154 +457,38 @@ impl CliOptions { } } - let maybe_lockfile = maybe_lockfile.filter(|_| !force_global_cache); - let deno_dir_provider = Arc::new(DenoDirProvider::new( - sys.clone(), - flags.internal.cache_path.clone(), - )); - let maybe_node_modules_folder = resolve_node_modules_folder( - &initial_cwd, - &flags, - &start_dir.workspace, - &deno_dir_provider, - ) - .with_context(|| "Resolving node_modules folder.")?; - load_env_variables_from_env_file(flags.env_file.as_ref()); Ok(Self { flags, initial_cwd, maybe_lockfile, - npmrc, - maybe_node_modules_folder, - overrides: Default::default(), main_module_cell: std::sync::OnceLock::new(), - maybe_external_import_map, start_dir, - deno_dir_provider, }) } - pub fn from_flags(sys: &CliSys, flags: Arc) -> Result { - let initial_cwd = - std::env::current_dir().with_context(|| "Failed getting cwd.")?; - let maybe_vendor_override = flags.vendor.map(|v| match v { - true => VendorEnablement::Enable { cwd: &initial_cwd }, - false => VendorEnablement::Disable, - }); - let resolve_workspace_discover_options = || { - let additional_config_file_names: &'static [&'static str] = - if matches!(flags.subcommand, 
DenoSubcommand::Publish(..)) { - &["jsr.json", "jsr.jsonc"] - } else { - &[] - }; - let discover_pkg_json = flags.config_flag != ConfigFlag::Disabled - && !flags.no_npm - && !has_flag_env_var("DENO_NO_PACKAGE_JSON"); - if !discover_pkg_json { - log::debug!("package.json auto-discovery is disabled"); - } - WorkspaceDiscoverOptions { - deno_json_cache: None, - pkg_json_cache: Some(&node_resolver::PackageJsonThreadLocalCache), - workspace_cache: None, - additional_config_file_names, - discover_pkg_json, - maybe_vendor_override, - } - }; - let resolve_empty_options = || WorkspaceDirectoryEmptyOptions { - root_dir: Arc::new( - ModuleSpecifier::from_directory_path(&initial_cwd).unwrap(), - ), - use_vendor_dir: maybe_vendor_override - .unwrap_or(VendorEnablement::Disable), - }; - - let start_dir = match &flags.config_flag { - ConfigFlag::Discover => { - if let Some(start_paths) = flags.config_path_args(&initial_cwd) { - WorkspaceDirectory::discover( - sys, - WorkspaceDiscoverStart::Paths(&start_paths), - &resolve_workspace_discover_options(), - )? - } else { - WorkspaceDirectory::empty(resolve_empty_options()) - } - } - ConfigFlag::Path(path) => { - let config_path = normalize_path(initial_cwd.join(path)); - WorkspaceDirectory::discover( - sys, - WorkspaceDiscoverStart::ConfigFile(&config_path), - &resolve_workspace_discover_options(), - )? - } - ConfigFlag::Disabled => { - WorkspaceDirectory::empty(resolve_empty_options()) - } - }; - + pub fn from_flags( + sys: &CliSys, + flags: Arc, + initial_cwd: PathBuf, + maybe_external_import_map: Option<&ExternalImportMap>, + start_dir: Arc, + ) -> Result { for diagnostic in start_dir.workspace.diagnostics() { log::warn!("{} {}", colors::yellow("Warning"), diagnostic); } - let (npmrc, _) = discover_npmrc_from_workspace(&start_dir.workspace)?; - - fn load_external_import_map( - deno_json: &ConfigFile, - ) -> Result, AnyError> { - if !deno_json.is_an_import_map() { - if let Some(path) = deno_json.to_import_map_path()? 
{ - let contents = std::fs::read_to_string(&path).with_context(|| { - format!("Unable to read import map at '{}'", path.display()) - })?; - let map = serde_json::from_str(&contents)?; - return Ok(Some((path, map))); - } - } - Ok(None) - } - - let external_import_map = - if let Some(deno_json) = start_dir.workspace.root_deno_json() { - load_external_import_map(deno_json)? - } else { - None - }; - let maybe_lock_file = CliLockfile::discover( sys, &flags, &start_dir.workspace, - external_import_map.as_ref().map(|(_, v)| v), + maybe_external_import_map.as_ref().map(|v| &v.value), )?; log::debug!("Finished config loading."); - Self::new( - sys, - flags, - initial_cwd, - maybe_lock_file.map(Arc::new), - npmrc, - Arc::new(start_dir), - false, - external_import_map, - ) - } - - /// This method is purposefully verbose to disourage its use. Do not use it - /// except in the factory structs. Instead, prefer specific methods on `CliOptions` - /// that can take all sources of information into account (ex. config files or env vars). - pub fn into_self_and_flags( - self: Arc, - ) -> (Arc, Arc) { - let flags = self.flags.clone(); - (self, flags) + Self::new(flags, initial_cwd, maybe_lock_file.map(Arc::new), start_dir) } #[inline(always)] @@ -990,49 +531,7 @@ impl CliOptions { } pub fn npm_system_info(&self) -> NpmSystemInfo { - match self.sub_command() { - DenoSubcommand::Compile(CompileFlags { - target: Some(target), - .. 
- }) => { - // the values of NpmSystemInfo align with the possible values for the - // `arch` and `platform` fields of Node.js' `process` global: - // https://nodejs.org/api/process.html - match target.as_str() { - "aarch64-apple-darwin" => NpmSystemInfo { - os: "darwin".into(), - cpu: "arm64".into(), - }, - "aarch64-unknown-linux-gnu" => NpmSystemInfo { - os: "linux".into(), - cpu: "arm64".into(), - }, - "x86_64-apple-darwin" => NpmSystemInfo { - os: "darwin".into(), - cpu: "x64".into(), - }, - "x86_64-unknown-linux-gnu" => NpmSystemInfo { - os: "linux".into(), - cpu: "x64".into(), - }, - "x86_64-pc-windows-msvc" => NpmSystemInfo { - os: "win32".into(), - cpu: "x64".into(), - }, - value => { - log::warn!( - concat!( - "Not implemented npm system info for target '{}'. Using current ", - "system default. This may impact architecture specific dependencies." - ), - value, - ); - NpmSystemInfo::default() - } - } - } - _ => NpmSystemInfo::default(), - } + self.sub_command().npm_system_info() } /// Resolve the specifier for a specified import map. @@ -1041,72 +540,12 @@ impl CliOptions { /// happens to be an import map. 
pub fn resolve_specified_import_map_specifier( &self, - ) -> Result, AnyError> { - match self.overrides.import_map_specifier.clone() { - Some(maybe_url) => Ok(maybe_url), - None => resolve_import_map_specifier( - self.flags.import_map_path.as_deref(), - self.workspace().root_deno_json().map(|c| c.as_ref()), - &self.initial_cwd, - ), - } - } - - pub async fn create_workspace_resolver( - &self, - file_fetcher: &CliFileFetcher, - pkg_json_dep_resolution: PackageJsonDepResolution, - ) -> Result { - let overrode_no_import_map: bool = self - .overrides - .import_map_specifier - .as_ref() - .map(|s| s.is_none()) - == Some(true); - let cli_arg_specified_import_map = if overrode_no_import_map { - // use a fake empty import map - Some(deno_config::workspace::SpecifiedImportMap { - base_url: self.workspace().root_dir().join("import_map.json").unwrap(), - value: serde_json::Value::Object(Default::default()), - }) - } else { - let maybe_import_map_specifier = - self.resolve_specified_import_map_specifier()?; - match maybe_import_map_specifier { - Some(specifier) => { - let value = - resolve_import_map_value_from_specifier(&specifier, file_fetcher) - .await - .with_context(|| { - format!("Unable to load '{}' import map", specifier) - })?; - Some(deno_config::workspace::SpecifiedImportMap { - base_url: specifier, - value, - }) - } - None => { - if let Some((path, import_map)) = - self.maybe_external_import_map.as_ref() - { - let path_url = deno_path_util::url_from_file_path(path)?; - Some(deno_config::workspace::SpecifiedImportMap { - base_url: path_url, - value: import_map.clone(), - }) - } else { - None - } - } - } - }; - Ok(self.workspace().create_resolver( - &CliSys::default(), - CreateResolverOptions { - pkg_json_dep_resolution, - specified_import_map: cli_arg_specified_import_map, - }, - )?) 
+ ) -> Result, ImportMapSpecifierResolveError> { + resolve_import_map_specifier( + self.flags.import_map_path.as_deref(), + self.workspace().root_deno_json().map(|c| c.as_ref()), + &self.initial_cwd, + ) } pub fn node_ipc_fd(&self) -> Option { @@ -1216,19 +655,6 @@ impl CliOptions { } } - pub fn resolve_npm_resolution_snapshot( - &self, - ) -> Result, AnyError> { - if let Some(NpmProcessStateKind::Snapshot(snapshot)) = - NPM_PROCESS_STATE.as_ref().map(|s| &s.kind) - { - // TODO(bartlomieju): remove this clone - Ok(Some(snapshot.clone().into_valid()?)) - } else { - Ok(None) - } - } - pub fn resolve_storage_key_resolver(&self) -> StorageKeyResolver { if let Some(location) = &self.flags.location { StorageKeyResolver::from_flag(location) @@ -1247,14 +673,6 @@ impl CliOptions { NPM_PROCESS_STATE.is_some() } - pub fn has_node_modules_dir(&self) -> bool { - self.maybe_node_modules_folder.is_some() - } - - pub fn node_modules_dir_path(&self) -> Option<&PathBuf> { - self.maybe_node_modules_folder.as_ref() - } - pub fn node_modules_dir( &self, ) -> Result< @@ -1271,13 +689,6 @@ impl CliOptions { self.workspace().vendor_dir_path() } - pub fn resolve_ts_config_for_emit( - &self, - config_type: TsConfigType, - ) -> Result { - self.workspace().resolve_ts_config_for_emit(config_type) - } - pub fn resolve_inspector_server( &self, ) -> Result, AnyError> { @@ -1293,7 +704,7 @@ impl CliOptions { Ok(Some(InspectorServer::new( host, - version::DENO_VERSION_INFO.user_agent, + DENO_VERSION_INFO.user_agent, )?)) } @@ -1301,27 +712,6 @@ impl CliOptions { self.maybe_lockfile.as_ref() } - pub fn to_compiler_option_types( - &self, - ) -> Result, serde_json::Error> { - self - .workspace() - .to_compiler_option_types() - .map(|maybe_imports| { - maybe_imports - .into_iter() - .map(|(referrer, imports)| deno_graph::ReferrerImports { - referrer, - imports, - }) - .collect() - }) - } - - pub fn npmrc(&self) -> &Arc { - &self.npmrc - } - pub fn resolve_fmt_options_for_members( &self, fmt_flags: 
&FmtFlags, @@ -1373,23 +763,6 @@ impl CliOptions { Ok(result) } - pub fn resolve_deno_lint_config(&self) -> Result { - let ts_config_result = - self.resolve_ts_config_for_emit(TsConfigType::Emit)?; - - let (transpile_options, _) = - crate::args::ts_config_to_transpile_and_emit_options( - ts_config_result.ts_config, - )?; - - Ok(DenoLintConfig { - default_jsx_factory: (!transpile_options.jsx_automatic) - .then_some(transpile_options.jsx_factory), - default_jsx_fragment_factory: (!transpile_options.jsx_automatic) - .then_some(transpile_options.jsx_fragment_factory), - }) - } - pub fn resolve_workspace_test_options( &self, test_flags: &TestFlags, @@ -1451,10 +824,6 @@ impl CliOptions { &self.flags.ca_stores } - pub fn check_js(&self) -> bool { - self.workspace().check_js() - } - pub fn coverage_dir(&self) -> Option { match &self.flags.subcommand { DenoSubcommand::Test(test) => test @@ -1719,41 +1088,6 @@ impl CliOptions { self.workspace().package_jsons().next().is_some() || self.is_node_main() } - fn byonm_enabled(&self) -> bool { - // check if enabled via unstable - self.node_modules_dir().ok().flatten() == Some(NodeModulesDirMode::Manual) - || NPM_PROCESS_STATE - .as_ref() - .map(|s| matches!(s.kind, NpmProcessStateKind::Byonm)) - .unwrap_or(false) - } - - pub fn use_byonm(&self) -> bool { - if matches!( - self.sub_command(), - DenoSubcommand::Install(_) - | DenoSubcommand::Add(_) - | DenoSubcommand::Remove(_) - | DenoSubcommand::Init(_) - | DenoSubcommand::Outdated(_) - ) { - // For `deno install/add/remove/init` we want to force the managed resolver so it can set up `node_modules/` directory. 
- return false; - } - if self.node_modules_dir().ok().flatten().is_none() - && self.maybe_node_modules_folder.is_some() - && self - .workspace() - .config_folders() - .values() - .any(|f| f.pkg_json.is_some()) - { - return true; - } - - self.byonm_enabled() - } - pub fn unstable_sloppy_imports(&self) -> bool { self.flags.unstable_config.sloppy_imports || self.workspace().has_unstable("sloppy-imports") @@ -1878,63 +1212,6 @@ impl CliOptions { } } -/// Resolves the path to use for a local node_modules folder. -fn resolve_node_modules_folder( - cwd: &Path, - flags: &Flags, - workspace: &Workspace, - deno_dir_provider: &Arc>, -) -> Result, AnyError> { - fn resolve_from_root(root_folder: &FolderConfigs, cwd: &Path) -> PathBuf { - root_folder - .deno_json - .as_ref() - .map(|c| Cow::Owned(c.dir_path())) - .or_else(|| { - root_folder - .pkg_json - .as_ref() - .map(|c| Cow::Borrowed(c.dir_path())) - }) - .unwrap_or(Cow::Borrowed(cwd)) - .join("node_modules") - } - - let root_folder = workspace.root_folder_configs(); - let use_node_modules_dir = if let Some(mode) = flags.node_modules_dir { - Some(mode.uses_node_modules_dir()) - } else { - workspace - .node_modules_dir()? 
- .map(|m| m.uses_node_modules_dir()) - .or(flags.vendor) - .or_else(|| root_folder.deno_json.as_ref().and_then(|c| c.json.vendor)) - }; - let path = if use_node_modules_dir == Some(false) { - return Ok(None); - } else if let Some(state) = &*NPM_PROCESS_STATE { - return Ok(state.local_node_modules_path.as_ref().map(PathBuf::from)); - } else if root_folder.pkg_json.is_some() { - let node_modules_dir = resolve_from_root(root_folder, cwd); - if let Ok(deno_dir) = deno_dir_provider.get_or_create() { - // `deno_dir.root` can be symlink in macOS - if let Ok(root) = canonicalize_path_maybe_not_exists(&deno_dir.root) { - if node_modules_dir.starts_with(root) { - // if the package.json is in deno_dir, then do not use node_modules - // next to it as local node_modules dir - return Ok(None); - } - } - } - node_modules_dir - } else if use_node_modules_dir.is_none() { - return Ok(None); - } else { - resolve_from_root(root_folder, cwd) - }; - Ok(Some(canonicalize_path_maybe_not_exists(&path)?)) -} - fn try_resolve_node_binary_main_entrypoint( specifier: &str, initial_cwd: &Path, @@ -1965,22 +1242,31 @@ fn try_resolve_node_binary_main_entrypoint( } } +#[derive(Debug, Error)] +#[error("Bad URL for import map.")] +pub struct ImportMapSpecifierResolveError { + #[source] + source: deno_path_util::ResolveUrlOrPathError, +} + fn resolve_import_map_specifier( maybe_import_map_path: Option<&str>, maybe_config_file: Option<&ConfigFile>, current_dir: &Path, -) -> Result, AnyError> { +) -> Result, ImportMapSpecifierResolveError> { if let Some(import_map_path) = maybe_import_map_path { if let Some(config_file) = &maybe_config_file { if config_file.json.import_map.is_some() { - log::warn!("{} the configuration file \"{}\" contains an entry for \"importMap\" that is being ignored.", colors::yellow("Warning"), config_file.specifier); + log::warn!( + "{} the configuration file \"{}\" contains an entry for \"importMap\" that is being ignored.", + colors::yellow("Warning"), + 
config_file.specifier, + ); } } let specifier = - deno_core::resolve_url_or_path(import_map_path, current_dir) - .with_context(|| { - format!("Bad URL (\"{import_map_path}\") for import map.") - })?; + deno_path_util::resolve_url_or_path(import_map_path, current_dir) + .map_err(|source| ImportMapSpecifierResolveError { source })?; Ok(Some(specifier)) } else { Ok(None) @@ -1992,15 +1278,6 @@ pub fn resolve_no_prompt(flags: &PermissionFlags) -> bool { flags.no_prompt || has_flag_env_var("DENO_NO_PROMPT") } -pub fn npm_pkg_req_ref_to_binary_command( - req_ref: &NpmPackageReqReference, -) -> String { - req_ref - .sub_path() - .map(|s| s.to_string()) - .unwrap_or_else(|| req_ref.req().name.to_string()) -} - pub fn config_to_deno_graph_workspace_member( config: &ConfigFile, ) -> Result { @@ -2030,9 +1307,9 @@ fn load_env_variables_from_env_file(filename: Option<&Vec>) { Ok(_) => (), Err(error) => { match error { - dotenvy::Error::LineParse(line, index)=> log::info!("{} Parsing failed within the specified environment file: {} at index: {} of the value: {}",colors::yellow("Warning"), env_file_name, index, line), - dotenvy::Error::Io(_)=> log::info!("{} The `--env-file` flag was used, but the environment file specified '{}' was not found.",colors::yellow("Warning"),env_file_name), - dotenvy::Error::EnvVar(_)=> log::info!("{} One or more of the environment variables isn't present or not unicode within the specified environment file: {}",colors::yellow("Warning"),env_file_name), + dotenvy::Error::LineParse(line, index)=> log::info!("{} Parsing failed within the specified environment file: {} at index: {} of the value: {}", colors::yellow("Warning"), env_file_name, index, line), + dotenvy::Error::Io(_)=> log::info!("{} The `--env-file` flag was used, but the environment file specified '{}' was not found.", colors::yellow("Warning"), env_file_name), + dotenvy::Error::EnvVar(_)=> log::info!("{} One or more of the environment variables isn't present or not unicode within the 
specified environment file: {}", colors::yellow("Warning"), env_file_name), _ => log::info!("{} Unknown failure occurred with the specified environment file: {}", colors::yellow("Warning"), env_file_name), } } @@ -2061,13 +1338,6 @@ pub enum NpmCachingStrategy { Manual, } -pub fn otel_runtime_config() -> OtelRuntimeConfig { - OtelRuntimeConfig { - runtime_name: Cow::Borrowed("deno"), - runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno), - } -} - #[cfg(test)] mod test { use pretty_assertions::assert_eq; @@ -2080,8 +1350,7 @@ mod test { "importMap": "import_map.json" }"#; let cwd = &std::env::current_dir().unwrap(); - let config_specifier = - ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap(); + let config_specifier = Url::parse("file:///deno/deno.jsonc").unwrap(); let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); let actual = resolve_import_map_specifier( Some("import-map.json"), @@ -2090,7 +1359,7 @@ mod test { ); let import_map_path = cwd.join("import-map.json"); let expected_specifier = - ModuleSpecifier::from_file_path(import_map_path).unwrap(); + deno_path_util::url_from_file_path(&import_map_path).unwrap(); assert!(actual.is_ok()); let actual = actual.unwrap(); assert_eq!(actual, Some(expected_specifier)); @@ -2099,8 +1368,7 @@ mod test { #[test] fn resolve_import_map_none() { let config_text = r#"{}"#; - let config_specifier = - ModuleSpecifier::parse("file:///deno/deno.jsonc").unwrap(); + let config_specifier = Url::parse("file:///deno/deno.jsonc").unwrap(); let config_file = ConfigFile::new(config_text, config_specifier).unwrap(); let actual = resolve_import_map_specifier( None, diff --git a/cli/bench/sqlite.js b/cli/bench/sqlite.js new file mode 100644 index 00000000000000..f63fbc4314b96b --- /dev/null +++ b/cli/bench/sqlite.js @@ -0,0 +1,36 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
+// deno-lint-ignore-file no-console + +import { DatabaseSync } from "node:sqlite"; +import fs from "node:fs"; + +function bench(name, fun, count = 10000) { + const start = Date.now(); + for (let i = 0; i < count; i++) fun(); + const elapsed = Date.now() - start; + const rate = Math.floor(count / (elapsed / 1000)); + console.log(` ${name}: time ${elapsed} ms rate ${rate}`); +} + +for (const name of [":memory:", "test.db"]) { + console.log(`Benchmarking ${name}`); + try { + fs.unlinkSync(name); + } catch { + // Ignore + } + + const db = new DatabaseSync(name); + db.exec("CREATE TABLE test (id INTEGER PRIMARY KEY, name TEXT)"); + + bench("prepare", () => db.prepare("SELECT * FROM test")); + bench("exec", () => db.exec("INSERT INTO test (name) VALUES ('foo')")); + + const stmt = db.prepare("SELECT * FROM test"); + bench("get", () => stmt.get()); + + const stmt2 = db.prepare("SELECT * FROM test WHERE id = ?"); + bench("get (integer bind)", () => stmt2.get(1)); + + bench("all", () => stmt.all(), 1000); +} diff --git a/cli/build.rs b/cli/build.rs index 590fee795d5900..c8e156a265d300 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -5,7 +5,6 @@ use std::path::PathBuf; use deno_core::snapshot::*; use deno_runtime::*; -mod shared; mod ts { use std::collections::HashMap; @@ -14,48 +13,13 @@ mod ts { use std::path::PathBuf; use deno_core::op2; + use deno_core::v8; use deno_core::OpState; use deno_error::JsErrorBox; use serde::Serialize; use super::*; - #[derive(Debug, Serialize)] - #[serde(rename_all = "camelCase")] - struct BuildInfoResponse { - build_specifier: String, - libs: Vec, - } - - #[op2] - #[serde] - fn op_build_info(state: &mut OpState) -> BuildInfoResponse { - let build_specifier = "asset:///bootstrap.ts".to_string(); - let build_libs = state - .borrow::>() - .iter() - .map(|s| s.to_string()) - .collect(); - BuildInfoResponse { - build_specifier, - libs: build_libs, - } - } - - #[op2(fast)] - fn op_is_node_file() -> bool { - false - } - - #[op2] - #[string] - fn 
op_script_version( - _state: &mut OpState, - #[string] _arg: &str, - ) -> Result, JsErrorBox> { - Ok(Some("1".to_string())) - } - #[derive(Debug, Serialize)] #[serde(rename_all = "camelCase")] struct LoadResponse { @@ -75,19 +39,10 @@ mod ts { let op_crate_libs = state.borrow::>(); let path_dts = state.borrow::(); let re_asset = lazy_regex::regex!(r"asset:/{3}lib\.(\S+)\.d\.ts"); - let build_specifier = "asset:///bootstrap.ts"; - - // we need a basic file to send to tsc to warm it up. - if load_specifier == build_specifier { - Ok(LoadResponse { - data: r#"Deno.writeTextFile("hello.txt", "hello deno!");"#.to_string(), - version: "1".to_string(), - // this corresponds to `ts.ScriptKind.TypeScript` - script_kind: 3, - }) - // specifiers come across as `asset:///lib.{lib_name}.d.ts` and we need to - // parse out just the name so we can lookup the asset. - } else if let Some(caps) = re_asset.captures(load_specifier) { + + // specifiers come across as `asset:///lib.{lib_name}.d.ts` and we need to + // parse out just the name so we can lookup the asset. + if let Some(caps) = re_asset.captures(load_specifier) { if let Some(lib) = caps.get(1).map(|m| m.as_str()) { // if it comes from an op crate, we were supplied with the path to the // file. 
@@ -101,32 +56,35 @@ mod ts { }; let data = std::fs::read_to_string(path).map_err(JsErrorBox::from_err)?; - Ok(LoadResponse { + return Ok(LoadResponse { data, version: "1".to_string(), // this corresponds to `ts.ScriptKind.TypeScript` script_kind: 3, - }) - } else { - Err(JsErrorBox::new( - "InvalidSpecifier", - format!("An invalid specifier was requested: {}", load_specifier), - )) + }); } - } else { - Err(JsErrorBox::new( - "InvalidSpecifier", - format!("An invalid specifier was requested: {}", load_specifier), - )) } + + Err(JsErrorBox::new( + "InvalidSpecifier", + format!("An invalid specifier was requested: {}", load_specifier), + )) } deno_core::extension!(deno_tsc, - ops = [op_build_info, op_is_node_file, op_load, op_script_version], + ops = [ + op_load, + ], + esm_entry_point = "ext:deno_tsc/99_main_compiler.js", + esm = [ + dir "tsc", + "97_ts_host.js", + "98_lsp.js", + "99_main_compiler.js", + ], js = [ dir "tsc", "00_typescript.js", - "99_main_compiler.js", ], options = { op_crate_libs: HashMap<&'static str, PathBuf>, @@ -272,6 +230,28 @@ mod ts { ) .unwrap(); + // Leak to satisfy type-checker. It's okay since it's only run once for a build script. 
+ let build_libs_ = Box::leak(Box::new(build_libs.clone())); + let runtime_cb = Box::new(|rt: &mut deno_core::JsRuntimeForSnapshot| { + let scope = &mut rt.handle_scope(); + + let context = scope.get_current_context(); + let global = context.global(scope); + + let name = v8::String::new(scope, "snapshot").unwrap(); + let snapshot_fn_val = global.get(scope, name.into()).unwrap(); + let snapshot_fn: v8::Local = + snapshot_fn_val.try_into().unwrap(); + let undefined = v8::undefined(scope); + let build_libs = build_libs_.clone(); + let build_libs_v8 = + deno_core::serde_v8::to_v8(scope, build_libs).unwrap(); + + snapshot_fn + .call(scope, undefined.into(), &[build_libs_v8]) + .unwrap(); + }); + let output = create_snapshot( CreateSnapshotOptions { cargo_manifest_dir: env!("CARGO_MANIFEST_DIR"), @@ -282,7 +262,7 @@ mod ts { path_dts, )], extension_transpiler: None, - with_runtime_cb: None, + with_runtime_cb: Some(runtime_cb), skip_op_registration: false, }, None, @@ -310,57 +290,6 @@ mod ts { println!("cargo:rerun-if-changed={}", path.display()); } } - - pub(crate) fn version() -> String { - let file_text = std::fs::read_to_string("tsc/00_typescript.js").unwrap(); - let version_text = " version = \""; - for line in file_text.lines() { - if let Some(index) = line.find(version_text) { - let remaining_line = &line[index + version_text.len()..]; - return remaining_line[..remaining_line.find('"').unwrap()].to_string(); - } - } - panic!("Could not find ts version.") - } -} - -#[cfg(not(feature = "hmr"))] -fn create_cli_snapshot(snapshot_path: PathBuf) { - use deno_runtime::ops::bootstrap::SnapshotOptions; - - let snapshot_options = SnapshotOptions { - ts_version: ts::version(), - v8_version: deno_core::v8::VERSION_STRING, - target: std::env::var("TARGET").unwrap(), - }; - - deno_runtime::snapshot::create_runtime_snapshot( - snapshot_path, - snapshot_options, - vec![], - ); -} - -fn git_commit_hash() -> String { - if let Ok(output) = std::process::Command::new("git") - 
.arg("rev-list") - .arg("-1") - .arg("HEAD") - .output() - { - if output.status.success() { - std::str::from_utf8(&output.stdout[..40]) - .unwrap() - .to_string() - } else { - // When not in git repository - // (e.g. when the user install by `cargo install deno`) - "UNKNOWN".to_string() - } - } else { - // When there is no git command for some reason - "UNKNOWN".to_string() - } } fn main() { @@ -370,7 +299,7 @@ fn main() { } deno_napi::print_linker_flags("deno"); - deno_napi::print_linker_flags("denort"); + deno_webgpu::print_linker_flags("deno"); // Host snapshots won't work when cross compiling. let target = env::var("TARGET").unwrap(); @@ -389,51 +318,15 @@ fn main() { } println!("cargo:rerun-if-env-changed=DENO_CANARY"); - println!("cargo:rustc-env=GIT_COMMIT_HASH={}", git_commit_hash()); - println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH"); - println!( - "cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}", - &git_commit_hash()[..7] - ); - - let ts_version = ts::version(); - debug_assert_eq!(ts_version, "5.6.2"); // bump this assertion when it changes - println!("cargo:rustc-env=TS_VERSION={}", ts_version); - println!("cargo:rerun-if-env-changed=TS_VERSION"); - println!("cargo:rustc-env=TARGET={}", env::var("TARGET").unwrap()); println!("cargo:rustc-env=PROFILE={}", env::var("PROFILE").unwrap()); - if cfg!(windows) { - // these dls load slowly, so delay loading them - let dlls = [ - // webgpu - "d3dcompiler_47", - "OPENGL32", - // network related functions - "iphlpapi", - ]; - for dll in dlls { - println!("cargo:rustc-link-arg-bin=deno=/delayload:{dll}.dll"); - println!("cargo:rustc-link-arg-bin=denort=/delayload:{dll}.dll"); - } - // enable delay loading - println!("cargo:rustc-link-arg-bin=deno=delayimp.lib"); - println!("cargo:rustc-link-arg-bin=denort=delayimp.lib"); - } - let c = PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()); let o = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let compiler_snapshot_path = o.join("COMPILER_SNAPSHOT.bin"); 
ts::create_compiler_snapshot(compiler_snapshot_path, &c); - #[cfg(not(feature = "hmr"))] - { - let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin"); - create_cli_snapshot(cli_snapshot_path); - } - #[cfg(target_os = "windows")] { let mut res = winres::WindowsResource::new(); diff --git a/cli/cache/cache_db.rs b/cli/cache/cache_db.rs index 7fd66e93338f11..63f24e9574a1fc 100644 --- a/cli/cache/cache_db.rs +++ b/cli/cache/cache_db.rs @@ -9,14 +9,13 @@ use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::parking_lot::MutexGuard; use deno_core::unsync::spawn_blocking; +use deno_lib::util::hash::FastInsecureHasher; use deno_runtime::deno_webstorage::rusqlite; use deno_runtime::deno_webstorage::rusqlite::Connection; use deno_runtime::deno_webstorage::rusqlite::OptionalExtension; use deno_runtime::deno_webstorage::rusqlite::Params; use once_cell::sync::OnceCell; -use super::FastInsecureHasher; - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct CacheDBHash(u64); @@ -233,7 +232,7 @@ impl CacheDB { config: &CacheDBConfiguration, conn: &Connection, version: &str, - ) -> Result<(), AnyError> { + ) -> Result<(), rusqlite::Error> { let sql = config.create_combined_sql(); conn.execute_batch(&sql)?; @@ -266,7 +265,7 @@ impl CacheDB { fn open_connection_and_init( &self, path: Option<&Path>, - ) -> Result { + ) -> Result { let conn = self.actually_open_connection(path)?; Self::initialize_connection(self.config, &conn, self.version)?; Ok(conn) @@ -369,7 +368,9 @@ impl CacheDB { fn open_connection( config: &CacheDBConfiguration, path: Option<&Path>, - open_connection_and_init: impl Fn(Option<&Path>) -> Result, + open_connection_and_init: impl Fn( + Option<&Path>, + ) -> Result, ) -> Result { // Success on first try? We hope that this is the case. 
let err = match open_connection_and_init(path) { @@ -380,9 +381,20 @@ fn open_connection( let Some(path) = path.as_ref() else { // If an in-memory DB fails, that's game over log::error!("Failed to initialize in-memory cache database."); - return Err(err); + return Err(err.into()); }; + // reduce logging for readonly file system + if let rusqlite::Error::SqliteFailure(ffi_err, _) = &err { + if ffi_err.code == rusqlite::ErrorCode::ReadOnly { + log::debug!( + "Failed creating cache db. Folder readonly: {}", + path.display() + ); + return handle_failure_mode(config, err, open_connection_and_init); + } + } + // ensure the parent directory exists if let Some(parent) = path.parent() { match std::fs::create_dir_all(parent) { @@ -411,10 +423,11 @@ fn open_connection( // Failed, try deleting it let is_tty = std::io::stderr().is_terminal(); log::log!( - if is_tty { log::Level::Warn } else { log::Level::Trace }, - "Could not initialize cache database '{}', deleting and retrying... ({err:?})", - path.to_string_lossy() - ); + if is_tty { log::Level::Warn } else { log::Level::Trace }, + "Could not initialize cache database '{}', deleting and retrying... 
({err:?})", + path.to_string_lossy() + ); + if std::fs::remove_file(path).is_ok() { // Try a third time if we successfully deleted it let res = open_connection_and_init(Some(path)); @@ -423,6 +436,11 @@ fn open_connection( }; } + log_failure_mode(path, is_tty, config); + handle_failure_mode(config, err, open_connection_and_init) +} + +fn log_failure_mode(path: &Path, is_tty: bool, config: &CacheDBConfiguration) { match config.on_failure { CacheFailure::InMemory => { log::log!( @@ -432,9 +450,8 @@ fn open_connection( log::Level::Trace }, "Failed to open cache file '{}', opening in-memory cache.", - path.to_string_lossy() + path.display() ); - Ok(ConnectionState::Connected(open_connection_and_init(None)?)) } CacheFailure::Blackhole => { log::log!( @@ -444,23 +461,36 @@ fn open_connection( log::Level::Trace }, "Failed to open cache file '{}', performance may be degraded.", - path.to_string_lossy() + path.display() ); - Ok(ConnectionState::Blackhole) } CacheFailure::Error => { log::error!( "Failed to open cache file '{}', expect further errors.", - path.to_string_lossy() + path.display() ); - Err(err) } } } +fn handle_failure_mode( + config: &CacheDBConfiguration, + err: rusqlite::Error, + open_connection_and_init: impl Fn( + Option<&Path>, + ) -> Result, +) -> Result { + match config.on_failure { + CacheFailure::InMemory => { + Ok(ConnectionState::Connected(open_connection_and_init(None)?)) + } + CacheFailure::Blackhole => Ok(ConnectionState::Blackhole), + CacheFailure::Error => Err(err.into()), + } +} + #[cfg(test)] mod tests { - use deno_core::anyhow::anyhow; use test_util::TempDir; use super::*; @@ -521,7 +551,8 @@ mod tests { let path = temp_dir.path().join("data").to_path_buf(); let state = open_connection(&TEST_DB, Some(path.as_path()), |maybe_path| { match maybe_path { - Some(_) => Err(anyhow!("fail")), + // this error was chosen because it was an error easy to construct + Some(_) => Err(rusqlite::Error::SqliteSingleThreadedMode), None => 
Ok(Connection::open_in_memory().unwrap()), } }) diff --git a/cli/cache/caches.rs b/cli/cache/caches.rs index dd4a9748146487..fad61f1dc32699 100644 --- a/cli/cache/caches.rs +++ b/cli/cache/caches.rs @@ -3,7 +3,7 @@ use std::path::PathBuf; use std::sync::Arc; -use deno_lib::cache::DenoDirProvider; +use deno_lib::version::DENO_VERSION_INFO; use once_cell::sync::OnceCell; use super::cache_db::CacheDB; @@ -14,10 +14,10 @@ use super::fast_check::FAST_CHECK_CACHE_DB; use super::incremental::INCREMENTAL_CACHE_DB; use super::module_info::MODULE_INFO_CACHE_DB; use super::node::NODE_ANALYSIS_CACHE_DB; -use crate::sys::CliSys; +use crate::cache::DenoDirProvider; pub struct Caches { - dir_provider: Arc>, + dir_provider: Arc, fmt_incremental_cache_db: OnceCell, lint_incremental_cache_db: OnceCell, dep_analysis_db: OnceCell, @@ -28,7 +28,7 @@ pub struct Caches { } impl Caches { - pub fn new(dir: Arc>) -> Self { + pub fn new(dir: Arc) -> Self { Self { dir_provider: dir, fmt_incremental_cache_db: Default::default(), @@ -49,13 +49,9 @@ impl Caches { cell .get_or_init(|| { if let Some(path) = path { - CacheDB::from_path( - config, - path, - crate::version::DENO_VERSION_INFO.deno, - ) + CacheDB::from_path(config, path, DENO_VERSION_INFO.deno) } else { - CacheDB::in_memory(config, crate::version::DENO_VERSION_INFO.deno) + CacheDB::in_memory(config, DENO_VERSION_INFO.deno) } }) .clone() diff --git a/cli/cache/code_cache.rs b/cli/cache/code_cache.rs index 27ec544b5f6375..d938732635f569 100644 --- a/cli/cache/code_cache.rs +++ b/cli/cache/code_cache.rs @@ -1,7 +1,5 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
-use std::sync::Arc; - use deno_ast::ModuleSpecifier; use deno_core::error::AnyError; use deno_runtime::code_cache; @@ -11,7 +9,6 @@ use super::cache_db::CacheDB; use super::cache_db::CacheDBConfiguration; use super::cache_db::CacheDBHash; use super::cache_db::CacheFailure; -use crate::worker::CliCodeCache; pub static CODE_CACHE_DB: CacheDBConfiguration = CacheDBConfiguration { table_initializer: concat!( @@ -85,12 +82,6 @@ impl CodeCache { } } -impl CliCodeCache for CodeCache { - fn as_code_cache(self: Arc) -> Arc { - self - } -} - impl code_cache::CodeCache for CodeCache { fn get_sync( &self, diff --git a/cli/lib/cache/deno_dir.rs b/cli/cache/deno_dir.rs similarity index 72% rename from cli/lib/cache/deno_dir.rs rename to cli/cache/deno_dir.rs index 00bc83ff9b8705..a5dd96ea7d4868 100644 --- a/cli/lib/cache/deno_dir.rs +++ b/cli/cache/deno_dir.rs @@ -2,79 +2,61 @@ use std::env; use std::path::PathBuf; +use std::sync::Arc; use deno_cache_dir::DenoDirResolutionError; use super::DiskCache; -use crate::sys::DenoLibSys; +use crate::factory::CliDenoDirPathProvider; +use crate::sys::CliSys; /// Lazily creates the deno dir which might be useful in scenarios /// where functionality wants to continue if the DENO_DIR can't be created. 
-pub struct DenoDirProvider { - sys: TSys, - maybe_custom_root: Option, - deno_dir: std::sync::OnceLock, DenoDirResolutionError>>, +pub struct DenoDirProvider { + deno_dir_path_provider: Arc, + sys: CliSys, + deno_dir: once_cell::sync::OnceCell, } -impl DenoDirProvider { - pub fn new(sys: TSys, maybe_custom_root: Option) -> Self { +impl DenoDirProvider { + pub fn new( + sys: CliSys, + deno_dir_path_provider: Arc, + ) -> Self { Self { sys, - maybe_custom_root, + deno_dir_path_provider, deno_dir: Default::default(), } } - pub fn get_or_create( - &self, - ) -> Result<&DenoDir, DenoDirResolutionError> { - self - .deno_dir - .get_or_init(|| { - DenoDir::new(self.sys.clone(), self.maybe_custom_root.clone()) - }) - .as_ref() - .map_err(|err| match err { - DenoDirResolutionError::NoCacheOrHomeDir => { - DenoDirResolutionError::NoCacheOrHomeDir - } - DenoDirResolutionError::FailedCwd { source } => { - DenoDirResolutionError::FailedCwd { - source: std::io::Error::new(source.kind(), source.to_string()), - } - } - }) + pub fn get_or_create(&self) -> Result<&DenoDir, DenoDirResolutionError> { + self.deno_dir.get_or_try_init(|| { + let path = self.deno_dir_path_provider.get_or_create()?; + Ok(DenoDir::new(self.sys.clone(), path.clone())) + }) } } /// `DenoDir` serves as coordinator for multiple `DiskCache`s containing them /// in single directory that can be controlled with `$DENO_DIR` env variable. #[derive(Debug, Clone)] -pub struct DenoDir { +pub struct DenoDir { /// Example: /Users/rld/.deno/ pub root: PathBuf, /// Used by TsCompiler to cache compiler output. 
- pub gen_cache: DiskCache, + pub gen_cache: DiskCache, } -impl DenoDir { - pub fn new( - sys: TSys, - maybe_custom_root: Option, - ) -> Result { - let root = deno_cache_dir::resolve_deno_dir( - &sys_traits::impls::RealSys, - maybe_custom_root, - )?; +impl DenoDir { + pub fn new(sys: CliSys, root: PathBuf) -> Self { assert!(root.is_absolute()); let gen_path = root.join("gen"); - let deno_dir = Self { + Self { root, - gen_cache: DiskCache::new(sys, &gen_path), - }; - - Ok(deno_dir) + gen_cache: DiskCache::new(sys, gen_path), + } } /// The root directory of the DENO_DIR for display purposes only. diff --git a/cli/lib/cache/disk_cache.rs b/cli/cache/disk_cache.rs similarity index 93% rename from cli/lib/cache/disk_cache.rs rename to cli/cache/disk_cache.rs index 2c735a34b27e9f..a085ef523597db 100644 --- a/cli/lib/cache/disk_cache.rs +++ b/cli/cache/disk_cache.rs @@ -1,7 +1,6 @@ // Copyright 2018-2025 the Deno authors. MIT license. use std::ffi::OsStr; -use std::fs; use std::path::Component; use std::path::Path; use std::path::PathBuf; @@ -10,26 +9,24 @@ use std::str; use deno_cache_dir::url_to_filename; use deno_cache_dir::CACHE_PERM; +use deno_core::url::Host; +use deno_core::url::Url; use deno_path_util::fs::atomic_write_file_with_retries; -use url::Host; -use url::Url; +use sys_traits::FsRead; -use crate::sys::DenoLibSys; +use crate::sys::CliSys; #[derive(Debug, Clone)] -pub struct DiskCache { - sys: TSys, +pub struct DiskCache { + sys: CliSys, pub location: PathBuf, } -impl DiskCache { +impl DiskCache { /// `location` must be an absolute path. 
- pub fn new(sys: TSys, location: &Path) -> Self { + pub fn new(sys: CliSys, location: PathBuf) -> Self { assert!(location.is_absolute()); - Self { - sys, - location: location.to_owned(), - } + Self { sys, location } } fn get_cache_filename(&self, url: &Url) -> Option { @@ -119,7 +116,7 @@ impl DiskCache { pub fn get(&self, filename: &Path) -> std::io::Result> { let path = self.location.join(filename); - fs::read(path) + Ok(self.sys.fs_read(path)?.into_owned()) } pub fn set(&self, filename: &Path, data: &[u8]) -> std::io::Result<()> { @@ -141,7 +138,7 @@ mod tests { fn test_set_get_cache_file() { let temp_dir = TempDir::new(); let sub_dir = temp_dir.path().join("sub_dir"); - let cache = DiskCache::new(RealSys, &sub_dir.to_path_buf()); + let cache = DiskCache::new(RealSys, sub_dir.to_path_buf()); let path = PathBuf::from("foo/bar.txt"); cache.set(&path, b"hello").unwrap(); assert_eq!(cache.get(&path).unwrap(), b"hello"); @@ -155,7 +152,7 @@ mod tests { PathBuf::from("/deno_dir/") }; - let cache = DiskCache::new(RealSys, &cache_location); + let cache = DiskCache::new(RealSys, cache_location); let mut test_cases = vec![ ( @@ -211,7 +208,7 @@ mod tests { } else { "/foo" }; - let cache = DiskCache::new(RealSys, &PathBuf::from(p)); + let cache = DiskCache::new(RealSys, PathBuf::from(p)); let mut test_cases = vec![ ( @@ -259,7 +256,7 @@ mod tests { PathBuf::from("/deno_dir/") }; - let cache = DiskCache::new(RealSys, &cache_location); + let cache = DiskCache::new(RealSys, cache_location); let mut test_cases = vec!["unknown://localhost/test.ts"]; diff --git a/cli/cache/emit.rs b/cli/cache/emit.rs index e8a940b3bec02e..8f42dfb3122e2d 100644 --- a/cli/cache/emit.rs +++ b/cli/cache/emit.rs @@ -6,25 +6,25 @@ use deno_ast::ModuleSpecifier; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::unsync::sync::AtomicFlag; -use deno_lib::cache::DiskCache; +use deno_lib::version::DENO_VERSION_INFO; -use crate::sys::CliSys; +use super::DiskCache; /// The cache 
that stores previously emitted files. #[derive(Debug)] pub struct EmitCache { - disk_cache: DiskCache, + disk_cache: DiskCache, emit_failed_flag: AtomicFlag, file_serializer: EmitFileSerializer, } impl EmitCache { - pub fn new(disk_cache: DiskCache) -> Self { + pub fn new(disk_cache: DiskCache) -> Self { Self { disk_cache, emit_failed_flag: Default::default(), file_serializer: EmitFileSerializer { - cli_version: crate::version::DENO_VERSION_INFO.deno, + cli_version: DENO_VERSION_INFO.deno, }, } } @@ -148,7 +148,7 @@ impl EmitFileSerializer { // it's ok to use an insecure hash here because // if someone can change the emit source then they // can also change the version hash - crate::cache::FastInsecureHasher::new_without_deno_version() // use cli_version property instead + deno_lib::util::hash::FastInsecureHasher::new_without_deno_version() // use cli_version property instead .write(bytes) // emit should not be re-used between cli versions .write_str(self.cli_version) @@ -167,7 +167,7 @@ mod test { pub fn emit_cache_general_use() { let temp_dir = TempDir::new(); let disk_cache = - DiskCache::new(CliSys::default(), temp_dir.path().as_path()); + DiskCache::new(CliSys::default(), temp_dir.path().to_path_buf()); let cache = EmitCache { disk_cache: disk_cache.clone(), file_serializer: EmitFileSerializer { diff --git a/cli/cache/mod.rs b/cli/cache/mod.rs index e16f95e56f1ec9..80e9ab40ae16de 100644 --- a/cli/cache/mod.rs +++ b/cli/cache/mod.rs @@ -30,7 +30,8 @@ mod cache_db; mod caches; mod check; mod code_cache; -mod common; +mod deno_dir; +mod disk_cache; mod emit; mod fast_check; mod incremental; @@ -42,9 +43,11 @@ pub use cache_db::CacheDBHash; pub use caches::Caches; pub use check::TypeCheckCache; pub use code_cache::CodeCache; -pub use common::FastInsecureHasher; /// Permissions used to save a file in the disk caches. 
pub use deno_cache_dir::CACHE_PERM; +pub use deno_dir::DenoDir; +pub use deno_dir::DenoDirProvider; +pub use disk_cache::DiskCache; pub use emit::EmitCache; pub use fast_check::FastCheckCache; pub use incremental::IncrementalCache; @@ -54,7 +57,6 @@ pub use parsed_source::LazyGraphSourceParser; pub use parsed_source::ParsedSourceCache; pub type GlobalHttpCache = deno_cache_dir::GlobalHttpCache; -pub type LocalHttpCache = deno_cache_dir::LocalHttpCache; pub type LocalLspHttpCache = deno_cache_dir::LocalLspHttpCache; pub use deno_cache_dir::HttpCache; use deno_error::JsErrorBox; @@ -117,11 +119,7 @@ impl FetchCacher { } else if specifier.scheme() == "file" { specifier.to_file_path().ok() } else { - #[allow(deprecated)] - self - .global_http_cache - .get_global_cache_filepath(specifier) - .ok() + self.global_http_cache.local_path_for_url(specifier).ok() } } } diff --git a/cli/emit.rs b/cli/emit.rs index 69ac8323bbb90b..2e5ca33939ff7b 100644 --- a/cli/emit.rs +++ b/cli/emit.rs @@ -20,9 +20,11 @@ use deno_error::JsErrorBox; use deno_graph::MediaType; use deno_graph::Module; use deno_graph::ModuleGraph; +use deno_lib::util::hash::FastInsecureHasher; +use crate::args::deno_json::TranspileAndEmitOptions; +use crate::args::deno_json::TsConfigResolver; use crate::cache::EmitCache; -use crate::cache::FastInsecureHasher; use crate::cache::ParsedSourceCache; use crate::resolver::CliCjsTracker; @@ -31,10 +33,7 @@ pub struct Emitter { cjs_tracker: Arc, emit_cache: Arc, parsed_source_cache: Arc, - transpile_and_emit_options: - Arc<(deno_ast::TranspileOptions, deno_ast::EmitOptions)>, - // cached hash of the transpile and emit options - transpile_and_emit_options_hash: u64, + tsconfig_resolver: Arc, } impl Emitter { @@ -42,21 +41,13 @@ impl Emitter { cjs_tracker: Arc, emit_cache: Arc, parsed_source_cache: Arc, - transpile_options: deno_ast::TranspileOptions, - emit_options: deno_ast::EmitOptions, + tsconfig_resolver: Arc, ) -> Self { - let transpile_and_emit_options_hash = { - let 
mut hasher = FastInsecureHasher::new_without_deno_version(); - hasher.write_hashable(&transpile_options); - hasher.write_hashable(&emit_options); - hasher.finish() - }; Self { cjs_tracker, emit_cache, parsed_source_cache, - transpile_and_emit_options: Arc::new((transpile_options, emit_options)), - transpile_and_emit_options_hash, + tsconfig_resolver, } } @@ -103,26 +94,37 @@ impl Emitter { specifier: &ModuleSpecifier, module_kind: deno_ast::ModuleKind, source: &str, - ) -> Option { - let source_hash = self.get_source_hash(module_kind, source); - self.emit_cache.get_emit_code(specifier, source_hash) + ) -> Result, AnyError> { + let transpile_and_emit_options = self + .tsconfig_resolver + .transpile_and_emit_options(specifier)?; + let source_hash = + self.get_source_hash(module_kind, transpile_and_emit_options, source); + Ok(self.emit_cache.get_emit_code(specifier, source_hash)) } pub async fn emit_parsed_source( &self, specifier: &ModuleSpecifier, media_type: MediaType, - module_kind: deno_ast::ModuleKind, + module_kind: ModuleKind, source: &Arc, - ) -> Result { + ) -> Result { + let transpile_and_emit_options = self + .tsconfig_resolver + .transpile_and_emit_options(specifier)?; // Note: keep this in sync with the sync version below let helper = EmitParsedSourceHelper(self); - match helper.pre_emit_parsed_source(specifier, module_kind, source) { + match helper.pre_emit_parsed_source( + specifier, + module_kind, + transpile_and_emit_options, + source, + ) { PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::NotCached { source_hash } => { let parsed_source_cache = self.parsed_source_cache.clone(); - let transpile_and_emit_options = - self.transpile_and_emit_options.clone(); + let transpile_and_emit_options = transpile_and_emit_options.clone(); let transpiled_source = deno_core::unsync::spawn_blocking({ let specifier = specifier.clone(); let source = source.clone(); @@ -133,8 +135,8 @@ impl Emitter { media_type, module_kind, source.clone(), - 
&transpile_and_emit_options.0, - &transpile_and_emit_options.1, + &transpile_and_emit_options.transpile, + &transpile_and_emit_options.emit, ) .map(|r| r.text) } @@ -158,9 +160,17 @@ impl Emitter { module_kind: deno_ast::ModuleKind, source: &Arc, ) -> Result { + let transpile_and_emit_options = self + .tsconfig_resolver + .transpile_and_emit_options(specifier)?; // Note: keep this in sync with the async version above let helper = EmitParsedSourceHelper(self); - match helper.pre_emit_parsed_source(specifier, module_kind, source) { + match helper.pre_emit_parsed_source( + specifier, + module_kind, + transpile_and_emit_options, + source, + ) { PreEmitResult::Cached(emitted_text) => Ok(emitted_text), PreEmitResult::NotCached { source_hash } => { let transpiled_source = EmitParsedSourceHelper::transpile( @@ -169,8 +179,8 @@ impl Emitter { media_type, module_kind, source.clone(), - &self.transpile_and_emit_options.0, - &self.transpile_and_emit_options.1, + &transpile_and_emit_options.transpile, + &transpile_and_emit_options.emit, )? 
.text; helper.post_emit_parsed_source( @@ -190,7 +200,10 @@ impl Emitter { module_kind: deno_ast::ModuleKind, source: &Arc, ) -> Result<(String, String), AnyError> { - let mut emit_options = self.transpile_and_emit_options.1.clone(); + let transpile_and_emit_options = self + .tsconfig_resolver + .transpile_and_emit_options(specifier)?; + let mut emit_options = transpile_and_emit_options.emit.clone(); emit_options.inline_sources = false; emit_options.source_map = SourceMapOption::Separate; // strip off the path to have more deterministic builds as we don't care @@ -202,7 +215,7 @@ impl Emitter { media_type, module_kind, source.clone(), - &self.transpile_and_emit_options.0, + &transpile_and_emit_options.transpile, &emit_options, )?; Ok((source.text, source.source_map.unwrap())) @@ -232,7 +245,11 @@ impl Emitter { // HMR doesn't work with embedded source maps for some reason, so set // the option to not use them (though you should test this out because // this statement is probably wrong) - let mut options = self.transpile_and_emit_options.1.clone(); + let transpile_and_emit_options = self + .tsconfig_resolver + .transpile_and_emit_options(specifier) + .map_err(JsErrorBox::from_err)?; + let mut options = transpile_and_emit_options.emit.clone(); options.source_map = SourceMapOption::None; let is_cjs = self .cjs_tracker @@ -244,7 +261,7 @@ impl Emitter { .map_err(JsErrorBox::from_err)?; let transpiled_source = parsed_source .transpile( - &self.transpile_and_emit_options.0, + &transpile_and_emit_options.transpile, &deno_ast::TranspileModuleOptions { module_kind: Some(ModuleKind::from_is_cjs(is_cjs)), }, @@ -275,10 +292,15 @@ impl Emitter { /// A hashing function that takes the source code and uses the global emit /// options then generates a string hash which can be stored to /// determine if the cached emit is valid or not. 
- fn get_source_hash(&self, module_kind: ModuleKind, source_text: &str) -> u64 { + fn get_source_hash( + &self, + module_kind: ModuleKind, + transpile_and_emit: &TranspileAndEmitOptions, + source_text: &str, + ) -> u64 { FastInsecureHasher::new_without_deno_version() // stored in the transpile_and_emit_options_hash .write_str(source_text) - .write_u64(self.transpile_and_emit_options_hash) + .write_u64(transpile_and_emit.pre_computed_hash) .write_hashable(module_kind) .finish() } @@ -291,6 +313,11 @@ enum PreEmitResult { #[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum EmitParsedSourceHelperError { + #[class(inherit)] + #[error(transparent)] + CompilerOptionsParse( + #[from] deno_config::deno_json::CompilerOptionsParseError, + ), #[class(inherit)] #[error(transparent)] ParseDiagnostic(#[from] deno_ast::ParseDiagnostic), @@ -310,9 +337,13 @@ impl<'a> EmitParsedSourceHelper<'a> { &self, specifier: &ModuleSpecifier, module_kind: deno_ast::ModuleKind, + transpile_and_emit_options: &TranspileAndEmitOptions, source: &Arc, ) -> PreEmitResult { - let source_hash = self.0.get_source_hash(module_kind, source); + let source_hash = + self + .0 + .get_source_hash(module_kind, transpile_and_emit_options, source); if let Some(emit_code) = self.0.emit_cache.get_emit_code(specifier, source_hash) diff --git a/cli/factory.rs b/cli/factory.rs index bfe6d055703083..e41fa1b73e91b4 100644 --- a/cli/factory.rs +++ b/cli/factory.rs @@ -1,35 +1,45 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
+use std::borrow::Cow; use std::future::Future; +use std::path::Path; use std::path::PathBuf; use std::sync::Arc; use deno_cache_dir::npm::NpmCacheDir; -use deno_config::workspace::PackageJsonDepResolution; +use deno_config::workspace::Workspace; +use deno_config::workspace::WorkspaceDirectory; use deno_config::workspace::WorkspaceResolver; +use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::futures::FutureExt; +use deno_core::serde_json; +use deno_core::url::Url; use deno_core::FeatureChecker; use deno_error::JsErrorBox; -use deno_lib::cache::DenoDir; -use deno_lib::cache::DenoDirProvider; +use deno_lib::args::get_root_cert_store; +use deno_lib::args::resolve_npm_resolution_snapshot; +use deno_lib::args::CaData; +use deno_lib::args::NpmProcessStateKind; +use deno_lib::args::NPM_PROCESS_STATE; +use deno_lib::loader::NpmModuleLoader; +use deno_lib::npm::create_npm_process_state_provider; use deno_lib::npm::NpmRegistryReadPermissionChecker; use deno_lib::npm::NpmRegistryReadPermissionCheckerMode; use deno_lib::worker::LibMainWorkerFactory; use deno_lib::worker::LibMainWorkerOptions; +use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm_cache::NpmCacheSetting; use deno_resolver::cjs::IsCjsResolutionMode; -use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions; +use deno_resolver::factory::ConfigDiscoveryOption; +use deno_resolver::factory::DenoDirPathProviderOptions; +use deno_resolver::factory::NpmProcessStateOptions; +use deno_resolver::factory::ResolverFactoryOptions; +use deno_resolver::factory::SpecifiedImportMapProvider; use deno_resolver::npm::managed::NpmResolutionCell; -use deno_resolver::npm::CreateInNpmPkgCheckerOptions; use deno_resolver::npm::DenoInNpmPackageChecker; -use deno_resolver::npm::NpmReqResolverOptions; -use deno_resolver::sloppy_imports::SloppyImportsCachedFs; -use deno_resolver::DenoResolverOptions; -use deno_resolver::NodeAndNpmReqResolver; use deno_runtime::deno_fs; use deno_runtime::deno_fs::RealFs; 
-use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use deno_runtime::deno_permissions::Permissions; use deno_runtime::deno_permissions::PermissionsContainer; use deno_runtime::deno_tls::rustls::RootCertStore; @@ -37,29 +47,30 @@ use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::inspector_server::InspectorServer; use deno_runtime::permissions::RuntimePermissionDescriptorParser; -use log::warn; use node_resolver::analyze::NodeCodeTranslator; use once_cell::sync::OnceCell; +use sys_traits::EnvCurrentDir; -use crate::args::check_warn_tsconfig; -use crate::args::get_root_cert_store; -use crate::args::CaData; +use crate::args::deno_json::TsConfigResolver; use crate::args::CliOptions; +use crate::args::ConfigFlag; use crate::args::DenoSubcommand; use crate::args::Flags; use crate::args::NpmInstallDepsProvider; -use crate::args::TsConfigType; +use crate::args::WorkspaceExternalImportMapLoader; use crate::cache::Caches; use crate::cache::CodeCache; +use crate::cache::DenoDir; +use crate::cache::DenoDirProvider; use crate::cache::EmitCache; use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; -use crate::cache::LocalHttpCache; use crate::cache::ModuleInfoCache; use crate::cache::NodeAnalysisCache; use crate::cache::ParsedSourceCache; use crate::emit::Emitter; use crate::file_fetcher::CliFileFetcher; +use crate::file_fetcher::TextDecodedFile; use crate::graph_container::MainModuleGraphContainer; use crate::graph_util::FileWatcherReporter; use crate::graph_util::ModuleGraphBuilder; @@ -71,16 +82,12 @@ use crate::node::CliCjsCodeAnalyzer; use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeResolver; use crate::node::CliPackageJsonResolver; -use crate::npm::create_npm_process_state_provider; use crate::npm::installer::NpmInstaller; use crate::npm::installer::NpmResolutionInstaller; -use crate::npm::CliByonmNpmResolverCreateOptions; -use crate::npm::CliManagedNpmResolverCreateOptions; use 
crate::npm::CliNpmCache; use crate::npm::CliNpmCacheHttpClient; use crate::npm::CliNpmRegistryInfoProvider; use crate::npm::CliNpmResolver; -use crate::npm::CliNpmResolverCreateOptions; use crate::npm::CliNpmResolverManagedSnapshotOption; use crate::npm::CliNpmTarballCache; use crate::npm::NpmResolutionInitializer; @@ -91,7 +98,6 @@ use crate::resolver::CliNpmReqResolver; use crate::resolver::CliResolver; use crate::resolver::CliSloppyImportsResolver; use crate::resolver::FoundPackageJsonDepFlag; -use crate::resolver::NpmModuleLoader; use crate::standalone::binary::DenoCompileBinaryWriter; use crate::sys::CliSys; use crate::tools::check::TypeChecker; @@ -100,7 +106,6 @@ use crate::tools::lint::LintRuleProvider; use crate::tools::run::hmr::HmrRunner; use crate::tsc::TypeCheckingCjsTracker; use crate::util::file_watcher::WatcherCommunicator; -use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::worker::CliMainWorkerFactory; @@ -143,6 +148,74 @@ impl RootCertStoreProvider for CliRootCertStoreProvider { } } +#[derive(Debug)] +struct CliSpecifiedImportMapProvider { + cli_options: Arc, + file_fetcher: Arc, + workspace_external_import_map_loader: Arc, +} + +#[async_trait::async_trait(?Send)] +impl SpecifiedImportMapProvider for CliSpecifiedImportMapProvider { + async fn get( + &self, + ) -> Result, AnyError> { + async fn resolve_import_map_value_from_specifier( + specifier: &Url, + file_fetcher: &CliFileFetcher, + ) -> Result { + if specifier.scheme() == "data" { + let data_url_text = + deno_media_type::data_url::RawDataUrl::parse(specifier)?.decode()?; + Ok(serde_json::from_str(&data_url_text)?) + } else { + let file = TextDecodedFile::decode( + file_fetcher.fetch_bypass_permissions(specifier).await?, + )?; + Ok(serde_json::from_str(&file.source)?) 
+ } + } + + let maybe_import_map_specifier = + self.cli_options.resolve_specified_import_map_specifier()?; + match maybe_import_map_specifier { + Some(specifier) => { + let value = resolve_import_map_value_from_specifier( + &specifier, + &self.file_fetcher, + ) + .await + .with_context(|| { + format!("Unable to load '{}' import map", specifier) + })?; + Ok(Some(deno_config::workspace::SpecifiedImportMap { + base_url: specifier, + value, + })) + } + None => { + if let Some(import_map) = + self.workspace_external_import_map_loader.get_or_load()? + { + let path_url = deno_path_util::url_from_file_path(&import_map.path)?; + Ok(Some(deno_config::workspace::SpecifiedImportMap { + base_url: path_url, + value: import_map.value.clone(), + })) + } else { + Ok(None) + } + } + } + } +} + +pub type CliWorkspaceFactory = deno_resolver::factory::WorkspaceFactory; +pub type CliDenoDirPathProvider = + deno_resolver::factory::DenoDirPathProvider; + +pub type CliResolverFactory = deno_resolver::factory::ResolverFactory; + pub struct Deferred(once_cell::unsync::OnceCell); impl Default for Deferred { @@ -152,10 +225,6 @@ impl Default for Deferred { } impl Deferred { - pub fn from_value(value: T) -> Self { - Self(once_cell::unsync::OnceCell::from(value)) - } - #[inline(always)] pub fn get_or_try_init( &self, @@ -194,17 +263,15 @@ struct CliFactoryServices { cjs_tracker: Deferred>, cli_options: Deferred>, code_cache: Deferred>, - deno_resolver: Deferred>, + deno_dir_path_provider: Deferred>, + deno_dir_provider: Deferred>, emit_cache: Deferred>, emitter: Deferred>, feature_checker: Deferred>, file_fetcher: Deferred>, found_pkg_json_dep_flag: Arc, fs: Deferred>, - global_http_cache: Deferred>, - http_cache: Deferred>, http_client_provider: Deferred>, - in_npm_pkg_checker: Deferred, main_graph_container: Deferred>, maybe_file_watcher_reporter: Deferred>, maybe_inspector_server: Deferred>>, @@ -213,36 +280,40 @@ struct CliFactoryServices { module_info_cache: Deferred>, module_load_preparer: 
Deferred>, node_code_translator: Deferred>, - node_resolver: Deferred>, npm_cache: Deferred>, - npm_cache_dir: Deferred>, npm_cache_http_client: Deferred>, npm_graph_resolver: Deferred>, npm_installer: Deferred>, npm_registry_info_provider: Deferred>, - npm_req_resolver: Deferred>, - npm_resolution: Arc, npm_resolution_initializer: Deferred>, npm_resolution_installer: Deferred>, - npm_resolver: Deferred, npm_tarball_cache: Deferred>, parsed_source_cache: Deferred>, permission_desc_parser: Deferred>>, - pkg_json_resolver: Deferred>, resolver: Deferred>, + resolver_factory: Deferred>, root_cert_store_provider: Deferred>, root_permissions_container: Deferred, - sloppy_imports_resolver: Deferred>>, text_only_progress_bar: Deferred, + tsconfig_resolver: Deferred>, type_checker: Deferred>, - workspace_resolver: Deferred>, + workspace_factory: Deferred>, + workspace_external_import_map_loader: + Deferred>, +} + +#[derive(Debug, Default)] +struct CliFactoryOverrides { + initial_cwd: Option, + workspace_directory: Option>, } pub struct CliFactory { watcher_communicator: Option>, flags: Arc, services: CliFactoryServices, + overrides: CliFactoryOverrides, } impl CliFactory { @@ -251,18 +322,7 @@ impl CliFactory { flags, watcher_communicator: None, services: Default::default(), - } - } - - pub fn from_cli_options(cli_options: Arc) -> Self { - let (cli_options, flags) = cli_options.into_self_and_flags(); - CliFactory { - watcher_communicator: None, - flags, - services: CliFactoryServices { - cli_options: Deferred::from_value(cli_options), - ..Default::default() - }, + overrides: Default::default(), } } @@ -274,29 +334,63 @@ impl CliFactory { watcher_communicator: Some(watcher_communicator), flags, services: Default::default(), + overrides: Default::default(), } } + pub fn set_initial_cwd(&mut self, initial_cwd: PathBuf) { + self.overrides.initial_cwd = Some(initial_cwd); + } + + pub fn set_workspace_dir(&mut self, dir: Arc) { + self.overrides.workspace_directory = Some(dir); + 
} + pub fn cli_options(&self) -> Result<&Arc, AnyError> { self.services.cli_options.get_or_try_init(|| { - CliOptions::from_flags(&self.sys(), self.flags.clone()).map(Arc::new) + let workspace_factory = self.workspace_factory()?; + let workspace_directory = workspace_factory.workspace_directory()?; + let maybe_external_import_map = + self.workspace_external_import_map_loader()?.get_or_load()?; + CliOptions::from_flags( + &self.sys(), + self.flags.clone(), + workspace_factory.initial_cwd().clone(), + maybe_external_import_map, + workspace_directory.clone(), + ) + .map(Arc::new) }) } - pub fn deno_dir_provider( - &self, - ) -> Result<&Arc>, AnyError> { - Ok(&self.cli_options()?.deno_dir_provider) + pub fn deno_dir_path_provider(&self) -> &Arc { + self.services.deno_dir_path_provider.get_or_init(|| { + Arc::new(CliDenoDirPathProvider::new( + self.sys(), + DenoDirPathProviderOptions { + maybe_custom_root: self.flags.internal.cache_path.clone(), + }, + )) + }) + } + + pub fn deno_dir_provider(&self) -> &Arc { + self.services.deno_dir_provider.get_or_init(|| { + Arc::new(DenoDirProvider::new( + self.sys(), + self.deno_dir_path_provider().clone(), + )) + }) } - pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> { - Ok(self.deno_dir_provider()?.get_or_create()?) + pub fn deno_dir(&self) -> Result<&DenoDir, AnyError> { + Ok(self.deno_dir_provider().get_or_create()?) 
} pub fn caches(&self) -> Result<&Arc, AnyError> { self.services.caches.get_or_try_init(|| { let cli_options = self.cli_options()?; - let caches = Arc::new(Caches::new(self.deno_dir_provider()?.clone())); + let caches = Arc::new(Caches::new(self.deno_dir_provider().clone())); // Warm up the caches we know we'll likely need based on the CLI mode match cli_options.sub_command() { DenoSubcommand::Run(_) @@ -342,29 +436,11 @@ impl CliFactory { } pub fn global_http_cache(&self) -> Result<&Arc, AnyError> { - self.services.global_http_cache.get_or_try_init(|| { - Ok(Arc::new(GlobalHttpCache::new( - self.sys(), - self.deno_dir()?.remote_folder_path(), - ))) - }) + Ok(self.workspace_factory()?.global_http_cache()?) } pub fn http_cache(&self) -> Result<&Arc, AnyError> { - self.services.http_cache.get_or_try_init(|| { - let global_cache = self.global_http_cache()?.clone(); - match self.cli_options()?.vendor_dir_path() { - Some(local_path) => { - let local_cache = LocalHttpCache::new( - local_path.clone(), - global_cache, - deno_cache_dir::GlobalToLocalCopy::Allow, - ); - Ok(Arc::new(local_cache)) - } - None => Ok(global_cache), - } - }) + Ok(self.workspace_factory()?.http_cache()?) 
} pub fn http_client_provider(&self) -> &Arc { @@ -403,22 +479,7 @@ impl CliFactory { pub fn in_npm_pkg_checker( &self, ) -> Result<&DenoInNpmPackageChecker, AnyError> { - self.services.in_npm_pkg_checker.get_or_try_init(|| { - let cli_options = self.cli_options()?; - let options = if cli_options.use_byonm() { - CreateInNpmPkgCheckerOptions::Byonm - } else { - CreateInNpmPkgCheckerOptions::Managed( - ManagedInNpmPkgCheckerCreateOptions { - root_cache_dir_url: self.npm_cache_dir()?.root_dir_url(), - maybe_node_modules_path: cli_options - .node_modules_dir_path() - .map(|p| p.as_path()), - }, - ) - }; - Ok(DenoInNpmPackageChecker::new(options)) - }) + self.resolver_factory()?.in_npm_package_checker() } pub fn npm_cache(&self) -> Result<&Arc, AnyError> { @@ -428,21 +489,13 @@ impl CliFactory { self.npm_cache_dir()?.clone(), self.sys(), NpmCacheSetting::from_cache_setting(&cli_options.cache_setting()), - cli_options.npmrc().clone(), + self.npmrc()?.clone(), ))) }) } pub fn npm_cache_dir(&self) -> Result<&Arc, AnyError> { - self.services.npm_cache_dir.get_or_try_init(|| { - let global_path = self.deno_dir()?.npm_folder_path(); - let cli_options = self.cli_options()?; - Ok(Arc::new(NpmCacheDir::new( - &self.sys(), - global_path, - cli_options.npmrc().get_all_known_registries_urls(), - ))) - }) + Ok(self.workspace_factory()?.npm_cache_dir()?) } pub fn npm_cache_http_client(&self) -> &Arc { @@ -471,8 +524,7 @@ impl CliFactory { pub fn npm_installer_if_managed( &self, ) -> Result>, AnyError> { - let options = self.cli_options()?; - if options.use_byonm() || options.no_npm() { + if self.resolver_factory()?.use_byonm()? 
|| self.cli_options()?.no_npm() { Ok(None) } else { Ok(Some(self.npm_installer()?)) @@ -482,19 +534,22 @@ impl CliFactory { pub fn npm_installer(&self) -> Result<&Arc, AnyError> { self.services.npm_installer.get_or_try_init(|| { let cli_options = self.cli_options()?; + let workspace_factory = self.workspace_factory()?; Ok(Arc::new(NpmInstaller::new( self.npm_cache()?.clone(), Arc::new(NpmInstallDepsProvider::from_workspace( cli_options.workspace(), )), - self.npm_resolution().clone(), + self.npm_resolution()?.clone(), self.npm_resolution_initializer()?.clone(), self.npm_resolution_installer()?.clone(), self.text_only_progress_bar(), self.sys(), self.npm_tarball_cache()?.clone(), cli_options.maybe_lockfile().cloned(), - cli_options.node_modules_dir_path().cloned(), + workspace_factory + .node_modules_dir_path()? + .map(|p| p.to_path_buf()), cli_options.lifecycle_scripts_config(), cli_options.npm_system_info(), ))) @@ -508,17 +563,16 @@ impl CliFactory { .services .npm_registry_info_provider .get_or_try_init(|| { - let cli_options = self.cli_options()?; Ok(Arc::new(CliNpmRegistryInfoProvider::new( self.npm_cache()?.clone(), self.npm_cache_http_client().clone(), - cli_options.npmrc().clone(), + self.npmrc()?.clone(), ))) }) } - pub fn npm_resolution(&self) -> &Arc { - &self.services.npm_resolution + pub fn npm_resolution(&self) -> Result<&Arc, AnyError> { + Ok(self.resolver_factory()?.npm_resolution()) } pub fn npm_resolution_initializer( @@ -531,8 +585,8 @@ impl CliFactory { let cli_options = self.cli_options()?; Ok(Arc::new(NpmResolutionInitializer::new( self.npm_registry_info_provider()?.clone(), - self.npm_resolution().clone(), - match cli_options.resolve_npm_resolution_snapshot()? { + self.npm_resolution()?.clone(), + match resolve_npm_resolution_snapshot()? 
{ Some(snapshot) => { CliNpmResolverManagedSnapshotOption::Specified(Some(snapshot)) } @@ -556,71 +610,62 @@ impl CliFactory { let cli_options = self.cli_options()?; Ok(Arc::new(NpmResolutionInstaller::new( self.npm_registry_info_provider()?.clone(), - self.npm_resolution().clone(), + self.npm_resolution()?.clone(), cli_options.maybe_lockfile().cloned(), ))) }) } pub async fn npm_resolver(&self) -> Result<&CliNpmResolver, AnyError> { - self - .services - .npm_resolver - .get_or_try_init_async( - async { - let cli_options = self.cli_options()?; - Ok(CliNpmResolver::new(if cli_options.use_byonm() { - CliNpmResolverCreateOptions::Byonm( - CliByonmNpmResolverCreateOptions { - sys: self.sys(), - pkg_json_resolver: self.pkg_json_resolver().clone(), - root_node_modules_dir: Some( - match cli_options.node_modules_dir_path() { - Some(node_modules_path) => node_modules_path.to_path_buf(), - // path needs to be canonicalized for node resolution - // (node_modules_dir_path above is already canonicalized) - None => canonicalize_path_maybe_not_exists( - cli_options.initial_cwd(), - )? - .join("node_modules"), - }, - ), - }, - ) - } else { - self - .npm_resolution_initializer()? 
- .ensure_initialized() - .await?; - CliNpmResolverCreateOptions::Managed( - CliManagedNpmResolverCreateOptions { - sys: self.sys(), - npm_resolution: self.npm_resolution().clone(), - npm_cache_dir: self.npm_cache_dir()?.clone(), - maybe_node_modules_path: cli_options - .node_modules_dir_path() - .cloned(), - npm_system_info: cli_options.npm_system_info(), - npmrc: cli_options.npmrc().clone(), - }, - ) - })) - } - .boxed_local(), - ) - .await + self.initialize_npm_resolution_if_managed().await?; + self.resolver_factory()?.npm_resolver() } pub fn npm_tarball_cache( &self, ) -> Result<&Arc, AnyError> { self.services.npm_tarball_cache.get_or_try_init(|| { - let cli_options = self.cli_options()?; Ok(Arc::new(CliNpmTarballCache::new( self.npm_cache()?.clone(), self.npm_cache_http_client().clone(), self.sys(), - cli_options.npmrc().clone(), + self.npmrc()?.clone(), + ))) + }) + } + + pub fn npmrc(&self) -> Result<&Arc, AnyError> { + Ok(self.workspace_factory()?.npmrc()?) + } + + pub fn resolver_factory(&self) -> Result<&Arc, AnyError> { + self.services.resolver_factory.get_or_try_init(|| { + Ok(Arc::new(CliResolverFactory::new( + self.workspace_factory()?.clone(), + ResolverFactoryOptions { + conditions_from_resolution_mode: Default::default(), + no_sloppy_imports_cache: false, + npm_system_info: self.flags.subcommand.npm_system_info(), + specified_import_map: Some(Box::new(CliSpecifiedImportMapProvider { + cli_options: self.cli_options()?.clone(), + file_fetcher: self.file_fetcher()?.clone(), + workspace_external_import_map_loader: self + .workspace_external_import_map_loader()? 
+ .clone(), + })), + unstable_sloppy_imports: self.flags.unstable_config.sloppy_imports, + package_json_cache: Some(Arc::new( + node_resolver::PackageJsonThreadLocalCache, + )), + package_json_dep_resolution: match &self.flags.subcommand { + DenoSubcommand::Publish(_) => { + // the node_modules directory is not published to jsr, so resolve + // dependencies via the package.json rather than using node resolution + Some(deno_config::workspace::PackageJsonDepResolution::Enabled) + } + _ => None, + }, + }, ))) }) } @@ -628,82 +673,52 @@ impl CliFactory { pub fn sloppy_imports_resolver( &self, ) -> Result>, AnyError> { - self - .services - .sloppy_imports_resolver - .get_or_try_init(|| { - Ok(self.cli_options()?.unstable_sloppy_imports().then(|| { - Arc::new(CliSloppyImportsResolver::new(SloppyImportsCachedFs::new( - self.sys(), - ))) - })) - }) - .map(|maybe| maybe.as_ref()) + self.resolver_factory()?.sloppy_imports_resolver() + } + + pub fn workspace(&self) -> Result<&Arc, AnyError> { + Ok(&self.workspace_directory()?.workspace) + } + + pub fn workspace_directory( + &self, + ) -> Result<&Arc, AnyError> { + Ok(self.workspace_factory()?.workspace_directory()?) 
+ } + + fn workspace_factory(&self) -> Result<&Arc, AnyError> { + self.services.workspace_factory.get_or_try_init(|| { + let initial_cwd = match self.overrides.initial_cwd.clone() { + Some(v) => v, + None => self + .sys() + .env_current_dir() + .with_context(|| "Failed getting cwd.")?, + }; + let options = new_workspace_factory_options( + &initial_cwd, + &self.flags, + self.deno_dir_path_provider().clone(), + ); + let mut factory = + CliWorkspaceFactory::new(self.sys(), initial_cwd, options); + if let Some(workspace_dir) = &self.overrides.workspace_directory { + factory.set_workspace_directory(workspace_dir.clone()); + } + Ok(Arc::new(factory)) + }) } pub async fn workspace_resolver( &self, ) -> Result<&Arc, AnyError> { - self - .services - .workspace_resolver - .get_or_try_init_async(async { - let cli_options = self.cli_options()?; - let resolver = cli_options - .create_workspace_resolver( - self.file_fetcher()?, - if cli_options.use_byonm() - && !matches!( - cli_options.sub_command(), - DenoSubcommand::Publish(_) - ) - { - PackageJsonDepResolution::Disabled - } else { - // todo(dsherret): this should be false for nodeModulesDir: true - PackageJsonDepResolution::Enabled - }, - ) - .await?; - if !resolver.diagnostics().is_empty() { - warn!( - "Import map diagnostics:\n{}", - resolver - .diagnostics() - .iter() - .map(|d| format!(" - {d}")) - .collect::>() - .join("\n") - ); - } - Ok(Arc::new(resolver)) - }) - .await + self.initialize_npm_resolution_if_managed().await?; + self.resolver_factory()?.workspace_resolver().await } pub async fn deno_resolver(&self) -> Result<&Arc, AnyError> { - self - .services - .deno_resolver - .get_or_try_init_async(async { - let cli_options = self.cli_options()?; - Ok(Arc::new(CliDenoResolver::new(DenoResolverOptions { - in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(), - node_and_req_resolver: if cli_options.no_npm() { - None - } else { - Some(NodeAndNpmReqResolver { - node_resolver: self.node_resolver().await?.clone(), - 
npm_req_resolver: self.npm_req_resolver().await?.clone(), - }) - }, - sloppy_imports_resolver: self.sloppy_imports_resolver()?.cloned(), - workspace_resolver: self.workspace_resolver().await?.clone(), - is_byonm: cli_options.use_byonm(), - maybe_vendor_dir: cli_options.vendor_dir_path(), - }))) - }) - .await + self.initialize_npm_resolution_if_managed().await?; + self.resolver_factory()?.deno_resolver().await } pub async fn resolver(&self) -> Result<&Arc, AnyError> { @@ -763,20 +778,11 @@ impl CliFactory { pub fn emitter(&self) -> Result<&Arc, AnyError> { self.services.emitter.get_or_try_init(|| { - let cli_options = self.cli_options()?; - let ts_config_result = - cli_options.resolve_ts_config_for_emit(TsConfigType::Emit)?; - check_warn_tsconfig(&ts_config_result); - let (transpile_options, emit_options) = - crate::args::ts_config_to_transpile_and_emit_options( - ts_config_result.ts_config, - )?; Ok(Arc::new(Emitter::new( self.cjs_tracker()?.clone(), self.emit_cache()?.clone(), self.parsed_source_cache().clone(), - transpile_options, - emit_options, + self.tsconfig_resolver()?.clone(), ))) }) } @@ -789,23 +795,19 @@ impl CliFactory { } pub async fn node_resolver(&self) -> Result<&Arc, AnyError> { - self - .services - .node_resolver - .get_or_try_init_async( - async { - Ok(Arc::new(CliNodeResolver::new( - self.in_npm_pkg_checker()?.clone(), - RealIsBuiltInNodeModuleChecker, - self.npm_resolver().await?.clone(), - self.pkg_json_resolver().clone(), - self.sys(), - node_resolver::ConditionsFromResolutionMode::default(), - ))) - } - .boxed_local(), - ) - .await + self.initialize_npm_resolution_if_managed().await?; + self.resolver_factory()?.node_resolver() + } + + async fn initialize_npm_resolution_if_managed(&self) -> Result<(), AnyError> { + let npm_resolver = self.resolver_factory()?.npm_resolver()?; + if npm_resolver.is_managed() { + self + .npm_resolution_initializer()? 
+ .ensure_initialized() + .await?; + } + Ok(()) } pub async fn node_code_translator( @@ -815,52 +817,47 @@ impl CliFactory { .services .node_code_translator .get_or_try_init_async(async { - let caches = self.caches()?; - let node_analysis_cache = - NodeAnalysisCache::new(caches.node_analysis_db()); let node_resolver = self.node_resolver().await?.clone(); - let cjs_esm_analyzer = CliCjsCodeAnalyzer::new( - node_analysis_cache, - self.cjs_tracker()?.clone(), - self.fs().clone(), - Some(self.parsed_source_cache().clone()), - ); + let cjs_code_analyzer = self.create_cjs_code_analyzer()?; Ok(Arc::new(NodeCodeTranslator::new( - cjs_esm_analyzer, + cjs_code_analyzer, self.in_npm_pkg_checker()?.clone(), node_resolver, self.npm_resolver().await?.clone(), - self.pkg_json_resolver().clone(), + self.pkg_json_resolver()?.clone(), self.sys(), ))) }) .await } - pub async fn npm_req_resolver( + fn create_cjs_code_analyzer(&self) -> Result { + let caches = self.caches()?; + let node_analysis_cache = NodeAnalysisCache::new(caches.node_analysis_db()); + Ok(CliCjsCodeAnalyzer::new( + node_analysis_cache, + self.cjs_tracker()?.clone(), + self.fs().clone(), + Some(self.parsed_source_cache().clone()), + )) + } + + pub fn npm_req_resolver(&self) -> Result<&Arc, AnyError> { + self.resolver_factory()?.npm_req_resolver() + } + + pub fn pkg_json_resolver( &self, - ) -> Result<&Arc, AnyError> { - self - .services - .npm_req_resolver - .get_or_try_init_async(async { - let npm_resolver = self.npm_resolver().await?; - Ok(Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { - sys: self.sys(), - in_npm_pkg_checker: self.in_npm_pkg_checker()?.clone(), - node_resolver: self.node_resolver().await?.clone(), - npm_resolver: npm_resolver.clone(), - }))) - }) - .await + ) -> Result<&Arc, AnyError> { + Ok(self.resolver_factory()?.pkg_json_resolver()) } - pub fn pkg_json_resolver(&self) -> &Arc { - self - .services - .pkg_json_resolver - .get_or_init(|| Arc::new(CliPackageJsonResolver::new(self.sys()))) 
+ pub fn tsconfig_resolver(&self) -> Result<&Arc, AnyError> { + self.services.tsconfig_resolver.get_or_try_init(|| { + let workspace = self.workspace()?; + Ok(Arc::new(TsConfigResolver::from_workspace(workspace))) + }) } pub async fn type_checker(&self) -> Result<&Arc, AnyError> { @@ -881,6 +878,7 @@ impl CliFactory { self.npm_installer_if_managed()?.cloned(), self.npm_resolver().await?.clone(), self.sys(), + self.tsconfig_resolver()?.clone(), ))) }) .await @@ -911,6 +909,7 @@ impl CliFactory { self.resolver().await?.clone(), self.root_permissions_container()?.clone(), self.sys(), + self.tsconfig_resolver()?.clone(), ))) }) .await @@ -986,7 +985,7 @@ impl CliFactory { let options = self.cli_options()?; Ok(Arc::new(CliCjsTracker::new( self.in_npm_pkg_checker()?.clone(), - self.pkg_json_resolver().clone(), + self.pkg_json_resolver()?.clone(), if options.is_node_main() || options.unstable_detect_cjs() { IsCjsResolutionMode::ImplicitTypeCommonJs } else if options.detect_cjs() { @@ -1027,11 +1026,11 @@ impl CliFactory { ) -> Result { let cli_options = self.cli_options()?; Ok(DenoCompileBinaryWriter::new( + self.create_cjs_code_analyzer()?, self.cjs_tracker()?, self.cli_options()?, self.deno_dir()?, self.emitter()?, - self.file_fetcher()?, self.http_client_provider(), self.npm_resolver().await?, self.workspace_resolver().await?.as_ref(), @@ -1055,6 +1054,20 @@ impl CliFactory { }) } + fn workspace_external_import_map_loader( + &self, + ) -> Result<&Arc, AnyError> { + self + .services + .workspace_external_import_map_loader + .get_or_try_init(|| { + Ok(Arc::new(WorkspaceExternalImportMapLoader::new( + self.sys(), + self.workspace_directory()?.workspace.clone(), + ))) + }) + } + pub async fn create_cli_main_worker_factory( &self, ) -> Result { @@ -1071,14 +1084,18 @@ impl CliFactory { }; let node_code_translator = self.node_code_translator().await?; let cjs_tracker = self.cjs_tracker()?.clone(); - let pkg_json_resolver = self.pkg_json_resolver().clone(); - let 
npm_req_resolver = self.npm_req_resolver().await?; + let pkg_json_resolver = self.pkg_json_resolver()?.clone(); + let npm_req_resolver = self.npm_req_resolver()?; + let workspace_factory = self.workspace_factory()?; let npm_registry_permission_checker = { - let mode = if cli_options.use_byonm() { + let mode = if self.resolver_factory()?.use_byonm()? { NpmRegistryReadPermissionCheckerMode::Byonm - } else if let Some(node_modules_dir) = cli_options.node_modules_dir_path() + } else if let Some(node_modules_dir) = + workspace_factory.node_modules_dir_path()? { - NpmRegistryReadPermissionCheckerMode::Local(node_modules_dir.clone()) + NpmRegistryReadPermissionCheckerMode::Local( + node_modules_dir.to_path_buf(), + ) } else { NpmRegistryReadPermissionCheckerMode::Global( self.npm_cache_dir()?.root_dir().to_path_buf(), @@ -1103,8 +1120,8 @@ impl CliFactory { node_resolver.clone(), NpmModuleLoader::new( self.cjs_tracker()?.clone(), - fs.clone(), node_code_translator.clone(), + self.sys(), ), npm_registry_permission_checker, npm_req_resolver.clone(), @@ -1138,7 +1155,6 @@ impl CliFactory { lib_main_worker_factory, maybe_file_watcher_communicator, cli_options.maybe_lockfile().cloned(), - node_resolver.clone(), self.npm_installer_if_managed()?.cloned(), npm_resolver.clone(), self.sys(), @@ -1151,6 +1167,7 @@ impl CliFactory { &self, ) -> Result { let cli_options = self.cli_options()?; + let workspace_factory = self.workspace_factory()?; Ok(LibMainWorkerOptions { argv: cli_options.argv().clone(), // This optimization is only available for "run" subcommand @@ -1160,7 +1177,9 @@ impl CliFactory { log_level: cli_options.log_level().unwrap_or(log::Level::Info).into(), enable_op_summary_metrics: cli_options.enable_op_summary_metrics(), enable_testing_features: cli_options.enable_testing_features(), - has_node_modules_dir: cli_options.has_node_modules_dir(), + has_node_modules_dir: workspace_factory + .node_modules_dir_path()? 
+ .is_some(), inspect_brk: cli_options.inspect_brk().is_some(), inspect_wait: cli_options.inspect_wait().is_some(), strace_ops: cli_options.strace_ops().clone(), @@ -1180,8 +1199,6 @@ impl CliFactory { node_ipc: cli_options.node_ipc_fd(), serve_port: cli_options.serve_port(), serve_host: cli_options.serve_host(), - deno_version: crate::version::DENO_VERSION_INFO.deno, - deno_user_agent: crate::version::DENO_VERSION_INFO.user_agent, otel_config: self.cli_options()?.otel_config(), startup_snapshot: crate::js::deno_isolate_init(), }) @@ -1225,3 +1242,57 @@ impl CliFactory { }) } } + +fn new_workspace_factory_options( + initial_cwd: &Path, + flags: &Flags, + deno_dir_path_provider: Arc, +) -> deno_resolver::factory::WorkspaceFactoryOptions { + deno_resolver::factory::WorkspaceFactoryOptions { + additional_config_file_names: if matches!( + flags.subcommand, + DenoSubcommand::Publish(..) + ) { + &["jsr.json", "jsr.jsonc"] + } else { + &[] + }, + config_discovery: match &flags.config_flag { + ConfigFlag::Discover => { + if let Some(start_paths) = flags.config_path_args(initial_cwd) { + ConfigDiscoveryOption::Discover { start_paths } + } else { + ConfigDiscoveryOption::Disabled + } + } + ConfigFlag::Path(path) => { + ConfigDiscoveryOption::Path(PathBuf::from(path)) + } + ConfigFlag::Disabled => ConfigDiscoveryOption::Disabled, + }, + deno_dir_path_provider: Some(deno_dir_path_provider), + // For `deno install/add/remove/init` we want to force the managed + // resolver so it can set up the `node_modules/` directory. 
+ is_package_manager_subcommand: matches!( + flags.subcommand, + DenoSubcommand::Install(_) + | DenoSubcommand::Add(_) + | DenoSubcommand::Remove(_) + | DenoSubcommand::Init(_) + | DenoSubcommand::Outdated(_) + ), + no_npm: flags.no_npm, + node_modules_dir: flags.node_modules_dir, + + npm_process_state: NPM_PROCESS_STATE.as_ref().map(|s| { + NpmProcessStateOptions { + node_modules_dir: s + .local_node_modules_path + .as_ref() + .map(|s| Cow::Borrowed(s.as_str())), + is_byonm: matches!(s.kind, NpmProcessStateKind::Byonm), + } + }), + vendor: flags.vendor, + } +} diff --git a/cli/file_fetcher.rs b/cli/file_fetcher.rs index cfc26d7e699f38..cc9cf38425e797 100644 --- a/cli/file_fetcher.rs +++ b/cli/file_fetcher.rs @@ -60,11 +60,10 @@ impl TextDecodedFile { file.maybe_headers.as_ref(), ); let specifier = file.url; - match deno_graph::source::decode_source( - &specifier, - file.source, - maybe_charset, - ) { + let charset = maybe_charset.unwrap_or_else(|| { + deno_media_type::encoding::detect_charset(&specifier, &file.source) + }); + match deno_media_type::encoding::decode_arc_source(charset, file.source) { Ok(source) => Ok(TextDecodedFile { media_type, specifier, @@ -586,7 +585,7 @@ mod tests { // in deno_graph async fn test_fetch_remote_encoded( fixture: &str, - charset: &str, + expected_charset: &str, expected: &str, ) { let url_str = format!("http://127.0.0.1:4545/encoding/{fixture}"); @@ -598,15 +597,20 @@ mod tests { Some(&headers), ); assert_eq!( - deno_graph::source::decode_source(&specifier, file.source, maybe_charset) - .unwrap() - .as_ref(), + deno_media_type::encoding::decode_arc_source( + maybe_charset.unwrap_or_else(|| { + deno_media_type::encoding::detect_charset(&specifier, &file.source) + }), + file.source + ) + .unwrap() + .as_ref(), expected ); assert_eq!(media_type, MediaType::TypeScript); assert_eq!( headers.get("content-type").unwrap(), - &format!("application/typescript;charset={charset}") + 
&format!("application/typescript;charset={expected_charset}") ); } @@ -615,9 +619,12 @@ mod tests { let specifier = ModuleSpecifier::from_file_path(p).unwrap(); let (file, _) = test_fetch(&specifier).await; assert_eq!( - deno_graph::source::decode_source(&specifier, file.source, None) - .unwrap() - .as_ref(), + deno_media_type::encoding::decode_arc_source( + deno_media_type::encoding::detect_charset(&specifier, &file.source), + file.source + ) + .unwrap() + .as_ref(), expected ); } diff --git a/cli/graph_util.rs b/cli/graph_util.rs index e57fcf8a9459b0..6a640b7cb01f19 100644 --- a/cli/graph_util.rs +++ b/cli/graph_util.rs @@ -1,11 +1,13 @@ // Copyright 2018-2025 the Deno authors. MIT license. +use std::collections::BTreeMap; use std::collections::HashSet; use std::error::Error; use std::path::PathBuf; use std::sync::Arc; use deno_config::deno_json; +use deno_config::deno_json::CompilerOptionTypesDeserializeError; use deno_config::deno_json::JsxImportSourceConfig; use deno_config::deno_json::NodeModulesDirMode; use deno_config::workspace::JsrPackageConfig; @@ -19,6 +21,7 @@ use deno_graph::source::Loader; use deno_graph::source::LoaderChecksum; use deno_graph::source::ResolutionKind; use deno_graph::source::ResolveError; +use deno_graph::CheckJsOption; use deno_graph::FillFromLockfileOptions; use deno_graph::GraphKind; use deno_graph::JsrLoadError; @@ -40,6 +43,7 @@ use deno_semver::package::PackageNv; use deno_semver::SmallStackString; use crate::args::config_to_deno_graph_workspace_member; +use crate::args::deno_json::TsConfigResolver; use crate::args::jsr_url; use crate::args::CliLockfile; use crate::args::CliOptions; @@ -67,8 +71,8 @@ use crate::util::file_watcher::WatcherCommunicator; use crate::util::fs::canonicalize_path; #[derive(Clone)] -pub struct GraphValidOptions { - pub check_js: bool, +pub struct GraphValidOptions<'a> { + pub check_js: CheckJsOption<'a>, pub kind: GraphKind, /// Whether to exit the process for integrity check errors such as /// 
lockfile checksum mismatches and JSR integrity failures. @@ -136,8 +140,8 @@ pub fn fill_graph_from_lockfile( } #[derive(Clone)] -pub struct GraphWalkErrorsOptions { - pub check_js: bool, +pub struct GraphWalkErrorsOptions<'a> { + pub check_js: CheckJsOption<'a>, pub kind: GraphKind, } @@ -147,7 +151,7 @@ pub fn graph_walk_errors<'a>( graph: &'a ModuleGraph, sys: &'a CliSys, roots: &'a [ModuleSpecifier], - options: GraphWalkErrorsOptions, + options: GraphWalkErrorsOptions<'a>, ) -> impl Iterator + 'a { graph .walk( @@ -455,7 +459,6 @@ impl ModuleGraphCreator { check::CheckOptions { build_fast_check_graph: true, lib: self.options.ts_type_lib_window(), - log_ignored_options: true, reload: self.options.reload_flag(), type_check_mode: self.options.type_check_mode(), }, @@ -472,6 +475,9 @@ pub struct BuildFastCheckGraphOptions<'a> { #[derive(Debug, thiserror::Error, deno_error::JsError)] pub enum BuildGraphWithNpmResolutionError { + #[class(inherit)] + #[error(transparent)] + CompilerOptionTypesDeserialize(#[from] CompilerOptionTypesDeserializeError), #[class(inherit)] #[error(transparent)] SerdeJson(#[from] serde_json::Error), @@ -508,6 +514,7 @@ pub struct ModuleGraphBuilder { resolver: Arc, root_permissions_container: PermissionsContainer, sys: CliSys, + tsconfig_resolver: Arc, } impl ModuleGraphBuilder { @@ -529,6 +536,7 @@ impl ModuleGraphBuilder { resolver: Arc, root_permissions_container: PermissionsContainer, sys: CliSys, + tsconfig_resolver: Arc, ) -> Self { Self { caches, @@ -547,6 +555,7 @@ impl ModuleGraphBuilder { resolver, root_permissions_container, sys, + tsconfig_resolver, } } @@ -631,7 +640,16 @@ impl ModuleGraphBuilder { } let maybe_imports = if options.graph_kind.include_types() { - self.cli_options.to_compiler_option_types()? + // Resolve all the imports from every deno.json. We'll separate + // them later based on the folder we're type checking. 
+ let mut imports = Vec::new(); + for deno_json in self.cli_options.workspace().deno_jsons() { + let maybe_imports = deno_json.to_compiler_option_types()?; + imports.extend(maybe_imports.into_iter().map(|(referrer, imports)| { + deno_graph::ReferrerImports { referrer, imports } + })); + } + imports } else { Vec::new() }; @@ -847,7 +865,7 @@ impl ModuleGraphBuilder { } else { GraphKind::CodeOnly }, - check_js: self.cli_options.check_js(), + check_js: CheckJsOption::Custom(self.tsconfig_resolver.as_ref()), exit_integrity_errors: true, }, ) @@ -857,14 +875,23 @@ impl ModuleGraphBuilder { &self, ) -> Result { - let jsx_import_source_config = self + let jsx_import_source_config_unscoped = self .cli_options - .workspace() + .start_dir .to_maybe_jsx_import_source_config()?; + let mut jsx_import_source_config_by_scope = BTreeMap::default(); + for (dir_url, _) in self.cli_options.workspace().config_folders() { + let dir = self.cli_options.workspace().resolve_member_dir(dir_url); + let jsx_import_source_config_unscoped = + dir.to_maybe_jsx_import_source_config()?; + jsx_import_source_config_by_scope + .insert(dir_url.clone(), jsx_import_source_config_unscoped); + } Ok(CliGraphResolver { cjs_tracker: &self.cjs_tracker, resolver: &self.resolver, - jsx_import_source_config, + jsx_import_source_config_unscoped, + jsx_import_source_config_by_scope, }) } } @@ -1100,7 +1127,7 @@ pub fn has_graph_root_local_dependent_changed( follow_dynamic: true, kind: GraphKind::All, prefer_fast_check_graph: true, - check_js: true, + check_js: CheckJsOption::True, }, ); while let Some((s, _)) = dependent_specifiers.next() { @@ -1227,28 +1254,47 @@ fn format_deno_graph_error(err: &dyn Error) -> String { struct CliGraphResolver<'a> { cjs_tracker: &'a CliCjsTracker, resolver: &'a CliResolver, - jsx_import_source_config: Option, + jsx_import_source_config_unscoped: Option, + jsx_import_source_config_by_scope: + BTreeMap, Option>, +} + +impl<'a> CliGraphResolver<'a> { + fn 
resolve_jsx_import_source_config( + &self, + referrer: &ModuleSpecifier, + ) -> Option<&JsxImportSourceConfig> { + self + .jsx_import_source_config_by_scope + .iter() + .rfind(|(s, _)| referrer.as_str().starts_with(s.as_str())) + .map(|(_, c)| c.as_ref()) + .unwrap_or(self.jsx_import_source_config_unscoped.as_ref()) + } } impl<'a> deno_graph::source::Resolver for CliGraphResolver<'a> { - fn default_jsx_import_source(&self) -> Option { + fn default_jsx_import_source( + &self, + referrer: &ModuleSpecifier, + ) -> Option { self - .jsx_import_source_config - .as_ref() + .resolve_jsx_import_source_config(referrer) .and_then(|c| c.default_specifier.clone()) } - fn default_jsx_import_source_types(&self) -> Option { + fn default_jsx_import_source_types( + &self, + referrer: &ModuleSpecifier, + ) -> Option { self - .jsx_import_source_config - .as_ref() + .resolve_jsx_import_source_config(referrer) .and_then(|c| c.default_types_specifier.clone()) } - fn jsx_import_source_module(&self) -> &str { + fn jsx_import_source_module(&self, referrer: &ModuleSpecifier) -> &str { self - .jsx_import_source_config - .as_ref() + .resolve_jsx_import_source_config(referrer) .map(|c| c.module.as_str()) .unwrap_or(deno_graph::source::DEFAULT_JSX_IMPORT_SOURCE_MODULE) } diff --git a/cli/http_util.rs b/cli/http_util.rs index 5e63ab0a4a71f5..a12fde937ec711 100644 --- a/cli/http_util.rs +++ b/cli/http_util.rs @@ -14,6 +14,7 @@ use deno_core::serde_json; use deno_core::url::Url; use deno_error::JsError; use deno_error::JsErrorBox; +use deno_lib::version::DENO_VERSION_INFO; use deno_runtime::deno_fetch; use deno_runtime::deno_fetch::create_http_client; use deno_runtime::deno_fetch::CreateHttpClientOptions; @@ -28,7 +29,6 @@ use http_body_util::BodyExt; use thiserror::Error; use crate::util::progress_bar::UpdateGuard; -use crate::version; #[derive(Debug, Error)] pub enum SendError { @@ -79,7 +79,7 @@ impl HttpClientProvider { Entry::Occupied(entry) => Ok(HttpClient::new(entry.get().clone())), 
Entry::Vacant(entry) => { let client = create_http_client( - version::DENO_VERSION_INFO.user_agent, + DENO_VERSION_INFO.user_agent, CreateHttpClientOptions { root_cert_store: match &self.root_cert_store_provider { Some(provider) => Some(provider.get_or_try_init()?.clone()), @@ -481,7 +481,7 @@ mod test { let client = HttpClient::new( create_http_client( - version::DENO_VERSION_INFO.user_agent, + DENO_VERSION_INFO.user_agent, CreateHttpClientOptions { ca_certs: vec![std::fs::read( test_util::testdata_path().join("tls/RootCA.pem"), @@ -525,7 +525,7 @@ mod test { let client = HttpClient::new( create_http_client( - version::DENO_VERSION_INFO.user_agent, + DENO_VERSION_INFO.user_agent, CreateHttpClientOptions::default(), ) .unwrap(), @@ -566,7 +566,7 @@ mod test { let client = HttpClient::new( create_http_client( - version::DENO_VERSION_INFO.user_agent, + DENO_VERSION_INFO.user_agent, CreateHttpClientOptions { root_cert_store: Some(root_cert_store), ..Default::default() @@ -587,7 +587,7 @@ mod test { .unwrap(); let client = HttpClient::new( create_http_client( - version::DENO_VERSION_INFO.user_agent, + DENO_VERSION_INFO.user_agent, CreateHttpClientOptions { ca_certs: vec![std::fs::read( test_util::testdata_path() @@ -620,7 +620,7 @@ mod test { let url = Url::parse("https://localhost:5545/etag_script.ts").unwrap(); let client = HttpClient::new( create_http_client( - version::DENO_VERSION_INFO.user_agent, + DENO_VERSION_INFO.user_agent, CreateHttpClientOptions { ca_certs: vec![std::fs::read( test_util::testdata_path() @@ -661,7 +661,7 @@ mod test { .unwrap(); let client = HttpClient::new( create_http_client( - version::DENO_VERSION_INFO.user_agent, + DENO_VERSION_INFO.user_agent, CreateHttpClientOptions { ca_certs: vec![std::fs::read( test_util::testdata_path() diff --git a/cli/integration_tests_runner.rs b/cli/integration_tests_runner.rs index 7342e62fa0df8d..63f2abe4606c79 100644 --- a/cli/integration_tests_runner.rs +++ b/cli/integration_tests_runner.rs @@ -1,18 +1,5 
@@ // Copyright 2018-2025 the Deno authors. MIT license. -pub fn main() { - let mut args = vec!["cargo", "test", "-p", "cli_tests", "--features", "run"]; - - if !cfg!(debug_assertions) { - args.push("--release"); - } - - args.push("--"); - // If any args were passed to this process, pass them through to the child - let orig_args = std::env::args().skip(1).collect::>(); - let orig_args: Vec<&str> = - orig_args.iter().map(|x| x.as_ref()).collect::>(); - args.extend(orig_args); - - test_util::spawn::exec_replace("cargo", &args).unwrap(); +pub fn main() { + // this file exists to cause the executable to be built when running cargo test } diff --git a/cli/js.rs b/cli/js.rs index 5337c53f76723f..37004ad4448791 100644 --- a/cli/js.rs +++ b/cli/js.rs @@ -2,18 +2,7 @@ use log::debug; -#[cfg(not(feature = "hmr"))] -static CLI_SNAPSHOT: &[u8] = - include_bytes!(concat!(env!("OUT_DIR"), "/CLI_SNAPSHOT.bin")); - pub fn deno_isolate_init() -> Option<&'static [u8]> { debug!("Deno isolate init with snapshots."); - #[cfg(not(feature = "hmr"))] - { - Some(CLI_SNAPSHOT) - } - #[cfg(feature = "hmr")] - { - None - } + deno_snapshots::CLI_SNAPSHOT } diff --git a/cli/lib/Cargo.toml b/cli/lib/Cargo.toml index 67caf6e944fc36..b6bcaa7e4345e2 100644 --- a/cli/lib/Cargo.toml +++ b/cli/lib/Cargo.toml @@ -2,7 +2,7 @@ [package] name = "deno_lib" -version = "0.2.0" +version = "0.3.0" authors.workspace = true edition.workspace = true license.workspace = true @@ -14,23 +14,32 @@ description = "Shared code between the Deno CLI and denort" path = "lib.rs" [dependencies] -deno_cache_dir.workspace = true +capacity_builder.workspace = true +deno_config = { workspace = true, features = ["sync", "workspace"] } deno_error.workspace = true deno_fs = { workspace = true, features = ["sync_fs"] } +deno_media_type.workspace = true deno_node = { workspace = true, features = ["sync_fs"] } +deno_npm.workspace = true deno_path_util.workspace = true deno_resolver = { workspace = true, features = ["sync"] } 
deno_runtime.workspace = true +deno_semver.workspace = true deno_terminal.workspace = true +env_logger = "=0.10.0" faster-hex.workspace = true -log.workspace = true +indexmap.workspace = true +libsui.workspace = true +log = { workspace = true, features = ["serde"] } node_resolver = { workspace = true, features = ["sync"] } parking_lot.workspace = true ring.workspace = true serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true sys_traits = { workspace = true, features = ["getrandom"] } thiserror.workspace = true tokio.workspace = true +twox-hash.workspace = true url.workspace = true [dev-dependencies] diff --git a/cli/lib/args.rs b/cli/lib/args.rs new file mode 100644 index 00000000000000..22bebdf5d9fef7 --- /dev/null +++ b/cli/lib/args.rs @@ -0,0 +1,216 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::io::BufReader; +use std::io::Cursor; +use std::io::Read; +use std::io::Seek; +use std::path::PathBuf; +use std::sync::LazyLock; + +use deno_npm::resolution::PackageIdNotFoundError; +use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; +use deno_runtime::colors; +use deno_runtime::deno_tls::deno_native_certs::load_native_certs; +use deno_runtime::deno_tls::rustls; +use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_tls::rustls_pemfile; +use deno_runtime::deno_tls::webpki_roots; +use deno_semver::npm::NpmPackageReqReference; +use serde::Deserialize; +use serde::Serialize; +use thiserror::Error; + +pub fn npm_pkg_req_ref_to_binary_command( + req_ref: &NpmPackageReqReference, +) -> String { + req_ref + .sub_path() + .map(|s| s.to_string()) + .unwrap_or_else(|| req_ref.req().name.to_string()) +} + +pub fn has_trace_permissions_enabled() -> bool { + has_flag_env_var("DENO_TRACE_PERMISSIONS") +} + +pub fn has_flag_env_var(name: &str) -> bool { + match std::env::var_os(name) { + Some(value) => value == "1", + None => false, + } +} + +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum CaData { + 
/// The string is a file path + File(String), + /// This variant is not exposed as an option in the CLI, it is used internally + /// for standalone binaries. + Bytes(Vec), +} + +#[derive(Error, Debug, Clone, deno_error::JsError)] +#[class(generic)] +pub enum RootCertStoreLoadError { + #[error( + "Unknown certificate store \"{0}\" specified (allowed: \"system,mozilla\")" + )] + UnknownStore(String), + #[error("Unable to add pem file to certificate store: {0}")] + FailedAddPemFile(String), + #[error("Failed opening CA file: {0}")] + CaFileOpenError(String), +} + +/// Create and populate a root cert store based on the passed options and +/// environment. +pub fn get_root_cert_store( + maybe_root_path: Option, + maybe_ca_stores: Option>, + maybe_ca_data: Option, +) -> Result { + let mut root_cert_store = RootCertStore::empty(); + let ca_stores: Vec = maybe_ca_stores + .or_else(|| { + let env_ca_store = std::env::var("DENO_TLS_CA_STORE").ok()?; + Some( + env_ca_store + .split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()) + .collect(), + ) + }) + .unwrap_or_else(|| vec!["mozilla".to_string()]); + + for store in ca_stores.iter() { + match store.as_str() { + "mozilla" => { + root_cert_store.extend(webpki_roots::TLS_SERVER_ROOTS.to_vec()); + } + "system" => { + let roots = load_native_certs().expect("could not load platform certs"); + for root in roots { + if let Err(err) = root_cert_store + .add(rustls::pki_types::CertificateDer::from(root.0.clone())) + { + log::error!( + "{}", + colors::yellow(&format!( + "Unable to add system certificate to certificate store: {:?}", + err + )) + ); + let hex_encoded_root = faster_hex::hex_string(&root.0); + log::error!("{}", colors::gray(&hex_encoded_root)); + } + } + } + _ => { + return Err(RootCertStoreLoadError::UnknownStore(store.clone())); + } + } + } + + let ca_data = + maybe_ca_data.or_else(|| std::env::var("DENO_CERT").ok().map(CaData::File)); + if let Some(ca_data) = ca_data { + let result = match ca_data { 
+ CaData::File(ca_file) => { + let ca_file = if let Some(root) = &maybe_root_path { + root.join(&ca_file) + } else { + PathBuf::from(ca_file) + }; + let certfile = std::fs::File::open(ca_file).map_err(|err| { + RootCertStoreLoadError::CaFileOpenError(err.to_string()) + })?; + let mut reader = BufReader::new(certfile); + rustls_pemfile::certs(&mut reader).collect::, _>>() + } + CaData::Bytes(data) => { + let mut reader = BufReader::new(Cursor::new(data)); + rustls_pemfile::certs(&mut reader).collect::, _>>() + } + }; + + match result { + Ok(certs) => { + root_cert_store.add_parsable_certificates(certs); + } + Err(e) => { + return Err(RootCertStoreLoadError::FailedAddPemFile(e.to_string())); + } + } + } + + Ok(root_cert_store) +} + +/// State provided to the process via an environment variable. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct NpmProcessState { + pub kind: NpmProcessStateKind, + pub local_node_modules_path: Option, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub enum NpmProcessStateKind { + Snapshot(deno_npm::resolution::SerializedNpmResolutionSnapshot), + Byonm, +} + +pub static NPM_PROCESS_STATE: LazyLock> = + LazyLock::new(|| { + use deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME; + let fd = std::env::var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME).ok()?; + std::env::remove_var(NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME); + let fd = fd.parse::().ok()?; + let mut file = { + use deno_runtime::deno_io::FromRawIoHandle; + unsafe { std::fs::File::from_raw_io_handle(fd as _) } + }; + let mut buf = Vec::new(); + // seek to beginning. 
after the file is written the position will be inherited by this subprocess, + // and also this file might have been read before + file.seek(std::io::SeekFrom::Start(0)).unwrap(); + file + .read_to_end(&mut buf) + .inspect_err(|e| { + log::error!("failed to read npm process state from fd {fd}: {e}"); + }) + .ok()?; + let state: NpmProcessState = serde_json::from_slice(&buf) + .inspect_err(|e| { + log::error!( + "failed to deserialize npm process state: {e} {}", + String::from_utf8_lossy(&buf) + ) + }) + .ok()?; + Some(state) + }); + +pub fn resolve_npm_resolution_snapshot( +) -> Result, PackageIdNotFoundError> +{ + if let Some(NpmProcessStateKind::Snapshot(snapshot)) = + NPM_PROCESS_STATE.as_ref().map(|s| &s.kind) + { + // TODO(bartlomieju): remove this clone + Ok(Some(snapshot.clone().into_valid()?)) + } else { + Ok(None) + } +} + +#[derive(Clone, Default, Debug, Eq, PartialEq, Serialize, Deserialize)] +pub struct UnstableConfig { + // TODO(bartlomieju): remove in Deno 2.5 + pub legacy_flag_enabled: bool, // --unstable + pub bare_node_builtins: bool, + pub detect_cjs: bool, + pub sloppy_imports: bool, + pub npm_lazy_caching: bool, + pub features: Vec, // --unstabe-kv --unstable-cron +} diff --git a/cli/lib/build.rs b/cli/lib/build.rs new file mode 100644 index 00000000000000..1f52e0c02a08f1 --- /dev/null +++ b/cli/lib/build.rs @@ -0,0 +1,42 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +fn main() { + // todo(dsherret): remove this after Deno 2.2.0 is published and then + // align the version of this crate with Deno then. 
We need to wait because + // there was previously a deno_lib 2.2.0 published (https://crates.io/crates/deno_lib/versions) + let version_path = std::path::Path::new(".").join("version.txt"); + println!("cargo:rerun-if-changed={}", version_path.display()); + #[allow(clippy::disallowed_methods)] + let text = std::fs::read_to_string(version_path).unwrap(); + println!("cargo:rustc-env=DENO_VERSION={}", text); + + let commit_hash = git_commit_hash(); + println!("cargo:rustc-env=GIT_COMMIT_HASH={}", commit_hash); + println!("cargo:rerun-if-env-changed=GIT_COMMIT_HASH"); + println!( + "cargo:rustc-env=GIT_COMMIT_HASH_SHORT={}", + &commit_hash[..7] + ); +} + +fn git_commit_hash() -> String { + if let Ok(output) = std::process::Command::new("git") + .arg("rev-list") + .arg("-1") + .arg("HEAD") + .output() + { + if output.status.success() { + std::str::from_utf8(&output.stdout[..40]) + .unwrap() + .to_string() + } else { + // When not in git repository + // (e.g. when the user install by `cargo install deno`) + "UNKNOWN".to_string() + } + } else { + // When there is no git command for some reason + "UNKNOWN".to_string() + } +} diff --git a/cli/lib/cache/mod.rs b/cli/lib/cache/mod.rs deleted file mode 100644 index c4395df3e1140f..00000000000000 --- a/cli/lib/cache/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. 
- -pub use deno_dir::DenoDir; -pub use deno_dir::DenoDirProvider; -pub use disk_cache::DiskCache; - -mod deno_dir; -mod disk_cache; diff --git a/cli/lib/clippy.toml b/cli/lib/clippy.toml new file mode 100644 index 00000000000000..0060289cf25230 --- /dev/null +++ b/cli/lib/clippy.toml @@ -0,0 +1,48 @@ +disallowed-methods = [ + { path = "std::env::current_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::is_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::is_file", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::is_symlink", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::metadata", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::read_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::read_link", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::symlink_metadata", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::try_exists", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::exists", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::canonicalize", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::is_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::is_file", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::is_symlink", reason = "File system operations should be done using DenoLibSys" }, + { path = 
"std::path::PathBuf::metadata", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::read_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::read_link", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::symlink_metadata", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::PathBuf::try_exists", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::env::set_current_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::env::temp_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::canonicalize", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::copy", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::create_dir_all", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::create_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::DirBuilder::new", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::hard_link", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::metadata", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::OpenOptions::new", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::read_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::read_link", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::read_to_string", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::read", reason = "File system operations should be done using 
DenoLibSys" }, + { path = "std::fs::remove_dir_all", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::remove_dir", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::remove_file", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::rename", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::set_permissions", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::symlink_metadata", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::fs::write", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::canonicalize", reason = "File system operations should be done using DenoLibSys" }, + { path = "std::path::Path::exists", reason = "File system operations should be done using DenoLibSys" }, + { path = "url::Url::to_file_path", reason = "Use deno_path_util instead" }, + { path = "url::Url::from_file_path", reason = "Use deno_path_util instead" }, + { path = "url::Url::from_directory_path", reason = "Use deno_path_util instead" }, +] diff --git a/cli/lib/env.rs b/cli/lib/env.rs deleted file mode 100644 index 9c6001478b058f..00000000000000 --- a/cli/lib/env.rs +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -pub fn has_trace_permissions_enabled() -> bool { - has_flag_env_var("DENO_TRACE_PERMISSIONS") -} - -pub fn has_flag_env_var(name: &str) -> bool { - let value = std::env::var(name); - matches!(value.as_ref().map(|s| s.as_str()), Ok("1")) -} diff --git a/cli/lib/lib.rs b/cli/lib/lib.rs index 5453bddaee8ac3..6b9267805df91c 100644 --- a/cli/lib/lib.rs +++ b/cli/lib/lib.rs @@ -1,9 +1,11 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
-pub mod cache; -pub mod env; +pub mod args; +pub mod loader; pub mod npm; +pub mod shared; pub mod standalone; pub mod sys; pub mod util; +pub mod version; pub mod worker; diff --git a/cli/lib/loader.rs b/cli/lib/loader.rs new file mode 100644 index 00000000000000..68d56be233a5f9 --- /dev/null +++ b/cli/lib/loader.rs @@ -0,0 +1,217 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; +use std::path::PathBuf; +use std::sync::Arc; + +use deno_media_type::MediaType; +use deno_resolver::cjs::CjsTracker; +use deno_resolver::npm::DenoInNpmPackageChecker; +use deno_runtime::deno_core::ModuleSourceCode; +use node_resolver::analyze::CjsCodeAnalyzer; +use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::InNpmPackageChecker; +use node_resolver::IsBuiltInNodeModuleChecker; +use node_resolver::NpmPackageFolderResolver; +use thiserror::Error; +use url::Url; + +use crate::sys::DenoLibSys; +use crate::util::text_encoding::from_utf8_lossy_cow; + +pub struct ModuleCodeStringSource { + pub code: ModuleSourceCode, + pub found_url: Url, + pub media_type: MediaType, +} + +#[derive(Debug, Error, deno_error::JsError)] +#[class(type)] +#[error("[{}]: Stripping types is currently unsupported for files under node_modules, for \"{}\"", self.code(), specifier)] +pub struct StrippingTypesNodeModulesError { + pub specifier: Url, +} + +impl StrippingTypesNodeModulesError { + pub fn code(&self) -> &'static str { + "ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING" + } +} + +#[derive(Debug, Error, deno_error::JsError)] +pub enum NpmModuleLoadError { + #[class(inherit)] + #[error(transparent)] + UrlToFilePath(#[from] deno_path_util::UrlToFilePathError), + #[class(inherit)] + #[error(transparent)] + StrippingTypesNodeModules(#[from] StrippingTypesNodeModulesError), + #[class(inherit)] + #[error(transparent)] + ClosestPkgJson(#[from] node_resolver::errors::ClosestPkgJsonError), + #[class(inherit)] + #[error(transparent)] + TranslateCjsToEsm(#[from] 
node_resolver::analyze::TranslateCjsToEsmError), + #[class(inherit)] + #[error("Unable to load {}{}", file_path.display(), maybe_referrer.as_ref().map(|r| format!(" imported from {}", r)).unwrap_or_default())] + UnableToLoad { + file_path: PathBuf, + maybe_referrer: Option, + #[source] + #[inherit] + source: std::io::Error, + }, + #[class(inherit)] + #[error( + "{}", + format_dir_import_message(file_path, maybe_referrer, suggestion) + )] + DirImport { + file_path: PathBuf, + maybe_referrer: Option, + suggestion: Option<&'static str>, + #[source] + #[inherit] + source: std::io::Error, + }, +} + +fn format_dir_import_message( + file_path: &std::path::Path, + maybe_referrer: &Option, + suggestion: &Option<&'static str>, +) -> String { + // directory imports are not allowed when importing from an + // ES module, so provide the user with a helpful error message + let dir_path = file_path; + let mut msg = "Directory import ".to_string(); + msg.push_str(&dir_path.to_string_lossy()); + if let Some(referrer) = maybe_referrer { + msg.push_str(" is not supported resolving import from "); + msg.push_str(referrer.as_str()); + if let Some(entrypoint_name) = suggestion { + msg.push_str("\nDid you mean to import "); + msg.push_str(entrypoint_name); + msg.push_str(" within the directory?"); + } + } + msg +} + +#[derive(Clone)] +pub struct NpmModuleLoader< + TCjsCodeAnalyzer: CjsCodeAnalyzer, + TInNpmPackageChecker: InNpmPackageChecker, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TNpmPackageFolderResolver: NpmPackageFolderResolver, + TSys: DenoLibSys, +> { + cjs_tracker: Arc>, + sys: TSys, + node_code_translator: Arc< + NodeCodeTranslator< + TCjsCodeAnalyzer, + TInNpmPackageChecker, + TIsBuiltInNodeModuleChecker, + TNpmPackageFolderResolver, + TSys, + >, + >, +} + +impl< + TCjsCodeAnalyzer: CjsCodeAnalyzer, + TInNpmPackageChecker: InNpmPackageChecker, + TIsBuiltInNodeModuleChecker: IsBuiltInNodeModuleChecker, + TNpmPackageFolderResolver: NpmPackageFolderResolver, + 
TSys: DenoLibSys, + > + NpmModuleLoader< + TCjsCodeAnalyzer, + TInNpmPackageChecker, + TIsBuiltInNodeModuleChecker, + TNpmPackageFolderResolver, + TSys, + > +{ + pub fn new( + cjs_tracker: Arc>, + node_code_translator: Arc< + NodeCodeTranslator< + TCjsCodeAnalyzer, + TInNpmPackageChecker, + TIsBuiltInNodeModuleChecker, + TNpmPackageFolderResolver, + TSys, + >, + >, + sys: TSys, + ) -> Self { + Self { + cjs_tracker, + node_code_translator, + sys, + } + } + + pub async fn load( + &self, + specifier: &Url, + maybe_referrer: Option<&Url>, + ) -> Result { + let file_path = deno_path_util::url_to_file_path(specifier)?; + let code = self.sys.fs_read(&file_path).map_err(|source| { + if self.sys.fs_is_dir_no_err(&file_path) { + let suggestion = ["index.mjs", "index.js", "index.cjs"] + .into_iter() + .find(|e| self.sys.fs_is_file_no_err(file_path.join(e))); + NpmModuleLoadError::DirImport { + file_path, + maybe_referrer: maybe_referrer.cloned(), + suggestion, + source, + } + } else { + NpmModuleLoadError::UnableToLoad { + file_path, + maybe_referrer: maybe_referrer.cloned(), + source, + } + } + })?; + + let media_type = MediaType::from_specifier(specifier); + if media_type.is_emittable() { + return Err(NpmModuleLoadError::StrippingTypesNodeModules( + StrippingTypesNodeModulesError { + specifier: specifier.clone(), + }, + )); + } + + let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? { + // translate cjs to esm if it's cjs and inject node globals + let code = from_utf8_lossy_cow(code); + ModuleSourceCode::String( + self + .node_code_translator + .translate_cjs_to_esm(specifier, Some(code)) + .await? 
+ .into_owned() + .into(), + ) + } else { + // esm and json code is untouched + ModuleSourceCode::Bytes(match code { + Cow::Owned(bytes) => bytes.into_boxed_slice().into(), + Cow::Borrowed(bytes) => bytes.into(), + }) + }; + + Ok(ModuleCodeStringSource { + code, + found_url: specifier.clone(), + media_type: MediaType::from_specifier(specifier), + }) + } +} diff --git a/cli/lib/npm/mod.rs b/cli/lib/npm/mod.rs index e7d4d8d9d1931b..b6ad5d1be5d26f 100644 --- a/cli/lib/npm/mod.rs +++ b/cli/lib/npm/mod.rs @@ -2,5 +2,79 @@ mod permission_checker; +use std::path::Path; +use std::sync::Arc; + +use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; +use deno_resolver::npm::ByonmNpmResolver; +use deno_resolver::npm::ManagedNpmResolverRc; +use deno_resolver::npm::NpmResolver; +use deno_runtime::deno_process::NpmProcessStateProvider; +use deno_runtime::deno_process::NpmProcessStateProviderRc; pub use permission_checker::NpmRegistryReadPermissionChecker; pub use permission_checker::NpmRegistryReadPermissionCheckerMode; + +use crate::args::NpmProcessState; +use crate::args::NpmProcessStateKind; +use crate::sys::DenoLibSys; + +pub fn create_npm_process_state_provider( + npm_resolver: &NpmResolver, +) -> NpmProcessStateProviderRc { + match npm_resolver { + NpmResolver::Byonm(byonm_npm_resolver) => { + Arc::new(ByonmNpmProcessStateProvider(byonm_npm_resolver.clone())) + } + NpmResolver::Managed(managed_npm_resolver) => { + Arc::new(ManagedNpmProcessStateProvider(managed_npm_resolver.clone())) + } + } +} + +pub fn npm_process_state( + snapshot: ValidSerializedNpmResolutionSnapshot, + node_modules_path: Option<&Path>, +) -> String { + serde_json::to_string(&NpmProcessState { + kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()), + local_node_modules_path: node_modules_path + .map(|p| p.to_string_lossy().to_string()), + }) + .unwrap() +} + +#[derive(Debug)] +pub struct ManagedNpmProcessStateProvider( + pub ManagedNpmResolverRc, +); + +impl NpmProcessStateProvider 
+ for ManagedNpmProcessStateProvider +{ + fn get_npm_process_state(&self) -> String { + npm_process_state( + self.0.resolution().serialized_valid_snapshot(), + self.0.root_node_modules_path(), + ) + } +} + +#[derive(Debug)] +pub struct ByonmNpmProcessStateProvider( + pub Arc>, +); + +impl NpmProcessStateProvider + for ByonmNpmProcessStateProvider +{ + fn get_npm_process_state(&self) -> String { + serde_json::to_string(&NpmProcessState { + kind: NpmProcessStateKind::Byonm, + local_node_modules_path: self + .0 + .root_node_modules_path() + .map(|p| p.to_string_lossy().to_string()), + }) + .unwrap() + } +} diff --git a/cli/shared.rs b/cli/lib/shared.rs similarity index 79% rename from cli/shared.rs rename to cli/lib/shared.rs index 6a28473edd9492..15ec3c24404432 100644 --- a/cli/shared.rs +++ b/cli/lib/shared.rs @@ -1,8 +1,11 @@ // Copyright 2018-2025 the Deno authors. MIT license. /// This module is shared between build script and the binaries. Use it sparsely. -use deno_core::anyhow::bail; -use deno_core::error::AnyError; +use thiserror::Error; + +#[derive(Debug, Error)] +#[error("Unrecognized release channel: {0}")] +pub struct UnrecognizedReleaseChannelError(pub String); #[derive(Debug, Clone, Copy, PartialEq)] pub enum ReleaseChannel { @@ -50,13 +53,17 @@ impl ReleaseChannel { // NOTE(bartlomieju): do not ever change these values, tools like `patchver` // rely on them. 
#[allow(unused)] - pub fn deserialize(str_: &str) -> Result { + pub fn deserialize( + str_: &str, + ) -> Result { Ok(match str_ { "stable" => Self::Stable, "canary" => Self::Canary, "rc" => Self::Rc, "lts" => Self::Lts, - unknown => bail!("Unrecognized release channel: {}", unknown), + unknown => { + return Err(UnrecognizedReleaseChannelError(unknown.to_string())) + } }) } } diff --git a/cli/lib/standalone/binary.rs b/cli/lib/standalone/binary.rs new file mode 100644 index 00000000000000..ae02197bf47a4a --- /dev/null +++ b/cli/lib/standalone/binary.rs @@ -0,0 +1,389 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; +use std::collections::BTreeMap; + +use deno_config::workspace::PackageJsonDepResolution; +use deno_media_type::MediaType; +use deno_runtime::deno_permissions::PermissionsOptions; +use deno_runtime::deno_telemetry::OtelConfig; +use deno_semver::Version; +use indexmap::IndexMap; +use node_resolver::analyze::CjsAnalysisExports; +use serde::Deserialize; +use serde::Serialize; +use url::Url; + +use super::virtual_fs::FileSystemCaseSensitivity; +use crate::args::UnstableConfig; + +pub const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd"; + +pub trait DenoRtDeserializable<'a>: Sized { + fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)>; +} + +impl<'a> DenoRtDeserializable<'a> for Cow<'a, [u8]> { + fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> { + let (input, data) = read_bytes_with_u32_len(input)?; + Ok((input, Cow::Borrowed(data))) + } +} + +pub trait DenoRtSerializable<'a> { + fn serialize( + &'a self, + builder: &mut capacity_builder::BytesBuilder<'a, Vec>, + ); +} + +#[derive(Deserialize, Serialize)] +pub enum NodeModules { + Managed { + /// Relative path for the node_modules directory in the vfs. 
+ node_modules_dir: Option, + }, + Byonm { + root_node_modules_dir: Option, + }, +} + +#[derive(Deserialize, Serialize)] +pub struct SerializedWorkspaceResolverImportMap { + pub specifier: String, + pub json: String, +} + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct SerializedResolverWorkspaceJsrPackage { + pub relative_base: String, + pub name: String, + pub version: Option, + pub exports: IndexMap, +} + +#[derive(Deserialize, Serialize)] +pub struct SerializedWorkspaceResolver { + pub import_map: Option, + pub jsr_pkgs: Vec, + pub package_jsons: BTreeMap, + pub pkg_json_resolution: PackageJsonDepResolution, +} + +// Note: Don't use hashmaps/hashsets. Ensure the serialization +// is deterministic. +#[derive(Deserialize, Serialize)] +pub struct Metadata { + pub argv: Vec, + pub seed: Option, + pub code_cache_key: Option, + pub permissions: PermissionsOptions, + pub location: Option, + pub v8_flags: Vec, + pub log_level: Option, + pub ca_stores: Option>, + pub ca_data: Option>, + pub unsafely_ignore_certificate_errors: Option>, + pub env_vars_from_env_file: IndexMap, + pub workspace_resolver: SerializedWorkspaceResolver, + pub entrypoint_key: String, + pub node_modules: Option, + pub unstable_config: UnstableConfig, + pub otel_config: OtelConfig, + pub vfs_case_sensitivity: FileSystemCaseSensitivity, +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct SpecifierId(u32); + +impl SpecifierId { + pub fn new(id: u32) -> Self { + Self(id) + } +} + +impl<'a> capacity_builder::BytesAppendable<'a> for SpecifierId { + fn append_to_builder( + self, + builder: &mut capacity_builder::BytesBuilder<'a, TBytes>, + ) { + builder.append_le(self.0); + } +} + +impl<'a> DenoRtSerializable<'a> for SpecifierId { + fn serialize( + &'a self, + builder: &mut capacity_builder::BytesBuilder<'a, Vec>, + ) { + builder.append_le(self.0); + } +} + +impl<'a> DenoRtDeserializable<'a> for SpecifierId { + fn deserialize(input: &'a [u8]) -> 
std::io::Result<(&'a [u8], Self)> { + let (input, id) = read_u32(input)?; + Ok((input, Self(id))) + } +} + +#[derive(Deserialize, Serialize)] +pub enum CjsExportAnalysisEntry { + Esm, + Cjs(CjsAnalysisExports), +} + +const HAS_TRANSPILED_FLAG: u8 = 1 << 0; +const HAS_SOURCE_MAP_FLAG: u8 = 1 << 1; +const HAS_CJS_EXPORT_ANALYSIS_FLAG: u8 = 1 << 2; + +pub struct RemoteModuleEntry<'a> { + pub media_type: MediaType, + pub data: Cow<'a, [u8]>, + pub maybe_transpiled: Option>, + pub maybe_source_map: Option>, + pub maybe_cjs_export_analysis: Option>, +} + +impl<'a> DenoRtSerializable<'a> for RemoteModuleEntry<'a> { + fn serialize( + &'a self, + builder: &mut capacity_builder::BytesBuilder<'a, Vec>, + ) { + fn append_maybe_data<'a>( + builder: &mut capacity_builder::BytesBuilder<'a, Vec>, + maybe_data: Option<&'a [u8]>, + ) { + if let Some(data) = maybe_data { + builder.append_le(data.len() as u32); + builder.append(data); + } + } + + let mut has_data_flags = 0; + if self.maybe_transpiled.is_some() { + has_data_flags |= HAS_TRANSPILED_FLAG; + } + if self.maybe_source_map.is_some() { + has_data_flags |= HAS_SOURCE_MAP_FLAG; + } + if self.maybe_cjs_export_analysis.is_some() { + has_data_flags |= HAS_CJS_EXPORT_ANALYSIS_FLAG; + } + builder.append(serialize_media_type(self.media_type)); + builder.append_le(self.data.len() as u32); + builder.append(self.data.as_ref()); + builder.append(has_data_flags); + append_maybe_data(builder, self.maybe_transpiled.as_deref()); + append_maybe_data(builder, self.maybe_source_map.as_deref()); + append_maybe_data(builder, self.maybe_cjs_export_analysis.as_deref()); + } +} + +impl<'a> DenoRtDeserializable<'a> for RemoteModuleEntry<'a> { + fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> { + #[allow(clippy::type_complexity)] + fn deserialize_data_if_has_flag( + input: &[u8], + has_data_flags: u8, + flag: u8, + ) -> std::io::Result<(&[u8], Option>)> { + if has_data_flags & flag != 0 { + let (input, bytes) = 
read_bytes_with_u32_len(input)?; + Ok((input, Some(Cow::Borrowed(bytes)))) + } else { + Ok((input, None)) + } + } + + let (input, media_type) = MediaType::deserialize(input)?; + let (input, data) = read_bytes_with_u32_len(input)?; + let (input, has_data_flags) = read_u8(input)?; + let (input, maybe_transpiled) = + deserialize_data_if_has_flag(input, has_data_flags, HAS_TRANSPILED_FLAG)?; + let (input, maybe_source_map) = + deserialize_data_if_has_flag(input, has_data_flags, HAS_SOURCE_MAP_FLAG)?; + let (input, maybe_cjs_export_analysis) = deserialize_data_if_has_flag( + input, + has_data_flags, + HAS_CJS_EXPORT_ANALYSIS_FLAG, + )?; + Ok(( + input, + Self { + media_type, + data: Cow::Borrowed(data), + maybe_transpiled, + maybe_source_map, + maybe_cjs_export_analysis, + }, + )) + } +} + +fn serialize_media_type(media_type: MediaType) -> u8 { + match media_type { + MediaType::JavaScript => 0, + MediaType::Jsx => 1, + MediaType::Mjs => 2, + MediaType::Cjs => 3, + MediaType::TypeScript => 4, + MediaType::Mts => 5, + MediaType::Cts => 6, + MediaType::Dts => 7, + MediaType::Dmts => 8, + MediaType::Dcts => 9, + MediaType::Tsx => 10, + MediaType::Json => 11, + MediaType::Wasm => 12, + MediaType::Css => 13, + MediaType::SourceMap => 14, + MediaType::Unknown => 15, + } +} + +impl<'a> DenoRtDeserializable<'a> for MediaType { + fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> { + let (input, value) = read_u8(input)?; + let value = match value { + 0 => MediaType::JavaScript, + 1 => MediaType::Jsx, + 2 => MediaType::Mjs, + 3 => MediaType::Cjs, + 4 => MediaType::TypeScript, + 5 => MediaType::Mts, + 6 => MediaType::Cts, + 7 => MediaType::Dts, + 8 => MediaType::Dmts, + 9 => MediaType::Dcts, + 10 => MediaType::Tsx, + 11 => MediaType::Json, + 12 => MediaType::Wasm, + 13 => MediaType::Css, + 14 => MediaType::SourceMap, + 15 => MediaType::Unknown, + value => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("Unknown media type value: 
{value}"), + )) + } + }; + Ok((input, value)) + } +} + +/// Data stored keyed by specifier. +pub struct SpecifierDataStore { + data: IndexMap, +} + +impl Default for SpecifierDataStore { + fn default() -> Self { + Self { + data: IndexMap::new(), + } + } +} + +impl SpecifierDataStore { + pub fn with_capacity(capacity: usize) -> Self { + Self { + data: IndexMap::with_capacity(capacity), + } + } + + pub fn iter(&self) -> impl Iterator { + self.data.iter().map(|(k, v)| (*k, v)) + } + + #[allow(clippy::len_without_is_empty)] + pub fn len(&self) -> usize { + self.data.len() + } + + pub fn contains(&self, specifier: SpecifierId) -> bool { + self.data.contains_key(&specifier) + } + + pub fn add(&mut self, specifier: SpecifierId, value: TData) { + self.data.insert(specifier, value); + } + + pub fn get(&self, specifier: SpecifierId) -> Option<&TData> { + self.data.get(&specifier) + } +} + +impl<'a, TData> SpecifierDataStore +where + TData: DenoRtSerializable<'a> + 'a, +{ + pub fn serialize( + &'a self, + builder: &mut capacity_builder::BytesBuilder<'a, Vec>, + ) { + builder.append_le(self.len() as u32); + for (specifier, value) in self.iter() { + builder.append(specifier); + value.serialize(builder); + } + } +} + +impl<'a, TData> DenoRtDeserializable<'a> for SpecifierDataStore +where + TData: DenoRtDeserializable<'a>, +{ + fn deserialize(input: &'a [u8]) -> std::io::Result<(&'a [u8], Self)> { + let (input, len) = read_u32_as_usize(input)?; + let mut data = IndexMap::with_capacity(len); + let mut input = input; + for _ in 0..len { + let (new_input, specifier) = SpecifierId::deserialize(input)?; + let (new_input, value) = TData::deserialize(new_input)?; + data.insert(specifier, value); + input = new_input; + } + Ok((input, Self { data })) + } +} + +fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> { + let (input, len) = read_u32_as_usize(input)?; + let (input, data) = read_bytes(input, len)?; + Ok((input, data)) +} + +fn read_u32_as_usize(input: 
&[u8]) -> std::io::Result<(&[u8], usize)> { + read_u32(input).map(|(input, len)| (input, len as usize)) +} + +fn read_u32(input: &[u8]) -> std::io::Result<(&[u8], u32)> { + let (input, len_bytes) = read_bytes(input, 4)?; + let len = u32::from_le_bytes(len_bytes.try_into().unwrap()); + Ok((input, len)) +} + +fn read_u8(input: &[u8]) -> std::io::Result<(&[u8], u8)> { + check_has_len(input, 1)?; + Ok((&input[1..], input[0])) +} + +fn read_bytes(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> { + check_has_len(input, len)?; + let (len_bytes, input) = input.split_at(len); + Ok((input, len_bytes)) +} + +#[inline(always)] +fn check_has_len(input: &[u8], len: usize) -> std::io::Result<()> { + if input.len() < len { + Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Unexpected end of data", + )) + } else { + Ok(()) + } +} diff --git a/cli/lib/standalone/mod.rs b/cli/lib/standalone/mod.rs index 6e173a457a0f1c..42a5f2012230fb 100644 --- a/cli/lib/standalone/mod.rs +++ b/cli/lib/standalone/mod.rs @@ -1,3 +1,4 @@ // Copyright 2018-2025 the Deno authors. MIT license. +pub mod binary; pub mod virtual_fs; diff --git a/cli/lib/standalone/virtual_fs.rs b/cli/lib/standalone/virtual_fs.rs index 5fc17f27b7c1fd..124c2a00021e0a 100644 --- a/cli/lib/standalone/virtual_fs.rs +++ b/cli/lib/standalone/virtual_fs.rs @@ -1,20 +1,27 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
use std::cmp::Ordering; +use std::collections::hash_map::Entry; +use std::collections::HashMap; +use std::collections::VecDeque; +use std::fmt; use std::path::Path; use std::path::PathBuf; +use deno_path_util::normalize_path; +use deno_path_util::strip_unc_prefix; +use deno_runtime::colors; +use deno_runtime::deno_core::anyhow::bail; +use deno_runtime::deno_core::anyhow::Context; +use deno_runtime::deno_core::error::AnyError; +use indexmap::IndexSet; +use serde::de; +use serde::de::SeqAccess; +use serde::de::Visitor; use serde::Deserialize; +use serde::Deserializer; use serde::Serialize; - -#[derive(Debug, Copy, Clone)] -pub enum VfsFileSubDataKind { - /// Raw bytes of the file. - Raw, - /// Bytes to use for module loading. For example, for TypeScript - /// files this will be the transpiled JavaScript source. - ModuleGraph, -} +use serde::Serializer; #[derive(Debug, PartialEq, Eq)] pub enum WindowsSystemRootablePath { @@ -24,6 +31,14 @@ pub enum WindowsSystemRootablePath { } impl WindowsSystemRootablePath { + pub fn root_for_current_os() -> Self { + if cfg!(windows) { + WindowsSystemRootablePath::WindowSystemRoot + } else { + WindowsSystemRootablePath::Path(PathBuf::from("/")) + } + } + pub fn join(&self, name_component: &str) -> PathBuf { // this method doesn't handle multiple components debug_assert!( @@ -110,6 +125,10 @@ impl VirtualDirectoryEntries { self.0.get_mut(index) } + pub fn get_by_index(&self, index: usize) -> Option<&VfsEntry> { + self.0.get(index) + } + pub fn binary_search( &self, name: &str, @@ -180,27 +199,67 @@ pub struct VirtualDirectory { pub entries: VirtualDirectoryEntries, } -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Clone, Copy)] pub struct OffsetWithLength { - #[serde(rename = "o")] pub offset: u64, - #[serde(rename = "l")] pub len: u64, } +// serialize as an array in order to save space +impl Serialize for OffsetWithLength { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let 
array = [self.offset, self.len]; + array.serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for OffsetWithLength { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct OffsetWithLengthVisitor; + + impl<'de> Visitor<'de> for OffsetWithLengthVisitor { + type Value = OffsetWithLength; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("an array with two elements: [offset, len]") + } + + fn visit_seq(self, mut seq: A) -> Result + where + A: SeqAccess<'de>, + { + let offset = seq + .next_element()? + .ok_or_else(|| de::Error::invalid_length(0, &self))?; + let len = seq + .next_element()? + .ok_or_else(|| de::Error::invalid_length(1, &self))?; + Ok(OffsetWithLength { offset, len }) + } + } + + deserializer.deserialize_seq(OffsetWithLengthVisitor) + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct VirtualFile { #[serde(rename = "n")] pub name: String, #[serde(rename = "o")] pub offset: OffsetWithLength, - /// Offset file to use for module loading when it differs from the - /// raw file. Often this will be the same offset as above for data - /// such as JavaScript files, but for TypeScript files the `offset` - /// will be the original raw bytes when included as an asset and this - /// offset will be to the transpiled JavaScript source. 
- #[serde(rename = "m")] - pub module_graph_offset: OffsetWithLength, + #[serde(rename = "m", skip_serializing_if = "Option::is_none")] + pub transpiled_offset: Option, + #[serde(rename = "c", skip_serializing_if = "Option::is_none")] + pub cjs_export_analysis_offset: Option, + #[serde(rename = "s", skip_serializing_if = "Option::is_none")] + pub source_map_offset: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -294,3 +353,647 @@ impl VfsEntry { } } } + +pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str = + ".deno_compile_node_modules"; + +#[derive(Debug)] +pub struct BuiltVfs { + pub root_path: WindowsSystemRootablePath, + pub case_sensitivity: FileSystemCaseSensitivity, + pub entries: VirtualDirectoryEntries, + pub files: Vec>, +} + +#[derive(Debug, Default)] +struct FilesData { + files: Vec>, + current_offset: u64, + file_offsets: HashMap<(String, usize), OffsetWithLength>, +} + +impl FilesData { + pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> { + if offset.len == 0 { + return Some(&[]); + } + + // the debug assertions in this method should never happen + // because it would indicate providing an offset not in the vfs + let mut count: u64 = 0; + for file in &self.files { + // clippy wanted a match + match count.cmp(&offset.offset) { + Ordering::Equal => { + debug_assert_eq!(offset.len, file.len() as u64); + if offset.len == file.len() as u64 { + return Some(file); + } else { + return None; + } + } + Ordering::Less => { + count += file.len() as u64; + } + Ordering::Greater => { + debug_assert!(false); + return None; + } + } + } + debug_assert!(false); + None + } + + pub fn add_data(&mut self, data: Vec) -> OffsetWithLength { + if data.is_empty() { + return OffsetWithLength { offset: 0, len: 0 }; + } + let checksum = crate::util::checksum::gen(&[&data]); + match self.file_offsets.entry((checksum, data.len())) { + Entry::Occupied(occupied_entry) => { + let offset_and_len = *occupied_entry.get(); + debug_assert_eq!(data.len() as 
u64, offset_and_len.len); + offset_and_len + } + Entry::Vacant(vacant_entry) => { + let offset_and_len = OffsetWithLength { + offset: self.current_offset, + len: data.len() as u64, + }; + vacant_entry.insert(offset_and_len); + self.current_offset += offset_and_len.len; + self.files.push(data); + offset_and_len + } + } + } +} + +pub struct AddFileDataOptions { + pub data: Vec, + pub maybe_transpiled: Option>, + pub maybe_source_map: Option>, + pub maybe_cjs_export_analysis: Option>, +} + +#[derive(Debug)] +pub struct VfsBuilder { + executable_root: VirtualDirectory, + files: FilesData, + /// The minimum root directory that should be included in the VFS. + min_root_dir: Option, + case_sensitivity: FileSystemCaseSensitivity, +} + +impl Default for VfsBuilder { + fn default() -> Self { + Self::new() + } +} + +impl VfsBuilder { + pub fn new() -> Self { + Self { + executable_root: VirtualDirectory { + name: "/".to_string(), + entries: Default::default(), + }, + files: Default::default(), + min_root_dir: Default::default(), + // This is not exactly correct because file systems on these OSes + // may be case-sensitive or not based on the directory, but this + // is a good enough approximation and limitation. In the future, + // we may want to store this information per directory instead + // depending on the feedback we get. + case_sensitivity: if cfg!(windows) || cfg!(target_os = "macos") { + FileSystemCaseSensitivity::Insensitive + } else { + FileSystemCaseSensitivity::Sensitive + }, + } + } + + pub fn case_sensitivity(&self) -> FileSystemCaseSensitivity { + self.case_sensitivity + } + + pub fn files_len(&self) -> usize { + self.files.files.len() + } + + pub fn file_bytes(&self, offset: OffsetWithLength) -> Option<&[u8]> { + self.files.file_bytes(offset) + } + + /// Add a directory that might be the minimum root directory + /// of the VFS. + /// + /// For example, say the user has a deno.json and specifies an + /// import map in a parent directory. 
The import map won't be + /// included in the VFS, but its base will meaning we need to + /// tell the VFS builder to include the base of the import map + /// by calling this method. + pub fn add_possible_min_root_dir(&mut self, path: &Path) { + self.add_dir_raw(path); + + match &self.min_root_dir { + Some(WindowsSystemRootablePath::WindowSystemRoot) => { + // already the root dir + } + Some(WindowsSystemRootablePath::Path(current_path)) => { + let mut common_components = Vec::new(); + for (a, b) in current_path.components().zip(path.components()) { + if a != b { + break; + } + common_components.push(a); + } + if common_components.is_empty() { + self.min_root_dir = + Some(WindowsSystemRootablePath::root_for_current_os()); + } else { + self.min_root_dir = Some(WindowsSystemRootablePath::Path( + common_components.iter().collect(), + )); + } + } + None => { + self.min_root_dir = + Some(WindowsSystemRootablePath::Path(path.to_path_buf())); + } + } + } + + pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> { + let target_path = self.resolve_target_path(path)?; + self.add_dir_recursive_not_symlink(&target_path) + } + + fn add_dir_recursive_not_symlink( + &mut self, + path: &Path, + ) -> Result<(), AnyError> { + self.add_dir_raw(path); + // ok, building fs implementation + #[allow(clippy::disallowed_methods)] + let read_dir = std::fs::read_dir(path) + .with_context(|| format!("Reading {}", path.display()))?; + + let mut dir_entries = + read_dir.into_iter().collect::, _>>()?; + dir_entries.sort_by_cached_key(|entry| entry.file_name()); // determinism + + for entry in dir_entries { + let file_type = entry.file_type()?; + let path = entry.path(); + + if file_type.is_dir() { + self.add_dir_recursive_not_symlink(&path)?; + } else if file_type.is_file() { + self.add_file_at_path_not_symlink(&path)?; + } else if file_type.is_symlink() { + match self.add_symlink(&path) { + Ok(target) => match target { + SymlinkTarget::File(target) => { + 
self.add_file_at_path_not_symlink(&target)? + } + SymlinkTarget::Dir(target) => { + self.add_dir_recursive_not_symlink(&target)?; + } + }, + Err(err) => { + log::warn!( + "{} Failed resolving symlink. Ignoring.\n Path: {}\n Message: {:#}", + colors::yellow("Warning"), + path.display(), + err + ); + } + } + } + } + + Ok(()) + } + + fn add_dir_raw(&mut self, path: &Path) -> &mut VirtualDirectory { + log::debug!("Ensuring directory '{}'", path.display()); + debug_assert!(path.is_absolute()); + let mut current_dir = &mut self.executable_root; + + for component in path.components() { + if matches!(component, std::path::Component::RootDir) { + continue; + } + let name = component.as_os_str().to_string_lossy(); + let index = current_dir.entries.insert_or_modify( + &name, + self.case_sensitivity, + || { + VfsEntry::Dir(VirtualDirectory { + name: name.to_string(), + entries: Default::default(), + }) + }, + |_| { + // ignore + }, + ); + match current_dir.entries.get_mut_by_index(index) { + Some(VfsEntry::Dir(dir)) => { + current_dir = dir; + } + _ => unreachable!(), + }; + } + + current_dir + } + + pub fn get_system_root_dir_mut(&mut self) -> &mut VirtualDirectory { + &mut self.executable_root + } + + pub fn get_dir_mut(&mut self, path: &Path) -> Option<&mut VirtualDirectory> { + debug_assert!(path.is_absolute()); + let mut current_dir = &mut self.executable_root; + + for component in path.components() { + if matches!(component, std::path::Component::RootDir) { + continue; + } + let name = component.as_os_str().to_string_lossy(); + let entry = current_dir + .entries + .get_mut_by_name(&name, self.case_sensitivity)?; + match entry { + VfsEntry::Dir(dir) => { + current_dir = dir; + } + _ => unreachable!("{}", path.display()), + }; + } + + Some(current_dir) + } + + pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> { + // ok, building fs implementation + #[allow(clippy::disallowed_methods)] + let file_bytes = std::fs::read(path) + .with_context(|| 
format!("Reading {}", path.display()))?; + self.add_file_with_data( + path, + AddFileDataOptions { + data: file_bytes, + maybe_cjs_export_analysis: None, + maybe_transpiled: None, + maybe_source_map: None, + }, + ) + } + + fn add_file_at_path_not_symlink( + &mut self, + path: &Path, + ) -> Result<(), AnyError> { + // ok, building fs implementation + #[allow(clippy::disallowed_methods)] + let file_bytes = std::fs::read(path) + .with_context(|| format!("Reading {}", path.display()))?; + self.add_file_with_data_raw(path, file_bytes) + } + + pub fn add_file_with_data( + &mut self, + path: &Path, + options: AddFileDataOptions, + ) -> Result<(), AnyError> { + // ok, fs implementation + #[allow(clippy::disallowed_methods)] + let metadata = std::fs::symlink_metadata(path).with_context(|| { + format!("Resolving target path for '{}'", path.display()) + })?; + if metadata.is_symlink() { + let target = self.add_symlink(path)?.into_path_buf(); + self.add_file_with_data_raw_options(&target, options) + } else { + self.add_file_with_data_raw_options(path, options) + } + } + + pub fn add_file_with_data_raw( + &mut self, + path: &Path, + data: Vec, + ) -> Result<(), AnyError> { + self.add_file_with_data_raw_options( + path, + AddFileDataOptions { + data, + maybe_transpiled: None, + maybe_cjs_export_analysis: None, + maybe_source_map: None, + }, + ) + } + + fn add_file_with_data_raw_options( + &mut self, + path: &Path, + options: AddFileDataOptions, + ) -> Result<(), AnyError> { + log::debug!("Adding file '{}'", path.display()); + let case_sensitivity = self.case_sensitivity; + + let offset_and_len = self.files.add_data(options.data); + let transpiled_offset = options + .maybe_transpiled + .map(|data| self.files.add_data(data)); + let source_map_offset = options + .maybe_source_map + .map(|data| self.files.add_data(data)); + let cjs_export_analysis_offset = options + .maybe_cjs_export_analysis + .map(|data| self.files.add_data(data)); + let dir = 
self.add_dir_raw(path.parent().unwrap()); + let name = path.file_name().unwrap().to_string_lossy(); + + dir.entries.insert_or_modify( + &name, + case_sensitivity, + || { + VfsEntry::File(VirtualFile { + name: name.to_string(), + offset: offset_and_len, + transpiled_offset, + cjs_export_analysis_offset, + source_map_offset, + }) + }, + |entry| match entry { + VfsEntry::File(virtual_file) => { + virtual_file.offset = offset_and_len; + // doesn't overwrite to None + if transpiled_offset.is_some() { + virtual_file.transpiled_offset = transpiled_offset; + } + if source_map_offset.is_some() { + virtual_file.source_map_offset = source_map_offset; + } + if cjs_export_analysis_offset.is_some() { + virtual_file.cjs_export_analysis_offset = + cjs_export_analysis_offset; + } + } + VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(), + }, + ); + + Ok(()) + } + + fn resolve_target_path(&mut self, path: &Path) -> Result { + // ok, fs implementation + #[allow(clippy::disallowed_methods)] + let metadata = std::fs::symlink_metadata(path).with_context(|| { + format!("Resolving target path for '{}'", path.display()) + })?; + if metadata.is_symlink() { + Ok(self.add_symlink(path)?.into_path_buf()) + } else { + Ok(path.to_path_buf()) + } + } + + pub fn add_symlink( + &mut self, + path: &Path, + ) -> Result { + self.add_symlink_inner(path, &mut IndexSet::new()) + } + + fn add_symlink_inner( + &mut self, + path: &Path, + visited: &mut IndexSet, + ) -> Result { + log::debug!("Adding symlink '{}'", path.display()); + let target = strip_unc_prefix( + // ok, fs implementation + #[allow(clippy::disallowed_methods)] + std::fs::read_link(path) + .with_context(|| format!("Reading symlink '{}'", path.display()))?, + ); + let case_sensitivity = self.case_sensitivity; + let target = normalize_path(path.parent().unwrap().join(&target)); + let dir = self.add_dir_raw(path.parent().unwrap()); + let name = path.file_name().unwrap().to_string_lossy(); + dir.entries.insert_or_modify( + &name, + 
case_sensitivity, + || { + VfsEntry::Symlink(VirtualSymlink { + name: name.to_string(), + dest_parts: VirtualSymlinkParts::from_path(&target), + }) + }, + |_| { + // ignore previously inserted + }, + ); + // ok, fs implementation + #[allow(clippy::disallowed_methods)] + let target_metadata = + std::fs::symlink_metadata(&target).with_context(|| { + format!("Reading symlink target '{}'", target.display()) + })?; + if target_metadata.is_symlink() { + if !visited.insert(target.clone()) { + // todo: probably don't error in this scenario + bail!( + "Circular symlink detected: {} -> {}", + visited + .iter() + .map(|p| p.display().to_string()) + .collect::>() + .join(" -> "), + target.display() + ); + } + self.add_symlink_inner(&target, visited) + } else if target_metadata.is_dir() { + Ok(SymlinkTarget::Dir(target)) + } else { + Ok(SymlinkTarget::File(target)) + } + } + + /// Adds the CJS export analysis to the provided file. + /// + /// Warning: This will panic if the file wasn't properly + /// setup before calling this. + pub fn add_cjs_export_analysis(&mut self, path: &Path, data: Vec) { + self.add_data_for_file_or_panic(path, data, |file, offset_with_length| { + file.cjs_export_analysis_offset = Some(offset_with_length); + }) + } + + fn add_data_for_file_or_panic( + &mut self, + path: &Path, + data: Vec, + update_file: impl FnOnce(&mut VirtualFile, OffsetWithLength), + ) { + let offset_with_length = self.files.add_data(data); + let case_sensitivity = self.case_sensitivity; + let dir = self.get_dir_mut(path.parent().unwrap()).unwrap(); + let name = path.file_name().unwrap().to_string_lossy(); + let file = dir + .entries + .get_mut_by_name(&name, case_sensitivity) + .unwrap(); + match file { + VfsEntry::File(virtual_file) => { + update_file(virtual_file, offset_with_length); + } + VfsEntry::Dir(_) | VfsEntry::Symlink(_) => { + unreachable!() + } + } + } + + /// Iterates through all the files in the virtual file system. 
+ pub fn iter_files( + &self, + ) -> impl Iterator + '_ { + FileIterator { + pending_dirs: VecDeque::from([( + WindowsSystemRootablePath::root_for_current_os(), + &self.executable_root, + )]), + current_dir_index: 0, + } + } + + pub fn build(self) -> BuiltVfs { + fn strip_prefix_from_symlinks( + dir: &mut VirtualDirectory, + parts: &[String], + ) { + for entry in dir.entries.iter_mut() { + match entry { + VfsEntry::Dir(dir) => { + strip_prefix_from_symlinks(dir, parts); + } + VfsEntry::File(_) => {} + VfsEntry::Symlink(symlink) => { + let parts = symlink + .dest_parts + .take_parts() + .into_iter() + .skip(parts.len()) + .collect(); + symlink.dest_parts.set_parts(parts); + } + } + } + } + + let mut current_dir = self.executable_root; + let mut current_path = WindowsSystemRootablePath::root_for_current_os(); + loop { + if current_dir.entries.len() != 1 { + break; + } + if self.min_root_dir.as_ref() == Some(¤t_path) { + break; + } + match current_dir.entries.iter().next().unwrap() { + VfsEntry::Dir(dir) => { + if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME { + // special directory we want to maintain + break; + } + match current_dir.entries.remove(0) { + VfsEntry::Dir(dir) => { + current_path = + WindowsSystemRootablePath::Path(current_path.join(&dir.name)); + current_dir = dir; + } + _ => unreachable!(), + }; + } + VfsEntry::File(_) | VfsEntry::Symlink(_) => break, + } + } + if let WindowsSystemRootablePath::Path(path) = ¤t_path { + strip_prefix_from_symlinks( + &mut current_dir, + VirtualSymlinkParts::from_path(path).parts(), + ); + } + BuiltVfs { + root_path: current_path, + case_sensitivity: self.case_sensitivity, + entries: current_dir.entries, + files: self.files.files, + } + } +} + +struct FileIterator<'a> { + pending_dirs: VecDeque<(WindowsSystemRootablePath, &'a VirtualDirectory)>, + current_dir_index: usize, +} + +impl<'a> Iterator for FileIterator<'a> { + type Item = (PathBuf, &'a VirtualFile); + + fn next(&mut self) -> Option { + while 
!self.pending_dirs.is_empty() { + let (dir_path, current_dir) = self.pending_dirs.front()?; + if let Some(entry) = + current_dir.entries.get_by_index(self.current_dir_index) + { + self.current_dir_index += 1; + match entry { + VfsEntry::Dir(virtual_directory) => { + self.pending_dirs.push_back(( + WindowsSystemRootablePath::Path( + dir_path.join(&virtual_directory.name), + ), + virtual_directory, + )); + } + VfsEntry::File(virtual_file) => { + return Some((dir_path.join(&virtual_file.name), virtual_file)); + } + VfsEntry::Symlink(_) => { + // ignore + } + } + } else { + self.pending_dirs.pop_front(); + self.current_dir_index = 0; + } + } + None + } +} + +#[derive(Debug)] +pub enum SymlinkTarget { + File(PathBuf), + Dir(PathBuf), +} + +impl SymlinkTarget { + pub fn into_path_buf(self) -> PathBuf { + match self { + Self::File(path) => path, + Self::Dir(path) => path, + } + } +} diff --git a/cli/cache/common.rs b/cli/lib/util/hash.rs similarity index 97% rename from cli/cache/common.rs rename to cli/lib/util/hash.rs index da607a27f260da..213c83ba5b4f61 100644 --- a/cli/cache/common.rs +++ b/cli/lib/util/hash.rs @@ -3,6 +3,7 @@ use std::hash::Hasher; /// A very fast insecure hasher that uses the xxHash algorithm. 
+#[derive(Debug, Clone)] pub struct FastInsecureHasher(twox_hash::XxHash64); impl FastInsecureHasher { diff --git a/cli/util/logger.rs b/cli/lib/util/logger.rs similarity index 74% rename from cli/util/logger.rs rename to cli/lib/util/logger.rs index 2bd4760ebdf924..b280dc22ed6fcb 100644 --- a/cli/util/logger.rs +++ b/cli/lib/util/logger.rs @@ -2,44 +2,33 @@ use std::io::Write; -use deno_telemetry::OtelConfig; -use deno_telemetry::OtelConsoleConfig; +use deno_runtime::deno_telemetry; +use deno_runtime::deno_telemetry::OtelConfig; +use deno_runtime::deno_telemetry::OtelConsoleConfig; -use super::draw_thread::DrawThread; - -struct CliLogger { +struct CliLogger { otel_console_config: OtelConsoleConfig, logger: env_logger::Logger, + on_log_start: FnOnLogStart, + on_log_end: FnOnLogEnd, } -impl CliLogger { - pub fn new( - logger: env_logger::Logger, - otel_console_config: OtelConsoleConfig, - ) -> Self { - Self { - logger, - otel_console_config, - } - } - +impl CliLogger { pub fn filter(&self) -> log::LevelFilter { self.logger.filter() } } -impl log::Log for CliLogger { +impl log::Log + for CliLogger +{ fn enabled(&self, metadata: &log::Metadata) -> bool { self.logger.enabled(metadata) } fn log(&self, record: &log::Record) { if self.enabled(record.metadata()) { - // it was considered to hold the draw thread's internal lock - // across logging, but if outputting to stderr blocks then that - // could potentially block other threads that access the draw - // thread's state - DrawThread::hide(); + (self.on_log_start)(); match self.otel_console_config { OtelConsoleConfig::Ignore => { @@ -54,7 +43,7 @@ impl log::Log for CliLogger { } } - DrawThread::show(); + (self.on_log_end)(); } } @@ -63,8 +52,20 @@ impl log::Log for CliLogger { } } -pub fn init(maybe_level: Option, otel_config: Option) { - let log_level = maybe_level.unwrap_or(log::Level::Info); +pub struct InitLoggingOptions { + pub on_log_start: FnOnLogStart, + pub on_log_end: FnOnLogEnd, + pub maybe_level: Option, + 
pub otel_config: Option, +} + +pub fn init< + FOnLogStart: Fn() + Send + Sync + 'static, + FnOnLogEnd: Fn() + Send + Sync + 'static, +>( + options: InitLoggingOptions, +) { + let log_level = options.maybe_level.unwrap_or(log::Level::Info); let logger = env_logger::Builder::from_env( env_logger::Env::new() // Use `DENO_LOG` and `DENO_LOG_STYLE` instead of `RUST_` prefix @@ -117,12 +118,15 @@ pub fn init(maybe_level: Option, otel_config: Option) { }) .build(); - let cli_logger = CliLogger::new( + let cli_logger = CliLogger { + on_log_start: options.on_log_start, + on_log_end: options.on_log_end, logger, - otel_config + otel_console_config: options + .otel_config .map(|c| c.console) .unwrap_or(OtelConsoleConfig::Ignore), - ); + }; let max_level = cli_logger.filter(); let r = log::set_boxed_logger(Box::new(cli_logger)); if r.is_ok() { diff --git a/cli/lib/util/mod.rs b/cli/lib/util/mod.rs index 8371440750de2b..27643a20098457 100644 --- a/cli/lib/util/mod.rs +++ b/cli/lib/util/mod.rs @@ -1,3 +1,8 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
pub mod checksum; +pub mod hash; +pub mod logger; +pub mod result; +pub mod text_encoding; +pub mod v8; diff --git a/cli/util/result.rs b/cli/lib/util/result.rs similarity index 84% rename from cli/util/result.rs rename to cli/lib/util/result.rs index 0c1a75b1ce0c08..3e302e5dfc654e 100644 --- a/cli/util/result.rs +++ b/cli/lib/util/result.rs @@ -4,10 +4,10 @@ use std::convert::Infallible; use std::fmt::Debug; use std::fmt::Display; -use deno_core::error::AnyError; -use deno_core::error::CoreError; use deno_error::JsErrorBox; use deno_error::JsErrorClass; +use deno_runtime::deno_core::error::AnyError; +use deno_runtime::deno_core::error::CoreError; pub trait InfallibleResultExt { fn unwrap_infallible(self) -> T; @@ -36,7 +36,7 @@ pub fn any_and_jserrorbox_downcast_ref< }) .or_else(|| { err.downcast_ref::().and_then(|e| match e { - CoreError::JsNative(e) => e.as_any().downcast_ref::(), + CoreError::JsBox(e) => e.as_any().downcast_ref::(), _ => None, }) }) diff --git a/cli/lib/util/text_encoding.rs b/cli/lib/util/text_encoding.rs new file mode 100644 index 00000000000000..5b6e5f43b6c7e3 --- /dev/null +++ b/cli/lib/util/text_encoding.rs @@ -0,0 +1,45 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; +use std::sync::Arc; + +#[inline(always)] +pub fn from_utf8_lossy_owned(bytes: Vec) -> String { + match String::from_utf8_lossy(&bytes) { + Cow::Owned(code) => code, + // SAFETY: `String::from_utf8_lossy` guarantees that the result is valid + // UTF-8 if `Cow::Borrowed` is returned. + Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(bytes) }, + } +} + +#[inline(always)] +pub fn from_utf8_lossy_cow(bytes: Cow<[u8]>) -> Cow { + match bytes { + Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes), + Cow::Owned(bytes) => Cow::Owned(from_utf8_lossy_owned(bytes)), + } +} + +/// Converts an `Arc` to an `Arc<[u8]>`. 
+#[allow(dead_code)] +pub fn arc_str_to_bytes(arc_str: Arc) -> Arc<[u8]> { + let raw = Arc::into_raw(arc_str); + // SAFETY: This is safe because they have the same memory layout. + unsafe { Arc::from_raw(raw as *const [u8]) } +} + +/// Converts an `Arc` to an `Arc` if able. +#[allow(dead_code)] +pub fn arc_u8_to_arc_str( + arc_u8: Arc<[u8]>, +) -> Result, std::str::Utf8Error> { + // Check that the string is valid UTF-8. + std::str::from_utf8(&arc_u8)?; + // SAFETY: the string is valid UTF-8, and the layout Arc<[u8]> is the same as + // Arc. This is proven by the From> impl for Arc<[u8]> from the + // standard library. + Ok(unsafe { + std::mem::transmute::, std::sync::Arc>(arc_u8) + }) +} diff --git a/cli/lib/util/v8.rs b/cli/lib/util/v8.rs new file mode 100644 index 00000000000000..976fbf531b421b --- /dev/null +++ b/cli/lib/util/v8.rs @@ -0,0 +1,14 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +#[inline(always)] +pub fn construct_v8_flags( + default_v8_flags: &[String], + v8_flags: &[String], + env_v8_flags: Vec, +) -> Vec { + std::iter::once("UNUSED_BUT_NECESSARY_ARG0".to_owned()) + .chain(default_v8_flags.iter().cloned()) + .chain(env_v8_flags) + .chain(v8_flags.iter().cloned()) + .collect::>() +} diff --git a/cli/lib/version.rs b/cli/lib/version.rs new file mode 100644 index 00000000000000..88a25dffeb115f --- /dev/null +++ b/cli/lib/version.rs @@ -0,0 +1,94 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; + +use deno_runtime::deno_telemetry::OtelRuntimeConfig; + +use crate::shared::ReleaseChannel; + +pub fn otel_runtime_config() -> OtelRuntimeConfig { + OtelRuntimeConfig { + runtime_name: Cow::Borrowed("deno"), + runtime_version: Cow::Borrowed(crate::version::DENO_VERSION_INFO.deno), + } +} + +const GIT_COMMIT_HASH: &str = env!("GIT_COMMIT_HASH"); +const TYPESCRIPT: &str = "5.6.2"; +const DENO_VERSION: &str = env!("DENO_VERSION"); +// TODO(bartlomieju): ideally we could remove this const. 
+const IS_CANARY: bool = option_env!("DENO_CANARY").is_some(); +// TODO(bartlomieju): this is temporary, to allow Homebrew to cut RC releases as well +const IS_RC: bool = option_env!("DENO_RC").is_some(); + +pub static DENO_VERSION_INFO: std::sync::LazyLock = + std::sync::LazyLock::new(|| { + let release_channel = libsui::find_section("denover") + .and_then(|buf| std::str::from_utf8(buf).ok()) + .and_then(|str_| ReleaseChannel::deserialize(str_).ok()) + .unwrap_or({ + if IS_CANARY { + ReleaseChannel::Canary + } else if IS_RC { + ReleaseChannel::Rc + } else { + ReleaseChannel::Stable + } + }); + + DenoVersionInfo { + deno: if release_channel == ReleaseChannel::Canary { + concat!(env!("DENO_VERSION"), "+", env!("GIT_COMMIT_HASH_SHORT")) + } else { + env!("DENO_VERSION") + }, + + release_channel, + + git_hash: GIT_COMMIT_HASH, + + // Keep in sync with `deno` field. + user_agent: if release_channel == ReleaseChannel::Canary { + concat!( + "Deno/", + env!("DENO_VERSION"), + "+", + env!("GIT_COMMIT_HASH_SHORT") + ) + } else { + concat!("Deno/", env!("DENO_VERSION")) + }, + + typescript: TYPESCRIPT, + } + }); + +pub struct DenoVersionInfo { + /// Human-readable version of the current Deno binary. + /// + /// For stable release, a semver, eg. `v1.46.2`. + /// For canary release, a semver + 7-char git hash, eg. `v1.46.3+asdfqwq`. + pub deno: &'static str, + + pub release_channel: ReleaseChannel, + + /// A full git hash. + pub git_hash: &'static str, + + /// A user-agent header that will be used in HTTP client. + pub user_agent: &'static str, + + pub typescript: &'static str, +} + +impl DenoVersionInfo { + /// For stable release, a semver like, eg. `v1.46.2`. + /// For canary release a full git hash, eg. `9bdab6fb6b93eb43b1930f40987fa4997287f9c8`. 
+ pub fn version_or_git_hash(&self) -> &'static str { + if self.release_channel == ReleaseChannel::Canary { + self.git_hash + } else { + DENO_VERSION + } + } +} diff --git a/cli/lib/version.txt b/cli/lib/version.txt new file mode 100644 index 00000000000000..9671f9a9bd8464 --- /dev/null +++ b/cli/lib/version.txt @@ -0,0 +1 @@ +2.1.7 \ No newline at end of file diff --git a/cli/lib/worker.rs b/cli/lib/worker.rs index 7c9071d0babc50..a8c5528677ce91 100644 --- a/cli/lib/worker.rs +++ b/cli/lib/worker.rs @@ -1,11 +1,14 @@ // Copyright 2018-2025 the Deno authors. MIT license. +use std::path::Path; use std::path::PathBuf; use std::rc::Rc; use std::sync::Arc; use deno_core::error::JsError; use deno_node::NodeRequireLoaderRc; +use deno_path_util::url_from_file_path; +use deno_path_util::url_to_file_path; use deno_resolver::npm::DenoInNpmPackageChecker; use deno_resolver::npm::NpmResolver; use deno_runtime::colors; @@ -25,12 +28,12 @@ use deno_runtime::deno_node::NodeExtInitServices; use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_node::NodeResolver; use deno_runtime::deno_permissions::PermissionsContainer; +use deno_runtime::deno_process::NpmProcessStateProviderRc; use deno_runtime::deno_telemetry::OtelConfig; use deno_runtime::deno_tls::RootCertStoreProvider; use deno_runtime::deno_web::BlobStore; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::inspector_server::InspectorServer; -use deno_runtime::ops::process::NpmProcessStateProviderRc; use deno_runtime::ops::worker_host::CreateWebWorkerCb; use deno_runtime::web_worker::WebWorker; use deno_runtime::web_worker::WebWorkerOptions; @@ -42,9 +45,11 @@ use deno_runtime::BootstrapOptions; use deno_runtime::WorkerExecutionMode; use deno_runtime::WorkerLogLevel; use deno_runtime::UNSTABLE_GRANULAR_FLAGS; +use node_resolver::errors::ResolvePkgJsonBinExportError; +use node_resolver::UrlOrPath; use url::Url; -use crate::env::has_trace_permissions_enabled; +use 
crate::args::has_trace_permissions_enabled; use crate::sys::DenoLibSys; use crate::util::checksum; @@ -113,9 +118,9 @@ impl StorageKeyResolver { } } -// TODO(bartlomieju): this should be moved to some other place, added to avoid string -// duplication between worker setups and `deno info` output. pub fn get_cache_storage_dir() -> PathBuf { + // ok because this won't ever be used by the js runtime + #[allow(clippy::disallowed_methods)] // Note: we currently use temp_dir() to avoid managing storage size. std::env::temp_dir().join("deno_cache") } @@ -131,10 +136,34 @@ pub fn create_isolate_create_params() -> Option { }) } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum ResolveNpmBinaryEntrypointError { + #[class(inherit)] + #[error(transparent)] + PathToUrl(#[from] deno_path_util::PathToUrlError), + #[class(inherit)] + #[error(transparent)] + ResolvePkgJsonBinExport(ResolvePkgJsonBinExportError), + #[class(generic)] + #[error("{original:#}\n\nFallback failed: {fallback:#}")] + Fallback { + fallback: ResolveNpmBinaryEntrypointFallbackError, + original: ResolvePkgJsonBinExportError, + }, +} + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum ResolveNpmBinaryEntrypointFallbackError { + #[class(inherit)] + #[error(transparent)] + PackageSubpathResolve(node_resolver::errors::PackageSubpathResolveError), + #[class(generic)] + #[error("Cannot find module '{0}'")] + ModuleNotFound(UrlOrPath), +} + pub struct LibMainWorkerOptions { pub argv: Vec, - pub deno_version: &'static str, - pub deno_user_agent: &'static str, pub log_level: WorkerLogLevel, pub enable_op_summary_metrics: bool, pub enable_testing_features: bool, @@ -263,7 +292,7 @@ impl LibWorkerFactorySharedState { main_module: args.main_module.clone(), worker_id: args.worker_id, bootstrap: BootstrapOptions { - deno_version: shared.options.deno_version.to_string(), + deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(), args: shared.options.argv.clone(), cpu_count: 
std::thread::available_parallelism() .map(|p| p.get()) @@ -278,7 +307,7 @@ impl LibWorkerFactorySharedState { is_stdout_tty: deno_terminal::is_stdout_tty(), is_stderr_tty: deno_terminal::is_stderr_tty(), unstable_features, - user_agent: shared.options.deno_user_agent.to_string(), + user_agent: crate::version::DENO_VERSION_INFO.user_agent.to_string(), inspect: shared.options.is_inspecting, has_node_modules_dir: shared.options.has_node_modules_dir, argv0: shared.options.argv0.clone(), @@ -359,6 +388,21 @@ impl LibMainWorkerFactory { } } + pub fn create_main_worker( + &self, + mode: WorkerExecutionMode, + permissions: PermissionsContainer, + main_module: Url, + ) -> Result { + self.create_custom_worker( + mode, + main_module, + permissions, + vec![], + Default::default(), + ) + } + pub fn create_custom_worker( &self, mode: WorkerExecutionMode, @@ -420,7 +464,7 @@ impl LibMainWorkerFactory { let options = WorkerOptions { bootstrap: BootstrapOptions { - deno_version: shared.options.deno_version.to_string(), + deno_version: crate::version::DENO_VERSION_INFO.deno.to_string(), args: shared.options.argv.clone(), cpu_count: std::thread::available_parallelism() .map(|p| p.get()) @@ -435,7 +479,7 @@ impl LibMainWorkerFactory { is_stderr_tty: deno_terminal::is_stderr_tty(), color_level: colors::get_color_level(), unstable_features, - user_agent: shared.options.deno_user_agent.to_string(), + user_agent: crate::version::DENO_VERSION_INFO.user_agent.to_string(), inspect: shared.options.is_inspecting, has_node_modules_dir: shared.options.has_node_modules_dir, argv0: shared.options.argv0.clone(), @@ -476,6 +520,84 @@ impl LibMainWorkerFactory { worker, }) } + + pub fn resolve_npm_binary_entrypoint( + &self, + package_folder: &Path, + sub_path: Option<&str>, + ) -> Result { + match self + .shared + .node_resolver + .resolve_binary_export(package_folder, sub_path) + { + Ok(path) => Ok(url_from_file_path(&path)?), + Err(original_err) => { + // if the binary entrypoint was not found, 
fallback to regular node resolution + let result = + self.resolve_binary_entrypoint_fallback(package_folder, sub_path); + match result { + Ok(Some(path)) => Ok(url_from_file_path(&path)?), + Ok(None) => { + Err(ResolveNpmBinaryEntrypointError::ResolvePkgJsonBinExport( + original_err, + )) + } + Err(fallback_err) => Err(ResolveNpmBinaryEntrypointError::Fallback { + original: original_err, + fallback: fallback_err, + }), + } + } + } + } + + /// resolve the binary entrypoint using regular node resolution + fn resolve_binary_entrypoint_fallback( + &self, + package_folder: &Path, + sub_path: Option<&str>, + ) -> Result, ResolveNpmBinaryEntrypointFallbackError> { + // only fallback if the user specified a sub path + if sub_path.is_none() { + // it's confusing to users if the package doesn't have any binary + // entrypoint and we just execute the main script which will likely + // have blank output, so do not resolve the entrypoint in this case + return Ok(None); + } + + let specifier = self + .shared + .node_resolver + .resolve_package_subpath_from_deno_module( + package_folder, + sub_path, + /* referrer */ None, + node_resolver::ResolutionMode::Import, + node_resolver::NodeResolutionKind::Execution, + ) + .map_err( + ResolveNpmBinaryEntrypointFallbackError::PackageSubpathResolve, + )?; + let path = match specifier { + UrlOrPath::Url(ref url) => match url_to_file_path(url) { + Ok(path) => path, + Err(_) => { + return Err(ResolveNpmBinaryEntrypointFallbackError::ModuleNotFound( + specifier, + )); + } + }, + UrlOrPath::Path(path) => path, + }; + if self.shared.sys.fs_exists_no_err(&path) { + Ok(Some(path)) + } else { + Err(ResolveNpmBinaryEntrypointFallbackError::ModuleNotFound( + UrlOrPath::Path(path), + )) + } + } } pub struct LibMainWorker { @@ -536,6 +658,33 @@ impl LibMainWorker { self.worker.evaluate_module(id).await } + pub async fn run(&mut self) -> Result { + log::debug!("main_module {}", self.main_module); + + self.execute_main_module().await?; + 
self.worker.dispatch_load_event()?; + + loop { + self + .worker + .run_event_loop(/* wait for inspector */ false) + .await?; + + let web_continue = self.worker.dispatch_beforeunload_event()?; + if !web_continue { + let node_continue = self.worker.dispatch_process_beforeexit_event()?; + if !node_continue { + break; + } + } + } + + self.worker.dispatch_unload_event()?; + self.worker.dispatch_process_exit_event()?; + + Ok(self.worker.exit_code()) + } + #[inline] pub async fn run_event_loop( &mut self, diff --git a/cli/lsp/analysis.rs b/cli/lsp/analysis.rs index f8f382f5944476..44e9bf4dfd0c08 100644 --- a/cli/lsp/analysis.rs +++ b/cli/lsp/analysis.rs @@ -449,9 +449,7 @@ impl<'a> TsResponseImportMapper<'a> { .pkg_json_resolver(specifier) // the specifier might have a closer package.json, but we // want the root of the package's package.json - .get_closest_package_json_from_file_path( - &package_root_folder.join("package.json"), - ) + .get_closest_package_json(&package_root_folder.join("package.json")) .ok() .flatten()?; let root_folder = package_json.path.parent()?; diff --git a/cli/lsp/cache.rs b/cli/lsp/cache.rs index a65bbd5efeea01..9ac2a80d46dd15 100644 --- a/cli/lsp/cache.rs +++ b/cli/lsp/cache.rs @@ -8,9 +8,9 @@ use std::time::SystemTime; use deno_core::url::Url; use deno_core::ModuleSpecifier; -use deno_lib::cache::DenoDir; use deno_path_util::url_to_file_path; +use crate::cache::DenoDir; use crate::cache::GlobalHttpCache; use crate::cache::HttpCache; use crate::cache::LocalLspHttpCache; @@ -70,7 +70,7 @@ fn calculate_fs_version_in_cache( #[derive(Debug, Clone)] pub struct LspCache { - deno_dir: DenoDir, + deno_dir: DenoDir, global: Arc, vendors_by_scope: BTreeMap>>, } @@ -94,8 +94,10 @@ impl LspCache { .ok() }); let sys = CliSys::default(); - let deno_dir = DenoDir::new(sys.clone(), global_cache_path) - .expect("should be infallible with absolute custom root"); + let deno_dir_root = + deno_cache_dir::resolve_deno_dir(&sys, global_cache_path) + .expect("should be 
infallible with absolute custom root"); + let deno_dir = DenoDir::new(sys.clone(), deno_dir_root); let global = Arc::new(GlobalHttpCache::new(sys, deno_dir.remote_folder_path())); Self { @@ -121,7 +123,7 @@ impl LspCache { .collect(); } - pub fn deno_dir(&self) -> &DenoDir { + pub fn deno_dir(&self) -> &DenoDir { &self.deno_dir } diff --git a/cli/lsp/config.rs b/cli/lsp/config.rs index 98c4498a1a8a43..4ca4255fea1c51 100644 --- a/cli/lsp/config.rs +++ b/cli/lsp/config.rs @@ -18,6 +18,7 @@ use deno_config::deno_json::LintConfig; use deno_config::deno_json::NodeModulesDirMode; use deno_config::deno_json::TestConfig; use deno_config::deno_json::TsConfig; +use deno_config::deno_json::TsConfigWithIgnoredOptions; use deno_config::glob::FilePatterns; use deno_config::glob::PathOrPatternSet; use deno_config::workspace::CreateResolverOptions; @@ -41,11 +42,13 @@ use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::url::Url; use deno_core::ModuleSpecifier; -use deno_lib::env::has_flag_env_var; +use deno_lib::args::has_flag_env_var; +use deno_lib::util::hash::FastInsecureHasher; use deno_lint::linter::LintConfig as DenoLintConfig; use deno_npm::npm_rc::ResolvedNpmRc; use deno_package_json::PackageJsonCache; use deno_path_util::url_to_file_path; +use deno_resolver::npmrc::discover_npmrc_from_workspace; use deno_resolver::sloppy_imports::SloppyImportsCachedFs; use deno_runtime::deno_node::PackageJson; use indexmap::IndexSet; @@ -55,13 +58,11 @@ use tower_lsp::lsp_types as lsp; use super::logging::lsp_log; use super::lsp_custom; use super::urls::url_to_uri; -use crate::args::discover_npmrc_from_workspace; use crate::args::CliLockfile; use crate::args::CliLockfileReadFromPathOptions; use crate::args::ConfigFile; use crate::args::LintFlags; use crate::args::LintOptions; -use crate::cache::FastInsecureHasher; use crate::file_fetcher::CliFileFetcher; use crate::lsp::logging::lsp_warn; use crate::resolver::CliSloppyImportsResolver; @@ -1168,14 +1169,13 @@ 
impl Default for LspTsConfig { } impl LspTsConfig { - pub fn new(config_file: Option<&ConfigFile>) -> Self { - let mut ts_config = Self::default(); - match ts_config.inner.merge_tsconfig_from_config_file(config_file) { - Ok(Some(ignored_options)) => lsp_warn!("{}", ignored_options), - Err(err) => lsp_warn!("{}", err), - _ => {} + pub fn new(raw_ts_config: TsConfigWithIgnoredOptions) -> Self { + let mut base_ts_config = Self::default(); + for ignored_options in &raw_ts_config.ignored_options { + lsp_warn!("{}", ignored_options) } - ts_config + base_ts_config.inner.merge_mut(raw_ts_config.ts_config); + base_ts_config } } @@ -1362,21 +1362,22 @@ impl ConfigData { } // todo(dsherret): cache this so we don't load this so many times - let npmrc = discover_npmrc_from_workspace(&member_dir.workspace) - .inspect(|(_, path)| { - if let Some(path) = path { - lsp_log!(" Resolved .npmrc: \"{}\"", path.display()); - - if let Ok(specifier) = ModuleSpecifier::from_file_path(path) { - add_watched_file(specifier, ConfigWatchedFileType::NpmRc); + let npmrc = + discover_npmrc_from_workspace(&CliSys::default(), &member_dir.workspace) + .inspect(|(_, path)| { + if let Some(path) = path { + lsp_log!(" Resolved .npmrc: \"{}\"", path.display()); + + if let Ok(specifier) = ModuleSpecifier::from_file_path(path) { + add_watched_file(specifier, ConfigWatchedFileType::NpmRc); + } } - } - }) - .inspect_err(|err| { - lsp_warn!(" Couldn't read .npmrc for \"{scope}\": {err}"); - }) - .map(|(r, _)| r) - .ok(); + }) + .inspect_err(|err| { + lsp_warn!(" Couldn't read .npmrc for \"{scope}\": {err}"); + }) + .map(|(r, _)| Arc::new(r)) + .ok(); let default_file_pattern_base = scope.to_file_path().unwrap_or_else(|_| PathBuf::from("/")); let fmt_config = Arc::new( @@ -1424,9 +1425,10 @@ impl ConfigData { .unwrap_or_default(), ); - let ts_config = LspTsConfig::new( - member_dir.workspace.root_deno_json().map(|c| c.as_ref()), - ); + let ts_config = member_dir + .to_raw_user_provided_tsconfig() + 
.map(LspTsConfig::new) + .unwrap_or_default(); let deno_lint_config = if ts_config.inner.0.get("jsx").and_then(|v| v.as_str()) == Some("react") @@ -1672,7 +1674,6 @@ impl ConfigData { ) -> Option { self .member_dir - .workspace .to_maybe_jsx_import_source_config() .ok() .flatten() @@ -2048,7 +2049,7 @@ impl deno_config::deno_json::DenoJsonCache for DenoJsonMemCache { } } -#[derive(Default)] +#[derive(Debug, Default)] struct PackageJsonMemCache(Mutex>>); impl deno_package_json::PackageJsonCache for PackageJsonMemCache { diff --git a/cli/lsp/diagnostics.rs b/cli/lsp/diagnostics.rs index 3e3e31de285780..126e8ef01dfcc7 100644 --- a/cli/lsp/diagnostics.rs +++ b/cli/lsp/diagnostics.rs @@ -265,7 +265,7 @@ impl TsDiagnosticsStore { } pub fn should_send_diagnostic_batch_index_notifications() -> bool { - deno_lib::env::has_flag_env_var( + deno_lib::args::has_flag_env_var( "DENO_DONT_USE_INTERNAL_LSP_DIAGNOSTIC_SYNC_FLAG", ) } diff --git a/cli/lsp/documents.rs b/cli/lsp/documents.rs index 6a7a08b5a1d5eb..b2bf37edf2d8c7 100644 --- a/cli/lsp/documents.rs +++ b/cli/lsp/documents.rs @@ -936,7 +936,7 @@ impl FileSystemDocuments { file_referrer.cloned(), ) } else if specifier.scheme() == "data" { - let source = deno_graph::source::RawDataUrl::parse(specifier) + let source = deno_media_type::data_url::RawDataUrl::parse(specifier) .ok()? .decode() .ok()?; @@ -1756,10 +1756,11 @@ fn bytes_to_content( // we use the dts representation for Wasm modules Ok(deno_graph::source::wasm::wasm_module_to_dts(&bytes)?) } else { - Ok(deno_graph::source::decode_owned_source( - specifier, - bytes, - maybe_charset, + let charset = maybe_charset.unwrap_or_else(|| { + deno_media_type::encoding::detect_charset(specifier, &bytes) + }); + Ok(deno_media_type::encoding::decode_owned_source( + charset, bytes, )?) 
} } diff --git a/cli/lsp/language_server.rs b/cli/lsp/language_server.rs index c2fddc08bd4fbb..012cdd1e452e87 100644 --- a/cli/lsp/language_server.rs +++ b/cli/lsp/language_server.rs @@ -13,8 +13,6 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_cache_dir::file_fetcher::CacheSetting; -use deno_config::workspace::WorkspaceDirectory; -use deno_config::workspace::WorkspaceDiscoverOptions; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::resolve_url; @@ -25,9 +23,12 @@ use deno_core::unsync::spawn; use deno_core::url; use deno_core::url::Url; use deno_core::ModuleSpecifier; +use deno_graph::CheckJsOption; use deno_graph::GraphKind; use deno_graph::Resolution; -use deno_lib::env::has_flag_env_var; +use deno_lib::args::get_root_cert_store; +use deno_lib::args::CaData; +use deno_lib::version::DENO_VERSION_INFO; use deno_path_util::url_to_file_path; use deno_runtime::deno_tls::rustls::RootCertStore; use deno_runtime::deno_tls::RootCertStoreProvider; @@ -94,10 +95,6 @@ use super::tsc::TsServer; use super::urls; use super::urls::uri_to_url; use super::urls::url_to_uri; -use crate::args::create_default_npmrc; -use crate::args::get_root_cert_store; -use crate::args::CaData; -use crate::args::CliOptions; use crate::args::Flags; use crate::args::InternalFlags; use crate::args::UnstableFmtOptions; @@ -254,7 +251,7 @@ impl LanguageServer { force_global_cache: bool, ) -> LspResult> { async fn create_graph_for_caching( - cli_options: CliOptions, + factory: CliFactory, roots: Vec, open_docs: Vec>, ) -> Result<(), AnyError> { @@ -262,8 +259,6 @@ impl LanguageServer { .into_iter() .map(|d| (d.specifier().clone(), d)) .collect::>(); - let cli_options = Arc::new(cli_options); - let factory = CliFactory::from_cli_options(cli_options.clone()); let module_graph_builder = factory.module_graph_builder().await?; let module_graph_creator = factory.module_graph_creator().await?; let mut inner_loader = module_graph_builder.create_graph_loader(); @@ -285,16 
+280,18 @@ impl LanguageServer { &roots, graph_util::GraphValidOptions { kind: GraphKind::All, - check_js: false, + check_js: CheckJsOption::False, exit_integrity_errors: false, }, )?; // Update the lockfile on the file system with anything new // found after caching - if let Some(lockfile) = cli_options.maybe_lockfile() { - if let Err(err) = &lockfile.write_if_changed() { - lsp_warn!("{:#}", err); + if let Ok(cli_options) = factory.cli_options() { + if let Some(lockfile) = cli_options.maybe_lockfile() { + if let Err(err) = &lockfile.write_if_changed() { + lsp_warn!("{:#}", err); + } } } @@ -318,11 +315,11 @@ impl LanguageServer { match prepare_cache_result { Ok(result) => { // cache outside the lock - let cli_options = result.cli_options; + let cli_factory = result.cli_factory; let roots = result.roots; let open_docs = result.open_docs; let handle = spawn(async move { - create_graph_for_caching(cli_options, roots, open_docs).await + create_graph_for_caching(cli_factory, roots, open_docs).await }); if let Err(err) = handle.await.unwrap() { @@ -703,7 +700,7 @@ impl Inner { let version = format!( "{} ({}, {})", - crate::version::DENO_VERSION_INFO.deno, + DENO_VERSION_INFO.deno, env!("PROFILE"), env!("TARGET") ); @@ -1883,7 +1880,7 @@ impl Inner { })?; let asset_or_doc = self.get_asset_or_document(&action_data.specifier)?; let line_index = asset_or_doc.line_index(); - let mut refactor_edit_info = self + let refactor_edit_info = self .ts_server .get_edits_for_refactor( self.snapshot(), @@ -1904,19 +1901,34 @@ impl Inner { )), asset_or_doc.scope().cloned(), ) - .await?; - if kind_suffix == ".rewrite.function.returnType" - || kind_suffix == ".move.newFile" - { - refactor_edit_info.edits = - fix_ts_import_changes(&refactor_edit_info.edits, self).map_err( - |err| { - error!("Unable to remap changes: {:#}", err); - LspError::internal_error() - }, - )? 
+ .await; + + match refactor_edit_info { + Ok(mut refactor_edit_info) => { + if kind_suffix == ".rewrite.function.returnType" + || kind_suffix == ".move.newFile" + { + refactor_edit_info.edits = + fix_ts_import_changes(&refactor_edit_info.edits, self).map_err( + |err| { + error!("Unable to remap changes: {:#}", err); + LspError::internal_error() + }, + )? + } + code_action.edit = refactor_edit_info.to_workspace_edit(self)?; + } + Err(err) => { + // TODO(nayeemrmn): Investigate cause for + // https://github.com/denoland/deno/issues/27778. Prevent popups for + // this error for now. + if kind_suffix == ".move.newFile" { + lsp_warn!("{:#}", err); + } else { + return Err(err); + } + } } - code_action.edit = refactor_edit_info.to_workspace_edit(self)?; code_action } else { // The code action doesn't need to be resolved @@ -3476,7 +3488,7 @@ impl tower_lsp::LanguageServer for LanguageServer { } struct PrepareCacheResult { - cli_options: CliOptions, + cli_factory: CliFactory, roots: Vec, open_docs: Vec>, } @@ -3610,66 +3622,44 @@ impl Inner { let initial_cwd = config_data .and_then(|d| d.scope.to_file_path().ok()) .unwrap_or_else(|| self.initial_cwd.clone()); - let workspace = match config_data { - Some(d) => d.member_dir.clone(), - None => Arc::new(WorkspaceDirectory::discover( - &CliSys::default(), - deno_config::workspace::WorkspaceDiscoverStart::Paths(&[ - initial_cwd.clone() - ]), - &WorkspaceDiscoverOptions { - deno_json_cache: None, - pkg_json_cache: None, - workspace_cache: None, - additional_config_file_names: &[], - discover_pkg_json: !has_flag_env_var("DENO_NO_PACKAGE_JSON"), - maybe_vendor_override: if force_global_cache { - Some(deno_config::workspace::VendorEnablement::Disable) - } else { - None - }, - }, - )?), - }; - let cli_options = CliOptions::new( - &CliSys::default(), - Arc::new(Flags { - internal: InternalFlags { - cache_path: Some(self.cache.deno_dir().root.clone()), - ..Default::default() - }, - ca_stores: 
workspace_settings.certificate_stores.clone(), - ca_data: workspace_settings.tls_certificate.clone().map(CaData::File), - unsafely_ignore_certificate_errors: workspace_settings - .unsafely_ignore_certificate_errors - .clone(), - import_map_path: config_data.and_then(|d| { - d.import_map_from_settings - .as_ref() - .map(|url| url.to_string()) - }), - // bit of a hack to force the lsp to cache the @types/node package - type_check_mode: crate::args::TypeCheckMode::Local, - permissions: crate::args::PermissionFlags { - // allow remote import permissions in the lsp for now - allow_import: Some(vec![]), - ..Default::default() - }, + let mut cli_factory = CliFactory::from_flags(Arc::new(Flags { + internal: InternalFlags { + cache_path: Some(self.cache.deno_dir().root.clone()), ..Default::default() + }, + ca_stores: workspace_settings.certificate_stores.clone(), + ca_data: workspace_settings.tls_certificate.clone().map(CaData::File), + unsafely_ignore_certificate_errors: workspace_settings + .unsafely_ignore_certificate_errors + .clone(), + import_map_path: config_data.and_then(|d| { + d.import_map_from_settings + .as_ref() + .map(|url| url.to_string()) }), - initial_cwd, - config_data.and_then(|d| d.lockfile.clone()), - config_data - .and_then(|d| d.npmrc.clone()) - .unwrap_or_else(create_default_npmrc), - workspace, - force_global_cache, - None, - )?; + // bit of a hack to force the lsp to cache the @types/node package + type_check_mode: crate::args::TypeCheckMode::Local, + permissions: crate::args::PermissionFlags { + // allow remote import permissions in the lsp for now + allow_import: Some(vec![]), + ..Default::default() + }, + vendor: if force_global_cache { + Some(false) + } else { + None + }, + no_lock: force_global_cache, + ..Default::default() + })); + cli_factory.set_initial_cwd(initial_cwd); + if let Some(d) = &config_data { + cli_factory.set_workspace_dir(d.member_dir.clone()); + }; let open_docs = self.documents.documents(DocumentsFilter::OpenDiagnosable); 
Ok(PrepareCacheResult { - cli_options, + cli_factory, open_docs, roots, }) diff --git a/cli/lsp/npm.rs b/cli/lsp/npm.rs index d53c8cb2ab6bae..20e48ba49b88ac 100644 --- a/cli/lsp/npm.rs +++ b/cli/lsp/npm.rs @@ -6,16 +6,18 @@ use dashmap::DashMap; use deno_core::anyhow::anyhow; use deno_core::error::AnyError; use deno_core::serde_json; +use deno_core::url::Url; use deno_npm::npm_rc::NpmRc; use deno_semver::package::PackageNv; use deno_semver::Version; +use once_cell::sync::Lazy; use serde::Deserialize; use super::search::PackageSearchApi; -use crate::args::npm_registry_url; use crate::file_fetcher::CliFileFetcher; use crate::file_fetcher::TextDecodedFile; use crate::npm::NpmFetchResolver; +use crate::sys::CliSys; #[derive(Debug)] pub struct CliNpmSearchApi { @@ -111,6 +113,14 @@ fn parse_npm_search_response(source: &str) -> Result, AnyError> { Ok(objects.into_iter().map(|o| o.package.name).collect()) } +// this is buried here because generally you want to use the ResolvedNpmRc instead of this. 
+fn npm_registry_url() -> &'static Url { + static NPM_REGISTRY_DEFAULT_URL: Lazy = + Lazy::new(|| deno_resolver::npmrc::npm_registry_url(&CliSys::default())); + + &NPM_REGISTRY_DEFAULT_URL +} + #[cfg(test)] mod tests { use super::*; diff --git a/cli/lsp/resolver.rs b/cli/lsp/resolver.rs index 1b393ad22ba6d9..f0ab45f5aef69d 100644 --- a/cli/lsp/resolver.rs +++ b/cli/lsp/resolver.rs @@ -28,20 +28,21 @@ use deno_resolver::npm::managed::NpmResolutionCell; use deno_resolver::npm::CreateInNpmPkgCheckerOptions; use deno_resolver::npm::DenoInNpmPackageChecker; use deno_resolver::npm::NpmReqResolverOptions; +use deno_resolver::npmrc::create_default_npmrc; use deno_resolver::DenoResolverOptions; use deno_resolver::NodeAndNpmReqResolver; -use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use deno_semver::jsr::JsrPackageReqReference; use deno_semver::npm::NpmPackageReqReference; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use indexmap::IndexMap; +use node_resolver::DenoIsBuiltInNodeModuleChecker; use node_resolver::NodeResolutionKind; +use node_resolver::PackageJsonThreadLocalCache; use node_resolver::ResolutionMode; use super::cache::LspCache; use super::jsr::JsrCacheResolver; -use crate::args::create_default_npmrc; use crate::args::CliLockfile; use crate::args::LifecycleScriptsConfig; use crate::args::NpmCachingStrategy; @@ -152,7 +153,7 @@ impl LspScopeResolver { let maybe_jsx_import_source_config = config_data.and_then(|d| d.maybe_jsx_import_source_config()); let graph_imports = config_data - .and_then(|d| d.member_dir.workspace.to_compiler_option_types().ok()) + .and_then(|d| d.member_dir.to_compiler_option_types().ok()) .map(|imports| { Arc::new( imports @@ -206,6 +207,8 @@ impl LspScopeResolver { NodeResolutionKind::Execution, ) }) + .ok()? 
+ .into_url() .ok()?, )) .0; @@ -256,25 +259,26 @@ impl LspScopeResolver { root_node_modules_dir: byonm_npm_resolver .root_node_modules_path() .map(|p| p.to_path_buf()), - sys: CliSys::default(), + sys: factory.sys.clone(), pkg_json_resolver: self.pkg_json_resolver.clone(), }, ) } CliNpmResolver::Managed(managed_npm_resolver) => { CliNpmResolverCreateOptions::Managed({ + let sys = CliSys::default(); let npmrc = self .config_data .as_ref() .and_then(|d| d.npmrc.clone()) - .unwrap_or_else(create_default_npmrc); + .unwrap_or_else(|| Arc::new(create_default_npmrc(&sys))); let npm_cache_dir = Arc::new(NpmCacheDir::new( - &CliSys::default(), + &sys, managed_npm_resolver.global_cache_root_path().to_path_buf(), npmrc.get_all_known_registries_urls(), )); CliManagedNpmResolverCreateOptions { - sys: CliSys::default(), + sys, npm_cache_dir, maybe_node_modules_path: managed_npm_resolver .root_node_modules_path() @@ -520,6 +524,8 @@ impl LspResolver { resolution_mode, NodeResolutionKind::Types, ) + .ok()? 
+ .into_url() .ok()?, ))) } @@ -674,7 +680,11 @@ struct ResolverFactory<'a> { impl<'a> ResolverFactory<'a> { pub fn new(config_data: Option<&'a Arc>) -> Self { let sys = CliSys::default(); - let pkg_json_resolver = Arc::new(CliPackageJsonResolver::new(sys.clone())); + let pkg_json_resolver = Arc::new(CliPackageJsonResolver::new( + sys.clone(), + // this should be ok because we handle clearing this cache often in the LSP + Some(Arc::new(PackageJsonThreadLocalCache)), + )); Self { config_data, pkg_json_resolver, @@ -696,7 +706,7 @@ impl<'a> ResolverFactory<'a> { let sys = CliSys::default(); let options = if enable_byonm { CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - sys, + sys: self.sys.clone(), pkg_json_resolver: self.pkg_json_resolver.clone(), root_node_modules_dir: self.config_data.and_then(|config_data| { config_data.node_modules_dir.clone().or_else(|| { @@ -710,7 +720,7 @@ impl<'a> ResolverFactory<'a> { let npmrc = self .config_data .and_then(|d| d.npmrc.clone()) - .unwrap_or_else(create_default_npmrc); + .unwrap_or_else(|| Arc::new(create_default_npmrc(&sys))); let npm_cache_dir = Arc::new(NpmCacheDir::new( &sys, cache.deno_dir().npm_folder_path(), @@ -917,7 +927,7 @@ impl<'a> ResolverFactory<'a> { let npm_resolver = self.services.npm_resolver.as_ref()?; Some(Arc::new(CliNodeResolver::new( self.in_npm_pkg_checker().clone(), - RealIsBuiltInNodeModuleChecker, + DenoIsBuiltInNodeModuleChecker, npm_resolver.clone(), self.pkg_json_resolver.clone(), self.sys.clone(), @@ -978,19 +988,25 @@ pub struct SingleReferrerGraphResolver<'a> { } impl<'a> deno_graph::source::Resolver for SingleReferrerGraphResolver<'a> { - fn default_jsx_import_source(&self) -> Option { + fn default_jsx_import_source( + &self, + _referrer: &ModuleSpecifier, + ) -> Option { self .jsx_import_source_config .and_then(|c| c.default_specifier.clone()) } - fn default_jsx_import_source_types(&self) -> Option { + fn default_jsx_import_source_types( + &self, + _referrer: 
&ModuleSpecifier, + ) -> Option { self .jsx_import_source_config .and_then(|c| c.default_types_specifier.clone()) } - fn jsx_import_source_module(&self) -> &str { + fn jsx_import_source_module(&self, _referrer: &ModuleSpecifier) -> &str { self .jsx_import_source_config .map(|c| c.module.as_str()) diff --git a/cli/lsp/tsc.rs b/cli/lsp/tsc.rs index 32352d9f2620e8..0010e046f7767d 100644 --- a/cli/lsp/tsc.rs +++ b/cli/lsp/tsc.rs @@ -39,6 +39,7 @@ use deno_core::ModuleSpecifier; use deno_core::OpState; use deno_core::PollEventLoopOptions; use deno_core::RuntimeOptions; +use deno_lib::util::result::InfallibleResultExt; use deno_lib::worker::create_isolate_create_params; use deno_path_util::url_to_file_path; use deno_runtime::deno_node::SUPPORTED_BUILTIN_NODE_MODULES; @@ -96,7 +97,6 @@ use crate::tsc::ResolveArgs; use crate::tsc::MISSING_DEPENDENCY_SPECIFIER; use crate::util::path::relative_specifier; use crate::util::path::to_percent_decoded_str; -use crate::util::result::InfallibleResultExt; use crate::util::v8::convert; static BRACKET_ACCESSOR_RE: Lazy = @@ -3683,6 +3683,10 @@ impl CompletionInfo { position: u32, language_server: &language_server::Inner, ) -> lsp::CompletionResponse { + // A cache for costly resolution computations. + // On a test project, it was found to speed up completion requests + // by 10-20x and contained ~300 entries for 8000 completion items. 
+ let mut cache = HashMap::with_capacity(512); let items = self .entries .iter() @@ -3694,6 +3698,7 @@ impl CompletionInfo { specifier, position, language_server, + &mut cache, ) }) .collect(); @@ -3898,6 +3903,7 @@ impl CompletionEntry { self.insert_text.clone() } + #[allow(clippy::too_many_arguments)] pub fn as_completion_item( &self, line_index: Arc, @@ -3906,6 +3912,7 @@ impl CompletionEntry { specifier: &ModuleSpecifier, position: u32, language_server: &language_server::Inner, + resolution_cache: &mut HashMap<(ModuleSpecifier, ModuleSpecifier), String>, ) -> Option { let mut label = self.name.clone(); let mut label_details: Option = None; @@ -3964,14 +3971,18 @@ impl CompletionEntry { } } } - if let Some(source) = &self.source { let mut display_source = source.clone(); if let Some(import_data) = &self.auto_import_data { let import_mapper = language_server.get_ts_response_import_mapper(specifier); - if let Some(mut new_specifier) = import_mapper - .check_specifier(&import_data.normalized, specifier) + let maybe_cached = resolution_cache + .get(&(import_data.normalized.clone(), specifier.clone())) + .cloned(); + if let Some(mut new_specifier) = maybe_cached + .or_else(|| { + import_mapper.check_specifier(&import_data.normalized, specifier) + }) .or_else(|| relative_specifier(specifier, &import_data.normalized)) .or_else(|| { ModuleSpecifier::parse(&import_data.raw.module_specifier) @@ -3979,6 +3990,10 @@ impl CompletionEntry { .then(|| import_data.normalized.to_string()) }) { + resolution_cache.insert( + (import_data.normalized.clone(), specifier.clone()), + new_specifier.clone(), + ); if new_specifier.contains("/node_modules/") { return None; } diff --git a/cli/lsp/urls.rs b/cli/lsp/urls.rs index 068e4ad4d52fa2..9c9f50dc34e07c 100644 --- a/cli/lsp/urls.rs +++ b/cli/lsp/urls.rs @@ -219,7 +219,8 @@ impl LspUrlMap { let uri_str = if specifier.scheme() == "asset" { format!("deno:/asset{}", specifier.path()) } else if specifier.scheme() == "data" { - let data_url = 
deno_graph::source::RawDataUrl::parse(specifier)?; + let data_url = + deno_media_type::data_url::RawDataUrl::parse(specifier)?; let media_type = data_url.media_type(); let extension = if media_type == MediaType::Unknown { "" diff --git a/cli/main.rs b/cli/main.rs index f97ea81e5df23a..d4e9f28fb94348 100644 --- a/cli/main.rs +++ b/cli/main.rs @@ -17,16 +17,18 @@ mod node; mod npm; mod ops; mod resolver; -mod shared; mod standalone; -mod sys; mod task_runner; mod tools; mod tsc; mod util; -mod version; mod worker; +pub mod sys { + #[allow(clippy::disallowed_types)] // ok, definition + pub type CliSys = sys_traits::impls::RealSys; +} + use std::env; use std::future::Future; use std::io::IsTerminal; @@ -40,18 +42,22 @@ use deno_core::error::AnyError; use deno_core::error::CoreError; use deno_core::futures::FutureExt; use deno_core::unsync::JoinHandle; +use deno_lib::util::result::any_and_jserrorbox_downcast_ref; use deno_resolver::npm::ByonmResolvePkgFolderFromDenoReqError; use deno_resolver::npm::ResolvePkgFolderFromDenoReqError; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; use deno_runtime::WorkerExecutionMode; pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS; +use deno_telemetry::OtelConfig; use deno_terminal::colors; use factory::CliFactory; -use standalone::MODULE_NOT_FOUND; -use standalone::UNSUPPORTED_SCHEME; + +const MODULE_NOT_FOUND: &str = "Module not found"; +const UNSUPPORTED_SCHEME: &str = "Unsupported scheme"; use self::npm::ResolveSnapshotError; +use self::util::draw_thread::DrawThread; use crate::args::flags_from_vec; use crate::args::DenoSubcommand; use crate::args::Flags; @@ -128,7 +134,7 @@ async fn run_subcommand(flags: Arc) -> Result { tools::check::check(flags, check_flags).await }), DenoSubcommand::Clean => spawn_subcommand(async move { - tools::clean::clean() + tools::clean::clean(flags) }), DenoSubcommand::Compile(compile_flags) => spawn_subcommand(async { 
tools::compile::compile(flags, compile_flags).await @@ -201,7 +207,7 @@ async fn run_subcommand(flags: Arc) -> Result { match result { Ok(v) => Ok(v), Err(script_err) => { - if let Some(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_))) = util::result::any_and_jserrorbox_downcast_ref::(&script_err) { + if let Some(worker::CreateCustomWorkerError::ResolvePkgFolderFromDenoReq(ResolvePkgFolderFromDenoReqError::Byonm(ByonmResolvePkgFolderFromDenoReqError::UnmatchedReq(_)))) = any_and_jserrorbox_downcast_ref::(&script_err) { if flags.node_modules_dir.is_none() { let mut flags = flags.deref().clone(); let watch = match &flags.subcommand { @@ -351,7 +357,7 @@ fn setup_panic_hook() { eprintln!("var set and include the backtrace in your report."); eprintln!(); eprintln!("Platform: {} {}", env::consts::OS, env::consts::ARCH); - eprintln!("Version: {}", version::DENO_VERSION_INFO.deno); + eprintln!("Version: {}", deno_lib::version::DENO_VERSION_INFO.deno); eprintln!("Args: {:?}", env::args().collect::>()); eprintln!(); orig_hook(panic_info); @@ -373,13 +379,11 @@ fn exit_for_error(error: AnyError) -> ! { let mut error_code = 1; if let Some(CoreError::Js(e)) = - util::result::any_and_jserrorbox_downcast_ref::(&error) + any_and_jserrorbox_downcast_ref::(&error) { error_string = format_js_error(e); } else if let Some(e @ ResolveSnapshotError { .. }) = - util::result::any_and_jserrorbox_downcast_ref::( - &error, - ) + any_and_jserrorbox_downcast_ref::(&error) { if let Some(e) = e.maybe_integrity_check_error() { error_string = e.to_string(); @@ -442,19 +446,19 @@ fn resolve_flags_and_init( if err.kind() == clap::error::ErrorKind::DisplayVersion => { // Ignore results to avoid BrokenPipe errors. 
- util::logger::init(None, None); + init_logging(None, None); let _ = err.print(); deno_runtime::exit(0); } Err(err) => { - util::logger::init(None, None); + init_logging(None, None); exit_for_error(AnyError::from(err)) } }; let otel_config = flags.otel_config(); - deno_telemetry::init(crate::args::otel_runtime_config(), &otel_config)?; - util::logger::init(flags.log_level, Some(otel_config)); + deno_telemetry::init(deno_lib::version::otel_runtime_config(), &otel_config)?; + init_logging(flags.log_level, Some(otel_config)); // TODO(bartlomieju): remove in Deno v2.5 and hard error then. if flags.unstable_config.legacy_flag_enabled { @@ -487,3 +491,19 @@ fn resolve_flags_and_init( Ok(flags) } + +fn init_logging( + maybe_level: Option, + otel_config: Option, +) { + deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions { + maybe_level, + otel_config, + // it was considered to hold the draw thread's internal lock + // across logging, but if outputting to stderr blocks then that + // could potentially block other threads that access the draw + // thread's state + on_log_start: DrawThread::hide, + on_log_end: DrawThread::show, + }) +} diff --git a/cli/module_loader.rs b/cli/module_loader.rs index 2b0ebca986a530..fda17d6b5ee669 100644 --- a/cli/module_loader.rs +++ b/cli/module_loader.rs @@ -13,8 +13,6 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleKind; -use deno_core::anyhow::anyhow; -use deno_core::anyhow::Context; use deno_core::error::AnyError; use deno_core::error::ModuleLoaderError; use deno_core::futures::future::FutureExt; @@ -39,16 +37,22 @@ use deno_graph::ModuleGraph; use deno_graph::ModuleGraphError; use deno_graph::Resolution; use deno_graph::WasmModule; +use deno_lib::loader::ModuleCodeStringSource; +use deno_lib::loader::NpmModuleLoadError; +use deno_lib::loader::StrippingTypesNodeModulesError; use deno_lib::npm::NpmRegistryReadPermissionChecker; +use deno_lib::util::hash::FastInsecureHasher; use 
deno_lib::worker::CreateModuleLoaderResult; use deno_lib::worker::ModuleLoaderFactory; use deno_resolver::npm::DenoInNpmPackageChecker; use deno_runtime::code_cache; use deno_runtime::deno_node::create_host_defined_options; +use deno_runtime::deno_node::ops::require::UnableToGetCwdError; use deno_runtime::deno_node::NodeRequireLoader; use deno_runtime::deno_permissions::PermissionsContainer; use deno_semver::npm::NpmPackageReqReference; use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::DenoIsBuiltInNodeModuleChecker; use node_resolver::InNpmPackageChecker; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; @@ -60,7 +64,6 @@ use crate::args::CliOptions; use crate::args::DenoSubcommand; use crate::args::TsTypeLib; use crate::cache::CodeCache; -use crate::cache::FastInsecureHasher; use crate::cache::ParsedSourceCache; use crate::emit::Emitter; use crate::graph_container::MainModuleGraphContainer; @@ -70,15 +73,13 @@ use crate::graph_util::enhance_graph_error; use crate::graph_util::CreateGraphOptions; use crate::graph_util::EnhanceGraphErrorMode; use crate::graph_util::ModuleGraphBuilder; +use crate::node::CliCjsCodeAnalyzer; use crate::node::CliNodeCodeTranslator; use crate::node::CliNodeResolver; use crate::npm::CliNpmResolver; use crate::resolver::CliCjsTracker; use crate::resolver::CliNpmReqResolver; use crate::resolver::CliResolver; -use crate::resolver::ModuleCodeStringSource; -use crate::resolver::NotSupportedKindInNpmError; -use crate::resolver::NpmModuleLoader; use crate::sys::CliSys; use crate::tools::check; use crate::tools::check::CheckError; @@ -87,6 +88,14 @@ use crate::util::progress_bar::ProgressBar; use crate::util::text_encoding::code_without_source_map; use crate::util::text_encoding::source_map_from_code; +pub type CliNpmModuleLoader = deno_lib::loader::NpmModuleLoader< + CliCjsCodeAnalyzer, + DenoInNpmPackageChecker, + DenoIsBuiltInNodeModuleChecker, + CliNpmResolver, + CliSys, +>; + #[derive(Debug, 
thiserror::Error, deno_error::JsError)] pub enum PrepareModuleLoadError { #[class(inherit)] @@ -99,6 +108,11 @@ pub enum PrepareModuleLoadError { Check(#[from] CheckError), #[class(inherit)] #[error(transparent)] + AtomicWriteFileWithRetries( + #[from] crate::args::AtomicWriteFileWithRetriesError, + ), + #[class(inherit)] + #[error(transparent)] Other(#[from] JsErrorBox), } @@ -205,7 +219,6 @@ impl ModuleLoadPreparer { check::CheckOptions { build_fast_check_graph: true, lib, - log_ignored_options: false, reload: self.options.reload_flag(), type_check_mode: self.options.type_check_mode(), }, @@ -242,7 +255,7 @@ struct SharedCliModuleLoaderState { module_load_preparer: Arc, node_code_translator: Arc, node_resolver: Arc, - npm_module_loader: NpmModuleLoader, + npm_module_loader: CliNpmModuleLoader, npm_registry_permission_checker: Arc>, npm_req_resolver: Arc, @@ -304,7 +317,7 @@ impl CliModuleLoaderFactory { module_load_preparer: Arc, node_code_translator: Arc, node_resolver: Arc, - npm_module_loader: NpmModuleLoader, + npm_module_loader: CliNpmModuleLoader, npm_registry_permission_checker: Arc< NpmRegistryReadPermissionChecker, >, @@ -419,6 +432,55 @@ impl ModuleLoaderFactory for CliModuleLoaderFactory { } } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum LoadCodeSourceError { + #[class(inherit)] + #[error(transparent)] + NpmModuleLoad(NpmModuleLoadError), + #[class(inherit)] + #[error(transparent)] + LoadPreparedModule(#[from] LoadPreparedModuleError), + #[class(generic)] + #[error("Loading unprepared module: {}{}", .specifier, .maybe_referrer.as_ref().map(|r| format!(", imported from: {}", r)).unwrap_or_default())] + LoadUnpreparedModule { + specifier: ModuleSpecifier, + maybe_referrer: Option, + }, +} + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum LoadPreparedModuleError { + #[class(inherit)] + #[error(transparent)] + NpmModuleLoad(#[from] crate::emit::EmitParsedSourceHelperError), + #[class(inherit)] + 
#[error(transparent)] + LoadMaybeCjs(#[from] LoadMaybeCjsError), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), +} + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum LoadMaybeCjsError { + #[class(inherit)] + #[error(transparent)] + NpmModuleLoad(#[from] crate::emit::EmitParsedSourceHelperError), + #[class(inherit)] + #[error(transparent)] + TranslateCjsToEsm(#[from] node_resolver::analyze::TranslateCjsToEsmError), +} + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(inherit)] +#[error("Could not resolve '{reference}'")] +pub struct CouldNotResolveError { + reference: deno_semver::npm::NpmPackageNvReference, + #[source] + #[inherit] + source: node_resolver::errors::PackageSubpathResolveError, +} + struct CliModuleLoaderInner { lib: TsTypeLib, is_worker: bool, @@ -443,7 +505,10 @@ impl maybe_referrer: Option<&ModuleSpecifier>, requested_module_type: RequestedModuleType, ) -> Result { - let code_source = self.load_code_source(specifier, maybe_referrer).await?; + let code_source = self + .load_code_source(specifier, maybe_referrer) + .await + .map_err(JsErrorBox::from_err)?; let code = if self.shared.is_inspecting || code_source.media_type == MediaType::Wasm { @@ -504,7 +569,7 @@ impl &self, specifier: &ModuleSpecifier, maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result { + ) -> Result { if let Some(code_source) = self.load_prepared_module(specifier).await? 
{ return Ok(code_source); } @@ -513,14 +578,14 @@ impl .shared .npm_module_loader .load(specifier, maybe_referrer) - .await; + .await + .map_err(LoadCodeSourceError::NpmModuleLoad); } - let mut msg = format!("Loading unprepared module: {specifier}"); - if let Some(referrer) = maybe_referrer { - msg = format!("{}, imported from: {}", msg, referrer.as_str()); - } - Err(anyhow!(msg)) + Err(LoadCodeSourceError::LoadUnpreparedModule { + specifier: specifier.clone(), + maybe_referrer: maybe_referrer.cloned(), + }) } fn resolve_referrer( @@ -540,11 +605,13 @@ impl } else if referrer == "." { // main module, use the initial cwd deno_core::resolve_path(referrer, &self.shared.initial_cwd) - .map_err(|e| e.into()) + .map_err(|e| JsErrorBox::from_err(e).into()) } else { // this cwd check is slow, so try to avoid it - let cwd = std::env::current_dir().context("Unable to get CWD")?; - deno_core::resolve_path(referrer, &cwd).map_err(|e| e.into()) + let cwd = std::env::current_dir() + .map_err(|e| JsErrorBox::from_err(UnableToGetCwdError(e)))?; + deno_core::resolve_path(referrer, &cwd) + .map_err(|e| JsErrorBox::from_err(e).into()) } } @@ -599,7 +666,12 @@ impl ResolutionMode::Import, NodeResolutionKind::Execution, ) - .map_err(|e| JsErrorBox::from_err(e).into()); + .map_err(|e| JsErrorBox::from_err(e).into()) + .and_then(|url_or_path| { + url_or_path + .into_url() + .map_err(|e| JsErrorBox::from_err(e).into()) + }); } } @@ -622,9 +694,14 @@ impl ResolutionMode::Import, NodeResolutionKind::Execution, ) - .with_context(|| { - format!("Could not resolve '{}'.", module.nv_reference) + .map_err(|source| { + JsErrorBox::from_err(CouldNotResolveError { + reference: module.nv_reference.clone(), + source, + }) })? + .into_url() + .map_err(JsErrorBox::from_err)? 
} Some(Module::Node(module)) => module.specifier.clone(), Some(Module::Js(module)) => module.specifier.clone(), @@ -644,7 +721,7 @@ impl async fn load_prepared_module( &self, specifier: &ModuleSpecifier, - ) -> Result, AnyError> { + ) -> Result, LoadPreparedModuleError> { // Note: keep this in sync with the sync version below let graph = self.graph_container.graph(); match self.load_prepared_module_or_defer_emit(&graph, specifier)? { @@ -676,7 +753,8 @@ impl }) => self .load_maybe_cjs(specifier, media_type, source) .await - .map(Some), + .map(Some) + .map_err(LoadPreparedModuleError::LoadMaybeCjs), None => Ok(None), } } @@ -837,7 +915,7 @@ impl specifier: &ModuleSpecifier, media_type: MediaType, original_source: &Arc, - ) -> Result { + ) -> Result { let js_source = if media_type.is_emittable() { Cow::Owned( self @@ -1186,8 +1264,7 @@ impl NodeRequireLoader let specifier = deno_path_util::url_from_file_path(path) .map_err(JsErrorBox::from_err)?; if self.in_npm_pkg_checker.in_npm_package(&specifier) { - return Err(JsErrorBox::from_err(NotSupportedKindInNpmError { - media_type, + return Err(JsErrorBox::from_err(StrippingTypesNodeModulesError { specifier, })); } diff --git a/cli/node.rs b/cli/node.rs index 892e25914a17de..2e87035cae04c2 100644 --- a/cli/node.rs +++ b/cli/node.rs @@ -5,15 +5,15 @@ use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; -use deno_core::error::AnyError; +use deno_error::JsErrorBox; use deno_graph::ParsedSourceStore; use deno_resolver::npm::DenoInNpmPackageChecker; use deno_runtime::deno_fs; -use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use node_resolver::analyze::CjsAnalysis as ExtNodeCjsAnalysis; use node_resolver::analyze::CjsAnalysisExports; use node_resolver::analyze::CjsCodeAnalyzer; use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::DenoIsBuiltInNodeModuleChecker; use serde::Deserialize; use serde::Serialize; @@ -27,7 +27,7 @@ use crate::sys::CliSys; pub type CliNodeCodeTranslator 
= NodeCodeTranslator< CliCjsCodeAnalyzer, DenoInNpmPackageChecker, - RealIsBuiltInNodeModuleChecker, + DenoIsBuiltInNodeModuleChecker, CliNpmResolver, CliSys, >; @@ -75,7 +75,7 @@ impl CliCjsCodeAnalyzer { &self, specifier: &ModuleSpecifier, source: &str, - ) -> Result { + ) -> Result { let source_hash = CacheDBHash::from_hashable(source); if let Some(analysis) = self.cache.get_cjs_analysis(specifier.as_str(), source_hash) @@ -92,7 +92,9 @@ impl CliCjsCodeAnalyzer { } let cjs_tracker = self.cjs_tracker.clone(); - let is_maybe_cjs = cjs_tracker.is_maybe_cjs(specifier, media_type)?; + let is_maybe_cjs = cjs_tracker + .is_maybe_cjs(specifier, media_type) + .map_err(JsErrorBox::from_err)?; let analysis = if is_maybe_cjs { let maybe_parsed_source = self .parsed_source_cache @@ -102,9 +104,10 @@ impl CliCjsCodeAnalyzer { deno_core::unsync::spawn_blocking({ let specifier = specifier.clone(); let source: Arc = source.into(); - move || -> Result<_, AnyError> { - let parsed_source = - maybe_parsed_source.map(Ok).unwrap_or_else(|| { + move || -> Result<_, JsErrorBox> { + let parsed_source = maybe_parsed_source + .map(Ok) + .unwrap_or_else(|| { deno_ast::parse_program(deno_ast::ParseParams { specifier, text: source, @@ -113,13 +116,16 @@ impl CliCjsCodeAnalyzer { scope_analysis: false, maybe_syntax: None, }) - })?; + }) + .map_err(JsErrorBox::from_err)?; let is_script = parsed_source.compute_is_script(); - let is_cjs = cjs_tracker.is_cjs_with_known_is_script( - parsed_source.specifier(), - media_type, - is_script, - )?; + let is_cjs = cjs_tracker + .is_cjs_with_known_is_script( + parsed_source.specifier(), + media_type, + is_script, + ) + .map_err(JsErrorBox::from_err)?; if is_cjs { let analysis = parsed_source.analyze_cjs(); Ok(CliCjsAnalysis::Cjs { @@ -151,7 +157,7 @@ impl CjsCodeAnalyzer for CliCjsCodeAnalyzer { &self, specifier: &ModuleSpecifier, source: Option>, - ) -> Result, AnyError> { + ) -> Result, JsErrorBox> { let source = match source { Some(source) => source, 
None => { diff --git a/cli/npm/byonm.rs b/cli/npm/byonm.rs deleted file mode 100644 index 8dc498bb04b06a..00000000000000 --- a/cli/npm/byonm.rs +++ /dev/null @@ -1,32 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -use std::sync::Arc; - -use deno_core::serde_json; -use deno_resolver::npm::ByonmNpmResolver; -use deno_resolver::npm::ByonmNpmResolverCreateOptions; -use deno_runtime::ops::process::NpmProcessStateProvider; - -use crate::args::NpmProcessState; -use crate::args::NpmProcessStateKind; -use crate::sys::CliSys; - -pub type CliByonmNpmResolverCreateOptions = - ByonmNpmResolverCreateOptions; -pub type CliByonmNpmResolver = ByonmNpmResolver; - -#[derive(Debug)] -pub struct CliByonmNpmProcessStateProvider(pub Arc); - -impl NpmProcessStateProvider for CliByonmNpmProcessStateProvider { - fn get_npm_process_state(&self) -> String { - serde_json::to_string(&NpmProcessState { - kind: NpmProcessStateKind::Byonm, - local_node_modules_path: self - .0 - .root_node_modules_path() - .map(|p| p.to_string_lossy().to_string()), - }) - .unwrap() - } -} diff --git a/cli/npm/installer/common/lifecycle_scripts.rs b/cli/npm/installer/common/lifecycle_scripts.rs index a0d821cdfc88b1..64b06aecbf348a 100644 --- a/cli/npm/installer/common/lifecycle_scripts.rs +++ b/cli/npm/installer/common/lifecycle_scripts.rs @@ -220,7 +220,7 @@ impl<'a> LifecycleScripts<'a> { get_package_path, ); let init_cwd = &self.config.initial_cwd; - let process_state = crate::npm::managed::npm_process_state( + let process_state = deno_lib::npm::npm_process_state( snapshot.as_valid_serialized(), Some(root_node_modules_dir_path), ); @@ -240,7 +240,7 @@ impl<'a> LifecycleScripts<'a> { // However, if we concurrently run scripts in the future we will // have to have multiple temp files. 
let temp_file_fd = - deno_runtime::ops::process::npm_process_state_tempfile( + deno_runtime::deno_process::npm_process_state_tempfile( process_state.as_bytes(), ) .map_err(LifecycleScriptsError::CreateNpmProcessState)?; @@ -248,7 +248,7 @@ impl<'a> LifecycleScripts<'a> { let _temp_file = unsafe { std::fs::File::from_raw_io_handle(temp_file_fd) }; // make sure the file gets closed env_vars.insert( - deno_runtime::ops::process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME + deno_runtime::deno_process::NPM_RESOLUTION_STATE_FD_ENV_VAR_NAME .to_string(), (temp_file_fd as usize).to_string(), ); diff --git a/cli/npm/installer/global.rs b/cli/npm/installer/global.rs index a6b296c6d8477f..f074c62174abae 100644 --- a/cli/npm/installer/global.rs +++ b/cli/npm/installer/global.rs @@ -9,6 +9,7 @@ use async_trait::async_trait; use deno_core::futures::stream::FuturesUnordered; use deno_core::futures::StreamExt; use deno_error::JsErrorBox; +use deno_lib::util::hash::FastInsecureHasher; use deno_npm::NpmResolutionPackage; use deno_npm::NpmSystemInfo; use deno_resolver::npm::managed::NpmResolutionCell; @@ -17,7 +18,6 @@ use super::common::lifecycle_scripts::LifecycleScriptsStrategy; use super::common::NpmPackageFsInstaller; use super::PackageCaching; use crate::args::LifecycleScriptsConfig; -use crate::cache::FastInsecureHasher; use crate::colors; use crate::npm::CliNpmCache; use crate::npm::CliNpmTarballCache; diff --git a/cli/npm/installer/local.rs b/cli/npm/installer/local.rs index 87288c6c8e2428..5e14b607f7af97 100644 --- a/cli/npm/installer/local.rs +++ b/cli/npm/installer/local.rs @@ -2,6 +2,7 @@ //! Code for local node_modules resolution. 
+use std::borrow::Cow; use std::cell::RefCell; use std::cmp::Ordering; use std::collections::hash_map::Entry; @@ -312,7 +313,7 @@ async fn sync_resolution_with_fs( ); let sub_node_modules = folder_path.join("node_modules"); let package_path = - join_package_name(&sub_node_modules, &package.id.nv.name); + join_package_name(Cow::Owned(sub_node_modules), &package.id.nv.name); let cache_folder = cache.package_folder_for_nv(&package.id.nv); deno_core::unsync::spawn_blocking({ @@ -350,7 +351,7 @@ async fn sync_resolution_with_fs( let sub_node_modules = folder_path.join("node_modules"); let package_path = - join_package_name(&sub_node_modules, &package.id.nv.name); + join_package_name(Cow::Owned(sub_node_modules), &package.id.nv.name); lifecycle_scripts.add(package, package_path.into()); } @@ -367,14 +368,16 @@ async fn sync_resolution_with_fs( if !initialized_file.exists() { let sub_node_modules = destination_path.join("node_modules"); let package_path = - join_package_name(&sub_node_modules, &package.id.nv.name); + join_package_name(Cow::Owned(sub_node_modules), &package.id.nv.name); let source_path = join_package_name( - &deno_local_registry_dir - .join(get_package_folder_id_folder_name( - &package_cache_folder_id.with_no_count(), - )) - .join("node_modules"), + Cow::Owned( + deno_local_registry_dir + .join(get_package_folder_id_folder_name( + &package_cache_folder_id.with_no_count(), + )) + .join("node_modules"), + ), &package.id.nv.name, ); @@ -407,14 +410,16 @@ async fn sync_resolution_with_fs( get_package_folder_id_folder_name(&dep_cache_folder_id); if dep_setup_cache.insert(name, &dep_folder_name) { let dep_folder_path = join_package_name( - &deno_local_registry_dir - .join(dep_folder_name) - .join("node_modules"), + Cow::Owned( + deno_local_registry_dir + .join(dep_folder_name) + .join("node_modules"), + ), &dep_id.nv.name, ); symlink_package_dir( &dep_folder_path, - &join_package_name(&sub_node_modules, name), + 
&join_package_name(Cow::Borrowed(&sub_node_modules), name), )?; } } @@ -468,9 +473,11 @@ async fn sync_resolution_with_fs( &remote_pkg.get_package_cache_folder_id(), ); let local_registry_package_path = join_package_name( - &deno_local_registry_dir - .join(&target_folder_name) - .join("node_modules"), + Cow::Owned( + deno_local_registry_dir + .join(&target_folder_name) + .join("node_modules"), + ), &remote_pkg.id.nv.name, ); if install_in_child { @@ -496,7 +503,10 @@ async fn sync_resolution_with_fs( { symlink_package_dir( &local_registry_package_path, - &join_package_name(root_node_modules_dir_path, remote_alias), + &join_package_name( + Cow::Borrowed(root_node_modules_dir_path), + remote_alias, + ), )?; } } @@ -526,15 +536,20 @@ async fn sync_resolution_with_fs( get_package_folder_id_folder_name(&package.get_package_cache_folder_id()); if setup_cache.insert_root_symlink(&id.nv.name, &target_folder_name) { let local_registry_package_path = join_package_name( - &deno_local_registry_dir - .join(target_folder_name) - .join("node_modules"), + Cow::Owned( + deno_local_registry_dir + .join(target_folder_name) + .join("node_modules"), + ), &id.nv.name, ); symlink_package_dir( &local_registry_package_path, - &join_package_name(root_node_modules_dir_path, &id.nv.name), + &join_package_name( + Cow::Borrowed(root_node_modules_dir_path), + &id.nv.name, + ), )?; } } @@ -556,15 +571,20 @@ async fn sync_resolution_with_fs( if setup_cache.insert_deno_symlink(&package.id.nv.name, &target_folder_name) { let local_registry_package_path = join_package_name( - &deno_local_registry_dir - .join(target_folder_name) - .join("node_modules"), + Cow::Owned( + deno_local_registry_dir + .join(target_folder_name) + .join("node_modules"), + ), &package.id.nv.name, ); symlink_package_dir( &local_registry_package_path, - &join_package_name(&deno_node_modules_dir, &package.id.nv.name), + &join_package_name( + Cow::Borrowed(&deno_node_modules_dir), + &package.id.nv.name, + ), )?; } } @@ -986,13 
+1006,17 @@ fn junction_or_symlink_dir( } } -fn join_package_name(path: &Path, package_name: &str) -> PathBuf { - let mut path = path.to_path_buf(); +fn join_package_name(mut path: Cow, package_name: &str) -> PathBuf { // ensure backslashes are used on windows for part in package_name.split('/') { - path = path.join(part); + match path { + Cow::Borrowed(inner) => path = Cow::Owned(inner.join(part)), + Cow::Owned(ref mut path) => { + path.push(part); + } + } } - path + path.into_owned() } #[cfg(test)] diff --git a/cli/npm/managed.rs b/cli/npm/managed.rs index 4122c881f1aefc..14ba088d89164f 100644 --- a/cli/npm/managed.rs +++ b/cli/npm/managed.rs @@ -1,11 +1,9 @@ // Copyright 2018-2025 the Deno authors. MIT license. -use std::path::Path; use std::path::PathBuf; use std::sync::Arc; use deno_core::parking_lot::Mutex; -use deno_core::serde_json; use deno_error::JsError; use deno_error::JsErrorBox; use deno_npm::registry::NpmRegistryApi; @@ -13,14 +11,10 @@ use deno_npm::resolution::NpmResolutionSnapshot; use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; use deno_resolver::npm::managed::ManagedNpmResolverCreateOptions; use deno_resolver::npm::managed::NpmResolutionCell; -use deno_resolver::npm::ManagedNpmResolverRc; -use deno_runtime::ops::process::NpmProcessStateProvider; use thiserror::Error; use super::CliNpmRegistryInfoProvider; use crate::args::CliLockfile; -use crate::args::NpmProcessState; -use crate::args::NpmProcessStateKind; use crate::sys::CliSys; pub type CliManagedNpmResolverCreateOptions = @@ -207,27 +201,3 @@ async fn snapshot_from_lockfile( .await?; Ok(snapshot) } - -pub fn npm_process_state( - snapshot: ValidSerializedNpmResolutionSnapshot, - node_modules_path: Option<&Path>, -) -> String { - serde_json::to_string(&NpmProcessState { - kind: NpmProcessStateKind::Snapshot(snapshot.into_serialized()), - local_node_modules_path: node_modules_path - .map(|p| p.to_string_lossy().to_string()), - }) - .unwrap() -} - -#[derive(Debug)] -pub struct 
CliManagedNpmProcessStateProvider(pub ManagedNpmResolverRc); - -impl NpmProcessStateProvider for CliManagedNpmProcessStateProvider { - fn get_npm_process_state(&self) -> String { - npm_process_state( - self.0.resolution().serialized_valid_snapshot(), - self.0.root_node_modules_path(), - ) - } -} diff --git a/cli/npm/mod.rs b/cli/npm/mod.rs index fc0916cc18303f..388b007f811f8c 100644 --- a/cli/npm/mod.rs +++ b/cli/npm/mod.rs @@ -1,6 +1,5 @@ // Copyright 2018-2025 the Deno authors. MIT license. -mod byonm; pub mod installer; mod managed; @@ -10,15 +9,15 @@ use dashmap::DashMap; use deno_core::serde_json; use deno_core::url::Url; use deno_error::JsErrorBox; +use deno_lib::version::DENO_VERSION_INFO; use deno_npm::npm_rc::ResolvedNpmRc; use deno_npm::registry::NpmPackageInfo; -use deno_runtime::ops::process::NpmProcessStateProviderRc; +use deno_resolver::npm::ByonmNpmResolverCreateOptions; use deno_semver::package::PackageNv; use deno_semver::package::PackageReq; use http::HeaderName; use http::HeaderValue; -pub use self::byonm::CliByonmNpmResolverCreateOptions; pub use self::managed::CliManagedNpmResolverCreateOptions; pub use self::managed::CliNpmResolverManagedSnapshotOption; pub use self::managed::NpmResolutionInitializer; @@ -37,6 +36,8 @@ pub type CliNpmResolver = deno_resolver::npm::NpmResolver; pub type CliManagedNpmResolver = deno_resolver::npm::ManagedNpmResolver; pub type CliNpmResolverCreateOptions = deno_resolver::npm::NpmResolverCreateOptions; +pub type CliByonmNpmResolverCreateOptions = + ByonmNpmResolverCreateOptions; #[derive(Debug)] pub struct CliNpmCacheHttpClient { @@ -56,19 +57,6 @@ impl CliNpmCacheHttpClient { } } -pub fn create_npm_process_state_provider( - npm_resolver: &CliNpmResolver, -) -> NpmProcessStateProviderRc { - match npm_resolver { - CliNpmResolver::Byonm(byonm_npm_resolver) => Arc::new( - byonm::CliByonmNpmProcessStateProvider(byonm_npm_resolver.clone()), - ), - CliNpmResolver::Managed(managed_npm_resolver) => Arc::new( - 
managed::CliManagedNpmProcessStateProvider(managed_npm_resolver.clone()), - ), - } -} - #[async_trait::async_trait(?Send)] impl deno_npm_cache::NpmCacheHttpClient for CliNpmCacheHttpClient { async fn download_with_retries_on_any_tokio_runtime( @@ -195,8 +183,8 @@ pub const NPM_CONFIG_USER_AGENT_ENV_VAR: &str = "npm_config_user_agent"; pub fn get_npm_config_user_agent() -> String { format!( "deno/{} npm/? deno/{} {} {}", - env!("CARGO_PKG_VERSION"), - env!("CARGO_PKG_VERSION"), + DENO_VERSION_INFO.deno, + DENO_VERSION_INFO.deno, std::env::consts::OS, std::env::consts::ARCH ) diff --git a/cli/resolver.rs b/cli/resolver.rs index 5677767fdd049f..5dcdadb7ff579d 100644 --- a/cli/resolver.rs +++ b/cli/resolver.rs @@ -1,17 +1,11 @@ // Copyright 2018-2025 the Deno authors. MIT license. -use std::borrow::Cow; use std::sync::Arc; use async_trait::async_trait; use dashmap::DashSet; -use deno_ast::MediaType; use deno_config::workspace::MappedResolutionDiagnostic; use deno_config::workspace::MappedResolutionError; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; -use deno_core::url::Url; -use deno_core::ModuleSourceCode; use deno_core::ModuleSpecifier; use deno_error::JsErrorBox; use deno_graph::source::ResolveError; @@ -23,23 +17,19 @@ use deno_resolver::npm::DenoInNpmPackageChecker; use deno_resolver::sloppy_imports::SloppyImportsCachedFs; use deno_resolver::sloppy_imports::SloppyImportsResolver; use deno_runtime::colors; -use deno_runtime::deno_fs; use deno_runtime::deno_node::is_builtin_node_module; -use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; use deno_semver::package::PackageReq; +use node_resolver::DenoIsBuiltInNodeModuleChecker; use node_resolver::NodeResolutionKind; use node_resolver::ResolutionMode; -use thiserror::Error; use crate::args::NpmCachingStrategy; use crate::args::DENO_DISABLE_PEDANTIC_NODE_WARNINGS; -use crate::node::CliNodeCodeTranslator; use crate::npm::installer::NpmInstaller; use crate::npm::installer::PackageCaching; 
use crate::npm::CliNpmResolver; use crate::sys::CliSys; use crate::util::sync::AtomicFlag; -use crate::util::text_encoding::from_utf8_lossy_cow; pub type CliCjsTracker = deno_resolver::cjs::CjsTracker; @@ -50,130 +40,18 @@ pub type CliSloppyImportsResolver = SloppyImportsResolver; pub type CliDenoResolver = deno_resolver::DenoResolver< DenoInNpmPackageChecker, - RealIsBuiltInNodeModuleChecker, + DenoIsBuiltInNodeModuleChecker, CliNpmResolver, CliSloppyImportsCachedFs, CliSys, >; pub type CliNpmReqResolver = deno_resolver::npm::NpmReqResolver< DenoInNpmPackageChecker, - RealIsBuiltInNodeModuleChecker, + DenoIsBuiltInNodeModuleChecker, CliNpmResolver, CliSys, >; -pub struct ModuleCodeStringSource { - pub code: ModuleSourceCode, - pub found_url: ModuleSpecifier, - pub media_type: MediaType, -} - -#[derive(Debug, Error, deno_error::JsError)] -#[class(type)] -#[error("{media_type} files are not supported in npm packages: {specifier}")] -pub struct NotSupportedKindInNpmError { - pub media_type: MediaType, - pub specifier: Url, -} - -// todo(dsherret): move to module_loader.rs (it seems to be here due to use in standalone) -#[derive(Clone)] -pub struct NpmModuleLoader { - cjs_tracker: Arc, - fs: Arc, - node_code_translator: Arc, -} - -impl NpmModuleLoader { - pub fn new( - cjs_tracker: Arc, - fs: Arc, - node_code_translator: Arc, - ) -> Self { - Self { - cjs_tracker, - node_code_translator, - fs, - } - } - - pub async fn load( - &self, - specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - ) -> Result { - let file_path = specifier.to_file_path().unwrap(); - let code = self - .fs - .read_file_async(file_path.clone(), None) - .await - .map_err(AnyError::from) - .with_context(|| { - if file_path.is_dir() { - // directory imports are not allowed when importing from an - // ES module, so provide the user with a helpful error message - let dir_path = file_path; - let mut msg = "Directory import ".to_string(); - msg.push_str(&dir_path.to_string_lossy()); - 
if let Some(referrer) = &maybe_referrer { - msg.push_str(" is not supported resolving import from "); - msg.push_str(referrer.as_str()); - let entrypoint_name = ["index.mjs", "index.js", "index.cjs"] - .iter() - .find(|e| dir_path.join(e).is_file()); - if let Some(entrypoint_name) = entrypoint_name { - msg.push_str("\nDid you mean to import "); - msg.push_str(entrypoint_name); - msg.push_str(" within the directory?"); - } - } - msg - } else { - let mut msg = "Unable to load ".to_string(); - msg.push_str(&file_path.to_string_lossy()); - if let Some(referrer) = &maybe_referrer { - msg.push_str(" imported from "); - msg.push_str(referrer.as_str()); - } - msg - } - })?; - - let media_type = MediaType::from_specifier(specifier); - if media_type.is_emittable() { - return Err(AnyError::from(NotSupportedKindInNpmError { - media_type, - specifier: specifier.clone(), - })); - } - - let code = if self.cjs_tracker.is_maybe_cjs(specifier, media_type)? { - // translate cjs to esm if it's cjs and inject node globals - let code = from_utf8_lossy_cow(code); - ModuleSourceCode::String( - self - .node_code_translator - .translate_cjs_to_esm(specifier, Some(code)) - .await? - .into_owned() - .into(), - ) - } else { - // esm and json code is untouched - ModuleSourceCode::Bytes(match code { - Cow::Owned(bytes) => bytes.into_boxed_slice().into(), - Cow::Borrowed(bytes) => bytes.into(), - }) - }; - - Ok(ModuleCodeStringSource { - code, - found_url: specifier.clone(), - media_type: MediaType::from_specifier(specifier), - }) - } -} - #[derive(Debug, Default)] pub struct FoundPackageJsonDepFlag(AtomicFlag); diff --git a/cli/rt/Cargo.toml b/cli/rt/Cargo.toml new file mode 100644 index 00000000000000..f31af40f845994 --- /dev/null +++ b/cli/rt/Cargo.toml @@ -0,0 +1,63 @@ +# Copyright 2018-2025 the Deno authors. MIT license. 
+ +[package] +name = "denort" +version = "2.1.7" +authors.workspace = true +default-run = "denort" +edition.workspace = true +license.workspace = true +publish = false +repository.workspace = true +description = "Provides the denort executable" + +[[bin]] +name = "denort" +path = "main.rs" +doc = false + +[[test]] +name = "integration" +path = "integration_tests_runner.rs" +harness = false + +[build-dependencies] +deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources"] } +deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } + +[dependencies] +deno_cache_dir = { workspace = true, features = ["sync"] } +deno_config = { workspace = true, features = ["sync", "workspace"] } +deno_core = { workspace = true, features = ["include_js_files_for_snapshotting"] } +deno_error.workspace = true +deno_lib.workspace = true +deno_media_type = { workspace = true, features = ["data_url", "decoding"] } +deno_npm.workspace = true +deno_package_json = { workspace = true, features = ["sync"] } +deno_path_util.workspace = true +deno_resolver = { workspace = true, features = ["sync"] } +deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting"] } +deno_semver.workspace = true +deno_snapshots.workspace = true +deno_terminal.workspace = true +libsui = "0.5.0" +node_resolver.workspace = true + +async-trait.workspace = true +bincode = "=1.3.3" +import_map = { version = "=0.21.0", features = ["ext"] } +indexmap.workspace = true +log = { workspace = true, features = ["serde"] } +serde.workspace = true +serde_json.workspace = true +sys_traits = { workspace = true, features = ["getrandom", "filetime", "libc", "real", "strip_unc", "winapi"] } +thiserror.workspace = true +tokio.workspace = true +tokio-util.workspace = true +twox-hash.workspace = true +url.workspace = true + +[dev-dependencies] +pretty_assertions.workspace = true +sys_traits = { workspace = true, features = ["memory"] } 
+test_util.workspace = true diff --git a/cli/rt/binary.rs b/cli/rt/binary.rs new file mode 100644 index 00000000000000..19aad257ca6562 --- /dev/null +++ b/cli/rt/binary.rs @@ -0,0 +1,682 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; +use std::collections::HashMap; +use std::ffi::OsString; +use std::io::ErrorKind; +use std::path::Path; +use std::path::PathBuf; +use std::sync::Arc; + +use deno_core::anyhow::bail; +use deno_core::anyhow::Context; +use deno_core::error::AnyError; +use deno_core::serde_json; +use deno_core::url::Url; +use deno_core::FastString; +use deno_core::ModuleSourceCode; +use deno_core::ModuleType; +use deno_error::JsError; +use deno_error::JsErrorBox; +use deno_lib::standalone::binary::DenoRtDeserializable; +use deno_lib::standalone::binary::Metadata; +use deno_lib::standalone::binary::RemoteModuleEntry; +use deno_lib::standalone::binary::SpecifierDataStore; +use deno_lib::standalone::binary::SpecifierId; +use deno_lib::standalone::binary::MAGIC_BYTES; +use deno_lib::standalone::virtual_fs::VirtualDirectory; +use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries; +use deno_media_type::MediaType; +use deno_npm::resolution::SerializedNpmResolutionSnapshot; +use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage; +use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; +use deno_npm::NpmPackageId; +use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::RealFs; +use deno_runtime::deno_io::fs::FsError; +use deno_semver::package::PackageReq; +use deno_semver::StackString; +use indexmap::IndexMap; +use thiserror::Error; + +use crate::file_system::FileBackedVfs; +use crate::file_system::VfsRoot; + +pub struct StandaloneData { + pub metadata: Metadata, + pub modules: Arc, + pub npm_snapshot: Option, + pub root_path: PathBuf, + pub vfs: Arc, +} + +/// This function will try to run this binary as a standalone binary +/// produced by `deno compile`. 
It determines if this is a standalone +/// binary by skipping over the trailer width at the end of the file, +/// then checking for the magic trailer string `d3n0l4nd`. If found, +/// the bundle is executed. If not, this function exits with `Ok(None)`. +pub fn extract_standalone( + cli_args: Cow>, +) -> Result, AnyError> { + let Some(data) = libsui::find_section("d3n0l4nd") else { + return Ok(None); + }; + + let root_path = { + let maybe_current_exe = std::env::current_exe().ok(); + let current_exe_name = maybe_current_exe + .as_ref() + .and_then(|p| p.file_name()) + .map(|p| p.to_string_lossy()) + // should never happen + .unwrap_or_else(|| Cow::Borrowed("binary")); + std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name)) + }; + let root_url = deno_path_util::url_from_directory_path(&root_path)?; + + let DeserializedDataSection { + mut metadata, + npm_snapshot, + modules_store: remote_modules, + vfs_root_entries, + vfs_files_data, + } = match deserialize_binary_data_section(&root_url, data)? 
{ + Some(data_section) => data_section, + None => return Ok(None), + }; + + let cli_args = cli_args.into_owned(); + metadata.argv.reserve(cli_args.len() - 1); + for arg in cli_args.into_iter().skip(1) { + metadata.argv.push(arg.into_string().unwrap()); + } + let vfs = { + let fs_root = VfsRoot { + dir: VirtualDirectory { + // align the name of the directory with the root dir + name: root_path.file_name().unwrap().to_string_lossy().to_string(), + entries: vfs_root_entries, + }, + root_path: root_path.clone(), + start_file_offset: 0, + }; + Arc::new(FileBackedVfs::new( + Cow::Borrowed(vfs_files_data), + fs_root, + metadata.vfs_case_sensitivity, + )) + }; + Ok(Some(StandaloneData { + metadata, + modules: Arc::new(StandaloneModules { + modules: remote_modules, + vfs: vfs.clone(), + }), + npm_snapshot, + root_path, + vfs, + })) +} + +pub struct DeserializedDataSection { + pub metadata: Metadata, + pub npm_snapshot: Option, + pub modules_store: RemoteModulesStore, + pub vfs_root_entries: VirtualDirectoryEntries, + pub vfs_files_data: &'static [u8], +} + +pub fn deserialize_binary_data_section( + root_dir_url: &Url, + data: &'static [u8], +) -> Result, AnyError> { + fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> { + if input.len() < MAGIC_BYTES.len() { + bail!("Unexpected end of data. Could not find magic bytes."); + } + let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len()); + if magic_bytes != MAGIC_BYTES { + return Ok((input, false)); + } + Ok((input, true)) + } + + let (input, found) = read_magic_bytes(data)?; + if !found { + return Ok(None); + } + + // 1. Metadata + let (input, data) = + read_bytes_with_u64_len(input).context("reading metadata")?; + let metadata: Metadata = + serde_json::from_slice(data).context("deserializing metadata")?; + // 2. 
Npm snapshot + let (input, data) = + read_bytes_with_u64_len(input).context("reading npm snapshot")?; + let npm_snapshot = if data.is_empty() { + None + } else { + Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?) + }; + // 3. Specifiers + let (input, specifiers_store) = + SpecifierStore::deserialize(root_dir_url, input) + .context("deserializing specifiers")?; + // 4. Redirects + let (input, redirects_store) = + SpecifierDataStore::::deserialize(input) + .context("deserializing redirects")?; + // 5. Remote modules + let (input, remote_modules_store) = + SpecifierDataStore::>::deserialize(input) + .context("deserializing remote modules")?; + // 6. VFS + let (input, data) = read_bytes_with_u64_len(input).context("vfs")?; + let vfs_root_entries: VirtualDirectoryEntries = + serde_json::from_slice(data).context("deserializing vfs data")?; + let (input, vfs_files_data) = + read_bytes_with_u64_len(input).context("reading vfs files data")?; + + // finally ensure we read the magic bytes at the end + let (_input, found) = read_magic_bytes(input)?; + if !found { + bail!("Could not find magic bytes at the end of the data."); + } + + let modules_store = RemoteModulesStore::new( + specifiers_store, + redirects_store, + remote_modules_store, + ); + + Ok(Some(DeserializedDataSection { + metadata, + npm_snapshot, + modules_store, + vfs_root_entries, + vfs_files_data, + })) +} + +struct SpecifierStore { + data: IndexMap, SpecifierId>, + reverse: IndexMap>, +} + +impl SpecifierStore { + pub fn deserialize<'a>( + root_dir_url: &Url, + input: &'a [u8], + ) -> std::io::Result<(&'a [u8], Self)> { + let (input, len) = read_u32_as_usize(input)?; + let mut data = IndexMap::with_capacity(len); + let mut reverse = IndexMap::with_capacity(len); + let mut input = input; + for _ in 0..len { + let (new_input, specifier_str) = read_string_lossy(input)?; + let specifier = match Url::parse(&specifier_str) { + Ok(url) => url, + Err(err) => match 
root_dir_url.join(&specifier_str) { + Ok(url) => url, + Err(_) => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + err, + )); + } + }, + }; + let (new_input, id) = SpecifierId::deserialize(new_input)?; + let specifier = Arc::new(specifier); + data.insert(specifier.clone(), id); + reverse.insert(id, specifier); + input = new_input; + } + Ok((input, Self { data, reverse })) + } + + pub fn get_id(&self, specifier: &Url) -> Option { + self.data.get(specifier).cloned() + } + + pub fn get_specifier(&self, specifier_id: SpecifierId) -> Option<&Url> { + self.reverse.get(&specifier_id).map(|url| url.as_ref()) + } +} + +pub struct StandaloneModules { + modules: RemoteModulesStore, + vfs: Arc, +} + +impl StandaloneModules { + pub fn resolve_specifier<'a>( + &'a self, + specifier: &'a Url, + ) -> Result, TooManyRedirectsError> { + if specifier.scheme() == "file" { + Ok(Some(specifier)) + } else { + self.modules.resolve_specifier(specifier) + } + } + + pub fn has_file(&self, path: &Path) -> bool { + self.vfs.file_entry(path).is_ok() + } + + pub fn read<'a>( + &'a self, + specifier: &'a Url, + ) -> Result>, JsErrorBox> { + if specifier.scheme() == "file" { + let path = deno_path_util::url_to_file_path(specifier) + .map_err(JsErrorBox::from_err)?; + let mut transpiled = None; + let mut source_map = None; + let mut cjs_export_analysis = None; + let bytes = match self.vfs.file_entry(&path) { + Ok(entry) => { + let bytes = self + .vfs + .read_file_all(entry) + .map_err(JsErrorBox::from_err)?; + transpiled = entry + .transpiled_offset + .and_then(|t| self.vfs.read_file_offset_with_len(t).ok()); + source_map = entry + .source_map_offset + .and_then(|t| self.vfs.read_file_offset_with_len(t).ok()); + cjs_export_analysis = entry + .cjs_export_analysis_offset + .and_then(|t| self.vfs.read_file_offset_with_len(t).ok()); + bytes + } + Err(err) if err.kind() == ErrorKind::NotFound => { + match RealFs.read_file_sync(&path, None) { + Ok(bytes) => bytes, + 
Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => { + return Ok(None) + } + Err(err) => return Err(JsErrorBox::from_err(err)), + } + } + Err(err) => return Err(JsErrorBox::from_err(err)), + }; + Ok(Some(DenoCompileModuleData { + media_type: MediaType::from_specifier(specifier), + specifier, + data: bytes, + transpiled, + source_map, + cjs_export_analysis, + })) + } else { + self.modules.read(specifier).map_err(JsErrorBox::from_err) + } + } +} + +pub struct DenoCompileModuleData<'a> { + pub specifier: &'a Url, + pub media_type: MediaType, + pub data: Cow<'static, [u8]>, + pub transpiled: Option>, + pub source_map: Option>, + pub cjs_export_analysis: Option>, +} + +impl<'a> DenoCompileModuleData<'a> { + pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) { + fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource { + match data { + Cow::Borrowed(d) => DenoCompileModuleSource::String( + // SAFETY: we know this is a valid utf8 string + unsafe { std::str::from_utf8_unchecked(d) }, + ), + Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)), + } + } + + let data = self.transpiled.unwrap_or(self.data); + let (media_type, source) = match self.media_type { + MediaType::JavaScript + | MediaType::Jsx + | MediaType::Mjs + | MediaType::Cjs + | MediaType::TypeScript + | MediaType::Mts + | MediaType::Cts + | MediaType::Dts + | MediaType::Dmts + | MediaType::Dcts + | MediaType::Tsx => (ModuleType::JavaScript, into_string_unsafe(data)), + MediaType::Json => (ModuleType::Json, into_string_unsafe(data)), + MediaType::Wasm => { + (ModuleType::Wasm, DenoCompileModuleSource::Bytes(data)) + } + // just assume javascript if we made it here + MediaType::Css | MediaType::SourceMap | MediaType::Unknown => { + (ModuleType::JavaScript, DenoCompileModuleSource::Bytes(data)) + } + }; + (self.specifier, media_type, source) + } +} + +pub enum DenoCompileModuleSource { + String(&'static str), + Bytes(Cow<'static, [u8]>), +} + +impl 
DenoCompileModuleSource { + pub fn into_for_v8(self) -> ModuleSourceCode { + fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode { + ModuleSourceCode::Bytes(match data { + Cow::Borrowed(d) => d.into(), + Cow::Owned(d) => d.into_boxed_slice().into(), + }) + } + + match self { + // todo(https://github.com/denoland/deno_core/pull/943): store whether + // the string is ascii or not ahead of time so we can avoid the is_ascii() + // check in FastString::from_static + Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)), + Self::Bytes(b) => into_bytes(b), + } + } +} + +#[derive(Debug, Error, JsError)] +#[class(generic)] +#[error("Too many redirects resolving: {0}")] +pub struct TooManyRedirectsError(Url); + +pub struct RemoteModulesStore { + specifiers: SpecifierStore, + redirects: SpecifierDataStore, + remote_modules: SpecifierDataStore>, +} + +impl RemoteModulesStore { + fn new( + specifiers: SpecifierStore, + redirects: SpecifierDataStore, + remote_modules: SpecifierDataStore>, + ) -> Self { + Self { + specifiers, + redirects, + remote_modules, + } + } + + pub fn resolve_specifier<'a>( + &'a self, + specifier: &'a Url, + ) -> Result, TooManyRedirectsError> { + let Some(mut current) = self.specifiers.get_id(specifier) else { + return Ok(None); + }; + let mut count = 0; + loop { + if count > 10 { + return Err(TooManyRedirectsError(specifier.clone())); + } + match self.redirects.get(current) { + Some(to) => { + current = *to; + count += 1; + } + None => { + if count == 0 { + return Ok(Some(specifier)); + } else { + return Ok(self.specifiers.get_specifier(current)); + } + } + } + } + } + + pub fn read<'a>( + &'a self, + original_specifier: &'a Url, + ) -> Result>, TooManyRedirectsError> { + #[allow(clippy::ptr_arg)] + fn handle_cow_ref(data: &Cow<'static, [u8]>) -> Cow<'static, [u8]> { + match data { + Cow::Borrowed(data) => Cow::Borrowed(data), + Cow::Owned(data) => { + // this variant should never happen because the data + // should always be 
borrowed static in denort + debug_assert!(false); + Cow::Owned(data.clone()) + } + } + } + + let mut count = 0; + let Some(mut specifier) = self.specifiers.get_id(original_specifier) else { + return Ok(None); + }; + loop { + if count > 10 { + return Err(TooManyRedirectsError(original_specifier.clone())); + } + match self.redirects.get(specifier) { + Some(to) => { + specifier = *to; + count += 1; + } + None => { + let Some(entry) = self.remote_modules.get(specifier) else { + return Ok(None); + }; + return Ok(Some(DenoCompileModuleData { + specifier: if count == 0 { + original_specifier + } else { + self.specifiers.get_specifier(specifier).unwrap() + }, + media_type: entry.media_type, + data: handle_cow_ref(&entry.data), + transpiled: entry.maybe_transpiled.as_ref().map(handle_cow_ref), + source_map: entry.maybe_source_map.as_ref().map(handle_cow_ref), + cjs_export_analysis: entry + .maybe_cjs_export_analysis + .as_ref() + .map(handle_cow_ref), + })); + } + } + } + } +} + +fn deserialize_npm_snapshot( + input: &[u8], +) -> Result { + fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> { + let (input, id) = read_string_lossy(input)?; + let id = NpmPackageId::from_serialized(&id)?; + Ok((input, id)) + } + + #[allow(clippy::needless_lifetimes)] // clippy bug + fn parse_root_package<'a>( + id_to_npm_id: &'a impl Fn(usize) -> Result, + ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a + { + |input| { + let (input, req) = read_string_lossy(input)?; + let req = PackageReq::from_str(&req)?; + let (input, id) = read_u32_as_usize(input)?; + Ok((input, (req, id_to_npm_id(id)?))) + } + } + + #[allow(clippy::needless_lifetimes)] // clippy bug + fn parse_package_dep<'a>( + id_to_npm_id: &'a impl Fn(usize) -> Result, + ) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a + { + |input| { + let (input, req) = read_string_lossy(input)?; + let (input, id) = read_u32_as_usize(input)?; + let req = 
StackString::from_cow(req); + Ok((input, (req, id_to_npm_id(id)?))) + } + } + + fn parse_package<'a>( + input: &'a [u8], + id: NpmPackageId, + id_to_npm_id: &impl Fn(usize) -> Result, + ) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> { + let (input, deps_len) = read_u32_as_usize(input)?; + let (input, dependencies) = + parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?; + Ok(( + input, + SerializedNpmResolutionSnapshotPackage { + id, + system: Default::default(), + dist: Default::default(), + dependencies, + optional_dependencies: Default::default(), + bin: None, + scripts: Default::default(), + deprecated: Default::default(), + }, + )) + } + + let (input, packages_len) = read_u32_as_usize(input)?; + + // get a hashmap of all the npm package ids to their serialized ids + let (input, data_ids_to_npm_ids) = + parse_vec_n_times(input, packages_len, parse_id) + .context("deserializing id")?; + let data_id_to_npm_id = |id: usize| { + data_ids_to_npm_ids + .get(id) + .cloned() + .ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id")) + }; + + let (input, root_packages_len) = read_u32_as_usize(input)?; + let (input, root_packages) = parse_hashmap_n_times( + input, + root_packages_len, + parse_root_package(&data_id_to_npm_id), + ) + .context("deserializing root package")?; + let (input, packages) = + parse_vec_n_times_with_index(input, packages_len, |input, index| { + parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id) + }) + .context("deserializing package")?; + + if !input.is_empty() { + bail!("Unexpected data left over"); + } + + Ok( + SerializedNpmResolutionSnapshot { + packages, + root_packages, + } + // this is ok because we have already verified that all the + // identifiers found in the snapshot are valid via the + // npm package id -> npm package id mapping + .into_valid_unsafe(), + ) +} + +fn parse_hashmap_n_times( + mut input: &[u8], + times: usize, + parse: impl Fn(&[u8]) -> 
Result<(&[u8], (TKey, TValue)), AnyError>, +) -> Result<(&[u8], HashMap), AnyError> { + let mut results = HashMap::with_capacity(times); + for _ in 0..times { + let result = parse(input); + let (new_input, (key, value)) = result?; + results.insert(key, value); + input = new_input; + } + Ok((input, results)) +} + +fn parse_vec_n_times( + input: &[u8], + times: usize, + parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>, +) -> Result<(&[u8], Vec), AnyError> { + parse_vec_n_times_with_index(input, times, |input, _index| parse(input)) +} + +fn parse_vec_n_times_with_index( + mut input: &[u8], + times: usize, + parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>, +) -> Result<(&[u8], Vec), AnyError> { + let mut results = Vec::with_capacity(times); + for i in 0..times { + let result = parse(input, i); + let (new_input, result) = result?; + results.push(result); + input = new_input; + } + Ok((input, results)) +} + +fn read_bytes_with_u64_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> { + let (input, len) = read_u64(input)?; + let (input, data) = read_bytes(input, len as usize)?; + Ok((input, data)) +} + +fn read_bytes_with_u32_len(input: &[u8]) -> std::io::Result<(&[u8], &[u8])> { + let (input, len) = read_u32_as_usize(input)?; + let (input, data) = read_bytes(input, len)?; + Ok((input, data)) +} + +fn read_bytes(input: &[u8], len: usize) -> std::io::Result<(&[u8], &[u8])> { + check_has_len(input, len)?; + let (len_bytes, input) = input.split_at(len); + Ok((input, len_bytes)) +} + +#[inline(always)] +fn check_has_len(input: &[u8], len: usize) -> std::io::Result<()> { + if input.len() < len { + Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "Unexpected end of data", + )) + } else { + Ok(()) + } +} + +fn read_string_lossy(input: &[u8]) -> std::io::Result<(&[u8], Cow)> { + let (input, data_bytes) = read_bytes_with_u32_len(input)?; + Ok((input, String::from_utf8_lossy(data_bytes))) +} + +fn read_u32_as_usize(input: &[u8]) -> 
std::io::Result<(&[u8], usize)> { + let (input, len_bytes) = read_bytes(input, 4)?; + let len = u32::from_le_bytes(len_bytes.try_into().unwrap()); + Ok((input, len as usize)) +} + +fn read_u64(input: &[u8]) -> std::io::Result<(&[u8], u64)> { + let (input, len_bytes) = read_bytes(input, 8)?; + let len = u64::from_le_bytes(len_bytes.try_into().unwrap()); + Ok((input, len)) +} diff --git a/cli/rt/build.rs b/cli/rt/build.rs new file mode 100644 index 00000000000000..486e203dd6050e --- /dev/null +++ b/cli/rt/build.rs @@ -0,0 +1,11 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +fn main() { + // Skip building from docs.rs. + if std::env::var_os("DOCS_RS").is_some() { + return; + } + + deno_runtime::deno_napi::print_linker_flags("denort"); + deno_runtime::deno_webgpu::print_linker_flags("denort"); +} diff --git a/cli/standalone/code_cache.rs b/cli/rt/code_cache.rs similarity index 96% rename from cli/standalone/code_cache.rs rename to cli/rt/code_cache.rs index de9ff2a14142f3..c97638abd193ac 100644 --- a/cli/standalone/code_cache.rs +++ b/cli/rt/code_cache.rs @@ -1,6 +1,5 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
-use std::collections::BTreeMap; use std::collections::HashMap; use std::io::BufReader; use std::io::BufWriter; @@ -10,17 +9,15 @@ use std::path::Path; use std::path::PathBuf; use std::sync::Arc; -use deno_ast::ModuleSpecifier; use deno_core::anyhow::bail; use deno_core::error::AnyError; use deno_core::parking_lot::Mutex; use deno_core::unsync::sync::AtomicFlag; +use deno_lib::util::hash::FastInsecureHasher; use deno_path_util::get_atomic_path; use deno_runtime::code_cache::CodeCache; use deno_runtime::code_cache::CodeCacheType; - -use crate::cache::FastInsecureHasher; -use crate::worker::CliCodeCache; +use url::Url; enum CodeCacheStrategy { FirstRun(FirstRunCodeCacheStrategy), @@ -76,12 +73,27 @@ impl DenoCompileCodeCache { } } } + + pub fn for_deno_core(self: Arc) -> Arc { + self.clone() + } + + pub fn enabled(&self) -> bool { + match &self.strategy { + CodeCacheStrategy::FirstRun(strategy) => { + !strategy.is_finished.is_raised() + } + CodeCacheStrategy::SubsequentRun(strategy) => { + !strategy.is_finished.is_raised() + } + } + } } impl CodeCache for DenoCompileCodeCache { fn get_sync( &self, - specifier: &ModuleSpecifier, + specifier: &Url, code_cache_type: CodeCacheType, source_hash: u64, ) -> Option> { @@ -106,7 +118,7 @@ impl CodeCache for DenoCompileCodeCache { fn set_sync( &self, - specifier: ModuleSpecifier, + specifier: Url, code_cache_type: CodeCacheType, source_hash: u64, bytes: &[u8], @@ -152,23 +164,6 @@ impl CodeCache for DenoCompileCodeCache { } } -impl CliCodeCache for DenoCompileCodeCache { - fn enabled(&self) -> bool { - match &self.strategy { - CodeCacheStrategy::FirstRun(strategy) => { - !strategy.is_finished.is_raised() - } - CodeCacheStrategy::SubsequentRun(strategy) => { - !strategy.is_finished.is_raised() - } - } - } - - fn as_code_cache(self: Arc) -> Arc { - self - } -} - type CodeCacheKey = (String, CodeCacheType); struct FirstRunCodeCacheData { @@ -216,7 +211,7 @@ struct SubsequentRunCodeCacheStrategy { impl 
SubsequentRunCodeCacheStrategy { fn take_from_cache( &self, - specifier: &ModuleSpecifier, + specifier: &Url, code_cache_type: CodeCacheType, source_hash: u64, ) -> Option> { @@ -395,8 +390,6 @@ fn deserialize_with_reader( #[cfg(test)] mod test { - use std::fs::File; - use test_util::TempDir; use super::*; @@ -463,8 +456,8 @@ mod test { fn code_cache() { let temp_dir = TempDir::new(); let file_path = temp_dir.path().join("cache.bin").to_path_buf(); - let url1 = ModuleSpecifier::parse("https://deno.land/example1.js").unwrap(); - let url2 = ModuleSpecifier::parse("https://deno.land/example2.js").unwrap(); + let url1 = Url::parse("https://deno.land/example1.js").unwrap(); + let url2 = Url::parse("https://deno.land/example2.js").unwrap(); // first run { let code_cache = DenoCompileCodeCache::new(file_path.clone(), 1234); diff --git a/cli/rt/file_system.rs b/cli/rt/file_system.rs new file mode 100644 index 00000000000000..097674f0156fe8 --- /dev/null +++ b/cli/rt/file_system.rs @@ -0,0 +1,1713 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
+ +use std::borrow::Cow; +use std::cell::RefCell; +use std::collections::HashSet; +use std::io::ErrorKind; +use std::io::SeekFrom; +use std::ops::Range; +use std::path::Path; +use std::path::PathBuf; +use std::rc::Rc; +use std::sync::Arc; +use std::time::Duration; +use std::time::SystemTime; + +use deno_core::BufMutView; +use deno_core::BufView; +use deno_core::ResourceHandleFd; +use deno_lib::standalone::virtual_fs::FileSystemCaseSensitivity; +use deno_lib::standalone::virtual_fs::OffsetWithLength; +use deno_lib::standalone::virtual_fs::VfsEntry; +use deno_lib::standalone::virtual_fs::VfsEntryRef; +use deno_lib::standalone::virtual_fs::VirtualDirectory; +use deno_lib::standalone::virtual_fs::VirtualFile; +use deno_lib::sys::DenoLibSys; +use deno_runtime::deno_fs::AccessCheckCb; +use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_fs::FsDirEntry; +use deno_runtime::deno_fs::FsFileType; +use deno_runtime::deno_fs::OpenOptions; +use deno_runtime::deno_fs::RealFs; +use deno_runtime::deno_io; +use deno_runtime::deno_io::fs::File as DenoFile; +use deno_runtime::deno_io::fs::FsError; +use deno_runtime::deno_io::fs::FsResult; +use deno_runtime::deno_io::fs::FsStat; +use deno_runtime::deno_node::ExtNodeSys; +use sys_traits::boxed::BoxedFsDirEntry; +use sys_traits::boxed::BoxedFsMetadataValue; +use sys_traits::boxed::FsMetadataBoxed; +use sys_traits::boxed::FsReadDirBoxed; +use sys_traits::FsCopy; +use url::Url; + +#[derive(Debug, Clone)] +pub struct DenoRtSys(Arc); + +impl DenoRtSys { + pub fn new(vfs: Arc) -> Self { + Self(vfs) + } + + pub fn is_specifier_in_vfs(&self, specifier: &Url) -> bool { + deno_path_util::url_to_file_path(specifier) + .map(|p| self.is_in_vfs(&p)) + .unwrap_or(false) + } + + pub fn is_in_vfs(&self, path: &Path) -> bool { + self.0.is_path_within(path) + } + + fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> { + if self.0.is_path_within(path) { + Err(FsError::NotSupported) + } else { + Ok(()) + } + } + + fn copy_to_real_path( + 
&self, + oldpath: &Path, + newpath: &Path, + ) -> std::io::Result { + let old_file = self.0.file_entry(oldpath)?; + let old_file_bytes = self.0.read_file_all(old_file)?; + let len = old_file_bytes.len() as u64; + RealFs + .write_file_sync( + newpath, + OpenOptions { + read: false, + write: true, + create: true, + truncate: true, + append: false, + create_new: false, + mode: None, + }, + None, + &old_file_bytes, + ) + .map_err(|err| err.into_io_error())?; + Ok(len) + } +} + +#[async_trait::async_trait(?Send)] +impl FileSystem for DenoRtSys { + fn cwd(&self) -> FsResult { + RealFs.cwd() + } + + fn tmp_dir(&self) -> FsResult { + RealFs.tmp_dir() + } + + fn chdir(&self, path: &Path) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.chdir(path) + } + + fn umask(&self, mask: Option) -> FsResult { + RealFs.umask(mask) + } + + fn open_sync( + &self, + path: &Path, + options: OpenOptions, + access_check: Option, + ) -> FsResult> { + if self.0.is_path_within(path) { + Ok(Rc::new(self.0.open_file(path)?)) + } else { + RealFs.open_sync(path, options, access_check) + } + } + async fn open_async<'a>( + &'a self, + path: PathBuf, + options: OpenOptions, + access_check: Option>, + ) -> FsResult> { + if self.0.is_path_within(&path) { + Ok(Rc::new(self.0.open_file(&path)?)) + } else { + RealFs.open_async(path, options, access_check).await + } + } + + fn mkdir_sync( + &self, + path: &Path, + recursive: bool, + mode: Option, + ) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.mkdir_sync(path, recursive, mode) + } + async fn mkdir_async( + &self, + path: PathBuf, + recursive: bool, + mode: Option, + ) -> FsResult<()> { + self.error_if_in_vfs(&path)?; + RealFs.mkdir_async(path, recursive, mode).await + } + + fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.chmod_sync(path, mode) + } + async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> { + self.error_if_in_vfs(&path)?; + RealFs.chmod_async(path, 
mode).await + } + + fn chown_sync( + &self, + path: &Path, + uid: Option, + gid: Option, + ) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.chown_sync(path, uid, gid) + } + async fn chown_async( + &self, + path: PathBuf, + uid: Option, + gid: Option, + ) -> FsResult<()> { + self.error_if_in_vfs(&path)?; + RealFs.chown_async(path, uid, gid).await + } + + fn lchown_sync( + &self, + path: &Path, + uid: Option, + gid: Option, + ) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.lchown_sync(path, uid, gid) + } + + async fn lchown_async( + &self, + path: PathBuf, + uid: Option, + gid: Option, + ) -> FsResult<()> { + self.error_if_in_vfs(&path)?; + RealFs.lchown_async(path, uid, gid).await + } + + fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.remove_sync(path, recursive) + } + async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> { + self.error_if_in_vfs(&path)?; + RealFs.remove_async(path, recursive).await + } + + fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { + self.error_if_in_vfs(newpath)?; + if self.0.is_path_within(oldpath) { + self + .copy_to_real_path(oldpath, newpath) + .map(|_| ()) + .map_err(FsError::Io) + } else { + RealFs.copy_file_sync(oldpath, newpath) + } + } + async fn copy_file_async( + &self, + oldpath: PathBuf, + newpath: PathBuf, + ) -> FsResult<()> { + self.error_if_in_vfs(&newpath)?; + if self.0.is_path_within(&oldpath) { + let fs = self.clone(); + tokio::task::spawn_blocking(move || { + fs.copy_to_real_path(&oldpath, &newpath) + .map(|_| ()) + .map_err(FsError::Io) + }) + .await? 
+ } else { + RealFs.copy_file_async(oldpath, newpath).await + } + } + + fn cp_sync(&self, from: &Path, to: &Path) -> FsResult<()> { + self.error_if_in_vfs(to)?; + + RealFs.cp_sync(from, to) + } + async fn cp_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> { + self.error_if_in_vfs(&to)?; + + RealFs.cp_async(from, to).await + } + + fn stat_sync(&self, path: &Path) -> FsResult { + if self.0.is_path_within(path) { + Ok(self.0.stat(path)?.as_fs_stat()) + } else { + RealFs.stat_sync(path) + } + } + async fn stat_async(&self, path: PathBuf) -> FsResult { + if self.0.is_path_within(&path) { + Ok(self.0.stat(&path)?.as_fs_stat()) + } else { + RealFs.stat_async(path).await + } + } + + fn lstat_sync(&self, path: &Path) -> FsResult { + if self.0.is_path_within(path) { + Ok(self.0.lstat(path)?.as_fs_stat()) + } else { + RealFs.lstat_sync(path) + } + } + async fn lstat_async(&self, path: PathBuf) -> FsResult { + if self.0.is_path_within(&path) { + Ok(self.0.lstat(&path)?.as_fs_stat()) + } else { + RealFs.lstat_async(path).await + } + } + + fn realpath_sync(&self, path: &Path) -> FsResult { + if self.0.is_path_within(path) { + Ok(self.0.canonicalize(path)?) + } else { + RealFs.realpath_sync(path) + } + } + async fn realpath_async(&self, path: PathBuf) -> FsResult { + if self.0.is_path_within(&path) { + Ok(self.0.canonicalize(&path)?) + } else { + RealFs.realpath_async(path).await + } + } + + fn read_dir_sync(&self, path: &Path) -> FsResult> { + if self.0.is_path_within(path) { + Ok(self.0.read_dir(path)?) + } else { + RealFs.read_dir_sync(path) + } + } + async fn read_dir_async(&self, path: PathBuf) -> FsResult> { + if self.0.is_path_within(&path) { + Ok(self.0.read_dir(&path)?) 
+ } else { + RealFs.read_dir_async(path).await + } + } + + fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { + self.error_if_in_vfs(oldpath)?; + self.error_if_in_vfs(newpath)?; + RealFs.rename_sync(oldpath, newpath) + } + async fn rename_async( + &self, + oldpath: PathBuf, + newpath: PathBuf, + ) -> FsResult<()> { + self.error_if_in_vfs(&oldpath)?; + self.error_if_in_vfs(&newpath)?; + RealFs.rename_async(oldpath, newpath).await + } + + fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { + self.error_if_in_vfs(oldpath)?; + self.error_if_in_vfs(newpath)?; + RealFs.link_sync(oldpath, newpath) + } + async fn link_async( + &self, + oldpath: PathBuf, + newpath: PathBuf, + ) -> FsResult<()> { + self.error_if_in_vfs(&oldpath)?; + self.error_if_in_vfs(&newpath)?; + RealFs.link_async(oldpath, newpath).await + } + + fn symlink_sync( + &self, + oldpath: &Path, + newpath: &Path, + file_type: Option, + ) -> FsResult<()> { + self.error_if_in_vfs(oldpath)?; + self.error_if_in_vfs(newpath)?; + RealFs.symlink_sync(oldpath, newpath, file_type) + } + async fn symlink_async( + &self, + oldpath: PathBuf, + newpath: PathBuf, + file_type: Option, + ) -> FsResult<()> { + self.error_if_in_vfs(&oldpath)?; + self.error_if_in_vfs(&newpath)?; + RealFs.symlink_async(oldpath, newpath, file_type).await + } + + fn read_link_sync(&self, path: &Path) -> FsResult { + if self.0.is_path_within(path) { + Ok(self.0.read_link(path)?) + } else { + RealFs.read_link_sync(path) + } + } + async fn read_link_async(&self, path: PathBuf) -> FsResult { + if self.0.is_path_within(&path) { + Ok(self.0.read_link(&path)?) 
+ } else { + RealFs.read_link_async(path).await + } + } + + fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.truncate_sync(path, len) + } + async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> { + self.error_if_in_vfs(&path)?; + RealFs.truncate_async(path, len).await + } + + fn utime_sync( + &self, + path: &Path, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.utime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) + } + async fn utime_async( + &self, + path: PathBuf, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()> { + self.error_if_in_vfs(&path)?; + RealFs + .utime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) + .await + } + + fn lutime_sync( + &self, + path: &Path, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()> { + self.error_if_in_vfs(path)?; + RealFs.lutime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) + } + async fn lutime_async( + &self, + path: PathBuf, + atime_secs: i64, + atime_nanos: u32, + mtime_secs: i64, + mtime_nanos: u32, + ) -> FsResult<()> { + self.error_if_in_vfs(&path)?; + RealFs + .lutime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) + .await + } +} + +impl ExtNodeSys for DenoRtSys {} +impl DenoLibSys for DenoRtSys {} + +impl sys_traits::BaseFsHardLink for DenoRtSys { + #[inline] + fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> { + self.link_sync(src, dst).map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsRead for DenoRtSys { + #[inline] + fn base_fs_read(&self, path: &Path) -> std::io::Result> { + self + .read_file_sync(path, None) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::FsMetadataValue for FileBackedVfsMetadata { + fn file_type(&self) -> 
sys_traits::FileType { + self.file_type + } + + fn len(&self) -> u64 { + self.len + } + + fn accessed(&self) -> std::io::Result { + Err(not_supported("accessed time")) + } + + fn created(&self) -> std::io::Result { + Err(not_supported("created time")) + } + + fn changed(&self) -> std::io::Result { + Err(not_supported("changed time")) + } + + fn modified(&self) -> std::io::Result { + Err(not_supported("modified time")) + } + + fn dev(&self) -> std::io::Result { + Ok(0) + } + + fn ino(&self) -> std::io::Result { + Ok(0) + } + + fn mode(&self) -> std::io::Result { + Ok(0) + } + + fn nlink(&self) -> std::io::Result { + Ok(0) + } + + fn uid(&self) -> std::io::Result { + Ok(0) + } + + fn gid(&self) -> std::io::Result { + Ok(0) + } + + fn rdev(&self) -> std::io::Result { + Ok(0) + } + + fn blksize(&self) -> std::io::Result { + Ok(0) + } + + fn blocks(&self) -> std::io::Result { + Ok(0) + } + + fn is_block_device(&self) -> std::io::Result { + Ok(false) + } + + fn is_char_device(&self) -> std::io::Result { + Ok(false) + } + + fn is_fifo(&self) -> std::io::Result { + Ok(false) + } + + fn is_socket(&self) -> std::io::Result { + Ok(false) + } + + fn file_attributes(&self) -> std::io::Result { + Ok(0) + } +} + +fn not_supported(name: &str) -> std::io::Error { + std::io::Error::new( + ErrorKind::Unsupported, + format!( + "{} is not supported for an embedded deno compile file", + name + ), + ) +} + +impl sys_traits::FsDirEntry for FileBackedVfsDirEntry { + type Metadata = BoxedFsMetadataValue; + + fn file_name(&self) -> Cow { + Cow::Borrowed(self.metadata.name.as_ref()) + } + + fn file_type(&self) -> std::io::Result { + Ok(self.metadata.file_type) + } + + fn metadata(&self) -> std::io::Result { + Ok(BoxedFsMetadataValue(Box::new(self.metadata.clone()))) + } + + fn path(&self) -> Cow { + Cow::Owned(self.parent_path.join(&self.metadata.name)) + } +} + +impl sys_traits::BaseFsReadDir for DenoRtSys { + type ReadDirEntry = BoxedFsDirEntry; + + fn base_fs_read_dir( + &self, + path: 
&Path, + ) -> std::io::Result< + Box> + '_>, + > { + if self.0.is_path_within(path) { + let entries = self.0.read_dir_with_metadata(path)?; + Ok(Box::new( + entries.map(|entry| Ok(BoxedFsDirEntry::new(entry))), + )) + } else { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.fs_read_dir_boxed(path) + } + } +} + +impl sys_traits::BaseFsCanonicalize for DenoRtSys { + #[inline] + fn base_fs_canonicalize(&self, path: &Path) -> std::io::Result { + self.realpath_sync(path).map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsMetadata for DenoRtSys { + type Metadata = BoxedFsMetadataValue; + + #[inline] + fn base_fs_metadata(&self, path: &Path) -> std::io::Result { + if self.0.is_path_within(path) { + Ok(BoxedFsMetadataValue::new(self.0.stat(path)?)) + } else { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.fs_metadata_boxed(path) + } + } + + #[inline] + fn base_fs_symlink_metadata( + &self, + path: &Path, + ) -> std::io::Result { + if self.0.is_path_within(path) { + Ok(BoxedFsMetadataValue::new(self.0.lstat(path)?)) + } else { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.fs_symlink_metadata_boxed(path) + } + } +} + +impl sys_traits::BaseFsCopy for DenoRtSys { + #[inline] + fn base_fs_copy(&self, from: &Path, to: &Path) -> std::io::Result { + self + .error_if_in_vfs(to) + .map_err(|err| err.into_io_error())?; + if self.0.is_path_within(from) { + self.copy_to_real_path(from, to) + } else { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.fs_copy(from, to) + } + } +} + +impl sys_traits::BaseFsCloneFile for DenoRtSys { + fn base_fs_clone_file( + &self, + _from: &Path, + _to: &Path, + ) -> std::io::Result<()> { + // will cause a fallback in the code that uses this + Err(not_supported("cloning files")) + } +} + +impl 
sys_traits::BaseFsCreateDir for DenoRtSys { + #[inline] + fn base_fs_create_dir( + &self, + path: &Path, + options: &sys_traits::CreateDirOptions, + ) -> std::io::Result<()> { + self + .mkdir_sync(path, options.recursive, options.mode) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsRemoveFile for DenoRtSys { + #[inline] + fn base_fs_remove_file(&self, path: &Path) -> std::io::Result<()> { + self + .remove_sync(path, false) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::BaseFsRename for DenoRtSys { + #[inline] + fn base_fs_rename(&self, from: &Path, to: &Path) -> std::io::Result<()> { + self + .rename_sync(from, to) + .map_err(|err| err.into_io_error()) + } +} + +pub enum FsFileAdapter { + Real(sys_traits::impls::RealFsFile), + Vfs(FileBackedVfsFile), +} + +impl sys_traits::FsFile for FsFileAdapter {} + +impl sys_traits::FsFileAsRaw for FsFileAdapter { + #[cfg(windows)] + fn fs_file_as_raw_handle(&self) -> Option { + match self { + Self::Real(file) => file.fs_file_as_raw_handle(), + Self::Vfs(_) => None, + } + } + + #[cfg(unix)] + fn fs_file_as_raw_fd(&self) -> Option { + match self { + Self::Real(file) => file.fs_file_as_raw_fd(), + Self::Vfs(_) => None, + } + } +} + +impl sys_traits::FsFileSyncData for FsFileAdapter { + fn fs_file_sync_data(&mut self) -> std::io::Result<()> { + match self { + Self::Real(file) => file.fs_file_sync_data(), + Self::Vfs(_) => Ok(()), + } + } +} + +impl sys_traits::FsFileSyncAll for FsFileAdapter { + fn fs_file_sync_all(&mut self) -> std::io::Result<()> { + match self { + Self::Real(file) => file.fs_file_sync_all(), + Self::Vfs(_) => Ok(()), + } + } +} + +impl sys_traits::FsFileSetPermissions for FsFileAdapter { + #[inline] + fn fs_file_set_permissions(&mut self, mode: u32) -> std::io::Result<()> { + match self { + Self::Real(file) => file.fs_file_set_permissions(mode), + Self::Vfs(_) => Ok(()), + } + } +} + +impl std::io::Read for FsFileAdapter { + #[inline] + fn read(&mut self, buf: &mut 
[u8]) -> std::io::Result { + match self { + Self::Real(file) => file.read(buf), + Self::Vfs(file) => file.read_to_buf(buf), + } + } +} + +impl std::io::Seek for FsFileAdapter { + fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result { + match self { + Self::Real(file) => file.seek(pos), + Self::Vfs(file) => file.seek(pos), + } + } +} + +impl std::io::Write for FsFileAdapter { + #[inline] + fn write(&mut self, buf: &[u8]) -> std::io::Result { + match self { + Self::Real(file) => file.write(buf), + Self::Vfs(_) => Err(not_supported("writing files")), + } + } + + #[inline] + fn flush(&mut self) -> std::io::Result<()> { + match self { + Self::Real(file) => file.flush(), + Self::Vfs(_) => Err(not_supported("writing files")), + } + } +} + +impl sys_traits::FsFileSetLen for FsFileAdapter { + #[inline] + fn fs_file_set_len(&mut self, len: u64) -> std::io::Result<()> { + match self { + Self::Real(file) => file.fs_file_set_len(len), + Self::Vfs(_) => Err(not_supported("setting file length")), + } + } +} + +impl sys_traits::FsFileSetTimes for FsFileAdapter { + fn fs_file_set_times( + &mut self, + times: sys_traits::FsFileTimes, + ) -> std::io::Result<()> { + match self { + Self::Real(file) => file.fs_file_set_times(times), + Self::Vfs(_) => Err(not_supported("setting file times")), + } + } +} + +impl sys_traits::FsFileLock for FsFileAdapter { + fn fs_file_lock( + &mut self, + mode: sys_traits::FsFileLockMode, + ) -> std::io::Result<()> { + match self { + Self::Real(file) => file.fs_file_lock(mode), + Self::Vfs(_) => Err(not_supported("locking files")), + } + } + + fn fs_file_try_lock( + &mut self, + mode: sys_traits::FsFileLockMode, + ) -> std::io::Result<()> { + match self { + Self::Real(file) => file.fs_file_try_lock(mode), + Self::Vfs(_) => Err(not_supported("locking files")), + } + } + + fn fs_file_unlock(&mut self) -> std::io::Result<()> { + match self { + Self::Real(file) => file.fs_file_unlock(), + Self::Vfs(_) => Err(not_supported("unlocking files")), + } + } 
+} + +impl sys_traits::FsFileIsTerminal for FsFileAdapter { + #[inline] + fn fs_file_is_terminal(&self) -> bool { + match self { + Self::Real(file) => file.fs_file_is_terminal(), + Self::Vfs(_) => false, + } + } +} + +impl sys_traits::BaseFsOpen for DenoRtSys { + type File = FsFileAdapter; + + fn base_fs_open( + &self, + path: &Path, + options: &sys_traits::OpenOptions, + ) -> std::io::Result { + if self.0.is_path_within(path) { + Ok(FsFileAdapter::Vfs(self.0.open_file(path)?)) + } else { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + Ok(FsFileAdapter::Real( + sys_traits::impls::RealSys.base_fs_open(path, options)?, + )) + } + } +} + +impl sys_traits::BaseFsSymlinkDir for DenoRtSys { + fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> { + self + .symlink_sync(src, dst, Some(FsFileType::Directory)) + .map_err(|err| err.into_io_error()) + } +} + +impl sys_traits::SystemRandom for DenoRtSys { + #[inline] + fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.sys_random(buf) + } +} + +impl sys_traits::SystemTimeNow for DenoRtSys { + #[inline] + fn sys_time_now(&self) -> SystemTime { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.sys_time_now() + } +} + +impl sys_traits::ThreadSleep for DenoRtSys { + #[inline] + fn thread_sleep(&self, dur: Duration) { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.thread_sleep(dur) + } +} + +impl sys_traits::EnvCurrentDir for DenoRtSys { + fn env_current_dir(&self) -> std::io::Result { + #[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.env_current_dir() + } +} + +impl sys_traits::BaseEnvVar for DenoRtSys { + fn base_env_var_os( + &self, + key: &std::ffi::OsStr, + ) -> Option { + 
#[allow(clippy::disallowed_types)] // ok because we're implementing the fs + sys_traits::impls::RealSys.base_env_var_os(key) + } +} + +#[derive(Debug)] +pub struct VfsRoot { + pub dir: VirtualDirectory, + pub root_path: PathBuf, + pub start_file_offset: u64, +} + +impl VfsRoot { + fn find_entry<'a>( + &'a self, + path: &Path, + case_sensitivity: FileSystemCaseSensitivity, + ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { + self.find_entry_inner(path, &mut HashSet::new(), case_sensitivity) + } + + fn find_entry_inner<'a>( + &'a self, + path: &Path, + seen: &mut HashSet, + case_sensitivity: FileSystemCaseSensitivity, + ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { + let mut path = Cow::Borrowed(path); + loop { + let (resolved_path, entry) = + self.find_entry_no_follow_inner(&path, seen, case_sensitivity)?; + match entry { + VfsEntryRef::Symlink(symlink) => { + if !seen.insert(path.to_path_buf()) { + return Err(std::io::Error::new( + std::io::ErrorKind::Other, + "circular symlinks", + )); + } + path = Cow::Owned(symlink.resolve_dest_from_root(&self.root_path)); + } + _ => { + return Ok((resolved_path, entry)); + } + } + } + } + + fn find_entry_no_follow( + &self, + path: &Path, + case_sensitivity: FileSystemCaseSensitivity, + ) -> std::io::Result<(PathBuf, VfsEntryRef)> { + self.find_entry_no_follow_inner(path, &mut HashSet::new(), case_sensitivity) + } + + fn find_entry_no_follow_inner<'a>( + &'a self, + path: &Path, + seen: &mut HashSet, + case_sensitivity: FileSystemCaseSensitivity, + ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { + let relative_path = match path.strip_prefix(&self.root_path) { + Ok(p) => p, + Err(_) => { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "path not found", + )); + } + }; + let mut final_path = self.root_path.clone(); + let mut current_entry = VfsEntryRef::Dir(&self.dir); + for component in relative_path.components() { + let component = component.as_os_str(); + let current_dir = match current_entry { + 
VfsEntryRef::Dir(dir) => { + final_path.push(component); + dir + } + VfsEntryRef::Symlink(symlink) => { + let dest = symlink.resolve_dest_from_root(&self.root_path); + let (resolved_path, entry) = + self.find_entry_inner(&dest, seen, case_sensitivity)?; + final_path = resolved_path; // overwrite with the new resolved path + match entry { + VfsEntryRef::Dir(dir) => { + final_path.push(component); + dir + } + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "path not found", + )); + } + } + } + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::NotFound, + "path not found", + )); + } + }; + let component = component.to_string_lossy(); + current_entry = current_dir + .entries + .get_by_name(&component, case_sensitivity) + .ok_or_else(|| { + std::io::Error::new(std::io::ErrorKind::NotFound, "path not found") + })? + .as_ref(); + } + + Ok((final_path, current_entry)) + } +} + +pub struct FileBackedVfsFile { + file: VirtualFile, + pos: RefCell, + vfs: Arc, +} + +impl FileBackedVfsFile { + pub fn seek(&self, pos: SeekFrom) -> std::io::Result { + match pos { + SeekFrom::Start(pos) => { + *self.pos.borrow_mut() = pos; + Ok(pos) + } + SeekFrom::End(offset) => { + if offset < 0 && -offset as u64 > self.file.offset.len { + let msg = "An attempt was made to move the file pointer before the beginning of the file."; + Err(std::io::Error::new( + std::io::ErrorKind::PermissionDenied, + msg, + )) + } else { + let mut current_pos = self.pos.borrow_mut(); + *current_pos = if offset >= 0 { + self.file.offset.len - (offset as u64) + } else { + self.file.offset.len + (-offset as u64) + }; + Ok(*current_pos) + } + } + SeekFrom::Current(offset) => { + let mut current_pos = self.pos.borrow_mut(); + if offset >= 0 { + *current_pos += offset as u64; + } else if -offset as u64 > *current_pos { + return Err(std::io::Error::new(std::io::ErrorKind::PermissionDenied, "An attempt was made to move the file pointer before the beginning of the file.")); + } else { 
+ *current_pos -= -offset as u64; + } + Ok(*current_pos) + } + } + } + + pub fn read_to_buf(&self, buf: &mut [u8]) -> std::io::Result { + let read_pos = { + let mut pos = self.pos.borrow_mut(); + let read_pos = *pos; + // advance the position due to the read + *pos = std::cmp::min(self.file.offset.len, *pos + buf.len() as u64); + read_pos + }; + self.vfs.read_file(&self.file, read_pos, buf) + } + + fn read_to_end(&self) -> FsResult> { + let read_pos = { + let mut pos = self.pos.borrow_mut(); + let read_pos = *pos; + // todo(dsherret): should this always set it to the end of the file? + if *pos < self.file.offset.len { + // advance the position due to the read + *pos = self.file.offset.len; + } + read_pos + }; + if read_pos > self.file.offset.len { + return Ok(Cow::Borrowed(&[])); + } + if read_pos == 0 { + Ok(self.vfs.read_file_all(&self.file)?) + } else { + let size = (self.file.offset.len - read_pos) as usize; + let mut buf = vec![0; size]; + self.vfs.read_file(&self.file, read_pos, &mut buf)?; + Ok(Cow::Owned(buf)) + } + } +} + +#[async_trait::async_trait(?Send)] +impl deno_io::fs::File for FileBackedVfsFile { + fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult { + self.read_to_buf(buf).map_err(Into::into) + } + async fn read_byob( + self: Rc, + mut buf: BufMutView, + ) -> FsResult<(usize, BufMutView)> { + // this is fast, no need to spawn a task + let nread = self.read_to_buf(&mut buf)?; + Ok((nread, buf)) + } + + fn write_sync(self: Rc, _buf: &[u8]) -> FsResult { + Err(FsError::NotSupported) + } + async fn write( + self: Rc, + _buf: BufView, + ) -> FsResult { + Err(FsError::NotSupported) + } + + fn write_all_sync(self: Rc, _buf: &[u8]) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn write_all(self: Rc, _buf: BufView) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn read_all_sync(self: Rc) -> FsResult> { + self.read_to_end() + } + async fn read_all_async(self: Rc) -> FsResult> { + // this is fast, no need to spawn a task + 
self.read_to_end() + } + + fn chmod_sync(self: Rc, _pathmode: u32) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn chmod_async(self: Rc, _mode: u32) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn seek_sync(self: Rc, pos: SeekFrom) -> FsResult { + self.seek(pos).map_err(|err| err.into()) + } + async fn seek_async(self: Rc, pos: SeekFrom) -> FsResult { + self.seek(pos).map_err(|err| err.into()) + } + + fn datasync_sync(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn datasync_async(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn sync_sync(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn sync_async(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn stat_sync(self: Rc) -> FsResult { + Err(FsError::NotSupported) + } + async fn stat_async(self: Rc) -> FsResult { + Err(FsError::NotSupported) + } + + fn lock_sync(self: Rc, _exclusive: bool) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn lock_async(self: Rc, _exclusive: bool) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn unlock_sync(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn unlock_async(self: Rc) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn truncate_sync(self: Rc, _len: u64) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn truncate_async(self: Rc, _len: u64) -> FsResult<()> { + Err(FsError::NotSupported) + } + + fn utime_sync( + self: Rc, + _atime_secs: i64, + _atime_nanos: u32, + _mtime_secs: i64, + _mtime_nanos: u32, + ) -> FsResult<()> { + Err(FsError::NotSupported) + } + async fn utime_async( + self: Rc, + _atime_secs: i64, + _atime_nanos: u32, + _mtime_secs: i64, + _mtime_nanos: u32, + ) -> FsResult<()> { + Err(FsError::NotSupported) + } + + // lower level functionality + fn as_stdio(self: Rc) -> FsResult { + Err(FsError::NotSupported) + } + fn backing_fd(self: Rc) -> Option { + None + } + fn try_clone_inner(self: Rc) -> FsResult> 
{ + Ok(self) + } +} + +#[derive(Debug, Clone)] +pub struct FileBackedVfsDirEntry { + pub parent_path: PathBuf, + pub metadata: FileBackedVfsMetadata, +} + +#[derive(Debug, Clone)] +pub struct FileBackedVfsMetadata { + pub name: String, + pub file_type: sys_traits::FileType, + pub len: u64, +} + +impl FileBackedVfsMetadata { + pub fn from_vfs_entry_ref(vfs_entry: VfsEntryRef) -> Self { + FileBackedVfsMetadata { + file_type: match vfs_entry { + VfsEntryRef::Dir(_) => sys_traits::FileType::Dir, + VfsEntryRef::File(_) => sys_traits::FileType::File, + VfsEntryRef::Symlink(_) => sys_traits::FileType::Symlink, + }, + name: vfs_entry.name().to_string(), + len: match vfs_entry { + VfsEntryRef::Dir(_) => 0, + VfsEntryRef::File(file) => file.offset.len, + VfsEntryRef::Symlink(_) => 0, + }, + } + } + pub fn as_fs_stat(&self) -> FsStat { + FsStat { + is_directory: self.file_type == sys_traits::FileType::Dir, + is_file: self.file_type == sys_traits::FileType::File, + is_symlink: self.file_type == sys_traits::FileType::Symlink, + atime: None, + birthtime: None, + mtime: None, + ctime: None, + blksize: 0, + size: self.len, + dev: 0, + ino: 0, + mode: 0, + nlink: 0, + uid: 0, + gid: 0, + rdev: 0, + blocks: 0, + is_block_device: false, + is_char_device: false, + is_fifo: false, + is_socket: false, + } + } +} + +#[derive(Debug)] +pub struct FileBackedVfs { + vfs_data: Cow<'static, [u8]>, + fs_root: VfsRoot, + case_sensitivity: FileSystemCaseSensitivity, +} + +impl FileBackedVfs { + pub fn new( + data: Cow<'static, [u8]>, + fs_root: VfsRoot, + case_sensitivity: FileSystemCaseSensitivity, + ) -> Self { + Self { + vfs_data: data, + fs_root, + case_sensitivity, + } + } + + pub fn root(&self) -> &Path { + &self.fs_root.root_path + } + + pub fn is_path_within(&self, path: &Path) -> bool { + path.starts_with(&self.fs_root.root_path) + } + + pub fn open_file( + self: &Arc, + path: &Path, + ) -> std::io::Result { + let file = self.file_entry(path)?; + Ok(FileBackedVfsFile { + file: 
file.clone(), + vfs: self.clone(), + pos: Default::default(), + }) + } + + pub fn read_dir(&self, path: &Path) -> std::io::Result> { + let dir = self.dir_entry(path)?; + Ok( + dir + .entries + .iter() + .map(|entry| FsDirEntry { + name: entry.name().to_string(), + is_file: matches!(entry, VfsEntry::File(_)), + is_directory: matches!(entry, VfsEntry::Dir(_)), + is_symlink: matches!(entry, VfsEntry::Symlink(_)), + }) + .collect(), + ) + } + + pub fn read_dir_with_metadata<'a>( + &'a self, + path: &Path, + ) -> std::io::Result + 'a> { + let dir = self.dir_entry(path)?; + let path = path.to_path_buf(); + Ok(dir.entries.iter().map(move |entry| FileBackedVfsDirEntry { + parent_path: path.to_path_buf(), + metadata: FileBackedVfsMetadata::from_vfs_entry_ref(entry.as_ref()), + })) + } + + pub fn read_link(&self, path: &Path) -> std::io::Result { + let (_, entry) = self + .fs_root + .find_entry_no_follow(path, self.case_sensitivity)?; + match entry { + VfsEntryRef::Symlink(symlink) => { + Ok(symlink.resolve_dest_from_root(&self.fs_root.root_path)) + } + VfsEntryRef::Dir(_) | VfsEntryRef::File(_) => Err(std::io::Error::new( + std::io::ErrorKind::Other, + "not a symlink", + )), + } + } + + pub fn lstat(&self, path: &Path) -> std::io::Result { + let (_, entry) = self + .fs_root + .find_entry_no_follow(path, self.case_sensitivity)?; + Ok(FileBackedVfsMetadata::from_vfs_entry_ref(entry)) + } + + pub fn stat(&self, path: &Path) -> std::io::Result { + let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?; + Ok(FileBackedVfsMetadata::from_vfs_entry_ref(entry)) + } + + pub fn canonicalize(&self, path: &Path) -> std::io::Result { + let (path, _) = self.fs_root.find_entry(path, self.case_sensitivity)?; + Ok(path) + } + + pub fn read_file_all( + &self, + file: &VirtualFile, + ) -> std::io::Result> { + self.read_file_offset_with_len(file.offset) + } + + pub fn read_file_offset_with_len( + &self, + offset_with_len: OffsetWithLength, + ) -> std::io::Result> { + let 
read_range = + self.get_read_range(offset_with_len, 0, offset_with_len.len)?; + match &self.vfs_data { + Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])), + Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())), + } + } + + pub fn read_file( + &self, + file: &VirtualFile, + pos: u64, + buf: &mut [u8], + ) -> std::io::Result { + let read_range = self.get_read_range(file.offset, pos, buf.len() as u64)?; + let read_len = read_range.len(); + buf[..read_len].copy_from_slice(&self.vfs_data[read_range]); + Ok(read_len) + } + + fn get_read_range( + &self, + file_offset_and_len: OffsetWithLength, + pos: u64, + len: u64, + ) -> std::io::Result> { + if pos > file_offset_and_len.len { + return Err(std::io::Error::new( + std::io::ErrorKind::UnexpectedEof, + "unexpected EOF", + )); + } + let file_offset = + self.fs_root.start_file_offset + file_offset_and_len.offset; + let start = file_offset + pos; + let end = file_offset + std::cmp::min(pos + len, file_offset_and_len.len); + Ok(start as usize..end as usize) + } + + pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> { + let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?; + match entry { + VfsEntryRef::Dir(dir) => Ok(dir), + VfsEntryRef::Symlink(_) => unreachable!(), + VfsEntryRef::File(_) => Err(std::io::Error::new( + std::io::ErrorKind::Other, + "path is a file", + )), + } + } + + pub fn file_entry(&self, path: &Path) -> std::io::Result<&VirtualFile> { + let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?; + match entry { + VfsEntryRef::Dir(_) => Err(std::io::Error::new( + std::io::ErrorKind::Other, + "path is a directory", + )), + VfsEntryRef::Symlink(_) => unreachable!(), + VfsEntryRef::File(file) => Ok(file), + } + } +} + +#[cfg(test)] +mod test { + use std::io::Write; + + use deno_lib::standalone::virtual_fs::VfsBuilder; + use test_util::assert_contains; + use test_util::TempDir; + + use super::*; + + #[track_caller] + fn read_file(vfs: 
&FileBackedVfs, path: &Path) -> String { + let file = vfs.file_entry(path).unwrap(); + String::from_utf8(vfs.read_file_all(file).unwrap().into_owned()).unwrap() + } + + #[test] + fn builds_and_uses_virtual_fs() { + let temp_dir = TempDir::new(); + // we canonicalize the temp directory because the vfs builder + // will canonicalize the root path + let src_path = temp_dir.path().canonicalize().join("src"); + src_path.create_dir_all(); + src_path.join("sub_dir").create_dir_all(); + src_path.join("e.txt").write("e"); + src_path.symlink_file("e.txt", "sub_dir/e.txt"); + let src_path = src_path.to_path_buf(); + let mut builder = VfsBuilder::new(); + builder + .add_file_with_data_raw(&src_path.join("a.txt"), "data".into()) + .unwrap(); + builder + .add_file_with_data_raw(&src_path.join("b.txt"), "data".into()) + .unwrap(); + assert_eq!(builder.files_len(), 1); // because duplicate data + builder + .add_file_with_data_raw(&src_path.join("c.txt"), "c".into()) + .unwrap(); + builder + .add_file_with_data_raw( + &src_path.join("sub_dir").join("d.txt"), + "d".into(), + ) + .unwrap(); + builder.add_file_at_path(&src_path.join("e.txt")).unwrap(); + builder + .add_symlink(&src_path.join("sub_dir").join("e.txt")) + .unwrap(); + + // get the virtual fs + let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); + + assert_eq!(read_file(&virtual_fs, &dest_path.join("a.txt")), "data"); + assert_eq!(read_file(&virtual_fs, &dest_path.join("b.txt")), "data"); + + // attempt reading a symlink + assert_eq!( + read_file(&virtual_fs, &dest_path.join("sub_dir").join("e.txt")), + "e", + ); + + // canonicalize symlink + assert_eq!( + virtual_fs + .canonicalize(&dest_path.join("sub_dir").join("e.txt")) + .unwrap(), + dest_path.join("e.txt"), + ); + + // metadata + assert_eq!( + virtual_fs + .lstat(&dest_path.join("sub_dir").join("e.txt")) + .unwrap() + .file_type, + sys_traits::FileType::Symlink, + ); + assert_eq!( + virtual_fs + .stat(&dest_path.join("sub_dir").join("e.txt")) + 
.unwrap() + .file_type, + sys_traits::FileType::File, + ); + assert_eq!( + virtual_fs + .stat(&dest_path.join("sub_dir")) + .unwrap() + .file_type, + sys_traits::FileType::Dir, + ); + assert_eq!( + virtual_fs.stat(&dest_path.join("e.txt")).unwrap().file_type, + sys_traits::FileType::File + ); + } + + #[test] + fn test_include_dir_recursive() { + let temp_dir = TempDir::new(); + let temp_dir_path = temp_dir.path().canonicalize(); + temp_dir.create_dir_all("src/nested/sub_dir"); + temp_dir.write("src/a.txt", "data"); + temp_dir.write("src/b.txt", "data"); + temp_dir.path().symlink_dir( + temp_dir_path.join("src/nested/sub_dir"), + temp_dir_path.join("src/sub_dir_link"), + ); + temp_dir.write("src/nested/sub_dir/c.txt", "c"); + + // build and create the virtual fs + let src_path = temp_dir_path.join("src").to_path_buf(); + let mut builder = VfsBuilder::new(); + builder.add_dir_recursive(&src_path).unwrap(); + let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); + + assert_eq!(read_file(&virtual_fs, &dest_path.join("a.txt")), "data",); + assert_eq!(read_file(&virtual_fs, &dest_path.join("b.txt")), "data",); + + assert_eq!( + read_file( + &virtual_fs, + &dest_path.join("nested").join("sub_dir").join("c.txt") + ), + "c", + ); + assert_eq!( + read_file(&virtual_fs, &dest_path.join("sub_dir_link").join("c.txt")), + "c", + ); + assert_eq!( + virtual_fs + .lstat(&dest_path.join("sub_dir_link")) + .unwrap() + .file_type, + sys_traits::FileType::Symlink, + ); + + assert_eq!( + virtual_fs + .canonicalize(&dest_path.join("sub_dir_link").join("c.txt")) + .unwrap(), + dest_path.join("nested").join("sub_dir").join("c.txt"), + ); + } + + fn into_virtual_fs( + builder: VfsBuilder, + temp_dir: &TempDir, + ) -> (PathBuf, FileBackedVfs) { + let virtual_fs_file = temp_dir.path().join("virtual_fs"); + let vfs = builder.build(); + { + let mut file = std::fs::File::create(&virtual_fs_file).unwrap(); + for file_data in &vfs.files { + file.write_all(file_data).unwrap(); + } + } 
+ let dest_path = temp_dir.path().join("dest"); + let data = std::fs::read(&virtual_fs_file).unwrap(); + ( + dest_path.to_path_buf(), + FileBackedVfs::new( + Cow::Owned(data), + VfsRoot { + dir: VirtualDirectory { + name: "".to_string(), + entries: vfs.entries, + }, + root_path: dest_path.to_path_buf(), + start_file_offset: 0, + }, + FileSystemCaseSensitivity::Sensitive, + ), + ) + } + + #[test] + fn circular_symlink() { + let temp_dir = TempDir::new(); + let src_path = temp_dir.path().canonicalize().join("src"); + src_path.create_dir_all(); + src_path.symlink_file("a.txt", "b.txt"); + src_path.symlink_file("b.txt", "c.txt"); + src_path.symlink_file("c.txt", "a.txt"); + let src_path = src_path.to_path_buf(); + let mut builder = VfsBuilder::new(); + let err = builder + .add_symlink(src_path.join("a.txt").as_path()) + .unwrap_err(); + assert_contains!(err.to_string(), "Circular symlink detected",); + } + + #[tokio::test] + async fn test_open_file() { + let temp_dir = TempDir::new(); + let temp_path = temp_dir.path().canonicalize(); + let mut builder = VfsBuilder::new(); + builder + .add_file_with_data_raw( + temp_path.join("a.txt").as_path(), + "0123456789".to_string().into_bytes(), + ) + .unwrap(); + let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); + let virtual_fs = Arc::new(virtual_fs); + let file = virtual_fs.open_file(&dest_path.join("a.txt")).unwrap(); + file.seek(SeekFrom::Current(2)).unwrap(); + let mut buf = vec![0; 2]; + file.read_to_buf(&mut buf).unwrap(); + assert_eq!(buf, b"23"); + file.read_to_buf(&mut buf).unwrap(); + assert_eq!(buf, b"45"); + file.seek(SeekFrom::Current(-4)).unwrap(); + file.read_to_buf(&mut buf).unwrap(); + assert_eq!(buf, b"23"); + file.seek(SeekFrom::Start(2)).unwrap(); + file.read_to_buf(&mut buf).unwrap(); + assert_eq!(buf, b"23"); + file.seek(SeekFrom::End(2)).unwrap(); + file.read_to_buf(&mut buf).unwrap(); + assert_eq!(buf, b"89"); + file.seek(SeekFrom::Current(-8)).unwrap(); + file.read_to_buf(&mut 
buf).unwrap(); + assert_eq!(buf, b"23"); + assert_eq!( + file + .seek(SeekFrom::Current(-5)) + .unwrap_err() + .to_string(), + "An attempt was made to move the file pointer before the beginning of the file." + ); + // go beyond the file length, then back + file.seek(SeekFrom::Current(40)).unwrap(); + file.seek(SeekFrom::Current(-38)).unwrap(); + let file = Rc::new(file); + let read_buf = file.clone().read(2).await.unwrap(); + assert_eq!(read_buf.to_vec(), b"67"); + file.clone().seek_sync(SeekFrom::Current(-2)).unwrap(); + + // read to the end of the file + let all_buf = file.clone().read_all_sync().unwrap(); + assert_eq!(all_buf.to_vec(), b"6789"); + file.clone().seek_sync(SeekFrom::Current(-9)).unwrap(); + + // try try_clone_inner and read_all_async + let all_buf = file + .try_clone_inner() + .unwrap() + .read_all_async() + .await + .unwrap(); + assert_eq!(all_buf.to_vec(), b"123456789"); + } +} diff --git a/cli/rt/integration_tests_runner.rs b/cli/rt/integration_tests_runner.rs new file mode 100644 index 00000000000000..63f2abe4606c79 --- /dev/null +++ b/cli/rt/integration_tests_runner.rs @@ -0,0 +1,5 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +pub fn main() { + // this file exists to cause the executable to be built when running cargo test +} diff --git a/cli/mainrt.rs b/cli/rt/main.rs similarity index 66% rename from cli/mainrt.rs rename to cli/rt/main.rs index 8eea3f85ed5ae4..60b5a2fb961e15 100644 --- a/cli/mainrt.rs +++ b/cli/rt/main.rs @@ -1,46 +1,27 @@ // Copyright 2018-2025 the Deno authors. MIT license. -// Allow unused code warnings because we share -// code between the two bin targets. 
-#![allow(dead_code)] -#![allow(unused_imports)] - -mod standalone; - -mod args; -mod cache; -mod emit; -mod file_fetcher; -mod http_util; -mod js; -mod node; -mod npm; -mod resolver; -mod shared; -mod sys; -mod task_runner; -mod util; -mod version; -mod worker; - use std::borrow::Cow; -use std::collections::HashMap; use std::env; -use std::env::current_exe; use std::sync::Arc; use deno_core::error::AnyError; use deno_core::error::CoreError; -use deno_core::error::JsError; +use deno_lib::util::result::any_and_jserrorbox_downcast_ref; +use deno_lib::version::otel_runtime_config; +use deno_runtime::deno_telemetry::OtelConfig; use deno_runtime::fmt_errors::format_js_error; use deno_runtime::tokio_util::create_and_run_current_thread_with_maybe_metrics; -pub use deno_runtime::UNSTABLE_GRANULAR_FLAGS; use deno_terminal::colors; use indexmap::IndexMap; -use standalone::DenoCompileFileSystem; -use crate::args::Flags; -use crate::util::result::any_and_jserrorbox_downcast_ref; +use self::binary::extract_standalone; +use self::file_system::DenoRtSys; + +mod binary; +mod code_cache; +mod file_system; +mod node; +mod run; pub(crate) fn unstable_exit_cb(feature: &str, api_name: &str) { log::error!( @@ -87,27 +68,26 @@ fn load_env_vars(env_vars: &IndexMap) { fn main() { deno_runtime::deno_permissions::mark_standalone(); let args: Vec<_> = env::args_os().collect(); - let standalone = standalone::extract_standalone(Cow::Owned(args)); + let standalone = extract_standalone(Cow::Owned(args)); let future = async move { match standalone { Ok(Some(data)) => { - deno_telemetry::init( - crate::args::otel_runtime_config(), + deno_runtime::deno_telemetry::init( + otel_runtime_config(), &data.metadata.otel_config, )?; - util::logger::init( + init_logging( data.metadata.log_level, Some(data.metadata.otel_config.clone()), ); load_env_vars(&data.metadata.env_vars_from_env_file); - let fs = DenoCompileFileSystem::new(data.vfs.clone()); - let sys = crate::sys::CliSys::DenoCompile(fs.clone()); - 
let exit_code = standalone::run(Arc::new(fs), sys, data).await?; + let sys = DenoRtSys::new(data.vfs.clone()); + let exit_code = run::run(Arc::new(sys.clone()), sys, data).await?; deno_runtime::exit(exit_code); } Ok(None) => Ok(()), Err(err) => { - util::logger::init(None, None); + init_logging(None, None); Err(err) } } @@ -115,3 +95,15 @@ fn main() { unwrap_or_exit(create_and_run_current_thread_with_maybe_metrics(future)); } + +fn init_logging( + maybe_level: Option, + otel_config: Option, +) { + deno_lib::util::logger::init(deno_lib::util::logger::InitLoggingOptions { + maybe_level, + otel_config, + on_log_start: || {}, + on_log_end: || {}, + }) +} diff --git a/cli/rt/node.rs b/cli/rt/node.rs new file mode 100644 index 00000000000000..5d2ba5c4e8035f --- /dev/null +++ b/cli/rt/node.rs @@ -0,0 +1,165 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +use std::borrow::Cow; +use std::sync::Arc; + +use deno_core::url::Url; +use deno_error::JsErrorBox; +use deno_lib::loader::NpmModuleLoader; +use deno_lib::standalone::binary::CjsExportAnalysisEntry; +use deno_media_type::MediaType; +use deno_resolver::npm::DenoInNpmPackageChecker; +use deno_resolver::npm::NpmReqResolver; +use deno_runtime::deno_fs::FileSystem; +use node_resolver::analyze::CjsAnalysis; +use node_resolver::analyze::CjsAnalysisExports; +use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::DenoIsBuiltInNodeModuleChecker; + +use crate::binary::StandaloneModules; +use crate::file_system::DenoRtSys; + +pub type DenoRtCjsTracker = + deno_resolver::cjs::CjsTracker; +pub type DenoRtNpmResolver = deno_resolver::npm::NpmResolver; +pub type DenoRtNpmModuleLoader = NpmModuleLoader< + CjsCodeAnalyzer, + DenoInNpmPackageChecker, + DenoIsBuiltInNodeModuleChecker, + DenoRtNpmResolver, + DenoRtSys, +>; +pub type DenoRtNodeCodeTranslator = NodeCodeTranslator< + CjsCodeAnalyzer, + DenoInNpmPackageChecker, + DenoIsBuiltInNodeModuleChecker, + DenoRtNpmResolver, + DenoRtSys, +>; +pub type 
DenoRtNodeResolver = deno_runtime::deno_node::NodeResolver< + DenoInNpmPackageChecker, + DenoRtNpmResolver, + DenoRtSys, +>; +pub type DenoRtNpmReqResolver = NpmReqResolver< + DenoInNpmPackageChecker, + DenoIsBuiltInNodeModuleChecker, + DenoRtNpmResolver, + DenoRtSys, +>; + +pub struct CjsCodeAnalyzer { + cjs_tracker: Arc, + modules: Arc, + sys: DenoRtSys, +} + +impl CjsCodeAnalyzer { + pub fn new( + cjs_tracker: Arc, + modules: Arc, + sys: DenoRtSys, + ) -> Self { + Self { + cjs_tracker, + modules, + sys, + } + } + + fn inner_cjs_analysis<'a>( + &self, + specifier: &Url, + source: Cow<'a, str>, + ) -> Result, JsErrorBox> { + let media_type = MediaType::from_specifier(specifier); + if media_type == MediaType::Json { + return Ok(CjsAnalysis::Cjs(CjsAnalysisExports { + exports: vec![], + reexports: vec![], + })); + } + + let cjs_tracker = self.cjs_tracker.clone(); + let is_maybe_cjs = cjs_tracker + .is_maybe_cjs(specifier, media_type) + .map_err(JsErrorBox::from_err)?; + let analysis = if is_maybe_cjs { + let data = self + .modules + .read(specifier)? + .and_then(|d| d.cjs_export_analysis); + match data { + Some(data) => { + let data: CjsExportAnalysisEntry = bincode::deserialize(&data) + .map_err(|err| JsErrorBox::generic(err.to_string()))?; + match data { + CjsExportAnalysisEntry::Esm => { + cjs_tracker.set_is_known_script(specifier, false); + CjsAnalysis::Esm(source) + } + CjsExportAnalysisEntry::Cjs(analysis) => { + cjs_tracker.set_is_known_script(specifier, true); + CjsAnalysis::Cjs(analysis) + } + } + } + None => { + if log::log_enabled!(log::Level::Debug) { + if self.sys.is_specifier_in_vfs(specifier) { + log::debug!( + "No CJS export analysis was stored for '{}'. Assuming ESM. This might indicate a bug in Deno.", + specifier + ); + } else { + log::debug!( + "Analyzing potentially CommonJS files is not supported at runtime in a compiled executable ({}). 
Assuming ESM.", + specifier + ); + } + } + // assume ESM as we don't have access to swc here + CjsAnalysis::Esm(source) + } + } + } else { + CjsAnalysis::Esm(source) + }; + + Ok(analysis) + } +} + +#[async_trait::async_trait(?Send)] +impl node_resolver::analyze::CjsCodeAnalyzer for CjsCodeAnalyzer { + async fn analyze_cjs<'a>( + &self, + specifier: &Url, + source: Option>, + ) -> Result, JsErrorBox> { + let source = match source { + Some(source) => source, + None => { + if let Ok(path) = deno_path_util::url_to_file_path(specifier) { + // todo(dsherret): should this use the sync method instead? + if let Ok(source_from_file) = + self.sys.read_text_file_lossy_async(path, None).await + { + source_from_file + } else { + return Ok(CjsAnalysis::Cjs(CjsAnalysisExports { + exports: vec![], + reexports: vec![], + })); + } + } else { + return Ok(CjsAnalysis::Cjs(CjsAnalysisExports { + exports: vec![], + reexports: vec![], + })); + } + } + }; + self.inner_cjs_analysis(specifier, source) + } +} diff --git a/cli/rt/run.rs b/cli/rt/run.rs new file mode 100644 index 00000000000000..53190f24339e13 --- /dev/null +++ b/cli/rt/run.rs @@ -0,0 +1,1009 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
+ +use std::borrow::Cow; +use std::path::PathBuf; +use std::rc::Rc; +use std::sync::Arc; +use std::sync::OnceLock; + +use deno_cache_dir::npm::NpmCacheDir; +use deno_config::workspace::MappedResolution; +use deno_config::workspace::ResolverWorkspaceJsrPackage; +use deno_config::workspace::WorkspaceResolver; +use deno_core::error::AnyError; +use deno_core::error::ModuleLoaderError; +use deno_core::futures::future::LocalBoxFuture; +use deno_core::futures::FutureExt; +use deno_core::url::Url; +use deno_core::v8_set_flags; +use deno_core::FastString; +use deno_core::FeatureChecker; +use deno_core::ModuleLoader; +use deno_core::ModuleSourceCode; +use deno_core::ModuleType; +use deno_core::RequestedModuleType; +use deno_core::ResolutionKind; +use deno_core::SourceCodeCacheInfo; +use deno_error::JsErrorBox; +use deno_lib::args::get_root_cert_store; +use deno_lib::args::npm_pkg_req_ref_to_binary_command; +use deno_lib::args::CaData; +use deno_lib::args::RootCertStoreLoadError; +use deno_lib::loader::NpmModuleLoader; +use deno_lib::npm::create_npm_process_state_provider; +use deno_lib::npm::NpmRegistryReadPermissionChecker; +use deno_lib::npm::NpmRegistryReadPermissionCheckerMode; +use deno_lib::standalone::binary::NodeModules; +use deno_lib::util::hash::FastInsecureHasher; +use deno_lib::util::text_encoding::from_utf8_lossy_cow; +use deno_lib::util::text_encoding::from_utf8_lossy_owned; +use deno_lib::util::v8::construct_v8_flags; +use deno_lib::worker::CreateModuleLoaderResult; +use deno_lib::worker::LibMainWorkerFactory; +use deno_lib::worker::LibMainWorkerOptions; +use deno_lib::worker::ModuleLoaderFactory; +use deno_lib::worker::StorageKeyResolver; +use deno_media_type::MediaType; +use deno_npm::npm_rc::ResolvedNpmRc; +use deno_npm::resolution::NpmResolutionSnapshot; +use deno_package_json::PackageJsonDepValue; +use deno_resolver::cjs::CjsTracker; +use deno_resolver::cjs::IsCjsResolutionMode; +use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions; +use 
deno_resolver::npm::managed::ManagedNpmResolverCreateOptions; +use deno_resolver::npm::managed::NpmResolutionCell; +use deno_resolver::npm::ByonmNpmResolverCreateOptions; +use deno_resolver::npm::CreateInNpmPkgCheckerOptions; +use deno_resolver::npm::DenoInNpmPackageChecker; +use deno_resolver::npm::NpmReqResolver; +use deno_resolver::npm::NpmReqResolverOptions; +use deno_resolver::npm::NpmResolver; +use deno_resolver::npm::NpmResolverCreateOptions; +use deno_runtime::code_cache::CodeCache; +use deno_runtime::deno_fs::FileSystem; +use deno_runtime::deno_node::create_host_defined_options; +use deno_runtime::deno_node::NodeRequireLoader; +use deno_runtime::deno_permissions::Permissions; +use deno_runtime::deno_permissions::PermissionsContainer; +use deno_runtime::deno_tls::rustls::RootCertStore; +use deno_runtime::deno_tls::RootCertStoreProvider; +use deno_runtime::deno_web::BlobStore; +use deno_runtime::permissions::RuntimePermissionDescriptorParser; +use deno_runtime::WorkerExecutionMode; +use deno_runtime::WorkerLogLevel; +use deno_semver::npm::NpmPackageReqReference; +use node_resolver::analyze::NodeCodeTranslator; +use node_resolver::errors::ClosestPkgJsonError; +use node_resolver::DenoIsBuiltInNodeModuleChecker; +use node_resolver::NodeResolutionKind; +use node_resolver::NodeResolver; +use node_resolver::PackageJsonResolver; +use node_resolver::PackageJsonThreadLocalCache; +use node_resolver::ResolutionMode; + +use crate::binary::DenoCompileModuleSource; +use crate::binary::StandaloneData; +use crate::binary::StandaloneModules; +use crate::code_cache::DenoCompileCodeCache; +use crate::file_system::DenoRtSys; +use crate::file_system::FileBackedVfs; +use crate::node::CjsCodeAnalyzer; +use crate::node::DenoRtCjsTracker; +use crate::node::DenoRtNodeCodeTranslator; +use crate::node::DenoRtNodeResolver; +use crate::node::DenoRtNpmModuleLoader; +use crate::node::DenoRtNpmReqResolver; + +struct SharedModuleLoaderState { + cjs_tracker: Arc, + code_cache: Option>, + 
modules: Arc, + node_code_translator: Arc, + node_resolver: Arc, + npm_module_loader: Arc, + npm_registry_permission_checker: NpmRegistryReadPermissionChecker, + npm_req_resolver: Arc, + vfs: Arc, + workspace_resolver: WorkspaceResolver, +} + +impl SharedModuleLoaderState { + fn get_code_cache( + &self, + specifier: &Url, + source: &[u8], + ) -> Option { + let Some(code_cache) = &self.code_cache else { + return None; + }; + if !code_cache.enabled() { + return None; + } + // deno version is already included in the root cache key + let hash = FastInsecureHasher::new_without_deno_version() + .write_hashable(source) + .finish(); + let data = code_cache.get_sync( + specifier, + deno_runtime::code_cache::CodeCacheType::EsModule, + hash, + ); + Some(SourceCodeCacheInfo { + hash, + data: data.map(Cow::Owned), + }) + } +} + +#[derive(Clone)] +struct EmbeddedModuleLoader { + shared: Arc, +} + +impl std::fmt::Debug for EmbeddedModuleLoader { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("EmbeddedModuleLoader").finish() + } +} + +impl ModuleLoader for EmbeddedModuleLoader { + fn resolve( + &self, + raw_specifier: &str, + referrer: &str, + kind: ResolutionKind, + ) -> Result { + let referrer = if referrer == "." { + if kind != ResolutionKind::MainModule { + return Err( + JsErrorBox::generic(format!( + "Expected to resolve main module, got {:?} instead.", + kind + )) + .into(), + ); + } + let current_dir = std::env::current_dir().unwrap(); + deno_core::resolve_path(".", ¤t_dir) + .map_err(JsErrorBox::from_err)? + } else { + Url::parse(referrer).map_err(|err| { + JsErrorBox::type_error(format!( + "Referrer uses invalid specifier: {}", + err + )) + })? + }; + let referrer_kind = if self + .shared + .cjs_tracker + .is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer)) + .map_err(JsErrorBox::from_err)? 
+ { + ResolutionMode::Require + } else { + ResolutionMode::Import + }; + + if self.shared.node_resolver.in_npm_package(&referrer) { + return Ok( + self + .shared + .node_resolver + .resolve( + raw_specifier, + &referrer, + referrer_kind, + NodeResolutionKind::Execution, + ) + .and_then(|res| res.into_url()) + .map_err(JsErrorBox::from_err)?, + ); + } + + let mapped_resolution = self + .shared + .workspace_resolver + .resolve(raw_specifier, &referrer); + + match mapped_resolution { + Ok(MappedResolution::WorkspaceJsrPackage { specifier, .. }) => { + Ok(specifier) + } + Ok(MappedResolution::WorkspaceNpmPackage { + target_pkg_json: pkg_json, + sub_path, + .. + }) => Ok( + self + .shared + .node_resolver + .resolve_package_subpath_from_deno_module( + pkg_json.dir_path(), + sub_path.as_deref(), + Some(&referrer), + referrer_kind, + NodeResolutionKind::Execution, + ) + .map_err(JsErrorBox::from_err) + .and_then(|url_or_path| { + url_or_path.into_url().map_err(JsErrorBox::from_err) + })?, + ), + Ok(MappedResolution::PackageJson { + dep_result, + sub_path, + alias, + .. + }) => match dep_result + .as_ref() + .map_err(|e| JsErrorBox::from_err(e.clone()))? 
+ { + PackageJsonDepValue::Req(req) => Ok( + self + .shared + .npm_req_resolver + .resolve_req_with_sub_path( + req, + sub_path.as_deref(), + &referrer, + referrer_kind, + NodeResolutionKind::Execution, + ) + .map_err(JsErrorBox::from_err) + .and_then(|url_or_path| { + url_or_path.into_url().map_err(JsErrorBox::from_err) + })?, + ), + PackageJsonDepValue::Workspace(version_req) => { + let pkg_folder = self + .shared + .workspace_resolver + .resolve_workspace_pkg_json_folder_for_pkg_json_dep( + alias, + version_req, + ) + .map_err(JsErrorBox::from_err)?; + Ok( + self + .shared + .node_resolver + .resolve_package_subpath_from_deno_module( + pkg_folder, + sub_path.as_deref(), + Some(&referrer), + referrer_kind, + NodeResolutionKind::Execution, + ) + .map_err(JsErrorBox::from_err) + .and_then(|url_or_path| { + url_or_path.into_url().map_err(JsErrorBox::from_err) + })?, + ) + } + }, + Ok(MappedResolution::Normal { specifier, .. }) + | Ok(MappedResolution::ImportMap { specifier, .. }) => { + if let Ok(reference) = + NpmPackageReqReference::from_specifier(&specifier) + { + return Ok( + self + .shared + .npm_req_resolver + .resolve_req_reference( + &reference, + &referrer, + referrer_kind, + NodeResolutionKind::Execution, + ) + .map_err(JsErrorBox::from_err) + .and_then(|url_or_path| { + url_or_path.into_url().map_err(JsErrorBox::from_err) + })?, + ); + } + + if specifier.scheme() == "jsr" { + if let Some(specifier) = self + .shared + .modules + .resolve_specifier(&specifier) + .map_err(JsErrorBox::from_err)? 
+ { + return Ok(specifier.clone()); + } + } + + Ok( + self + .shared + .node_resolver + .handle_if_in_node_modules(&specifier) + .unwrap_or(specifier), + ) + } + Err(err) + if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" => + { + let maybe_res = self + .shared + .npm_req_resolver + .resolve_if_for_npm_pkg( + raw_specifier, + &referrer, + referrer_kind, + NodeResolutionKind::Execution, + ) + .map_err(JsErrorBox::from_err)?; + if let Some(res) = maybe_res { + return Ok(res.into_url().map_err(JsErrorBox::from_err)?); + } + Err(JsErrorBox::from_err(err).into()) + } + Err(err) => Err(JsErrorBox::from_err(err).into()), + } + } + + fn get_host_defined_options<'s>( + &self, + scope: &mut deno_core::v8::HandleScope<'s>, + name: &str, + ) -> Option> { + let name = Url::parse(name).ok()?; + if self.shared.node_resolver.in_npm_package(&name) { + Some(create_host_defined_options(scope)) + } else { + None + } + } + + fn load( + &self, + original_specifier: &Url, + maybe_referrer: Option<&Url>, + _is_dynamic: bool, + _requested_module_type: RequestedModuleType, + ) -> deno_core::ModuleLoadResponse { + if original_specifier.scheme() == "data" { + let data_url_text = + match deno_media_type::data_url::RawDataUrl::parse(original_specifier) + .and_then(|url| url.decode()) + { + Ok(response) => response, + Err(err) => { + return deno_core::ModuleLoadResponse::Sync(Err( + JsErrorBox::type_error(format!("{:#}", err)).into(), + )); + } + }; + return deno_core::ModuleLoadResponse::Sync(Ok( + deno_core::ModuleSource::new( + deno_core::ModuleType::JavaScript, + ModuleSourceCode::String(data_url_text.into()), + original_specifier, + None, + ), + )); + } + + if self.shared.node_resolver.in_npm_package(original_specifier) { + let shared = self.shared.clone(); + let original_specifier = original_specifier.clone(); + let maybe_referrer = maybe_referrer.cloned(); + return deno_core::ModuleLoadResponse::Async( + async move { + let code_source = shared + .npm_module_loader + 
.load(&original_specifier, maybe_referrer.as_ref()) + .await + .map_err(JsErrorBox::from_err)?; + let code_cache_entry = shared.get_code_cache( + &code_source.found_url, + code_source.code.as_bytes(), + ); + Ok(deno_core::ModuleSource::new_with_redirect( + match code_source.media_type { + MediaType::Json => ModuleType::Json, + _ => ModuleType::JavaScript, + }, + code_source.code, + &original_specifier, + &code_source.found_url, + code_cache_entry, + )) + } + .boxed_local(), + ); + } + + match self.shared.modules.read(original_specifier) { + Ok(Some(module)) => { + let media_type = module.media_type; + let (module_specifier, module_type, module_source) = + module.into_parts(); + let is_maybe_cjs = match self + .shared + .cjs_tracker + .is_maybe_cjs(original_specifier, media_type) + { + Ok(is_maybe_cjs) => is_maybe_cjs, + Err(err) => { + return deno_core::ModuleLoadResponse::Sync(Err( + JsErrorBox::type_error(format!("{:?}", err)).into(), + )); + } + }; + if is_maybe_cjs { + let original_specifier = original_specifier.clone(); + let module_specifier = module_specifier.clone(); + let shared = self.shared.clone(); + deno_core::ModuleLoadResponse::Async( + async move { + let source = match module_source { + DenoCompileModuleSource::String(string) => { + Cow::Borrowed(string) + } + DenoCompileModuleSource::Bytes(module_code_bytes) => { + match module_code_bytes { + Cow::Owned(bytes) => { + Cow::Owned(from_utf8_lossy_owned(bytes)) + } + Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes), + } + } + }; + let source = shared + .node_code_translator + .translate_cjs_to_esm(&module_specifier, Some(source)) + .await + .map_err(JsErrorBox::from_err)?; + let module_source = match source { + Cow::Owned(source) => ModuleSourceCode::String(source.into()), + Cow::Borrowed(source) => { + ModuleSourceCode::String(FastString::from_static(source)) + } + }; + let code_cache_entry = shared + .get_code_cache(&module_specifier, module_source.as_bytes()); + 
Ok(deno_core::ModuleSource::new_with_redirect( + module_type, + module_source, + &original_specifier, + &module_specifier, + code_cache_entry, + )) + } + .boxed_local(), + ) + } else { + let module_source = module_source.into_for_v8(); + let code_cache_entry = self + .shared + .get_code_cache(module_specifier, module_source.as_bytes()); + deno_core::ModuleLoadResponse::Sync(Ok( + deno_core::ModuleSource::new_with_redirect( + module_type, + module_source, + original_specifier, + module_specifier, + code_cache_entry, + ), + )) + } + } + Ok(None) => deno_core::ModuleLoadResponse::Sync(Err( + JsErrorBox::type_error(format!( + "Module not found: {}", + original_specifier + )) + .into(), + )), + Err(err) => deno_core::ModuleLoadResponse::Sync(Err( + JsErrorBox::type_error(format!("{:?}", err)).into(), + )), + } + } + + fn code_cache_ready( + &self, + specifier: Url, + source_hash: u64, + code_cache_data: &[u8], + ) -> LocalBoxFuture<'static, ()> { + if let Some(code_cache) = &self.shared.code_cache { + code_cache.set_sync( + specifier, + deno_runtime::code_cache::CodeCacheType::EsModule, + source_hash, + code_cache_data, + ); + } + std::future::ready(()).boxed_local() + } + + fn get_source_map(&self, file_name: &str) -> Option> { + let url = Url::parse(file_name).ok()?; + let data = self.shared.modules.read(&url).ok()??; + data.source_map + } + + fn get_source_mapped_source_line( + &self, + file_name: &str, + line_number: usize, + ) -> Option { + let specifier = Url::parse(file_name).ok()?; + let data = self.shared.modules.read(&specifier).ok()??; + + let source = String::from_utf8_lossy(&data.data); + // Do NOT use .lines(): it skips the terminating empty line. 
+ // (due to internally using .split_terminator() instead of .split()) + let lines: Vec<&str> = source.split('\n').collect(); + if line_number >= lines.len() { + Some(format!( + "{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)", + crate::colors::yellow("Warning"), line_number + 1, + )) + } else { + Some(lines[line_number].to_string()) + } + } +} + +impl NodeRequireLoader for EmbeddedModuleLoader { + fn ensure_read_permission<'a>( + &self, + permissions: &mut dyn deno_runtime::deno_node::NodePermissions, + path: &'a std::path::Path, + ) -> Result, JsErrorBox> { + if self.shared.modules.has_file(path) { + // allow reading if the file is in the snapshot + return Ok(Cow::Borrowed(path)); + } + + self + .shared + .npm_registry_permission_checker + .ensure_read_permission(permissions, path) + .map_err(JsErrorBox::from_err) + } + + fn load_text_file_lossy( + &self, + path: &std::path::Path, + ) -> Result, JsErrorBox> { + let file_entry = self + .shared + .vfs + .file_entry(path) + .map_err(JsErrorBox::from_err)?; + let file_bytes = self + .shared + .vfs + .read_file_offset_with_len( + file_entry.transpiled_offset.unwrap_or(file_entry.offset), + ) + .map_err(JsErrorBox::from_err)?; + Ok(from_utf8_lossy_cow(file_bytes)) + } + + fn is_maybe_cjs(&self, specifier: &Url) -> Result { + let media_type = MediaType::from_specifier(specifier); + self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type) + } +} + +struct StandaloneModuleLoaderFactory { + shared: Arc, +} + +impl StandaloneModuleLoaderFactory { + pub fn create_result(&self) -> CreateModuleLoaderResult { + let loader = Rc::new(EmbeddedModuleLoader { + shared: self.shared.clone(), + }); + CreateModuleLoaderResult { + module_loader: loader.clone(), + node_require_loader: loader, + } + } +} + +impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { + fn create_for_main( + &self, + _root_permissions: PermissionsContainer, + ) -> CreateModuleLoaderResult { + self.create_result()
} + + fn create_for_worker( + &self, + _parent_permissions: PermissionsContainer, + _permissions: PermissionsContainer, + ) -> CreateModuleLoaderResult { + self.create_result() + } +} + +struct StandaloneRootCertStoreProvider { + ca_stores: Option>, + ca_data: Option, + cell: OnceLock>, +} + +impl RootCertStoreProvider for StandaloneRootCertStoreProvider { + fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox> { + self + .cell + // get_or_try_init was not stable yet when this was written + .get_or_init(|| { + get_root_cert_store(None, self.ca_stores.clone(), self.ca_data.clone()) + }) + .as_ref() + .map_err(|err| JsErrorBox::from_err(err.clone())) + } +} + +pub async fn run( + fs: Arc, + sys: DenoRtSys, + data: StandaloneData, +) -> Result { + let StandaloneData { + metadata, + modules, + npm_snapshot, + root_path, + vfs, + } = data; + let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider { + ca_stores: metadata.ca_stores, + ca_data: metadata.ca_data.map(CaData::Bytes), + cell: Default::default(), + }); + // use a dummy npm registry url + let npm_registry_url = Url::parse("https://localhost/").unwrap(); + let root_dir_url = Arc::new(Url::from_directory_path(&root_path).unwrap()); + let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); + let npm_global_cache_dir = root_path.join(".deno_compile_node_modules"); + let pkg_json_resolver = Arc::new(PackageJsonResolver::new( + sys.clone(), + Some(Arc::new(PackageJsonThreadLocalCache)), + )); + let npm_registry_permission_checker = { + let mode = match &metadata.node_modules { + Some(NodeModules::Managed { + node_modules_dir: Some(path), + }) => NpmRegistryReadPermissionCheckerMode::Local(PathBuf::from(path)), + Some(NodeModules::Byonm { .. 
}) => { + NpmRegistryReadPermissionCheckerMode::Byonm + } + Some(NodeModules::Managed { + node_modules_dir: None, + }) + | None => NpmRegistryReadPermissionCheckerMode::Global( + npm_global_cache_dir.clone(), + ), + }; + NpmRegistryReadPermissionChecker::new(sys.clone(), mode) + }; + let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules { + Some(NodeModules::Managed { node_modules_dir }) => { + // create an npmrc that uses the fake npm_registry_url to resolve packages + let npmrc = Arc::new(ResolvedNpmRc { + default_config: deno_npm::npm_rc::RegistryConfigWithUrl { + registry_url: npm_registry_url.clone(), + config: Default::default(), + }, + scopes: Default::default(), + registry_configs: Default::default(), + }); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &sys, + npm_global_cache_dir, + npmrc.get_all_known_registries_urls(), + )); + let snapshot = npm_snapshot.unwrap(); + let maybe_node_modules_path = node_modules_dir + .map(|node_modules_dir| root_path.join(node_modules_dir)); + let in_npm_pkg_checker = + DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Managed( + ManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: npm_cache_dir.root_dir_url(), + maybe_node_modules_path: maybe_node_modules_path.as_deref(), + }, + )); + let npm_resolution = + Arc::new(NpmResolutionCell::new(NpmResolutionSnapshot::new(snapshot))); + let npm_resolver = NpmResolver::::new::( + NpmResolverCreateOptions::Managed(ManagedNpmResolverCreateOptions { + npm_resolution, + npm_cache_dir, + sys: sys.clone(), + maybe_node_modules_path, + npm_system_info: Default::default(), + npmrc, + }), + ); + (in_npm_pkg_checker, npm_resolver) + } + Some(NodeModules::Byonm { + root_node_modules_dir, + }) => { + let root_node_modules_dir = + root_node_modules_dir.map(|p| vfs.root().join(p)); + let in_npm_pkg_checker = + DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Byonm); + let npm_resolver = NpmResolver::::new::( + 
NpmResolverCreateOptions::Byonm(ByonmNpmResolverCreateOptions { + sys: sys.clone(), + pkg_json_resolver: pkg_json_resolver.clone(), + root_node_modules_dir, + }), + ); + (in_npm_pkg_checker, npm_resolver) + } + None => { + // Packages from different registries are already inlined in the binary, + // so no need to create actual `.npmrc` configuration. + let npmrc = create_default_npmrc(); + let npm_cache_dir = Arc::new(NpmCacheDir::new( + &sys, + npm_global_cache_dir, + npmrc.get_all_known_registries_urls(), + )); + let in_npm_pkg_checker = + DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Managed( + ManagedInNpmPkgCheckerCreateOptions { + root_cache_dir_url: npm_cache_dir.root_dir_url(), + maybe_node_modules_path: None, + }, + )); + let npm_resolution = Arc::new(NpmResolutionCell::default()); + let npm_resolver = NpmResolver::::new::( + NpmResolverCreateOptions::Managed(ManagedNpmResolverCreateOptions { + npm_resolution, + sys: sys.clone(), + npm_cache_dir, + maybe_node_modules_path: None, + npm_system_info: Default::default(), + npmrc: create_default_npmrc(), + }), + ); + (in_npm_pkg_checker, npm_resolver) + } + }; + + let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); + let node_resolver = Arc::new(NodeResolver::new( + in_npm_pkg_checker.clone(), + DenoIsBuiltInNodeModuleChecker, + npm_resolver.clone(), + pkg_json_resolver.clone(), + sys.clone(), + node_resolver::ConditionsFromResolutionMode::default(), + )); + let cjs_tracker = Arc::new(CjsTracker::new( + in_npm_pkg_checker.clone(), + pkg_json_resolver.clone(), + if metadata.unstable_config.detect_cjs { + IsCjsResolutionMode::ImplicitTypeCommonJs + } else if metadata.workspace_resolver.package_jsons.is_empty() { + IsCjsResolutionMode::Disabled + } else { + IsCjsResolutionMode::ExplicitTypeCommonJs + }, + )); + let npm_req_resolver = Arc::new(NpmReqResolver::new(NpmReqResolverOptions { + sys: sys.clone(), + in_npm_pkg_checker: in_npm_pkg_checker.clone(), + node_resolver: 
node_resolver.clone(), + npm_resolver: npm_resolver.clone(), + })); + let cjs_esm_code_analyzer = + CjsCodeAnalyzer::new(cjs_tracker.clone(), modules.clone(), sys.clone()); + let node_code_translator = Arc::new(NodeCodeTranslator::new( + cjs_esm_code_analyzer, + in_npm_pkg_checker, + node_resolver.clone(), + npm_resolver.clone(), + pkg_json_resolver.clone(), + sys.clone(), + )); + let workspace_resolver = { + let import_map = match metadata.workspace_resolver.import_map { + Some(import_map) => Some( + import_map::parse_from_json_with_options( + root_dir_url.join(&import_map.specifier).unwrap(), + &import_map.json, + import_map::ImportMapOptions { + address_hook: None, + expand_imports: true, + }, + )? + .import_map, + ), + None => None, + }; + let pkg_jsons = metadata + .workspace_resolver + .package_jsons + .into_iter() + .map(|(relative_path, json)| { + let path = root_dir_url + .join(&relative_path) + .unwrap() + .to_file_path() + .unwrap(); + let pkg_json = + deno_package_json::PackageJson::load_from_value(path, json); + Arc::new(pkg_json) + }) + .collect(); + WorkspaceResolver::new_raw( + root_dir_url.clone(), + import_map, + metadata + .workspace_resolver + .jsr_pkgs + .iter() + .map(|pkg| ResolverWorkspaceJsrPackage { + is_patch: false, // only used for enhancing the diagnostic, which isn't shown in deno compile + base: root_dir_url.join(&pkg.relative_base).unwrap(), + name: pkg.name.clone(), + version: pkg.version.clone(), + exports: pkg.exports.clone(), + }) + .collect(), + pkg_jsons, + metadata.workspace_resolver.pkg_json_resolution, + ) + }; + let code_cache = match metadata.code_cache_key { + Some(code_cache_key) => Some(Arc::new(DenoCompileCodeCache::new( + root_path.with_file_name(format!( + "{}.cache", + root_path.file_name().unwrap().to_string_lossy() + )), + code_cache_key, + ))), + None => { + log::debug!("Code cache disabled."); + None + } + }; + let module_loader_factory = StandaloneModuleLoaderFactory { + shared: 
Arc::new(SharedModuleLoaderState { + cjs_tracker: cjs_tracker.clone(), + code_cache: code_cache.clone(), + modules, + node_code_translator: node_code_translator.clone(), + node_resolver: node_resolver.clone(), + npm_module_loader: Arc::new(NpmModuleLoader::new( + cjs_tracker.clone(), + node_code_translator, + sys.clone(), + )), + npm_registry_permission_checker, + npm_req_resolver, + vfs: vfs.clone(), + workspace_resolver, + }), + }; + + let permissions = { + let mut permissions = metadata.permissions; + // grant read access to the vfs + match &mut permissions.allow_read { + Some(vec) if vec.is_empty() => { + // do nothing, already granted + } + Some(vec) => { + vec.push(root_path.to_string_lossy().to_string()); + } + None => { + permissions.allow_read = + Some(vec![root_path.to_string_lossy().to_string()]); + } + } + + let desc_parser = + Arc::new(RuntimePermissionDescriptorParser::new(sys.clone())); + let permissions = + Permissions::from_options(desc_parser.as_ref(), &permissions)?; + PermissionsContainer::new(desc_parser, permissions) + }; + let feature_checker = Arc::new({ + let mut checker = FeatureChecker::default(); + checker.set_exit_cb(Box::new(crate::unstable_exit_cb)); + for feature in metadata.unstable_config.features { + // `metadata` is valid for the whole lifetime of the program, so we + // can leak the string here. 
+ checker.enable_feature(feature.leak()); + } + checker + }); + let lib_main_worker_options = LibMainWorkerOptions { + argv: metadata.argv, + log_level: WorkerLogLevel::Info, + enable_op_summary_metrics: false, + enable_testing_features: false, + has_node_modules_dir, + inspect_brk: false, + inspect_wait: false, + strace_ops: None, + is_inspecting: false, + skip_op_registration: true, + location: metadata.location, + argv0: NpmPackageReqReference::from_specifier(&main_module) + .ok() + .map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref)) + .or(std::env::args().next()), + node_debug: std::env::var("NODE_DEBUG").ok(), + origin_data_folder_path: None, + seed: metadata.seed, + unsafely_ignore_certificate_errors: metadata + .unsafely_ignore_certificate_errors, + node_ipc: None, + serve_port: None, + serve_host: None, + otel_config: metadata.otel_config, + startup_snapshot: deno_snapshots::CLI_SNAPSHOT, + }; + let worker_factory = LibMainWorkerFactory::new( + Arc::new(BlobStore::default()), + code_cache.map(|c| c.for_deno_core()), + feature_checker, + fs, + None, + Box::new(module_loader_factory), + node_resolver.clone(), + create_npm_process_state_provider(&npm_resolver), + pkg_json_resolver, + root_cert_store_provider, + StorageKeyResolver::empty(), + sys.clone(), + lib_main_worker_options, + ); + + // Initialize v8 once from the main thread. + v8_set_flags(construct_v8_flags(&[], &metadata.v8_flags, vec![])); + // TODO(bartlomieju): remove last argument once Deploy no longer needs it + deno_core::JsRuntime::init_platform(None, true); + + let main_module = match NpmPackageReqReference::from_specifier(&main_module) { + Ok(package_ref) => { + let pkg_folder = npm_resolver.resolve_pkg_folder_from_deno_module_req( + package_ref.req(), + &deno_path_util::url_from_file_path(&vfs.root().join("package.json"))?, + )?; + worker_factory + .resolve_npm_binary_entrypoint(&pkg_folder, package_ref.sub_path())? 
+ } + Err(_) => main_module, + }; + + let mut worker = worker_factory.create_main_worker( + WorkerExecutionMode::Run, + permissions, + main_module, + )?; + + let exit_code = worker.run().await?; + Ok(exit_code) +} + +fn create_default_npmrc() -> Arc { + // this is fine because multiple registries are combined into + // one when compiling the binary + Arc::new(ResolvedNpmRc { + default_config: deno_npm::npm_rc::RegistryConfigWithUrl { + registry_url: Url::parse("https://registry.npmjs.org").unwrap(), + config: Default::default(), + }, + scopes: Default::default(), + registry_configs: Default::default(), + }) +} diff --git a/cli/schemas/lint-rules.v1.json b/cli/schemas/lint-rules.v1.json index 71d17849588c8e..87bd4e26003ac9 100644 --- a/cli/schemas/lint-rules.v1.json +++ b/cli/schemas/lint-rules.v1.json @@ -19,6 +19,17 @@ "fresh-server-event-handlers", "getter-return", "guard-for-in", + "jsx-boolean-value", + "jsx-button-has-type", + "jsx-curly-braces", + "jsx-key", + "jsx-no-children-prop", + "jsx-no-comment-text-nodes", + "jsx-no-duplicate-props", + "jsx-no-unescaped-entities", + "jsx-no-useless-fragment", + "jsx-props-no-spread-multi", + "jsx-void-dom-elements-no-children", "no-array-constructor", "no-async-promise-executor", "no-await-in-loop", @@ -70,7 +81,7 @@ "no-non-null-assertion", "no-obj-calls", "no-octal", - "no-process-globals", + "no-process-global", "no-prototype-builtins", "no-redeclare", "no-regex-spaces", @@ -92,6 +103,7 @@ "no-unsafe-negation", "no-unused-labels", "no-unused-vars", + "no-useless-rename", "no-var", "no-window", "no-window-prefix", @@ -101,6 +113,9 @@ "prefer-const", "prefer-namespace-keyword", "prefer-primordials", + "react-no-danger", + "react-no-danger-with-children", + "react-rules-of-hooks", "require-await", "require-yield", "single-var-declarator", diff --git a/cli/snapshot/Cargo.toml b/cli/snapshot/Cargo.toml new file mode 100644 index 00000000000000..c023af04a3b17d --- /dev/null +++ b/cli/snapshot/Cargo.toml @@ -0,0 +1,20 @@ 
+# Copyright 2018-2025 the Deno authors. MIT license. + +[package] +name = "deno_snapshots" +version = "0.2.0" +authors.workspace = true +edition.workspace = true +license.workspace = true +readme = "README.md" +repository.workspace = true +description = "v8 snapshot used by the Deno CLI" + +[lib] +path = "lib.rs" + +[features] +disable = [] + +[build-dependencies] +deno_runtime = { workspace = true, features = ["include_js_files_for_snapshotting", "only_snapshotted_js_sources", "snapshot"] } diff --git a/cli/snapshot/README.md b/cli/snapshot/README.md new file mode 100644 index 00000000000000..d52dead6b2c035 --- /dev/null +++ b/cli/snapshot/README.md @@ -0,0 +1,3 @@ +# deno_snapshots + +v8 snapshot used in the Deno CLI. diff --git a/cli/snapshot/build.rs b/cli/snapshot/build.rs new file mode 100644 index 00000000000000..9f08ac0e9ea715 --- /dev/null +++ b/cli/snapshot/build.rs @@ -0,0 +1,30 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +#[cfg(not(feature = "disable"))] +mod shared; + +fn main() { + #[cfg(not(feature = "disable"))] + { + let o = std::path::PathBuf::from(std::env::var_os("OUT_DIR").unwrap()); + let cli_snapshot_path = o.join("CLI_SNAPSHOT.bin"); + create_cli_snapshot(cli_snapshot_path); + } +} + +#[cfg(not(feature = "disable"))] +fn create_cli_snapshot(snapshot_path: std::path::PathBuf) { + use deno_runtime::ops::bootstrap::SnapshotOptions; + + let snapshot_options = SnapshotOptions { + ts_version: shared::TS_VERSION.to_string(), + v8_version: deno_runtime::deno_core::v8::VERSION_STRING, + target: std::env::var("TARGET").unwrap(), + }; + + deno_runtime::snapshot::create_runtime_snapshot( + snapshot_path, + snapshot_options, + vec![], + ); +} diff --git a/cli/snapshot/lib.rs b/cli/snapshot/lib.rs new file mode 100644 index 00000000000000..e5af4bcf6b4da3 --- /dev/null +++ b/cli/snapshot/lib.rs @@ -0,0 +1,13 @@ +// Copyright 2018-2025 the Deno authors. MIT license. 
+ +#[cfg(not(feature = "disable"))] +pub static CLI_SNAPSHOT: Option<&[u8]> = Some(include_bytes!(concat!( + env!("OUT_DIR"), + "/CLI_SNAPSHOT.bin" +))); +#[cfg(feature = "disable")] +pub static CLI_SNAPSHOT: Option<&[u8]> = None; + +mod shared; + +pub use shared::TS_VERSION; diff --git a/cli/snapshot/shared.rs b/cli/snapshot/shared.rs new file mode 100644 index 00000000000000..eec982776a74dc --- /dev/null +++ b/cli/snapshot/shared.rs @@ -0,0 +1,3 @@ +// Copyright 2018-2025 the Deno authors. MIT license. + +pub static TS_VERSION: &str = "5.6.2"; diff --git a/cli/standalone/binary.rs b/cli/standalone/binary.rs index 5334b4719da7de..59744a0c72509e 100644 --- a/cli/standalone/binary.rs +++ b/cli/standalone/binary.rs @@ -1,110 +1,71 @@ // Copyright 2018-2025 the Deno authors. MIT license. use std::borrow::Cow; -use std::collections::BTreeMap; +use std::cell::Cell; use std::collections::HashMap; use std::collections::VecDeque; use std::env; -use std::env::current_exe; use std::ffi::OsString; use std::fs; use std::fs::File; -use std::future::Future; -use std::io::ErrorKind; -use std::io::Read; -use std::io::Seek; -use std::io::SeekFrom; -use std::io::Write; -use std::ops::Range; use std::path::Component; use std::path::Path; use std::path::PathBuf; -use std::process::Command; -use std::sync::Arc; +use capacity_builder::BytesAppendable; use deno_ast::MediaType; use deno_ast::ModuleKind; use deno_ast::ModuleSpecifier; -use deno_config::workspace::PackageJsonDepResolution; -use deno_config::workspace::ResolverWorkspaceJsrPackage; -use deno_config::workspace::Workspace; use deno_config::workspace::WorkspaceResolver; use deno_core::anyhow::bail; use deno_core::anyhow::Context; use deno_core::error::AnyError; -use deno_core::futures::io::AllowStdIo; -use deno_core::futures::AsyncReadExt; -use deno_core::futures::AsyncSeekExt; use deno_core::serde_json; use deno_core::url::Url; use deno_graph::ModuleGraph; -use deno_lib::cache::DenoDir; -use 
deno_lib::standalone::virtual_fs::FileSystemCaseSensitivity; +use deno_lib::args::CaData; +use deno_lib::args::UnstableConfig; +use deno_lib::shared::ReleaseChannel; +use deno_lib::standalone::binary::CjsExportAnalysisEntry; +use deno_lib::standalone::binary::Metadata; +use deno_lib::standalone::binary::NodeModules; +use deno_lib::standalone::binary::RemoteModuleEntry; +use deno_lib::standalone::binary::SerializedResolverWorkspaceJsrPackage; +use deno_lib::standalone::binary::SerializedWorkspaceResolver; +use deno_lib::standalone::binary::SerializedWorkspaceResolverImportMap; +use deno_lib::standalone::binary::SpecifierDataStore; +use deno_lib::standalone::binary::SpecifierId; +use deno_lib::standalone::binary::MAGIC_BYTES; +use deno_lib::standalone::virtual_fs::BuiltVfs; +use deno_lib::standalone::virtual_fs::VfsBuilder; use deno_lib::standalone::virtual_fs::VfsEntry; -use deno_lib::standalone::virtual_fs::VfsFileSubDataKind; use deno_lib::standalone::virtual_fs::VirtualDirectory; use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries; use deno_lib::standalone::virtual_fs::WindowsSystemRootablePath; +use deno_lib::standalone::virtual_fs::DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME; +use deno_lib::util::hash::FastInsecureHasher; +use deno_lib::version::DENO_VERSION_INFO; use deno_npm::resolution::SerializedNpmResolutionSnapshot; -use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage; -use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; -use deno_npm::NpmPackageId; use deno_npm::NpmSystemInfo; use deno_path_util::url_from_directory_path; -use deno_path_util::url_from_file_path; use deno_path_util::url_to_file_path; -use deno_runtime::deno_fs; -use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_fs::RealFs; -use deno_runtime::deno_io::fs::FsError; -use deno_runtime::deno_node::PackageJson; -use deno_runtime::deno_permissions::PermissionsOptions; -use deno_semver::npm::NpmVersionReqParseError; -use 
deno_semver::package::PackageReq; -use deno_semver::Version; -use deno_semver::VersionReqSpecifierParseError; -use deno_telemetry::OtelConfig; use indexmap::IndexMap; -use log::Level; -use serde::Deserialize; -use serde::Serialize; - -use super::file_system::DenoCompileFileSystem; -use super::serialization::deserialize_binary_data_section; -use super::serialization::serialize_binary_data_section; -use super::serialization::DenoCompileModuleData; -use super::serialization::DeserializedDataSection; -use super::serialization::RemoteModulesStore; -use super::serialization::RemoteModulesStoreBuilder; -use super::serialization::SourceMapStore; +use node_resolver::analyze::CjsAnalysis; +use node_resolver::analyze::CjsCodeAnalyzer; + use super::virtual_fs::output_vfs; -use super::virtual_fs::BuiltVfs; -use super::virtual_fs::FileBackedVfs; -use super::virtual_fs::VfsBuilder; -use super::virtual_fs::VfsRoot; -use crate::args::CaData; use crate::args::CliOptions; use crate::args::CompileFlags; -use crate::args::NpmInstallDepsProvider; -use crate::args::PermissionFlags; -use crate::args::UnstableConfig; -use crate::cache::FastInsecureHasher; +use crate::cache::DenoDir; use crate::emit::Emitter; -use crate::file_fetcher::CliFileFetcher; use crate::http_util::HttpClientProvider; +use crate::node::CliCjsCodeAnalyzer; use crate::npm::CliNpmResolver; use crate::resolver::CliCjsTracker; -use crate::shared::ReleaseChannel; -use crate::sys::CliSys; use crate::util::archive; -use crate::util::fs::canonicalize_path; -use crate::util::fs::canonicalize_path_maybe_not_exists; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; -pub static DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME: &str = - ".deno_compile_node_modules"; - /// A URL that can be designated as the base for relative URLs. 
/// /// After creation, this URL may be used to get the key for a @@ -150,104 +111,60 @@ impl<'a> StandaloneRelativeFileBaseUrl<'a> { } } -#[derive(Deserialize, Serialize)] -pub enum NodeModules { - Managed { - /// Relative path for the node_modules directory in the vfs. - node_modules_dir: Option, - }, - Byonm { - root_node_modules_dir: Option, - }, +struct SpecifierStore<'a> { + data: IndexMap<&'a Url, SpecifierId>, } -#[derive(Deserialize, Serialize)] -pub struct SerializedWorkspaceResolverImportMap { - pub specifier: String, - pub json: String, -} +impl<'a> SpecifierStore<'a> { + pub fn with_capacity(capacity: usize) -> Self { + Self { + data: IndexMap::with_capacity(capacity), + } + } -#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct SerializedResolverWorkspaceJsrPackage { - pub relative_base: String, - pub name: String, - pub version: Option, - pub exports: IndexMap, -} + pub fn get_or_add(&mut self, specifier: &'a Url) -> SpecifierId { + let len = self.data.len(); + let entry = self.data.entry(specifier); + match entry { + indexmap::map::Entry::Occupied(occupied_entry) => *occupied_entry.get(), + indexmap::map::Entry::Vacant(vacant_entry) => { + let new_id = SpecifierId::new(len as u32); + vacant_entry.insert(new_id); + new_id + } + } + } -#[derive(Deserialize, Serialize)] -pub struct SerializedWorkspaceResolver { - pub import_map: Option, - pub jsr_pkgs: Vec, - pub package_jsons: BTreeMap, - pub pkg_json_resolution: PackageJsonDepResolution, + pub fn for_serialization( + self, + base_url: &StandaloneRelativeFileBaseUrl<'a>, + ) -> SpecifierStoreForSerialization<'a> { + SpecifierStoreForSerialization { + data: self + .data + .into_iter() + .map(|(specifier, id)| (base_url.specifier_key(specifier), id)) + .collect(), + } + } } -// Note: Don't use hashmaps/hashsets. Ensure the serialization -// is deterministic. 
-#[derive(Deserialize, Serialize)] -pub struct Metadata { - pub argv: Vec, - pub seed: Option, - pub code_cache_key: Option, - pub permissions: PermissionsOptions, - pub location: Option, - pub v8_flags: Vec, - pub log_level: Option, - pub ca_stores: Option>, - pub ca_data: Option>, - pub unsafely_ignore_certificate_errors: Option>, - pub env_vars_from_env_file: IndexMap, - pub workspace_resolver: SerializedWorkspaceResolver, - pub entrypoint_key: String, - pub node_modules: Option, - pub unstable_config: UnstableConfig, - pub otel_config: OtelConfig, - pub vfs_case_sensitivity: FileSystemCaseSensitivity, +struct SpecifierStoreForSerialization<'a> { + data: Vec<(Cow<'a, str>, SpecifierId)>, } -#[allow(clippy::too_many_arguments)] -fn write_binary_bytes( - mut file_writer: File, - original_bin: Vec, - metadata: &Metadata, - npm_snapshot: Option, - remote_modules: &RemoteModulesStoreBuilder, - source_map_store: &SourceMapStore, - vfs: &BuiltVfs, - compile_flags: &CompileFlags, -) -> Result<(), AnyError> { - let data_section_bytes = serialize_binary_data_section( - metadata, - npm_snapshot, - remote_modules, - source_map_store, - vfs, - ) - .context("Serializing binary data section.")?; - - let target = compile_flags.resolve_target(); - if target.contains("linux") { - libsui::Elf::new(&original_bin).append( - "d3n0l4nd", - &data_section_bytes, - &mut file_writer, - )?; - } else if target.contains("windows") { - let mut pe = libsui::PortableExecutable::from(&original_bin)?; - if let Some(icon) = compile_flags.icon.as_ref() { - let icon = std::fs::read(icon)?; - pe = pe.set_icon(&icon)?; +impl<'a> BytesAppendable<'a> for &'a SpecifierStoreForSerialization<'a> { + fn append_to_builder( + self, + builder: &mut capacity_builder::BytesBuilder<'a, TBytes>, + ) { + builder.append_le(self.data.len() as u32); + for (specifier_str, id) in &self.data { + builder.append_le(specifier_str.len() as u32); + builder.append(specifier_str.as_ref()); + builder.append(*id); } - - 
pe.write_resource("d3n0l4nd", data_section_bytes)? - .build(&mut file_writer)?; - } else if target.contains("darwin") { - libsui::Macho::from(original_bin)? - .write_section("d3n0l4nd", data_section_bytes)? - .build_and_sign(&mut file_writer)?; } - Ok(()) } pub fn is_standalone_binary(exe_path: &Path) -> bool { @@ -260,146 +177,6 @@ pub fn is_standalone_binary(exe_path: &Path) -> bool { || libsui::utils::is_macho(&data) } -pub struct StandaloneData { - pub metadata: Metadata, - pub modules: StandaloneModules, - pub npm_snapshot: Option, - pub root_path: PathBuf, - pub source_maps: SourceMapStore, - pub vfs: Arc, -} - -pub struct StandaloneModules { - remote_modules: RemoteModulesStore, - vfs: Arc, -} - -impl StandaloneModules { - pub fn resolve_specifier<'a>( - &'a self, - specifier: &'a ModuleSpecifier, - ) -> Result, AnyError> { - if specifier.scheme() == "file" { - Ok(Some(specifier)) - } else { - self.remote_modules.resolve_specifier(specifier) - } - } - - pub fn has_file(&self, path: &Path) -> bool { - self.vfs.file_entry(path).is_ok() - } - - pub fn read<'a>( - &'a self, - specifier: &'a ModuleSpecifier, - kind: VfsFileSubDataKind, - ) -> Result>, AnyError> { - if specifier.scheme() == "file" { - let path = deno_path_util::url_to_file_path(specifier)?; - let bytes = match self.vfs.file_entry(&path) { - Ok(entry) => self.vfs.read_file_all(entry, kind)?, - Err(err) if err.kind() == ErrorKind::NotFound => { - match RealFs.read_file_sync(&path, None) { - Ok(bytes) => bytes, - Err(FsError::Io(err)) if err.kind() == ErrorKind::NotFound => { - return Ok(None) - } - Err(err) => return Err(err.into()), - } - } - Err(err) => return Err(err.into()), - }; - Ok(Some(DenoCompileModuleData { - media_type: MediaType::from_specifier(specifier), - specifier, - data: bytes, - })) - } else { - self.remote_modules.read(specifier).map(|maybe_entry| { - maybe_entry.map(|entry| DenoCompileModuleData { - media_type: entry.media_type, - specifier: entry.specifier, - data: match kind { 
- VfsFileSubDataKind::Raw => entry.data, - VfsFileSubDataKind::ModuleGraph => { - entry.transpiled_data.unwrap_or(entry.data) - } - }, - }) - }) - } - } -} - -/// This function will try to run this binary as a standalone binary -/// produced by `deno compile`. It determines if this is a standalone -/// binary by skipping over the trailer width at the end of the file, -/// then checking for the magic trailer string `d3n0l4nd`. If found, -/// the bundle is executed. If not, this function exits with `Ok(None)`. -pub fn extract_standalone( - cli_args: Cow>, -) -> Result, AnyError> { - let Some(data) = libsui::find_section("d3n0l4nd") else { - return Ok(None); - }; - - let DeserializedDataSection { - mut metadata, - npm_snapshot, - remote_modules, - source_maps, - vfs_root_entries, - vfs_files_data, - } = match deserialize_binary_data_section(data)? { - Some(data_section) => data_section, - None => return Ok(None), - }; - - let root_path = { - let maybe_current_exe = std::env::current_exe().ok(); - let current_exe_name = maybe_current_exe - .as_ref() - .and_then(|p| p.file_name()) - .map(|p| p.to_string_lossy()) - // should never happen - .unwrap_or_else(|| Cow::Borrowed("binary")); - std::env::temp_dir().join(format!("deno-compile-{}", current_exe_name)) - }; - let cli_args = cli_args.into_owned(); - metadata.argv.reserve(cli_args.len() - 1); - for arg in cli_args.into_iter().skip(1) { - metadata.argv.push(arg.into_string().unwrap()); - } - let vfs = { - let fs_root = VfsRoot { - dir: VirtualDirectory { - // align the name of the directory with the root dir - name: root_path.file_name().unwrap().to_string_lossy().to_string(), - entries: vfs_root_entries, - }, - root_path: root_path.clone(), - start_file_offset: 0, - }; - Arc::new(FileBackedVfs::new( - Cow::Borrowed(vfs_files_data), - fs_root, - metadata.vfs_case_sensitivity, - )) - }; - Ok(Some(StandaloneData { - metadata, - modules: StandaloneModules { - remote_modules, - vfs: vfs.clone(), - }, - npm_snapshot, - 
root_path, - source_maps, - vfs, - })) -} - pub struct WriteBinOptions<'a> { pub writer: File, pub display_output_filename: &'a str, @@ -410,11 +187,11 @@ pub struct WriteBinOptions<'a> { } pub struct DenoCompileBinaryWriter<'a> { + cjs_code_analyzer: CliCjsCodeAnalyzer, cjs_tracker: &'a CliCjsTracker, cli_options: &'a CliOptions, - deno_dir: &'a DenoDir, + deno_dir: &'a DenoDir, emitter: &'a Emitter, - file_fetcher: &'a CliFileFetcher, http_client_provider: &'a HttpClientProvider, npm_resolver: &'a CliNpmResolver, workspace_resolver: &'a WorkspaceResolver, @@ -424,22 +201,22 @@ pub struct DenoCompileBinaryWriter<'a> { impl<'a> DenoCompileBinaryWriter<'a> { #[allow(clippy::too_many_arguments)] pub fn new( + cjs_code_analyzer: CliCjsCodeAnalyzer, cjs_tracker: &'a CliCjsTracker, cli_options: &'a CliOptions, - deno_dir: &'a DenoDir, + deno_dir: &'a DenoDir, emitter: &'a Emitter, - file_fetcher: &'a CliFileFetcher, http_client_provider: &'a HttpClientProvider, npm_resolver: &'a CliNpmResolver, workspace_resolver: &'a WorkspaceResolver, npm_system_info: NpmSystemInfo, ) -> Self { Self { + cjs_code_analyzer, cjs_tracker, cli_options, deno_dir, emitter, - file_fetcher, http_client_provider, npm_resolver, workspace_resolver, @@ -475,7 +252,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { ) } } - self.write_standalone_binary(options, original_binary) + self.write_standalone_binary(options, original_binary).await } async fn get_base_binary( @@ -495,19 +272,14 @@ impl<'a> DenoCompileBinaryWriter<'a> { let target = compile_flags.resolve_target(); let binary_name = format!("denort-{target}.zip"); - let binary_path_suffix = - match crate::version::DENO_VERSION_INFO.release_channel { - ReleaseChannel::Canary => { - format!( - "canary/{}/{}", - crate::version::DENO_VERSION_INFO.git_hash, - binary_name - ) - } - _ => { - format!("release/v{}/{}", env!("CARGO_PKG_VERSION"), binary_name) - } - }; + let binary_path_suffix = match DENO_VERSION_INFO.release_channel { + ReleaseChannel::Canary 
=> { + format!("canary/{}/{}", DENO_VERSION_INFO.git_hash, binary_name) + } + _ => { + format!("release/v{}/{}", DENO_VERSION_INFO.deno, binary_name) + } + }; let download_directory = self.deno_dir.dl_folder_path(); let binary_path = download_directory.join(&binary_path_suffix); @@ -578,7 +350,7 @@ impl<'a> DenoCompileBinaryWriter<'a> { /// This functions creates a standalone deno binary by appending a bundle /// and magic trailer to the currently executing binary. #[allow(clippy::too_many_arguments)] - fn write_standalone_binary( + async fn write_standalone_binary( &self, options: WriteBinOptions<'_>, original_bin: Vec, @@ -622,23 +394,50 @@ impl<'a> DenoCompileBinaryWriter<'a> { .add_file_at_path(&path) .with_context(|| format!("Including {}", path.display()))?; } - let mut remote_modules_store = RemoteModulesStoreBuilder::default(); - let mut source_maps = Vec::with_capacity(graph.specifiers_count()); - // todo(dsherret): transpile in parallel + let specifiers_count = graph.specifiers_count(); + let mut specifier_store = SpecifierStore::with_capacity(specifiers_count); + let mut remote_modules_store = + SpecifierDataStore::with_capacity(specifiers_count); + // todo(dsherret): transpile and analyze CJS in parallel for module in graph.modules() { if module.specifier().scheme() == "data" { continue; // don't store data urls as an entry as they're in the code } - let (maybe_original_source, maybe_transpiled, media_type) = match module { + let mut maybe_source_map = None; + let mut maybe_transpiled = None; + let mut maybe_cjs_analysis = None; + let (maybe_original_source, media_type) = match module { deno_graph::Module::Js(m) => { - let original_bytes = m.source.as_bytes().to_vec(); - let maybe_transpiled = if m.media_type.is_emittable() { - let is_cjs = self.cjs_tracker.is_cjs_with_known_is_script( - &m.specifier, + let specifier = &m.specifier; + let original_bytes = m.source.as_bytes(); + if self.cjs_tracker.is_maybe_cjs(specifier, m.media_type)? 
{ + if self.cjs_tracker.is_cjs_with_known_is_script( + specifier, m.media_type, m.is_script, - )?; - let module_kind = ModuleKind::from_is_cjs(is_cjs); + )? { + let cjs_analysis = self + .cjs_code_analyzer + .analyze_cjs( + module.specifier(), + Some(Cow::Borrowed(m.source.as_ref())), + ) + .await?; + maybe_cjs_analysis = Some(match cjs_analysis { + CjsAnalysis::Esm(_) => CjsExportAnalysisEntry::Esm, + CjsAnalysis::Cjs(exports) => { + CjsExportAnalysisEntry::Cjs(exports) + } + }); + } else { + maybe_cjs_analysis = Some(CjsExportAnalysisEntry::Esm); + } + } + if m.media_type.is_emittable() { + let module_kind = match maybe_cjs_analysis.as_ref() { + Some(CjsExportAnalysisEntry::Cjs(_)) => ModuleKind::Cjs, + _ => ModuleKind::Esm, + }; let (source, source_map) = self.emitter.emit_parsed_source_for_deno_compile( &m.specifier, @@ -647,60 +446,67 @@ impl<'a> DenoCompileBinaryWriter<'a> { &m.source, )?; if source != m.source.as_ref() { - source_maps.push((&m.specifier, source_map)); - Some(source.into_bytes()) - } else { - None + maybe_source_map = Some(source_map.into_bytes()); + maybe_transpiled = Some(source.into_bytes()); } - } else { - None - }; - (Some(original_bytes), maybe_transpiled, m.media_type) + } + (Some(original_bytes), m.media_type) } deno_graph::Module::Json(m) => { - (Some(m.source.as_bytes().to_vec()), None, m.media_type) + (Some(m.source.as_bytes()), m.media_type) } deno_graph::Module::Wasm(m) => { - (Some(m.source.to_vec()), None, MediaType::Wasm) + (Some(m.source.as_ref()), MediaType::Wasm) } deno_graph::Module::Npm(_) | deno_graph::Module::Node(_) - | deno_graph::Module::External(_) => (None, None, MediaType::Unknown), + | deno_graph::Module::External(_) => (None, MediaType::Unknown), }; if let Some(original_source) = maybe_original_source { + let maybe_cjs_export_analysis = maybe_cjs_analysis + .as_ref() + .map(bincode::serialize) + .transpose()?; if module.specifier().scheme() == "file" { let file_path = 
deno_path_util::url_to_file_path(module.specifier())?; vfs .add_file_with_data( &file_path, - original_source, - VfsFileSubDataKind::Raw, + deno_lib::standalone::virtual_fs::AddFileDataOptions { + data: original_source.to_vec(), + maybe_transpiled, + maybe_source_map, + maybe_cjs_export_analysis, + }, ) .with_context(|| { format!("Failed adding '{}'", file_path.display()) })?; - if let Some(transpiled_source) = maybe_transpiled { - vfs - .add_file_with_data( - &file_path, - transpiled_source, - VfsFileSubDataKind::ModuleGraph, - ) - .with_context(|| { - format!("Failed adding '{}'", file_path.display()) - })?; - } } else { + let specifier_id = specifier_store.get_or_add(module.specifier()); remote_modules_store.add( - module.specifier(), - media_type, - original_source, - maybe_transpiled, + specifier_id, + RemoteModuleEntry { + media_type, + data: Cow::Borrowed(original_source), + maybe_transpiled: maybe_transpiled.map(Cow::Owned), + maybe_source_map: maybe_source_map.map(Cow::Owned), + maybe_cjs_export_analysis: maybe_cjs_export_analysis + .map(Cow::Owned), + }, ); } } } - remote_modules_store.add_redirects(&graph.redirects); + + let mut redirects_store = + SpecifierDataStore::with_capacity(graph.redirects.len()); + for (from, to) in &graph.redirects { + redirects_store.add( + specifier_store.get_or_add(from), + specifier_store.get_or_add(to), + ); + } if let Some(import_map) = self.workspace_resolver.maybe_import_map() { if let Ok(file_path) = url_to_file_path(import_map.base_url()) { @@ -718,7 +524,48 @@ impl<'a> DenoCompileBinaryWriter<'a> { } } + // do CJS export analysis on all the files in the VFS + // todo(dsherret): analyze cjs in parallel + let mut to_add = Vec::new(); + for (file_path, file) in vfs.iter_files() { + if file.cjs_export_analysis_offset.is_some() { + continue; // already analyzed + } + let specifier = deno_path_util::url_from_file_path(&file_path)?; + let media_type = MediaType::from_specifier(&specifier); + if 
self.cjs_tracker.is_maybe_cjs(&specifier, media_type)? { + let maybe_source = vfs + .file_bytes(file.offset) + .map(|text| String::from_utf8_lossy(text)); + let cjs_analysis_result = self + .cjs_code_analyzer + .analyze_cjs(&specifier, maybe_source) + .await; + let maybe_analysis = match cjs_analysis_result { + Ok(CjsAnalysis::Esm(_)) => Some(CjsExportAnalysisEntry::Esm), + Ok(CjsAnalysis::Cjs(exports)) => { + Some(CjsExportAnalysisEntry::Cjs(exports)) + } + Err(err) => { + log::debug!( + "Ignoring cjs export analysis for '{}': {}", + specifier, + err + ); + None + } + }; + if let Some(analysis) = &maybe_analysis { + to_add.push((file_path, bincode::serialize(analysis)?)); + } + } + } + for (file_path, analysis) in to_add { + vfs.add_cjs_export_analysis(&file_path, analysis); + } + let vfs = self.build_vfs_consolidating_global_npm_cache(vfs); + let root_dir_url = match &vfs.root_path { WindowsSystemRootablePath::Path(dir) => { Some(url_from_directory_path(dir)?) @@ -744,14 +591,6 @@ impl<'a> DenoCompileBinaryWriter<'a> { None }; - let mut source_map_store = SourceMapStore::with_capacity(source_maps.len()); - for (specifier, source_map) in source_maps { - source_map_store.add( - Cow::Owned(root_dir_url.specifier_key(specifier).into_owned()), - Cow::Owned(source_map.into_bytes()), - ); - } - let node_modules = match &self.npm_resolver { CliNpmResolver::Managed(_) => { npm_snapshot.as_ref().map(|_| NodeModules::Managed { @@ -861,17 +700,34 @@ impl<'a> DenoCompileBinaryWriter<'a> { vfs_case_sensitivity: vfs.case_sensitivity, }; - write_binary_bytes( - writer, - original_bin, + let (data_section_bytes, section_sizes) = serialize_binary_data_section( &metadata, npm_snapshot.map(|s| s.into_serialized()), + &specifier_store.for_serialization(&root_dir_url), + &redirects_store, &remote_modules_store, - &source_map_store, &vfs, - compile_flags, ) - .context("Writing binary bytes") + .context("Serializing binary data section.")?; + + log::info!( + "\n{} {}", + 
crate::colors::bold("Files:"), + crate::util::display::human_size(section_sizes.vfs as f64) + ); + log::info!( + "{} {}", + crate::colors::bold("Metadata:"), + crate::util::display::human_size(section_sizes.metadata as f64) + ); + log::info!( + "{} {}\n", + crate::colors::bold("Remote modules:"), + crate::util::display::human_size(section_sizes.remote_modules as f64) + ); + + write_binary_bytes(writer, original_bin, data_section_bytes, compile_flags) + .context("Writing binary bytes") } fn fill_npm_vfs(&self, builder: &mut VfsBuilder) -> Result<(), AnyError> { @@ -1042,6 +898,166 @@ impl<'a> DenoCompileBinaryWriter<'a> { } } +#[allow(clippy::too_many_arguments)] +fn write_binary_bytes( + mut file_writer: File, + original_bin: Vec, + data_section_bytes: Vec, + compile_flags: &CompileFlags, +) -> Result<(), AnyError> { + let target = compile_flags.resolve_target(); + if target.contains("linux") { + libsui::Elf::new(&original_bin).append( + "d3n0l4nd", + &data_section_bytes, + &mut file_writer, + )?; + } else if target.contains("windows") { + let mut pe = libsui::PortableExecutable::from(&original_bin)?; + if let Some(icon) = compile_flags.icon.as_ref() { + let icon = std::fs::read(icon)?; + pe = pe.set_icon(&icon)?; + } + + pe.write_resource("d3n0l4nd", data_section_bytes)? + .build(&mut file_writer)?; + } else if target.contains("darwin") { + libsui::Macho::from(original_bin)? + .write_section("d3n0l4nd", data_section_bytes)? 
+ .build_and_sign(&mut file_writer)?; + } + Ok(()) +} + +struct BinaryDataSectionSizes { + metadata: usize, + remote_modules: usize, + vfs: usize, +} + +/// Binary format: +/// * d3n0l4nd +/// * +/// * +/// * +/// * +/// * +/// * +/// * +/// * d3n0l4nd +#[allow(clippy::too_many_arguments)] +fn serialize_binary_data_section( + metadata: &Metadata, + npm_snapshot: Option, + specifiers: &SpecifierStoreForSerialization, + redirects: &SpecifierDataStore, + remote_modules: &SpecifierDataStore>, + vfs: &BuiltVfs, +) -> Result<(Vec, BinaryDataSectionSizes), AnyError> { + let metadata = serde_json::to_string(metadata)?; + let npm_snapshot = + npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default(); + let serialized_vfs = serde_json::to_string(&vfs.entries)?; + + let remote_modules_len = Cell::new(0); + let metadata_len = Cell::new(0); + let vfs_len = Cell::new(0); + + let bytes = capacity_builder::BytesBuilder::build(|builder| { + builder.append(MAGIC_BYTES); + // 1. Metadata + { + builder.append_le(metadata.len() as u64); + builder.append(&metadata); + } + // 2. Npm snapshot + { + builder.append_le(npm_snapshot.len() as u64); + builder.append(&npm_snapshot); + } + metadata_len.set(builder.len()); + // 3. Specifiers + builder.append(specifiers); + // 4. Redirects + redirects.serialize(builder); + // 5. Remote modules + remote_modules.serialize(builder); + remote_modules_len.set(builder.len() - metadata_len.get()); + // 6. 
VFS + { + builder.append_le(serialized_vfs.len() as u64); + builder.append(&serialized_vfs); + let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::(); + builder.append_le(vfs_bytes_len); + for file in &vfs.files { + builder.append(file); + } + } + vfs_len.set(builder.len() - remote_modules_len.get()); + + // write the magic bytes at the end so we can use it + // to make sure we've deserialized correctly + builder.append(MAGIC_BYTES); + })?; + + Ok(( + bytes, + BinaryDataSectionSizes { + metadata: metadata_len.get(), + remote_modules: remote_modules_len.get(), + vfs: vfs_len.get(), + }, + )) +} + +fn serialize_npm_snapshot( + mut snapshot: SerializedNpmResolutionSnapshot, +) -> Vec { + fn append_string(bytes: &mut Vec, string: &str) { + let len = string.len() as u32; + bytes.extend_from_slice(&len.to_le_bytes()); + bytes.extend_from_slice(string.as_bytes()); + } + + snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism + let ids_to_stored_ids = snapshot + .packages + .iter() + .enumerate() + .map(|(i, pkg)| (&pkg.id, i as u32)) + .collect::>(); + + let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect(); + root_packages.sort(); + let mut bytes = Vec::new(); + + bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes()); + for pkg in &snapshot.packages { + append_string(&mut bytes, &pkg.id.as_serialized()); + } + + bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes()); + for (req, id) in root_packages { + append_string(&mut bytes, &req.to_string()); + let id = ids_to_stored_ids.get(&id).unwrap(); + bytes.extend_from_slice(&id.to_le_bytes()); + } + + for pkg in &snapshot.packages { + let deps_len = pkg.dependencies.len() as u32; + bytes.extend_from_slice(&deps_len.to_le_bytes()); + let mut deps: Vec<_> = pkg.dependencies.iter().collect(); + deps.sort(); + for (req, id) in deps { + append_string(&mut bytes, req); + let id = ids_to_stored_ids.get(&id).unwrap(); + 
bytes.extend_from_slice(&id.to_le_bytes()); + } + } + + bytes +} + fn get_denort_path(deno_exe: PathBuf) -> Option { let mut denort = deno_exe; denort.set_file_name(if cfg!(windows) { diff --git a/cli/standalone/file_system.rs b/cli/standalone/file_system.rs deleted file mode 100644 index c4b3ebe7288a32..00000000000000 --- a/cli/standalone/file_system.rs +++ /dev/null @@ -1,884 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -use std::borrow::Cow; -use std::io::ErrorKind; -use std::path::Path; -use std::path::PathBuf; -use std::rc::Rc; -use std::sync::Arc; -use std::time::Duration; -use std::time::SystemTime; - -use deno_lib::standalone::virtual_fs::VfsFileSubDataKind; -use deno_runtime::deno_fs::AccessCheckCb; -use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_fs::FsDirEntry; -use deno_runtime::deno_fs::FsFileType; -use deno_runtime::deno_fs::OpenOptions; -use deno_runtime::deno_fs::RealFs; -use deno_runtime::deno_io::fs::File; -use deno_runtime::deno_io::fs::FsError; -use deno_runtime::deno_io::fs::FsResult; -use deno_runtime::deno_io::fs::FsStat; -use sys_traits::boxed::BoxedFsDirEntry; -use sys_traits::boxed::BoxedFsMetadataValue; -use sys_traits::boxed::FsMetadataBoxed; -use sys_traits::boxed::FsReadDirBoxed; -use sys_traits::FsCopy; -use sys_traits::FsMetadata; - -use super::virtual_fs::FileBackedVfs; -use super::virtual_fs::FileBackedVfsDirEntry; -use super::virtual_fs::FileBackedVfsFile; -use super::virtual_fs::FileBackedVfsMetadata; - -#[derive(Debug, Clone)] -pub struct DenoCompileFileSystem(Arc); - -impl DenoCompileFileSystem { - pub fn new(vfs: Arc) -> Self { - Self(vfs) - } - - fn error_if_in_vfs(&self, path: &Path) -> FsResult<()> { - if self.0.is_path_within(path) { - Err(FsError::NotSupported) - } else { - Ok(()) - } - } - - fn copy_to_real_path( - &self, - oldpath: &Path, - newpath: &Path, - ) -> std::io::Result { - let old_file = self.0.file_entry(oldpath)?; - let old_file_bytes = - self.0.read_file_all(old_file, 
VfsFileSubDataKind::Raw)?; - let len = old_file_bytes.len() as u64; - RealFs - .write_file_sync( - newpath, - OpenOptions { - read: false, - write: true, - create: true, - truncate: true, - append: false, - create_new: false, - mode: None, - }, - None, - &old_file_bytes, - ) - .map_err(|err| err.into_io_error())?; - Ok(len) - } -} - -#[async_trait::async_trait(?Send)] -impl FileSystem for DenoCompileFileSystem { - fn cwd(&self) -> FsResult { - RealFs.cwd() - } - - fn tmp_dir(&self) -> FsResult { - RealFs.tmp_dir() - } - - fn chdir(&self, path: &Path) -> FsResult<()> { - self.error_if_in_vfs(path)?; - RealFs.chdir(path) - } - - fn umask(&self, mask: Option) -> FsResult { - RealFs.umask(mask) - } - - fn open_sync( - &self, - path: &Path, - options: OpenOptions, - access_check: Option, - ) -> FsResult> { - if self.0.is_path_within(path) { - Ok(Rc::new(self.0.open_file(path)?)) - } else { - RealFs.open_sync(path, options, access_check) - } - } - async fn open_async<'a>( - &'a self, - path: PathBuf, - options: OpenOptions, - access_check: Option>, - ) -> FsResult> { - if self.0.is_path_within(&path) { - Ok(Rc::new(self.0.open_file(&path)?)) - } else { - RealFs.open_async(path, options, access_check).await - } - } - - fn mkdir_sync( - &self, - path: &Path, - recursive: bool, - mode: Option, - ) -> FsResult<()> { - self.error_if_in_vfs(path)?; - RealFs.mkdir_sync(path, recursive, mode) - } - async fn mkdir_async( - &self, - path: PathBuf, - recursive: bool, - mode: Option, - ) -> FsResult<()> { - self.error_if_in_vfs(&path)?; - RealFs.mkdir_async(path, recursive, mode).await - } - - fn chmod_sync(&self, path: &Path, mode: u32) -> FsResult<()> { - self.error_if_in_vfs(path)?; - RealFs.chmod_sync(path, mode) - } - async fn chmod_async(&self, path: PathBuf, mode: u32) -> FsResult<()> { - self.error_if_in_vfs(&path)?; - RealFs.chmod_async(path, mode).await - } - - fn chown_sync( - &self, - path: &Path, - uid: Option, - gid: Option, - ) -> FsResult<()> { - 
self.error_if_in_vfs(path)?; - RealFs.chown_sync(path, uid, gid) - } - async fn chown_async( - &self, - path: PathBuf, - uid: Option, - gid: Option, - ) -> FsResult<()> { - self.error_if_in_vfs(&path)?; - RealFs.chown_async(path, uid, gid).await - } - - fn lchown_sync( - &self, - path: &Path, - uid: Option, - gid: Option, - ) -> FsResult<()> { - self.error_if_in_vfs(path)?; - RealFs.lchown_sync(path, uid, gid) - } - - async fn lchown_async( - &self, - path: PathBuf, - uid: Option, - gid: Option, - ) -> FsResult<()> { - self.error_if_in_vfs(&path)?; - RealFs.lchown_async(path, uid, gid).await - } - - fn remove_sync(&self, path: &Path, recursive: bool) -> FsResult<()> { - self.error_if_in_vfs(path)?; - RealFs.remove_sync(path, recursive) - } - async fn remove_async(&self, path: PathBuf, recursive: bool) -> FsResult<()> { - self.error_if_in_vfs(&path)?; - RealFs.remove_async(path, recursive).await - } - - fn copy_file_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { - self.error_if_in_vfs(newpath)?; - if self.0.is_path_within(oldpath) { - self - .copy_to_real_path(oldpath, newpath) - .map(|_| ()) - .map_err(FsError::Io) - } else { - RealFs.copy_file_sync(oldpath, newpath) - } - } - async fn copy_file_async( - &self, - oldpath: PathBuf, - newpath: PathBuf, - ) -> FsResult<()> { - self.error_if_in_vfs(&newpath)?; - if self.0.is_path_within(&oldpath) { - let fs = self.clone(); - tokio::task::spawn_blocking(move || { - fs.copy_to_real_path(&oldpath, &newpath) - .map(|_| ()) - .map_err(FsError::Io) - }) - .await? 
- } else { - RealFs.copy_file_async(oldpath, newpath).await - } - } - - fn cp_sync(&self, from: &Path, to: &Path) -> FsResult<()> { - self.error_if_in_vfs(to)?; - - RealFs.cp_sync(from, to) - } - async fn cp_async(&self, from: PathBuf, to: PathBuf) -> FsResult<()> { - self.error_if_in_vfs(&to)?; - - RealFs.cp_async(from, to).await - } - - fn stat_sync(&self, path: &Path) -> FsResult { - if self.0.is_path_within(path) { - Ok(self.0.stat(path)?.as_fs_stat()) - } else { - RealFs.stat_sync(path) - } - } - async fn stat_async(&self, path: PathBuf) -> FsResult { - if self.0.is_path_within(&path) { - Ok(self.0.stat(&path)?.as_fs_stat()) - } else { - RealFs.stat_async(path).await - } - } - - fn lstat_sync(&self, path: &Path) -> FsResult { - if self.0.is_path_within(path) { - Ok(self.0.lstat(path)?.as_fs_stat()) - } else { - RealFs.lstat_sync(path) - } - } - async fn lstat_async(&self, path: PathBuf) -> FsResult { - if self.0.is_path_within(&path) { - Ok(self.0.lstat(&path)?.as_fs_stat()) - } else { - RealFs.lstat_async(path).await - } - } - - fn realpath_sync(&self, path: &Path) -> FsResult { - if self.0.is_path_within(path) { - Ok(self.0.canonicalize(path)?) - } else { - RealFs.realpath_sync(path) - } - } - async fn realpath_async(&self, path: PathBuf) -> FsResult { - if self.0.is_path_within(&path) { - Ok(self.0.canonicalize(&path)?) - } else { - RealFs.realpath_async(path).await - } - } - - fn read_dir_sync(&self, path: &Path) -> FsResult> { - if self.0.is_path_within(path) { - Ok(self.0.read_dir(path)?) - } else { - RealFs.read_dir_sync(path) - } - } - async fn read_dir_async(&self, path: PathBuf) -> FsResult> { - if self.0.is_path_within(&path) { - Ok(self.0.read_dir(&path)?) 
- } else { - RealFs.read_dir_async(path).await - } - } - - fn rename_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { - self.error_if_in_vfs(oldpath)?; - self.error_if_in_vfs(newpath)?; - RealFs.rename_sync(oldpath, newpath) - } - async fn rename_async( - &self, - oldpath: PathBuf, - newpath: PathBuf, - ) -> FsResult<()> { - self.error_if_in_vfs(&oldpath)?; - self.error_if_in_vfs(&newpath)?; - RealFs.rename_async(oldpath, newpath).await - } - - fn link_sync(&self, oldpath: &Path, newpath: &Path) -> FsResult<()> { - self.error_if_in_vfs(oldpath)?; - self.error_if_in_vfs(newpath)?; - RealFs.link_sync(oldpath, newpath) - } - async fn link_async( - &self, - oldpath: PathBuf, - newpath: PathBuf, - ) -> FsResult<()> { - self.error_if_in_vfs(&oldpath)?; - self.error_if_in_vfs(&newpath)?; - RealFs.link_async(oldpath, newpath).await - } - - fn symlink_sync( - &self, - oldpath: &Path, - newpath: &Path, - file_type: Option, - ) -> FsResult<()> { - self.error_if_in_vfs(oldpath)?; - self.error_if_in_vfs(newpath)?; - RealFs.symlink_sync(oldpath, newpath, file_type) - } - async fn symlink_async( - &self, - oldpath: PathBuf, - newpath: PathBuf, - file_type: Option, - ) -> FsResult<()> { - self.error_if_in_vfs(&oldpath)?; - self.error_if_in_vfs(&newpath)?; - RealFs.symlink_async(oldpath, newpath, file_type).await - } - - fn read_link_sync(&self, path: &Path) -> FsResult { - if self.0.is_path_within(path) { - Ok(self.0.read_link(path)?) - } else { - RealFs.read_link_sync(path) - } - } - async fn read_link_async(&self, path: PathBuf) -> FsResult { - if self.0.is_path_within(&path) { - Ok(self.0.read_link(&path)?) 
- } else { - RealFs.read_link_async(path).await - } - } - - fn truncate_sync(&self, path: &Path, len: u64) -> FsResult<()> { - self.error_if_in_vfs(path)?; - RealFs.truncate_sync(path, len) - } - async fn truncate_async(&self, path: PathBuf, len: u64) -> FsResult<()> { - self.error_if_in_vfs(&path)?; - RealFs.truncate_async(path, len).await - } - - fn utime_sync( - &self, - path: &Path, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - self.error_if_in_vfs(path)?; - RealFs.utime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) - } - async fn utime_async( - &self, - path: PathBuf, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - self.error_if_in_vfs(&path)?; - RealFs - .utime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) - .await - } - - fn lutime_sync( - &self, - path: &Path, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - self.error_if_in_vfs(path)?; - RealFs.lutime_sync(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) - } - async fn lutime_async( - &self, - path: PathBuf, - atime_secs: i64, - atime_nanos: u32, - mtime_secs: i64, - mtime_nanos: u32, - ) -> FsResult<()> { - self.error_if_in_vfs(&path)?; - RealFs - .lutime_async(path, atime_secs, atime_nanos, mtime_secs, mtime_nanos) - .await - } -} - -impl sys_traits::BaseFsHardLink for DenoCompileFileSystem { - #[inline] - fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> { - self.link_sync(src, dst).map_err(|err| err.into_io_error()) - } -} - -impl sys_traits::BaseFsRead for DenoCompileFileSystem { - #[inline] - fn base_fs_read(&self, path: &Path) -> std::io::Result> { - self - .read_file_sync(path, None) - .map_err(|err| err.into_io_error()) - } -} - -impl sys_traits::FsMetadataValue for FileBackedVfsMetadata { - fn file_type(&self) -> sys_traits::FileType { - self.file_type - } - - fn len(&self) 
-> u64 { - self.len - } - - fn accessed(&self) -> std::io::Result { - Err(not_supported("accessed time")) - } - - fn created(&self) -> std::io::Result { - Err(not_supported("created time")) - } - - fn changed(&self) -> std::io::Result { - Err(not_supported("changed time")) - } - - fn modified(&self) -> std::io::Result { - Err(not_supported("modified time")) - } - - fn dev(&self) -> std::io::Result { - Ok(0) - } - - fn ino(&self) -> std::io::Result { - Ok(0) - } - - fn mode(&self) -> std::io::Result { - Ok(0) - } - - fn nlink(&self) -> std::io::Result { - Ok(0) - } - - fn uid(&self) -> std::io::Result { - Ok(0) - } - - fn gid(&self) -> std::io::Result { - Ok(0) - } - - fn rdev(&self) -> std::io::Result { - Ok(0) - } - - fn blksize(&self) -> std::io::Result { - Ok(0) - } - - fn blocks(&self) -> std::io::Result { - Ok(0) - } - - fn is_block_device(&self) -> std::io::Result { - Ok(false) - } - - fn is_char_device(&self) -> std::io::Result { - Ok(false) - } - - fn is_fifo(&self) -> std::io::Result { - Ok(false) - } - - fn is_socket(&self) -> std::io::Result { - Ok(false) - } - - fn file_attributes(&self) -> std::io::Result { - Ok(0) - } -} - -fn not_supported(name: &str) -> std::io::Error { - std::io::Error::new( - ErrorKind::Unsupported, - format!( - "{} is not supported for an embedded deno compile file", - name - ), - ) -} - -impl sys_traits::FsDirEntry for FileBackedVfsDirEntry { - type Metadata = BoxedFsMetadataValue; - - fn file_name(&self) -> Cow { - Cow::Borrowed(self.metadata.name.as_ref()) - } - - fn file_type(&self) -> std::io::Result { - Ok(self.metadata.file_type) - } - - fn metadata(&self) -> std::io::Result { - Ok(BoxedFsMetadataValue(Box::new(self.metadata.clone()))) - } - - fn path(&self) -> Cow { - Cow::Owned(self.parent_path.join(&self.metadata.name)) - } -} - -impl sys_traits::BaseFsReadDir for DenoCompileFileSystem { - type ReadDirEntry = BoxedFsDirEntry; - - fn base_fs_read_dir( - &self, - path: &Path, - ) -> std::io::Result< - Box> + '_>, - > { - 
if self.0.is_path_within(path) { - let entries = self.0.read_dir_with_metadata(path)?; - Ok(Box::new( - entries.map(|entry| Ok(BoxedFsDirEntry::new(entry))), - )) - } else { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.fs_read_dir_boxed(path) - } - } -} - -impl sys_traits::BaseFsCanonicalize for DenoCompileFileSystem { - #[inline] - fn base_fs_canonicalize(&self, path: &Path) -> std::io::Result { - self.realpath_sync(path).map_err(|err| err.into_io_error()) - } -} - -impl sys_traits::BaseFsMetadata for DenoCompileFileSystem { - type Metadata = BoxedFsMetadataValue; - - #[inline] - fn base_fs_metadata(&self, path: &Path) -> std::io::Result { - if self.0.is_path_within(path) { - Ok(BoxedFsMetadataValue::new(self.0.stat(path)?)) - } else { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.fs_metadata_boxed(path) - } - } - - #[inline] - fn base_fs_symlink_metadata( - &self, - path: &Path, - ) -> std::io::Result { - if self.0.is_path_within(path) { - Ok(BoxedFsMetadataValue::new(self.0.lstat(path)?)) - } else { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.fs_symlink_metadata_boxed(path) - } - } -} - -impl sys_traits::BaseFsCopy for DenoCompileFileSystem { - #[inline] - fn base_fs_copy(&self, from: &Path, to: &Path) -> std::io::Result { - self - .error_if_in_vfs(to) - .map_err(|err| err.into_io_error())?; - if self.0.is_path_within(from) { - self.copy_to_real_path(from, to) - } else { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.fs_copy(from, to) - } - } -} - -impl sys_traits::BaseFsCloneFile for DenoCompileFileSystem { - fn base_fs_clone_file( - &self, - _from: &Path, - _to: &Path, - ) -> std::io::Result<()> { - // will cause a fallback in the code that uses this - Err(not_supported("cloning files")) - } -} - -impl 
sys_traits::BaseFsCreateDir for DenoCompileFileSystem { - #[inline] - fn base_fs_create_dir( - &self, - path: &Path, - options: &sys_traits::CreateDirOptions, - ) -> std::io::Result<()> { - self - .mkdir_sync(path, options.recursive, options.mode) - .map_err(|err| err.into_io_error()) - } -} - -impl sys_traits::BaseFsRemoveFile for DenoCompileFileSystem { - #[inline] - fn base_fs_remove_file(&self, path: &Path) -> std::io::Result<()> { - self - .remove_sync(path, false) - .map_err(|err| err.into_io_error()) - } -} - -impl sys_traits::BaseFsRename for DenoCompileFileSystem { - #[inline] - fn base_fs_rename(&self, from: &Path, to: &Path) -> std::io::Result<()> { - self - .rename_sync(from, to) - .map_err(|err| err.into_io_error()) - } -} - -pub enum FsFileAdapter { - Real(sys_traits::impls::RealFsFile), - Vfs(FileBackedVfsFile), -} - -impl sys_traits::FsFile for FsFileAdapter {} - -impl sys_traits::FsFileAsRaw for FsFileAdapter { - #[cfg(windows)] - fn fs_file_as_raw_handle(&self) -> Option { - match self { - Self::Real(file) => file.fs_file_as_raw_handle(), - Self::Vfs(_) => None, - } - } - - #[cfg(unix)] - fn fs_file_as_raw_fd(&self) -> Option { - match self { - Self::Real(file) => file.fs_file_as_raw_fd(), - Self::Vfs(_) => None, - } - } -} - -impl sys_traits::FsFileSyncData for FsFileAdapter { - fn fs_file_sync_data(&mut self) -> std::io::Result<()> { - match self { - Self::Real(file) => file.fs_file_sync_data(), - Self::Vfs(_) => Ok(()), - } - } -} - -impl sys_traits::FsFileSyncAll for FsFileAdapter { - fn fs_file_sync_all(&mut self) -> std::io::Result<()> { - match self { - Self::Real(file) => file.fs_file_sync_all(), - Self::Vfs(_) => Ok(()), - } - } -} - -impl sys_traits::FsFileSetPermissions for FsFileAdapter { - #[inline] - fn fs_file_set_permissions(&mut self, mode: u32) -> std::io::Result<()> { - match self { - Self::Real(file) => file.fs_file_set_permissions(mode), - Self::Vfs(_) => Ok(()), - } - } -} - -impl std::io::Read for FsFileAdapter { - #[inline] 
- fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - match self { - Self::Real(file) => file.read(buf), - Self::Vfs(file) => file.read_to_buf(buf), - } - } -} - -impl std::io::Seek for FsFileAdapter { - fn seek(&mut self, pos: std::io::SeekFrom) -> std::io::Result { - match self { - Self::Real(file) => file.seek(pos), - Self::Vfs(file) => file.seek(pos), - } - } -} - -impl std::io::Write for FsFileAdapter { - #[inline] - fn write(&mut self, buf: &[u8]) -> std::io::Result { - match self { - Self::Real(file) => file.write(buf), - Self::Vfs(_) => Err(not_supported("writing files")), - } - } - - #[inline] - fn flush(&mut self) -> std::io::Result<()> { - match self { - Self::Real(file) => file.flush(), - Self::Vfs(_) => Err(not_supported("writing files")), - } - } -} - -impl sys_traits::FsFileSetLen for FsFileAdapter { - #[inline] - fn fs_file_set_len(&mut self, len: u64) -> std::io::Result<()> { - match self { - Self::Real(file) => file.fs_file_set_len(len), - Self::Vfs(_) => Err(not_supported("setting file length")), - } - } -} - -impl sys_traits::FsFileSetTimes for FsFileAdapter { - fn fs_file_set_times( - &mut self, - times: sys_traits::FsFileTimes, - ) -> std::io::Result<()> { - match self { - Self::Real(file) => file.fs_file_set_times(times), - Self::Vfs(_) => Err(not_supported("setting file times")), - } - } -} - -impl sys_traits::FsFileLock for FsFileAdapter { - fn fs_file_lock( - &mut self, - mode: sys_traits::FsFileLockMode, - ) -> std::io::Result<()> { - match self { - Self::Real(file) => file.fs_file_lock(mode), - Self::Vfs(_) => Err(not_supported("locking files")), - } - } - - fn fs_file_try_lock( - &mut self, - mode: sys_traits::FsFileLockMode, - ) -> std::io::Result<()> { - match self { - Self::Real(file) => file.fs_file_try_lock(mode), - Self::Vfs(_) => Err(not_supported("locking files")), - } - } - - fn fs_file_unlock(&mut self) -> std::io::Result<()> { - match self { - Self::Real(file) => file.fs_file_unlock(), - Self::Vfs(_) => 
Err(not_supported("unlocking files")), - } - } -} - -impl sys_traits::FsFileIsTerminal for FsFileAdapter { - #[inline] - fn fs_file_is_terminal(&self) -> bool { - match self { - Self::Real(file) => file.fs_file_is_terminal(), - Self::Vfs(_) => false, - } - } -} - -impl sys_traits::BaseFsOpen for DenoCompileFileSystem { - type File = FsFileAdapter; - - fn base_fs_open( - &self, - path: &Path, - options: &sys_traits::OpenOptions, - ) -> std::io::Result { - if self.0.is_path_within(path) { - Ok(FsFileAdapter::Vfs(self.0.open_file(path)?)) - } else { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - Ok(FsFileAdapter::Real( - sys_traits::impls::RealSys.base_fs_open(path, options)?, - )) - } - } -} - -impl sys_traits::BaseFsSymlinkDir for DenoCompileFileSystem { - fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> { - self - .symlink_sync(src, dst, Some(FsFileType::Directory)) - .map_err(|err| err.into_io_error()) - } -} - -impl sys_traits::SystemRandom for DenoCompileFileSystem { - #[inline] - fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.sys_random(buf) - } -} - -impl sys_traits::SystemTimeNow for DenoCompileFileSystem { - #[inline] - fn sys_time_now(&self) -> SystemTime { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.sys_time_now() - } -} - -impl sys_traits::ThreadSleep for DenoCompileFileSystem { - #[inline] - fn thread_sleep(&self, dur: Duration) { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.thread_sleep(dur) - } -} - -impl sys_traits::EnvCurrentDir for DenoCompileFileSystem { - fn env_current_dir(&self) -> std::io::Result { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.env_current_dir() - } -} - -impl 
sys_traits::BaseEnvVar for DenoCompileFileSystem { - fn base_env_var_os( - &self, - key: &std::ffi::OsStr, - ) -> Option { - #[allow(clippy::disallowed_types)] // ok because we're implementing the fs - sys_traits::impls::RealSys.base_env_var_os(key) - } -} diff --git a/cli/standalone/mod.rs b/cli/standalone/mod.rs index f2a0859e8f8d7d..81ca2b4ff1fa98 100644 --- a/cli/standalone/mod.rs +++ b/cli/standalone/mod.rs @@ -1,1042 +1,4 @@ // Copyright 2018-2025 the Deno authors. MIT license. -// Allow unused code warnings because we share -// code between the two bin targets. -#![allow(dead_code)] -#![allow(unused_imports)] - -use std::borrow::Cow; -use std::path::PathBuf; -use std::rc::Rc; -use std::sync::Arc; - -use binary::StandaloneData; -use binary::StandaloneModules; -use code_cache::DenoCompileCodeCache; -use deno_ast::MediaType; -use deno_cache_dir::file_fetcher::CacheSetting; -use deno_cache_dir::npm::NpmCacheDir; -use deno_config::workspace::MappedResolution; -use deno_config::workspace::MappedResolutionError; -use deno_config::workspace::ResolverWorkspaceJsrPackage; -use deno_config::workspace::WorkspaceResolver; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; -use deno_core::error::ModuleLoaderError; -use deno_core::futures::future::LocalBoxFuture; -use deno_core::futures::FutureExt; -use deno_core::v8_set_flags; -use deno_core::FastString; -use deno_core::FeatureChecker; -use deno_core::ModuleLoader; -use deno_core::ModuleSourceCode; -use deno_core::ModuleSpecifier; -use deno_core::ModuleType; -use deno_core::RequestedModuleType; -use deno_core::ResolutionKind; -use deno_core::SourceCodeCacheInfo; -use deno_error::JsErrorBox; -use deno_lib::cache::DenoDirProvider; -use deno_lib::npm::NpmRegistryReadPermissionChecker; -use deno_lib::npm::NpmRegistryReadPermissionCheckerMode; -use deno_lib::standalone::virtual_fs::VfsFileSubDataKind; -use deno_lib::worker::CreateModuleLoaderResult; -use deno_lib::worker::LibMainWorkerFactory; -use 
deno_lib::worker::LibMainWorkerOptions; -use deno_lib::worker::ModuleLoaderFactory; -use deno_lib::worker::StorageKeyResolver; -use deno_npm::npm_rc::ResolvedNpmRc; -use deno_npm::resolution::NpmResolutionSnapshot; -use deno_package_json::PackageJsonDepValue; -use deno_resolver::cjs::IsCjsResolutionMode; -use deno_resolver::npm::managed::ManagedInNpmPkgCheckerCreateOptions; -use deno_resolver::npm::managed::NpmResolutionCell; -use deno_resolver::npm::ByonmNpmResolverCreateOptions; -use deno_resolver::npm::CreateInNpmPkgCheckerOptions; -use deno_resolver::npm::DenoInNpmPackageChecker; -use deno_resolver::npm::NpmReqResolverOptions; -use deno_runtime::deno_fs; -use deno_runtime::deno_fs::FileSystem; -use deno_runtime::deno_node::create_host_defined_options; -use deno_runtime::deno_node::NodeRequireLoader; -use deno_runtime::deno_node::NodeResolver; -use deno_runtime::deno_node::RealIsBuiltInNodeModuleChecker; -use deno_runtime::deno_permissions::Permissions; -use deno_runtime::deno_permissions::PermissionsContainer; -use deno_runtime::deno_tls::rustls::RootCertStore; -use deno_runtime::deno_tls::RootCertStoreProvider; -use deno_runtime::deno_web::BlobStore; -use deno_runtime::permissions::RuntimePermissionDescriptorParser; -use deno_runtime::WorkerExecutionMode; -use deno_runtime::WorkerLogLevel; -use deno_semver::npm::NpmPackageReqReference; -use import_map::parse_from_json; -use node_resolver::analyze::NodeCodeTranslator; -use node_resolver::errors::ClosestPkgJsonError; -use node_resolver::NodeResolutionKind; -use node_resolver::ResolutionMode; -use serialization::DenoCompileModuleSource; -use serialization::SourceMapStore; -use virtual_fs::FileBackedVfs; - -use crate::args::create_default_npmrc; -use crate::args::get_root_cert_store; -use crate::args::npm_pkg_req_ref_to_binary_command; -use crate::args::CaData; -use crate::args::NpmInstallDepsProvider; -use crate::cache::Caches; -use crate::cache::FastInsecureHasher; -use crate::cache::NodeAnalysisCache; -use 
crate::http_util::HttpClientProvider; -use crate::node::CliCjsCodeAnalyzer; -use crate::node::CliNodeCodeTranslator; -use crate::node::CliNodeResolver; -use crate::node::CliPackageJsonResolver; -use crate::npm::create_npm_process_state_provider; -use crate::npm::CliByonmNpmResolverCreateOptions; -use crate::npm::CliManagedNpmResolverCreateOptions; -use crate::npm::CliNpmResolver; -use crate::npm::CliNpmResolverCreateOptions; -use crate::npm::CliNpmResolverManagedSnapshotOption; -use crate::npm::NpmResolutionInitializer; -use crate::resolver::CliCjsTracker; -use crate::resolver::CliNpmReqResolver; -use crate::resolver::NpmModuleLoader; -use crate::sys::CliSys; -use crate::util::progress_bar::ProgressBar; -use crate::util::progress_bar::ProgressBarStyle; -use crate::util::text_encoding::from_utf8_lossy_cow; -use crate::util::v8::construct_v8_flags; -use crate::worker::CliCodeCache; -use crate::worker::CliMainWorkerFactory; -use crate::worker::CliMainWorkerOptions; - pub mod binary; -mod code_cache; -mod file_system; -mod serialization; mod virtual_fs; - -pub use binary::extract_standalone; -pub use binary::is_standalone_binary; -pub use binary::DenoCompileBinaryWriter; - -use self::binary::Metadata; -pub use self::file_system::DenoCompileFileSystem; - -struct SharedModuleLoaderState { - cjs_tracker: Arc, - code_cache: Option>, - fs: Arc, - modules: StandaloneModules, - node_code_translator: Arc, - node_resolver: Arc, - npm_module_loader: Arc, - npm_registry_permission_checker: NpmRegistryReadPermissionChecker, - npm_req_resolver: Arc, - npm_resolver: CliNpmResolver, - source_maps: SourceMapStore, - vfs: Arc, - workspace_resolver: WorkspaceResolver, -} - -impl SharedModuleLoaderState { - fn get_code_cache( - &self, - specifier: &ModuleSpecifier, - source: &[u8], - ) -> Option { - let Some(code_cache) = &self.code_cache else { - return None; - }; - if !code_cache.enabled() { - return None; - } - // deno version is already included in the root cache key - let hash = 
FastInsecureHasher::new_without_deno_version() - .write_hashable(source) - .finish(); - let data = code_cache.get_sync( - specifier, - deno_runtime::code_cache::CodeCacheType::EsModule, - hash, - ); - Some(SourceCodeCacheInfo { - hash, - data: data.map(Cow::Owned), - }) - } -} - -#[derive(Clone)] -struct EmbeddedModuleLoader { - shared: Arc, -} - -impl std::fmt::Debug for EmbeddedModuleLoader { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("EmbeddedModuleLoader").finish() - } -} - -pub const MODULE_NOT_FOUND: &str = "Module not found"; -pub const UNSUPPORTED_SCHEME: &str = "Unsupported scheme"; - -impl ModuleLoader for EmbeddedModuleLoader { - fn resolve( - &self, - raw_specifier: &str, - referrer: &str, - kind: ResolutionKind, - ) -> Result { - let referrer = if referrer == "." { - if kind != ResolutionKind::MainModule { - return Err( - JsErrorBox::generic(format!( - "Expected to resolve main module, got {:?} instead.", - kind - )) - .into(), - ); - } - let current_dir = std::env::current_dir().unwrap(); - deno_core::resolve_path(".", ¤t_dir)? - } else { - ModuleSpecifier::parse(referrer).map_err(|err| { - JsErrorBox::type_error(format!( - "Referrer uses invalid specifier: {}", - err - )) - })? - }; - let referrer_kind = if self - .shared - .cjs_tracker - .is_maybe_cjs(&referrer, MediaType::from_specifier(&referrer)) - .map_err(JsErrorBox::from_err)? - { - ResolutionMode::Require - } else { - ResolutionMode::Import - }; - - if self.shared.node_resolver.in_npm_package(&referrer) { - return Ok( - self - .shared - .node_resolver - .resolve( - raw_specifier, - &referrer, - referrer_kind, - NodeResolutionKind::Execution, - ) - .map_err(JsErrorBox::from_err)? - .into_url(), - ); - } - - let mapped_resolution = self - .shared - .workspace_resolver - .resolve(raw_specifier, &referrer); - - match mapped_resolution { - Ok(MappedResolution::WorkspaceJsrPackage { specifier, .. 
}) => { - Ok(specifier) - } - Ok(MappedResolution::WorkspaceNpmPackage { - target_pkg_json: pkg_json, - sub_path, - .. - }) => Ok( - self - .shared - .node_resolver - .resolve_package_subpath_from_deno_module( - pkg_json.dir_path(), - sub_path.as_deref(), - Some(&referrer), - referrer_kind, - NodeResolutionKind::Execution, - ) - .map_err(JsErrorBox::from_err)?, - ), - Ok(MappedResolution::PackageJson { - dep_result, - sub_path, - alias, - .. - }) => match dep_result - .as_ref() - .map_err(|e| JsErrorBox::from_err(e.clone()))? - { - PackageJsonDepValue::Req(req) => self - .shared - .npm_req_resolver - .resolve_req_with_sub_path( - req, - sub_path.as_deref(), - &referrer, - referrer_kind, - NodeResolutionKind::Execution, - ) - .map_err(|e| JsErrorBox::from_err(e).into()), - PackageJsonDepValue::Workspace(version_req) => { - let pkg_folder = self - .shared - .workspace_resolver - .resolve_workspace_pkg_json_folder_for_pkg_json_dep( - alias, - version_req, - ) - .map_err(JsErrorBox::from_err)?; - Ok( - self - .shared - .node_resolver - .resolve_package_subpath_from_deno_module( - pkg_folder, - sub_path.as_deref(), - Some(&referrer), - referrer_kind, - NodeResolutionKind::Execution, - ) - .map_err(JsErrorBox::from_err)?, - ) - } - }, - Ok(MappedResolution::Normal { specifier, .. }) - | Ok(MappedResolution::ImportMap { specifier, .. }) => { - if let Ok(reference) = - NpmPackageReqReference::from_specifier(&specifier) - { - return Ok( - self - .shared - .npm_req_resolver - .resolve_req_reference( - &reference, - &referrer, - referrer_kind, - NodeResolutionKind::Execution, - ) - .map_err(JsErrorBox::from_err)?, - ); - } - - if specifier.scheme() == "jsr" { - if let Some(specifier) = - self.shared.modules.resolve_specifier(&specifier)? 
- { - return Ok(specifier.clone()); - } - } - - Ok( - self - .shared - .node_resolver - .handle_if_in_node_modules(&specifier) - .unwrap_or(specifier), - ) - } - Err(err) - if err.is_unmapped_bare_specifier() && referrer.scheme() == "file" => - { - let maybe_res = self - .shared - .npm_req_resolver - .resolve_if_for_npm_pkg( - raw_specifier, - &referrer, - referrer_kind, - NodeResolutionKind::Execution, - ) - .map_err(JsErrorBox::from_err)?; - if let Some(res) = maybe_res { - return Ok(res.into_url()); - } - Err(JsErrorBox::from_err(err).into()) - } - Err(err) => Err(JsErrorBox::from_err(err).into()), - } - } - - fn get_host_defined_options<'s>( - &self, - scope: &mut deno_core::v8::HandleScope<'s>, - name: &str, - ) -> Option> { - let name = deno_core::ModuleSpecifier::parse(name).ok()?; - if self.shared.node_resolver.in_npm_package(&name) { - Some(create_host_defined_options(scope)) - } else { - None - } - } - - fn load( - &self, - original_specifier: &ModuleSpecifier, - maybe_referrer: Option<&ModuleSpecifier>, - _is_dynamic: bool, - _requested_module_type: RequestedModuleType, - ) -> deno_core::ModuleLoadResponse { - if original_specifier.scheme() == "data" { - let data_url_text = - match deno_graph::source::RawDataUrl::parse(original_specifier) - .and_then(|url| url.decode()) - { - Ok(response) => response, - Err(err) => { - return deno_core::ModuleLoadResponse::Sync(Err( - JsErrorBox::type_error(format!("{:#}", err)).into(), - )); - } - }; - return deno_core::ModuleLoadResponse::Sync(Ok( - deno_core::ModuleSource::new( - deno_core::ModuleType::JavaScript, - ModuleSourceCode::String(data_url_text.into()), - original_specifier, - None, - ), - )); - } - - if self.shared.node_resolver.in_npm_package(original_specifier) { - let shared = self.shared.clone(); - let original_specifier = original_specifier.clone(); - let maybe_referrer = maybe_referrer.cloned(); - return deno_core::ModuleLoadResponse::Async( - async move { - let code_source = shared - 
.npm_module_loader - .load(&original_specifier, maybe_referrer.as_ref()) - .await?; - let code_cache_entry = shared.get_code_cache( - &code_source.found_url, - code_source.code.as_bytes(), - ); - Ok(deno_core::ModuleSource::new_with_redirect( - match code_source.media_type { - MediaType::Json => ModuleType::Json, - _ => ModuleType::JavaScript, - }, - code_source.code, - &original_specifier, - &code_source.found_url, - code_cache_entry, - )) - } - .boxed_local(), - ); - } - - match self - .shared - .modules - .read(original_specifier, VfsFileSubDataKind::ModuleGraph) - { - Ok(Some(module)) => { - let media_type = module.media_type; - let (module_specifier, module_type, module_source) = - module.into_parts(); - let is_maybe_cjs = match self - .shared - .cjs_tracker - .is_maybe_cjs(original_specifier, media_type) - { - Ok(is_maybe_cjs) => is_maybe_cjs, - Err(err) => { - return deno_core::ModuleLoadResponse::Sync(Err( - JsErrorBox::type_error(format!("{:?}", err)).into(), - )); - } - }; - if is_maybe_cjs { - let original_specifier = original_specifier.clone(); - let module_specifier = module_specifier.clone(); - let shared = self.shared.clone(); - deno_core::ModuleLoadResponse::Async( - async move { - let source = match module_source { - DenoCompileModuleSource::String(string) => { - Cow::Borrowed(string) - } - DenoCompileModuleSource::Bytes(module_code_bytes) => { - match module_code_bytes { - Cow::Owned(bytes) => Cow::Owned( - crate::util::text_encoding::from_utf8_lossy_owned(bytes), - ), - Cow::Borrowed(bytes) => String::from_utf8_lossy(bytes), - } - } - }; - let source = shared - .node_code_translator - .translate_cjs_to_esm(&module_specifier, Some(source)) - .await?; - let module_source = match source { - Cow::Owned(source) => ModuleSourceCode::String(source.into()), - Cow::Borrowed(source) => { - ModuleSourceCode::String(FastString::from_static(source)) - } - }; - let code_cache_entry = shared - .get_code_cache(&module_specifier, module_source.as_bytes()); - 
Ok(deno_core::ModuleSource::new_with_redirect( - module_type, - module_source, - &original_specifier, - &module_specifier, - code_cache_entry, - )) - } - .boxed_local(), - ) - } else { - let module_source = module_source.into_for_v8(); - let code_cache_entry = self - .shared - .get_code_cache(module_specifier, module_source.as_bytes()); - deno_core::ModuleLoadResponse::Sync(Ok( - deno_core::ModuleSource::new_with_redirect( - module_type, - module_source, - original_specifier, - module_specifier, - code_cache_entry, - ), - )) - } - } - Ok(None) => deno_core::ModuleLoadResponse::Sync(Err( - JsErrorBox::type_error(format!( - "{MODULE_NOT_FOUND}: {}", - original_specifier - )) - .into(), - )), - Err(err) => deno_core::ModuleLoadResponse::Sync(Err( - JsErrorBox::type_error(format!("{:?}", err)).into(), - )), - } - } - - fn code_cache_ready( - &self, - specifier: ModuleSpecifier, - source_hash: u64, - code_cache_data: &[u8], - ) -> LocalBoxFuture<'static, ()> { - if let Some(code_cache) = &self.shared.code_cache { - code_cache.set_sync( - specifier, - deno_runtime::code_cache::CodeCacheType::EsModule, - source_hash, - code_cache_data, - ); - } - std::future::ready(()).boxed_local() - } - - fn get_source_map(&self, file_name: &str) -> Option> { - if file_name.starts_with("file:///") { - let url = - deno_path_util::url_from_directory_path(self.shared.vfs.root()).ok()?; - let file_url = ModuleSpecifier::parse(file_name).ok()?; - let relative_path = url.make_relative(&file_url)?; - self.shared.source_maps.get(&relative_path) - } else { - self.shared.source_maps.get(file_name) - } - .map(Cow::Borrowed) - } - - fn get_source_mapped_source_line( - &self, - file_name: &str, - line_number: usize, - ) -> Option { - let specifier = ModuleSpecifier::parse(file_name).ok()?; - let data = self - .shared - .modules - .read(&specifier, VfsFileSubDataKind::Raw) - .ok()??; - - let source = String::from_utf8_lossy(&data.data); - // Do NOT use .lines(): it skips the terminating empty line. 
- // (due to internally using_terminator() instead of .split()) - let lines: Vec<&str> = source.split('\n').collect(); - if line_number >= lines.len() { - Some(format!( - "{} Couldn't format source line: Line {} is out of bounds (source may have changed at runtime)", - crate::colors::yellow("Warning"), line_number + 1, - )) - } else { - Some(lines[line_number].to_string()) - } - } -} - -impl NodeRequireLoader for EmbeddedModuleLoader { - fn ensure_read_permission<'a>( - &self, - permissions: &mut dyn deno_runtime::deno_node::NodePermissions, - path: &'a std::path::Path, - ) -> Result, JsErrorBox> { - if self.shared.modules.has_file(path) { - // allow reading if the file is in the snapshot - return Ok(Cow::Borrowed(path)); - } - - self - .shared - .npm_registry_permission_checker - .ensure_read_permission(permissions, path) - .map_err(JsErrorBox::from_err) - } - - fn load_text_file_lossy( - &self, - path: &std::path::Path, - ) -> Result, JsErrorBox> { - let file_entry = self - .shared - .vfs - .file_entry(path) - .map_err(JsErrorBox::from_err)?; - let file_bytes = self - .shared - .vfs - .read_file_all(file_entry, VfsFileSubDataKind::ModuleGraph) - .map_err(JsErrorBox::from_err)?; - Ok(from_utf8_lossy_cow(file_bytes)) - } - - fn is_maybe_cjs( - &self, - specifier: &ModuleSpecifier, - ) -> Result { - let media_type = MediaType::from_specifier(specifier); - self.shared.cjs_tracker.is_maybe_cjs(specifier, media_type) - } -} - -struct StandaloneModuleLoaderFactory { - shared: Arc, -} - -impl StandaloneModuleLoaderFactory { - pub fn create_result(&self) -> CreateModuleLoaderResult { - let loader = Rc::new(EmbeddedModuleLoader { - shared: self.shared.clone(), - }); - CreateModuleLoaderResult { - module_loader: loader.clone(), - node_require_loader: loader, - } - } -} - -impl ModuleLoaderFactory for StandaloneModuleLoaderFactory { - fn create_for_main( - &self, - _root_permissions: PermissionsContainer, - ) -> CreateModuleLoaderResult { - self.create_result() - } - - fn 
create_for_worker( - &self, - _parent_permissions: PermissionsContainer, - _permissions: PermissionsContainer, - ) -> CreateModuleLoaderResult { - self.create_result() - } -} - -struct StandaloneRootCertStoreProvider { - ca_stores: Option>, - ca_data: Option, - cell: once_cell::sync::OnceCell, -} - -impl RootCertStoreProvider for StandaloneRootCertStoreProvider { - fn get_or_try_init(&self) -> Result<&RootCertStore, JsErrorBox> { - self.cell.get_or_try_init(|| { - get_root_cert_store(None, self.ca_stores.clone(), self.ca_data.clone()) - .map_err(JsErrorBox::from_err) - }) - } -} - -pub async fn run( - fs: Arc, - sys: CliSys, - data: StandaloneData, -) -> Result { - let StandaloneData { - metadata, - modules, - npm_snapshot, - root_path, - source_maps, - vfs, - } = data; - let deno_dir_provider = Arc::new(DenoDirProvider::new(sys.clone(), None)); - let root_cert_store_provider = Arc::new(StandaloneRootCertStoreProvider { - ca_stores: metadata.ca_stores, - ca_data: metadata.ca_data.map(CaData::Bytes), - cell: Default::default(), - }); - // use a dummy npm registry url - let npm_registry_url = ModuleSpecifier::parse("https://localhost/").unwrap(); - let root_dir_url = - Arc::new(ModuleSpecifier::from_directory_path(&root_path).unwrap()); - let main_module = root_dir_url.join(&metadata.entrypoint_key).unwrap(); - let npm_global_cache_dir = root_path.join(".deno_compile_node_modules"); - let pkg_json_resolver = Arc::new(CliPackageJsonResolver::new(sys.clone())); - let npm_registry_permission_checker = { - let mode = match &metadata.node_modules { - Some(binary::NodeModules::Managed { - node_modules_dir: Some(path), - }) => NpmRegistryReadPermissionCheckerMode::Local(PathBuf::from(path)), - Some(binary::NodeModules::Byonm { .. 
}) => { - NpmRegistryReadPermissionCheckerMode::Byonm - } - Some(binary::NodeModules::Managed { - node_modules_dir: None, - }) - | None => NpmRegistryReadPermissionCheckerMode::Global( - npm_global_cache_dir.clone(), - ), - }; - NpmRegistryReadPermissionChecker::new(sys.clone(), mode) - }; - let (in_npm_pkg_checker, npm_resolver) = match metadata.node_modules { - Some(binary::NodeModules::Managed { node_modules_dir }) => { - // create an npmrc that uses the fake npm_registry_url to resolve packages - let npmrc = Arc::new(ResolvedNpmRc { - default_config: deno_npm::npm_rc::RegistryConfigWithUrl { - registry_url: npm_registry_url.clone(), - config: Default::default(), - }, - scopes: Default::default(), - registry_configs: Default::default(), - }); - let npm_cache_dir = Arc::new(NpmCacheDir::new( - &sys, - npm_global_cache_dir, - npmrc.get_all_known_registries_urls(), - )); - let snapshot = npm_snapshot.unwrap(); - let maybe_node_modules_path = node_modules_dir - .map(|node_modules_dir| root_path.join(node_modules_dir)); - let in_npm_pkg_checker = - DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Managed( - ManagedInNpmPkgCheckerCreateOptions { - root_cache_dir_url: npm_cache_dir.root_dir_url(), - maybe_node_modules_path: maybe_node_modules_path.as_deref(), - }, - )); - let npm_resolution = - Arc::new(NpmResolutionCell::new(NpmResolutionSnapshot::new(snapshot))); - let npm_resolver = - CliNpmResolver::new(CliNpmResolverCreateOptions::Managed( - CliManagedNpmResolverCreateOptions { - npm_resolution, - npm_cache_dir, - sys: sys.clone(), - maybe_node_modules_path, - npm_system_info: Default::default(), - npmrc, - }, - )); - (in_npm_pkg_checker, npm_resolver) - } - Some(binary::NodeModules::Byonm { - root_node_modules_dir, - }) => { - let root_node_modules_dir = - root_node_modules_dir.map(|p| vfs.root().join(p)); - let in_npm_pkg_checker = - DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Byonm); - let npm_resolver = CliNpmResolver::new( - 
CliNpmResolverCreateOptions::Byonm(CliByonmNpmResolverCreateOptions { - sys: sys.clone(), - pkg_json_resolver: pkg_json_resolver.clone(), - root_node_modules_dir, - }), - ); - (in_npm_pkg_checker, npm_resolver) - } - None => { - // Packages from different registries are already inlined in the binary, - // so no need to create actual `.npmrc` configuration. - let npmrc = create_default_npmrc(); - let npm_cache_dir = Arc::new(NpmCacheDir::new( - &sys, - npm_global_cache_dir, - npmrc.get_all_known_registries_urls(), - )); - let in_npm_pkg_checker = - DenoInNpmPackageChecker::new(CreateInNpmPkgCheckerOptions::Managed( - ManagedInNpmPkgCheckerCreateOptions { - root_cache_dir_url: npm_cache_dir.root_dir_url(), - maybe_node_modules_path: None, - }, - )); - let npm_resolution = Arc::new(NpmResolutionCell::default()); - let npm_resolver = - CliNpmResolver::new(CliNpmResolverCreateOptions::Managed( - CliManagedNpmResolverCreateOptions { - npm_resolution, - sys: sys.clone(), - npm_cache_dir, - maybe_node_modules_path: None, - npm_system_info: Default::default(), - npmrc: create_default_npmrc(), - }, - )); - (in_npm_pkg_checker, npm_resolver) - } - }; - - let has_node_modules_dir = npm_resolver.root_node_modules_path().is_some(); - let node_resolver = Arc::new(NodeResolver::new( - in_npm_pkg_checker.clone(), - RealIsBuiltInNodeModuleChecker, - npm_resolver.clone(), - pkg_json_resolver.clone(), - sys.clone(), - node_resolver::ConditionsFromResolutionMode::default(), - )); - let cjs_tracker = Arc::new(CliCjsTracker::new( - in_npm_pkg_checker.clone(), - pkg_json_resolver.clone(), - if metadata.unstable_config.detect_cjs { - IsCjsResolutionMode::ImplicitTypeCommonJs - } else if metadata.workspace_resolver.package_jsons.is_empty() { - IsCjsResolutionMode::Disabled - } else { - IsCjsResolutionMode::ExplicitTypeCommonJs - }, - )); - let cache_db = Caches::new(deno_dir_provider.clone()); - let node_analysis_cache = NodeAnalysisCache::new(cache_db.node_analysis_db()); - let 
npm_req_resolver = - Arc::new(CliNpmReqResolver::new(NpmReqResolverOptions { - sys: sys.clone(), - in_npm_pkg_checker: in_npm_pkg_checker.clone(), - node_resolver: node_resolver.clone(), - npm_resolver: npm_resolver.clone(), - })); - let cjs_esm_code_analyzer = CliCjsCodeAnalyzer::new( - node_analysis_cache, - cjs_tracker.clone(), - fs.clone(), - None, - ); - let node_code_translator = Arc::new(NodeCodeTranslator::new( - cjs_esm_code_analyzer, - in_npm_pkg_checker, - node_resolver.clone(), - npm_resolver.clone(), - pkg_json_resolver.clone(), - sys.clone(), - )); - let workspace_resolver = { - let import_map = match metadata.workspace_resolver.import_map { - Some(import_map) => Some( - import_map::parse_from_json_with_options( - root_dir_url.join(&import_map.specifier).unwrap(), - &import_map.json, - import_map::ImportMapOptions { - address_hook: None, - expand_imports: true, - }, - )? - .import_map, - ), - None => None, - }; - let pkg_jsons = metadata - .workspace_resolver - .package_jsons - .into_iter() - .map(|(relative_path, json)| { - let path = root_dir_url - .join(&relative_path) - .unwrap() - .to_file_path() - .unwrap(); - let pkg_json = - deno_package_json::PackageJson::load_from_value(path, json); - Arc::new(pkg_json) - }) - .collect(); - WorkspaceResolver::new_raw( - root_dir_url.clone(), - import_map, - metadata - .workspace_resolver - .jsr_pkgs - .iter() - .map(|pkg| ResolverWorkspaceJsrPackage { - is_patch: false, // only used for enhancing the diagnostic, which isn't shown in deno compile - base: root_dir_url.join(&pkg.relative_base).unwrap(), - name: pkg.name.clone(), - version: pkg.version.clone(), - exports: pkg.exports.clone(), - }) - .collect(), - pkg_jsons, - metadata.workspace_resolver.pkg_json_resolution, - ) - }; - let code_cache = match metadata.code_cache_key { - Some(code_cache_key) => Some(Arc::new(DenoCompileCodeCache::new( - root_path.with_file_name(format!( - "{}.cache", - root_path.file_name().unwrap().to_string_lossy() - )), - 
code_cache_key, - )) as Arc), - None => { - log::debug!("Code cache disabled."); - None - } - }; - let module_loader_factory = StandaloneModuleLoaderFactory { - shared: Arc::new(SharedModuleLoaderState { - cjs_tracker: cjs_tracker.clone(), - code_cache: code_cache.clone(), - fs: fs.clone(), - modules, - node_code_translator: node_code_translator.clone(), - node_resolver: node_resolver.clone(), - npm_module_loader: Arc::new(NpmModuleLoader::new( - cjs_tracker.clone(), - fs.clone(), - node_code_translator, - )), - npm_registry_permission_checker, - npm_resolver: npm_resolver.clone(), - npm_req_resolver, - source_maps, - vfs, - workspace_resolver, - }), - }; - - let permissions = { - let mut permissions = metadata.permissions; - // grant read access to the vfs - match &mut permissions.allow_read { - Some(vec) if vec.is_empty() => { - // do nothing, already granted - } - Some(vec) => { - vec.push(root_path.to_string_lossy().to_string()); - } - None => { - permissions.allow_read = - Some(vec![root_path.to_string_lossy().to_string()]); - } - } - - let desc_parser = - Arc::new(RuntimePermissionDescriptorParser::new(sys.clone())); - let permissions = - Permissions::from_options(desc_parser.as_ref(), &permissions)?; - PermissionsContainer::new(desc_parser, permissions) - }; - let feature_checker = Arc::new({ - let mut checker = FeatureChecker::default(); - checker.set_exit_cb(Box::new(crate::unstable_exit_cb)); - for feature in metadata.unstable_config.features { - // `metadata` is valid for the whole lifetime of the program, so we - // can leak the string here. 
- checker.enable_feature(feature.leak()); - } - checker - }); - let lib_main_worker_options = LibMainWorkerOptions { - argv: metadata.argv, - log_level: WorkerLogLevel::Info, - enable_op_summary_metrics: false, - enable_testing_features: false, - has_node_modules_dir, - inspect_brk: false, - inspect_wait: false, - strace_ops: None, - is_inspecting: false, - skip_op_registration: true, - location: metadata.location, - argv0: NpmPackageReqReference::from_specifier(&main_module) - .ok() - .map(|req_ref| npm_pkg_req_ref_to_binary_command(&req_ref)) - .or(std::env::args().next()), - node_debug: std::env::var("NODE_DEBUG").ok(), - origin_data_folder_path: None, - seed: metadata.seed, - unsafely_ignore_certificate_errors: metadata - .unsafely_ignore_certificate_errors, - node_ipc: None, - serve_port: None, - serve_host: None, - deno_version: crate::version::DENO_VERSION_INFO.deno, - deno_user_agent: crate::version::DENO_VERSION_INFO.user_agent, - otel_config: metadata.otel_config, - startup_snapshot: crate::js::deno_isolate_init(), - }; - let lib_main_worker_factory = LibMainWorkerFactory::new( - Arc::new(BlobStore::default()), - code_cache.map(|c| c.as_code_cache()), - feature_checker, - fs, - None, - Box::new(module_loader_factory), - node_resolver.clone(), - create_npm_process_state_provider(&npm_resolver), - pkg_json_resolver, - root_cert_store_provider, - StorageKeyResolver::empty(), - sys.clone(), - lib_main_worker_options, - ); - // todo(dsherret): use LibMainWorker directly here and don't use CliMainWorkerFactory - let cli_main_worker_options = CliMainWorkerOptions { - create_hmr_runner: None, - create_coverage_collector: None, - needs_test_modules: false, - default_npm_caching_strategy: crate::args::NpmCachingStrategy::Lazy, - }; - let worker_factory = CliMainWorkerFactory::new( - lib_main_worker_factory, - None, - None, - node_resolver, - None, - npm_resolver, - sys, - cli_main_worker_options, - permissions, - ); - - // Initialize v8 once from the main thread. 
- v8_set_flags(construct_v8_flags(&[], &metadata.v8_flags, vec![])); - // TODO(bartlomieju): remove last argument once Deploy no longer needs it - deno_core::JsRuntime::init_platform(None, true); - - let mut worker = worker_factory - .create_main_worker(WorkerExecutionMode::Run, main_module) - .await?; - - let exit_code = worker.run().await?; - Ok(exit_code) -} diff --git a/cli/standalone/serialization.rs b/cli/standalone/serialization.rs deleted file mode 100644 index ab345917a3d12a..00000000000000 --- a/cli/standalone/serialization.rs +++ /dev/null @@ -1,784 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -use std::borrow::Cow; -use std::cell::Cell; -use std::collections::BTreeMap; -use std::collections::HashMap; -use std::io::Write; - -use capacity_builder::BytesAppendable; -use deno_ast::swc::common::source_map; -use deno_ast::MediaType; -use deno_core::anyhow::bail; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; -use deno_core::serde_json; -use deno_core::url::Url; -use deno_core::FastString; -use deno_core::ModuleSourceCode; -use deno_core::ModuleType; -use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries; -use deno_npm::resolution::SerializedNpmResolutionSnapshot; -use deno_npm::resolution::SerializedNpmResolutionSnapshotPackage; -use deno_npm::resolution::ValidSerializedNpmResolutionSnapshot; -use deno_npm::NpmPackageId; -use deno_semver::package::PackageReq; -use deno_semver::StackString; -use indexmap::IndexMap; - -use super::binary::Metadata; -use super::virtual_fs::BuiltVfs; -use super::virtual_fs::VfsBuilder; - -const MAGIC_BYTES: &[u8; 8] = b"d3n0l4nd"; - -/// Binary format: -/// * d3n0l4nd -/// * -/// * -/// * -/// * -/// * -/// * -/// * d3n0l4nd -pub fn serialize_binary_data_section( - metadata: &Metadata, - npm_snapshot: Option, - remote_modules: &RemoteModulesStoreBuilder, - source_map_store: &SourceMapStore, - vfs: &BuiltVfs, -) -> Result, AnyError> { - let metadata = serde_json::to_string(metadata)?; 
- let npm_snapshot = - npm_snapshot.map(serialize_npm_snapshot).unwrap_or_default(); - let serialized_vfs = serde_json::to_string(&vfs.entries)?; - - let bytes = capacity_builder::BytesBuilder::build(|builder| { - builder.append(MAGIC_BYTES); - // 1. Metadata - { - builder.append_le(metadata.len() as u64); - builder.append(&metadata); - } - // 2. Npm snapshot - { - builder.append_le(npm_snapshot.len() as u64); - builder.append(&npm_snapshot); - } - // 3. Remote modules - { - remote_modules.write(builder); - } - // 4. VFS - { - builder.append_le(serialized_vfs.len() as u64); - builder.append(&serialized_vfs); - let vfs_bytes_len = vfs.files.iter().map(|f| f.len() as u64).sum::(); - builder.append_le(vfs_bytes_len); - for file in &vfs.files { - builder.append(file); - } - } - // 5. Source maps - { - builder.append_le(source_map_store.data.len() as u32); - for (specifier, source_map) in &source_map_store.data { - builder.append_le(specifier.len() as u32); - builder.append(specifier); - builder.append_le(source_map.len() as u32); - builder.append(source_map.as_ref()); - } - } - - // write the magic bytes at the end so we can use it - // to make sure we've deserialized correctly - builder.append(MAGIC_BYTES); - })?; - - Ok(bytes) -} - -pub struct DeserializedDataSection { - pub metadata: Metadata, - pub npm_snapshot: Option, - pub remote_modules: RemoteModulesStore, - pub source_maps: SourceMapStore, - pub vfs_root_entries: VirtualDirectoryEntries, - pub vfs_files_data: &'static [u8], -} - -pub fn deserialize_binary_data_section( - data: &'static [u8], -) -> Result, AnyError> { - fn read_magic_bytes(input: &[u8]) -> Result<(&[u8], bool), AnyError> { - if input.len() < MAGIC_BYTES.len() { - bail!("Unexpected end of data. 
Could not find magic bytes."); - } - let (magic_bytes, input) = input.split_at(MAGIC_BYTES.len()); - if magic_bytes != MAGIC_BYTES { - return Ok((input, false)); - } - Ok((input, true)) - } - - #[allow(clippy::type_complexity)] - fn read_source_map_entry( - input: &[u8], - ) -> Result<(&[u8], (Cow, &[u8])), AnyError> { - let (input, specifier) = read_string_lossy(input)?; - let (input, source_map) = read_bytes_with_u32_len(input)?; - Ok((input, (specifier, source_map))) - } - - let (input, found) = read_magic_bytes(data)?; - if !found { - return Ok(None); - } - - // 1. Metadata - let (input, data) = - read_bytes_with_u64_len(input).context("reading metadata")?; - let metadata: Metadata = - serde_json::from_slice(data).context("deserializing metadata")?; - // 2. Npm snapshot - let (input, data) = - read_bytes_with_u64_len(input).context("reading npm snapshot")?; - let npm_snapshot = if data.is_empty() { - None - } else { - Some(deserialize_npm_snapshot(data).context("deserializing npm snapshot")?) - }; - // 3. Remote modules - let (input, remote_modules) = - RemoteModulesStore::build(input).context("deserializing remote modules")?; - // 4. VFS - let (input, data) = read_bytes_with_u64_len(input).context("vfs")?; - let vfs_root_entries: VirtualDirectoryEntries = - serde_json::from_slice(data).context("deserializing vfs data")?; - let (input, vfs_files_data) = - read_bytes_with_u64_len(input).context("reading vfs files data")?; - // 5. 
Source maps - let (mut input, source_map_data_len) = read_u32_as_usize(input)?; - let mut source_maps = SourceMapStore::with_capacity(source_map_data_len); - for _ in 0..source_map_data_len { - let (current_input, (specifier, source_map)) = - read_source_map_entry(input)?; - input = current_input; - source_maps.add(specifier, Cow::Borrowed(source_map)); - } - - // finally ensure we read the magic bytes at the end - let (_input, found) = read_magic_bytes(input)?; - if !found { - bail!("Could not find magic bytes at the end of the data."); - } - - Ok(Some(DeserializedDataSection { - metadata, - npm_snapshot, - remote_modules, - source_maps, - vfs_root_entries, - vfs_files_data, - })) -} - -#[derive(Default)] -pub struct RemoteModulesStoreBuilder { - specifiers: Vec<(String, u64)>, - data: Vec<(MediaType, Vec, Option>)>, - data_byte_len: u64, - redirects: Vec<(String, String)>, - redirects_len: u64, -} - -impl RemoteModulesStoreBuilder { - pub fn add( - &mut self, - specifier: &Url, - media_type: MediaType, - data: Vec, - maybe_transpiled: Option>, - ) { - log::debug!("Adding '{}' ({})", specifier, media_type); - let specifier = specifier.to_string(); - self.specifiers.push((specifier, self.data_byte_len)); - let maybe_transpiled_len = match &maybe_transpiled { - // data length (4 bytes), data - Some(data) => 4 + data.len() as u64, - None => 0, - }; - // media type (1 byte), data length (4 bytes), data, has transpiled (1 byte), transpiled length - self.data_byte_len += 1 + 4 + data.len() as u64 + 1 + maybe_transpiled_len; - self.data.push((media_type, data, maybe_transpiled)); - } - - pub fn add_redirects(&mut self, redirects: &BTreeMap) { - self.redirects.reserve(redirects.len()); - for (from, to) in redirects { - log::debug!("Adding redirect '{}' -> '{}'", from, to); - let from = from.to_string(); - let to = to.to_string(); - self.redirects_len += (4 + from.len() + 4 + to.len()) as u64; - self.redirects.push((from, to)); - } - } - - fn write<'a, TBytes: 
capacity_builder::BytesType>( - &'a self, - builder: &mut capacity_builder::BytesBuilder<'a, TBytes>, - ) { - builder.append_le(self.specifiers.len() as u32); - builder.append_le(self.redirects.len() as u32); - for (specifier, offset) in &self.specifiers { - builder.append_le(specifier.len() as u32); - builder.append(specifier); - builder.append_le(*offset); - } - for (from, to) in &self.redirects { - builder.append_le(from.len() as u32); - builder.append(from); - builder.append_le(to.len() as u32); - builder.append(to); - } - builder.append_le( - self - .data - .iter() - .map(|(_, data, maybe_transpiled)| { - 1 + 4 - + (data.len() as u64) - + 1 - + match maybe_transpiled { - Some(transpiled) => 4 + (transpiled.len() as u64), - None => 0, - } - }) - .sum::(), - ); - for (media_type, data, maybe_transpiled) in &self.data { - builder.append(serialize_media_type(*media_type)); - builder.append_le(data.len() as u32); - builder.append(data); - if let Some(transpiled) = maybe_transpiled { - builder.append(1); - builder.append_le(transpiled.len() as u32); - builder.append(transpiled); - } else { - builder.append(0); - } - } - } -} - -pub enum DenoCompileModuleSource { - String(&'static str), - Bytes(Cow<'static, [u8]>), -} - -impl DenoCompileModuleSource { - pub fn into_for_v8(self) -> ModuleSourceCode { - fn into_bytes(data: Cow<'static, [u8]>) -> ModuleSourceCode { - ModuleSourceCode::Bytes(match data { - Cow::Borrowed(d) => d.into(), - Cow::Owned(d) => d.into_boxed_slice().into(), - }) - } - - match self { - // todo(https://github.com/denoland/deno_core/pull/943): store whether - // the string is ascii or not ahead of time so we can avoid the is_ascii() - // check in FastString::from_static - Self::String(s) => ModuleSourceCode::String(FastString::from_static(s)), - Self::Bytes(b) => into_bytes(b), - } - } -} - -pub struct SourceMapStore { - data: IndexMap, Cow<'static, [u8]>>, -} - -impl SourceMapStore { - pub fn with_capacity(capacity: usize) -> Self { - Self { - 
data: IndexMap::with_capacity(capacity), - } - } - - pub fn add( - &mut self, - specifier: Cow<'static, str>, - source_map: Cow<'static, [u8]>, - ) { - self.data.insert(specifier, source_map); - } - - pub fn get(&self, specifier: &str) -> Option<&[u8]> { - self.data.get(specifier).map(|v| v.as_ref()) - } -} - -pub struct DenoCompileModuleData<'a> { - pub specifier: &'a Url, - pub media_type: MediaType, - pub data: Cow<'static, [u8]>, -} - -impl<'a> DenoCompileModuleData<'a> { - pub fn into_parts(self) -> (&'a Url, ModuleType, DenoCompileModuleSource) { - fn into_string_unsafe(data: Cow<'static, [u8]>) -> DenoCompileModuleSource { - match data { - Cow::Borrowed(d) => DenoCompileModuleSource::String( - // SAFETY: we know this is a valid utf8 string - unsafe { std::str::from_utf8_unchecked(d) }, - ), - Cow::Owned(d) => DenoCompileModuleSource::Bytes(Cow::Owned(d)), - } - } - - let (media_type, source) = match self.media_type { - MediaType::JavaScript - | MediaType::Jsx - | MediaType::Mjs - | MediaType::Cjs - | MediaType::TypeScript - | MediaType::Mts - | MediaType::Cts - | MediaType::Dts - | MediaType::Dmts - | MediaType::Dcts - | MediaType::Tsx => { - (ModuleType::JavaScript, into_string_unsafe(self.data)) - } - MediaType::Json => (ModuleType::Json, into_string_unsafe(self.data)), - MediaType::Wasm => { - (ModuleType::Wasm, DenoCompileModuleSource::Bytes(self.data)) - } - // just assume javascript if we made it here - MediaType::Css | MediaType::SourceMap | MediaType::Unknown => ( - ModuleType::JavaScript, - DenoCompileModuleSource::Bytes(self.data), - ), - }; - (self.specifier, media_type, source) - } -} - -pub struct RemoteModuleEntry<'a> { - pub specifier: &'a Url, - pub media_type: MediaType, - pub data: Cow<'static, [u8]>, - pub transpiled_data: Option>, -} - -enum RemoteModulesStoreSpecifierValue { - Data(usize), - Redirect(Url), -} - -pub struct RemoteModulesStore { - specifiers: HashMap, - files_data: &'static [u8], -} - -impl RemoteModulesStore { - fn 
build(input: &'static [u8]) -> Result<(&'static [u8], Self), AnyError> { - fn read_specifier(input: &[u8]) -> Result<(&[u8], (Url, u64)), AnyError> { - let (input, specifier) = read_string_lossy(input)?; - let specifier = Url::parse(&specifier)?; - let (input, offset) = read_u64(input)?; - Ok((input, (specifier, offset))) - } - - fn read_redirect(input: &[u8]) -> Result<(&[u8], (Url, Url)), AnyError> { - let (input, from) = read_string_lossy(input)?; - let from = Url::parse(&from)?; - let (input, to) = read_string_lossy(input)?; - let to = Url::parse(&to)?; - Ok((input, (from, to))) - } - - fn read_headers( - input: &[u8], - ) -> Result<(&[u8], HashMap), AnyError> - { - let (input, specifiers_len) = read_u32_as_usize(input)?; - let (mut input, redirects_len) = read_u32_as_usize(input)?; - let mut specifiers = - HashMap::with_capacity(specifiers_len + redirects_len); - for _ in 0..specifiers_len { - let (current_input, (specifier, offset)) = - read_specifier(input).context("reading specifier")?; - input = current_input; - specifiers.insert( - specifier, - RemoteModulesStoreSpecifierValue::Data(offset as usize), - ); - } - - for _ in 0..redirects_len { - let (current_input, (from, to)) = read_redirect(input)?; - input = current_input; - specifiers.insert(from, RemoteModulesStoreSpecifierValue::Redirect(to)); - } - - Ok((input, specifiers)) - } - - let (input, specifiers) = read_headers(input)?; - let (input, files_data) = read_bytes_with_u64_len(input)?; - - Ok(( - input, - Self { - specifiers, - files_data, - }, - )) - } - - pub fn resolve_specifier<'a>( - &'a self, - specifier: &'a Url, - ) -> Result, AnyError> { - let mut count = 0; - let mut current = specifier; - loop { - if count > 10 { - bail!("Too many redirects resolving '{}'", specifier); - } - match self.specifiers.get(current) { - Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => { - current = to; - count += 1; - } - Some(RemoteModulesStoreSpecifierValue::Data(_)) => { - return Ok(Some(current)); - 
} - None => { - return Ok(None); - } - } - } - } - - pub fn read<'a>( - &'a self, - original_specifier: &'a Url, - ) -> Result>, AnyError> { - let mut count = 0; - let mut specifier = original_specifier; - loop { - if count > 10 { - bail!("Too many redirects resolving '{}'", original_specifier); - } - match self.specifiers.get(specifier) { - Some(RemoteModulesStoreSpecifierValue::Redirect(to)) => { - specifier = to; - count += 1; - } - Some(RemoteModulesStoreSpecifierValue::Data(offset)) => { - let input = &self.files_data[*offset..]; - let (input, media_type_byte) = read_bytes(input, 1)?; - let media_type = deserialize_media_type(media_type_byte[0])?; - let (input, data) = read_bytes_with_u32_len(input)?; - check_has_len(input, 1)?; - let (input, has_transpiled) = (&input[1..], input[0]); - let (_, transpiled_data) = match has_transpiled { - 0 => (input, None), - 1 => { - let (input, data) = read_bytes_with_u32_len(input)?; - (input, Some(data)) - } - value => bail!( - "Invalid transpiled data flag: {}. 
Compiled data is corrupt.", - value - ), - }; - return Ok(Some(RemoteModuleEntry { - specifier, - media_type, - data: Cow::Borrowed(data), - transpiled_data: transpiled_data.map(Cow::Borrowed), - })); - } - None => { - return Ok(None); - } - } - } - } -} - -fn serialize_npm_snapshot( - mut snapshot: SerializedNpmResolutionSnapshot, -) -> Vec { - fn append_string(bytes: &mut Vec, string: &str) { - let len = string.len() as u32; - bytes.extend_from_slice(&len.to_le_bytes()); - bytes.extend_from_slice(string.as_bytes()); - } - - snapshot.packages.sort_by(|a, b| a.id.cmp(&b.id)); // determinism - let ids_to_stored_ids = snapshot - .packages - .iter() - .enumerate() - .map(|(i, pkg)| (&pkg.id, i as u32)) - .collect::>(); - - let mut root_packages: Vec<_> = snapshot.root_packages.iter().collect(); - root_packages.sort(); - let mut bytes = Vec::new(); - - bytes.extend_from_slice(&(snapshot.packages.len() as u32).to_le_bytes()); - for pkg in &snapshot.packages { - append_string(&mut bytes, &pkg.id.as_serialized()); - } - - bytes.extend_from_slice(&(root_packages.len() as u32).to_le_bytes()); - for (req, id) in root_packages { - append_string(&mut bytes, &req.to_string()); - let id = ids_to_stored_ids.get(&id).unwrap(); - bytes.extend_from_slice(&id.to_le_bytes()); - } - - for pkg in &snapshot.packages { - let deps_len = pkg.dependencies.len() as u32; - bytes.extend_from_slice(&deps_len.to_le_bytes()); - let mut deps: Vec<_> = pkg.dependencies.iter().collect(); - deps.sort(); - for (req, id) in deps { - append_string(&mut bytes, req); - let id = ids_to_stored_ids.get(&id).unwrap(); - bytes.extend_from_slice(&id.to_le_bytes()); - } - } - - bytes -} - -fn deserialize_npm_snapshot( - input: &[u8], -) -> Result { - fn parse_id(input: &[u8]) -> Result<(&[u8], NpmPackageId), AnyError> { - let (input, id) = read_string_lossy(input)?; - let id = NpmPackageId::from_serialized(&id)?; - Ok((input, id)) - } - - #[allow(clippy::needless_lifetimes)] // clippy bug - fn 
parse_root_package<'a>( - id_to_npm_id: &'a impl Fn(usize) -> Result, - ) -> impl Fn(&[u8]) -> Result<(&[u8], (PackageReq, NpmPackageId)), AnyError> + 'a - { - |input| { - let (input, req) = read_string_lossy(input)?; - let req = PackageReq::from_str(&req)?; - let (input, id) = read_u32_as_usize(input)?; - Ok((input, (req, id_to_npm_id(id)?))) - } - } - - #[allow(clippy::needless_lifetimes)] // clippy bug - fn parse_package_dep<'a>( - id_to_npm_id: &'a impl Fn(usize) -> Result, - ) -> impl Fn(&[u8]) -> Result<(&[u8], (StackString, NpmPackageId)), AnyError> + 'a - { - |input| { - let (input, req) = read_string_lossy(input)?; - let (input, id) = read_u32_as_usize(input)?; - let req = StackString::from_cow(req); - Ok((input, (req, id_to_npm_id(id)?))) - } - } - - fn parse_package<'a>( - input: &'a [u8], - id: NpmPackageId, - id_to_npm_id: &impl Fn(usize) -> Result, - ) -> Result<(&'a [u8], SerializedNpmResolutionSnapshotPackage), AnyError> { - let (input, deps_len) = read_u32_as_usize(input)?; - let (input, dependencies) = - parse_hashmap_n_times(input, deps_len, parse_package_dep(id_to_npm_id))?; - Ok(( - input, - SerializedNpmResolutionSnapshotPackage { - id, - system: Default::default(), - dist: Default::default(), - dependencies, - optional_dependencies: Default::default(), - bin: None, - scripts: Default::default(), - deprecated: Default::default(), - }, - )) - } - - let (input, packages_len) = read_u32_as_usize(input)?; - - // get a hashmap of all the npm package ids to their serialized ids - let (input, data_ids_to_npm_ids) = - parse_vec_n_times(input, packages_len, parse_id) - .context("deserializing id")?; - let data_id_to_npm_id = |id: usize| { - data_ids_to_npm_ids - .get(id) - .cloned() - .ok_or_else(|| deno_core::anyhow::anyhow!("Invalid npm package id")) - }; - - let (input, root_packages_len) = read_u32_as_usize(input)?; - let (input, root_packages) = parse_hashmap_n_times( - input, - root_packages_len, - parse_root_package(&data_id_to_npm_id), - ) - 
.context("deserializing root package")?; - let (input, packages) = - parse_vec_n_times_with_index(input, packages_len, |input, index| { - parse_package(input, data_id_to_npm_id(index)?, &data_id_to_npm_id) - }) - .context("deserializing package")?; - - if !input.is_empty() { - bail!("Unexpected data left over"); - } - - Ok( - SerializedNpmResolutionSnapshot { - packages, - root_packages, - } - // this is ok because we have already verified that all the - // identifiers found in the snapshot are valid via the - // npm package id -> npm package id mapping - .into_valid_unsafe(), - ) -} - -fn serialize_media_type(media_type: MediaType) -> u8 { - match media_type { - MediaType::JavaScript => 0, - MediaType::Jsx => 1, - MediaType::Mjs => 2, - MediaType::Cjs => 3, - MediaType::TypeScript => 4, - MediaType::Mts => 5, - MediaType::Cts => 6, - MediaType::Dts => 7, - MediaType::Dmts => 8, - MediaType::Dcts => 9, - MediaType::Tsx => 10, - MediaType::Json => 11, - MediaType::Wasm => 12, - MediaType::Css => 13, - MediaType::SourceMap => 14, - MediaType::Unknown => 15, - } -} - -fn deserialize_media_type(value: u8) -> Result { - match value { - 0 => Ok(MediaType::JavaScript), - 1 => Ok(MediaType::Jsx), - 2 => Ok(MediaType::Mjs), - 3 => Ok(MediaType::Cjs), - 4 => Ok(MediaType::TypeScript), - 5 => Ok(MediaType::Mts), - 6 => Ok(MediaType::Cts), - 7 => Ok(MediaType::Dts), - 8 => Ok(MediaType::Dmts), - 9 => Ok(MediaType::Dcts), - 10 => Ok(MediaType::Tsx), - 11 => Ok(MediaType::Json), - 12 => Ok(MediaType::Wasm), - 13 => Ok(MediaType::Css), - 14 => Ok(MediaType::SourceMap), - 15 => Ok(MediaType::Unknown), - _ => bail!("Unknown media type value: {}", value), - } -} - -fn parse_hashmap_n_times( - mut input: &[u8], - times: usize, - parse: impl Fn(&[u8]) -> Result<(&[u8], (TKey, TValue)), AnyError>, -) -> Result<(&[u8], HashMap), AnyError> { - let mut results = HashMap::with_capacity(times); - for _ in 0..times { - let result = parse(input); - let (new_input, (key, value)) = result?; - 
results.insert(key, value); - input = new_input; - } - Ok((input, results)) -} - -fn parse_vec_n_times( - input: &[u8], - times: usize, - parse: impl Fn(&[u8]) -> Result<(&[u8], TResult), AnyError>, -) -> Result<(&[u8], Vec), AnyError> { - parse_vec_n_times_with_index(input, times, |input, _index| parse(input)) -} - -fn parse_vec_n_times_with_index( - mut input: &[u8], - times: usize, - parse: impl Fn(&[u8], usize) -> Result<(&[u8], TResult), AnyError>, -) -> Result<(&[u8], Vec), AnyError> { - let mut results = Vec::with_capacity(times); - for i in 0..times { - let result = parse(input, i); - let (new_input, result) = result?; - results.push(result); - input = new_input; - } - Ok((input, results)) -} - -fn read_bytes_with_u64_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { - let (input, len) = read_u64(input)?; - let (input, data) = read_bytes(input, len as usize)?; - Ok((input, data)) -} - -fn read_bytes_with_u32_len(input: &[u8]) -> Result<(&[u8], &[u8]), AnyError> { - let (input, len) = read_u32_as_usize(input)?; - let (input, data) = read_bytes(input, len)?; - Ok((input, data)) -} - -fn read_bytes(input: &[u8], len: usize) -> Result<(&[u8], &[u8]), AnyError> { - check_has_len(input, len)?; - let (len_bytes, input) = input.split_at(len); - Ok((input, len_bytes)) -} - -#[inline(always)] -fn check_has_len(input: &[u8], len: usize) -> Result<(), AnyError> { - if input.len() < len { - bail!("Unexpected end of data."); - } - Ok(()) -} - -fn read_string_lossy(input: &[u8]) -> Result<(&[u8], Cow), AnyError> { - let (input, data_bytes) = read_bytes_with_u32_len(input)?; - Ok((input, String::from_utf8_lossy(data_bytes))) -} - -fn read_u32_as_usize(input: &[u8]) -> Result<(&[u8], usize), AnyError> { - let (input, len_bytes) = read_bytes(input, 4)?; - let len = u32::from_le_bytes(len_bytes.try_into()?); - Ok((input, len as usize)) -} - -fn read_u64(input: &[u8]) -> Result<(&[u8], u64), AnyError> { - let (input, len_bytes) = read_bytes(input, 8)?; - let len = 
u64::from_le_bytes(len_bytes.try_into()?); - Ok((input, len)) -} diff --git a/cli/standalone/virtual_fs.rs b/cli/standalone/virtual_fs.rs index 4f761d0d1592b9..a3e023620f98dc 100644 --- a/cli/standalone/virtual_fs.rs +++ b/cli/standalone/virtual_fs.rs @@ -1,497 +1,21 @@ // Copyright 2018-2025 the Deno authors. MIT license. use std::borrow::Cow; -use std::cell::RefCell; -use std::cmp::Ordering; -use std::collections::HashMap; use std::collections::HashSet; -use std::fs::File; -use std::io::Read; -use std::io::Seek; -use std::io::SeekFrom; -use std::ops::Range; -use std::path::Path; use std::path::PathBuf; -use std::rc::Rc; -use std::sync::Arc; -use deno_core::anyhow::anyhow; -use deno_core::anyhow::bail; -use deno_core::anyhow::Context; -use deno_core::error::AnyError; -use deno_core::parking_lot::Mutex; -use deno_core::BufMutView; -use deno_core::BufView; -use deno_core::ResourceHandleFd; -use deno_lib::standalone::virtual_fs::FileSystemCaseSensitivity; +use deno_lib::standalone::virtual_fs::BuiltVfs; use deno_lib::standalone::virtual_fs::OffsetWithLength; use deno_lib::standalone::virtual_fs::VfsEntry; -use deno_lib::standalone::virtual_fs::VfsEntryRef; -use deno_lib::standalone::virtual_fs::VfsFileSubDataKind; use deno_lib::standalone::virtual_fs::VirtualDirectory; use deno_lib::standalone::virtual_fs::VirtualDirectoryEntries; use deno_lib::standalone::virtual_fs::VirtualFile; -use deno_lib::standalone::virtual_fs::VirtualSymlink; use deno_lib::standalone::virtual_fs::VirtualSymlinkParts; use deno_lib::standalone::virtual_fs::WindowsSystemRootablePath; -use deno_path_util::normalize_path; -use deno_path_util::strip_unc_prefix; -use deno_runtime::deno_fs::FsDirEntry; -use deno_runtime::deno_io; -use deno_runtime::deno_io::fs::FsError; -use deno_runtime::deno_io::fs::FsResult; -use deno_runtime::deno_io::fs::FsStat; -use indexmap::IndexSet; -use serde::Deserialize; -use serde::Serialize; -use thiserror::Error; +use 
deno_lib::standalone::virtual_fs::DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME; -use super::binary::DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME; -use crate::util; use crate::util::display::human_size; use crate::util::display::DisplayTreeNode; -use crate::util::fs::canonicalize_path; - -#[derive(Debug)] -pub struct BuiltVfs { - pub root_path: WindowsSystemRootablePath, - pub case_sensitivity: FileSystemCaseSensitivity, - pub entries: VirtualDirectoryEntries, - pub files: Vec>, -} - -#[derive(Debug)] -pub struct VfsBuilder { - executable_root: VirtualDirectory, - files: Vec>, - current_offset: u64, - file_offsets: HashMap, - /// The minimum root directory that should be included in the VFS. - min_root_dir: Option, - case_sensitivity: FileSystemCaseSensitivity, -} - -impl VfsBuilder { - pub fn new() -> Self { - Self { - executable_root: VirtualDirectory { - name: "/".to_string(), - entries: Default::default(), - }, - files: Vec::new(), - current_offset: 0, - file_offsets: Default::default(), - min_root_dir: Default::default(), - // This is not exactly correct because file systems on these OSes - // may be case-sensitive or not based on the directory, but this - // is a good enough approximation and limitation. In the future, - // we may want to store this information per directory instead - // depending on the feedback we get. - case_sensitivity: if cfg!(windows) || cfg!(target_os = "macos") { - FileSystemCaseSensitivity::Insensitive - } else { - FileSystemCaseSensitivity::Sensitive - }, - } - } - - pub fn case_sensitivity(&self) -> FileSystemCaseSensitivity { - self.case_sensitivity - } - - /// Add a directory that might be the minimum root directory - /// of the VFS. - /// - /// For example, say the user has a deno.json and specifies an - /// import map in a parent directory. The import map won't be - /// included in the VFS, but its base will meaning we need to - /// tell the VFS builder to include the base of the import map - /// by calling this method. 
- pub fn add_possible_min_root_dir(&mut self, path: &Path) { - self.add_dir_raw(path); - - match &self.min_root_dir { - Some(WindowsSystemRootablePath::WindowSystemRoot) => { - // already the root dir - } - Some(WindowsSystemRootablePath::Path(current_path)) => { - let mut common_components = Vec::new(); - for (a, b) in current_path.components().zip(path.components()) { - if a != b { - break; - } - common_components.push(a); - } - if common_components.is_empty() { - if cfg!(windows) { - self.min_root_dir = - Some(WindowsSystemRootablePath::WindowSystemRoot); - } else { - self.min_root_dir = - Some(WindowsSystemRootablePath::Path(PathBuf::from("/"))); - } - } else { - self.min_root_dir = Some(WindowsSystemRootablePath::Path( - common_components.iter().collect(), - )); - } - } - None => { - self.min_root_dir = - Some(WindowsSystemRootablePath::Path(path.to_path_buf())); - } - } - } - - pub fn add_dir_recursive(&mut self, path: &Path) -> Result<(), AnyError> { - let target_path = self.resolve_target_path(path)?; - self.add_dir_recursive_not_symlink(&target_path) - } - - fn add_dir_recursive_not_symlink( - &mut self, - path: &Path, - ) -> Result<(), AnyError> { - self.add_dir_raw(path); - let read_dir = std::fs::read_dir(path) - .with_context(|| format!("Reading {}", path.display()))?; - - let mut dir_entries = - read_dir.into_iter().collect::, _>>()?; - dir_entries.sort_by_cached_key(|entry| entry.file_name()); // determinism - - for entry in dir_entries { - let file_type = entry.file_type()?; - let path = entry.path(); - - if file_type.is_dir() { - self.add_dir_recursive_not_symlink(&path)?; - } else if file_type.is_file() { - self.add_file_at_path_not_symlink(&path)?; - } else if file_type.is_symlink() { - match self.add_symlink(&path) { - Ok(target) => match target { - SymlinkTarget::File(target) => { - self.add_file_at_path_not_symlink(&target)? 
- } - SymlinkTarget::Dir(target) => { - self.add_dir_recursive_not_symlink(&target)?; - } - }, - Err(err) => { - log::warn!( - "{} Failed resolving symlink. Ignoring.\n Path: {}\n Message: {:#}", - crate::colors::yellow("Warning"), - path.display(), - err - ); - } - } - } - } - - Ok(()) - } - - fn add_dir_raw(&mut self, path: &Path) -> &mut VirtualDirectory { - log::debug!("Ensuring directory '{}'", path.display()); - debug_assert!(path.is_absolute()); - let mut current_dir = &mut self.executable_root; - - for component in path.components() { - if matches!(component, std::path::Component::RootDir) { - continue; - } - let name = component.as_os_str().to_string_lossy(); - let index = current_dir.entries.insert_or_modify( - &name, - self.case_sensitivity, - || { - VfsEntry::Dir(VirtualDirectory { - name: name.to_string(), - entries: Default::default(), - }) - }, - |_| { - // ignore - }, - ); - match current_dir.entries.get_mut_by_index(index) { - Some(VfsEntry::Dir(dir)) => { - current_dir = dir; - } - _ => unreachable!(), - }; - } - - current_dir - } - - pub fn get_system_root_dir_mut(&mut self) -> &mut VirtualDirectory { - &mut self.executable_root - } - - pub fn get_dir_mut(&mut self, path: &Path) -> Option<&mut VirtualDirectory> { - debug_assert!(path.is_absolute()); - let mut current_dir = &mut self.executable_root; - - for component in path.components() { - if matches!(component, std::path::Component::RootDir) { - continue; - } - let name = component.as_os_str().to_string_lossy(); - let entry = current_dir - .entries - .get_mut_by_name(&name, self.case_sensitivity)?; - match entry { - VfsEntry::Dir(dir) => { - current_dir = dir; - } - _ => unreachable!(), - }; - } - - Some(current_dir) - } - - pub fn add_file_at_path(&mut self, path: &Path) -> Result<(), AnyError> { - let file_bytes = std::fs::read(path) - .with_context(|| format!("Reading {}", path.display()))?; - self.add_file_with_data(path, file_bytes, VfsFileSubDataKind::Raw) - } - - fn 
add_file_at_path_not_symlink( - &mut self, - path: &Path, - ) -> Result<(), AnyError> { - let file_bytes = std::fs::read(path) - .with_context(|| format!("Reading {}", path.display()))?; - self.add_file_with_data_inner(path, file_bytes, VfsFileSubDataKind::Raw) - } - - pub fn add_file_with_data( - &mut self, - path: &Path, - data: Vec, - sub_data_kind: VfsFileSubDataKind, - ) -> Result<(), AnyError> { - let metadata = std::fs::symlink_metadata(path).with_context(|| { - format!("Resolving target path for '{}'", path.display()) - })?; - if metadata.is_symlink() { - let target = self.add_symlink(path)?.into_path_buf(); - self.add_file_with_data_inner(&target, data, sub_data_kind) - } else { - self.add_file_with_data_inner(path, data, sub_data_kind) - } - } - - fn add_file_with_data_inner( - &mut self, - path: &Path, - data: Vec, - sub_data_kind: VfsFileSubDataKind, - ) -> Result<(), AnyError> { - log::debug!("Adding file '{}'", path.display()); - let checksum = deno_lib::util::checksum::gen(&[&data]); - let case_sensitivity = self.case_sensitivity; - let offset = if let Some(offset) = self.file_offsets.get(&checksum) { - // duplicate file, reuse an old offset - *offset - } else { - self.file_offsets.insert(checksum, self.current_offset); - self.current_offset - }; - - let dir = self.add_dir_raw(path.parent().unwrap()); - let name = path.file_name().unwrap().to_string_lossy(); - let offset_and_len = OffsetWithLength { - offset, - len: data.len() as u64, - }; - dir.entries.insert_or_modify( - &name, - case_sensitivity, - || { - VfsEntry::File(VirtualFile { - name: name.to_string(), - offset: offset_and_len, - module_graph_offset: offset_and_len, - }) - }, - |entry| match entry { - VfsEntry::File(virtual_file) => match sub_data_kind { - VfsFileSubDataKind::Raw => { - virtual_file.offset = offset_and_len; - } - VfsFileSubDataKind::ModuleGraph => { - virtual_file.module_graph_offset = offset_and_len; - } - }, - VfsEntry::Dir(_) | VfsEntry::Symlink(_) => unreachable!(), - 
}, - ); - - // new file, update the list of files - if self.current_offset == offset { - self.files.push(data); - self.current_offset += offset_and_len.len; - } - - Ok(()) - } - - fn resolve_target_path(&mut self, path: &Path) -> Result { - let metadata = std::fs::symlink_metadata(path).with_context(|| { - format!("Resolving target path for '{}'", path.display()) - })?; - if metadata.is_symlink() { - Ok(self.add_symlink(path)?.into_path_buf()) - } else { - Ok(path.to_path_buf()) - } - } - - fn add_symlink(&mut self, path: &Path) -> Result { - self.add_symlink_inner(path, &mut IndexSet::new()) - } - - fn add_symlink_inner( - &mut self, - path: &Path, - visited: &mut IndexSet, - ) -> Result { - log::debug!("Adding symlink '{}'", path.display()); - let target = strip_unc_prefix( - std::fs::read_link(path) - .with_context(|| format!("Reading symlink '{}'", path.display()))?, - ); - let case_sensitivity = self.case_sensitivity; - let target = normalize_path(path.parent().unwrap().join(&target)); - let dir = self.add_dir_raw(path.parent().unwrap()); - let name = path.file_name().unwrap().to_string_lossy(); - dir.entries.insert_or_modify( - &name, - case_sensitivity, - || { - VfsEntry::Symlink(VirtualSymlink { - name: name.to_string(), - dest_parts: VirtualSymlinkParts::from_path(&target), - }) - }, - |_| { - // ignore previously inserted - }, - ); - let target_metadata = - std::fs::symlink_metadata(&target).with_context(|| { - format!("Reading symlink target '{}'", target.display()) - })?; - if target_metadata.is_symlink() { - if !visited.insert(target.clone()) { - // todo: probably don't error in this scenario - bail!( - "Circular symlink detected: {} -> {}", - visited - .iter() - .map(|p| p.display().to_string()) - .collect::>() - .join(" -> "), - target.display() - ); - } - self.add_symlink_inner(&target, visited) - } else if target_metadata.is_dir() { - Ok(SymlinkTarget::Dir(target)) - } else { - Ok(SymlinkTarget::File(target)) - } - } - - pub fn build(self) -> 
BuiltVfs { - fn strip_prefix_from_symlinks( - dir: &mut VirtualDirectory, - parts: &[String], - ) { - for entry in dir.entries.iter_mut() { - match entry { - VfsEntry::Dir(dir) => { - strip_prefix_from_symlinks(dir, parts); - } - VfsEntry::File(_) => {} - VfsEntry::Symlink(symlink) => { - let parts = symlink - .dest_parts - .take_parts() - .into_iter() - .skip(parts.len()) - .collect(); - symlink.dest_parts.set_parts(parts); - } - } - } - } - - let mut current_dir = self.executable_root; - let mut current_path = if cfg!(windows) { - WindowsSystemRootablePath::WindowSystemRoot - } else { - WindowsSystemRootablePath::Path(PathBuf::from("/")) - }; - loop { - if current_dir.entries.len() != 1 { - break; - } - if self.min_root_dir.as_ref() == Some(¤t_path) { - break; - } - match current_dir.entries.iter().next().unwrap() { - VfsEntry::Dir(dir) => { - if dir.name == DENO_COMPILE_GLOBAL_NODE_MODULES_DIR_NAME { - // special directory we want to maintain - break; - } - match current_dir.entries.remove(0) { - VfsEntry::Dir(dir) => { - current_path = - WindowsSystemRootablePath::Path(current_path.join(&dir.name)); - current_dir = dir; - } - _ => unreachable!(), - }; - } - VfsEntry::File(_) | VfsEntry::Symlink(_) => break, - } - } - if let WindowsSystemRootablePath::Path(path) = ¤t_path { - strip_prefix_from_symlinks( - &mut current_dir, - VirtualSymlinkParts::from_path(path).parts(), - ); - } - BuiltVfs { - root_path: current_path, - case_sensitivity: self.case_sensitivity, - entries: current_dir.entries, - files: self.files, - } - } -} - -#[derive(Debug)] -enum SymlinkTarget { - File(PathBuf), - Dir(PathBuf), -} - -impl SymlinkTarget { - pub fn into_path_buf(self) -> PathBuf { - match self { - Self::File(path) => path, - Self::Dir(path) => path, - } - } -} pub fn output_vfs(vfs: &BuiltVfs, executable_name: &str) { if !log::log_enabled!(log::Level::Info) { @@ -506,11 +30,7 @@ pub fn output_vfs(vfs: &BuiltVfs, executable_name: &str) { let display_tree = 
vfs_as_display_tree(vfs, executable_name); display_tree.print(&mut text).unwrap(); // unwrap ok because it's writing to a string log::info!("\n{}\n", deno_terminal::colors::bold("Embedded Files")); - log::info!("{}\n", text.trim()); - log::info!( - "Size: {}\n", - human_size(vfs.files.iter().map(|f| f.len() as f64).sum()) - ); + log::info!("{}", text.trim()); } fn vfs_as_display_tree( @@ -680,8 +200,13 @@ fn vfs_as_display_tree( let mut size = Size::default(); add_offset_to_size(file.offset, &mut size, seen_offsets); - if file.module_graph_offset.offset != file.offset.offset { - add_offset_to_size(file.module_graph_offset, &mut size, seen_offsets); + let maybe_offsets = [ + file.transpiled_offset, + file.source_map_offset, + file.cjs_export_analysis_offset, + ]; + for offset in maybe_offsets.into_iter().flatten() { + add_offset_to_size(offset, &mut size, seen_offsets); } size } @@ -841,846 +366,14 @@ fn vfs_as_display_tree( } } -#[derive(Debug)] -pub struct VfsRoot { - pub dir: VirtualDirectory, - pub root_path: PathBuf, - pub start_file_offset: u64, -} - -impl VfsRoot { - fn find_entry<'a>( - &'a self, - path: &Path, - case_sensitivity: FileSystemCaseSensitivity, - ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { - self.find_entry_inner(path, &mut HashSet::new(), case_sensitivity) - } - - fn find_entry_inner<'a>( - &'a self, - path: &Path, - seen: &mut HashSet, - case_sensitivity: FileSystemCaseSensitivity, - ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { - let mut path = Cow::Borrowed(path); - loop { - let (resolved_path, entry) = - self.find_entry_no_follow_inner(&path, seen, case_sensitivity)?; - match entry { - VfsEntryRef::Symlink(symlink) => { - if !seen.insert(path.to_path_buf()) { - return Err(std::io::Error::new( - std::io::ErrorKind::Other, - "circular symlinks", - )); - } - path = Cow::Owned(symlink.resolve_dest_from_root(&self.root_path)); - } - _ => { - return Ok((resolved_path, entry)); - } - } - } - } - - fn find_entry_no_follow( - &self, - 
path: &Path, - case_sensitivity: FileSystemCaseSensitivity, - ) -> std::io::Result<(PathBuf, VfsEntryRef)> { - self.find_entry_no_follow_inner(path, &mut HashSet::new(), case_sensitivity) - } - - fn find_entry_no_follow_inner<'a>( - &'a self, - path: &Path, - seen: &mut HashSet, - case_sensitivity: FileSystemCaseSensitivity, - ) -> std::io::Result<(PathBuf, VfsEntryRef<'a>)> { - let relative_path = match path.strip_prefix(&self.root_path) { - Ok(p) => p, - Err(_) => { - return Err(std::io::Error::new( - std::io::ErrorKind::NotFound, - "path not found", - )); - } - }; - let mut final_path = self.root_path.clone(); - let mut current_entry = VfsEntryRef::Dir(&self.dir); - for component in relative_path.components() { - let component = component.as_os_str(); - let current_dir = match current_entry { - VfsEntryRef::Dir(dir) => { - final_path.push(component); - dir - } - VfsEntryRef::Symlink(symlink) => { - let dest = symlink.resolve_dest_from_root(&self.root_path); - let (resolved_path, entry) = - self.find_entry_inner(&dest, seen, case_sensitivity)?; - final_path = resolved_path; // overwrite with the new resolved path - match entry { - VfsEntryRef::Dir(dir) => { - final_path.push(component); - dir - } - _ => { - return Err(std::io::Error::new( - std::io::ErrorKind::NotFound, - "path not found", - )); - } - } - } - _ => { - return Err(std::io::Error::new( - std::io::ErrorKind::NotFound, - "path not found", - )); - } - }; - let component = component.to_string_lossy(); - current_entry = current_dir - .entries - .get_by_name(&component, case_sensitivity) - .ok_or_else(|| { - std::io::Error::new(std::io::ErrorKind::NotFound, "path not found") - })? 
- .as_ref(); - } - - Ok((final_path, current_entry)) - } -} - -pub struct FileBackedVfsFile { - file: VirtualFile, - pos: RefCell, - vfs: Arc, -} - -impl FileBackedVfsFile { - pub fn seek(&self, pos: SeekFrom) -> std::io::Result { - match pos { - SeekFrom::Start(pos) => { - *self.pos.borrow_mut() = pos; - Ok(pos) - } - SeekFrom::End(offset) => { - if offset < 0 && -offset as u64 > self.file.offset.len { - let msg = "An attempt was made to move the file pointer before the beginning of the file."; - Err(std::io::Error::new( - std::io::ErrorKind::PermissionDenied, - msg, - )) - } else { - let mut current_pos = self.pos.borrow_mut(); - *current_pos = if offset >= 0 { - self.file.offset.len - (offset as u64) - } else { - self.file.offset.len + (-offset as u64) - }; - Ok(*current_pos) - } - } - SeekFrom::Current(offset) => { - let mut current_pos = self.pos.borrow_mut(); - if offset >= 0 { - *current_pos += offset as u64; - } else if -offset as u64 > *current_pos { - return Err(std::io::Error::new(std::io::ErrorKind::PermissionDenied, "An attempt was made to move the file pointer before the beginning of the file.")); - } else { - *current_pos -= -offset as u64; - } - Ok(*current_pos) - } - } - } - - pub fn read_to_buf(&self, buf: &mut [u8]) -> std::io::Result { - let read_pos = { - let mut pos = self.pos.borrow_mut(); - let read_pos = *pos; - // advance the position due to the read - *pos = std::cmp::min(self.file.offset.len, *pos + buf.len() as u64); - read_pos - }; - self.vfs.read_file(&self.file, read_pos, buf) - } - - fn read_to_end(&self) -> FsResult> { - let read_pos = { - let mut pos = self.pos.borrow_mut(); - let read_pos = *pos; - // todo(dsherret): should this always set it to the end of the file? 
- if *pos < self.file.offset.len { - // advance the position due to the read - *pos = self.file.offset.len; - } - read_pos - }; - if read_pos > self.file.offset.len { - return Ok(Cow::Borrowed(&[])); - } - if read_pos == 0 { - Ok( - self - .vfs - .read_file_all(&self.file, VfsFileSubDataKind::Raw)?, - ) - } else { - let size = (self.file.offset.len - read_pos) as usize; - let mut buf = vec![0; size]; - self.vfs.read_file(&self.file, read_pos, &mut buf)?; - Ok(Cow::Owned(buf)) - } - } -} - -#[async_trait::async_trait(?Send)] -impl deno_io::fs::File for FileBackedVfsFile { - fn read_sync(self: Rc, buf: &mut [u8]) -> FsResult { - self.read_to_buf(buf).map_err(Into::into) - } - async fn read_byob( - self: Rc, - mut buf: BufMutView, - ) -> FsResult<(usize, BufMutView)> { - // this is fast, no need to spawn a task - let nread = self.read_to_buf(&mut buf)?; - Ok((nread, buf)) - } - - fn write_sync(self: Rc, _buf: &[u8]) -> FsResult { - Err(FsError::NotSupported) - } - async fn write( - self: Rc, - _buf: BufView, - ) -> FsResult { - Err(FsError::NotSupported) - } - - fn write_all_sync(self: Rc, _buf: &[u8]) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn write_all(self: Rc, _buf: BufView) -> FsResult<()> { - Err(FsError::NotSupported) - } - - fn read_all_sync(self: Rc) -> FsResult> { - self.read_to_end() - } - async fn read_all_async(self: Rc) -> FsResult> { - // this is fast, no need to spawn a task - self.read_to_end() - } - - fn chmod_sync(self: Rc, _pathmode: u32) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn chmod_async(self: Rc, _mode: u32) -> FsResult<()> { - Err(FsError::NotSupported) - } - - fn seek_sync(self: Rc, pos: SeekFrom) -> FsResult { - self.seek(pos).map_err(|err| err.into()) - } - async fn seek_async(self: Rc, pos: SeekFrom) -> FsResult { - self.seek(pos).map_err(|err| err.into()) - } - - fn datasync_sync(self: Rc) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn datasync_async(self: Rc) -> FsResult<()> { - 
Err(FsError::NotSupported) - } - - fn sync_sync(self: Rc) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn sync_async(self: Rc) -> FsResult<()> { - Err(FsError::NotSupported) - } - - fn stat_sync(self: Rc) -> FsResult { - Err(FsError::NotSupported) - } - async fn stat_async(self: Rc) -> FsResult { - Err(FsError::NotSupported) - } - - fn lock_sync(self: Rc, _exclusive: bool) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn lock_async(self: Rc, _exclusive: bool) -> FsResult<()> { - Err(FsError::NotSupported) - } - - fn unlock_sync(self: Rc) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn unlock_async(self: Rc) -> FsResult<()> { - Err(FsError::NotSupported) - } - - fn truncate_sync(self: Rc, _len: u64) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn truncate_async(self: Rc, _len: u64) -> FsResult<()> { - Err(FsError::NotSupported) - } - - fn utime_sync( - self: Rc, - _atime_secs: i64, - _atime_nanos: u32, - _mtime_secs: i64, - _mtime_nanos: u32, - ) -> FsResult<()> { - Err(FsError::NotSupported) - } - async fn utime_async( - self: Rc, - _atime_secs: i64, - _atime_nanos: u32, - _mtime_secs: i64, - _mtime_nanos: u32, - ) -> FsResult<()> { - Err(FsError::NotSupported) - } - - // lower level functionality - fn as_stdio(self: Rc) -> FsResult { - Err(FsError::NotSupported) - } - fn backing_fd(self: Rc) -> Option { - None - } - fn try_clone_inner(self: Rc) -> FsResult> { - Ok(self) - } -} - -#[derive(Debug, Clone)] -pub struct FileBackedVfsDirEntry { - pub parent_path: PathBuf, - pub metadata: FileBackedVfsMetadata, -} - -#[derive(Debug, Clone)] -pub struct FileBackedVfsMetadata { - pub name: String, - pub file_type: sys_traits::FileType, - pub len: u64, -} - -impl FileBackedVfsMetadata { - pub fn from_vfs_entry_ref(vfs_entry: VfsEntryRef) -> Self { - FileBackedVfsMetadata { - file_type: match vfs_entry { - VfsEntryRef::Dir(_) => sys_traits::FileType::Dir, - VfsEntryRef::File(_) => sys_traits::FileType::File, - 
VfsEntryRef::Symlink(_) => sys_traits::FileType::Symlink, - }, - name: vfs_entry.name().to_string(), - len: match vfs_entry { - VfsEntryRef::Dir(_) => 0, - VfsEntryRef::File(file) => file.offset.len, - VfsEntryRef::Symlink(_) => 0, - }, - } - } - pub fn as_fs_stat(&self) -> FsStat { - FsStat { - is_directory: self.file_type == sys_traits::FileType::Dir, - is_file: self.file_type == sys_traits::FileType::File, - is_symlink: self.file_type == sys_traits::FileType::Symlink, - atime: None, - birthtime: None, - mtime: None, - ctime: None, - blksize: 0, - size: self.len, - dev: 0, - ino: 0, - mode: 0, - nlink: 0, - uid: 0, - gid: 0, - rdev: 0, - blocks: 0, - is_block_device: false, - is_char_device: false, - is_fifo: false, - is_socket: false, - } - } -} - -#[derive(Debug)] -pub struct FileBackedVfs { - vfs_data: Cow<'static, [u8]>, - fs_root: VfsRoot, - case_sensitivity: FileSystemCaseSensitivity, -} - -impl FileBackedVfs { - pub fn new( - data: Cow<'static, [u8]>, - fs_root: VfsRoot, - case_sensitivity: FileSystemCaseSensitivity, - ) -> Self { - Self { - vfs_data: data, - fs_root, - case_sensitivity, - } - } - - pub fn root(&self) -> &Path { - &self.fs_root.root_path - } - - pub fn is_path_within(&self, path: &Path) -> bool { - path.starts_with(&self.fs_root.root_path) - } - - pub fn open_file( - self: &Arc, - path: &Path, - ) -> std::io::Result { - let file = self.file_entry(path)?; - Ok(FileBackedVfsFile { - file: file.clone(), - vfs: self.clone(), - pos: Default::default(), - }) - } - - pub fn read_dir(&self, path: &Path) -> std::io::Result> { - let dir = self.dir_entry(path)?; - Ok( - dir - .entries - .iter() - .map(|entry| FsDirEntry { - name: entry.name().to_string(), - is_file: matches!(entry, VfsEntry::File(_)), - is_directory: matches!(entry, VfsEntry::Dir(_)), - is_symlink: matches!(entry, VfsEntry::Symlink(_)), - }) - .collect(), - ) - } - - pub fn read_dir_with_metadata<'a>( - &'a self, - path: &Path, - ) -> std::io::Result + 'a> { - let dir = 
self.dir_entry(path)?; - let path = path.to_path_buf(); - Ok(dir.entries.iter().map(move |entry| FileBackedVfsDirEntry { - parent_path: path.to_path_buf(), - metadata: FileBackedVfsMetadata::from_vfs_entry_ref(entry.as_ref()), - })) - } - - pub fn read_link(&self, path: &Path) -> std::io::Result { - let (_, entry) = self - .fs_root - .find_entry_no_follow(path, self.case_sensitivity)?; - match entry { - VfsEntryRef::Symlink(symlink) => { - Ok(symlink.resolve_dest_from_root(&self.fs_root.root_path)) - } - VfsEntryRef::Dir(_) | VfsEntryRef::File(_) => Err(std::io::Error::new( - std::io::ErrorKind::Other, - "not a symlink", - )), - } - } - - pub fn lstat(&self, path: &Path) -> std::io::Result { - let (_, entry) = self - .fs_root - .find_entry_no_follow(path, self.case_sensitivity)?; - Ok(FileBackedVfsMetadata::from_vfs_entry_ref(entry)) - } - - pub fn stat(&self, path: &Path) -> std::io::Result { - let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?; - Ok(FileBackedVfsMetadata::from_vfs_entry_ref(entry)) - } - - pub fn canonicalize(&self, path: &Path) -> std::io::Result { - let (path, _) = self.fs_root.find_entry(path, self.case_sensitivity)?; - Ok(path) - } - - pub fn read_file_all( - &self, - file: &VirtualFile, - sub_data_kind: VfsFileSubDataKind, - ) -> std::io::Result> { - let read_len = match sub_data_kind { - VfsFileSubDataKind::Raw => file.offset.len, - VfsFileSubDataKind::ModuleGraph => file.module_graph_offset.len, - }; - let read_range = self.get_read_range(file, sub_data_kind, 0, read_len)?; - match &self.vfs_data { - Cow::Borrowed(data) => Ok(Cow::Borrowed(&data[read_range])), - Cow::Owned(data) => Ok(Cow::Owned(data[read_range].to_vec())), - } - } - - pub fn read_file( - &self, - file: &VirtualFile, - pos: u64, - buf: &mut [u8], - ) -> std::io::Result { - let read_range = self.get_read_range( - file, - VfsFileSubDataKind::Raw, - pos, - buf.len() as u64, - )?; - let read_len = read_range.len(); - 
buf[..read_len].copy_from_slice(&self.vfs_data[read_range]); - Ok(read_len) - } - - fn get_read_range( - &self, - file: &VirtualFile, - sub_data_kind: VfsFileSubDataKind, - pos: u64, - len: u64, - ) -> std::io::Result> { - let file_offset_and_len = match sub_data_kind { - VfsFileSubDataKind::Raw => file.offset, - VfsFileSubDataKind::ModuleGraph => file.module_graph_offset, - }; - if pos > file_offset_and_len.len { - return Err(std::io::Error::new( - std::io::ErrorKind::UnexpectedEof, - "unexpected EOF", - )); - } - let file_offset = - self.fs_root.start_file_offset + file_offset_and_len.offset; - let start = file_offset + pos; - let end = file_offset + std::cmp::min(pos + len, file_offset_and_len.len); - Ok(start as usize..end as usize) - } - - pub fn dir_entry(&self, path: &Path) -> std::io::Result<&VirtualDirectory> { - let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?; - match entry { - VfsEntryRef::Dir(dir) => Ok(dir), - VfsEntryRef::Symlink(_) => unreachable!(), - VfsEntryRef::File(_) => Err(std::io::Error::new( - std::io::ErrorKind::Other, - "path is a file", - )), - } - } - - pub fn file_entry(&self, path: &Path) -> std::io::Result<&VirtualFile> { - let (_, entry) = self.fs_root.find_entry(path, self.case_sensitivity)?; - match entry { - VfsEntryRef::Dir(_) => Err(std::io::Error::new( - std::io::ErrorKind::Other, - "path is a directory", - )), - VfsEntryRef::Symlink(_) => unreachable!(), - VfsEntryRef::File(file) => Ok(file), - } - } -} - #[cfg(test)] mod test { - use std::io::Write; - use console_static_text::ansi::strip_ansi_codes; - use deno_io::fs::File; - use test_util::assert_contains; + use deno_lib::standalone::virtual_fs::VfsBuilder; use test_util::TempDir; use super::*; - #[track_caller] - fn read_file(vfs: &FileBackedVfs, path: &Path) -> String { - let file = vfs.file_entry(path).unwrap(); - String::from_utf8( - vfs - .read_file_all(file, VfsFileSubDataKind::Raw) - .unwrap() - .into_owned(), - ) - .unwrap() - } - - #[test] - 
fn builds_and_uses_virtual_fs() { - let temp_dir = TempDir::new(); - // we canonicalize the temp directory because the vfs builder - // will canonicalize the root path - let src_path = temp_dir.path().canonicalize().join("src"); - src_path.create_dir_all(); - src_path.join("sub_dir").create_dir_all(); - src_path.join("e.txt").write("e"); - src_path.symlink_file("e.txt", "sub_dir/e.txt"); - let src_path = src_path.to_path_buf(); - let mut builder = VfsBuilder::new(); - builder - .add_file_with_data_inner( - &src_path.join("a.txt"), - "data".into(), - VfsFileSubDataKind::Raw, - ) - .unwrap(); - builder - .add_file_with_data_inner( - &src_path.join("b.txt"), - "data".into(), - VfsFileSubDataKind::Raw, - ) - .unwrap(); - assert_eq!(builder.files.len(), 1); // because duplicate data - builder - .add_file_with_data_inner( - &src_path.join("c.txt"), - "c".into(), - VfsFileSubDataKind::Raw, - ) - .unwrap(); - builder - .add_file_with_data_inner( - &src_path.join("sub_dir").join("d.txt"), - "d".into(), - VfsFileSubDataKind::Raw, - ) - .unwrap(); - builder.add_file_at_path(&src_path.join("e.txt")).unwrap(); - builder - .add_symlink(&src_path.join("sub_dir").join("e.txt")) - .unwrap(); - - // get the virtual fs - let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); - - assert_eq!(read_file(&virtual_fs, &dest_path.join("a.txt")), "data"); - assert_eq!(read_file(&virtual_fs, &dest_path.join("b.txt")), "data"); - - // attempt reading a symlink - assert_eq!( - read_file(&virtual_fs, &dest_path.join("sub_dir").join("e.txt")), - "e", - ); - - // canonicalize symlink - assert_eq!( - virtual_fs - .canonicalize(&dest_path.join("sub_dir").join("e.txt")) - .unwrap(), - dest_path.join("e.txt"), - ); - - // metadata - assert_eq!( - virtual_fs - .lstat(&dest_path.join("sub_dir").join("e.txt")) - .unwrap() - .file_type, - sys_traits::FileType::Symlink, - ); - assert_eq!( - virtual_fs - .stat(&dest_path.join("sub_dir").join("e.txt")) - .unwrap() - .file_type, - 
sys_traits::FileType::File, - ); - assert_eq!( - virtual_fs - .stat(&dest_path.join("sub_dir")) - .unwrap() - .file_type, - sys_traits::FileType::Dir, - ); - assert_eq!( - virtual_fs.stat(&dest_path.join("e.txt")).unwrap().file_type, - sys_traits::FileType::File - ); - } - - #[test] - fn test_include_dir_recursive() { - let temp_dir = TempDir::new(); - let temp_dir_path = temp_dir.path().canonicalize(); - temp_dir.create_dir_all("src/nested/sub_dir"); - temp_dir.write("src/a.txt", "data"); - temp_dir.write("src/b.txt", "data"); - util::fs::symlink_dir( - &crate::sys::CliSys::default(), - temp_dir_path.join("src/nested/sub_dir").as_path(), - temp_dir_path.join("src/sub_dir_link").as_path(), - ) - .unwrap(); - temp_dir.write("src/nested/sub_dir/c.txt", "c"); - - // build and create the virtual fs - let src_path = temp_dir_path.join("src").to_path_buf(); - let mut builder = VfsBuilder::new(); - builder.add_dir_recursive(&src_path).unwrap(); - let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); - - assert_eq!(read_file(&virtual_fs, &dest_path.join("a.txt")), "data",); - assert_eq!(read_file(&virtual_fs, &dest_path.join("b.txt")), "data",); - - assert_eq!( - read_file( - &virtual_fs, - &dest_path.join("nested").join("sub_dir").join("c.txt") - ), - "c", - ); - assert_eq!( - read_file(&virtual_fs, &dest_path.join("sub_dir_link").join("c.txt")), - "c", - ); - assert_eq!( - virtual_fs - .lstat(&dest_path.join("sub_dir_link")) - .unwrap() - .file_type, - sys_traits::FileType::Symlink, - ); - - assert_eq!( - virtual_fs - .canonicalize(&dest_path.join("sub_dir_link").join("c.txt")) - .unwrap(), - dest_path.join("nested").join("sub_dir").join("c.txt"), - ); - } - - fn into_virtual_fs( - builder: VfsBuilder, - temp_dir: &TempDir, - ) -> (PathBuf, FileBackedVfs) { - let virtual_fs_file = temp_dir.path().join("virtual_fs"); - let vfs = builder.build(); - { - let mut file = std::fs::File::create(&virtual_fs_file).unwrap(); - for file_data in &vfs.files { - 
file.write_all(file_data).unwrap(); - } - } - let dest_path = temp_dir.path().join("dest"); - let data = std::fs::read(&virtual_fs_file).unwrap(); - ( - dest_path.to_path_buf(), - FileBackedVfs::new( - Cow::Owned(data), - VfsRoot { - dir: VirtualDirectory { - name: "".to_string(), - entries: vfs.entries, - }, - root_path: dest_path.to_path_buf(), - start_file_offset: 0, - }, - FileSystemCaseSensitivity::Sensitive, - ), - ) - } - - #[test] - fn circular_symlink() { - let temp_dir = TempDir::new(); - let src_path = temp_dir.path().canonicalize().join("src"); - src_path.create_dir_all(); - src_path.symlink_file("a.txt", "b.txt"); - src_path.symlink_file("b.txt", "c.txt"); - src_path.symlink_file("c.txt", "a.txt"); - let src_path = src_path.to_path_buf(); - let mut builder = VfsBuilder::new(); - let err = builder - .add_symlink(src_path.join("a.txt").as_path()) - .unwrap_err(); - assert_contains!(err.to_string(), "Circular symlink detected",); - } - - #[tokio::test] - async fn test_open_file() { - let temp_dir = TempDir::new(); - let temp_path = temp_dir.path().canonicalize(); - let mut builder = VfsBuilder::new(); - builder - .add_file_with_data_inner( - temp_path.join("a.txt").as_path(), - "0123456789".to_string().into_bytes(), - VfsFileSubDataKind::Raw, - ) - .unwrap(); - let (dest_path, virtual_fs) = into_virtual_fs(builder, &temp_dir); - let virtual_fs = Arc::new(virtual_fs); - let file = virtual_fs.open_file(&dest_path.join("a.txt")).unwrap(); - file.seek(SeekFrom::Current(2)).unwrap(); - let mut buf = vec![0; 2]; - file.read_to_buf(&mut buf).unwrap(); - assert_eq!(buf, b"23"); - file.read_to_buf(&mut buf).unwrap(); - assert_eq!(buf, b"45"); - file.seek(SeekFrom::Current(-4)).unwrap(); - file.read_to_buf(&mut buf).unwrap(); - assert_eq!(buf, b"23"); - file.seek(SeekFrom::Start(2)).unwrap(); - file.read_to_buf(&mut buf).unwrap(); - assert_eq!(buf, b"23"); - file.seek(SeekFrom::End(2)).unwrap(); - file.read_to_buf(&mut buf).unwrap(); - assert_eq!(buf, b"89"); - 
file.seek(SeekFrom::Current(-8)).unwrap(); - file.read_to_buf(&mut buf).unwrap(); - assert_eq!(buf, b"23"); - assert_eq!( - file - .seek(SeekFrom::Current(-5)) - .unwrap_err() - .to_string(), - "An attempt was made to move the file pointer before the beginning of the file." - ); - // go beyond the file length, then back - file.seek(SeekFrom::Current(40)).unwrap(); - file.seek(SeekFrom::Current(-38)).unwrap(); - let file = Rc::new(file); - let read_buf = file.clone().read(2).await.unwrap(); - assert_eq!(read_buf.to_vec(), b"67"); - file.clone().seek_sync(SeekFrom::Current(-2)).unwrap(); - - // read to the end of the file - let all_buf = file.clone().read_all_sync().unwrap(); - assert_eq!(all_buf.to_vec(), b"6789"); - file.clone().seek_sync(SeekFrom::Current(-9)).unwrap(); - - // try try_clone_inner and read_all_async - let all_buf = file - .try_clone_inner() - .unwrap() - .read_all_async() - .await - .unwrap(); - assert_eq!(all_buf.to_vec(), b"123456789"); - } - #[test] fn test_vfs_as_display_tree() { let temp_dir = TempDir::new(); diff --git a/cli/sys.rs b/cli/sys.rs deleted file mode 100644 index e551eab2e85d51..00000000000000 --- a/cli/sys.rs +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright 2018-2025 the Deno authors. MIT license. - -// todo(dsherret): this should instead use conditional compilation and directly -// surface the underlying implementation. -// -// The problem atm is that there's no way to have conditional compilation for -// denort or the deno binary. We should extract out denort to a separate binary. 
- -use std::borrow::Cow; -use std::path::Path; -use std::path::PathBuf; - -use sys_traits::boxed::BoxedFsDirEntry; -use sys_traits::boxed::BoxedFsFile; -use sys_traits::boxed::BoxedFsMetadataValue; -use sys_traits::boxed::FsMetadataBoxed; -use sys_traits::boxed::FsOpenBoxed; -use sys_traits::boxed::FsReadDirBoxed; -use sys_traits::CreateDirOptions; - -use crate::standalone::DenoCompileFileSystem; - -#[derive(Debug, Clone)] -pub enum CliSys { - #[allow(dead_code)] // will be dead code for denort - #[allow(clippy::disallowed_types)] // ok because sys impl - Real(sys_traits::impls::RealSys), - #[allow(dead_code)] // will be dead code for deno - DenoCompile(DenoCompileFileSystem), -} - -impl deno_lib::sys::DenoLibSys for CliSys {} - -impl Default for CliSys { - fn default() -> Self { - Self::Real(sys_traits::impls::RealSys) - } -} - -impl deno_runtime::deno_node::ExtNodeSys for CliSys {} - -impl sys_traits::BaseFsCloneFile for CliSys { - fn base_fs_clone_file(&self, src: &Path, dst: &Path) -> std::io::Result<()> { - match self { - Self::Real(sys) => sys.base_fs_clone_file(src, dst), - Self::DenoCompile(sys) => sys.base_fs_clone_file(src, dst), - } - } -} - -impl sys_traits::BaseFsSymlinkDir for CliSys { - fn base_fs_symlink_dir(&self, src: &Path, dst: &Path) -> std::io::Result<()> { - match self { - Self::Real(sys) => sys.base_fs_symlink_dir(src, dst), - Self::DenoCompile(sys) => sys.base_fs_symlink_dir(src, dst), - } - } -} - -impl sys_traits::BaseFsCopy for CliSys { - fn base_fs_copy(&self, src: &Path, dst: &Path) -> std::io::Result { - match self { - Self::Real(sys) => sys.base_fs_copy(src, dst), - Self::DenoCompile(sys) => sys.base_fs_copy(src, dst), - } - } -} - -impl sys_traits::BaseFsHardLink for CliSys { - fn base_fs_hard_link(&self, src: &Path, dst: &Path) -> std::io::Result<()> { - match self { - Self::Real(sys) => sys.base_fs_hard_link(src, dst), - Self::DenoCompile(sys) => sys.base_fs_hard_link(src, dst), - } - } -} - -impl sys_traits::BaseFsRead for CliSys 
{ - fn base_fs_read(&self, p: &Path) -> std::io::Result> { - match self { - Self::Real(sys) => sys.base_fs_read(p), - Self::DenoCompile(sys) => sys.base_fs_read(p), - } - } -} - -impl sys_traits::BaseFsReadDir for CliSys { - type ReadDirEntry = BoxedFsDirEntry; - - fn base_fs_read_dir( - &self, - p: &Path, - ) -> std::io::Result< - Box> + '_>, - > { - match self { - Self::Real(sys) => sys.fs_read_dir_boxed(p), - Self::DenoCompile(sys) => sys.fs_read_dir_boxed(p), - } - } -} - -impl sys_traits::BaseFsCanonicalize for CliSys { - fn base_fs_canonicalize(&self, p: &Path) -> std::io::Result { - match self { - Self::Real(sys) => sys.base_fs_canonicalize(p), - Self::DenoCompile(sys) => sys.base_fs_canonicalize(p), - } - } -} - -impl sys_traits::BaseFsMetadata for CliSys { - type Metadata = BoxedFsMetadataValue; - - fn base_fs_metadata(&self, path: &Path) -> std::io::Result { - match self { - Self::Real(sys) => sys.fs_metadata_boxed(path), - Self::DenoCompile(sys) => sys.fs_metadata_boxed(path), - } - } - - fn base_fs_symlink_metadata( - &self, - path: &Path, - ) -> std::io::Result { - match self { - Self::Real(sys) => sys.fs_symlink_metadata_boxed(path), - Self::DenoCompile(sys) => sys.fs_symlink_metadata_boxed(path), - } - } -} - -impl sys_traits::BaseFsCreateDir for CliSys { - fn base_fs_create_dir( - &self, - p: &Path, - options: &CreateDirOptions, - ) -> std::io::Result<()> { - match self { - Self::Real(sys) => sys.base_fs_create_dir(p, options), - Self::DenoCompile(sys) => sys.base_fs_create_dir(p, options), - } - } -} - -impl sys_traits::BaseFsOpen for CliSys { - type File = BoxedFsFile; - - fn base_fs_open( - &self, - path: &Path, - options: &sys_traits::OpenOptions, - ) -> std::io::Result { - match self { - Self::Real(sys) => sys.fs_open_boxed(path, options), - Self::DenoCompile(sys) => sys.fs_open_boxed(path, options), - } - } -} - -impl sys_traits::BaseFsRemoveFile for CliSys { - fn base_fs_remove_file(&self, p: &Path) -> std::io::Result<()> { - match self { - 
Self::Real(sys) => sys.base_fs_remove_file(p), - Self::DenoCompile(sys) => sys.base_fs_remove_file(p), - } - } -} - -impl sys_traits::BaseFsRename for CliSys { - fn base_fs_rename(&self, old: &Path, new: &Path) -> std::io::Result<()> { - match self { - Self::Real(sys) => sys.base_fs_rename(old, new), - Self::DenoCompile(sys) => sys.base_fs_rename(old, new), - } - } -} - -impl sys_traits::SystemRandom for CliSys { - fn sys_random(&self, buf: &mut [u8]) -> std::io::Result<()> { - match self { - Self::Real(sys) => sys.sys_random(buf), - Self::DenoCompile(sys) => sys.sys_random(buf), - } - } -} - -impl sys_traits::SystemTimeNow for CliSys { - fn sys_time_now(&self) -> std::time::SystemTime { - match self { - Self::Real(sys) => sys.sys_time_now(), - Self::DenoCompile(sys) => sys.sys_time_now(), - } - } -} - -impl sys_traits::ThreadSleep for CliSys { - fn thread_sleep(&self, dur: std::time::Duration) { - match self { - Self::Real(sys) => sys.thread_sleep(dur), - Self::DenoCompile(sys) => sys.thread_sleep(dur), - } - } -} - -impl sys_traits::EnvCurrentDir for CliSys { - fn env_current_dir(&self) -> std::io::Result { - match self { - Self::Real(sys) => sys.env_current_dir(), - Self::DenoCompile(sys) => sys.env_current_dir(), - } - } -} - -impl sys_traits::BaseEnvVar for CliSys { - fn base_env_var_os( - &self, - key: &std::ffi::OsStr, - ) -> Option { - match self { - Self::Real(sys) => sys.base_env_var_os(key), - Self::DenoCompile(sys) => sys.base_env_var_os(key), - } - } -} - -impl sys_traits::EnvHomeDir for CliSys { - fn env_home_dir(&self) -> Option { - #[allow(clippy::disallowed_types)] // ok because sys impl - sys_traits::impls::RealSys.env_home_dir() - } -} diff --git a/cli/tools/bench/mod.rs b/cli/tools/bench/mod.rs index 6a57c4ce6ca7d4..a316e60b52dfc9 100644 --- a/cli/tools/bench/mod.rs +++ b/cli/tools/bench/mod.rs @@ -48,6 +48,7 @@ use crate::util::fs::collect_specifiers; use crate::util::path::is_script_ext; use crate::util::path::matches_pattern_or_exact_path; 
use crate::worker::CliMainWorkerFactory; +use crate::worker::CreateCustomWorkerError; mod mitata; mod reporters; @@ -164,7 +165,7 @@ async fn bench_specifier( .await { Ok(()) => Ok(()), - Err(CoreError::Js(error)) => { + Err(CreateCustomWorkerError::Core(CoreError::Js(error))) => { sender.send(BenchEvent::UncaughtError( specifier.to_string(), Box::new(error), @@ -182,7 +183,7 @@ async fn bench_specifier_inner( specifier: ModuleSpecifier, sender: &UnboundedSender, filter: TestFilter, -) -> Result<(), CoreError> { +) -> Result<(), CreateCustomWorkerError> { let mut worker = worker_factory .create_custom_worker( WorkerExecutionMode::Bench, @@ -201,7 +202,7 @@ async fn bench_specifier_inner( // Ensure that there are no pending exceptions before we start running tests worker.run_up_to_duration(Duration::from_millis(0)).await?; - worker.dispatch_load_event()?; + worker.dispatch_load_event().map_err(CoreError::Js)?; let benchmarks = { let state_rc = worker.js_runtime.op_state(); @@ -236,11 +237,13 @@ async fn bench_specifier_inner( used_only, names: benchmarks.iter().map(|(d, _)| d.name.clone()).collect(), })) - .map_err(JsErrorBox::from_err)?; + .map_err(JsErrorBox::from_err) + .map_err(CoreError::JsBox)?; for (desc, function) in benchmarks { sender .send(BenchEvent::Wait(desc.id)) - .map_err(JsErrorBox::from_err)?; + .map_err(JsErrorBox::from_err) + .map_err(CoreError::JsBox)?; let call = worker.js_runtime.call(&function); let result = worker .js_runtime @@ -249,18 +252,26 @@ async fn bench_specifier_inner( let scope = &mut worker.js_runtime.handle_scope(); let result = v8::Local::new(scope, result); let result = serde_v8::from_v8::(scope, result) - .map_err(JsErrorBox::from_err)?; + .map_err(JsErrorBox::from_err) + .map_err(CoreError::JsBox)?; sender .send(BenchEvent::Result(desc.id, result)) - .map_err(JsErrorBox::from_err)?; + .map_err(JsErrorBox::from_err) + .map_err(CoreError::JsBox)?; } // Ignore `defaultPrevented` of the `beforeunload` event. 
We don't allow the // event loop to continue beyond what's needed to await results. - worker.dispatch_beforeunload_event()?; - worker.dispatch_process_beforeexit_event()?; - worker.dispatch_unload_event()?; - worker.dispatch_process_exit_event()?; + worker + .dispatch_beforeunload_event() + .map_err(CoreError::Js)?; + worker + .dispatch_process_beforeexit_event() + .map_err(CoreError::Js)?; + worker.dispatch_unload_event().map_err(CoreError::Js)?; + worker + .dispatch_process_exit_event() + .map_err(CoreError::Js)?; // Ensure the worker has settled so we can catch any remaining unhandled rejections. We don't // want to wait forever here. diff --git a/cli/tools/bench/reporters.rs b/cli/tools/bench/reporters.rs index 68a0c56bce04e4..c3df53b76ab6f5 100644 --- a/cli/tools/bench/reporters.rs +++ b/cli/tools/bench/reporters.rs @@ -1,10 +1,10 @@ // Copyright 2018-2025 the Deno authors. MIT license. +use deno_lib::version::DENO_VERSION_INFO; use serde::Serialize; use super::*; use crate::tools::test::TestFailureFormatOptions; -use crate::version; pub trait BenchReporter { fn report_group_summary(&mut self); @@ -31,11 +31,7 @@ impl Default for JsonReporterOutput { fn default() -> Self { Self { version: JSON_SCHEMA_VERSION, - runtime: format!( - "{} {}", - version::DENO_VERSION_INFO.user_agent, - env!("TARGET") - ), + runtime: format!("{} {}", DENO_VERSION_INFO.user_agent, env!("TARGET")), cpu: mitata::cpu::name(), benches: vec![], } @@ -163,7 +159,7 @@ impl BenchReporter for ConsoleReporter { "{}\n", colors::gray(format!( "Runtime | Deno {} ({})", - crate::version::DENO_VERSION_INFO.deno, + DENO_VERSION_INFO.deno, env!("TARGET") )) ); diff --git a/cli/tools/check.rs b/cli/tools/check.rs index e850b1900f76a2..223dadd6c99179 100644 --- a/cli/tools/check.rs +++ b/cli/tools/check.rs @@ -1,34 +1,39 @@ // Copyright 2018-2025 the Deno authors. MIT license. 
+use std::collections::HashMap; use std::collections::HashSet; use std::collections::VecDeque; +use std::rc::Rc; use std::sync::Arc; use deno_ast::MediaType; use deno_ast::ModuleSpecifier; use deno_config::deno_json; +use deno_config::deno_json::CompilerOptionTypesDeserializeError; +use deno_config::workspace::WorkspaceDirectory; use deno_core::error::AnyError; +use deno_core::url::Url; use deno_error::JsErrorBox; use deno_graph::Module; use deno_graph::ModuleError; use deno_graph::ModuleGraph; use deno_graph::ModuleLoadError; +use deno_lib::util::hash::FastInsecureHasher; use deno_semver::npm::NpmPackageNvReference; use deno_terminal::colors; +use indexmap::IndexMap; use once_cell::sync::Lazy; use regex::Regex; -use crate::args::check_warn_tsconfig; +use crate::args::deno_json::TsConfigResolver; use crate::args::CheckFlags; use crate::args::CliOptions; use crate::args::Flags; use crate::args::TsConfig; -use crate::args::TsConfigType; use crate::args::TsTypeLib; use crate::args::TypeCheckMode; use crate::cache::CacheDBHash; use crate::cache::Caches; -use crate::cache::FastInsecureHasher; use crate::cache::TypeCheckCache; use crate::factory::CliFactory; use crate::graph_util::maybe_additional_sloppy_imports_message; @@ -87,6 +92,35 @@ pub async fn check( .await } +#[derive(Debug, thiserror::Error, deno_error::JsError)] +#[class(type)] +#[error("Type checking failed.")] +pub struct FailedTypeCheckingError; + +#[derive(Debug, thiserror::Error, deno_error::JsError)] +pub enum CheckError { + #[class(inherit)] + #[error(transparent)] + FailedTypeChecking(#[from] FailedTypeCheckingError), + #[class(inherit)] + #[error(transparent)] + ToMaybeJsxImportSourceConfig( + #[from] deno_json::ToMaybeJsxImportSourceConfigError, + ), + #[class(inherit)] + #[error(transparent)] + TscExec(#[from] tsc::ExecError), + #[class(inherit)] + #[error(transparent)] + CompilerOptionTypesDeserialize(#[from] CompilerOptionTypesDeserializeError), + #[class(inherit)] + #[error(transparent)] + 
CompilerOptionsParse(#[from] deno_json::CompilerOptionsParseError), + #[class(inherit)] + #[error(transparent)] + Other(#[from] JsErrorBox), +} + /// Options for performing a check of a module graph. Note that the decision to /// emit or not is determined by the `ts_config` settings. pub struct CheckOptions { @@ -97,8 +131,6 @@ pub struct CheckOptions { pub build_fast_check_graph: bool, /// Default type library to type check with. pub lib: TsTypeLib, - /// Whether to log about any ignored compiler options. - pub log_ignored_options: bool, /// If true, valid `.tsbuildinfo` files will be ignored and type checking /// will always occur. pub reload: bool, @@ -115,27 +147,7 @@ pub struct TypeChecker { node_resolver: Arc, npm_resolver: CliNpmResolver, sys: CliSys, -} - -#[derive(Debug, thiserror::Error, deno_error::JsError)] -pub enum CheckError { - #[class(inherit)] - #[error(transparent)] - Diagnostics(#[from] Diagnostics), - #[class(inherit)] - #[error(transparent)] - ConfigFile(#[from] deno_json::ConfigFileError), - #[class(inherit)] - #[error(transparent)] - ToMaybeJsxImportSourceConfig( - #[from] deno_json::ToMaybeJsxImportSourceConfigError, - ), - #[class(inherit)] - #[error(transparent)] - TscExec(#[from] tsc::ExecError), - #[class(inherit)] - #[error(transparent)] - Other(#[from] JsErrorBox), + tsconfig_resolver: Arc, } impl TypeChecker { @@ -149,6 +161,7 @@ impl TypeChecker { npm_installer: Option>, npm_resolver: CliNpmResolver, sys: CliSys, + tsconfig_resolver: Arc, ) -> Self { Self { caches, @@ -159,6 +172,7 @@ impl TypeChecker { npm_installer, npm_resolver, sys, + tsconfig_resolver, } } @@ -171,13 +185,20 @@ impl TypeChecker { graph: ModuleGraph, options: CheckOptions, ) -> Result, CheckError> { - let (graph, mut diagnostics) = - self.check_diagnostics(graph, options).await?; - diagnostics.emit_warnings(); - if diagnostics.is_empty() { - Ok(graph) + let mut diagnostics = self.check_diagnostics(graph, options).await?; + let mut failed = false; + for result in 
diagnostics.by_ref() { + let mut diagnostics = result?; + diagnostics.emit_warnings(); + if diagnostics.has_diagnostic() { + failed = true; + log::error!("{}\n", diagnostics); + } + } + if failed { + Err(FailedTypeCheckingError.into()) } else { - Err(diagnostics.into()) + Ok(diagnostics.into_graph()) } } @@ -189,7 +210,7 @@ impl TypeChecker { &self, mut graph: ModuleGraph, options: CheckOptions, - ) -> Result<(Arc, Diagnostics), CheckError> { + ) -> Result { fn check_state_hash(resolver: &CliNpmResolver) -> Option { match resolver { CliNpmResolver::Byonm(_) => { @@ -214,7 +235,9 @@ impl TypeChecker { } if !options.type_check_mode.is_true() || graph.roots.is_empty() { - return Ok((graph.into(), Default::default())); + return Ok(DiagnosticsByFolderIterator( + DiagnosticsByFolderIteratorInner::Empty(Arc::new(graph)), + )); } // node built-in specifiers use the @types/node package to determine @@ -226,18 +249,7 @@ impl TypeChecker { } } - log::debug!("Type checking."); - let ts_config_result = self - .cli_options - .resolve_ts_config_for_emit(TsConfigType::Check { lib: options.lib })?; - if options.log_ignored_options { - check_warn_tsconfig(&ts_config_result); - } - - let type_check_mode = options.type_check_mode; - let ts_config = ts_config_result.ts_config; - let cache = TypeCheckCache::new(self.caches.type_checking_cache_db()); - let check_js = ts_config.get_check_js(); + log::debug!("Type checking"); // add fast check to the graph before getting the roots if options.build_fast_check_graph { @@ -249,72 +261,299 @@ impl TypeChecker { )?; } - let filter_remote_diagnostics = |d: &tsc::Diagnostic| { - if self.is_remote_diagnostic(d) { - type_check_mode == TypeCheckMode::All && d.include_when_remote() - } else { - true + let graph = Arc::new(graph); + + // split the roots by what we can send to the ts compiler all at once + let grouped_roots = + self.group_roots_by_compiler_options(&graph, options.lib)?; + + Ok(DiagnosticsByFolderIterator( + 
DiagnosticsByFolderIteratorInner::Real(DiagnosticsByFolderRealIterator { + graph, + sys: &self.sys, + cjs_tracker: &self.cjs_tracker, + node_resolver: &self.node_resolver, + npm_resolver: &self.npm_resolver, + tsconfig_resolver: &self.tsconfig_resolver, + log_level: self.cli_options.log_level(), + npm_check_state_hash: check_state_hash(&self.npm_resolver), + type_check_cache: TypeCheckCache::new( + self.caches.type_checking_cache_db(), + ), + grouped_roots, + options, + seen_diagnotics: Default::default(), + }), + )) + } + + /// Groups the roots based on the compiler options, which includes the + /// resolved TsConfig and resolved compilerOptions.types + fn group_roots_by_compiler_options<'a>( + &'a self, + graph: &ModuleGraph, + lib: TsTypeLib, + ) -> Result, CheckGroupInfo>, CheckError> { + let mut imports_for_specifier: HashMap, Rc>> = + HashMap::with_capacity(self.tsconfig_resolver.folder_count()); + let mut roots_by_config: IndexMap<_, CheckGroupInfo> = + IndexMap::with_capacity(self.tsconfig_resolver.folder_count()); + for root in &graph.roots { + let folder = self.tsconfig_resolver.folder_for_specifier(root); + let imports = + match imports_for_specifier.entry(folder.dir.dir_url().clone()) { + std::collections::hash_map::Entry::Occupied(entry) => { + entry.get().clone() + } + std::collections::hash_map::Entry::Vacant(vacant_entry) => { + let value = Rc::new(resolve_graph_imports_for_workspace_dir( + graph, + &folder.dir, + )); + vacant_entry.insert(value.clone()); + value + } + }; + let tsconfig = folder.lib_tsconfig(lib)?; + let key = CheckGroupKey { + ts_config: tsconfig, + imports, + }; + let entry = roots_by_config.entry(key); + let entry = match entry { + indexmap::map::Entry::Occupied(entry) => entry.into_mut(), + indexmap::map::Entry::Vacant(entry) => entry.insert(CheckGroupInfo { + roots: Default::default(), + // this is slightly hacky. 
It's used as the referrer for resolving + // npm imports in the key + referrer: folder + .dir + .maybe_deno_json() + .map(|d| d.specifier.clone()) + .unwrap_or_else(|| folder.dir.dir_url().as_ref().clone()), + }), + }; + entry.roots.push(root.clone()); + } + Ok(roots_by_config) + } +} + +fn resolve_graph_imports_for_workspace_dir( + graph: &ModuleGraph, + dir: &WorkspaceDirectory, +) -> Vec { + fn resolve_graph_imports_for_referrer<'a>( + graph: &'a ModuleGraph, + referrer: &'a Url, + ) -> Option + 'a> { + let imports = graph.imports.get(referrer)?; + Some( + imports + .dependencies + .values() + .filter_map(|dep| dep.get_type().or_else(|| dep.get_code())) + .map(|url| graph.resolve(url)) + .cloned(), + ) + } + + let root_deno_json = dir.workspace.root_deno_json(); + let member_deno_json = dir.maybe_deno_json().filter(|c| { + Some(&c.specifier) != root_deno_json.as_ref().map(|c| &c.specifier) + }); + let mut specifiers = root_deno_json + .map(|c| resolve_graph_imports_for_referrer(graph, &c.specifier)) + .into_iter() + .flatten() + .flatten() + .chain( + member_deno_json + .map(|c| resolve_graph_imports_for_referrer(graph, &c.specifier)) + .into_iter() + .flatten() + .flatten(), + ) + .collect::>(); + specifiers.sort(); + specifiers +} + +/// Key to use to group roots together by config. 
+#[derive(Debug, Hash, PartialEq, Eq)] +struct CheckGroupKey<'a> { + ts_config: &'a Arc, + imports: Rc>, +} + +struct CheckGroupInfo { + roots: Vec, + referrer: Url, +} + +pub struct DiagnosticsByFolderIterator<'a>( + DiagnosticsByFolderIteratorInner<'a>, +); + +impl<'a> DiagnosticsByFolderIterator<'a> { + pub fn into_graph(self) -> Arc { + match self.0 { + DiagnosticsByFolderIteratorInner::Empty(module_graph) => module_graph, + DiagnosticsByFolderIteratorInner::Real(r) => r.graph, + } + } +} + +impl<'a> Iterator for DiagnosticsByFolderIterator<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + match &mut self.0 { + DiagnosticsByFolderIteratorInner::Empty(_) => None, + DiagnosticsByFolderIteratorInner::Real(r) => r.next(), + } + } +} + +enum DiagnosticsByFolderIteratorInner<'a> { + Empty(Arc), + Real(DiagnosticsByFolderRealIterator<'a>), +} + +struct DiagnosticsByFolderRealIterator<'a> { + graph: Arc, + sys: &'a CliSys, + cjs_tracker: &'a Arc, + node_resolver: &'a Arc, + npm_resolver: &'a CliNpmResolver, + tsconfig_resolver: &'a TsConfigResolver, + type_check_cache: TypeCheckCache, + grouped_roots: IndexMap, CheckGroupInfo>, + log_level: Option, + npm_check_state_hash: Option, + seen_diagnotics: HashSet, + options: CheckOptions, +} + +impl<'a> Iterator for DiagnosticsByFolderRealIterator<'a> { + type Item = Result; + + fn next(&mut self) -> Option { + let (group_key, group_info) = self.grouped_roots.shift_remove_index(0)?; + let mut result = self.check_diagnostics_in_folder(&group_key, group_info); + if let Ok(diagnostics) = &mut result { + diagnostics.retain(|d| { + if let (Some(file_name), Some(start)) = (&d.file_name, &d.start) { + let data = format!( + "{}{}:{}:{}{}", + d.code, + file_name, + start.line, + start.character, + d.message_text.as_deref().unwrap_or_default() + ); + self.seen_diagnotics.insert(data) + } else { + // show these for each type of config + true + } + }); + } + Some(result) + } +} + +impl<'a> 
DiagnosticsByFolderRealIterator<'a> { + #[allow(clippy::too_many_arguments)] + fn check_diagnostics_in_folder( + &self, + group_key: &'a CheckGroupKey<'a>, + group_info: CheckGroupInfo, + ) -> Result { + fn log_provided_roots(provided_roots: &[Url]) { + for root in provided_roots { + log::info!( + "{} {}", + colors::green("Check"), + to_percent_decoded_str(root.as_str()) + ); } - }; + } + + // walk the graph + let ts_config = group_key.ts_config; + let mut graph_walker = GraphWalker::new( + &self.graph, + self.sys, + self.node_resolver, + self.npm_resolver, + self.tsconfig_resolver, + self.npm_check_state_hash, + ts_config.as_ref(), + self.options.type_check_mode, + ); + let mut provided_roots = group_info.roots; + for import in group_key.imports.iter() { + graph_walker.add_config_import(import, &group_info.referrer); + } + + for root in &provided_roots { + graph_walker.add_root(root); + } + let TscRoots { roots: root_names, missing_diagnostics, maybe_check_hash, - } = get_tsc_roots( - &self.sys, - &self.npm_resolver, - &self.node_resolver, - &graph, - check_js, - check_state_hash(&self.npm_resolver), - type_check_mode, - &ts_config, - ); + } = graph_walker.into_tsc_roots(); - let missing_diagnostics = - missing_diagnostics.filter(filter_remote_diagnostics); + let mut missing_diagnostics = missing_diagnostics.filter(|d| { + self.should_include_diagnostic(self.options.type_check_mode, d) + }); + missing_diagnostics.apply_fast_check_source_maps(&self.graph); - if root_names.is_empty() && missing_diagnostics.is_empty() { - return Ok((graph.into(), Default::default())); + if root_names.is_empty() { + if missing_diagnostics.has_diagnostic() { + log_provided_roots(&provided_roots); + } + return Ok(missing_diagnostics); } - if !options.reload { + + if !self.options.reload && !missing_diagnostics.has_diagnostic() { // do not type check if we know this is type checked if let Some(check_hash) = maybe_check_hash { - if cache.has_check_hash(check_hash) { - log::debug!("Already 
type checked."); - return Ok((graph.into(), Default::default())); + if self.type_check_cache.has_check_hash(check_hash) { + log::debug!("Already type checked {}", group_info.referrer); + return Ok(Default::default()); } } } - for root in &graph.roots { - let root_str = root.as_str(); - log::info!( - "{} {}", - colors::green("Check"), - to_percent_decoded_str(root_str) - ); - } + // log out the roots that we're checking + log_provided_roots(&provided_roots); + + // the first root will always either be the specifier that the user provided + // or the first specifier in a directory + let first_root = provided_roots.remove(0); // while there might be multiple roots, we can't "merge" the build info, so we // try to retrieve the build info for first root, which is the most common use // case. - let maybe_tsbuildinfo = if options.reload { + let maybe_tsbuildinfo = if self.options.reload { None } else { - cache.get_tsbuildinfo(&graph.roots[0]) + self.type_check_cache.get_tsbuildinfo(&first_root) }; // to make tsc build info work, we need to consistently hash modules, so that // tsc can better determine if an emit is still valid or not, so we provide // that data here. 
let tsconfig_hash_data = FastInsecureHasher::new_deno_versioned() - .write(&ts_config.as_bytes()) + .write_hashable(ts_config) .finish(); - let graph = Arc::new(graph); let response = tsc::exec(tsc::Request { - config: ts_config, - debug: self.cli_options.log_level() == Some(log::Level::Debug), - graph: graph.clone(), + config: ts_config.clone(), + debug: self.log_level == Some(log::Level::Debug), + graph: self.graph.clone(), hash_data: tsconfig_hash_data, maybe_npm: Some(tsc::RequestNpmState { cjs_tracker: self.cjs_tracker.clone(), @@ -323,30 +562,46 @@ impl TypeChecker { }), maybe_tsbuildinfo, root_names, - check_mode: type_check_mode, + check_mode: self.options.type_check_mode, })?; - let response_diagnostics = - response.diagnostics.filter(filter_remote_diagnostics); - + let mut response_diagnostics = response.diagnostics.filter(|d| { + self.should_include_diagnostic(self.options.type_check_mode, d) + }); + response_diagnostics.apply_fast_check_source_maps(&self.graph); let mut diagnostics = missing_diagnostics; diagnostics.extend(response_diagnostics); - diagnostics.apply_fast_check_source_maps(&graph); - if let Some(tsbuildinfo) = response.maybe_tsbuildinfo { - cache.set_tsbuildinfo(&graph.roots[0], &tsbuildinfo); + self + .type_check_cache + .set_tsbuildinfo(&first_root, &tsbuildinfo); } - if diagnostics.is_empty() { + if !diagnostics.has_diagnostic() { if let Some(check_hash) = maybe_check_hash { - cache.add_check_hash(check_hash); + self.type_check_cache.add_check_hash(check_hash); } } log::debug!("{}", response.stats); - Ok((graph, diagnostics)) + Ok(diagnostics) + } + + fn should_include_diagnostic( + &self, + type_check_mode: TypeCheckMode, + d: &tsc::Diagnostic, + ) -> bool { + // this shouldn't check for duplicate diagnostics across folders because + // we don't want to accidentally mark a folder as being successful and save + // to the check cache if a previous folder caused a diagnostic + if self.is_remote_diagnostic(d) { + type_check_mode == 
TypeCheckMode::All && d.include_when_remote() + } else { + true + } } fn is_remote_diagnostic(&self, d: &tsc::Diagnostic) -> bool { @@ -370,27 +625,201 @@ struct TscRoots { maybe_check_hash: Option, } -/// Transform the graph into root specifiers that we can feed `tsc`. We have to -/// provide the media type for root modules because `tsc` does not "resolve" the -/// media type like other modules, as well as a root specifier needs any -/// redirects resolved. We need to include all the emittable files in -/// the roots, so they get type checked and optionally emitted, -/// otherwise they would be ignored if only imported into JavaScript. -#[allow(clippy::too_many_arguments)] -fn get_tsc_roots( - sys: &CliSys, - npm_resolver: &CliNpmResolver, - node_resolver: &CliNodeResolver, - graph: &ModuleGraph, - check_js: bool, - npm_cache_state_hash: Option, - type_check_mode: TypeCheckMode, - ts_config: &TsConfig, -) -> TscRoots { +struct GraphWalker<'a> { + graph: &'a ModuleGraph, + sys: &'a CliSys, + node_resolver: &'a CliNodeResolver, + npm_resolver: &'a CliNpmResolver, + tsconfig_resolver: &'a TsConfigResolver, + maybe_hasher: Option, + seen: HashSet<&'a Url>, + pending: VecDeque<(&'a Url, bool)>, + has_seen_node_builtin: bool, + roots: Vec<(ModuleSpecifier, MediaType)>, + missing_diagnostics: tsc::Diagnostics, +} + +impl<'a> GraphWalker<'a> { + #[allow(clippy::too_many_arguments)] + pub fn new( + graph: &'a ModuleGraph, + sys: &'a CliSys, + node_resolver: &'a CliNodeResolver, + npm_resolver: &'a CliNpmResolver, + tsconfig_resolver: &'a TsConfigResolver, + npm_cache_state_hash: Option, + ts_config: &TsConfig, + type_check_mode: TypeCheckMode, + ) -> Self { + let maybe_hasher = npm_cache_state_hash.map(|npm_cache_state_hash| { + let mut hasher = FastInsecureHasher::new_deno_versioned(); + hasher.write_hashable(npm_cache_state_hash); + hasher.write_u8(match type_check_mode { + TypeCheckMode::All => 0, + TypeCheckMode::Local => 1, + TypeCheckMode::None => 2, + }); + 
hasher.write_hashable(graph.has_node_specifier); + hasher.write_hashable(ts_config); + hasher + }); + Self { + graph, + sys, + node_resolver, + npm_resolver, + tsconfig_resolver, + maybe_hasher, + seen: HashSet::with_capacity( + graph.imports.len() + graph.specifiers_count(), + ), + pending: VecDeque::new(), + has_seen_node_builtin: false, + roots: Vec::with_capacity(graph.imports.len() + graph.specifiers_count()), + missing_diagnostics: Default::default(), + } + } + + pub fn add_config_import(&mut self, specifier: &'a Url, referrer: &Url) { + let specifier = self.graph.resolve(specifier); + if self.seen.insert(specifier) { + if let Ok(nv_ref) = NpmPackageNvReference::from_specifier(specifier) { + match self.resolve_npm_nv_ref(&nv_ref, referrer) { + Some(resolved) => { + let mt = MediaType::from_specifier(&resolved); + self.roots.push((resolved, mt)); + } + None => { + self + .missing_diagnostics + .push(tsc::Diagnostic::from_missing_error( + specifier, + None, + maybe_additional_sloppy_imports_message(self.sys, specifier), + )); + } + } + } else { + self.pending.push_back((specifier, false)); + self.resolve_pending(); + } + } + } + + pub fn add_root(&mut self, root: &'a Url) { + let specifier = self.graph.resolve(root); + if self.seen.insert(specifier) { + self.pending.push_back((specifier, false)); + } + + self.resolve_pending() + } + + /// Transform the graph into root specifiers that we can feed `tsc`. We have to + /// provide the media type for root modules because `tsc` does not "resolve" the + /// media type like other modules, as well as a root specifier needs any + /// redirects resolved. We need to include all the emittable files in + /// the roots, so they get type checked and optionally emitted, + /// otherwise they would be ignored if only imported into JavaScript. 
+ pub fn into_tsc_roots(self) -> TscRoots { + TscRoots { + roots: self.roots, + missing_diagnostics: self.missing_diagnostics, + maybe_check_hash: self.maybe_hasher.map(|h| CacheDBHash::new(h.finish())), + } + } + + fn resolve_pending(&mut self) { + while let Some((specifier, is_dynamic)) = self.pending.pop_front() { + let module = match self.graph.try_get(specifier) { + Ok(Some(module)) => module, + Ok(None) => continue, + Err(ModuleError::Missing(specifier, maybe_range)) => { + if !is_dynamic { + self + .missing_diagnostics + .push(tsc::Diagnostic::from_missing_error( + specifier, + maybe_range.as_ref(), + maybe_additional_sloppy_imports_message(self.sys, specifier), + )); + } + continue; + } + Err(ModuleError::LoadingErr( + specifier, + maybe_range, + ModuleLoadError::Loader(_), + )) => { + // these will be errors like attempting to load a directory + if !is_dynamic { + self + .missing_diagnostics + .push(tsc::Diagnostic::from_missing_error( + specifier, + maybe_range.as_ref(), + maybe_additional_sloppy_imports_message(self.sys, specifier), + )); + } + continue; + } + Err(_) => continue, + }; + if is_dynamic && !self.seen.insert(specifier) { + continue; + } + if let Some(entry) = self.maybe_get_check_entry(module) { + self.roots.push(entry); + } + + let mut maybe_module_dependencies = None; + let mut maybe_types_dependency = None; + match module { + Module::Js(module) => { + maybe_module_dependencies = + Some(module.dependencies_prefer_fast_check()); + maybe_types_dependency = module + .maybe_types_dependency + .as_ref() + .and_then(|d| d.dependency.ok()); + } + Module::Wasm(module) => { + maybe_module_dependencies = Some(&module.dependencies); + } + Module::Json(_) | Module::Npm(_) | Module::External(_) => {} + Module::Node(_) => { + if !self.has_seen_node_builtin { + self.has_seen_node_builtin = true; + // inject a specifier that will resolve node types + self.roots.push(( + ModuleSpecifier::parse("asset:///node_types.d.ts").unwrap(), + MediaType::Dts, + )); + 
} + } + } + + if let Some(deps) = maybe_module_dependencies { + for dep in deps.values() { + // walk both the code and type dependencies + if let Some(specifier) = dep.get_code() { + self.handle_specifier(specifier, dep.is_dynamic); + } + if let Some(specifier) = dep.get_type() { + self.handle_specifier(specifier, dep.is_dynamic); + } + } + } + + if let Some(dep) = maybe_types_dependency { + self.handle_specifier(&dep.specifier, false); + } + } + } + fn maybe_get_check_entry( + &mut self, module: &deno_graph::Module, - check_js: bool, - hasher: Option<&mut FastInsecureHasher>, ) -> Option<(ModuleSpecifier, MediaType)> { match module { Module::Js(module) => { @@ -408,7 +837,11 @@ fn get_tsc_roots( | MediaType::Mjs | MediaType::Cjs | MediaType::Jsx => { - if check_js || has_ts_check(module.media_type, &module.source) { + if self + .tsconfig_resolver + .check_js_for_specifier(&module.specifier) + || has_ts_check(module.media_type, &module.source) + { Some((module.specifier.clone(), module.media_type)) } else { None @@ -421,7 +854,7 @@ fn get_tsc_roots( | MediaType::Unknown => None, }; if result.is_some() { - if let Some(hasher) = hasher { + if let Some(hasher) = &mut self.maybe_hasher { hasher.write_str(module.specifier.as_str()); hasher.write_str( // the fast check module will only be set when publishing @@ -446,21 +879,21 @@ fn get_tsc_roots( None } Module::Json(module) => { - if let Some(hasher) = hasher { + if let Some(hasher) = &mut self.maybe_hasher { hasher.write_str(module.specifier.as_str()); hasher.write_str(&module.source); } None } Module::Wasm(module) => { - if let Some(hasher) = hasher { + if let Some(hasher) = &mut self.maybe_hasher { hasher.write_str(module.specifier.as_str()); hasher.write_str(&module.source_dts); } Some((module.specifier.clone(), MediaType::Dmts)) } Module::External(module) => { - if let Some(hasher) = hasher { + if let Some(hasher) = &mut self.maybe_hasher { hasher.write_str(module.specifier.as_str()); } @@ -469,205 +902,44 @@ fn 
get_tsc_roots( } } - let mut result = TscRoots { - roots: Vec::with_capacity(graph.specifiers_count()), - missing_diagnostics: Default::default(), - maybe_check_hash: None, - }; - let mut maybe_hasher = npm_cache_state_hash.map(|npm_cache_state_hash| { - let mut hasher = FastInsecureHasher::new_deno_versioned(); - hasher.write_hashable(npm_cache_state_hash); - hasher.write_u8(match type_check_mode { - TypeCheckMode::All => 0, - TypeCheckMode::Local => 1, - TypeCheckMode::None => 2, - }); - hasher.write_hashable(graph.has_node_specifier); - hasher.write(&ts_config.as_bytes()); - hasher - }); - - if graph.has_node_specifier { - // inject a specifier that will resolve node types - result.roots.push(( - ModuleSpecifier::parse("asset:///node_types.d.ts").unwrap(), - MediaType::Dts, - )); - } - - let mut seen = - HashSet::with_capacity(graph.imports.len() + graph.specifiers_count()); - let mut pending = VecDeque::new(); - - // put in the global types first so that they're resolved before anything else - for (referrer, import) in graph.imports.iter() { - for specifier in import - .dependencies - .values() - .filter_map(|dep| dep.get_type().or_else(|| dep.get_code())) - { - let specifier = graph.resolve(specifier); - if seen.insert(specifier) { - if let Ok(nv_ref) = NpmPackageNvReference::from_specifier(specifier) { - let Some(resolved) = - resolve_npm_nv_ref(npm_resolver, node_resolver, &nv_ref, referrer) - else { - result.missing_diagnostics.push( - tsc::Diagnostic::from_missing_error( - specifier, - None, - maybe_additional_sloppy_imports_message(sys, specifier), - ), - ); - continue; - }; - let mt = MediaType::from_specifier(&resolved); - result.roots.push((resolved, mt)); - } else { - pending.push_back((specifier, false)); - } + fn handle_specifier( + &mut self, + specifier: &'a ModuleSpecifier, + is_dynamic: bool, + ) { + let specifier = self.graph.resolve(specifier); + if is_dynamic { + if !self.seen.contains(specifier) { + self.pending.push_back((specifier, true)); 
} + } else if self.seen.insert(specifier) { + self.pending.push_back((specifier, false)); } } - // then the roots - for root in &graph.roots { - let specifier = graph.resolve(root); - if seen.insert(specifier) { - pending.push_back((specifier, false)); - } - } - - // now walk the graph that only includes the fast check dependencies - while let Some((specifier, is_dynamic)) = pending.pop_front() { - let module = match graph.try_get(specifier) { - Ok(Some(module)) => module, - Ok(None) => continue, - Err(ModuleError::Missing(specifier, maybe_range)) => { - if !is_dynamic { - result - .missing_diagnostics - .push(tsc::Diagnostic::from_missing_error( - specifier, - maybe_range.as_ref(), - maybe_additional_sloppy_imports_message(sys, specifier), - )); - } - continue; - } - Err(ModuleError::LoadingErr( - specifier, - maybe_range, - ModuleLoadError::Loader(_), - )) => { - // these will be errors like attempting to load a directory - if !is_dynamic { - result - .missing_diagnostics - .push(tsc::Diagnostic::from_missing_error( - specifier, - maybe_range.as_ref(), - maybe_additional_sloppy_imports_message(sys, specifier), - )); - } - continue; - } - Err(_) => continue, - }; - if is_dynamic && !seen.insert(specifier) { - continue; - } - if let Some(entry) = - maybe_get_check_entry(module, check_js, maybe_hasher.as_mut()) - { - result.roots.push(entry); - } - - let mut maybe_module_dependencies = None; - let mut maybe_types_dependency = None; - if let Module::Js(module) = module { - maybe_module_dependencies = Some(module.dependencies_prefer_fast_check()); - maybe_types_dependency = module - .maybe_types_dependency - .as_ref() - .and_then(|d| d.dependency.ok()); - } else if let Module::Wasm(module) = module { - maybe_module_dependencies = Some(&module.dependencies); - } - - fn handle_specifier<'a>( - graph: &'a ModuleGraph, - seen: &mut HashSet<&'a ModuleSpecifier>, - pending: &mut VecDeque<(&'a ModuleSpecifier, bool)>, - specifier: &'a ModuleSpecifier, - is_dynamic: bool, - ) 
{ - let specifier = graph.resolve(specifier); - if is_dynamic { - if !seen.contains(specifier) { - pending.push_back((specifier, true)); - } - } else if seen.insert(specifier) { - pending.push_back((specifier, false)); - } - } - - if let Some(deps) = maybe_module_dependencies { - for dep in deps.values() { - // walk both the code and type dependencies - if let Some(specifier) = dep.get_code() { - handle_specifier( - graph, - &mut seen, - &mut pending, - specifier, - dep.is_dynamic, - ); - } - if let Some(specifier) = dep.get_type() { - handle_specifier( - graph, - &mut seen, - &mut pending, - specifier, - dep.is_dynamic, - ); - } - } - } - - if let Some(dep) = maybe_types_dependency { - handle_specifier(graph, &mut seen, &mut pending, &dep.specifier, false); - } + fn resolve_npm_nv_ref( + &self, + nv_ref: &NpmPackageNvReference, + referrer: &ModuleSpecifier, + ) -> Option { + let pkg_dir = self + .npm_resolver + .as_managed() + .unwrap() + .resolve_pkg_folder_from_deno_module(nv_ref.nv()) + .ok()?; + let resolved = self + .node_resolver + .resolve_package_subpath_from_deno_module( + &pkg_dir, + nv_ref.sub_path(), + Some(referrer), + node_resolver::ResolutionMode::Import, + node_resolver::NodeResolutionKind::Types, + ) + .ok()?; + resolved.into_url().ok() } - - result.maybe_check_hash = - maybe_hasher.map(|hasher| CacheDBHash::new(hasher.finish())); - - result -} - -fn resolve_npm_nv_ref( - npm_resolver: &CliNpmResolver, - node_resolver: &CliNodeResolver, - nv_ref: &NpmPackageNvReference, - referrer: &ModuleSpecifier, -) -> Option { - let pkg_dir = npm_resolver - .as_managed() - .unwrap() - .resolve_pkg_folder_from_deno_module(nv_ref.nv()) - .ok()?; - let resolved = node_resolver - .resolve_package_subpath_from_deno_module( - &pkg_dir, - nv_ref.sub_path(), - Some(referrer), - node_resolver::ResolutionMode::Import, - node_resolver::NodeResolutionKind::Types, - ) - .ok()?; - Some(resolved) } /// Matches the `@ts-check` pragma. 
diff --git a/cli/tools/clean.rs b/cli/tools/clean.rs index a550d2826a25e3..9e681840db12e5 100644 --- a/cli/tools/clean.rs +++ b/cli/tools/clean.rs @@ -1,14 +1,15 @@ // Copyright 2018-2025 the Deno authors. MIT license. use std::path::Path; +use std::sync::Arc; use deno_core::anyhow::Context; use deno_core::error::AnyError; -use deno_lib::cache::DenoDir; +use crate::args::Flags; use crate::colors; use crate::display; -use crate::sys::CliSys; +use crate::factory::CliFactory; use crate::util::progress_bar::ProgressBar; use crate::util::progress_bar::ProgressBarStyle; use crate::util::progress_bar::ProgressMessagePrompt; @@ -29,8 +30,9 @@ impl CleanState { } } -pub fn clean() -> Result<(), AnyError> { - let deno_dir = DenoDir::new(CliSys::default(), None)?; +pub fn clean(flags: Arc) -> Result<(), AnyError> { + let factory = CliFactory::from_flags(flags); + let deno_dir = factory.deno_dir()?; if deno_dir.root.exists() { let no_of_files = walkdir::WalkDir::new(&deno_dir.root).into_iter().count(); let progress_bar = ProgressBar::new(ProgressBarStyle::ProgressBars); diff --git a/cli/tools/compile.rs b/cli/tools/compile.rs index 96dd6798f53202..75a36e7896d3ba 100644 --- a/cli/tools/compile.rs +++ b/cli/tools/compile.rs @@ -20,7 +20,6 @@ use deno_terminal::colors; use rand::Rng; use super::installer::infer_name_from_url; -use crate::args::check_warn_tsconfig; use crate::args::CompileFlags; use crate::args::Flags; use crate::factory::CliFactory; @@ -84,9 +83,6 @@ pub async fn compile( graph }; - let ts_config_for_emit = cli_options - .resolve_ts_config_for_emit(deno_config::deno_json::TsConfigType::Emit)?; - check_warn_tsconfig(&ts_config_for_emit); log::info!( "{} {} to {}", colors::green("Compile"), diff --git a/cli/tools/coverage/mod.rs b/cli/tools/coverage/mod.rs index 9b6ef81ea39848..31c41126ecfcbc 100644 --- a/cli/tools/coverage/mod.rs +++ b/cli/tools/coverage/mod.rs @@ -18,10 +18,12 @@ use deno_config::glob::PathOrPatternSet; use deno_core::anyhow::anyhow; use 
deno_core::anyhow::Context; use deno_core::error::AnyError; +use deno_core::error::CoreError; use deno_core::serde_json; use deno_core::sourcemap::SourceMap; use deno_core::url::Url; use deno_core::LocalInspectorSession; +use deno_error::JsErrorBox; use deno_resolver::npm::DenoInNpmPackageChecker; use node_resolver::InNpmPackageChecker; use regex::Regex; @@ -53,7 +55,7 @@ pub struct CoverageCollector { #[async_trait::async_trait(?Send)] impl crate::worker::CoverageCollector for CoverageCollector { - async fn start_collecting(&mut self) -> Result<(), AnyError> { + async fn start_collecting(&mut self) -> Result<(), CoreError> { self.enable_debugger().await?; self.enable_profiler().await?; self @@ -67,7 +69,7 @@ impl crate::worker::CoverageCollector for CoverageCollector { Ok(()) } - async fn stop_collecting(&mut self) -> Result<(), AnyError> { + async fn stop_collecting(&mut self) -> Result<(), CoreError> { fs::create_dir_all(&self.dir)?; let script_coverages = self.take_precise_coverage().await?.result; @@ -88,7 +90,8 @@ impl crate::worker::CoverageCollector for CoverageCollector { let filepath = self.dir.join(filename); let mut out = BufWriter::new(File::create(&filepath)?); - let coverage = serde_json::to_string(&script_coverage)?; + let coverage = serde_json::to_string(&script_coverage) + .map_err(JsErrorBox::from_err)?; let formatted_coverage = format_json(&filepath, &coverage, &Default::default()) .ok() @@ -111,7 +114,7 @@ impl CoverageCollector { Self { dir, session } } - async fn enable_debugger(&mut self) -> Result<(), AnyError> { + async fn enable_debugger(&mut self) -> Result<(), CoreError> { self .session .post_message::<()>("Debugger.enable", None) @@ -119,7 +122,7 @@ impl CoverageCollector { Ok(()) } - async fn enable_profiler(&mut self) -> Result<(), AnyError> { + async fn enable_profiler(&mut self) -> Result<(), CoreError> { self .session .post_message::<()>("Profiler.enable", None) @@ -127,7 +130,7 @@ impl CoverageCollector { Ok(()) } - async fn 
disable_debugger(&mut self) -> Result<(), AnyError> { + async fn disable_debugger(&mut self) -> Result<(), CoreError> { self .session .post_message::<()>("Debugger.disable", None) @@ -135,7 +138,7 @@ impl CoverageCollector { Ok(()) } - async fn disable_profiler(&mut self) -> Result<(), AnyError> { + async fn disable_profiler(&mut self) -> Result<(), CoreError> { self .session .post_message::<()>("Profiler.disable", None) @@ -146,26 +149,28 @@ impl CoverageCollector { async fn start_precise_coverage( &mut self, parameters: cdp::StartPreciseCoverageArgs, - ) -> Result { + ) -> Result { let return_value = self .session .post_message("Profiler.startPreciseCoverage", Some(parameters)) .await?; - let return_object = serde_json::from_value(return_value)?; + let return_object = + serde_json::from_value(return_value).map_err(JsErrorBox::from_err)?; Ok(return_object) } async fn take_precise_coverage( &mut self, - ) -> Result { + ) -> Result { let return_value = self .session .post_message::<()>("Profiler.takePreciseCoverage", None) .await?; - let return_object = serde_json::from_value(return_value)?; + let return_object = + serde_json::from_value(return_value).map_err(JsErrorBox::from_err)?; Ok(return_object) } @@ -603,7 +608,7 @@ pub fn cover_files( let module_kind = ModuleKind::from_is_cjs( cjs_tracker.is_maybe_cjs(&file.specifier, file.media_type)?, ); - Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source) { + Some(match emitter.maybe_cached_emit(&file.specifier, module_kind, &file.source)? 
{ Some(code) => code, None => { return Err(anyhow!( diff --git a/cli/tools/coverage/reporter.rs b/cli/tools/coverage/reporter.rs index bc6e85b47e2c34..4f0325117cb944 100644 --- a/cli/tools/coverage/reporter.rs +++ b/cli/tools/coverage/reporter.rs @@ -11,6 +11,7 @@ use std::path::PathBuf; use deno_core::error::AnyError; use deno_core::url::Url; +use deno_lib::version::DENO_VERSION_INFO; use super::util; use super::CoverageReport; @@ -559,7 +560,7 @@ impl HtmlCoverageReporter { /// Creates footer part of the contents for html report. pub fn create_html_footer(&self, now: &str) -> String { - let version = env!("CARGO_PKG_VERSION"); + let version = DENO_VERSION_INFO.deno; format!( "