diff --git a/.changesets/feat_avery_persisted_queries.md b/.changesets/feat_avery_persisted_queries.md new file mode 100644 index 00000000000..c80f2ee3fd1 --- /dev/null +++ b/.changesets/feat_avery_persisted_queries.md @@ -0,0 +1,80 @@ +### feat: `preview_persisted_queries` w/opt-in safelisting ([PR #3347](https://github.com/apollographql/router/pull/3347)) + +# Persisted Queries + +> ⚠️ **This is an [Enterprise feature](https://www.apollographql.com/blog/platform/evaluating-apollo-router-understanding-free-and-open-vs-commercial-features/) of the Apollo Router.** It requires an organization with a [GraphOS Enterprise plan](https://www.apollographql.com/pricing/) and the feature to be enabled for your account. +> +> If your organization _doesn't_ currently have an Enterprise plan, you can test out this functionality by signing up for a free [Enterprise trial](https://www.apollographql.com/docs/graphos/org/plans/#enterprise-trials) and reaching out to enable the feature for your account. + +## Overview + +The persisted queries feature lets you pre-register operations so that clients can send an operation ID over the wire and execute the associated operation. Each operation defines the exact shape of a GraphQL operation that the router expects clients to send. In its simplest form, Persisted Queries (PQs) can be used like Automatic Persisted Queries (APQs), with one key difference: a PQ can never be registered by sending its operation body over the wire. Registering persisted operations lets you lock down the router so that it either logs unregistered operations or rejects them outright. + +### Main Configurations + +* **Unregistered operation monitoring** + * Your router can allow all GraphQL operations, while emitting structured traces containing unregistered operation bodies. +* **Operation safelisting** + * Reject unregistered operations + * Require all operations to be sent as an ID + +## Usage + +```yaml title="router.yaml" +preview_persisted_queries: + enabled: true +``` + +This enables additive PQs: persisted operations can be sent by ID, and freeform GraphQL requests are still allowed. + +The router requires `APOLLO_KEY` and `APOLLO_GRAPH_REF` to be set in order to start up properly (they are used to fetch the license and the persisted queries themselves), and the graph variant must be linked to a persisted query list. This feature is currently in preview and has to be enabled for your graph. + +To create a persisted query list and link it to your graph, see our [mock docs](https://docs.google.com/document/d/16EcmcbjmwLfDfAhpMWdF9bHPG8kZ38htXKL-ozVPOUQ/edit#heading=h.r8r7mfcvvw4f), which walk you through enabling the preview feature for your graph, creating a persisted query list, and publishing operations to it from Rover. + +The router will not start up until all persisted queries have been read into a `std::collections::HashMap` mapping each operation ID to its body. Additionally, the bodies alone are stored in a `std::collections::HashSet`. + +After the router starts, persisted queries can be sent over the wire like so: + +```sh +curl http://localhost:4000/ -X POST --json \ +'{"extensions":{"persistedQuery":{"version":1,"sha256Hash":"dc67510fb4289672bea757e862d6b00e83db5d3cbbcfb15260601b6f29bb2b8f"}}}' +``` + +2) [./examples/persisted-queries/safelist_pq_log_only.yaml](https://github.com/apollographql/router-private/raw/avery/persisted-queries/examples/persisted-queries/safelist_pq_log_only.yaml) + +```yaml title="router.yaml" +preview_persisted_queries: + enabled: true + log_unpersisted_queries: true +``` + +Starting the router with this configuration logs freeform GraphQL operations that do not match a persisted query.
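For illustration, a freeform request under this configuration might look like the following minimal sketch. It assumes the same local endpoint as the earlier example and an arbitrary `{ __typename }` query that is not on the persisted query list; the operation still executes, but its body is logged as unpersisted.

```sh
# Freeform GraphQL body (not on the persisted query list): the router executes it,
# but logs it because log_unpersisted_queries is enabled.
curl http://localhost:4000/ -X POST --json \
'{"query":"{ __typename }"}'
```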
+ +3) [./examples/persisted-queries/safelist_pq.yaml](https://github.com/apollographql/router-private/raw/avery/persisted-queries/examples/persisted-queries/safelist_pq.yaml) + +```yaml title="router.yaml" +preview_persisted_queries: + enabled: true + safelist: + enabled: true +apq: + enabled: false +``` + +Starting the router with this configuration will require all operations sent over the wire to match either the ID (O(1) retrieval from `HashMap`) or the body (O(1) retrieval from `HashSet`). APQ is enabled by default, and is incompatible with the persisted queries feature (clients are not allowed to register their own persisted queries, they must be pre-published), therefore it must be disabled to start properly. An error is returned if APQ is not explicitly disabled in `router.yaml`. + +4) [./examples/persisted-queries/safelist_pq_require_id.yaml](https://github.com/apollographql/router-private/raw/avery/persisted-queries/examples/persisted-queries/safelist_pq_require_id.yaml) + +```yaml title="router.yaml" +preview_persisted_queries: + enabled: true + safelist: + enabled: true + require_id: true +apq: + enabled: false +``` + +This configuration is a stricter version of safelisting that rejects all freeform GraphQL requests, even if they match the body of a persisted query. + +By [@EverlastingBugstopper](https://github.com/EverlastingBugstopper) in https://github.com/apollographql/router/pull/3347 \ No newline at end of file diff --git a/Cargo.lock b/Cargo.lock index 968efbd7a08..3d8e85adf2f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -175,15 +175,15 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d" +checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" [[package]] name = "anstyle-parse" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e765fd216e48e067936442276d1d57399e37bce53c264d6fefbe298080cb57ee" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" dependencies = [ "utf8parse", ] @@ -608,7 +608,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -619,7 +619,7 @@ checksum = "b9ccdd8f2a161be9bd5c023df56f1b2a0bd1d83872ae53b71a84a12c9bf6e842" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -648,7 +648,7 @@ dependencies = [ "async-trait", "axum-core", "base64 0.21.2", - "bitflags", + "bitflags 1.3.2", "bytes", "futures-util", "headers", @@ -788,6 +788,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" + [[package]] name = "block-buffer" version = "0.10.4" @@ -879,7 +885,7 @@ dependencies = [ "proc-macro2", "quote", "str_inflector", - "syn 2.0.18", + "syn 2.0.22", "thiserror", "try_match 0.4.1", ] @@ -1039,7 +1045,7 @@ checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ "ansi_term", "atty", - "bitflags", + "bitflags 1.3.2", "strsim 0.8.0", "textwrap 0.11.0", "unicode-width", @@ -1052,7 
+1058,7 @@ version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ - "bitflags", + "bitflags 1.3.2", "clap_lex 0.2.4", "indexmap 1.9.3", "textwrap 0.16.0", @@ -1090,7 +1096,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -1235,9 +1241,9 @@ dependencies = [ [[package]] name = "const-oid" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913" +checksum = "6340df57935414636969091153f35f68d9f00bbc8fb4a9c6054706c213e6c6bc" [[package]] name = "const-random" @@ -1330,9 +1336,9 @@ checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cpufeatures" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58" +checksum = "03e69e28e9f7f77debdedbaafa2866e1de9ba56df55a8bd7cfc724c25a09987c" dependencies = [ "libc", ] @@ -1413,22 +1419,22 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.14" +version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", - "memoffset", + "memoffset 0.9.0", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ "cfg-if", ] @@ -1496,18 +1502,31 @@ dependencies = [ [[package]] name = "curve25519-dalek" -version = "4.0.0-rc.2" +version = "4.0.0-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03d928d978dbec61a1167414f5ec534f24bea0d7a0d24dd9b6233d3d8223e585" +checksum = "436ace70fc06e06f7f689d2624dc4e2f0ea666efb5aa704215f7249ae6e047a7" dependencies = [ "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", "fiat-crypto", - "packed_simd_2", "platforms", + "rustc_version 0.4.0", "subtle", "zeroize", ] +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.22", +] + [[package]] name = "daggy" version = "0.8.0" @@ -1713,7 +1732,7 @@ checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -1838,7 +1857,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -2166,7 +2185,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -2220,13 +2239,13 @@ dependencies = [ "pretty_env_logger", "rand 0.8.5", "redis-protocol", - "rustls 0.21.1", + "rustls 0.21.2", "rustls-native-certs", "rustls-webpki", "semver 
1.0.17", "sha-1", "tokio", - "tokio-rustls 0.24.0", + "tokio-rustls 0.24.1", "tokio-stream", "tokio-util", "url", @@ -2323,7 +2342,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -2440,14 +2459,14 @@ checksum = "e77ac7b51b8e6313251737fcef4b1c01a2ea102bde68415b62c0ee9268fec357" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] name = "gimli" -version = "0.27.2" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0a93d233ebf96623465aad4046a8d3aa4da22d4f4beba5388838c8a434bbb4" +checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" [[package]] name = "git2" @@ -2455,7 +2474,7 @@ version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf7f68c2995f392c49fffb4f95ae2c873297830eb25c6bc4c114ce8f4562acc" dependencies = [ - "bitflags", + "bitflags 1.3.2", "libc", "libgit2-sys", "log", @@ -2587,9 +2606,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d357c7ae988e7d2182f7d7871d0b963962420b0678b0997ce7de72001aeab782" +checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" dependencies = [ "bytes", "fnv", @@ -2659,7 +2678,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" dependencies = [ "base64 0.13.1", - "bitflags", + "bitflags 1.3.2", "bytes", "headers-core", "http", @@ -2716,15 +2735,6 @@ dependencies = [ "libc", ] -[[package]] -name = "hermit-abi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.1" @@ -2904,9 +2914,9 @@ checksum = "0646026eb1b3eea4cd9ba47912ea5ce9cc07713d105b1a14698f4e6433d348b7" dependencies = [ "http", "hyper", - "rustls 0.21.1", + "rustls 0.21.2", "tokio", - "tokio-rustls 0.24.0", + "tokio-rustls 0.24.1", ] [[package]] @@ -2982,7 +2992,7 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff" dependencies = [ - "bitflags", + "bitflags 1.3.2", "inotify-sys", "libc", ] @@ -3077,19 +3087,18 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.7.2" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12b6ee2129af8d4fb011108c73d99a1b83a85977f23b82460c0ae2e25bb4b57f" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" [[package]] name = "is-terminal" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +checksum = "24fddda5af7e54bf7da53067d6e802dbcc381d0a8eef629df528e3ebf68755cb" dependencies = [ "hermit-abi 0.3.1", - "io-lifetimes", - "rustix", + "rustix 0.38.1", "windows-sys 0.48.0", ] @@ -3134,9 +3143,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.63" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" +checksum = 
"c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -3231,7 +3240,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587" dependencies = [ - "bitflags", + "bitflags 1.3.2", "libc", ] @@ -3298,12 +3307,6 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "libm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fc7aa29613bd6a620df431842069224d8bc9011086b1db4c0e0cd47fa03ec9a" - [[package]] name = "libm" version = "0.2.7" @@ -3362,7 +3365,7 @@ checksum = "279a77bf40c85a08513aca203635b96610ebf0e37a92cb0cee76e04da100a426" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -3371,6 +3374,12 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +[[package]] +name = "linux-raw-sys" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" + [[package]] name = "lock_api" version = "0.4.10" @@ -3447,6 +3456,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + [[package]] name = "memory-stats" version = "1.1.0" @@ -3486,7 +3504,7 @@ checksum = "4901771e1d44ddb37964565c654a3223ba41a594d02b8da471cc4464912b5cfa" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -3627,7 +3645,7 @@ version = "5.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "729f63e1ca555a43fe3efa4f3efdf4801c479da85b432242a7b726f353c88486" dependencies = [ - "bitflags", + "bitflags 1.3.2", "filetime", "inotify", "kqueue", @@ -3684,13 +3702,13 @@ dependencies = [ [[package]] name = "num-bigint-dig" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2399c9463abc5f909349d8aa9ba080e0b88b3ce2885389b60b993f39b1a56905" +checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151" dependencies = [ "byteorder", "lazy_static", - "libm 0.2.7", + "libm", "num-integer", "num-iter", "num-traits", @@ -3754,16 +3772,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", - "libm 0.2.7", + "libm", ] [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi 0.3.1", "libc", ] @@ -3829,9 +3847,9 @@ dependencies = [ [[package]] name = "openssl-sys" -version = "0.9.88" +version = "0.9.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2ce0f250f34a308dcfdbb351f511359857d4ed2134ba715a4eadd46e1ffd617" +checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" dependencies = [ "cc", "libc", @@ -4043,9 +4061,9 @@ dependencies = [ [[package]] name = "os_str_bytes" -version = "6.5.0" 
+version = "6.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ceedf44fb00f2d1984b0bc98102627ce622e083e49a5bacdb3e514fa4238e267" +checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac" [[package]] name = "outref" @@ -4099,16 +4117,6 @@ dependencies = [ "sha2", ] -[[package]] -name = "packed_simd_2" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1914cd452d8fccd6f9db48147b29fd4ae05bea9dc5d9ad578509f72415de282" -dependencies = [ - "cfg-if", - "libm 0.1.4", -] - [[package]] name = "parking" version = "2.1.0" @@ -4160,7 +4168,7 @@ dependencies = [ "libc", "redox_syscall 0.3.5", "smallvec", - "windows-targets 0.48.0", + "windows-targets 0.48.1", ] [[package]] @@ -4195,9 +4203,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e68e84bfb01f0507134eac1e9b410a12ba379d064eab48c50ba4ce329a527b70" +checksum = "f73935e4d55e2abf7f130186537b19e7a4abc886a0252380b59248af473a3fc9" dependencies = [ "thiserror", "ucd-trie", @@ -4205,9 +4213,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b79d4c71c865a25a4322296122e3924d30bc8ee0834c8bfc8b95f7f054afbfb" +checksum = "aef623c9bbfa0eedf5a0efba11a5ee83209c326653ca31ff019bec3a95bfff2b" dependencies = [ "pest", "pest_generator", @@ -4215,22 +4223,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c435bf1076437b851ebc8edc3a18442796b30f1728ffea6262d59bbe28b077e" +checksum = "b3e8cba4ec22bada7fc55ffe51e2deb6a0e0db2d0b7ab0b103acc80d2510c190" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] name = "pest_meta" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "745a452f8eb71e39ffd8ee32b3c5f51d03845f99786fa9b68db6ff509c505411" +checksum = "a01f71cb40bd8bb94232df14b946909e14660e33fc05db3e50ae2a82d7ea0ca0" dependencies = [ "once_cell", "pest", @@ -4251,22 +4259,22 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c95a7476719eab1e366eaf73d0260af3021184f18177925b07f54b30089ceead" +checksum = "6e138fdd8263907a2b0e1b4e80b7e58c721126479b6e6eedfb1b402acea7b9bd" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39407670928234ebc5e6e580247dd567ad73a3578460c5990f9503df207e8f07" +checksum = "d1fef411b303e3e12d534fb6e7852de82da56edd937d895125821fb7c09436c7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -4317,9 +4325,9 @@ checksum = "e3d7ddaed09e0eb771a79ab0fd64609ba0afb0a8366421957936ad14cbd13630" [[package]] name = "plotters" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" +checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" dependencies = [ "num-traits", "plotters-backend", @@ 
-4330,15 +4338,15 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" +checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" [[package]] name = "plotters-svg" -version = "0.3.3" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" +checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" dependencies = [ "plotters-backend", ] @@ -4356,9 +4364,9 @@ dependencies = [ [[package]] name = "polyval" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef234e08c11dfcb2e56f79fd70f6f2eb7f025c0ce2333e82f4f0518ecad30c6" +checksum = "d52cff9d1d4dee5fe6d03729099f4a310a41179e0a10dbf542039873f2e826fb" dependencies = [ "cfg-if", "cpufeatures", @@ -4473,9 +4481,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.60" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec2b086b7a862cf4de201096214fa870344cf922b2b30c167badb3af3195406" +checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb" dependencies = [ "unicode-ident", ] @@ -4592,9 +4600,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.28" +version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" +checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" dependencies = [ "proc-macro2", ] @@ -4732,7 +4740,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -4741,7 +4749,7 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -4810,14 +4818,14 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.1", + "rustls 0.21.2", "rustls-native-certs", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", "tokio", - "tokio-rustls 0.24.0", + "tokio-rustls 0.24.1", "tokio-util", "tower-service", "url", @@ -4853,7 +4861,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c2a11a646ef5d4e4a9d5cf80c7e4ecb20f9b1954292d5c5e6d6cbc8d33728ec" dependencies = [ "ahash 0.8.3", - "bitflags", + "bitflags 1.3.2", "instant", "num-traits", "rhai_codegen", @@ -5005,7 +5013,7 @@ checksum = "64449cfef9483a475ed56ae30e2da5ee96448789fb2aa240a04beb6a055078bf" dependencies = [ "countme", "hashbrown 0.12.3", - "memoffset", + "memoffset 0.8.0", "rustc-hash", "text-size", ] @@ -5046,9 +5054,9 @@ dependencies = [ [[package]] name = "rust-embed" -version = "6.7.0" +version = "6.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73e721f488c353141288f223b599b4ae9303ecf3e62923f40a492f0634a4dc3" +checksum = "a36224c3276f8c4ebc8c20f158eca7ca4359c8db89991c4925132aaaf6702661" dependencies = [ 
"rust-embed-impl", "rust-embed-utils", @@ -5057,22 +5065,22 @@ dependencies = [ [[package]] name = "rust-embed-impl" -version = "6.6.0" +version = "6.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22ce362f5561923889196595504317a4372b84210e6e335da529a65ea5452b5" +checksum = "49b94b81e5b2c284684141a2fb9e2a31be90638caf040bf9afbc5a0416afe1ac" dependencies = [ "proc-macro2", "quote", "rust-embed-utils", - "syn 2.0.18", + "syn 2.0.22", "walkdir 2.3.3", ] [[package]] name = "rust-embed-utils" -version = "7.5.0" +version = "7.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512b0ab6853f7e14e3c8754acb43d6f748bb9ced66aa5915a6553ac8213f7731" +checksum = "9d38ff6bf570dc3bb7100fce9f7b60c33fa71d80e88da3f2580df4ff2bdded74" dependencies = [ "sha2", "walkdir 2.3.3", @@ -5110,15 +5118,28 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.19" +version = "0.37.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +checksum = "62f25693a73057a1b4cb56179dd3c7ea21a7c6c5ee7d85781f5749b46f34b79c" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno", "io-lifetimes", "libc", - "linux-raw-sys", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc6396159432b5c8490d4e301d8c705f61860b8b6c863bf79942ce5401968f3" +dependencies = [ + "bitflags 2.3.3", + "errno", + "libc", + "linux-raw-sys 0.4.3", "windows-sys 0.48.0", ] @@ -5136,9 +5157,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.1" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c911ba11bc8433e811ce56fde130ccf32f5127cab0e0194e9c68c5a5b671791e" +checksum = "e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f" dependencies = [ "log", "ring", @@ -5148,9 +5169,9 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", "rustls-pemfile", @@ -5297,7 +5318,7 @@ version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -5361,7 +5382,7 @@ checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -5810,9 +5831,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.18" +version = "2.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" +checksum = "2efbeae7acf4eabd6bcdcbd11c92f45231ddda7539edc7806bd1a04a03b24616" dependencies = [ "proc-macro2", "quote", @@ -5845,7 +5866,7 @@ dependencies = [ "cfg-if", "fastrand", "redox_syscall 0.3.5", - "rustix", + "rustix 0.37.21", "windows-sys 0.48.0", ] @@ -5928,9 +5949,9 @@ dependencies = [ [[package]] name = "text-size" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" +checksum = "f18aa187839b2bdb1ad2fa35ead8c4c2976b64e4363c386d45ac0f7ee85c9233" [[package]] name = "textwrap" @@ -5975,7 +5996,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -6051,9 +6072,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.21" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc" +checksum = "ea9e1b3cf1243ae005d9e74085d4d542f3125458f3a81af210d901dcd7411efd" dependencies = [ "itoa", "serde", @@ -6149,7 +6170,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -6165,11 +6186,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.24.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0d409377ff5b1e3ca6437aa86c1eb7d40c134bfec254e44c830defa92669db5" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.1", + "rustls 0.21.2", "tokio", ] @@ -6240,17 +6261,17 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a76a9312f5ba4c2dec6b9161fdf25d87ad8a09256ccea5a556fef03c706a10f" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.10" +version = "0.19.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380d56e8670370eee6566b0bfd4265f65b3f432e8c6d85623f728d4fa31f739" +checksum = "266f016b7f039eec8a1a80dfe6156b633d208b9fccca5e4db1d6775b0c4e34a7" dependencies = [ - "indexmap 1.9.3", + "indexmap 2.0.0", "toml_datetime", "winnow", ] @@ -6360,7 +6381,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f873044bf02dd1e8239e9c1293ea39dad76dc594ec16185d0a1bf31d8dc8d858" dependencies = [ "async-compression", - "bitflags", + "bitflags 1.3.2", "bytes", "futures-core", "futures-util", @@ -6416,13 +6437,13 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f57e3ca2a01450b1a921183a9c9cbfda207fd822cef4ccb00a65402cbba7a74" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -6570,7 +6591,7 @@ checksum = "b0a91713132798caecb23c977488945566875e7b61b902fb111979871cbff34e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -6834,7 +6855,7 @@ version = "0.71.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a4bbfd886a9c2f87170438c0cdb6b1ddbfe80412ab591c83d24c7e48e487313" dependencies = [ - "bitflags", + "bitflags 1.3.2", "fslock", "once_cell", "which", @@ -6904,11 +6925,10 @@ dependencies = [ [[package]] name = "want" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "log", 
"try-lock", ] @@ -6926,9 +6946,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -6936,24 +6956,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.36" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ "cfg-if", "js-sys", @@ -6963,9 +6983,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6973,22 +6993,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-streams" @@ -7005,9 +7025,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.63" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", @@ -7116,7 +7136,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.0", + "windows-targets 0.48.1", ] [[package]] @@ -7136,9 +7156,9 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.48.0" +version = "0.48.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" dependencies = [ "windows_aarch64_gnullvm 0.48.0", "windows_aarch64_msvc 0.48.0", 
@@ -7235,9 +7255,9 @@ checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" [[package]] name = "winnow" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61de7bac303dc551fe038e2b3cef0f571087a47571ea6e79a87692ac99b99699" +checksum = "ca0ace3845f0d96209f0375e6d367e3eb87eb65d27d445bdc9f1843a26f39448" dependencies = [ "memchr", ] @@ -7281,11 +7301,11 @@ checksum = "f8dab7ac864710bdea6594becbea5b5050333cf34fefb0dc319567eb347950d4" [[package]] name = "x25519-dalek" -version = "2.0.0-rc.2" +version = "2.0.0-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fabd6e16dd08033932fc3265ad4510cc2eab24656058a6dcb107ffe274abcc95" +checksum = "ec7fae07da688e17059d5886712c933bb0520f15eff2e09cfa18e30968f4e63a" dependencies = [ - "curve25519-dalek 4.0.0-rc.2", + "curve25519-dalek 4.0.0-rc.3", "rand_core 0.6.4", "serde", "zeroize", @@ -7323,7 +7343,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] diff --git a/apollo-router/Cargo.toml b/apollo-router/Cargo.toml index b11b8f37430..e4cf36aabbe 100644 --- a/apollo-router/Cargo.toml +++ b/apollo-router/Cargo.toml @@ -109,6 +109,7 @@ lazy_static = "1.4.0" libc = "0.2.147" linkme = "0.3.10" lru = "0.8.1" +maplit = "1.0.2" mediatype = "0.19.14" mockall = "0.11.4" miette = { version = "5.9.0", features = ["fancy"] } @@ -214,6 +215,7 @@ url = { version = "2.4.0", features = ["serde"] } urlencoding = "2.1.2" uuid = { version = "1.4.0", features = ["serde", "v4"] } yaml-rust = "0.4.5" +wiremock = "0.5.19" wsl = "0.1.0" tokio-tungstenite = { version = "0.18.0", features = ["rustls-tls-native-roots"] } tokio-rustls = "0.23.4" diff --git a/apollo-router/src/axum_factory/tests.rs b/apollo-router/src/axum_factory/tests.rs index 017dd70eb1f..12635e69586 100644 --- a/apollo-router/src/axum_factory/tests.rs +++ b/apollo-router/src/axum_factory/tests.rs @@ -2319,10 +2319,13 @@ async fn test_supergraph_timeout() { let service = RouterCreator::new( QueryAnalysisLayer::new(supergraph_creator.schema(), Arc::clone(&conf)).await, Arc::new(supergraph_creator), - Arc::clone(&conf), + conf.clone(), + Default::default(), ) .await + .unwrap() .make(); + // keep the server handle around otherwise it will immediately shutdown let (_server, client) = init_with_config(service, conf.clone(), MultiMap::new()) .await diff --git a/apollo-router/src/configuration/mod.rs b/apollo-router/src/configuration/mod.rs index 326b2671d45..31526f2d64a 100644 --- a/apollo-router/src/configuration/mod.rs +++ b/apollo-router/src/configuration/mod.rs @@ -2,6 +2,7 @@ pub(crate) mod cors; pub(crate) mod expansion; mod experimental; +mod persisted_query; mod schema; pub(crate) mod subgraph; #[cfg(test)] @@ -25,6 +26,8 @@ use derivative::Derivative; use displaydoc::Display; use itertools::Itertools; use once_cell::sync::Lazy; +pub(crate) use persisted_query::PersistedQueries; +pub(crate) use persisted_query::PersistedQueriesSafelist; use regex::Regex; use rustls::Certificate; use rustls::PrivateKey; @@ -61,6 +64,7 @@ use crate::plugins::subscription::SubscriptionConfig; use crate::plugins::subscription::APOLLO_SUBSCRIPTION_PLUGIN; #[cfg(not(test))] use crate::plugins::subscription::APOLLO_SUBSCRIPTION_PLUGIN_NAME; +use crate::uplink::UplinkConfig; use crate::ApolloRouterError; // TODO: Talk it through with the teams @@ -144,6 +148,12 @@ pub struct Configuration { 
#[serde(default)] pub(crate) apq: Apq, + // NOTE: when renaming this to move out of preview, also update paths + // in `uplink/license.rs`. + /// Configures managed persisted queries + #[serde(default)] + pub preview_persisted_queries: PersistedQueries, + // NOTE: when renaming this to move out of preview, also update paths // in `configuration/expansion.rs` and `uplink/license.rs`. /// Operation limits @@ -164,6 +174,10 @@ pub struct Configuration { #[serde(flatten)] pub(crate) apollo_plugins: ApolloPlugins, + /// Uplink configuration. + #[serde(skip)] + pub uplink: Option, + #[serde(default, skip_serializing, skip_deserializing)] pub(crate) notify: Notify, } @@ -188,8 +202,11 @@ impl<'de> serde::Deserialize<'de> for Configuration { apollo_plugins: ApolloPlugins, tls: Tls, apq: Apq, + preview_persisted_queries: PersistedQueries, preview_operation_limits: OperationLimits, experimental_chaos: Chaos, + #[serde(skip)] + uplink: UplinkConfig, } let ad_hoc: AdHocConfiguration = serde::Deserialize::deserialize(deserializer)?; @@ -203,8 +220,10 @@ impl<'de> serde::Deserialize<'de> for Configuration { .apollo_plugins(ad_hoc.apollo_plugins.plugins) .tls(ad_hoc.tls) .apq(ad_hoc.apq) + .persisted_query(ad_hoc.preview_persisted_queries) .operation_limits(ad_hoc.preview_operation_limits) .chaos(ad_hoc.experimental_chaos) + .uplink(ad_hoc.uplink) .build() .map_err(|e| serde::de::Error::custom(e.to_string())) } @@ -236,8 +255,10 @@ impl Configuration { tls: Option, notify: Option>, apq: Option, + persisted_query: Option, operation_limits: Option, chaos: Option, + uplink: Option, ) -> Result { #[cfg(not(test))] let notify_queue_cap = match apollo_plugins.get(APOLLO_SUBSCRIPTION_PLUGIN_NAME) { @@ -260,6 +281,7 @@ impl Configuration { homepage: homepage.unwrap_or_default(), cors: cors.unwrap_or_default(), apq: apq.unwrap_or_default(), + preview_persisted_queries: persisted_query.unwrap_or_default(), preview_operation_limits: operation_limits.unwrap_or_default(), experimental_chaos: chaos.unwrap_or_default(), plugins: UserPlugins { @@ -269,6 +291,7 @@ impl Configuration { plugins: apollo_plugins, }, tls: tls.unwrap_or_default(), + uplink, #[cfg(test)] notify: notify.unwrap_or_default(), #[cfg(not(test))] @@ -302,8 +325,10 @@ impl Configuration { tls: Option, notify: Option>, apq: Option, + persisted_query: Option, operation_limits: Option, chaos: Option, + uplink: Option, ) -> Result { let configuration = Self { validated_yaml: Default::default(), @@ -323,6 +348,8 @@ impl Configuration { tls: tls.unwrap_or_default(), notify: notify.unwrap_or_default(), apq: apq.unwrap_or_default(), + preview_persisted_queries: persisted_query.unwrap_or_default(), + uplink, }; configuration.validate() @@ -683,6 +710,18 @@ pub(crate) struct Apq { pub(crate) subgraph: SubgraphConfiguration, } +#[cfg(test)] +#[buildstructor::buildstructor] +impl Apq { + #[builder] + pub(crate) fn fake_new(enabled: Option) -> Self { + Self { + enabled: enabled.unwrap_or_else(default_apq), + ..Default::default() + } + } +} + /// Subgraph level Automatic Persisted Queries (APQ) configuration #[derive(Debug, Clone, Default, Deserialize, Serialize, JsonSchema)] #[serde(deny_unknown_fields)] diff --git a/apollo-router/src/configuration/persisted_query.rs b/apollo-router/src/configuration/persisted_query.rs new file mode 100644 index 00000000000..d86c34b8da4 --- /dev/null +++ b/apollo-router/src/configuration/persisted_query.rs @@ -0,0 +1,97 @@ +use schemars::JsonSchema; +use serde::Deserialize; +use serde::Serialize; + +/// Persisted Queries (PQ) 
configuration +#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)] +#[serde(deny_unknown_fields)] +pub struct PersistedQueries { + /// Activates Persisted Queries (disabled by default) + #[serde(default = "default_pq")] + pub enabled: bool, + + /// Enabling this field configures the router to log any freeform GraphQL request that is not in the persisted query list + #[serde(default = "default_log_unknown")] + pub log_unknown: bool, + + /// Restricts execution of operations that are not found in the Persisted Query List + #[serde(default)] + pub safelist: PersistedQueriesSafelist, +} + +#[cfg(test)] +#[buildstructor::buildstructor] +impl PersistedQueries { + #[builder] + pub(crate) fn new( + enabled: Option, + log_unknown: Option, + safelist: Option, + ) -> Self { + Self { + enabled: enabled.unwrap_or_else(default_pq), + safelist: safelist.unwrap_or_default(), + log_unknown: log_unknown.unwrap_or_else(default_log_unknown), + } + } +} + +/// Persisted Queries (PQ) Safelisting configuration +#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)] +#[serde(deny_unknown_fields)] +pub struct PersistedQueriesSafelist { + /// Enables using the peristed query list as a safelist (disabled by default) + #[serde(default = "default_safelist")] + pub enabled: bool, + + /// Enabling this field configures the router to reject any request that does not include the persisted query ID + #[serde(default = "default_require_id")] + pub require_id: bool, +} + +#[cfg(test)] +#[buildstructor::buildstructor] +impl PersistedQueriesSafelist { + #[builder] + pub(crate) fn new(enabled: Option, require_id: Option) -> Self { + Self { + enabled: enabled.unwrap_or_else(default_safelist), + require_id: require_id.unwrap_or_else(default_require_id), + } + } +} + +impl Default for PersistedQueries { + fn default() -> Self { + Self { + enabled: default_pq(), + safelist: PersistedQueriesSafelist::default(), + log_unknown: default_log_unknown(), + } + } +} + +impl Default for PersistedQueriesSafelist { + fn default() -> Self { + Self { + enabled: default_safelist(), + require_id: default_require_id(), + } + } +} + +const fn default_pq() -> bool { + false +} + +const fn default_safelist() -> bool { + false +} + +const fn default_require_id() -> bool { + false +} + +const fn default_log_unknown() -> bool { + false +} diff --git a/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap b/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap index 81775d911d5..2f4998b154b 100644 --- a/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap +++ b/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap @@ -1221,6 +1221,52 @@ expression: "&schema" }, "additionalProperties": false }, + "preview_persisted_queries": { + "description": "Configures managed persisted queries", + "default": { + "enabled": false, + "log_unknown": false, + "safelist": { + "enabled": false, + "require_id": false + } + }, + "type": "object", + "properties": { + "enabled": { + "description": "Activates Persisted Queries (disabled by default)", + "default": false, + "type": "boolean" + }, + "log_unknown": { + "description": "Enabling this field configures the router to log any freeform GraphQL request that is not in the persisted query list", + "default": false, + "type": "boolean" + }, + "safelist": { + "description": "Restricts execution of operations that are not 
found in the Persisted Query List", + "default": { + "enabled": false, + "require_id": false + }, + "type": "object", + "properties": { + "enabled": { + "description": "Enables using the peristed query list as a safelist (disabled by default)", + "default": false, + "type": "boolean" + }, + "require_id": { + "description": "Enabling this field configures the router to reject any request that does not include the persisted query ID", + "default": false, + "type": "boolean" + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, "rhai": { "description": "Configuration for the Rhai Plugin", "type": "object", diff --git a/apollo-router/src/executable.rs b/apollo-router/src/executable.rs index 73c0c7af751..e69e6c95d30 100644 --- a/apollo-router/src/executable.rs +++ b/apollo-router/src/executable.rs @@ -25,13 +25,14 @@ use url::Url; use crate::configuration::generate_config_schema; use crate::configuration::generate_upgrade; -use crate::configuration::ConfigurationError; use crate::configuration::Discussed; use crate::plugins::telemetry::reload::init_telemetry; use crate::router::ConfigurationSource; use crate::router::RouterHttpServer; use crate::router::SchemaSource; use crate::router::ShutdownSource; +use crate::uplink::Endpoints; +use crate::uplink::UplinkConfig; use crate::LicenseSource; #[cfg(all( @@ -241,6 +242,42 @@ pub struct Opt { pub(crate) version: bool, } +impl Opt { + pub(crate) fn uplink_config(&self) -> Result { + Ok(UplinkConfig { + apollo_key: self + .apollo_key + .clone() + .ok_or(Self::err_require_opt("APOLLO_KEY"))?, + apollo_graph_ref: self + .apollo_graph_ref + .clone() + .ok_or(Self::err_require_opt("APOLLO_GRAPH_REF"))?, + endpoints: self + .apollo_uplink_endpoints + .as_ref() + .map(|endpoints| Self::parse_endpoints(endpoints)) + .transpose()?, + poll_interval: self.apollo_uplink_poll_interval, + timeout: self.apollo_uplink_timeout, + }) + } + + fn parse_endpoints(endpoints: &str) -> std::result::Result { + Ok(Endpoints::fallback( + endpoints + .split(',') + .map(|endpoint| Url::parse(endpoint.trim())) + .collect::, ParseError>>() + .map_err(|err| anyhow!("invalid Apollo Uplink endpoint, {}", err))?, + )) + } + + fn err_require_opt(env_var: &str) -> anyhow::Error { + anyhow!("Use of Apollo Graph OS requires setting the {env_var} environment variable") + } +} + /// Wrapper so that clap can display the default config path in the help message. /// Uses ProjectDirs to get the default location. #[derive(Debug)] @@ -419,19 +456,6 @@ impl Executable { license: Option, mut opt: Opt, ) -> Result<()> { - let uplink_endpoints: Option> = opt - .apollo_uplink_endpoints - .map(|e| { - e.split(',') - .map(|endpoint| Url::parse(endpoint.trim())) - .collect::, ParseError>>() - }) - .transpose() - .map_err(|err| ConfigurationError::InvalidConfiguration { - message: - "invalid apollo-uplink-endpoints, this must be a list of comma separated URLs", - error: err.to_string(), - })?; if opt.apollo_uplink_poll_interval < Duration::from_secs(10) { return Err(anyhow!("apollo-uplink-poll-interval must be at least 10s")); } @@ -471,7 +495,8 @@ impl Executable { }; let apollo_router_msg = format!("Apollo Router v{} // (c) Apollo Graph, Inc. 
// Licensed as ELv2 (https://go.apollo.dev/elv2)", std::env!("CARGO_PKG_VERSION")); - let schema = match (schema, &opt.supergraph_path, &opt.apollo_key) { + + let schema_source = match (schema, &opt.supergraph_path, &opt.apollo_key) { (Some(_), Some(_), _) => { return Err(anyhow!( "--supergraph and APOLLO_ROUTER_SUPERGRAPH_PATH cannot be used when a custom schema source is in use" @@ -493,18 +518,10 @@ impl Executable { delay: None, } } - (_, None, Some(apollo_key)) => { + (_, None, Some(_apollo_key)) => { tracing::info!("{apollo_router_msg}"); tracing::info!("{apollo_telemetry_msg}"); - - let apollo_graph_ref = opt.apollo_graph_ref.as_ref().ok_or_else(||anyhow!("cannot fetch the supergraph from Apollo Studio without setting the APOLLO_GRAPH_REF environment variable"))?; - SchemaSource::Registry { - apollo_key: apollo_key.to_string(), - apollo_graph_ref: apollo_graph_ref.to_string(), - urls: uplink_endpoints.clone(), - poll_interval: opt.apollo_uplink_poll_interval, - timeout: opt.apollo_uplink_timeout - } + SchemaSource::Registry(opt.uplink_config()?) } _ => { return Err(anyhow!( @@ -545,7 +562,10 @@ impl Executable { // 1. explicit path from cli // 2. env APOLLO_ROUTER_LICENSE // 3. uplink - let license = license.unwrap_or_else(|| { + + let license = if let Some(license) = license { + license + } else { match ( &opt.apollo_router_license, &opt.apollo_router_license_path, @@ -564,21 +584,18 @@ impl Executable { } } (Some(_license), _, _, _) => LicenseSource::Env, - (_, _, Some(apollo_key), Some(apollo_graph_ref)) => LicenseSource::Registry { - apollo_key: apollo_key.to_string(), - apollo_graph_ref: apollo_graph_ref.to_string(), - urls: uplink_endpoints.clone(), - poll_interval: opt.apollo_uplink_poll_interval, - timeout: opt.apollo_uplink_timeout, - }, + (_, _, Some(_apollo_key), Some(_apollo_graph_ref)) => { + LicenseSource::Registry(opt.uplink_config()?) 
+ } _ => LicenseSource::default(), } - }); + }; let router = RouterHttpServer::builder() .configuration(configuration) - .schema(schema) + .and_uplink(opt.uplink_config().ok()) + .schema(schema_source) .license(license) .shutdown(shutdown.unwrap_or(ShutdownSource::CtrlC)) .start(); diff --git a/apollo-router/src/lib.rs b/apollo-router/src/lib.rs index 296dcc8a4d2..9b0f04db47d 100644 --- a/apollo-router/src/lib.rs +++ b/apollo-router/src/lib.rs @@ -71,7 +71,7 @@ mod router_factory; pub mod services; pub(crate) mod spec; mod state_machine; -mod test_harness; +pub mod test_harness; pub mod tracer; mod uplink; @@ -90,6 +90,7 @@ pub use crate::router::ShutdownSource; pub use crate::router_factory::Endpoint; pub use crate::test_harness::MockedSubgraphs; pub use crate::test_harness::TestHarness; +pub use crate::uplink::UplinkConfig; /// Not part of the public API #[doc(hidden)] diff --git a/apollo-router/src/plugins/include_subgraph_errors.rs b/apollo-router/src/plugins/include_subgraph_errors.rs index fcdd82633bb..62d5cb6ebec 100644 --- a/apollo-router/src/plugins/include_subgraph_errors.rs +++ b/apollo-router/src/plugins/include_subgraph_errors.rs @@ -215,9 +215,11 @@ mod test { RouterCreator::new( QueryAnalysisLayer::new(supergraph_creator.schema(), Default::default()).await, Arc::new(supergraph_creator), + Arc::new(Configuration::default()), Default::default(), ) .await + .unwrap() .make() .boxed() } diff --git a/apollo-router/src/plugins/traffic_shaping/mod.rs b/apollo-router/src/plugins/traffic_shaping/mod.rs index f183edf0f46..5eb6576c112 100644 --- a/apollo-router/src/plugins/traffic_shaping/mod.rs +++ b/apollo-router/src/plugins/traffic_shaping/mod.rs @@ -600,9 +600,11 @@ mod test { RouterCreator::new( QueryAnalysisLayer::new(supergraph_creator.schema(), Default::default()).await, Arc::new(supergraph_creator), + Arc::new(Configuration::default()), Default::default(), ) .await + .unwrap() .make() .boxed() } diff --git a/apollo-router/src/router/event/configuration.rs b/apollo-router/src/router/event/configuration.rs index fd3e6c542aa..a007832cff0 100644 --- a/apollo-router/src/router/event/configuration.rs +++ b/apollo-router/src/router/event/configuration.rs @@ -11,6 +11,7 @@ use futures::prelude::*; use crate::router::Event; use crate::router::Event::NoMoreConfiguration; use crate::router::Event::UpdateConfiguration; +use crate::uplink::UplinkConfig; use crate::Configuration; type ConfigurationStream = Pin + Send>>; @@ -57,12 +58,21 @@ impl Default for ConfigurationSource { impl ConfigurationSource { /// Convert this config into a stream regardless of if is static or not. Allows for unified handling later. 
- pub(crate) fn into_stream(self) -> impl Stream { + pub(crate) fn into_stream( + self, + uplink_config: Option, + ) -> impl Stream { match self { - ConfigurationSource::Static(instance) => { + ConfigurationSource::Static(mut instance) => { + instance.uplink = uplink_config; stream::iter(vec![UpdateConfiguration(*instance)]).boxed() } - ConfigurationSource::Stream(stream) => stream.map(UpdateConfiguration).boxed(), + ConfigurationSource::Stream(stream) => stream + .map(move |mut c| { + c.uplink = uplink_config.clone(); + UpdateConfiguration(c) + }) + .boxed(), #[allow(deprecated)] ConfigurationSource::File { path, @@ -78,16 +88,18 @@ impl ConfigurationSource { stream::empty().boxed() } else { match ConfigurationSource::read_config(&path) { - Ok(configuration) => { + Ok(mut configuration) => { if watch { crate::files::watch(&path) .filter_map(move |_| { let path = path.clone(); + let uplink_config = uplink_config.clone(); async move { match ConfigurationSource::read_config_async(&path) .await { - Ok(configuration) => { + Ok(mut configuration) => { + configuration.uplink = uplink_config.clone(); Some(UpdateConfiguration(configuration)) } Err(err) => { @@ -99,6 +111,7 @@ impl ConfigurationSource { }) .boxed() } else { + configuration.uplink = uplink_config.clone(); stream::once(future::ready(UpdateConfiguration(configuration))) .boxed() } @@ -140,6 +153,7 @@ mod tests { use super::*; use crate::files::tests::create_temp_file; use crate::files::tests::write_and_flush; + use crate::uplink::UplinkConfig; #[tokio::test(flavor = "multi_thread")] async fn config_by_file_watching() { @@ -151,7 +165,7 @@ mod tests { watch: true, delay: None, } - .into_stream() + .into_stream(Some(UplinkConfig::default())) .boxed(); // First update is guaranteed @@ -182,7 +196,7 @@ mod tests { watch: true, delay: None, } - .into_stream(); + .into_stream(Some(UplinkConfig::default())); // First update fails because the file is invalid. assert!(matches!(stream.next().await.unwrap(), NoMoreConfiguration)); @@ -197,7 +211,7 @@ mod tests { watch: true, delay: None, } - .into_stream(); + .into_stream(Some(UplinkConfig::default())); // First update fails because the file is invalid. assert!(matches!(stream.next().await.unwrap(), NoMoreConfiguration)); @@ -214,7 +228,7 @@ mod tests { watch: false, delay: None, } - .into_stream(); + .into_stream(Some(UplinkConfig::default())); assert!(matches!( stream.next().await.unwrap(), UpdateConfiguration(_) diff --git a/apollo-router/src/router/event/license.rs b/apollo-router/src/router/event/license.rs index ab49a70cab2..fea8a141a7b 100644 --- a/apollo-router/src/router/event/license.rs +++ b/apollo-router/src/router/event/license.rs @@ -1,13 +1,11 @@ use std::path::PathBuf; use std::pin::Pin; use std::str::FromStr; -use std::time::Duration; use derivative::Derivative; use derive_more::Display; use derive_more::From; use futures::prelude::*; -use url::Url; use crate::router::Event; use crate::router::Event::NoMoreLicense; @@ -15,7 +13,7 @@ use crate::uplink::license_enforcement::License; use crate::uplink::license_stream::LicenseQuery; use crate::uplink::license_stream::LicenseStreamExt; use crate::uplink::stream_from_uplink; -use crate::uplink::Endpoints; +use crate::uplink::UplinkConfig; type LicenseStream = Pin + Send>>; @@ -49,22 +47,7 @@ pub enum LicenseSource { /// Apollo uplink. 
#[display(fmt = "Registry")] - Registry { - /// The Apollo key: `` - apollo_key: String, - - /// The apollo graph reference: `@` - apollo_graph_ref: String, - - /// The endpoint polled to fetch its latest supergraph schema. - urls: Option>, - - /// The duration between polling - poll_interval: Duration, - - /// The HTTP client timeout for each poll - timeout: Duration, - }, + Registry(UplinkConfig), } impl Default for LicenseSource { @@ -131,29 +114,20 @@ impl LicenseSource { } } } - LicenseSource::Registry { - apollo_key, - apollo_graph_ref, - urls, - poll_interval, - timeout, - } => stream_from_uplink::( - apollo_key, - apollo_graph_ref, - urls.map(Endpoints::fallback), - poll_interval, - timeout, - ) - .filter_map(|res| { - future::ready(match res { - Ok(license) => Some(license), - Err(e) => { - tracing::error!("{}", e); - None - } - }) - }) - .boxed(), + + LicenseSource::Registry(uplink_config) => { + stream_from_uplink::(uplink_config) + .filter_map(|res| { + future::ready(match res { + Ok(license) => Some(license), + Err(e) => { + tracing::error!("{}", e); + None + } + }) + }) + .boxed() + } LicenseSource::Env => { // EXPERIMENTAL and not subject to semver. match std::env::var("APOLLO_ROUTER_LICENSE").map(|e| License::from_str(&e)) { diff --git a/apollo-router/src/router/event/schema.rs b/apollo-router/src/router/event/schema.rs index a64f85ab23f..661bdd053bd 100644 --- a/apollo-router/src/router/event/schema.rs +++ b/apollo-router/src/router/event/schema.rs @@ -6,14 +6,13 @@ use derivative::Derivative; use derive_more::Display; use derive_more::From; use futures::prelude::*; -use url::Url; use crate::router::Event; use crate::router::Event::NoMoreSchema; use crate::router::Event::UpdateSchema; use crate::uplink::schema_stream::SupergraphSdlQuery; use crate::uplink::stream_from_uplink; -use crate::uplink::Endpoints; +use crate::uplink::UplinkConfig; type SchemaStream = Pin + Send>>; @@ -47,22 +46,7 @@ pub enum SchemaSource { /// Apollo managed federation. #[display(fmt = "Registry")] - Registry { - /// The Apollo key: `` - apollo_key: String, - - /// The apollo graph reference: `@` - apollo_graph_ref: String, - - /// The endpoint polled to fetch its latest supergraph schema. 
- urls: Option>, - - /// The duration between polling - poll_interval: Duration, - - /// The HTTP client timeout for each poll - timeout: Duration, - }, + Registry(UplinkConfig), } impl From<&'_ str> for SchemaSource { @@ -124,29 +108,19 @@ impl SchemaSource { } } } - SchemaSource::Registry { - apollo_key, - apollo_graph_ref, - urls, - poll_interval, - timeout, - } => stream_from_uplink::( - apollo_key, - apollo_graph_ref, - urls.map(Endpoints::fallback), - poll_interval, - timeout, - ) - .filter_map(|res| { - future::ready(match res { - Ok(schema) => Some(UpdateSchema(schema)), - Err(e) => { - tracing::error!("{}", e); - None - } - }) - }) - .boxed(), + SchemaSource::Registry(uplink_config) => { + stream_from_uplink::(uplink_config) + .filter_map(|res| { + future::ready(match res { + Ok(schema) => Some(UpdateSchema(schema)), + Err(e) => { + tracing::error!("{}", e); + None + } + }) + }) + .boxed() + } } .chain(stream::iter(vec![NoMoreSchema])) } diff --git a/apollo-router/src/router/mod.rs b/apollo-router/src/router/mod.rs index 255b2cd387c..754966d0b28 100644 --- a/apollo-router/src/router/mod.rs +++ b/apollo-router/src/router/mod.rs @@ -35,7 +35,7 @@ use crate::orbiter::OrbiterRouterSuperServiceFactory; use crate::router_factory::YamlRouterFactory; use crate::state_machine::ListenAddresses; use crate::state_machine::StateMachine; - +use crate::uplink::UplinkConfig; /// The entry point for running the Router’s HTTP server. /// /// # Examples @@ -99,6 +99,10 @@ impl RouterHttpServer { /// Specifies where to find the router license which controls if commercial features are enabled or not. /// If not provided then commercial features will not be enabled. /// + /// * `.uplink(impl Into<`[UplinkConfig]>`)` + /// Optional. + /// Specifies the Uplink configuration options. + /// /// * `.shutdown(impl Into<`[`ShutdownSource`]`>)` /// Optional. /// Specifies when the server should gracefully shut down. 
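
As an aside for readers skimming this hunk: the builder documentation above introduces the new `.uplink(impl Into<UplinkConfig>)` option. The following is a rough, illustrative sketch — not part of this diff — of how an embedding application might assemble an `UplinkConfig` (whose public fields appear later in this PR) and hand it to `RouterHttpServer::builder()`. The environment-variable handling, schema file path, poll interval, and timeout values are placeholder assumptions, not prescribed defaults.

```rust
// Illustrative sketch only — not part of this PR's diff.
use std::time::Duration;

use apollo_router::RouterHttpServer;
use apollo_router::UplinkConfig;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Assemble Uplink options from the environment (placeholder choices).
    let uplink = UplinkConfig {
        apollo_key: std::env::var("APOLLO_KEY")?,
        apollo_graph_ref: std::env::var("APOLLO_GRAPH_REF")?,
        endpoints: None, // `None` falls back to the default Uplink endpoints
        poll_interval: Duration::from_secs(10),
        timeout: Duration::from_secs(30),
    };

    // A supergraph SDL read from disk; a registry source would also work.
    let supergraph_sdl = std::fs::read_to_string("supergraph.graphql")?;

    // Build and run the router until it shuts down.
    RouterHttpServer::builder()
        .schema(supergraph_sdl.as_str())
        .uplink(uplink)
        .start()
        .await?;

    Ok(())
}
```

In practice the router binary derives these values from its CLI options (as the `main.rs` change at the top of this diff shows with `.and_uplink(opt.uplink_config().ok())`); the sketch above only illustrates the programmatic path for library users.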
@@ -124,12 +128,14 @@ impl RouterHttpServer { configuration: Option, license: Option, shutdown: Option, + uplink: Option, ) -> RouterHttpServer { let (shutdown_sender, shutdown_receiver) = oneshot::channel::<()>(); let event_stream = generate_event_stream( shutdown.unwrap_or(ShutdownSource::CtrlC), configuration.unwrap_or_default(), schema, + uplink, license.unwrap_or_default(), shutdown_receiver, ); @@ -218,6 +224,7 @@ fn generate_event_stream( shutdown: ShutdownSource, configuration: ConfigurationSource, schema: SchemaSource, + uplink_config: Option, license: LicenseSource, shutdown_receiver: oneshot::Receiver<()>, ) -> impl Stream { @@ -229,7 +236,7 @@ fn generate_event_stream( license.into_stream().boxed(), reload_source.clone().into_stream().boxed(), configuration - .into_stream() + .into_stream(uplink_config) .map(move |config_event| { if let Event::UpdateConfiguration(config) = &config_event { reload_source.set_period(&config.experimental_chaos.force_reload) diff --git a/apollo-router/src/router_factory.rs b/apollo-router/src/router_factory.rs index 853d8ae24cf..76934047102 100644 --- a/apollo-router/src/router_factory.rs +++ b/apollo-router/src/router_factory.rs @@ -221,9 +221,11 @@ impl RouterSuperServiceFactory for YamlRouterFactory { let query_parsing_layer = QueryAnalysisLayer::new(supergraph_creator.schema(), Arc::clone(&configuration)).await; - if let Some(router) = previous_router { + let mut persisted_query_manifest_poller = None; + + if let Some(previous_router) = previous_router { if configuration.supergraph.query_planning.warmed_up_queries > 0 { - let cache_keys = router + let cache_keys = previous_router .cache_keys(configuration.supergraph.query_planning.warmed_up_queries) .await; @@ -238,14 +240,22 @@ impl RouterSuperServiceFactory for YamlRouterFactory { .await; } } - } + + // capture the manifest poller for persisted queries and pass it on to the new router + // so it can keep running without needing to re-fetch every single operation on reload. + persisted_query_manifest_poller = previous_router + .persisted_query_layer + .manifest_poller + .clone(); + }; Ok(Self::RouterFactory::new( query_parsing_layer, Arc::new(supergraph_creator), configuration, + persisted_query_manifest_poller, ) - .await) + .await?) 
} } diff --git a/apollo-router/src/services/layers/apq.rs b/apollo-router/src/services/layers/apq.rs index 1a08ec5e267..2b27b817b22 100644 --- a/apollo-router/src/services/layers/apq.rs +++ b/apollo-router/src/services/layers/apq.rs @@ -38,7 +38,7 @@ impl PersistedQuery { .and_then(|value| serde_json_bytes::from_value(value.clone()).ok()) } - /// Attempt to decode the sha256 hash in a `PersistedQuery` + /// Attempt to decode the sha256 hash in a [`PersistedQuery`] pub(crate) fn decode_hash(self) -> Option<(String, Vec)> { hex::decode(self.sha256hash.as_bytes()) .ok() diff --git a/apollo-router/src/services/layers/mod.rs b/apollo-router/src/services/layers/mod.rs index 19b6ab6550b..0f921f09863 100644 --- a/apollo-router/src/services/layers/mod.rs +++ b/apollo-router/src/services/layers/mod.rs @@ -2,5 +2,6 @@ pub(crate) mod allow_only_http_post_mutations; pub(crate) mod apq; pub(crate) mod content_negociation; +pub(crate) mod persisted_queries; pub(crate) mod query_analysis; pub(crate) mod static_page; diff --git a/apollo-router/src/services/layers/persisted_queries/id_extractor.rs b/apollo-router/src/services/layers/persisted_queries/id_extractor.rs new file mode 100644 index 00000000000..fcd1b9b0fe2 --- /dev/null +++ b/apollo-router/src/services/layers/persisted_queries/id_extractor.rs @@ -0,0 +1,54 @@ +//! Persisted Query ID extractor + +use crate::services::layers::apq::PersistedQuery; +use crate::services::SupergraphRequest; + +#[derive(Debug, Clone)] +pub(crate) struct PersistedQueryIdExtractor; + +impl PersistedQueryIdExtractor { + pub(crate) fn extract_id(request: &SupergraphRequest) -> Option { + PersistedQuery::maybe_from_request(request).map(|pq| pq.sha256hash) + } +} + +#[cfg(test)] +mod tests { + use serde_json::json; + + use super::*; + + fn build_supergraph_request_with_pq_extension( + persisted: &serde_json::Value, + ) -> SupergraphRequest { + SupergraphRequest::fake_builder() + .extension("persistedQuery", persisted.clone()) + .build() + .unwrap() + } + + fn assert_can_extract_id(expected_id: String, request: SupergraphRequest) { + assert_eq!( + PersistedQueryIdExtractor::extract_id(&request), + Some(expected_id) + ) + } + + fn assert_cannot_extract_id(request: SupergraphRequest) { + assert_eq!(PersistedQueryIdExtractor::extract_id(&request), None) + } + + #[test] + fn it_cannot_extract_id_from_request_extensions_without_version() { + let hash = "ecf4edb46db40b5132295c0291d62fb65d6759a9eedfa4d5d612dd5ec54a6b36".to_string(); + let persisted = json!({ "sha256Hash": &hash }); + assert_cannot_extract_id(build_supergraph_request_with_pq_extension(&persisted)) + } + + #[test] + fn it_can_extract_id_from_request_extensions_with_version() { + let hash = "ecf4edb46db40b5132295c0291d62fb65d6759a9eedfa4d5d612dd5ec54a6b36".to_string(); + let persisted = json!({ "sha256Hash": &hash, "version": 1 }); + assert_can_extract_id(hash, build_supergraph_request_with_pq_extension(&persisted)) + } +} diff --git a/apollo-router/src/services/layers/persisted_queries/manifest_poller.rs b/apollo-router/src/services/layers/persisted_queries/manifest_poller.rs new file mode 100644 index 00000000000..8f21979d4bd --- /dev/null +++ b/apollo-router/src/services/layers/persisted_queries/manifest_poller.rs @@ -0,0 +1,364 @@ +//! Persisted query manifest poller. Once created, will poll for updates continuously, reading persisted queries into memory. 
+ +use std::collections::HashMap; +use std::collections::HashSet; +use std::sync::Arc; +use std::sync::RwLock; + +use anyhow::anyhow; +use futures::prelude::*; +use reqwest::Client; +use serde::Deserialize; +use serde::Serialize; +use tokio::sync::mpsc; +use tower::BoxError; + +use crate::uplink::persisted_queries_manifest_stream::MaybePersistedQueriesManifestChunks; +use crate::uplink::persisted_queries_manifest_stream::PersistedQueriesManifestChunk; +use crate::uplink::persisted_queries_manifest_stream::PersistedQueriesManifestQuery; +use crate::uplink::stream_from_uplink; +use crate::uplink::UplinkConfig; + +/// An in memory cache of persisted queries. +pub(crate) type PersistedQueryManifest = HashMap; + +/// An in memory cache of persisted query bodies. +pub(crate) type PersistedQuerySet = HashSet; + +/// Manages polling uplink for persisted query chunks and unpacking those chunks into a [`PersistedQueryManifest`]. +#[derive(Debug)] +pub(crate) struct PersistedQueryManifestPoller { + persisted_query_manifest: Arc>, + persisted_query_bodies: Arc>, + shutdown_sender: mpsc::Sender<()>, +} + +impl PersistedQueryManifestPoller { + /// Create a new [`PersistedQueryManifestPoller`] from CLI options and YAML configuration. + /// Starts polling immediately and this function only returns after all chunks have been fetched + /// and the [`PersistedQueryManifest`] has been fully populated. + pub(crate) async fn new(uplink_config: &UplinkConfig) -> Result { + let persisted_query_manifest = Arc::new(RwLock::new(PersistedQueryManifest::new())); + let persisted_query_bodies = Arc::new(RwLock::new(PersistedQuerySet::new())); + + let http_client = Client::builder().timeout(uplink_config.timeout).build() + .map_err(|e| { + anyhow!( + "could not initialize HTTP client for fetching persisted queries manifest chunks: {}", + e + ) + })?; + + let (shutdown_sender, shutdown_receiver) = mpsc::channel::<()>(1); + let (ready_sender, mut ready_receiver) = mpsc::channel::(1); + + // start polling uplink for persisted query chunks + tokio::task::spawn(poll_uplink( + uplink_config.clone(), + persisted_query_manifest.clone(), + persisted_query_bodies.clone(), + ready_sender, + shutdown_receiver, + http_client, + )); + + // wait for the uplink poller to report its first success and continue + // or report the error + match ready_receiver.recv().await { + Some(startup_result) => match startup_result { + ManifestPollResultOnStartup::LoadedOperations => (), + ManifestPollResultOnStartup::Err(error) => return Err(error), + }, + None => { + return Err( + anyhow!("could not receive ready event for persisted query layer").into(), + ); + } + } + + Ok(Self { + shutdown_sender, + persisted_query_manifest, + persisted_query_bodies, + }) + } + + /// Send a shutdown message to the background task that is polling for persisted query chunks. 
+ pub(crate) async fn shutdown(&self) -> Result<(), BoxError> { + self.shutdown_sender + .send(()) + .await + .map_err(|_| anyhow!("could not send shutdown event in persisted query layer").into()) + } + + pub(crate) fn get_operation_body(&self, persisted_query_id: &str) -> Option { + let persisted_query_manifest = self.persisted_query_manifest.read().unwrap_or_else(|e| { + panic!("could not acquire read lock on persisted query manifest: {e}") + }); + persisted_query_manifest.get(persisted_query_id).cloned() + } + + pub(crate) fn is_operation_persisted(&self, query: &str) -> bool { + let persisted_query_bodies = self.persisted_query_bodies.read().unwrap_or_else(|e| { + panic!("could not acquire read lock on persisted query body se: {e}") + }); + persisted_query_bodies.contains(query) + } +} + +async fn poll_uplink( + uplink_config: UplinkConfig, + existing_persisted_query_manifest: Arc>, + existing_persisted_query_bodies: Arc>, + ready_sender: mpsc::Sender, + mut shutdown_event_receiver: mpsc::Receiver<()>, + http_client: Client, +) { + let mut uplink_executor = stream::select_all(vec![ + stream_from_uplink::( + uplink_config.clone(), + ) + .filter_map(|res| { + let http_client = http_client.clone(); + let graph_ref = uplink_config.apollo_graph_ref.clone(); + async move { + match res { + Ok(Some(chunks)) => match manifest_from_chunks(chunks, http_client).await { + Ok(new_manifest) => Some(ManifestPollEvent::NewManifest(new_manifest)), + Err(e) => Some(ManifestPollEvent::FetchError(e)), + }, + Ok(None) => Some(ManifestPollEvent::NoPersistedQueryList { graph_ref }), + Err(e) => Some(ManifestPollEvent::Err(e.into())), + } + } + }) + .boxed(), + shutdown_event_receiver + .recv() + .into_stream() + .filter_map(|res| { + future::ready(match res { + Some(()) => Some(ManifestPollEvent::Shutdown), + None => Some(ManifestPollEvent::Err( + anyhow!("could not receive shutdown event for persisted query layer") + .into(), + )), + }) + }) + .boxed(), + ]) + .take_while(|msg| future::ready(!matches!(msg, ManifestPollEvent::Shutdown))) + .boxed(); + + let mut resolved_first_pq_manifest = false; + + while let Some(event) = uplink_executor.next().await { + match event { + ManifestPollEvent::NewManifest(new_manifest) => { + // copy all of the bodies in the new manifest into a hash set before acquiring any locks + let new_bodies: HashSet = new_manifest.values().cloned().collect(); + existing_persisted_query_manifest + .write() + .map(|mut existing_manifest| { + existing_persisted_query_bodies + .write() + .map(|mut existing_bodies| { + // update the existing map of pq id to pq body to be the new collection + *existing_manifest = new_manifest; + + // update the set of pq bodies from the values in the new collection + *existing_bodies = new_bodies; + }) + .unwrap_or_else(|e| { + panic!( + "could not acquire write lock on persisted query body set: {e}" + ) + }); + }) + .unwrap_or_else(|e| { + panic!("could not acquire write lock on persisted query manifest: {e}") + }); + + if !resolved_first_pq_manifest { + send_startup_event( + &ready_sender, + ManifestPollResultOnStartup::LoadedOperations, + ) + .await; + resolved_first_pq_manifest = true; + } + } + ManifestPollEvent::FetchError(e) => { + send_startup_event( + &ready_sender, + ManifestPollResultOnStartup::Err( + anyhow!("could not fetch persisted queries: {e}").into(), + ), + ) + .await + } + ManifestPollEvent::Err(e) => { + send_startup_event(&ready_sender, ManifestPollResultOnStartup::Err(e)).await + } + ManifestPollEvent::NoPersistedQueryList { graph_ref } => 
{ + send_startup_event( + &ready_sender, + ManifestPollResultOnStartup::Err( + anyhow!("no persisted query list found for graph ref {}", &graph_ref) + .into(), + ), + ) + .await + } + // this event is a no-op because we `take_while` on messages that are not this one + ManifestPollEvent::Shutdown => (), + } + } + + async fn send_startup_event( + ready_sender: &mpsc::Sender, + message: ManifestPollResultOnStartup, + ) { + if let Err(e) = ready_sender.send(message).await { + tracing::debug!("could not send startup event for the persisted query layer: {e}"); + } + } +} + +async fn manifest_from_chunks( + new_chunks: Vec, + http_client: Client, +) -> Result { + let mut new_persisted_query_manifest = PersistedQueryManifest::new(); + tracing::debug!("ingesting new persisted queries: {:?}", &new_chunks); + // TODO: consider doing these fetches in parallel + for new_chunk in new_chunks { + add_chunk_to_operations( + new_chunk, + &mut new_persisted_query_manifest, + http_client.clone(), + ) + .await? + } + + tracing::info!( + "Loaded {} persisted queries.", + new_persisted_query_manifest.len() + ); + + Ok(new_persisted_query_manifest) +} + +async fn add_chunk_to_operations( + chunk: PersistedQueriesManifestChunk, + operations: &mut PersistedQueryManifest, + http_client: Client, +) -> anyhow::Result<()> { + // TODO: chunk URLs will eventually respond with fallback URLs, when it does, implement falling back here + if let Some(chunk_url) = chunk.urls.get(0) { + let chunk = http_client + .get(chunk_url.clone()) + .send() + .await + .and_then(|r| r.error_for_status()) + .map_err(|e| { + anyhow!( + "error fetching persisted queries manifest chunk from {}: {}", + chunk_url, + e + ) + })? + .json::() + .await + .map_err(|e| { + anyhow!( + "error reading body of persisted queries manifest chunk from {}: {}", + chunk_url, + e + ) + })?; + + if chunk.format != "apollo-persisted-query-manifest" { + return Err(anyhow!( + "chunk format is not 'apollo-persisted-query-manifest'" + )); + } + + if chunk.version != 1 { + return Err(anyhow!("persisted query manifest chunk version is not 1")); + } + + for operation in chunk.operations { + operations.insert(operation.id, operation.body); + } + + Ok(()) + } else { + Err(anyhow!( + "persisted query chunk did not include any URLs to fetch operations from" + )) + } +} + +/// Types of events produced by the manifest poller. +#[derive(Debug)] +pub(crate) enum ManifestPollEvent { + NewManifest(PersistedQueryManifest), + NoPersistedQueryList { graph_ref: String }, + Err(BoxError), + FetchError(BoxError), + Shutdown, +} + +/// The result of the first time build of the persisted query manifest. +#[derive(Debug)] +pub(crate) enum ManifestPollResultOnStartup { + LoadedOperations, + Err(BoxError), +} + +/// The format of each persisted query chunk returned from uplink. 
+#[derive(Debug, Clone, Deserialize, Serialize)] +pub(crate) struct SignedUrlChunk { + pub(crate) format: String, + pub(crate) version: u64, + pub(crate) operations: Vec, +} + +/// A single operation containing an ID and a body, +#[derive(Debug, Clone, Deserialize, Serialize)] +pub(crate) struct Operation { + pub(crate) id: String, + pub(crate) body: String, +} + +#[cfg(test)] +mod tests { + use url::Url; + + use super::*; + use crate::test_harness::mocks::persisted_queries::*; + use crate::uplink::Endpoints; + + #[tokio::test(flavor = "multi_thread")] + async fn poller_can_get_operation_bodies() { + let (id, body, manifest) = fake_manifest(); + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + let manifest_manager = PersistedQueryManifestPoller::new(&uplink_config) + .await + .unwrap(); + assert_eq!(manifest_manager.get_operation_body(&id), Some(body)) + } + + #[tokio::test(flavor = "multi_thread")] + async fn poller_wont_start_without_uplink_connection() { + let uplink_endpoint = Url::parse("https://definitely.not.uplink").unwrap(); + assert!( + PersistedQueryManifestPoller::new(&UplinkConfig::for_tests(Endpoints::fallback(vec![ + uplink_endpoint + ]))) + .await + .is_err() + ); + } +} diff --git a/apollo-router/src/services/layers/persisted_queries/mod.rs b/apollo-router/src/services/layers/persisted_queries/mod.rs new file mode 100644 index 00000000000..cc4cea56d91 --- /dev/null +++ b/apollo-router/src/services/layers/persisted_queries/mod.rs @@ -0,0 +1,951 @@ +use std::sync::Arc; + +mod id_extractor; +mod manifest_poller; + +use anyhow::anyhow; +use http::header::CACHE_CONTROL; +use http::HeaderValue; +use id_extractor::PersistedQueryIdExtractor; +pub(crate) use manifest_poller::PersistedQueryManifestPoller; +use tower::BoxError; + +use crate::configuration::PersistedQueriesSafelist; +use crate::graphql::Error as GraphQLError; +use crate::services::SupergraphRequest; +use crate::services::SupergraphResponse; +use crate::Configuration; +use crate::UplinkConfig; + +const DONT_CACHE_RESPONSE_VALUE: &str = "private, no-cache, must-revalidate"; + +#[derive(Debug)] +pub(crate) struct PersistedQueryLayer { + /// Manages polling uplink for persisted queries + /// it maintains its state between schema reloads and continues running. + pub(crate) manifest_poller: Option>, + + /// Tracks whether APQ is also enabled. + /// If it is, this layer won't reject operations it can't find in the manifest, + /// instead passing on execution to the APQ layer, which will return an error + /// if it can _also_ not find the operation. + apq_enabled: bool, + + /// Tracks whether to log incoming queries that are not in the persisted query list. + log_unknown: bool, + + /// Safelisting configuration. + safelist_config: PersistedQueriesSafelist, +} + +impl PersistedQueryLayer { + /// Create a new [`PersistedQueryLayer`] from CLI options, YAML configuration, + /// and optionally, an existing persisted query manifest poller. + pub(crate) async fn new( + configuration: &Configuration, + previous_manifest_poller: Option>, + ) -> Result { + if configuration.preview_persisted_queries.enabled { + if configuration.uplink.is_none() { + return Err(anyhow!("persisted queries requires Apollo GraphOS. 
ensure that you have set APOLLO_KEY and APOLLO_GRAPH_REF environment variables").into()); + } + if configuration.apq.enabled && configuration.preview_persisted_queries.safelist.enabled + { + return Err(anyhow!("invalid configuration: preview_persisted_queries.safelist.enabled = true, which is incompatible with apq.enabled = true. you must disable apq in your configuration to enable persisted queries with safelisting").into()); + } + Self::new_enabled( + configuration, + configuration + .uplink + .as_ref() + .expect("uplink config was checked above, qed"), + previous_manifest_poller, + ) + .await + } else { + Self::new_disabled(configuration, previous_manifest_poller).await + } + } + + /// Create a new enabled [`PersistedQueryLayer`] using the existing manifest poller if it exists, + /// keeping state intact during state machine reloads + /// or starting a new poller from CLI options and YAML configuration. + async fn new_enabled( + configuration: &Configuration, + uplink_config: &UplinkConfig, + preexisting_manifest_poller: Option>, + ) -> Result { + Self::new_with_manifest_poller( + configuration, + Some( + // use the existing manifest poller if it already exists so chunks don't need refetching + // no configuration options could have changed for the manifest poller because uplink + // configuration options come from CLI options, not YAML, so it's safe to re-use. + if let Some(previous_manifest_poller) = preexisting_manifest_poller.clone() { + previous_manifest_poller + } else { + Arc::new(PersistedQueryManifestPoller::new(uplink_config).await?) + }, + ), + ) + } + + /// Create a new disabled [`PersistedQueryLayer`] shutting down the existing manifest poller if it exists. + async fn new_disabled( + configuration: &Configuration, + preexisting_manifest_poller: Option>, + ) -> Result { + if let Some(preexisting_manifest_poller) = preexisting_manifest_poller { + preexisting_manifest_poller.shutdown().await?; + } + + Self::new_with_manifest_poller(configuration, None) + } + + fn new_with_manifest_poller( + configuration: &Configuration, + manifest_poller: Option>, + ) -> Result { + Ok(Self { + manifest_poller, + apq_enabled: configuration.apq.enabled, + safelist_config: configuration.preview_persisted_queries.safelist.clone(), + log_unknown: configuration.preview_persisted_queries.log_unknown, + }) + } + + /// Run a request through the layer. + /// Takes care of: + /// 1) resolving a persisted query ID to a query body + /// 2) matching a freeform GraphQL request against persisted queries, optionally rejecting it based on configuration + /// 3) continuing to the next stage of the router + pub(crate) fn supergraph_request( + &self, + request: SupergraphRequest, + ) -> Result { + if let Some(manifest_poller) = &self.manifest_poller { + if let Some(persisted_query_id) = PersistedQueryIdExtractor::extract_id(&request) { + self.replace_query_id_with_operation_body( + request, + manifest_poller.clone(), + &persisted_query_id, + ) + } else { + self.handle_freeform_graphql(request, manifest_poller.clone()) + } + } else { + Ok(request) + } + } + + /// Places an operation body on a [`SupergraphRequest`] if it has been persisted + pub(crate) fn replace_query_id_with_operation_body( + &self, + mut request: SupergraphRequest, + manifest_poller: Arc, + persisted_query_id: &str, + ) -> Result { + if request.supergraph_request.body().query.is_some() { + if self.apq_enabled { + // if the request has a query and an ID, and APQ is enabled, continue with normal execution. 
+ // safelisting and APQ are incomaptible with each other - therefore we don't need to check + // if the ID in the requests exactly maps to the body in the persisted query manifest, + // we can just ignore the ID and let APQ handle it for us + assert!(!self.safelist_config.enabled); + Ok(request) + } else { + Err(supergraph_err_cannot_send_id_and_body_with_apq_disabled( + request, + )) + } + } else { + // if there is no query, look up the persisted query in the manifest + // and put the body on the `supergraph_request` + if let Some(persisted_query_body) = + manifest_poller.get_operation_body(persisted_query_id) + { + let mut body = request.supergraph_request.body_mut(); + body.query = Some(persisted_query_body); + body.extensions.remove("persistedQuery"); + Ok(request) + } else if self.apq_enabled { + // if APQ is also enabled, pass the request along to the APQ plugin + // where it will do its own lookup + Ok(request) + } else { + // if APQ is not enabled, return an error indicating the query was not found + Err(supergraph_err_operation_not_found( + request, + persisted_query_id, + )) + } + } + } + + /// Handles incoming freeform GraphQL requests according to the safelisting configuration options + pub(crate) fn handle_freeform_graphql( + &self, + request: SupergraphRequest, + manifest_poller: Arc, + ) -> Result { + if let Some(operation_body) = request.supergraph_request.body().query.as_ref() { + let mut is_persisted = None; + + if self.log_unknown + && !is_operation_persisted( + &mut is_persisted, + manifest_poller.clone(), + operation_body, + ) + { + tracing::warn!(message = "unknown operation", operation_body); + } + + if self.safelist_config.enabled { + if self.safelist_config.require_id { + Err(supergraph_err_pq_id_required(request)) + } else if is_operation_persisted(&mut is_persisted, manifest_poller, operation_body) + { + // if the freeform GraphQL body we received was found in the manifest, + // allow the request to continue execution + Ok(request) + } else { + Err(supergraph_err_operation_not_in_safelist(request)) + } + } else { + // if the request already has a query, continue with normal execution + // because there is no need to substitute the body + // and freeform GraphQL is always allowed if safelisting is not enabled + Ok(request) + } + } else { + // if the request doesn't have a query, continue with normal execution + // if APQ is enabled, it will handle this request, otherwise this request + // is likely to eventually result in an error because there is no query specified + Ok(request) + } + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +enum ErrorCacheStrategy { + Cache, + DontCache, +} + +impl ErrorCacheStrategy { + fn get_supergraph_response( + &self, + graphql_error: GraphQLError, + request: SupergraphRequest, + ) -> SupergraphResponse { + let mut error_builder = SupergraphResponse::error_builder() + .error(graphql_error) + .context(request.context); + + if matches!(self, Self::DontCache) { + // Persisted query errors (especially "not registered") need to be uncached, because + // if we accidentally end up in a state where clients are "ahead" of Routers, + // we don't want them to get "stuck" believing we don't know the PQ if we + // catch up afterwards. 
+ error_builder = error_builder.header( + CACHE_CONTROL, + HeaderValue::from_static(DONT_CACHE_RESPONSE_VALUE), + ); + } + + error_builder.build().expect("response is valid") + } +} + +/// checks if the query body is persisted, storing the result in a local cache +/// can be called multiple times and the full map lookup will only occur once +fn is_operation_persisted( + is_persisted: &mut Option, + manifest_poller: Arc, + operation_body: &str, +) -> bool { + if let Some(result) = is_persisted { + *result + } else { + let result = manifest_poller.is_operation_persisted(operation_body); + *is_persisted = Some(result); + result + } +} + +fn graphql_err_operation_not_found(persisted_query_id: &str) -> GraphQLError { + graphql_err( + "PERSISTED_QUERY_NOT_IN_LIST", + &format!("Persisted query '{persisted_query_id}' not found in the persisted query list"), + ) +} + +fn supergraph_err_operation_not_found( + request: SupergraphRequest, + persisted_query_id: &str, +) -> SupergraphResponse { + supergraph_err( + graphql_err_operation_not_found(persisted_query_id), + request, + ErrorCacheStrategy::DontCache, + ) +} + +fn graphql_err_cannot_send_id_and_body() -> GraphQLError { + graphql_err( + "CANNOT_SEND_PQ_ID_AND_BODY", + "Sending a persisted query ID and a body in the same request is disallowed", + ) +} + +fn supergraph_err_cannot_send_id_and_body_with_apq_disabled( + request: SupergraphRequest, +) -> SupergraphResponse { + supergraph_err( + graphql_err_cannot_send_id_and_body(), + request, + ErrorCacheStrategy::DontCache, + ) +} + +fn graphql_err_operation_not_in_safelist() -> GraphQLError { + graphql_err( + "QUERY_NOT_IN_SAFELIST", + "The operation body was not found in the persisted query safelist", + ) +} + +fn supergraph_err_operation_not_in_safelist(request: SupergraphRequest) -> SupergraphResponse { + supergraph_err( + graphql_err_operation_not_in_safelist(), + request, + ErrorCacheStrategy::DontCache, + ) +} + +fn graphql_err_pq_id_required() -> GraphQLError { + graphql_err("PERSISTED_QUERY_ID_REQUIRED", + "This endpoint does not allow freeform GraphQL requests; operations must be sent by ID in the persisted queries GraphQL extension.", + ) +} + +fn supergraph_err_pq_id_required(request: SupergraphRequest) -> SupergraphResponse { + supergraph_err( + graphql_err_pq_id_required(), + request, + ErrorCacheStrategy::Cache, + ) +} + +fn graphql_err(code: &str, message: &str) -> GraphQLError { + GraphQLError::builder() + .extension_code(code) + .message(message) + .build() +} + +fn supergraph_err( + graphql_error: GraphQLError, + request: SupergraphRequest, + cache_strategy: ErrorCacheStrategy, +) -> SupergraphResponse { + cache_strategy.get_supergraph_response(graphql_error, request) +} + +#[cfg(test)] +mod tests { + use std::time::Duration; + + use serde_json::json; + + use super::*; + use crate::configuration::Apq; + use crate::configuration::PersistedQueries; + use crate::test_harness::mocks::persisted_queries::*; + + #[tokio::test(flavor = "multi_thread")] + async fn disabled_pq_layer_has_no_poller() { + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(false).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(pq_layer.manifest_poller.is_none()); + } + + #[tokio::test(flavor = "multi_thread")] + async fn enabled_pq_layer_has_poller() { + let (_mock_guard, uplink_config) = 
mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(pq_layer.manifest_poller.is_some()) + } + + #[tokio::test] + async fn poller_waits_to_start() { + let (_id, _body, manifest) = fake_manifest(); + let delay = Duration::from_secs(2); + let (_mock_guard, uplink_config) = mock_pq_uplink_with_delay(&manifest, delay).await; + let now = tokio::time::Instant::now(); + + assert!(PersistedQueryManifestPoller::new(&uplink_config) + .await + .is_ok()); + + assert!(now.elapsed() >= delay); + } + + #[tokio::test(flavor = "multi_thread")] + async fn enabled_pq_layer_can_run_pq() { + let (id, body, manifest) = fake_manifest(); + + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + let incoming_request = SupergraphRequest::fake_builder() + .extension("persistedQuery", json!({"version": 1, "sha256Hash": id})) + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_none()); + + let result = pq_layer.supergraph_request(incoming_request); + if let Ok(request) = result { + assert_eq!(request.supergraph_request.body().query, Some(body)); + } else { + panic!("pq layer returned response instead of putting the query on the request"); + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn pq_layer_passes_on_to_apq_layer_when_id_not_found() { + let (_id, _body, manifest) = fake_manifest(); + + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .apq(Apq::fake_builder().enabled(true).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + let incoming_request = SupergraphRequest::fake_builder() + .extension( + "persistedQuery", + json!({"version": 1, "sha256Hash": "this-id-is-invalid"}), + ) + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_none()); + + let result = pq_layer.supergraph_request(incoming_request); + if let Ok(request) = result { + assert!(request.supergraph_request.body().query.is_none()); + } else { + panic!("pq layer returned response instead of continuing to APQ layer"); + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn pq_layer_errors_when_id_not_found_and_apq_disabled() { + let (_id, _body, manifest) = fake_manifest(); + + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .apq(Apq::fake_builder().enabled(false).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + let invalid_id = "this-id-is-invalid"; + let incoming_request = SupergraphRequest::fake_builder() + .extension( + "persistedQuery", + json!({"version": 1, "sha256Hash": invalid_id}), + ) + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_none()); + + let result = pq_layer.supergraph_request(incoming_request); + if let Err(mut response) = result { + 
if let Some(response) = response.next_response().await { + assert_eq!( + response.errors, + vec![graphql_err_operation_not_found(invalid_id)] + ); + } else { + panic!("could not get response from pq layer"); + } + } else { + panic!("pq layer returned request instead of returning an error response"); + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn enabled_apq_configuration_tracked_in_pq_layer() { + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .apq(Apq::fake_builder().enabled(true).build()) + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(pq_layer.apq_enabled) + } + + #[tokio::test(flavor = "multi_thread")] + async fn disabled_apq_configuration_tracked_in_pq_layer() { + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .apq(Apq::fake_builder().enabled(false).build()) + .uplink(uplink_config) + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(!pq_layer.apq_enabled) + } + + #[tokio::test(flavor = "multi_thread")] + async fn enabled_safelist_configuration_tracked_in_pq_layer() { + let safelist_config = PersistedQueriesSafelist::builder().enabled(true).build(); + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query( + PersistedQueries::builder() + .enabled(true) + .safelist(safelist_config) + .build(), + ) + .uplink(uplink_config) + .apq(Apq::fake_builder().enabled(false).build()) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(pq_layer.safelist_config.enabled) + } + + #[tokio::test(flavor = "multi_thread")] + async fn pq_layer_allows_freeform_graphql_when_in_safelist() { + let (_id, _body, manifest) = fake_manifest(); + + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + + let safelist_config = PersistedQueriesSafelist::builder().enabled(true).build(); + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query( + PersistedQueries::builder() + .enabled(true) + .safelist(safelist_config) + .build(), + ) + .uplink(uplink_config) + .apq(Apq::fake_builder().enabled(false).build()) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + + let incoming_request = SupergraphRequest::fake_builder() + .query("query NamedQuery { typename }") + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_some()); + + let result = pq_layer.supergraph_request(incoming_request); + if let Err(mut response) = result { + if let Some(response) = response.next_response().await { + assert_eq!( + response.errors, + vec![graphql_err_operation_not_in_safelist()] + ); + } else { + panic!("could not get response from pq layer"); + } + } else { + panic!("pq layer returned request instead of returning an error response"); + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn pq_layer_rejects_invalid_ids_with_safelisting_enabled() { + let (_id, _body, manifest) = fake_manifest(); + + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + + let safelist_config = PersistedQueriesSafelist::builder().enabled(true).build(); + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() 
+ .persisted_query( + PersistedQueries::builder() + .enabled(true) + .safelist(safelist_config) + .build(), + ) + .uplink(uplink_config) + .apq(Apq::fake_builder().enabled(false).build()) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + let invalid_id = "this-id-is-invalid"; + let incoming_request = SupergraphRequest::fake_builder() + .extension( + "persistedQuery", + json!({"version": 1, "sha256Hash": invalid_id}), + ) + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_none()); + + let result = pq_layer.supergraph_request(incoming_request); + if let Err(mut response) = result { + if let Some(response) = response.next_response().await { + assert_eq!( + response.errors, + vec![graphql_err_operation_not_found(invalid_id)] + ); + } else { + panic!("could not get response from pq layer"); + } + } else { + panic!("pq layer returned request instead of returning an error response"); + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn apq_and_pq_safelisting_is_invalid_config() { + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let safelist_config = PersistedQueriesSafelist::builder().enabled(true).build(); + let pq_layer_result = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query( + PersistedQueries::builder() + .enabled(true) + .safelist(safelist_config) + .build(), + ) + .apq(Apq::fake_builder().enabled(true).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await; + assert!(pq_layer_result.is_err()); + } + + #[tokio::test(flavor = "multi_thread")] + async fn require_id_disabled_by_default_with_safelisting_enabled_in_pq_layer() { + let safelist_config = PersistedQueriesSafelist::builder().enabled(true).build(); + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query( + PersistedQueries::builder() + .enabled(true) + .safelist(safelist_config) + .build(), + ) + .apq(Apq::fake_builder().enabled(false).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(!pq_layer.safelist_config.require_id) + } + + #[tokio::test(flavor = "multi_thread")] + async fn safelisting_require_id_can_be_enabled_in_pq_layer() { + let safelist_config = PersistedQueriesSafelist::builder() + .enabled(true) + .require_id(true) + .build(); + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query( + PersistedQueries::builder() + .enabled(true) + .safelist(safelist_config) + .build(), + ) + .apq(Apq::fake_builder().enabled(false).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(pq_layer.safelist_config.require_id) + } + + #[tokio::test(flavor = "multi_thread")] + async fn safelisting_require_id_rejects_freeform_graphql_in_pq_layer() { + let safelist_config = PersistedQueriesSafelist::builder() + .enabled(true) + .require_id(true) + .build(); + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query( + PersistedQueries::builder() + .enabled(true) + .safelist(safelist_config) + .build(), + ) + .apq(Apq::fake_builder().enabled(false).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + + let incoming_request = SupergraphRequest::fake_builder() + .query("query 
{ typename }") + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_some()); + + let result = pq_layer.supergraph_request(incoming_request); + if let Err(mut response) = result { + if let Some(response) = response.next_response().await { + assert_eq!(response.errors, vec![graphql_err_pq_id_required()]); + } else { + panic!("could not get response from pq layer"); + } + } else { + panic!("pq layer returned request instead of returning an error response"); + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn safelisting_disabled_by_default_in_pq_layer() { + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .apq(Apq::fake_builder().enabled(false).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(!pq_layer.safelist_config.enabled) + } + + #[tokio::test(flavor = "multi_thread")] + async fn disabled_safelist_configuration_tracked_in_pq_layer() { + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + let safelist_config = PersistedQueriesSafelist::builder().enabled(false).build(); + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query( + PersistedQueries::builder() + .enabled(true) + .safelist(safelist_config) + .build(), + ) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + assert!(!pq_layer.safelist_config.enabled) + } + + #[tokio::test(flavor = "multi_thread")] + async fn can_pass_different_body_from_published_pq_id_with_apq_enabled() { + let (id, _body, manifest) = fake_manifest(); + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .apq(Apq::fake_builder().enabled(true).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + let incoming_request = SupergraphRequest::fake_builder() + .extension("persistedQuery", json!({"version": 1, "sha256Hash": id})) + .query("invalid body") + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_some()); + + let result = pq_layer.supergraph_request(incoming_request); + assert!(result.is_ok()) + } + + #[tokio::test(flavor = "multi_thread")] + async fn cannot_pass_different_body_as_published_pq_id_with_apq_disabled() { + let (id, _body, manifest) = fake_manifest(); + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .apq(Apq::fake_builder().enabled(false).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + let incoming_request = SupergraphRequest::fake_builder() + .extension("persistedQuery", json!({"version": 1, "sha256Hash": id})) + .query("invalid body") + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_some()); + + let result = pq_layer.supergraph_request(incoming_request); + if let Err(mut response) = result { + if let Some(response) = response.next_response().await { + assert_eq!(response.errors, vec![graphql_err_cannot_send_id_and_body()]); + } else { + panic!("could not get response from pq layer"); + } + } else { + panic!("pq layer 
returned request instead of returning an error response"); + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn cannot_pass_same_body_as_published_pq_id_with_apq_disabled() { + let (id, body, manifest) = fake_manifest(); + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + let pq_layer = PersistedQueryLayer::new( + &Configuration::fake_builder() + .persisted_query(PersistedQueries::builder().enabled(true).build()) + .apq(Apq::fake_builder().enabled(false).build()) + .uplink(uplink_config) + .build() + .unwrap(), + None, + ) + .await + .unwrap(); + let incoming_request = SupergraphRequest::fake_builder() + .extension("persistedQuery", json!({"version": 1, "sha256Hash": id})) + .query(body) + .build() + .unwrap(); + + assert!(incoming_request.supergraph_request.body().query.is_some()); + + let result = pq_layer.supergraph_request(incoming_request); + if let Err(mut response) = result { + if let Some(response) = response.next_response().await { + assert_eq!(response.errors, vec![graphql_err_cannot_send_id_and_body()]); + } else { + panic!("could not get response from pq layer"); + } + } else { + panic!("pq layer returned request instead of returning an error response"); + } + } + + #[tokio::test(flavor = "multi_thread")] + async fn can_memoize_is_persisted() { + let mut is_persisted = None; + + let (_id, body, manifest) = fake_manifest(); + + let (_mock_guard, uplink_config) = mock_pq_uplink(&manifest).await; + + let manifest_poller = Arc::new( + PersistedQueryManifestPoller::new(&uplink_config) + .await + .unwrap(), + ); + + assert_eq!(is_persisted, None); + assert!(is_operation_persisted( + &mut is_persisted, + manifest_poller.clone(), + &body + )); + assert_eq!(is_persisted, Some(true)); + assert!(is_operation_persisted( + &mut is_persisted, + manifest_poller, + &body + )); + } +} diff --git a/apollo-router/src/services/router_service.rs b/apollo-router/src/services/router_service.rs index 0fa61d38261..7082c634827 100644 --- a/apollo-router/src/services/router_service.rs +++ b/apollo-router/src/services/router_service.rs @@ -52,6 +52,8 @@ use crate::protocols::multipart::ProtocolMode; use crate::query_planner::QueryPlanResult; use crate::router_factory::RouterFactory; use crate::services::layers::content_negociation::GRAPHQL_JSON_RESPONSE_HEADER_VALUE; +use crate::services::layers::persisted_queries::PersistedQueryLayer; +use crate::services::layers::persisted_queries::PersistedQueryManifestPoller; use crate::services::RouterRequest; use crate::services::RouterResponse; use crate::services::SupergraphRequest; @@ -65,6 +67,7 @@ use crate::ListenAddr; pub(crate) struct RouterService { supergraph_creator: Arc, apq_layer: APQLayer, + pub(crate) persisted_query_layer: Arc, query_analysis_layer: QueryAnalysisLayer, experimental_http_max_request_bytes: usize, } @@ -73,12 +76,14 @@ impl RouterService { pub(crate) fn new( supergraph_creator: Arc, apq_layer: APQLayer, + persisted_query_layer: Arc, query_analysis_layer: QueryAnalysisLayer, experimental_http_max_request_bytes: usize, ) -> Self { RouterService { supergraph_creator, apq_layer, + persisted_query_layer, query_analysis_layer, experimental_http_max_request_bytes, } @@ -119,8 +124,10 @@ pub(crate) async fn from_supergraph_mock_callback_and_configuration( QueryAnalysisLayer::new(supergraph_creator.schema(), Arc::clone(&configuration)).await, Arc::new(supergraph_creator), configuration, + None, ) .await + .unwrap() .make() } @@ -166,9 +173,11 @@ pub(crate) async fn empty() -> impl Service< RouterCreator::new( 
QueryAnalysisLayer::new(supergraph_creator.schema(), Default::default()).await, Arc::new(supergraph_creator), - Default::default(), + Arc::new(Configuration::default()), + None, ) .await + .unwrap() .make() } @@ -191,6 +200,7 @@ impl Service for RouterService { let supergraph_creator = self.supergraph_creator.clone(); let apq = self.apq_layer.clone(); + let persisted_query_layer = self.persisted_query_layer.clone(); let query_analysis = self.query_analysis_layer.clone(); let experimental_http_max_request_bytes = self.experimental_http_max_request_bytes; @@ -277,7 +287,12 @@ impl Service for RouterService { compiler: None, }; - let request_res = apq.supergraph_request(request).await; + let mut request_res = persisted_query_layer.supergraph_request(request); + + if let Ok(request) = request_res { + request_res = apq.supergraph_request(request).await; + } + let SupergraphResponse { response, context } = match request_res { Err(response) => response, Ok(request) => match query_analysis.supergraph_request(request).await { @@ -441,6 +456,7 @@ pub(crate) struct RouterCreator { supergraph_creator: Arc, static_page: StaticPageLayer, apq_layer: APQLayer, + pub(crate) persisted_query_layer: Arc, query_analysis_layer: QueryAnalysisLayer, experimental_http_max_request_bytes: usize, } @@ -474,7 +490,8 @@ impl RouterCreator { query_analysis_layer: QueryAnalysisLayer, supergraph_creator: Arc, configuration: Arc, - ) -> Self { + persisted_query_manifest_poller: Option>, + ) -> Result { let static_page = StaticPageLayer::new(&configuration); let apq_layer = if configuration.apq.enabled { APQLayer::with_cache( @@ -485,7 +502,11 @@ impl RouterCreator { APQLayer::disabled() }; - Self { + let persisted_query_layer = Arc::new( + PersistedQueryLayer::new(&configuration, persisted_query_manifest_poller).await?, + ); + + Ok(Self { supergraph_creator, static_page, apq_layer, @@ -493,7 +514,8 @@ impl RouterCreator { experimental_http_max_request_bytes: configuration .preview_operation_limits .experimental_http_max_request_bytes, - } + persisted_query_layer, + }) } pub(crate) fn make( @@ -507,6 +529,7 @@ impl RouterCreator { let router_service = content_negociation::RouterLayer::default().layer(RouterService::new( self.supergraph_creator.clone(), self.apq_layer.clone(), + self.persisted_query_layer.clone(), self.query_analysis_layer.clone(), self.experimental_http_max_request_bytes, )); diff --git a/apollo-router/src/state_machine.rs b/apollo-router/src/state_machine.rs index 573ae448875..5004092a1d8 100644 --- a/apollo-router/src/state_machine.rs +++ b/apollo-router/src/state_machine.rs @@ -1202,9 +1202,10 @@ mod tests { move |_configuration: &Arc, _, previous_router_service_factory: &Option<&MockMyRouterFactory>, - _extra_plugins: &Option)>>| { - previous_router_service_factory.is_some() - }, + _extra_plugins: &Option)>>| + { + previous_router_service_factory.is_some() + }, ) .returning(move |_, _, _, _| { let mut router = MockMyRouterFactory::new(); diff --git a/apollo-router/src/test_harness.rs b/apollo-router/src/test_harness.rs index c167f83f18c..5939502e132 100644 --- a/apollo-router/src/test_harness.rs +++ b/apollo-router/src/test_harness.rs @@ -1,4 +1,7 @@ +//! Test harness and mocks for the Apollo Router. + use std::collections::HashMap; +use std::default::Default; use std::sync::Arc; use tower::BoxError; @@ -25,6 +28,9 @@ use crate::services::supergraph; use crate::services::HasSchema; use crate::services::SupergraphCreator; +/// Mocks for services the Apollo Router must integrate with. 
+pub mod mocks; + #[cfg(test)] pub(crate) mod http_client; @@ -135,7 +141,8 @@ impl<'a> TestHarness<'a> { self, configuration: serde_json::Value, ) -> Result { - Ok(self.configuration(serde_json::from_value(configuration)?)) + let configuration: Configuration = serde_json::from_value(configuration)?; + Ok(self.configuration(Arc::new(configuration))) } /// Adds an extra, already instanciated plugin. @@ -267,8 +274,10 @@ impl<'a> TestHarness<'a> { QueryAnalysisLayer::new(supergraph_creator.schema(), Arc::clone(&config)).await, Arc::new(supergraph_creator), config, + None, ) - .await; + .await + .unwrap(); Ok(tower::service_fn(move |request: router::Request| { let router = ServiceBuilder::new().service(router_creator.make()).boxed(); @@ -289,9 +298,11 @@ impl<'a> TestHarness<'a> { let router_creator = RouterCreator::new( QueryAnalysisLayer::new(supergraph_creator.schema(), Arc::clone(&config)).await, Arc::new(supergraph_creator), - Arc::clone(&config), + config.clone(), + None, ) - .await; + .await?; + let web_endpoints = router_creator.web_endpoints(); let live = Arc::new(std::sync::atomic::AtomicBool::new(false)); diff --git a/apollo-router/src/test_harness/mocks/mod.rs b/apollo-router/src/test_harness/mocks/mod.rs new file mode 100644 index 00000000000..3f153b3e121 --- /dev/null +++ b/apollo-router/src/test_harness/mocks/mod.rs @@ -0,0 +1,2 @@ +/// Mocks for the persisted queries uplink integration. +pub mod persisted_queries; diff --git a/apollo-router/src/test_harness/mocks/persisted_queries.rs b/apollo-router/src/test_harness/mocks/persisted_queries.rs new file mode 100644 index 00000000000..822c91bf59f --- /dev/null +++ b/apollo-router/src/test_harness/mocks/persisted_queries.rs @@ -0,0 +1,118 @@ +use std::collections::HashMap; +use std::time::Duration; + +use maplit::hashmap; +use serde::Deserialize; +use serde::Serialize; +use serde_json::json; +use url::Url; +use wiremock::matchers::method; +use wiremock::Mock; +use wiremock::MockServer; +use wiremock::ResponseTemplate; + +use crate::uplink::Endpoints; +use crate::uplink::UplinkConfig; + +/// Get a query ID, body, and a PQ manifest with that ID and body. +pub fn fake_manifest() -> (String, String, HashMap) { + let id = "1234".to_string(); + let body = r#"query { typename }"#.to_string(); + let manifest = hashmap! { id.to_string() => body.to_string() }; + (id, body, manifest) +} + +/// Mocks an uplink server with a persisted query list containing no operations. +pub async fn mock_empty_pq_uplink() -> (UplinkMockGuard, UplinkConfig) { + mock_pq_uplink(&HashMap::new()).await +} + +/// Mocks an uplink server with a persisted query list with a delay. +pub async fn mock_pq_uplink_with_delay( + manifest: &HashMap, + delay: Duration, +) -> (UplinkMockGuard, UplinkConfig) { + do_mock_pq_uplink(manifest, Some(delay)).await +} + +/// Mocks an uplink server with a persisted query list containing operations passed to this function. +pub async fn mock_pq_uplink(manifest: &HashMap) -> (UplinkMockGuard, UplinkConfig) { + do_mock_pq_uplink(manifest, None).await +} + +/// Guards for the uplink and GCS mock servers, dropping these structs shuts down the server. 
+pub struct UplinkMockGuard { + _uplink_mock_guard: MockServer, + _gcs_mock_guard: MockServer, +} + +#[derive(Deserialize, Serialize)] +struct Operation { + id: String, + body: String, +} + +async fn do_mock_pq_uplink( + manifest: &HashMap, + delay: Option, +) -> (UplinkMockGuard, UplinkConfig) { + let operations: Vec = manifest + // clone the manifest so the caller can still make assertions about it + .clone() + .drain() + .map(|(id, body)| Operation { id, body }) + .collect(); + + let mock_gcs_server = MockServer::start().await; + + let gcs_response = ResponseTemplate::new(200).set_body_json(json!({ + "format": "apollo-persisted-query-manifest", + "version": 1, + "operations": operations + })); + + Mock::given(method("GET")) + .respond_with(gcs_response) + .mount(&mock_gcs_server) + .await; + + let mock_gcs_server_uri: Url = mock_gcs_server.uri().parse().unwrap(); + + let mock_uplink_server = MockServer::start().await; + + let mut gcs_response = ResponseTemplate::new(200).set_body_json(json!({ + "data": { + "persistedQueries": { + "__typename": "PersistedQueriesResult", + "id": "889406d7-b4f8-44df-a499-6c1e3c1bea09:1", + "minDelaySeconds": 60, + "chunks": [ + { + "id": "graph-id/889406a1-b4f8-44df-a499-6c1e3c1bea09/ec8ae3ae3eb00c738031dbe81603489b5d24fbf58f15bdeec1587282ee4e6eea", + "urls": [ + mock_gcs_server_uri + ] + } + ] + } + } + })); + + if let Some(delay) = delay { + gcs_response = gcs_response.set_delay(delay); + } + + Mock::given(method("POST")) + .respond_with(gcs_response) + .mount(&mock_uplink_server) + .await; + + let url = mock_uplink_server.uri().parse().unwrap(); + ( + UplinkMockGuard { + _uplink_mock_guard: mock_uplink_server, + _gcs_mock_guard: mock_gcs_server, + }, + UplinkConfig::for_tests(Endpoints::fallback(vec![url])), + ) +} diff --git a/apollo-router/src/uplink/license_enforcement.rs b/apollo-router/src/uplink/license_enforcement.rs index a4d6421b48b..8017139a1c1 100644 --- a/apollo-router/src/uplink/license_enforcement.rs +++ b/apollo-router/src/uplink/license_enforcement.rs @@ -176,6 +176,10 @@ impl LicenseEnforcementReport { .path("$.preview_operation_limits.max_aliases") .name("Operation aliases limiting") .build(), + ConfigurationRestriction::builder() + .path("$.preview_persisted_queries") + .name("Persisted queries") + .build(), ] } } diff --git a/apollo-router/src/uplink/license_stream.rs b/apollo-router/src/uplink/license_stream.rs index e2c04df617a..79ec956d022 100644 --- a/apollo-router/src/uplink/license_stream.rs +++ b/apollo-router/src/uplink/license_stream.rs @@ -243,6 +243,7 @@ mod test { use crate::uplink::license_stream::LicenseQuery; use crate::uplink::license_stream::LicenseStreamExt; use crate::uplink::stream_from_uplink; + use crate::uplink::UplinkConfig; #[tokio::test] async fn integration_test() { @@ -250,13 +251,13 @@ mod test { std::env::var("TEST_APOLLO_KEY"), std::env::var("TEST_APOLLO_GRAPH_REF"), ) { - let results = stream_from_uplink::( + let results = stream_from_uplink::(UplinkConfig { apollo_key, apollo_graph_ref, - None, - Duration::from_secs(1), - Duration::from_secs(5), - ) + endpoints: None, + poll_interval: Duration::from_secs(1), + timeout: Duration::from_secs(5), + }) .take(1) .collect::>() .await; diff --git a/apollo-router/src/uplink/mod.rs b/apollo-router/src/uplink/mod.rs index efbfa6cd729..f9afb7ce492 100644 --- a/apollo-router/src/uplink/mod.rs +++ b/apollo-router/src/uplink/mod.rs @@ -12,6 +12,7 @@ use url::Url; pub(crate) mod license_enforcement; pub(crate) mod license_stream; +pub(crate) mod 
persisted_queries_manifest_stream; pub(crate) mod schema_stream; const GCP_URL: &str = "https://uplink.api.apollographql.com"; @@ -32,6 +33,7 @@ pub(crate) enum Error { UplinkErrorNoRetry { code: String, message: String }, } +#[derive(Debug)] pub(crate) struct UplinkRequest { api_key: String, graph_ref: String, @@ -59,7 +61,8 @@ where }, } -pub(crate) enum Endpoints { +#[derive(Debug, Clone)] +pub enum Endpoints { Fallback { urls: Vec, }, @@ -118,14 +121,44 @@ impl Endpoints { } } +/// Configuration for polling Apollo Uplink. +/// This struct does not change on router reloads - they are all sourced from CLI options. +#[derive(Debug, Clone, Default)] +pub struct UplinkConfig { + /// The Apollo key: `` + pub apollo_key: String, + + /// The apollo graph reference: `@` + pub apollo_graph_ref: String, + + /// The endpoints polled. + pub endpoints: Option, + + /// The duration between polling + pub poll_interval: Duration, + + /// The HTTP client timeout for each poll + pub timeout: Duration, +} + +impl UplinkConfig { + /// Mock uplink configuration options for use in tests + /// A nice pattern is to use wiremock to start an uplink mocker and pass the URL here. + pub fn for_tests(uplink_endpoints: Endpoints) -> Self { + Self { + apollo_key: "key".to_string(), + apollo_graph_ref: "graph".to_string(), + endpoints: Some(uplink_endpoints), + poll_interval: Duration::from_secs(2), + timeout: Duration::from_secs(5), + } + } +} + /// Regularly fetch from Uplink -/// If urls are supplied then they will be called round robin +/// If urls are supplied then they will be called round robin pub(crate) fn stream_from_uplink( - api_key: String, - graph_ref: String, - endpoints: Option, - mut interval: Duration, - timeout: Duration, + mut uplink_config: UplinkConfig, ) -> impl Stream> where Query: graphql_client::GraphQLQuery, @@ -133,22 +166,27 @@ where ::Variables: From + Send + Sync, Response: Send + 'static + Debug, { - let (sender, receiver) = channel(2); let query = query_name::(); + let (sender, receiver) = channel(2); let task = async move { let mut last_id = None; - let mut endpoints = endpoints.unwrap_or_default(); + let mut endpoints = uplink_config.endpoints.unwrap_or_default(); loop { - let query_body = Query::build_query( - UplinkRequest { - graph_ref: graph_ref.to_string(), - api_key: api_key.to_string(), - id: last_id.clone(), - } - .into(), - ); - - match fetch::(&query_body, &mut endpoints.iter(), timeout).await { + let variables = UplinkRequest { + graph_ref: uplink_config.apollo_graph_ref.to_string(), + api_key: uplink_config.apollo_key.to_string(), + id: last_id.clone(), + }; + + let query_body = Query::build_query(variables.into()); + + match fetch::( + &query_body, + &mut endpoints.iter(), + uplink_config.timeout, + ) + .await + { Ok(response) => { tracing::info!( counter.apollo_router_uplink_fetch_count_total = 1, @@ -162,7 +200,7 @@ where delay, } => { last_id = Some(id); - interval = Duration::from_secs(delay); + uplink_config.poll_interval = Duration::from_secs(delay); if let Err(e) = sender.send(Ok(response)).await { tracing::debug!("failed to push to stream. This is likely to be because the router is shutting down: {e}"); @@ -170,13 +208,12 @@ where } } UplinkResponse::Unchanged { id, delay } => { - tracing::debug!("uplink response did not change"); // Preserve behavior for schema uplink errors where id and delay are not reset if they are not provided on error. 
if let Some(id) = id { last_id = Some(id); } if let Some(delay) = delay { - interval = Duration::from_secs(delay); + uplink_config.poll_interval = Duration::from_secs(delay); } } UplinkResponse::Error { @@ -212,7 +249,7 @@ where } } - tokio::time::sleep(interval).await; + tokio::time::sleep(uplink_config.poll_interval).await; } }; drop(tokio::task::spawn(task.with_current_subscriber())); @@ -237,7 +274,6 @@ where match http_request::(url.as_str(), request_body, timeout).await { Ok(response) => { let response = response.data.map(Into::into); - match &response { None => { tracing::info!( @@ -355,6 +391,7 @@ mod test { use crate::uplink::stream_from_uplink; use crate::uplink::Endpoints; use crate::uplink::Error; + use crate::uplink::UplinkConfig; use crate::uplink::UplinkRequest; use crate::uplink::UplinkResponse; @@ -416,6 +453,26 @@ mod test { } } + fn mock_uplink_config_with_fallback_urls(urls: Vec) -> UplinkConfig { + UplinkConfig { + apollo_key: "dummy_key".to_string(), + apollo_graph_ref: "dummy_graph_ref".to_string(), + endpoints: Some(Endpoints::fallback(urls)), + poll_interval: Duration::from_secs(0), + timeout: Duration::from_secs(1), + } + } + + fn mock_uplink_config_with_round_robin_urls(urls: Vec) -> UplinkConfig { + UplinkConfig { + apollo_key: "dummy_key".to_string(), + apollo_graph_ref: "dummy_graph_ref".to_string(), + endpoints: Some(Endpoints::round_robin(urls)), + poll_interval: Duration::from_secs(0), + timeout: Duration::from_secs(1), + } + } + #[test] fn test_round_robin_endpoints() { let url1 = Url::parse("http://example1.com").expect("url must be valid"); @@ -453,11 +510,7 @@ mod test { .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::fallback(vec![url1, url2])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_fallback_urls(vec![url1, url2]), ) .take(2) .collect::>() @@ -482,11 +535,7 @@ mod test { .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::round_robin(vec![url1, url2])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_round_robin_urls(vec![url1, url2]), ) .take(2) .collect::>() @@ -505,11 +554,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::fallback(vec![url1, url2])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_fallback_urls(vec![url1, url2]), ) .take(2) .collect::>() @@ -527,11 +572,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::fallback(vec![url1, url2])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_fallback_urls(vec![url1, url2]), ) .collect::>() .await; @@ -561,11 +602,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::fallback(vec![url1, url2, url3])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_fallback_urls(vec![url1, url2, url3]), ) .take(2) .collect::>() @@ -596,11 +633,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::fallback(vec![url1, url2, url3])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_fallback_urls(vec![url1, url2, 
url3]), ) .take(2) .collect::>() @@ -630,11 +663,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::round_robin(vec![url1, url2, url3])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_round_robin_urls(vec![url1, url2, url3]), ) .take(2) .collect::>() @@ -664,11 +693,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::round_robin(vec![url1, url2, url3])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_round_robin_urls(vec![url1, url2, url3]), ) .take(2) .collect::>() @@ -686,11 +711,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::round_robin(vec![url1, url2, url3])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_round_robin_urls(vec![url1, url2, url3]), ) .take(1) .collect::>() @@ -710,11 +731,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::round_robin(vec![url1, url2, url3])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_round_robin_urls(vec![url1, url2, url3]), ) .take(2) .collect::>() @@ -738,11 +755,7 @@ mod test { .build() .await; let results = stream_from_uplink::( - "dummy_key".to_string(), - "dummy_graph_ref".to_string(), - Some(Endpoints::round_robin(vec![url1, url2])), - Duration::from_secs(0), - Duration::from_secs(1), + mock_uplink_config_with_round_robin_urls(vec![url1, url2]), ) .take(1) .collect::>() diff --git a/apollo-router/src/uplink/persisted_queries_manifest_query.graphql b/apollo-router/src/uplink/persisted_queries_manifest_query.graphql new file mode 100644 index 00000000000..6d1ae0eaafb --- /dev/null +++ b/apollo-router/src/uplink/persisted_queries_manifest_query.graphql @@ -0,0 +1,21 @@ +query PersistedQueriesManifestQuery($apiKey: String!, $graph_ref: String!, $ifAfterId: ID) { + persistedQueries(ref: $graph_ref, apiKey: $apiKey, ifAfterId: $ifAfterId) { + __typename + ... on PersistedQueriesResult { + id + minDelaySeconds + chunks { + id + urls + } + } + ... on Unchanged { + id + minDelaySeconds + } + ... on FetchError { + code + message + } + } +} diff --git a/apollo-router/src/uplink/persisted_queries_manifest_stream.rs b/apollo-router/src/uplink/persisted_queries_manifest_stream.rs new file mode 100644 index 00000000000..d30818430df --- /dev/null +++ b/apollo-router/src/uplink/persisted_queries_manifest_stream.rs @@ -0,0 +1,160 @@ +// tonic does not derive `Eq` for the gRPC message types, which causes a warning from Clippy. The +// current suggestion is to explicitly allow the lint in the module that imports the protos. 
+// Read more: https://github.com/hyperium/tonic/issues/1056 +#![allow(clippy::derive_partial_eq_without_eq)] + +use graphql_client::GraphQLQuery; + +use crate::uplink::persisted_queries_manifest_stream::persisted_queries_manifest_query::FetchErrorCode; +use crate::uplink::persisted_queries_manifest_stream::persisted_queries_manifest_query::PersistedQueriesManifestQueryPersistedQueries; +use crate::uplink::persisted_queries_manifest_stream::persisted_queries_manifest_query::PersistedQueriesManifestQueryPersistedQueriesOnPersistedQueriesResultChunks; +use crate::uplink::UplinkRequest; +use crate::uplink::UplinkResponse; + +#[derive(GraphQLQuery)] +#[graphql( + query_path = "src/uplink/persisted_queries_manifest_query.graphql", + schema_path = "src/uplink/uplink.graphql", + request_derives = "Debug", + response_derives = "PartialEq, Debug, Deserialize", + deprecated = "warn" +)] + +pub(crate) struct PersistedQueriesManifestQuery; + +impl From for persisted_queries_manifest_query::Variables { + fn from(req: UplinkRequest) -> Self { + persisted_queries_manifest_query::Variables { + api_key: req.api_key, + graph_ref: req.graph_ref, + if_after_id: req.id, + } + } +} + +#[derive(Debug, Clone, Eq, PartialEq)] +pub(crate) struct PersistedQueriesManifestChunk { + pub(crate) id: String, + pub(crate) urls: Vec, +} + +impl PersistedQueriesManifestChunk { + fn from_query_chunks( + query_chunks: &PersistedQueriesManifestQueryPersistedQueriesOnPersistedQueriesResultChunks, + ) -> Self { + Self { + id: query_chunks.id.clone(), + urls: query_chunks.urls.clone(), + } + } +} + +pub(crate) type PersistedQueriesManifestChunks = Vec; +pub(crate) type MaybePersistedQueriesManifestChunks = Option; + +impl From + for UplinkResponse +{ + fn from(response: persisted_queries_manifest_query::ResponseData) -> Self { + match response.persisted_queries { + PersistedQueriesManifestQueryPersistedQueries::PersistedQueriesResult(response) => { + if let Some(chunks) = response.chunks { + let chunks = chunks + .iter() + .map(PersistedQueriesManifestChunk::from_query_chunks) + .collect(); + UplinkResponse::New { + response: Some(chunks), + id: response.id, + // this will truncate the number of seconds to under u64::MAX, which should be + // a large enough delay anyway + delay: response.min_delay_seconds as u64, + } + } else { + UplinkResponse::New { + // no persisted query list is associated with this variant + response: None, + id: response.id, + delay: response.min_delay_seconds as u64, + } + } + } + PersistedQueriesManifestQueryPersistedQueries::Unchanged(response) => { + UplinkResponse::Unchanged { + id: Some(response.id), + delay: Some(response.min_delay_seconds as u64), + } + } + PersistedQueriesManifestQueryPersistedQueries::FetchError(err) => { + UplinkResponse::Error { + retry_later: err.code == FetchErrorCode::RETRY_LATER, + code: match err.code { + FetchErrorCode::AUTHENTICATION_FAILED => { + "AUTHENTICATION_FAILED".to_string() + } + FetchErrorCode::ACCESS_DENIED => "ACCESS_DENIED".to_string(), + FetchErrorCode::UNKNOWN_REF => "UNKNOWN_REF".to_string(), + FetchErrorCode::RETRY_LATER => "RETRY_LATER".to_string(), + FetchErrorCode::NOT_IMPLEMENTED_ON_THIS_INSTANCE => { + "NOT_IMPLEMENTED_ON_THIS_INSTANCE".to_string() + } + FetchErrorCode::Other(other) => other, + }, + message: err.message, + } + } + } + } +} + +#[cfg(test)] +mod test { + use std::str::FromStr; + use std::time::Duration; + + use futures::stream::StreamExt; + use url::Url; + + use 
crate::uplink::persisted_queries_manifest_stream::MaybePersistedQueriesManifestChunks; + use crate::uplink::persisted_queries_manifest_stream::PersistedQueriesManifestQuery; + use crate::uplink::stream_from_uplink; + use crate::uplink::Endpoints; + use crate::uplink::UplinkConfig; + use crate::uplink::GCP_URL; + + #[tokio::test] + async fn integration_test() { + if let (Ok(apollo_key), Ok(apollo_graph_ref)) = ( + std::env::var("TEST_APOLLO_KEY"), + std::env::var("TEST_APOLLO_GRAPH_REF"), + ) { + // TODO: Add AWS_URL when that exists + for url in &[GCP_URL] { + let results = stream_from_uplink::< + PersistedQueriesManifestQuery, + MaybePersistedQueriesManifestChunks, + >(UplinkConfig { + apollo_key: apollo_key.clone(), + apollo_graph_ref: apollo_graph_ref.clone(), + endpoints: Some(Endpoints::fallback(vec![ + Url::from_str(url).expect("url must be valid") + ])), + poll_interval: Duration::from_secs(1), + timeout: Duration::from_secs(5), + }) + .take(1) + .collect::>() + .await; + + let persisted_query_manifest = results + .get(0) + .unwrap_or_else(|| panic!("expected one result from {}", url)) + .as_ref() + .unwrap_or_else(|_| panic!("schema should be OK from {}", url)) + .as_ref() + .unwrap(); + assert!(!persisted_query_manifest.is_empty()) + } + } + } +} diff --git a/apollo-router/src/uplink/schema_stream.rs b/apollo-router/src/uplink/schema_stream.rs index 29970025d1b..67e8ad44eb5 100644 --- a/apollo-router/src/uplink/schema_stream.rs +++ b/apollo-router/src/uplink/schema_stream.rs @@ -74,6 +74,7 @@ mod test { use crate::uplink::schema_stream::SupergraphSdlQuery; use crate::uplink::stream_from_uplink; use crate::uplink::Endpoints; + use crate::uplink::UplinkConfig; use crate::uplink::AWS_URL; use crate::uplink::GCP_URL; @@ -84,15 +85,15 @@ mod test { std::env::var("TEST_APOLLO_KEY"), std::env::var("TEST_APOLLO_GRAPH_REF"), ) { - let results = stream_from_uplink::( + let results = stream_from_uplink::(UplinkConfig { apollo_key, apollo_graph_ref, - Some(Endpoints::fallback(vec![ + endpoints: Some(Endpoints::fallback(vec![ Url::from_str(url).expect("url must be valid") ])), - Duration::from_secs(1), - Duration::from_secs(5), - ) + poll_interval: Duration::from_secs(1), + timeout: Duration::from_secs(5), + }) .take(1) .collect::>() .await; diff --git a/apollo-router/tests/integration_tests.rs b/apollo-router/tests/integration_tests.rs index e7cdba407e0..6ed582d8c97 100644 --- a/apollo-router/tests/integration_tests.rs +++ b/apollo-router/tests/integration_tests.rs @@ -14,6 +14,8 @@ use apollo_router::plugin::PluginInit; use apollo_router::services::router; use apollo_router::services::subgraph; use apollo_router::services::supergraph; +use apollo_router::test_harness::mocks::persisted_queries::*; +use apollo_router::Configuration; use apollo_router::Context; use futures::StreamExt; use http::header::ACCEPT; @@ -459,6 +461,137 @@ async fn automated_persisted_queries() { assert_eq!(registry.totals(), expected_service_hits); } +#[tokio::test(flavor = "multi_thread")] +async fn persisted_queries() { + use hyper::header::HeaderValue; + use serde_json::json; + + /// Construct a persisted query request from an ID. 
+ fn pq_request(persisted_query_id: &str) -> router::Request { + supergraph::Request::fake_builder() + .extension( + "persistedQuery", + json!({ + "version": 1, + "sha256Hash": persisted_query_id + }), + ) + .build() + .expect("expecting valid request") + .try_into() + .expect("could not convert supergraph::Request to router::Request") + } + + // set up a PQM with one query + const PERSISTED_QUERY_ID: &str = "GetMyNameID"; + const PERSISTED_QUERY_BODY: &str = "query GetMyName { me { name } }"; + let expected_data = serde_json_bytes::json!({ + "me": { + "name": "Ada Lovelace" + } + }); + + let (_mock_guard, uplink_config) = mock_pq_uplink( + &hashmap! { PERSISTED_QUERY_ID.to_string() => PERSISTED_QUERY_BODY.to_string() }, + ) + .await; + + let config = serde_json::json!({ + "preview_persisted_queries": { + "enabled": true + }, + "apq": { + "enabled": false + } + }); + + let mut config: Configuration = serde_json::from_value(config).unwrap(); + config.uplink = Some(uplink_config); + let (router, registry) = setup_router_and_registry_with_config(config).await.unwrap(); + + // Successfully run a persisted query. + let actual = query_with_router(router.clone(), pq_request(PERSISTED_QUERY_ID)).await; + assert!(actual.errors.is_empty()); + assert_eq!(actual.data.as_ref(), Some(&expected_data)); + assert_eq!(registry.totals(), hashmap! {"accounts".to_string() => 1}); + + // Error on unpersisted query. + const UNKNOWN_QUERY_ID: &str = "unknown_query"; + const UNPERSISTED_QUERY_BODY: &str = "query GetYourName { you: me { name } }"; + let expected_data = serde_json_bytes::json!({ + "you": { + "name": "Ada Lovelace" + } + }); + let actual = query_with_router(router.clone(), pq_request(UNKNOWN_QUERY_ID)).await; + assert_eq!( + actual.errors, + vec![apollo_router::graphql::Error::builder() + .message(&format!( + "Persisted query '{UNKNOWN_QUERY_ID}' not found in the persisted query list" + )) + .extension_code("PERSISTED_QUERY_NOT_IN_LIST") + .build()] + ); + assert_eq!(actual.data, None); + assert_eq!(registry.totals(), hashmap! {"accounts".to_string() => 1}); + + // We didn't break normal GETs. + let actual = query_with_router( + router.clone(), + supergraph::Request::fake_builder() + .query(UNPERSISTED_QUERY_BODY) + .method(Method::GET) + .build() + .unwrap() + .try_into() + .unwrap(), + ) + .await; + assert!(actual.errors.is_empty()); + assert_eq!(actual.data.as_ref(), Some(&expected_data)); + assert_eq!(registry.totals(), hashmap! {"accounts".to_string() => 2}); + + // We didn't break normal POSTs. + let actual = query_with_router( + router.clone(), + supergraph::Request::fake_builder() + .query(UNPERSISTED_QUERY_BODY) + .method(Method::POST) + .build() + .unwrap() + .try_into() + .unwrap(), + ) + .await; + assert!(actual.errors.is_empty()); + assert_eq!(actual.data, Some(expected_data)); + assert_eq!(registry.totals(), hashmap! 
{"accounts".to_string() => 3}); + + // Proper error when sending malformed request body + let actual = query_with_router( + router.clone(), + http::Request::builder() + .uri("http://default") + .method(Method::POST) + .header( + CONTENT_TYPE, + HeaderValue::from_static(APPLICATION_JSON.essence_str()), + ) + .body(router::Body::empty()) + .unwrap() + .into(), + ) + .await; + assert_eq!(actual.errors.len(), 1); + + assert_eq!(actual.errors[0].message, "Invalid GraphQL request"); + assert_eq!( + actual.errors[0].extensions["code"], + "INVALID_GRAPHQL_REQUEST" + ); +} + #[tokio::test(flavor = "multi_thread")] async fn missing_variables() { let request = supergraph::Request::fake_builder() @@ -957,6 +1090,20 @@ async fn fallible_setup_router_and_registry( Ok((router, counting_registry)) } +async fn setup_router_and_registry_with_config( + config: Configuration, +) -> Result<(router::BoxCloneService, CountingServiceRegistry), BoxError> { + let counting_registry = CountingServiceRegistry::new(); + let router = apollo_router::TestHarness::builder() + .with_subgraph_network_requests() + .configuration(Arc::new(config)) + .schema(include_str!("fixtures/supergraph.graphql")) + .extra_plugin(counting_registry.clone()) + .build_router() + .await?; + Ok((router, counting_registry)) +} + async fn setup_router_and_registry( config: serde_json::Value, ) -> (router::BoxCloneService, CountingServiceRegistry) { @@ -1125,9 +1272,17 @@ async fn all_stock_router_example_yamls_are_valid() { let raw_yaml = std::fs::read_to_string(entry_path) .unwrap_or_else(|e| panic!("unable to read {display_path}: {e}")); { - let yaml = serde_yaml::from_str::(&raw_yaml) + let mut configuration: Configuration = serde_yaml::from_str(&raw_yaml) .unwrap_or_else(|e| panic!("unable to parse YAML {display_path}: {e}")); - fallible_setup_router_and_registry(yaml) + let (_mock_guard, configuration) = + if configuration.preview_persisted_queries.enabled { + let (_mock_guard, uplink_config) = mock_empty_pq_uplink().await; + configuration.uplink = Some(uplink_config); + (Some(_mock_guard), configuration) + } else { + (None, configuration) + }; + setup_router_and_registry_with_config(configuration) .await .unwrap_or_else(|e| { panic!("unable to start up router for {display_path}: {e}"); diff --git a/docs/source/config.json b/docs/source/config.json index 0991dfa71f5..5df58fbad1b 100644 --- a/docs/source/config.json +++ b/docs/source/config.json @@ -1,28 +1,19 @@ { "title": "Router (self-hosted)", - "algoliaFilters": [ - "docset:router" - ], + "algoliaFilters": ["docset:router"], "sidebar": { "Introduction": "/", "Quickstart": "/quickstart", "Moving from @apollo/gateway": "/migrating-from-gateway", "Federation version support": "/federation-version-support", - "Enterprise features": [ - "/enterprise-features", - [ - "enterprise" - ] - ], + "Enterprise features": ["/enterprise-features", ["enterprise"]], "Configuring the Router": { "Overview": "/configuration/overview", "Caching": { "In-memory caching": "/configuration/in-memory-caching", "Distributed caching": [ "/configuration/distributed-caching", - [ - "enterprise" - ] + ["enterprise"] ] }, "Debugging": { @@ -36,19 +27,8 @@ "Security": { "CORS": "/configuration/cors", "CSRF prevention": "/configuration/csrf", - "JWT Authentication": [ - "/configuration/authn-jwt", - [ - "enterprise" - ] - ], - "Operation limits": [ - "/configuration/operation-limits", - [ - "enterprise", - "preview" - ] - ], + "JWT Authentication": ["/configuration/authn-jwt", ["enterprise"]], + "Operation limits": 
["/configuration/operation-limits", ["enterprise"]], "Privacy and data collection": "/privacy" } }, @@ -59,21 +39,15 @@ "GraphQL Subscriptions": { "Subscriptions setup": [ "/executing-operations/subscription-support", - [ - "enterprise" - ] + ["enterprise"] ], "Subgraph protocol: HTTP callback": [ "/executing-operations/subscription-callback-protocol", - [ - "enterprise" - ] + ["enterprise", "preview"] ], "Client protocol: HTTP multipart": [ "/executing-operations/subscription-multipart-protocol", - [ - "enterprise" - ] + ["enterprise"] ] } }, @@ -98,12 +72,7 @@ "Overview": "/customizations/overview", "Rhai scripts": "/customizations/rhai", "Rhai API reference": "/customizations/rhai-api", - "External coprocessing": [ - "/customizations/coprocessor", - [ - "enterprise" - ] - ], + "External coprocessing": ["/customizations/coprocessor", ["enterprise"]], "Native Rust plugins": "/customizations/native", "Custom router binary": "/customizations/custom-binary" }, diff --git a/docs/source/executing-operations/subscription-callback-protocol.mdx b/docs/source/executing-operations/subscription-callback-protocol.mdx index e0718992d7a..d77e0c0a550 100644 --- a/docs/source/executing-operations/subscription-callback-protocol.mdx +++ b/docs/source/executing-operations/subscription-callback-protocol.mdx @@ -173,4 +173,4 @@ Payload example for `POST` on `http://localhost:4000/callback/c4a9d1b8-dc57-44ab - Every received events the _Event source_ calls the callback endpoint with a [`next` payload](#next) - If an error appears the _Event source_ calls the callback endpoint with a [`complete` payload with errors field](#complete) - If the stream of events is done then send a [`complete` payload WITHOUT errors field](#complete) -8. _Event source_ returns empty body containing a new header `subscription-protocol: callback` in answer to the initial call from _Apollo Router_ +8. _Event source_ returns empty body containing a new header `subscription-protocol: callback` in answer to the initial call from _Apollo Router_ \ No newline at end of file diff --git a/docs/source/executing-operations/subscription-support.mdx b/docs/source/executing-operations/subscription-support.mdx index 6f69c67c5df..c8a36444e00 100644 --- a/docs/source/executing-operations/subscription-support.mdx +++ b/docs/source/executing-operations/subscription-support.mdx @@ -423,4 +423,4 @@ subscription: #highlight-end ``` -If a client attempts to execute a subscription on your router when it's already at `max_open_subscriptions`, the router rejects the client's request with an error. +If a client attempts to execute a subscription on your router when it's already at `max_open_subscriptions`, the router rejects the client's request with an error. 
\ No newline at end of file diff --git a/examples/persisted-queries/additive_pq.yaml b/examples/persisted-queries/additive_pq.yaml new file mode 100644 index 00000000000..6c768442975 --- /dev/null +++ b/examples/persisted-queries/additive_pq.yaml @@ -0,0 +1,11 @@ +# this example demonstrates enabling additive persisted queries - enable running queries by ID if they +# have been published to a Persisted Query List (PQL) that is linked to your GraphOS variant + +# usage: +# 1) start the router with: +# APOLLO_KEY="my-api-key" APOLLO_GRAPH_REF="my-graph@my-variant" cargo run -- -c ./additive_pq.yaml +# 2) make requests against the router: +# curl --get http://localhost:4000 --header 'content-type: application/json' --data-urlencode 'extensions={"persistedQuery":{"sha256Hash":"hash-of-operation", "version": 1}}' + +preview_persisted_queries: + enabled: true diff --git a/examples/persisted-queries/pq_log_unknown.yaml b/examples/persisted-queries/pq_log_unknown.yaml new file mode 100644 index 00000000000..bf17dc9a1bc --- /dev/null +++ b/examples/persisted-queries/pq_log_unknown.yaml @@ -0,0 +1,12 @@ +# this example demonstrates enabling persisted queries and logging all operations +# that have not been published to a Persisted Query List (PQL) that is linked to your GraphOS variant + +# usage: +# 1) start the router with: +# APOLLO_KEY="my-api-key" APOLLO_GRAPH_REF="my-graph@my-variant" cargo run -- -c ./pq_log_unkown.yaml +# 2) make requests against the router: +# curl --get http://localhost:4000 --header 'content-type: application/json' --data-urlencode 'extensions={"persistedQuery":{"sha256Hash":"hash-of-operation", "version": 1}}' + +preview_persisted_queries: + enabled: true + log_unknown: true diff --git a/examples/persisted-queries/safelist_pq.yaml b/examples/persisted-queries/safelist_pq.yaml new file mode 100644 index 00000000000..3a900a06f75 --- /dev/null +++ b/examples/persisted-queries/safelist_pq.yaml @@ -0,0 +1,16 @@ +# this example demonstrates enabling persisted queries and requiring that all operations +# have been published to a Persisted Query List (PQL) that is linked to your GraphOS variant +# freeform GraphQL that matches published operations may still be executed + +# usage: +# 1) start the router with: +# APOLLO_KEY="my-api-key" APOLLO_GRAPH_REF="my-graph@my-variant" cargo run -- -c ./safelist_pq.yaml +# 2) make requests against the router: +# curl --get http://localhost:4000 --header 'content-type: application/json' --data-urlencode 'extensions={"persistedQuery":{"sha256Hash":"hash-of-operation", "version": 1}}' + +preview_persisted_queries: + enabled: true + safelist: + enabled: true +apq: + enabled: false \ No newline at end of file diff --git a/examples/persisted-queries/safelist_pq_require_id.yaml b/examples/persisted-queries/safelist_pq_require_id.yaml new file mode 100644 index 00000000000..8cd6855c8a9 --- /dev/null +++ b/examples/persisted-queries/safelist_pq_require_id.yaml @@ -0,0 +1,17 @@ +# this example demonstrates enabling persisted queries and requiring that all operations +# have been published to a Persisted Query List (PQL) that is linked to your GraphOS variant +# freeform GraphQL that matches published operations may NOT be executed + +# usage: +# 1) start the router with: +# APOLLO_KEY="my-api-key" APOLLO_GRAPH_REF="my-graph@my-variant" cargo run -- -c ./safelist_pq_require_id.yaml +# 2) make requests against the router: +# curl --get http://localhost:4000 --header 'content-type: application/json' --data-urlencode 
'extensions={"persistedQuery":{"sha256Hash":"hash-of-operation", "version": 1}}' + +preview_persisted_queries: + enabled: true + safelist: + enabled: true + require_id: true +apq: + enabled: false diff --git a/licenses.html b/licenses.html index b1e3a9e9d2a..3f410f864e5 100644 --- a/licenses.html +++ b/licenses.html @@ -44,10 +44,10 @@

Third Party Licenses

Overview of licenses:

-  • MIT License (95)
-  • Apache License 2.0 (56)
-  • BSD 3-Clause "New" or "Revised" License (10)
+  • MIT License (97)
+  • Apache License 2.0 (60)
   • ISC License (10)
+  • BSD 3-Clause "New" or "Revised" License (9)
   • Mozilla Public License 2.0 (3)
   • Elastic License 2.0 (2)
   • BSD 2-Clause "Simplified" License (1)
  • @@ -1944,6 +1944,216 @@

    Used by:

    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + + +
  • +

    Apache License 2.0

    +

    Used by:

    + +
                                     Apache License
    +                           Version 2.0, January 2004
    +                        http://www.apache.org/licenses/
    +
    +   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
    +
    +   1. Definitions.
    +
    +      "License" shall mean the terms and conditions for use, reproduction,
    +      and distribution as defined by Sections 1 through 9 of this document.
    +
    +      "Licensor" shall mean the copyright owner or entity authorized by
    +      the copyright owner that is granting the License.
    +
    +      "Legal Entity" shall mean the union of the acting entity and all
    +      other entities that control, are controlled by, or are under common
    +      control with that entity. For the purposes of this definition,
    +      "control" means (i) the power, direct or indirect, to cause the
    +      direction or management of such entity, whether by contract or
    +      otherwise, or (ii) ownership of fifty percent (50%) or more of the
    +      outstanding shares, or (iii) beneficial ownership of such entity.
    +
    +      "You" (or "Your") shall mean an individual or Legal Entity
    +      exercising permissions granted by this License.
    +
    +      "Source" form shall mean the preferred form for making modifications,
    +      including but not limited to software source code, documentation
    +      source, and configuration files.
    +
    +      "Object" form shall mean any form resulting from mechanical
    +      transformation or translation of a Source form, including but
    +      not limited to compiled object code, generated documentation,
    +      and conversions to other media types.
    +
    +      "Work" shall mean the work of authorship, whether in Source or
    +      Object form, made available under the License, as indicated by a
    +      copyright notice that is included in or attached to the work
    +      (an example is provided in the Appendix below).
    +
    +      "Derivative Works" shall mean any work, whether in Source or Object
    +      form, that is based on (or derived from) the Work and for which the
    +      editorial revisions, annotations, elaborations, or other modifications
    +      represent, as a whole, an original work of authorship. For the purposes
    +      of this License, Derivative Works shall not include works that remain
    +      separable from, or merely link (or bind by name) to the interfaces of,
    +      the Work and Derivative Works thereof.
    +
    +      "Contribution" shall mean any work of authorship, including
    +      the original version of the Work and any modifications or additions
    +      to that Work or Derivative Works thereof, that is intentionally
    +      submitted to Licensor for inclusion in the Work by the copyright owner
    +      or by an individual or Legal Entity authorized to submit on behalf of
    +      the copyright owner. For the purposes of this definition, "submitted"
    +      means any form of electronic, verbal, or written communication sent
    +      to the Licensor or its representatives, including but not limited to
    +      communication on electronic mailing lists, source code control systems,
    +      and issue tracking systems that are managed by, or on behalf of, the
    +      Licensor for the purpose of discussing and improving the Work, but
    +      excluding communication that is conspicuously marked or otherwise
    +      designated in writing by the copyright owner as "Not a Contribution."
    +
    +      "Contributor" shall mean Licensor and any individual or Legal Entity
    +      on behalf of whom a Contribution has been received by Licensor and
    +      subsequently incorporated within the Work.
    +
    +   2. Grant of Copyright License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      copyright license to reproduce, prepare Derivative Works of,
    +      publicly display, publicly perform, sublicense, and distribute the
    +      Work and such Derivative Works in Source or Object form.
    +
    +   3. Grant of Patent License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      (except as stated in this section) patent license to make, have made,
    +      use, offer to sell, sell, import, and otherwise transfer the Work,
    +      where such license applies only to those patent claims licensable
    +      by such Contributor that are necessarily infringed by their
    +      Contribution(s) alone or by combination of their Contribution(s)
    +      with the Work to which such Contribution(s) was submitted. If You
    +      institute patent litigation against any entity (including a
    +      cross-claim or counterclaim in a lawsuit) alleging that the Work
    +      or a Contribution incorporated within the Work constitutes direct
    +      or contributory patent infringement, then any patent licenses
    +      granted to You under this License for that Work shall terminate
    +      as of the date such litigation is filed.
    +
    +   4. Redistribution. You may reproduce and distribute copies of the
    +      Work or Derivative Works thereof in any medium, with or without
    +      modifications, and in Source or Object form, provided that You
    +      meet the following conditions:
    +
    +      (a) You must give any other recipients of the Work or
    +          Derivative Works a copy of this License; and
    +
    +      (b) You must cause any modified files to carry prominent notices
    +          stating that You changed the files; and
    +
    +      (c) You must retain, in the Source form of any Derivative Works
    +          that You distribute, all copyright, patent, trademark, and
    +          attribution notices from the Source form of the Work,
    +          excluding those notices that do not pertain to any part of
    +          the Derivative Works; and
    +
    +      (d) If the Work includes a "NOTICE" text file as part of its
    +          distribution, then any Derivative Works that You distribute must
    +          include a readable copy of the attribution notices contained
    +          within such NOTICE file, excluding those notices that do not
    +          pertain to any part of the Derivative Works, in at least one
    +          of the following places: within a NOTICE text file distributed
    +          as part of the Derivative Works; within the Source form or
    +          documentation, if provided along with the Derivative Works; or,
    +          within a display generated by the Derivative Works, if and
    +          wherever such third-party notices normally appear. The contents
    +          of the NOTICE file are for informational purposes only and
    +          do not modify the License. You may add Your own attribution
    +          notices within Derivative Works that You distribute, alongside
    +          or as an addendum to the NOTICE text from the Work, provided
    +          that such additional attribution notices cannot be construed
    +          as modifying the License.
    +
    +      You may add Your own copyright statement to Your modifications and
    +      may provide additional or different license terms and conditions
    +      for use, reproduction, or distribution of Your modifications, or
    +      for any such Derivative Works as a whole, provided Your use,
    +      reproduction, and distribution of the Work otherwise complies with
    +      the conditions stated in this License.
    +
    +   5. Submission of Contributions. Unless You explicitly state otherwise,
    +      any Contribution intentionally submitted for inclusion in the Work
    +      by You to the Licensor shall be under the terms and conditions of
    +      this License, without any additional terms or conditions.
    +      Notwithstanding the above, nothing herein shall supersede or modify
    +      the terms of any separate license agreement you may have executed
    +      with Licensor regarding such Contributions.
    +
    +   6. Trademarks. This License does not grant permission to use the trade
    +      names, trademarks, service marks, or product names of the Licensor,
    +      except as required for reasonable and customary use in describing the
    +      origin of the Work and reproducing the content of the NOTICE file.
    +
    +   7. Disclaimer of Warranty. Unless required by applicable law or
    +      agreed to in writing, Licensor provides the Work (and each
    +      Contributor provides its Contributions) on an "AS IS" BASIS,
    +      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    +      implied, including, without limitation, any warranties or conditions
    +      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
    +      PARTICULAR PURPOSE. You are solely responsible for determining the
    +      appropriateness of using or redistributing the Work and assume any
    +      risks associated with Your exercise of permissions under this License.
    +
    +   8. Limitation of Liability. In no event and under no legal theory,
    +      whether in tort (including negligence), contract, or otherwise,
    +      unless required by applicable law (such as deliberate and grossly
    +      negligent acts) or agreed to in writing, shall any Contributor be
    +      liable to You for damages, including any direct, indirect, special,
    +      incidental, or consequential damages of any character arising as a
    +      result of this License or out of the use or inability to use the
    +      Work (including but not limited to damages for loss of goodwill,
    +      work stoppage, computer failure or malfunction, or any and all
    +      other commercial damages or losses), even if such Contributor
    +      has been advised of the possibility of such damages.
    +
    +   9. Accepting Warranty or Additional Liability. While redistributing
    +      the Work or Derivative Works thereof, You may choose to offer,
    +      and charge a fee for, acceptance of support, warranty, indemnity,
    +      or other liability obligations and/or rights consistent with this
    +      License. However, in accepting such obligations, You may act only
    +      on Your own behalf and on Your sole responsibility, not on behalf
    +      of any other Contributor, and only if You agree to indemnify,
    +      defend, and hold each Contributor harmless for any liability
    +      incurred by, or claims asserted against, such Contributor by reason
    +      of your accepting any such warranty or additional liability.
    +
    +   END OF TERMS AND CONDITIONS
    +
    +   APPENDIX: How to apply the Apache License to your work.
    +
    +      To apply the Apache License to your work, attach the following
    +      boilerplate notice, with the fields enclosed by brackets "{}"
    +      replaced with your own identifying information. (Don't include
    +      the brackets!)  The text should be enclosed in the appropriate
    +      comment syntax for the file format. We also recommend that a
    +      file or class name and description of purpose be included on the
    +      same "printed page" as the copyright notice for easier
    +      identification within third-party archives.
    +
    +   Copyright 2019 Michael P. Jung
    +
    +   Licensed under the Apache License, Version 2.0 (the "License");
    +   you may not use this file except in compliance with the License.
    +   You may obtain a copy of the License at
    +
    +       http://www.apache.org/licenses/LICENSE-2.0
    +
    +   Unless required by applicable law or agreed to in writing, software
    +   distributed under the License is distributed on an "AS IS" BASIS,
    +   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    +   See the License for the specific language governing permissions and
    +   limitations under the License.
    +
     
  • @@ -2140,7 +2350,220 @@

    Used by:

    same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2019 TiKV Project Authors. + Copyright 2019 TiKV Project Authors. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +
  • +
  • +

    Apache License 2.0

    +

    Used by:

    + +
                                     Apache License
    +                           Version 2.0, January 2004
    +                        http://www.apache.org/licenses/
    +
    +   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
    +
    +   1. Definitions.
    +
    +      "License" shall mean the terms and conditions for use, reproduction,
    +      and distribution as defined by Sections 1 through 9 of this document.
    +
    +      "Licensor" shall mean the copyright owner or entity authorized by
    +      the copyright owner that is granting the License.
    +
    +      "Legal Entity" shall mean the union of the acting entity and all
    +      other entities that control, are controlled by, or are under common
    +      control with that entity. For the purposes of this definition,
    +      "control" means (i) the power, direct or indirect, to cause the
    +      direction or management of such entity, whether by contract or
    +      otherwise, or (ii) ownership of fifty percent (50%) or more of the
    +      outstanding shares, or (iii) beneficial ownership of such entity.
    +
    +      "You" (or "Your") shall mean an individual or Legal Entity
    +      exercising permissions granted by this License.
    +
    +      "Source" form shall mean the preferred form for making modifications,
    +      including but not limited to software source code, documentation
    +      source, and configuration files.
    +
    +      "Object" form shall mean any form resulting from mechanical
    +      transformation or translation of a Source form, including but
    +      not limited to compiled object code, generated documentation,
    +      and conversions to other media types.
    +
    +      "Work" shall mean the work of authorship, whether in Source or
    +      Object form, made available under the License, as indicated by a
    +      copyright notice that is included in or attached to the work
    +      (an example is provided in the Appendix below).
    +
    +      "Derivative Works" shall mean any work, whether in Source or Object
    +      form, that is based on (or derived from) the Work and for which the
    +      editorial revisions, annotations, elaborations, or other modifications
    +      represent, as a whole, an original work of authorship. For the purposes
    +      of this License, Derivative Works shall not include works that remain
    +      separable from, or merely link (or bind by name) to the interfaces of,
    +      the Work and Derivative Works thereof.
    +
    +      "Contribution" shall mean any work of authorship, including
    +      the original version of the Work and any modifications or additions
    +      to that Work or Derivative Works thereof, that is intentionally
    +      submitted to Licensor for inclusion in the Work by the copyright owner
    +      or by an individual or Legal Entity authorized to submit on behalf of
    +      the copyright owner. For the purposes of this definition, "submitted"
    +      means any form of electronic, verbal, or written communication sent
    +      to the Licensor or its representatives, including but not limited to
    +      communication on electronic mailing lists, source code control systems,
    +      and issue tracking systems that are managed by, or on behalf of, the
    +      Licensor for the purpose of discussing and improving the Work, but
    +      excluding communication that is conspicuously marked or otherwise
    +      designated in writing by the copyright owner as "Not a Contribution."
    +
    +      "Contributor" shall mean Licensor and any individual or Legal Entity
    +      on behalf of whom a Contribution has been received by Licensor and
    +      subsequently incorporated within the Work.
    +
    +   2. Grant of Copyright License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      copyright license to reproduce, prepare Derivative Works of,
    +      publicly display, publicly perform, sublicense, and distribute the
    +      Work and such Derivative Works in Source or Object form.
    +
    +   3. Grant of Patent License. Subject to the terms and conditions of
    +      this License, each Contributor hereby grants to You a perpetual,
    +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    +      (except as stated in this section) patent license to make, have made,
    +      use, offer to sell, sell, import, and otherwise transfer the Work,
    +      where such license applies only to those patent claims licensable
    +      by such Contributor that are necessarily infringed by their
    +      Contribution(s) alone or by combination of their Contribution(s)
    +      with the Work to which such Contribution(s) was submitted. If You
    +      institute patent litigation against any entity (including a
    +      cross-claim or counterclaim in a lawsuit) alleging that the Work
    +      or a Contribution incorporated within the Work constitutes direct
    +      or contributory patent infringement, then any patent licenses
    +      granted to You under this License for that Work shall terminate
    +      as of the date such litigation is filed.
    +
    +   4. Redistribution. You may reproduce and distribute copies of the
    +      Work or Derivative Works thereof in any medium, with or without
    +      modifications, and in Source or Object form, provided that You
    +      meet the following conditions:
    +
    +      (a) You must give any other recipients of the Work or
    +          Derivative Works a copy of this License; and
    +
    +      (b) You must cause any modified files to carry prominent notices
    +          stating that You changed the files; and
    +
    +      (c) You must retain, in the Source form of any Derivative Works
    +          that You distribute, all copyright, patent, trademark, and
    +          attribution notices from the Source form of the Work,
    +          excluding those notices that do not pertain to any part of
    +          the Derivative Works; and
    +
    +      (d) If the Work includes a "NOTICE" text file as part of its
    +          distribution, then any Derivative Works that You distribute must
    +          include a readable copy of the attribution notices contained
    +          within such NOTICE file, excluding those notices that do not
    +          pertain to any part of the Derivative Works, in at least one
    +          of the following places: within a NOTICE text file distributed
    +          as part of the Derivative Works; within the Source form or
    +          documentation, if provided along with the Derivative Works; or,
    +          within a display generated by the Derivative Works, if and
    +          wherever such third-party notices normally appear. The contents
    +          of the NOTICE file are for informational purposes only and
    +          do not modify the License. You may add Your own attribution
    +          notices within Derivative Works that You distribute, alongside
    +          or as an addendum to the NOTICE text from the Work, provided
    +          that such additional attribution notices cannot be construed
    +          as modifying the License.
    +
    +      You may add Your own copyright statement to Your modifications and
    +      may provide additional or different license terms and conditions
    +      for use, reproduction, or distribution of Your modifications, or
    +      for any such Derivative Works as a whole, provided Your use,
    +      reproduction, and distribution of the Work otherwise complies with
    +      the conditions stated in this License.
    +
    +   5. Submission of Contributions. Unless You explicitly state otherwise,
    +      any Contribution intentionally submitted for inclusion in the Work
    +      by You to the Licensor shall be under the terms and conditions of
    +      this License, without any additional terms or conditions.
    +      Notwithstanding the above, nothing herein shall supersede or modify
    +      the terms of any separate license agreement you may have executed
    +      with Licensor regarding such Contributions.
    +
    +   6. Trademarks. This License does not grant permission to use the trade
    +      names, trademarks, service marks, or product names of the Licensor,
    +      except as required for reasonable and customary use in describing the
    +      origin of the Work and reproducing the content of the NOTICE file.
    +
    +   7. Disclaimer of Warranty. Unless required by applicable law or
    +      agreed to in writing, Licensor provides the Work (and each
    +      Contributor provides its Contributions) on an "AS IS" BASIS,
    +      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    +      implied, including, without limitation, any warranties or conditions
    +      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
    +      PARTICULAR PURPOSE. You are solely responsible for determining the
    +      appropriateness of using or redistributing the Work and assume any
    +      risks associated with Your exercise of permissions under this License.
    +
    +   8. Limitation of Liability. In no event and under no legal theory,
    +      whether in tort (including negligence), contract, or otherwise,
    +      unless required by applicable law (such as deliberate and grossly
    +      negligent acts) or agreed to in writing, shall any Contributor be
    +      liable to You for damages, including any direct, indirect, special,
    +      incidental, or consequential damages of any character arising as a
    +      result of this License or out of the use or inability to use the
    +      Work (including but not limited to damages for loss of goodwill,
    +      work stoppage, computer failure or malfunction, or any and all
    +      other commercial damages or losses), even if such Contributor
    +      has been advised of the possibility of such damages.
    +
    +   9. Accepting Warranty or Additional Liability. While redistributing
    +      the Work or Derivative Works thereof, You may choose to offer,
    +      and charge a fee for, acceptance of support, warranty, indemnity,
    +      or other liability obligations and/or rights consistent with this
    +      License. However, in accepting such obligations, You may act only
    +      on Your own behalf and on Your sole responsibility, not on behalf
    +      of any other Contributor, and only if You agree to indemnify,
    +      defend, and hold each Contributor harmless for any liability
    +      incurred by, or claims asserted against, such Contributor by reason
    +      of your accepting any such warranty or additional liability.
    +
    +   END OF TERMS AND CONDITIONS
    +
    +   APPENDIX: How to apply the Apache License to your work.
    +
    +      To apply the Apache License to your work, attach the following
    +      boilerplate notice, with the fields enclosed by brackets "{}"
    +      replaced with your own identifying information. (Don't include
    +      the brackets!)  The text should be enclosed in the appropriate
    +      comment syntax for the file format. We also recommend that a
    +      file or class name and description of purpose be included on the
    +      same "printed page" as the copyright notice for easier
    +      identification within third-party archives.
    +
    +   Copyright {yyyy} {name of copyright owner}
     
        Licensed under the Apache License, Version 2.0 (the "License");
        you may not use this file except in compliance with the License.
    @@ -2159,11 +2582,21 @@ 

    Used by:

    Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -2366,27 +2799,14 @@ 

    Used by:

    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
    +
  • Apache License 2.0

    Used by:

                                     Apache License
                                Version 2.0, January 2004
    @@ -2565,18 +2985,8 @@ 

    Used by:

    END OF TERMS AND CONDITIONS
    -APPENDIX: How to apply the Apache License to your work.
    -
    -To apply the Apache License to your work, attach the following
    -boilerplate notice, with the fields enclosed by brackets "{}"
    -replaced with your own identifying information. (Don't include
    -the brackets!) The text should be enclosed in the appropriate
    -comment syntax for the file format. We also recommend that a
    -file or class name and description of purpose be included on the
    -same "printed page" as the copyright notice for easier
    -identification within third-party archives.
    -
    -Copyright {yyyy} {name of copyright owner}
    +Copyright 2019 Yoshua Wuyts
    +Copyright 2016-2018 Michael Tilli (Pyfisch) & `httpdate` contributors

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.

    @@ -2589,7 +2999,6 @@

    Used by:

    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
    -
  • @@ -3194,6 +3603,7 @@

    Used by:

  • serde_derive
  • serde_json
  • serde_path_to_error
  • + serde_qs
  • serde_urlencoded
  • syn
  • thiserror
  • @@ -4439,7 +4849,7 @@

    Apache License 2.0

    Used by:

                                  Apache License
                             Version 2.0, January 2004
    @@ -6337,11 +6747,13 @@ 

    Used by:

  • base64
  • base64
  • bitflags
  • + bitflags
  • bstr
  • bumpalo
  • bytes-utils
  • cc
  • cfg-if
  • + ci_info
  • concurrent-queue
  • const-random
  • const-random-macro
  • @@ -6356,6 +6768,7 @@

    Used by:

  • env_logger
  • env_logger
  • envmnt
  • + equivalent
  • event-listener
  • fastrand
  • filetime
  • @@ -6365,16 +6778,17 @@

    Used by:

  • form_urlencoded
  • fraction
  • fsio
  • + futures-lite
  • gimli
  • git2
  • group
  • hashbrown
  • + hashbrown
  • hdrhistogram
  • heck
  • heck
  • hermit-abi
  • hermit-abi
  • - hermit-abi
  • httparse
  • humantime-serde
  • hyper-rustls
  • @@ -6383,6 +6797,7 @@

    Used by:

  • idna
  • if_chain
  • indexmap
  • + indexmap
  • inventory
  • io-lifetimes
  • itertools
  • @@ -6391,11 +6806,12 @@

    Used by:

  • lazy_static
  • libfuzzer-sys
  • libgit2-sys
  • - libm
  • libm
  • libz-sys
  • linux-raw-sys
  • + linux-raw-sys
  • lock_api
  • + maplit
  • mime
  • mockall
  • mockall_derive
  • @@ -6413,7 +6829,7 @@

    Used by:

  • once_cell
  • openssl-probe
  • openssl-src
  • - packed_simd_2
  • + parking
  • parking_lot
  • parking_lot
  • parking_lot_core
  • @@ -6442,6 +6858,7 @@

    Used by:

  • rustc_version
  • rustc_version
  • rustix
  • + rustix
  • rustls
  • rustls
  • rustls-native-certs
  • @@ -6464,7 +6881,6 @@

    Used by:

  • socket2
  • syn
  • tempfile
  • - text-size
  • thread_local
  • threadpool
  • tikv-jemalloc-sys
  • @@ -6472,6 +6888,7 @@

    Used by:

  • toml_datetime
  • toml_edit
  • try_match
  • + try_match
  • tungstenite
  • typed-builder
  • typetag
  • @@ -6486,6 +6903,7 @@

    Used by:

  • url
  • uuid
  • version_check
  • + waker-fn
  • wasi
  • wasi
  • wasm-bindgen
  • @@ -6495,6 +6913,7 @@

    Used by:

  • wasm-bindgen-macro-support
  • wasm-bindgen-shared
  • web-sys
  • + wiremock
  • yaml-rust
  • yansi
@@ -8170,8 +8589,11 @@

Used by:

                              Apache License
                         Version 2.0, January 2004
@@ -9008,7 +9430,9 @@ 

Apache License 2.0

Used by:

                              Apache License
                         Version 2.0, January 2004
@@ -9632,6 +10056,41 @@ 

Used by:

limitations under the License.
    +
    +Apache License 2.0
    +
    +Used by:
    +
    +# Contributing
    +
    +## License
    +
    +Licensed under either of
    +
    + * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
    + * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
    +
    +at your option.
    +
    +### Contribution
    +
    +Unless you explicitly state otherwise, any contribution intentionally submitted
    +for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any
    +additional terms or conditions.
    +
    +
    +
    +Apache License 2.0
    +
    +Used by:
    +
    +../../LICENSE-APACHE
    +

    Apache License 2.0

    Used by:

    @@ -10281,13 +10740,12 @@

    Used by:

    • apollo-compiler
    • apollo-encoder
    • - apollo-encoder
    • - apollo-parser
    • - apollo-parser
    • - apollo-smith
    • askama_shared
    • backtrace-ext
    • block-modes
    • + buildstructor
    • + curve25519-dalek-derive
    • + deadpool-runtime
    • dunce
    • graphql-introspection-query
    • graphql_client
    • @@ -10302,6 +10760,7 @@

      Used by:

    • thrift
    • tikv-jemallocator
    • try_match_inner
    • + try_match_inner
    • unic-char-property
    • unic-char-range
    • unic-common
    • @@ -10629,7 +11088,6 @@

      Apache License 2.0

      Used by:

      Copyright [2022] [Bryn Cooke]
       
      @@ -10913,79 +11371,6 @@ 

      Used by:

      contributors may be used to endorse or promote products derived from this software without specific prior written permission.
      -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
      -IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
      -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
      -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
      -OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
      -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
      -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
      -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
      -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
      -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
      -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
      -
      -
      -BSD 3-Clause "New" or "Revised" License
      -
      -Used by:
      -
      -Copyright (c) 2016-2021 isis agora lovecruft. All rights reserved.
      -Copyright (c) 2016-2021 Henry de Valence. All rights reserved.
      -
      -Redistribution and use in source and binary forms, with or without
      -modification, are permitted provided that the following conditions are
      -met:
      -
      -1. Redistributions of source code must retain the above copyright
      -notice, this list of conditions and the following disclaimer.
      -
      -2. Redistributions in binary form must reproduce the above copyright
      -notice, this list of conditions and the following disclaimer in the
      -documentation and/or other materials provided with the distribution.
      -
      -3. Neither the name of the copyright holder nor the names of its
      -contributors may be used to endorse or promote products derived from
      -this software without specific prior written permission.
      -
      -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
      -IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
      -TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
      -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
      -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
      -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
      -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
      -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
      -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
      -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
      -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
      -
      -========================================================================
      -
      -Portions of curve25519-dalek were originally derived from Adam Langley's
      -Go ed25519 implementation, found at <https://github.com/agl/ed25519/>,
      -under the following licence:
      -
      -========================================================================
      -
      -Copyright (c) 2012 The Go Authors. All rights reserved.
      -
      -Redistribution and use in source and binary forms, with or without
      -modification, are permitted provided that the following conditions are
      -met:
      -
      -   * Redistributions of source code must retain the above copyright
      -notice, this list of conditions and the following disclaimer.
      -   * Redistributions in binary form must reproduce the above
      -copyright notice, this list of conditions and the following disclaimer
      -in the documentation and/or other materials provided with the
      -distribution.
      -   * Neither the name of Google Inc. nor the names of its
      -contributors may be used to endorse or promote products derived from
      -this software without specific prior written permission.
      -
       THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
       IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
       TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
      @@ -11076,6 +11461,7 @@ 

      BSD 3-Clause "New" or "Revised" License

      Used by:

      Copyright (c) <year> <owner>. 
       
      @@ -12520,6 +12906,21 @@ 

      Used by:

      OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
      +
      +MIT License
      +
      +Used by:
      +
      +Copyright (c) 2019 David Pedersen
      +
      +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
      +
      +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
      +
      +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
       
    • @@ -13101,6 +13502,35 @@

      Used by:

    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
    +
    +MIT License
    +
    +Used by:
    +
    +MIT License
    +
    +Copyright (c) 2019 Bojan
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
     IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
     FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    @@ -13346,7 +13776,7 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    @@ -13569,6 +13999,7 @@ 

    MIT License

    Used by:

    Permission is hereby granted, free of charge, to any
     person obtaining a copy of this software and associated
    @@ -14426,6 +14857,8 @@ 

    Used by:

  • aho-corasick
  • byteorder
  • globset
  • + memchr
  • + regex-automata
  • same-file
  • termcolor
  • walkdir
  • diff --git a/xtask/Cargo.lock b/xtask/Cargo.lock index 2eee370ad64..fde85598dba 100644 --- a/xtask/Cargo.lock +++ b/xtask/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" +checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" dependencies = [ "gimli", ] @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "aho-corasick" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67fc08ce920c31afb70f013dcce1bfc3a3195de6a228474e45e1f145b36f8d04" +checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" dependencies = [ "memchr", ] @@ -58,15 +58,15 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d" +checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" [[package]] name = "anstyle-parse" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e765fd216e48e067936442276d1d57399e37bce53c264d6fefbe298080cb57ee" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" dependencies = [ "utf8parse", ] @@ -110,15 +110,15 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backtrace" -version = "0.3.67" +version = "0.3.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" +checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ "addr2line", "cc", "cfg-if", "libc", - "miniz_oxide 0.6.2", + "miniz_oxide", "object", "rustc-demangle", ] @@ -141,6 +141,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +[[package]] +name = "bitflags" +version = "2.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" + [[package]] name = "block-buffer" version = "0.10.4" @@ -256,7 +262,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -315,9 +321,9 @@ checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "cpufeatures" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e4c1eaa2012c47becbbad2ab175484c2a84d1185b566fb2cc5b8707343dfe58" +checksum = "03e69e28e9f7f77debdedbaafa2866e1de9ba56df55a8bd7cfc724c25a09987c" dependencies = [ "libc", ] @@ -333,9 +339,9 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.15" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" +checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" dependencies = [ "cfg-if", ] @@ -442,7 +448,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" dependencies = [ "crc32fast", - "miniz_oxide 0.7.1", + "miniz_oxide", ] [[package]] @@ -453,9 +459,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ "percent-encoding", ] @@ -596,9 +602,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d357c7ae988e7d2182f7d7871d0b963962420b0678b0997ce7de72001aeab782" +checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" dependencies = [ "bytes", "fnv", @@ -625,15 +631,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee512640fe35acbfb4bb779db6f0d80704c2cacfa2e39b601ef3e3f47d1ae4c7" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.1" @@ -676,9 +673,9 @@ checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" [[package]] name = "hyper" -version = "0.14.26" +version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab302d72a6f11a3b910431ff93aae7e773078c769f0a3ef15fb9ec692ed147d4" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ "bytes", "futures-channel", @@ -713,9 +710,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.56" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0722cd7114b7de04316e7ea5456a0bbb20e4adb46fd27a3697adb812cff0f37c" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -736,9 +733,9 @@ dependencies = [ [[package]] name = "idna" -version = "0.3.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -785,26 +782,25 @@ version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" dependencies = [ - "hermit-abi 0.3.1", + "hermit-abi", "libc", "windows-sys 0.48.0", ] [[package]] name = "ipnet" -version = "2.7.2" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12b6ee2129af8d4fb011108c73d99a1b83a85977f23b82460c0ae2e25bb4b57f" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" [[package]] name = "is-terminal" -version = "0.4.7" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f" +checksum = "24fddda5af7e54bf7da53067d6e802dbcc381d0a8eef629df528e3ebf68755cb" dependencies = [ - "hermit-abi 0.3.1", - "io-lifetimes", - "rustix", + 
"hermit-abi", + "rustix 0.38.1", "windows-sys 0.48.0", ] @@ -825,9 +821,9 @@ checksum = "453ad9f582a441959e5f0d088b02ce04cfe8d51a8eaf077f12ac6d3e94164ca6" [[package]] name = "js-sys" -version = "0.3.63" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f37a4a5928311ac501dee68b3c7613a1037d0edb30c8e5427bd832d55d1b790" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] @@ -856,14 +852,17 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +[[package]] +name = "linux-raw-sys" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" + [[package]] name = "log" -version = "0.4.17" +version = "0.4.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] +checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" [[package]] name = "memchr" @@ -886,15 +885,6 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" -[[package]] -name = "miniz_oxide" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b275950c28b37e794e8c55d88aeb5e139d0ce23fdbbeda68f8d7174abdf9e8fa" -dependencies = [ - "adler", -] - [[package]] name = "miniz_oxide" version = "0.7.1" @@ -906,14 +896,13 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.6" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b9d9a46eff5b4ff64b45a9e316a6d1e0bc719ef429cbec4dc630684212bfdf9" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", - "log", "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.45.0", + "windows-sys 0.48.0", ] [[package]] @@ -936,28 +925,28 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fac9e2da13b5eb447a6ce3d392f23a29d8694bff781bf03a16cd9ac8697593b" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.2.6", + "hermit-abi", "libc", ] [[package]] name = "object" -version = "0.30.4" +version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" +checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.17.1" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "openssl-probe" @@ -967,15 +956,15 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "percent-encoding" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" +checksum = 
"9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e68e84bfb01f0507134eac1e9b410a12ba379d064eab48c50ba4ce329a527b70" +checksum = "f73935e4d55e2abf7f130186537b19e7a4abc886a0252380b59248af473a3fc9" dependencies = [ "thiserror", "ucd-trie", @@ -983,9 +972,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b79d4c71c865a25a4322296122e3924d30bc8ee0834c8bfc8b95f7f054afbfb" +checksum = "aef623c9bbfa0eedf5a0efba11a5ee83209c326653ca31ff019bec3a95bfff2b" dependencies = [ "pest", "pest_generator", @@ -993,22 +982,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c435bf1076437b851ebc8edc3a18442796b30f1728ffea6262d59bbe28b077e" +checksum = "b3e8cba4ec22bada7fc55ffe51e2deb6a0e0db2d0b7ab0b103acc80d2510c190" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] name = "pest_meta" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "745a452f8eb71e39ffd8ee32b3c5f51d03845f99786fa9b68db6ff509c505411" +checksum = "a01f71cb40bd8bb94232df14b946909e14660e33fc05db3e50ae2a82d7ea0ca0" dependencies = [ "once_cell", "pest", @@ -1035,18 +1024,18 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "proc-macro2" -version = "1.0.59" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b" +checksum = "7b368fba921b0dce7e60f5e04ec15e565b3303972b42bcfde1d0713b881959eb" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.28" +version = "1.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488" +checksum = "573015e8ab27661678357f27dc26460738fd2b6c86e46f386fde94cb5d913105" dependencies = [ "proc-macro2", ] @@ -1098,7 +1087,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -1107,7 +1096,7 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ - "bitflags", + "bitflags 1.3.2", ] [[package]] @@ -1190,23 +1179,36 @@ checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" [[package]] name = "rustix" -version = "0.37.19" +version = "0.37.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d" +checksum = "62f25693a73057a1b4cb56179dd3c7ea21a7c6c5ee7d85781f5749b46f34b79c" dependencies = [ - "bitflags", + "bitflags 1.3.2", "errno", "io-lifetimes", "libc", - "linux-raw-sys", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc6396159432b5c8490d4e301d8c705f61860b8b6c863bf79942ce5401968f3" +dependencies = [ + 
"bitflags 2.3.3", + "errno", + "libc", + "linux-raw-sys 0.4.3", "windows-sys 0.48.0", ] [[package]] name = "rustls" -version = "0.21.1" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c911ba11bc8433e811ce56fde130ccf32f5127cab0e0194e9c68c5a5b671791e" +checksum = "e32ca28af694bc1bbf399c33a516dbdf1c90090b8ab23c2bc24f834aa2247f5f" dependencies = [ "log", "ring", @@ -1216,9 +1218,9 @@ dependencies = [ [[package]] name = "rustls-native-certs" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0167bac7a9f490495f3c33013e7722b53cb087ecbe082fb0c6387c96f634ea50" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", "rustls-pemfile", @@ -1228,9 +1230,9 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d194b56d58803a43635bdc398cd17e383d6f71f9182b9a192c127ca42494a59b" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ "base64 0.21.2", ] @@ -1285,7 +1287,7 @@ version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" dependencies = [ - "bitflags", + "bitflags 1.3.2", "core-foundation", "core-foundation-sys", "libc", @@ -1328,14 +1330,14 @@ checksum = "d9735b638ccc51c28bf6914d90a2e9725b377144fc612c49a611fddd1b631d68" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] name = "serde_json" -version = "1.0.96" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" +checksum = "46266871c240a00b8f503b877622fe33430b3c7d963bdc0f2adc511e54a1eae3" dependencies = [ "itoa", "ryu", @@ -1362,9 +1364,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ "cfg-if", "cpufeatures", @@ -1427,9 +1429,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.18" +version = "2.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e" +checksum = "2efbeae7acf4eabd6bcdcbd11c92f45231ddda7539edc7806bd1a04a03b24616" dependencies = [ "proc-macro2", "quote", @@ -1455,15 +1457,16 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.5.0" +version = "3.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" +checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" dependencies = [ + "autocfg", "cfg-if", "fastrand", "redox_syscall 0.3.5", - "rustix", - "windows-sys 0.45.0", + "rustix 0.37.21", + "windows-sys 0.48.0", ] [[package]] @@ -1483,7 +1486,7 @@ checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", ] [[package]] @@ -1530,9 +1533,9 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.24.0" +version = "0.24.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0d409377ff5b1e3ca6437aa86c1eb7d40c134bfec254e44c830defa92669db5" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ "rustls", "tokio", @@ -1640,9 +1643,9 @@ checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" [[package]] name = "url" -version = "2.3.1" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", "idna", @@ -1679,11 +1682,10 @@ dependencies = [ [[package]] name = "want" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "log", "try-lock", ] @@ -1701,9 +1703,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -1711,24 +1713,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b04bc93f9d6bdee709f6bd2118f57dd6679cf1176a1af464fca3ab0d66d8fb" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.36" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ "cfg-if", "js-sys", @@ -1738,9 +1740,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14d6b024f1a526bb0234f52840389927257beb670610081360e5a03c5df9c258" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -1748,28 +1750,28 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.18", + "syn 2.0.22", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.86" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9d5b4305409d1fc9482fee2d7f9bcbf24b3972bf59817ef757e23982242a93" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "web-sys" -version = "0.3.63" +version = 
"0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bdd9ef4e984da1187bf8110c5cf5b845fbc87a23602cdf912386a76fcd3a7c2" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", @@ -1842,7 +1844,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.48.0", + "windows-targets 0.48.1", ] [[package]] @@ -1875,7 +1877,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.0", + "windows-targets 0.48.1", ] [[package]] @@ -1895,9 +1897,9 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.48.0" +version = "0.48.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b1eb6f0cd7c80c79759c929114ef071b87354ce476d9d94271031c0497adfd5" +checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" dependencies = [ "windows_aarch64_gnullvm 0.48.0", "windows_aarch64_msvc 0.48.0",