diff --git a/.github/actions/spin-ci-dependencies/action.yml b/.github/actions/spin-ci-dependencies/action.yml index 4cdb3db4b0..78d3a6541f 100644 --- a/.github/actions/spin-ci-dependencies/action.yml +++ b/.github/actions/spin-ci-dependencies/action.yml @@ -8,7 +8,7 @@ inputs: type: bool rust-version: description: 'Rust version to setup' - default: '1.76' + default: '1.79' required: false type: string diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 47645d25ce..1ca7deeca5 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -21,7 +21,7 @@ concurrency: env: CARGO_TERM_COLOR: always - RUST_VERSION: 1.76 + RUST_VERSION: 1.79 jobs: dependency-review: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 104bb2ef77..cae50a614f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,7 +11,7 @@ on: concurrency: ${{ github.workflow }}-${{ github.ref }} env: - RUST_VERSION: 1.76 + RUST_VERSION: 1.79 jobs: build-and-sign: diff --git a/Cargo.lock b/Cargo.lock index cb82b0f49e..ce4d4be2b3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -76,12 +76,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "aliasable" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" - [[package]] name = "allocator-api2" version = "0.2.16" @@ -187,6 +181,12 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +[[package]] +name = "arc-swap" +version = "1.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" + [[package]] name = "arrayvec" version = "0.5.2" @@ -485,9 +485,9 @@ checksum = "fbb36e985947064623dbd357f727af08ffd077f93d696782f3c56365fa2e2799" [[package]] name = "async-trait" -version = "0.1.79" +version = "0.1.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" +checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", @@ -545,7 +545,7 @@ dependencies = [ "pin-project-lite", "rustversion", "serde 1.0.197", - "sync_wrapper", + "sync_wrapper 0.1.2", "tower", "tower-layer", "tower-service", @@ -585,7 +585,7 @@ dependencies = [ "paste", "pin-project", "rand 0.8.5", - "reqwest 0.12.4", + "reqwest 0.12.7", "rustc_version", "serde 1.0.197", "serde_json", @@ -739,6 +739,15 @@ dependencies = [ "typenum", ] +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -827,12 +836,6 @@ dependencies = [ "serde 1.0.197", ] -[[package]] -name = "bytesize" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc" - [[package]] name = "bzip2" version = "0.4.4" @@ -892,7 +895,7 @@ source = "git+https://github.com/huggingface/candle?rev=b80348d22f8f0dadb6cc4101 dependencies = [ "byteorder", "candle-gemm", - "half 2.4.0", + "half", "memmap2 0.7.1", "num-traits 0.2.18", "num_cpus", @@ -983,7 +986,7 @@ dependencies = [ 
"candle-gemm-common", "candle-gemm-f32", "dyn-stack", - "half 2.4.0", + "half", "lazy_static 1.4.0", "num-complex", "num-traits 0.2.18", @@ -1184,12 +1187,6 @@ dependencies = [ "toml 0.8.14", ] -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" - [[package]] name = "cc" version = "1.0.90" @@ -1233,7 +1230,7 @@ dependencies = [ "num-traits 0.2.18", "serde 1.0.197", "wasm-bindgen", - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -1266,17 +1263,6 @@ dependencies = [ "libloading", ] -[[package]] -name = "clap" -version = "2.34.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" -dependencies = [ - "bitflags 1.3.2", - "textwrap 0.11.0", - "unicode-width", -] - [[package]] name = "clap" version = "3.2.25" @@ -1291,7 +1277,7 @@ dependencies = [ "once_cell", "strsim 0.10.0", "termcolor", - "textwrap 0.16.1", + "textwrap", ] [[package]] @@ -1472,7 +1458,7 @@ dependencies = [ "flate2", "json5", "libtest-mimic 0.7.3", - "reqwest 0.12.4", + "reqwest 0.12.7", "serde 1.0.197", "tar", "test-environment", @@ -1662,44 +1648,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "criterion" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" -dependencies = [ - "atty", - "cast", - "clap 2.34.0", - "criterion-plot", - "csv", - "futures", - "itertools 0.10.5", - "lazy_static 1.4.0", - "num-traits 0.2.18", - "oorandom", - "plotters", - "rayon", - "regex", - "serde 1.0.197", - "serde_cbor", - "serde_derive", - "serde_json", - "tinytemplate", - "tokio", - "walkdir", -] - -[[package]] -name = "criterion-plot" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" -dependencies = [ - "cast", - "itertools 0.10.5", -] - [[package]] name = "crossbeam" version = "0.8.4" @@ -1765,7 +1713,7 @@ dependencies = [ "bitflags 1.3.2", "crossterm_winapi", "libc", - "mio", + "mio 0.8.11", "parking_lot", "signal-hook", "signal-hook-mio", @@ -1809,27 +1757,6 @@ dependencies = [ "typenum", ] -[[package]] -name = "csv" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" -dependencies = [ - "csv-core", - "itoa", - "ryu", - "serde 1.0.197", -] - -[[package]] -name = "csv-core" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70" -dependencies = [ - "memchr", -] - [[package]] name = "ctrlc" version = "3.4.4" @@ -2387,6 +2314,21 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "expander" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2c470c71d91ecbd179935b24170459e926382eaaa86b590b78814e180d8a8e2" +dependencies = [ + "blake2", + "file-guard", + "fs-err", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.58", +] + [[package]] name = "fallible-iterator" version = "0.2.0" @@ -2452,6 +2394,16 @@ dependencies = [ "subtle", ] +[[package]] +name = "file-guard" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"21ef72acf95ec3d7dbf61275be556299490a245f017cf084bd23b4f68cf9407c" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "filetime" version = "0.2.23" @@ -2527,6 +2479,15 @@ dependencies = [ "percent-encoding", ] +[[package]] +name = "fs-err" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a41f105fe1d5b6b34b2055e3dc59bb79b46b48b2040b9e6c7b4b5de097aa41" +dependencies = [ + "autocfg", +] + [[package]] name = "fs-set-times" version = "0.20.1" @@ -3082,12 +3043,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "half" -version = "1.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" - [[package]] name = "half" version = "2.4.0" @@ -3259,12 +3214,12 @@ dependencies = [ [[package]] name = "http-body-util" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", - "futures-core", + "futures-util", "http 1.1.0", "http-body 1.0.0", "pin-project-lite", @@ -3325,7 +3280,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2 0.5.6", "tokio", "tower-service", "tracing", @@ -3334,9 +3289,9 @@ dependencies = [ [[package]] name = "hyper" -version = "1.2.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" dependencies = [ "bytes", "futures-channel", @@ -3385,6 +3340,23 @@ dependencies = [ "webpki-roots 0.26.1", ] +[[package]] +name = "hyper-rustls" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.4.1", + "hyper-util", + "rustls 0.23.7", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.0", + "tower-service", +] + [[package]] name = "hyper-timeout" version = "0.4.1" @@ -3418,7 +3390,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.2.0", + "hyper 1.4.1", "hyper-util", "native-tls", "tokio", @@ -3437,7 +3409,7 @@ dependencies = [ "futures-util", "http 1.1.0", "http-body 1.0.0", - "hyper 1.2.0", + "hyper 1.4.1", "pin-project-lite", "socket2 0.5.6", "tokio", @@ -3935,7 +3907,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" dependencies = [ "cfg-if", - "windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -4138,12 +4110,7 @@ version = "0.2.0-dev" source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" dependencies = [ "llm-base", - "llm-bloom", - "llm-gpt2", - "llm-gptj", - "llm-gptneox", "llm-llama", - "llm-mpt", "serde 1.0.197", "tracing", ] @@ -4155,7 +4122,7 @@ source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a87 dependencies = [ "bytemuck", "ggml", - "half 2.4.0", + "half", "llm-samplers", "memmap2 0.5.10", "partial_sort", @@ -4168,39 +4135,6 @@ dependencies = [ "tracing", ] -[[package]] -name = 
"llm-bloom" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "llm-base", -] - -[[package]] -name = "llm-gpt2" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "bytemuck", - "llm-base", -] - -[[package]] -name = "llm-gptj" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "llm-base", -] - -[[package]] -name = "llm-gptneox" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "llm-base", -] - [[package]] name = "llm-llama" version = "0.2.0-dev" @@ -4210,14 +4144,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "llm-mpt" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "llm-base", -] - [[package]] name = "llm-samplers" version = "0.0.6" @@ -4537,6 +4463,18 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "mio" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + [[package]] name = "monostate" version = "0.1.11" @@ -4579,7 +4517,7 @@ dependencies = [ "keyed_priority_queue", "lazy_static 1.4.0", "lru 0.12.3", - "mio", + "mio 0.8.11", "mysql_common", "native-tls", "once_cell", @@ -4750,7 +4688,7 @@ dependencies = [ "inotify", "kqueue", "libc", - "mio", + "mio 0.8.11", "walkdir", "windows-sys 0.45.0", ] @@ -4781,11 +4719,10 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "autocfg", "num-integer", "num-traits 0.2.18", ] @@ -4941,7 +4878,7 @@ dependencies = [ "lazy_static 1.4.0", "olpc-cjson", "regex", - "reqwest 0.12.4", + "reqwest 0.12.7", "serde 1.0.197", "serde_json", "sha2", @@ -4965,7 +4902,7 @@ dependencies = [ "lazy_static 1.4.0", "olpc-cjson", "regex", - "reqwest 0.12.4", + "reqwest 0.12.7", "serde 1.0.197", "serde_json", "sha2", @@ -5031,12 +4968,6 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "oorandom" -version = "11.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" - [[package]] name = "opaque-debug" version = "0.3.1" @@ -5218,163 +5149,50 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" [[package]] -name = "ouroboros" -version = "0.18.3" +name = "overload" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b7be5a8a3462b752f4be3ff2b2bf2f7f1d00834902e46be2a4d68b87b0573c" -dependencies = [ - "aliasable", - "ouroboros_macro", - "static_assertions", -] +checksum = 
"b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" [[package]] -name = "ouroboros_macro" -version = "0.18.3" +name = "p256" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b645dcde5f119c2c454a92d0dfa271a2a3b205da92e4292a68ead4bdbfde1f33" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" dependencies = [ - "heck 0.4.1", - "itertools 0.12.1", - "proc-macro2", - "proc-macro2-diagnostics", - "quote", - "syn 2.0.58", + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", ] [[package]] -name = "outbound-http" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "http 0.2.12", - "reqwest 0.11.27", - "spin-app", - "spin-core", - "spin-expressions", - "spin-locked-app", - "spin-outbound-networking", - "spin-telemetry", - "spin-world", - "terminal", - "tracing", - "url", -] +name = "parking" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" [[package]] -name = "outbound-mqtt" -version = "2.8.0-pre0" +name = "parking_lot" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "anyhow", - "rumqttc", - "spin-app", - "spin-core", - "spin-expressions", - "spin-outbound-networking", - "spin-world", - "table", - "tokio", - "tracing", + "lock_api", + "parking_lot_core", ] [[package]] -name = "outbound-mysql" -version = "2.8.0-pre0" +name = "parking_lot_core" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ - "anyhow", - "flate2", - "mysql_async", - "mysql_common", - "spin-app", - "spin-core", - "spin-expressions", - "spin-outbound-networking", - "spin-world", - "table", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "outbound-pg" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "native-tls", - "postgres-native-tls", - "spin-app", - "spin-core", - "spin-expressions", - "spin-outbound-networking", - "spin-world", - "table", - "tokio", - "tokio-postgres", - "tracing", -] - -[[package]] -name = "outbound-redis" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "redis 0.21.7", - "spin-app", - "spin-core", - "spin-expressions", - "spin-outbound-networking", - "spin-world", - "table", - "tokio", - "tracing", -] - -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - -[[package]] -name = "p256" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" -dependencies = [ - "ecdsa", - "elliptic-curve", - "primeorder", - "sha2", -] - -[[package]] -name = "parking" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" - -[[package]] -name = "parking_lot" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.9" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall 0.4.1", - "smallvec", - "windows-targets 0.48.5", + "cfg-if", + "libc", + "redox_syscall 0.4.1", + "smallvec", + "windows-targets 0.48.5", ] [[package]] @@ -5651,34 +5469,6 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" -[[package]] -name = "plotters" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2c224ba00d7cadd4d5c660deaf2098e5e80e07846537c51f9cfa4be50c1fd45" -dependencies = [ - "num-traits 0.2.18", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e76628b4d3a7581389a35d5b6e2139607ad7c75b17aed325f210aa91f4a9609" - -[[package]] -name = "plotters-svg" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f6d39893cca0701371e3c27294f09797214b86f1fb951b89ade8ec04e2abab" -dependencies = [ - "plotters-backend", -] - [[package]] name = "polling" version = "2.8.0" @@ -5849,19 +5639,6 @@ dependencies = [ "unicode-ident", ] -[[package]] -name = "proc-macro2-diagnostics" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.58", - "version_check", - "yansi", -] - [[package]] name = "proc-quote" version = "0.4.0" @@ -5913,7 +5690,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80b776a1b2dc779f5ee0641f8ade0125bc1298dd41a9a0c16d8bd57b42d222b1" dependencies = [ "bytes", - "heck 0.4.1", + "heck 0.5.0", "itertools 0.12.1", "log", "multimap", @@ -6196,6 +5973,29 @@ dependencies = [ "url", ] +[[package]] +name = "redis" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e902a69d09078829137b4a5d9d082e0490393537badd7c91a3d69d14639e115f" +dependencies = [ + "arc-swap", + "async-trait", + "bytes", + "combine", + "futures-util", + "itoa", + "num-bigint", + "percent-encoding", + "pin-project-lite", + "ryu", + "sha1_smol", + "socket2 0.5.6", + "tokio", + "tokio-util 0.7.10", + "url", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -6319,8 +6119,8 @@ dependencies = [ "serde 1.0.197", "serde_json", "serde_urlencoded", - "sync_wrapper", - "system-configuration", + "sync_wrapper 0.1.2", + "system-configuration 0.5.1", "tokio", "tokio-native-tls", "tokio-rustls 0.24.1", @@ -6332,14 +6132,14 @@ dependencies = [ "wasm-streams", "web-sys", "webpki-roots 0.25.4", - "winreg 0.50.0", + "winreg", ] [[package]] name = "reqwest" -version = "0.12.4" +version = "0.12.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" +checksum = "f8f4955649ef5c38cc7f9e8aa41761d48fb9677197daea9984dc54f56aad5e63" dependencies = [ "base64 0.22.0", "bytes", @@ -6351,7 +6151,8 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.2.0", + "hyper 1.4.1", + "hyper-rustls 0.27.2", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -6366,8 +6167,8 @@ dependencies = [ "serde 1.0.197", "serde_json", "serde_urlencoded", - 
"sync_wrapper", - "system-configuration", + "sync_wrapper 1.0.1", + "system-configuration 0.6.0", "tokio", "tokio-native-tls", "tokio-socks", @@ -6378,7 +6179,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "winreg 0.52.0", + "windows-registry", ] [[package]] @@ -6604,6 +6405,21 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls" +version = "0.23.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebbbdb961df0ad3f2652da8f3fdc4b36122f568f968f45ad3316f26c025c677b" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.2", + "subtle", + "zeroize", +] + [[package]] name = "rustls-native-certs" version = "0.7.0" @@ -6696,9 +6512,9 @@ dependencies = [ [[package]] name = "sanitize-filename" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c502bdb638f1396509467cb0580ef3b29aa2a45c5d43e5d84928241280296c" +checksum = "2ed72fbaf78e6f2d41744923916966c4fbe3d7c74e3037a8ee482f1115572603" dependencies = [ "lazy_static 1.4.0", "regex", @@ -6862,16 +6678,6 @@ dependencies = [ "serde 1.0.197", ] -[[package]] -name = "serde_cbor" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" -dependencies = [ - "half 1.8.3", - "serde 1.0.197", -] - [[package]] name = "serde_derive" version = "1.0.197" @@ -7105,7 +6911,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" dependencies = [ "libc", - "mio", + "mio 0.8.11", "signal-hook", ] @@ -7260,10 +7066,8 @@ version = "2.8.0-pre0" dependencies = [ "anyhow", "async-trait", - "ouroboros", "serde 1.0.197", "serde_json", - "spin-core", "spin-locked-app", "spin-serde", "thiserror", @@ -7308,7 +7112,7 @@ dependencies = [ "glob", "hex", "http-body-util", - "hyper 1.2.0", + "hyper 1.4.1", "hyper-util", "indicatif 0.17.8", "is-terminal", @@ -7317,14 +7121,11 @@ dependencies = [ "levenshtein", "nix 0.24.3", "openssl", - "outbound-http", - "outbound-mqtt", - "outbound-redis", "path-absolutize", "rand 0.8.5", "redis 0.24.0", "regex", - "reqwest 0.12.4", + "reqwest 0.12.7", "rpassword", "runtime-tests", "semver", @@ -7349,7 +7150,6 @@ dependencies = [ "spin-trigger", "spin-trigger-http", "spin-trigger-redis", - "spin-variables", "subprocess", "tempfile", "terminal", @@ -7414,59 +7214,347 @@ version = "2.8.0-pre0" dependencies = [ "anyhow", "async-trait", - "bytes", - "cap-primitives 3.0.0", - "cap-std 3.0.0", - "futures", - "http 1.1.0", - "io-extras", - "rustix 0.37.27", + "serde_json", "spin-componentize", - "spin-telemetry", - "system-interface", + "spin-factor-wasi", + "spin-factors", + "spin-factors-test", + "spin-locked-app", + "tokio", + "tracing", + "wasmtime", + "wasmtime-wasi", +] + +[[package]] +name = "spin-doctor" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "async-trait", + "glob", + "reqwest 0.11.27", + "serde 1.0.197", + "similar", + "spin-common", + "spin-manifest", "tempfile", + "terminal", "tokio", + "toml 0.8.14", + "toml_edit 0.20.7", + "tracing", + "ui-testing", +] + +[[package]] +name = "spin-expressions" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "async-trait", + "dotenvy", + "once_cell", + "serde 1.0.197", + "spin-locked-app", + "thiserror", + "tokio", + "toml 0.5.11", +] + +[[package]] +name = "spin-factor-key-value" +version = "2.8.0-pre0" +dependencies 
= [ + "anyhow", + "serde 1.0.197", + "spin-factor-key-value-redis", + "spin-factor-key-value-spin", + "spin-factors", + "spin-factors-test", + "spin-key-value", + "spin-world", + "tempfile", + "tokio", + "toml 0.8.14", +] + +[[package]] +name = "spin-factor-key-value-azure" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "serde 1.0.197", + "spin-factor-key-value", + "spin-key-value-azure", +] + +[[package]] +name = "spin-factor-key-value-redis" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "serde 1.0.197", + "spin-factor-key-value", + "spin-key-value-redis", +] + +[[package]] +name = "spin-factor-key-value-spin" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "serde 1.0.197", + "spin-factor-key-value", + "spin-key-value-sqlite", +] + +[[package]] +name = "spin-factor-llm" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "async-trait", + "serde 1.0.197", + "spin-factors", + "spin-factors-test", + "spin-llm-local", + "spin-llm-remote-http", + "spin-locked-app", + "spin-world", + "tokio", + "toml 0.8.14", + "tracing", + "url", +] + +[[package]] +name = "spin-factor-outbound-http" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "http 1.1.0", + "http-body-util", + "hyper 1.4.1", + "reqwest 0.11.27", + "rustls 0.23.7", + "spin-factor-outbound-networking", + "spin-factor-variables", + "spin-factors", + "spin-factors-test", + "spin-telemetry", + "spin-world", + "terminal", + "tokio", + "tokio-rustls 0.26.0", "tracing", - "wasi-common", "wasmtime", "wasmtime-wasi", "wasmtime-wasi-http", ] [[package]] -name = "spin-doctor" +name = "spin-factor-outbound-mqtt" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "rumqttc", + "spin-core", + "spin-factor-outbound-networking", + "spin-factor-variables", + "spin-factors", + "spin-factors-test", + "spin-world", + "table", + "tokio", + "tracing", +] + +[[package]] +name = "spin-factor-outbound-mysql" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "flate2", + "mysql_async", + "mysql_common", + "spin-app", + "spin-core", + "spin-expressions", + "spin-factor-outbound-networking", + "spin-factor-variables", + "spin-factors", + "spin-factors-test", + "spin-outbound-networking", + "spin-world", + "table", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "spin-factor-outbound-networking" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "futures-util", + "http 1.1.0", + "ipnet", + "rustls 0.23.7", + "rustls-pemfile 2.1.2", + "rustls-pki-types", + "serde 1.0.197", + "spin-factor-variables", + "spin-factor-wasi", + "spin-factors", + "spin-factors-test", + "spin-outbound-networking", + "spin-serde", + "tempfile", + "tokio", + "toml 0.8.14", + "tracing", + "wasmtime-wasi", + "webpki-roots 0.26.1", +] + +[[package]] +name = "spin-factor-outbound-pg" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "native-tls", + "postgres-native-tls", + "spin-core", + "spin-factor-outbound-networking", + "spin-factor-variables", + "spin-factors", + "spin-factors-test", + "spin-world", + "table", + "tokio", + "tokio-postgres", + "tracing", +] + +[[package]] +name = "spin-factor-outbound-redis" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "redis 0.21.7", + "spin-core", + "spin-factor-outbound-networking", + "spin-factor-variables", + "spin-factors", + "spin-factors-test", + "spin-world", + "table", + "tokio", + "tracing", +] + +[[package]] +name = "spin-factor-sqlite" +version = "2.8.0-pre0" +dependencies = [ + "async-trait", + "serde 1.0.197", + "spin-factors", + "spin-factors-test", + 
"spin-locked-app", + "spin-sqlite", + "spin-sqlite-inproc", + "spin-sqlite-libsql", + "spin-world", + "table", + "tokio", + "toml 0.8.14", +] + +[[package]] +name = "spin-factor-variables" +version = "2.8.0-pre0" +dependencies = [ + "azure_core", + "azure_identity", + "azure_security_keyvault", + "dotenvy", + "serde 1.0.197", + "spin-expressions", + "spin-factors", + "spin-factors-test", + "spin-world", + "tokio", + "toml 0.8.14", + "tracing", + "vaultrs", +] + +[[package]] +name = "spin-factor-wasi" +version = "2.8.0-pre0" +dependencies = [ + "async-trait", + "bytes", + "cap-primitives 3.0.0", + "spin-common", + "spin-factors", + "spin-factors-test", + "tokio", + "wasmtime", + "wasmtime-wasi", +] + +[[package]] +name = "spin-factors" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "serde 1.0.197", + "spin-app", + "spin-factors-derive", + "thiserror", + "toml 0.8.14", + "tracing", + "wasmtime", +] + +[[package]] +name = "spin-factors-derive" +version = "2.8.0-pre0" +dependencies = [ + "expander", + "proc-macro2", + "quote", + "syn 2.0.58", +] + +[[package]] +name = "spin-factors-executor" version = "2.8.0-pre0" dependencies = [ "anyhow", - "async-trait", - "glob", - "reqwest 0.11.27", - "serde 1.0.197", - "similar", - "spin-common", - "spin-manifest", - "tempfile", - "terminal", + "spin-app", + "spin-core", + "spin-factor-wasi", + "spin-factors", + "spin-factors-test", "tokio", - "toml 0.8.14", - "toml_edit 0.20.7", - "tracing", - "ui-testing", ] [[package]] -name = "spin-expressions" +name = "spin-factors-test" version = "2.8.0-pre0" dependencies = [ - "anyhow", - "async-trait", - "dotenvy", - "once_cell", "serde 1.0.197", - "spin-locked-app", - "thiserror", - "tokio", - "toml 0.5.11", + "spin-app", + "spin-factors", + "spin-factors-derive", + "spin-loader", + "tempfile", + "toml 0.8.14", ] [[package]] @@ -7476,7 +7564,7 @@ dependencies = [ "anyhow", "http 1.1.0", "http-body-util", - "hyper 1.2.0", + "hyper 1.4.1", "indexmap 1.9.3", "percent-encoding", "routefinder", @@ -7546,18 +7634,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "spin-llm" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "bytesize", - "llm", - "spin-app", - "spin-core", - "spin-world", -] - [[package]] name = "spin-llm-local" version = "2.8.0-pre0" @@ -7574,7 +7650,6 @@ dependencies = [ "serde 1.0.197", "spin-common", "spin-core", - "spin-llm", "spin-world", "terminal", "tokenizers", @@ -7588,12 +7663,9 @@ version = "2.8.0-pre0" dependencies = [ "anyhow", "http 0.2.12", - "llm", "reqwest 0.11.27", "serde 1.0.197", "serde_json", - "spin-core", - "spin-llm", "spin-telemetry", "spin-world", "tracing", @@ -7614,7 +7686,6 @@ dependencies = [ "itertools 0.10.5", "lazy_static 1.4.0", "mime_guess", - "outbound-http", "path-absolutize", "regex", "reqwest 0.11.27", @@ -7645,7 +7716,6 @@ version = "2.8.0-pre0" dependencies = [ "anyhow", "async-trait", - "ouroboros", "serde 1.0.197", "serde_json", "spin-serde", @@ -7693,7 +7763,6 @@ dependencies = [ "spin-loader", "spin-locked-app", "spin-manifest", - "spin-testing", "tempfile", "tokio", "tokio-util 0.7.10", @@ -7741,6 +7810,29 @@ dependencies = [ "url", ] +[[package]] +name = "spin-runtime-config" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "spin-factor-key-value", + "spin-factor-key-value-azure", + "spin-factor-key-value-redis", + "spin-factor-key-value-spin", + "spin-factor-llm", + "spin-factor-outbound-http", + "spin-factor-outbound-mqtt", + "spin-factor-outbound-mysql", + "spin-factor-outbound-networking", + "spin-factor-outbound-pg", + 
"spin-factor-outbound-redis", + "spin-factor-sqlite", + "spin-factor-variables", + "spin-factor-wasi", + "spin-factors", + "toml 0.8.14", +] + [[package]] name = "spin-serde" version = "2.8.0-pre0" @@ -7846,44 +7938,14 @@ dependencies = [ "walkdir", ] -[[package]] -name = "spin-testing" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "http 1.1.0", - "hyper 1.2.0", - "serde 1.0.197", - "serde_json", - "spin-app", - "spin-componentize", - "spin-core", - "spin-http", - "spin-trigger", - "tokio", - "tracing-subscriber", -] - [[package]] name = "spin-trigger" version = "2.8.0-pre0" dependencies = [ "anyhow", - "async-trait", "clap 3.2.25", "ctrlc", - "dirs 4.0.0", "futures", - "http 1.1.0", - "indexmap 1.9.3", - "ipnet", - "outbound-http", - "outbound-mqtt", - "outbound-mysql", - "outbound-pg", - "outbound-redis", - "rustls-pemfile 2.1.2", - "rustls-pki-types", "sanitize-filename", "serde 1.0.197", "serde_json", @@ -7891,33 +7953,24 @@ dependencies = [ "spin-common", "spin-componentize", "spin-core", - "spin-expressions", - "spin-key-value", - "spin-key-value-azure", - "spin-key-value-redis", - "spin-key-value-sqlite", - "spin-llm", - "spin-llm-local", - "spin-llm-remote-http", - "spin-loader", - "spin-manifest", - "spin-outbound-networking", - "spin-serde", - "spin-sqlite", - "spin-sqlite-inproc", - "spin-sqlite-libsql", + "spin-factor-key-value", + "spin-factor-llm", + "spin-factor-outbound-http", + "spin-factor-outbound-mqtt", + "spin-factor-outbound-mysql", + "spin-factor-outbound-networking", + "spin-factor-outbound-pg", + "spin-factor-outbound-redis", + "spin-factor-sqlite", + "spin-factor-variables", + "spin-factor-wasi", + "spin-factors", + "spin-factors-executor", + "spin-runtime-config", "spin-telemetry", - "spin-variables", - "spin-world", - "tempfile", "terminal", "tokio", - "toml 0.5.11", "tracing", - "url", - "wasmtime", - "wasmtime-wasi", - "wasmtime-wasi-http", ] [[package]] @@ -7927,16 +7980,13 @@ dependencies = [ "anyhow", "async-trait", "clap 3.2.25", - "criterion", "futures", "futures-util", "http 1.1.0", "http-body-util", - "hyper 1.2.0", + "hyper 1.4.1", "hyper-util", "indexmap 1.9.3", - "num_cpus", - "outbound-http", "percent-encoding", "rustls 0.22.4", "rustls-pemfile 2.1.2", @@ -7945,10 +7995,11 @@ dependencies = [ "serde_json", "spin-app", "spin-core", + "spin-factor-outbound-http", + "spin-factor-wasi", "spin-http", "spin-outbound-networking", "spin-telemetry", - "spin-testing", "spin-trigger", "spin-world", "terminal", @@ -7957,7 +8008,6 @@ dependencies = [ "tokio-rustls 0.25.0", "tracing", "url", - "wasi-common", "wasmtime", "wasmtime-wasi", "wasmtime-wasi-http", @@ -7971,47 +8021,21 @@ dependencies = [ "anyhow", "async-trait", "futures", - "redis 0.21.7", + "redis 0.26.1", "serde 1.0.197", - "spin-app", - "spin-common", - "spin-core", - "spin-expressions", + "spin-factor-variables", "spin-telemetry", - "spin-testing", "spin-trigger", "spin-world", "tokio", "tracing", ] -[[package]] -name = "spin-variables" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "async-trait", - "azure_core", - "azure_identity", - "azure_security_keyvault", - "dotenvy", - "once_cell", - "serde 1.0.197", - "spin-app", - "spin-core", - "spin-expressions", - "spin-world", - "thiserror", - "tokio", - "toml 0.5.11", - "tracing", - "vaultrs", -] - [[package]] name = "spin-world" version = "2.8.0-pre0" dependencies = [ + "async-trait", "wasmtime", ] @@ -8150,6 +8174,15 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" +dependencies = [ + "futures-core", +] + [[package]] name = "synstructure" version = "0.12.6" @@ -8170,7 +8203,18 @@ checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", "core-foundation", - "system-configuration-sys", + "system-configuration-sys 0.5.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "658bc6ee10a9b4fcf576e9b0819d95ec16f4d2c02d39fd83ac1c8789785c4a42" +dependencies = [ + "bitflags 2.5.0", + "core-foundation", + "system-configuration-sys 0.6.0", ] [[package]] @@ -8183,6 +8227,16 @@ dependencies = [ "libc", ] +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "system-interface" version = "0.27.2" @@ -8228,14 +8282,15 @@ checksum = "1f227968ec00f0e5322f9b8173c7a0cbcff6181a0a5b28e9892491c286277231" [[package]] name = "tempfile" -version = "3.10.1" +version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +checksum = "04cbcdd0c794ebb0d4cf35e88edd2f7d2c4c3e9a5a6dab322839b321c6a87a64" dependencies = [ "cfg-if", "fastrand 2.0.2", + "once_cell", "rustix 0.38.32", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -8294,7 +8349,7 @@ dependencies = [ "anyhow", "fslock", "regex", - "reqwest 0.12.4", + "reqwest 0.12.7", "temp-dir", "tokio", ] @@ -8310,7 +8365,9 @@ dependencies = [ "log", "nix 0.26.4", "regex", - "reqwest 0.12.4", + "reqwest 0.12.7", + "spin-app", + "spin-factors-executor", "spin-http", "spin-loader", "spin-trigger", @@ -8322,15 +8379,6 @@ dependencies = [ "wasmtime-wasi-http", ] -[[package]] -name = "textwrap" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "unicode-width", -] - [[package]] name = "textwrap" version = "0.16.1" @@ -8419,16 +8467,6 @@ dependencies = [ "lazy_static 0.2.11", ] -[[package]] -name = "tinytemplate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" -dependencies = [ - "serde 1.0.197", - "serde_json", -] - [[package]] name = "tinyvec" version = "1.6.0" @@ -8495,21 +8533,20 @@ dependencies = [ [[package]] name = "tokio" -version = "1.37.0" +version = "1.39.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" +checksum = "9babc99b9923bfa4804bd74722ff02c0381021eafa4db9949217e3be8e84fff5" dependencies = [ "backtrace", "bytes", "libc", - "mio", - "num_cpus", + "mio 1.0.2", "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2 0.5.6", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -8524,9 +8561,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", @@ -8590,6 +8627,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls 0.23.7", + "rustls-pki-types", + "tokio", +] + [[package]] name = "tokio-socks" version = "0.5.1" @@ -8921,7 +8969,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", - "rand 0.7.3", + "rand 0.8.5", "static_assertions", ] @@ -9211,7 +9259,7 @@ dependencies = [ "once_cell", "pathdiff", "ptree", - "reqwest 0.12.4", + "reqwest 0.12.7", "secrecy", "semver", "serde 1.0.197", @@ -9324,33 +9372,6 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" -[[package]] -name = "wasi-common" -version = "22.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b86fd41e1e26ff6af9451c6a332a5ce5f5283ca51e87d875cdd9a05305598ee3" -dependencies = [ - "anyhow", - "bitflags 2.5.0", - "cap-fs-ext", - "cap-rand", - "cap-std 3.0.0", - "cap-time-ext", - "fs-set-times", - "io-extras", - "io-lifetimes 2.0.3", - "log", - "once_cell", - "rustix 0.38.32", - "system-interface", - "thiserror", - "tokio", - "tracing", - "wasmtime", - "wiggle", - "windows-sys 0.52.0", -] - [[package]] name = "wasite" version = "0.1.0" @@ -9538,7 +9559,7 @@ dependencies = [ "anyhow", "dirs 5.0.1", "http 1.1.0", - "reqwest 0.12.4", + "reqwest 0.12.7", "semver", "serde 1.0.197", "serde_json", @@ -9926,7 +9947,7 @@ dependencies = [ "http 1.1.0", "http-body 1.0.0", "http-body-util", - "hyper 1.2.0", + "hyper 1.4.1", "rustls 0.22.4", "tokio", "tokio-rustls 0.25.0", @@ -10234,7 +10255,37 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result", + "windows-strings", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-strings" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +dependencies = [ + "windows-result", + "windows-targets 0.52.6", ] [[package]] @@ -10261,7 +10312,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" 
+version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] @@ -10296,17 +10356,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -10323,9 +10384,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -10341,9 +10402,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -10359,9 +10420,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -10377,9 +10444,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -10395,9 +10462,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -10413,9 +10480,9 @@ checksum 
= "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -10431,9 +10498,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" @@ -10463,16 +10530,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "winreg" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - [[package]] name = "winx" version = "0.36.3" @@ -10644,12 +10701,6 @@ dependencies = [ "linked-hash-map", ] -[[package]] -name = "yansi" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" - [[package]] name = "zbus" version = "3.15.2" diff --git a/Cargo.toml b/Cargo.toml index 89fafac710..c32cfc09c2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ edition = "2021" license = "Apache-2.0 WITH LLVM-exception" homepage = "https://developer.fermyon.com/spin" repository = "https://github.com/fermyon/spin" -rust-version = "1.76" +rust-version = "1.79" [dependencies] anyhow = { workspace = true } @@ -35,9 +35,6 @@ itertools = "0.11.0" lazy_static = "1.4.0" levenshtein = "1.0.5" nix = { version = "0.24", features = ["signal"] } -outbound-http = { path = "crates/outbound-http" } -outbound-redis = { path = "crates/outbound-redis" } -outbound-mqtt = { path = "crates/outbound-mqtt" } spin-key-value = { path = "crates/key-value" } spin-key-value-sqlite = { path = "crates/key-value-sqlite" } path-absolutize = "3.0.11" @@ -68,7 +65,6 @@ spin-templates = { path = "crates/templates" } spin-trigger = { path = "crates/trigger" } spin-trigger-http = { path = "crates/trigger-http" } spin-trigger-redis = { path = "crates/trigger-redis" } -spin-variables = { path = "crates/variables" } tempfile = "3.8.0" tokio = { version = "1.23", features = ["full"] } diff --git a/build.rs b/build.rs index 7ddf012e35..3246e3ec7d 100644 --- a/build.rs +++ b/build.rs @@ -68,17 +68,16 @@ error: the `wasm32-wasi` target is not installed std::fs::create_dir_all("target/test-programs").unwrap(); build_wasm_test_program("core-wasi-test.wasm", "crates/core/tests/core-wasi-test"); - build_wasm_test_program("redis-rust.wasm", "crates/trigger-redis/tests/rust"); - - build_wasm_test_program( - "spin-http-benchmark.wasm", - "crates/trigger-http/benches/spin-http-benchmark", - ); - build_wasm_test_program( - "wagi-benchmark.wasm", - "crates/trigger-http/benches/wagi-benchmark", - ); - build_wasm_test_program("timer_app_example.wasm", "examples/spin-timer/app-example"); + // build_wasm_test_program("redis-rust.wasm", "crates/trigger-redis/tests/rust"); + // build_wasm_test_program( + // "spin-http-benchmark.wasm", + // "crates/trigger-http/benches/spin-http-benchmark", + // 
); + // build_wasm_test_program( + // "wagi-benchmark.wasm", + // "crates/trigger-http/benches/wagi-benchmark", + // ); + // build_wasm_test_program("timer_app_example.wasm", "examples/spin-timer/app-example"); cargo_build(TIMER_TRIGGER_INTEGRATION_TEST); } diff --git a/crates/app/Cargo.toml b/crates/app/Cargo.toml index fadb5e3500..2f0d0cf182 100644 --- a/crates/app/Cargo.toml +++ b/crates/app/Cargo.toml @@ -7,10 +7,8 @@ edition = { workspace = true } [dependencies] anyhow = "1.0" async-trait = "0.1" -ouroboros = "0.18.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" -spin-core = { path = "../core" } spin-locked-app = { path = "../locked-app" } spin-serde = { path = "../serde" } thiserror = "1.0" diff --git a/crates/app/src/host_component.rs b/crates/app/src/host_component.rs deleted file mode 100644 index c5ab96c4e8..0000000000 --- a/crates/app/src/host_component.rs +++ /dev/null @@ -1,123 +0,0 @@ -use std::{any::Any, sync::Arc}; - -use anyhow::Context; -use spin_core::{ - AnyHostComponentDataHandle, EngineBuilder, HostComponent, HostComponentDataHandle, - HostComponentsData, -}; - -use crate::{App, AppComponent}; - -/// A trait for "dynamic" Spin host components. -/// -/// This extends [`HostComponent`] to support per-[`AppComponent`] dynamic -/// runtime configuration. -/// -/// Dynamic host components differ from regular host components in that they can be -/// configured on a per-component basis. -pub trait DynamicHostComponent: HostComponent { - /// Called on [`AppComponent`] instance initialization. - /// - /// The `data` returned by [`HostComponent::build_data`] is passed, along - /// with a reference to the `component` being instantiated. - fn update_data(&self, data: &mut Self::Data, component: &AppComponent) -> anyhow::Result<()>; - - /// Called on [`App`] load to validate any configuration needed by this - /// host component. - /// - /// Note that the _absence_ of configuration should not be treated as an - /// error here, as the app may not use this host component at all. - #[allow(unused_variables)] - fn validate_app(&self, app: &App) -> anyhow::Result<()> { - Ok(()) - } -} - -impl DynamicHostComponent for Arc { - fn update_data(&self, data: &mut Self::Data, component: &AppComponent) -> anyhow::Result<()> { - (**self).update_data(data, component) - } -} - -/// A version of `DynamicHostComponent` which can be made into a trait object. -/// -/// This is only implemented for `T: DynamicHostComponent`. We want to make `DynamicHostComponent` -/// into a trait object so that we can store them into a heterogeneous collection in `DynamicHostComponents`. -/// -/// `DynamicHostComponent` can't be made into a trait object itself since `HostComponent::add_to_linker` -/// does not have a `self` parameter (and thus cannot be add to the object's vtable). 
-trait DynSafeDynamicHostComponent { - /// The moral equivalent to `DynamicHostComponent::update_data` - fn update_data_any(&self, data: &mut dyn Any, component: &AppComponent) -> anyhow::Result<()>; - /// The moral equivalent to `DynamicHostComponent::validate_app` - fn validate_app(&self, app: &App) -> anyhow::Result<()>; -} - -impl DynSafeDynamicHostComponent for T -where - T::Data: Any, -{ - fn update_data_any(&self, data: &mut dyn Any, component: &AppComponent) -> anyhow::Result<()> { - let data = data.downcast_mut().context("wrong data type")?; - self.update_data(data, component) - } - - fn validate_app(&self, app: &App) -> anyhow::Result<()> { - T::validate_app(self, app) - } -} - -struct DynamicHostComponentWithHandle { - host_component: Arc, - handle: AnyHostComponentDataHandle, -} - -/// A heterogeneous collection of dynamic host components. -/// -/// This is stored in an `AppLoader` so that the host components -/// can be referenced and updated at a later point. This is effectively -/// what makes a `DynamicHostComponent` "dynamic" and differentiates it from -/// a regular `HostComponent`. -#[derive(Default)] -pub(crate) struct DynamicHostComponents { - host_components: Vec, -} - -impl DynamicHostComponents { - pub fn add_dynamic_host_component( - &mut self, - engine_builder: &mut EngineBuilder, - host_component: DHC, - ) -> anyhow::Result> { - let host_component = Arc::new(host_component); - let handle = engine_builder.add_host_component(host_component.clone())?; - self.host_components.push(DynamicHostComponentWithHandle { - host_component, - handle: handle.into(), - }); - Ok(handle.into()) - } - - pub fn update_data( - &self, - host_components_data: &mut HostComponentsData, - component: &AppComponent, - ) -> anyhow::Result<()> { - for DynamicHostComponentWithHandle { - host_component, - handle, - } in &self.host_components - { - let data = host_components_data.get_or_insert_any(*handle); - host_component.update_data_any(data.as_mut(), component)?; - } - Ok(()) - } - - pub fn validate_app(&self, app: &App) -> anyhow::Result<()> { - for DynamicHostComponentWithHandle { host_component, .. } in &self.host_components { - host_component.validate_app(app)?; - } - Ok(()) - } -} diff --git a/crates/app/src/lib.rs b/crates/app/src/lib.rs index 70d6a59b6b..e3aa54ac36 100644 --- a/crates/app/src/lib.rs +++ b/crates/app/src/lib.rs @@ -6,22 +6,16 @@ #![deny(missing_docs)] -mod host_component; +use serde::Deserialize; use serde_json::Value; +use spin_locked_app::MetadataExt; + +use locked::{ContentPath, LockedApp, LockedComponent, LockedComponentSource, LockedTrigger}; + pub use spin_locked_app::locked; pub use spin_locked_app::values; pub use spin_locked_app::{Error, MetadataKey, Result}; -use ouroboros::self_referencing; -use serde::Deserialize; -use spin_core::{wasmtime, Engine, EngineBuilder, HostComponentDataHandle, StoreBuilder}; - -use host_component::DynamicHostComponents; -use locked::{ContentPath, LockedApp, LockedComponent, LockedComponentSource, LockedTrigger}; -use spin_locked_app::MetadataExt; - -pub use async_trait::async_trait; -pub use host_component::DynamicHostComponent; pub use locked::Variable; /// MetadataKey for extracting the application name. @@ -33,130 +27,28 @@ pub const APP_DESCRIPTION_KEY: MetadataKey = MetadataKey::new("description"); /// MetadataKey for extracting the OCI image digest. pub const OCI_IMAGE_DIGEST_KEY: MetadataKey = MetadataKey::new("oci_image_digest"); -/// A trait for implementing the low-level operations needed to load an [`App`]. 
-// TODO(lann): Should this migrate to spin-loader? -#[async_trait] -pub trait Loader { - /// Called with an implementation-defined `uri` pointing to some - /// representation of a [`LockedApp`], which will be loaded. - async fn load_app(&self, uri: &str) -> anyhow::Result; - - /// Called with a [`LockedComponentSource`] pointing to a Wasm component - /// binary, which will be loaded. - async fn load_component( - &self, - engine: &wasmtime::Engine, - source: &LockedComponentSource, - ) -> anyhow::Result; - - /// Called with a [`LockedComponentSource`] pointing to a Wasm module - /// binary, which will be loaded. - async fn load_module( - &self, - engine: &wasmtime::Engine, - source: &LockedComponentSource, - ) -> anyhow::Result; - - /// Called with an [`AppComponent`]; any `files` configured with the - /// component should be "mounted" into the `store_builder`, via e.g. - /// [`StoreBuilder::read_only_preopened_dir`]. - async fn mount_files( - &self, - store_builder: &mut StoreBuilder, - component: &AppComponent, - ) -> anyhow::Result<()>; -} - -/// An `AppLoader` holds an implementation of [`Loader`] along with -/// [`DynamicHostComponent`]s configuration. -pub struct AppLoader { - inner: Box, - dynamic_host_components: DynamicHostComponents, +/// An `App` holds loaded configuration for a Spin application. +#[derive(Debug)] +pub struct App { + id: String, + locked: LockedApp, } -impl AppLoader { - /// Creates a new [`AppLoader`]. - pub fn new(loader: impl Loader + Send + Sync + 'static) -> Self { +impl App { + /// Returns a new app for the given runtime-specific identifier and locked + /// app. + pub fn new(id: impl Into, locked: LockedApp) -> Self { Self { - inner: Box::new(loader), - dynamic_host_components: Default::default(), - } - } - - /// Adds a [`DynamicHostComponent`] to the given [`EngineBuilder`] and - /// configures this [`AppLoader`] to update it on component instantiation. - /// - /// This calls [`EngineBuilder::add_host_component`] for you; it should not - /// be called separately. - pub fn add_dynamic_host_component( - &mut self, - engine_builder: &mut EngineBuilder, - host_component: DHC, - ) -> anyhow::Result> { - self.dynamic_host_components - .add_dynamic_host_component(engine_builder, host_component) - } - - /// Loads an [`App`] from the given `Loader`-implementation-specific `uri`. - pub async fn load_app(&self, uri: String) -> Result { - let locked = self - .inner - .load_app(&uri) - .await - .map_err(Error::LoaderError)?; - let app = App { - loader: self, - uri, + id: id.into(), locked, - }; - self.dynamic_host_components - .validate_app(&app) - .map_err(Error::ValidationError)?; - Ok(app) - } - - /// Loads an [`OwnedApp`] from the given `Loader`-implementation-specific - /// `uri`; the [`OwnedApp`] takes ownership of this [`AppLoader`]. - pub async fn load_owned_app(self, uri: String) -> Result { - OwnedApp::try_new_async(self, |loader| Box::pin(loader.load_app(uri))).await - } -} - -impl std::fmt::Debug for AppLoader { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("AppLoader").finish() + } } -} - -#[self_referencing] -#[derive(Debug)] -pub struct OwnedApp { - loader: AppLoader, - #[borrows(loader)] - #[covariant] - app: App<'this>, -} - -impl OwnedApp { - /// Returns a reference to the owned [`App`]. - pub fn borrowed(&self) -> &App { - self.borrow_app() + /// Returns a runtime-specific identifier for this app. + pub fn id(&self) -> &str { + &self.id } -} -/// An `App` holds loaded configuration for a Spin application. 
-/// -/// Note: The `L` param is an implementation detail to support the -/// [`App::inert`] constructor. -#[derive(Debug)] -pub struct App<'a, L = AppLoader> { - loader: &'a L, - uri: String, - locked: LockedApp, -} - -impl<'a, L> App<'a, L> { /// Deserializes typed metadata for this app. /// /// Returns `Ok(None)` if there is no metadata for the given `key` and an @@ -186,7 +78,7 @@ impl<'a, L> App<'a, L> { } /// Returns an iterator of [`AppComponent`]s defined for this app. - pub fn components(&self) -> impl Iterator> { + pub fn components(&self) -> impl Iterator> { self.locked .components .iter() @@ -195,13 +87,13 @@ impl<'a, L> App<'a, L> { /// Returns the [`AppComponent`] with the given `component_id`, or `None` /// if it doesn't exist. - pub fn get_component(&self, component_id: &str) -> Option> { + pub fn get_component(&self, component_id: &str) -> Option> { self.components() .find(|component| component.locked.id == component_id) } /// Returns an iterator of [`AppTrigger`]s defined for this app. - pub fn triggers(&self) -> impl Iterator> { + pub fn triggers(&self) -> impl Iterator> + '_ { self.locked .triggers .iter() @@ -209,9 +101,9 @@ impl<'a, L> App<'a, L> { } /// Returns the trigger metadata for a specific trigger type. - pub fn get_trigger_metadata<'this, T: Deserialize<'this> + Default>( + pub fn get_trigger_metadata<'this, T: Deserialize<'this>>( &'this self, - trigger_type: &'a str, + trigger_type: &str, ) -> Result> { let Some(value) = self.get_trigger_metadata_value(trigger_type) else { return Ok(None); @@ -240,14 +132,28 @@ impl<'a, L> App<'a, L> { /// Returns an iterator of [`AppTrigger`]s defined for this app with /// the given `trigger_type`. - pub fn triggers_with_type( + pub fn triggers_with_type<'a>( &'a self, trigger_type: &'a str, - ) -> impl Iterator> { + ) -> impl Iterator { self.triggers() .filter(move |trigger| trigger.locked.trigger_type == trigger_type) } + /// Returns an iterator of trigger IDs and deserialized trigger configs for + /// the given `trigger_type`. + pub fn trigger_configs<'a, T: Deserialize<'a>>( + &'a self, + trigger_type: &'a str, + ) -> Result> { + self.triggers_with_type(trigger_type) + .map(|trigger| { + let config = trigger.typed_config::()?; + Ok((trigger.id(), config)) + }) + .collect::>>() + } + /// Checks that the application does not have any host requirements /// outside the supported set. The error case returns a comma-separated /// list of unmet requirements. @@ -256,36 +162,14 @@ impl<'a, L> App<'a, L> { } } -impl<'a> App<'a> { - /// Returns a [`Loader`]-implementation-specific URI for this app. - pub fn uri(&self) -> &str { - &self.uri - } -} - -#[doc(hidden)] -pub struct InertLoader; - -impl App<'static, InertLoader> { - /// Return an "inert" App which does not have an associated [`AppLoader`] - /// and cannot be used to instantiate components. - pub fn inert(locked: LockedApp) -> Self { - App { - loader: &InertLoader, - uri: "".into(), - locked, - } - } -} - /// An `AppComponent` holds configuration for a Spin application component. -pub struct AppComponent<'a, L = AppLoader> { +pub struct AppComponent<'a> { /// The app this component belongs to. - pub app: &'a App<'a, L>, + pub app: &'a App, locked: &'a LockedComponent, } -impl<'a, L> AppComponent<'a, L> { +impl<'a> AppComponent<'a> { /// Returns this component's app-unique ID. pub fn id(&self) -> &str { &self.locked.id @@ -296,6 +180,14 @@ impl<'a, L> AppComponent<'a, L> { &self.locked.source } + /// Returns an iterator of environment variable (key, value) pairs. 
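// Illustrative sketch (not part of this patch): how the reworked, loader-free
// `App` API above might be used. The crate path `spin_app`, the function name
// `describe_app`, and the use of `serde_json::Value` as the trigger-config type
// are assumptions for illustration only.
use spin_app::{locked::LockedApp, App};

fn describe_app(locked: LockedApp) -> anyhow::Result<()> {
    // An `App` is now just a runtime identifier plus a `LockedApp`; no loader.
    let app = App::new("example-app", locked);
    for component in app.components() {
        println!("component: {}", component.id());
    }
    // Deserialize the config of every trigger with type "http"; `Value` avoids
    // assuming any particular config schema here.
    for (trigger_id, config) in app.trigger_configs::<serde_json::Value>("http")? {
        println!("trigger {trigger_id}: {config}");
    }
    Ok(())
}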
+ pub fn environment(&self) -> impl IntoIterator { + self.locked + .env + .iter() + .map(|(k, v)| (k.as_str(), v.as_str())) + } + /// Returns an iterator of [`ContentPath`]s for this component's configured /// "directory mounts". pub fn files(&self) -> std::slice::Iter { @@ -328,72 +220,21 @@ impl<'a, L> AppComponent<'a, L> { } } -impl<'a> AppComponent<'a> { - /// Loads and returns the [`spin_core::Component`] for this component. - pub async fn load_component( - &self, - engine: &Engine, - ) -> Result { - self.app - .loader - .inner - .load_component(engine.as_ref(), &self.locked.source) - .await - .map_err(Error::LoaderError) - } - - /// Loads and returns the [`spin_core::Module`] for this component. - pub async fn load_module( - &self, - engine: &Engine, - ) -> Result { - self.app - .loader - .inner - .load_module(engine.as_ref(), &self.locked.source) - .await - .map_err(Error::LoaderError) - } - - /// Updates the given [`StoreBuilder`] with configuration for this component. - /// - /// In particular, the WASI 'env' and "preloaded dirs" are set up, and any - /// [`DynamicHostComponent`]s associated with the source [`AppLoader`] are - /// configured. - pub async fn apply_store_config(&self, builder: &mut StoreBuilder) -> Result<()> { - builder.env(&self.locked.env).map_err(Error::CoreError)?; - - let loader = self.app.loader; - loader - .inner - .mount_files(builder, self) - .await - .map_err(Error::LoaderError)?; - - loader - .dynamic_host_components - .update_data(builder.host_components_data(), self) - .map_err(Error::HostComponentError)?; - - Ok(()) - } -} - /// An `AppTrigger` holds configuration for a Spin application trigger. -pub struct AppTrigger<'a, L = AppLoader> { +pub struct AppTrigger<'a> { /// The app this trigger belongs to. - pub app: &'a App<'a, L>, + pub app: &'a App, locked: &'a LockedTrigger, } -impl<'a, L> AppTrigger<'a, L> { +impl<'a> AppTrigger<'a> { /// Returns this trigger's app-unique ID. - pub fn id(&self) -> &str { + pub fn id(&self) -> &'a str { &self.locked.id } /// Returns the Trigger's type. - pub fn trigger_type(&self) -> &str { + pub fn trigger_type(&self) -> &'a str { &self.locked.trigger_type } @@ -406,7 +247,7 @@ impl<'a, L> AppTrigger<'a, L> { /// /// This is a convenience wrapper that looks up the component based on the /// 'component' metadata value which is conventionally a component ID. - pub fn component(&self) -> Result> { + pub fn component(&self) -> Result> { let id = &self.locked.id; let common_config: CommonTriggerConfig = self.typed_config()?; let component_id = common_config.component.ok_or_else(|| { diff --git a/crates/componentize/src/bugs.rs b/crates/componentize/src/bugs.rs index 1e7ed9e63c..0b9dbbfaeb 100644 --- a/crates/componentize/src/bugs.rs +++ b/crates/componentize/src/bugs.rs @@ -1,66 +1,57 @@ -use anyhow::bail; -use wasm_metadata::Producers; -use wasmparser::{Encoding, ExternalKind, Parser, Payload}; +use crate::module_info::ModuleInfo; -/// Represents the detected likelihood of the allocation bug fixed in -/// https://github.com/WebAssembly/wasi-libc/pull/377 being present in a Wasm -/// module. +pub const EARLIEST_PROBABLY_SAFE_CLANG_VERSION: &str = "15.0.7"; + +/// This error represents the likely presence of the allocation bug fixed in +/// https://github.com/WebAssembly/wasi-libc/pull/377 in a Wasm module. 
#[derive(Debug, PartialEq)] -pub enum WasiLibc377Bug { - ProbablySafe, - ProbablyUnsafe, - Unknown, +pub struct WasiLibc377Bug { + clang_version: Option, } impl WasiLibc377Bug { - pub fn detect(module: &[u8]) -> anyhow::Result { - for payload in Parser::new(0).parse_all(module) { - match payload? { - Payload::Version { encoding, .. } if encoding != Encoding::Module => { - bail!("detection only applicable to modules"); - } - Payload::ExportSection(reader) => { - for export in reader { - let export = export?; - if export.kind == ExternalKind::Func && export.name == "cabi_realloc" { - // `cabi_realloc` is a good signal that this module - // uses wit-bindgen, making it probably-safe. - tracing::debug!("Found cabi_realloc export"); - return Ok(Self::ProbablySafe); - } - } - } - Payload::CustomSection(c) if c.name() == "producers" => { - let producers = Producers::from_bytes(c.data(), c.data_offset())?; - if let Some(clang_version) = - producers.get("processed-by").and_then(|f| f.get("clang")) - { - tracing::debug!(clang_version, "Parsed producers.processed-by.clang"); - - // Clang/LLVM version is a good proxy for wasi-sdk - // version; the allocation bug was fixed in wasi-sdk-18 - // and LLVM was updated to 15.0.7 in wasi-sdk-19. - if let Some((major, minor, patch)) = parse_clang_version(clang_version) { - return if (major, minor, patch) >= (15, 0, 7) { - Ok(Self::ProbablySafe) - } else { - Ok(Self::ProbablyUnsafe) - }; - } else { - tracing::warn!( - clang_version, - "Unexpected producers.processed-by.clang version" - ); - } - } - } - _ => (), + /// Detects the likely presence of this bug. + pub fn check(module_info: &ModuleInfo) -> Result<(), Self> { + if module_info.probably_uses_wit_bindgen() { + // Modules built with wit-bindgen are probably safe. + return Ok(()); + } + if let Some(clang_version) = &module_info.clang_version { + // Clang/LLVM version is a good proxy for wasi-sdk + // version; the allocation bug was fixed in wasi-sdk-18 + // and LLVM was updated to 15.0.7 in wasi-sdk-19. + if let Some((major, minor, patch)) = parse_clang_version(clang_version) { + let earliest_safe = + parse_clang_version(EARLIEST_PROBABLY_SAFE_CLANG_VERSION).unwrap(); + if (major, minor, patch) < earliest_safe { + return Err(Self { + clang_version: Some(clang_version.clone()), + }); + }; + } else { + tracing::warn!( + clang_version, + "Unexpected producers.processed-by.clang version" + ); } } - Ok(Self::Unknown) + Ok(()) + } +} + +impl std::fmt::Display for WasiLibc377Bug { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "This Wasm module appears to have been compiled with wasi-sdk version <19 \ + which contains a critical memory safety bug. 
For more information, see: \ + https://github.com/fermyon/spin/issues/2552" + ) } } +impl std::error::Error for WasiLibc377Bug {} + fn parse_clang_version(ver: &str) -> Option<(u16, u16, u16)> { // Strip optional trailing detail after space let ver = ver.split(' ').next().unwrap(); @@ -77,42 +68,34 @@ mod tests { #[test] fn wasi_libc_377_detect() { - use WasiLibc377Bug::*; - for (wasm, expected) in [ - (r#"(module)"#, Unknown), + for (wasm, safe) in [ + (r#"(module)"#, true), ( r#"(module (func (export "cabi_realloc") (unreachable)))"#, - ProbablySafe, - ), - ( - r#"(module (func (export "some_other_function") (unreachable)))"#, - Unknown, + true, ), ( r#"(module (@producers (processed-by "clang" "16.0.0 extra-stuff")))"#, - ProbablySafe, + true, ), ( r#"(module (@producers (processed-by "clang" "15.0.7")))"#, - ProbablySafe, + true, ), ( r#"(module (@producers (processed-by "clang" "15.0.6")))"#, - ProbablyUnsafe, - ), - ( - r#"(module (@producers (processed-by "clang" "14.0.0")))"#, - ProbablyUnsafe, + false, ), ( - r#"(module (@producers (processed-by "clang" "a.b.c")))"#, - Unknown, + r#"(module (@producers (processed-by "clang" "14.0.0 extra-stuff")))"#, + false, ), ] { eprintln!("WAT: {wasm}"); let module = wat::parse_str(wasm).unwrap(); - let detected = WasiLibc377Bug::detect(&module).unwrap(); - assert_eq!(detected, expected); + let module_info = ModuleInfo::from_module(&module).unwrap(); + let detected = WasiLibc377Bug::check(&module_info); + assert!(detected.is_ok() == safe, "{wasm} -> {detected:?}"); } } } diff --git a/crates/componentize/src/lib.rs b/crates/componentize/src/lib.rs index f4be1bcb13..e78269c256 100644 --- a/crates/componentize/src/lib.rs +++ b/crates/componentize/src/lib.rs @@ -3,6 +3,7 @@ use { anyhow::{anyhow, Context, Result}, convert::{IntoEntityType, IntoExportKind}, + module_info::ModuleInfo, std::{borrow::Cow, collections::HashSet}, wasm_encoder::{CustomSection, ExportSection, ImportSection, Module, RawSection}, wasmparser::{Encoding, Parser, Payload}, @@ -14,6 +15,7 @@ pub mod bugs; #[cfg(test)] mod abi_conformance; mod convert; +mod module_info; const SPIN_ADAPTER: &[u8] = include_bytes!(concat!( env!("OUT_DIR"), @@ -51,8 +53,9 @@ pub fn componentize_if_necessary(module_or_component: &[u8]) -> Result } pub fn componentize(module: &[u8]) -> Result> { - match WitBindgenVersion::from_module(module)? { - WitBindgenVersion::V0_2 => componentize_old_bindgen(module), + let module_info = ModuleInfo::from_module(module)?; + match WitBindgenVersion::detect(&module_info)? 
{ + WitBindgenVersion::V0_2OrNone => componentize_old_module(module, &module_info), WitBindgenVersion::GreaterThanV0_4 => componentize_new_bindgen(module), WitBindgenVersion::Other(other) => Err(anyhow::anyhow!( "cannot adapt modules created with wit-bindgen version {other}" @@ -65,40 +68,36 @@ pub fn componentize(module: &[u8]) -> Result> { #[derive(Debug)] enum WitBindgenVersion { GreaterThanV0_4, - V0_2, + V0_2OrNone, Other(String), } impl WitBindgenVersion { - fn from_module(module: &[u8]) -> Result { - let (_, bindgen) = metadata::decode(module)?; - if let Some(producers) = bindgen.producers { - if let Some(processors) = producers.get("processed-by") { - let bindgen_version = processors.iter().find_map(|(key, value)| { - key.starts_with("wit-bindgen").then_some(value.as_str()) - }); - if let Some(v) = bindgen_version { - let mut parts = v.split('.'); - let Some(major) = parts.next().and_then(|p| p.parse::().ok()) else { - return Ok(Self::Other(v.to_owned())); - }; - let Some(minor) = parts.next().and_then(|p| p.parse::().ok()) else { - return Ok(Self::Other(v.to_owned())); - }; - if (major == 0 && minor < 5) || major >= 1 { - return Ok(Self::Other(v.to_owned())); - } - // Either there should be no patch version or nothing after patch - if parts.next().is_none() || parts.next().is_none() { - return Ok(Self::GreaterThanV0_4); - } else { - return Ok(Self::Other(v.to_owned())); - } + fn detect(module_info: &ModuleInfo) -> Result { + if let Some(processors) = module_info.bindgen_processors() { + let bindgen_version = processors + .iter() + .find_map(|(key, value)| key.starts_with("wit-bindgen").then_some(value.as_str())); + if let Some(v) = bindgen_version { + let mut parts = v.split('.'); + let Some(major) = parts.next().and_then(|p| p.parse::().ok()) else { + return Ok(Self::Other(v.to_owned())); + }; + let Some(minor) = parts.next().and_then(|p| p.parse::().ok()) else { + return Ok(Self::Other(v.to_owned())); + }; + if (major == 0 && minor < 5) || major >= 1 { + return Ok(Self::Other(v.to_owned())); + } + // Either there should be no patch version or nothing after patch + if parts.next().is_none() || parts.next().is_none() { + return Ok(Self::GreaterThanV0_4); + } else { + return Ok(Self::Other(v.to_owned())); } } } - - Ok(Self::V0_2) + Ok(Self::V0_2OrNone) } } @@ -111,6 +110,18 @@ pub fn componentize_new_bindgen(module: &[u8]) -> Result> { .encode() } +/// Modules *not* produced with wit-bindgen >= 0.5 could be old wit-bindgen or no wit-bindgen +pub fn componentize_old_module(module: &[u8], module_info: &ModuleInfo) -> Result> { + // If the module has a _start export and doesn't obviously use wit-bindgen + // it is likely an old p1 command module. + if module_info.has_start_export && !module_info.probably_uses_wit_bindgen() { + bugs::WasiLibc377Bug::check(module_info)?; + componentize_command(module) + } else { + componentize_old_bindgen(module) + } +} + /// Modules produced with wit-bindgen 0.2 need more extensive adaption pub fn componentize_old_bindgen(module: &[u8]) -> Result> { let (module, exports) = retarget_imports_and_get_exports(ADAPTER_NAME, module)?; diff --git a/crates/componentize/src/module_info.rs b/crates/componentize/src/module_info.rs new file mode 100644 index 0000000000..c951b24432 --- /dev/null +++ b/crates/componentize/src/module_info.rs @@ -0,0 +1,106 @@ +use wasm_metadata::Producers; +use wasmparser::{Encoding, ExternalKind, Parser, Payload}; +use wit_component::metadata::Bindgen; + +// wit-bindgen has used both of these historically. 
+const CANONICAL_ABI_REALLOC_EXPORTS: &[&str] = &["cabi_realloc", "canonical_abi_realloc"]; + +/// Stores various bits of info parsed from a Wasm module that are relevant to +/// componentization. +#[derive(Default)] +pub struct ModuleInfo { + pub bindgen: Option, + pub clang_version: Option, + pub realloc_export: Option, + pub has_start_export: bool, +} + +impl ModuleInfo { + /// Parses info from the given binary module bytes. + pub fn from_module(module: &[u8]) -> anyhow::Result { + let mut info = Self::default(); + for payload in Parser::new(0).parse_all(module) { + match payload? { + Payload::Version { encoding, .. } => { + anyhow::ensure!( + encoding == Encoding::Module, + "ModuleInfo::from_module is only applicable to Modules; got a {encoding:?}" + ); + } + Payload::ExportSection(reader) => { + for export in reader { + let export = export?; + if export.kind == ExternalKind::Func { + if CANONICAL_ABI_REALLOC_EXPORTS.contains(&export.name) { + tracing::debug!( + "Found canonical ABI realloc export {:?}", + export.name + ); + info.realloc_export = Some(export.name.to_string()); + } else if export.name == "_start" { + tracing::debug!("Found _start export"); + info.has_start_export = true; + } + } + } + } + Payload::CustomSection(c) => { + let section_name = c.name(); + if section_name == "producers" { + let producers = Producers::from_bytes(c.data(), c.data_offset())?; + if let Some(clang_version) = + producers.get("processed-by").and_then(|f| f.get("clang")) + { + tracing::debug!(clang_version, "Parsed producers.processed-by.clang"); + info.clang_version = Some(clang_version.to_string()); + } + } else if section_name.starts_with("component-type") { + match decode_bindgen_custom_section(section_name, c.data()) { + Ok(bindgen) => { + tracing::debug!("Parsed bindgen section {section_name:?}"); + info.bindgen = Some(bindgen); + } + Err(err) => tracing::warn!( + "Error parsing bindgen section {section_name:?}: {err}" + ), + } + } + } + _ => (), + } + } + Ok(info) + } + + /// Returns true if the given module was heuristically probably compiled + /// with wit-bindgen. + pub fn probably_uses_wit_bindgen(&self) -> bool { + // Presence of bindgen metadata is a strong signal + self.bindgen.is_some() || + // A canonical ABI realloc export is a decent signal + self.realloc_export.is_some() + } + + /// Returns the wit-bindgen metadata producers processed-by field, if + /// present. + pub fn bindgen_processors(&self) -> Option { + self.bindgen + .as_ref()? + .producers + .as_ref()? + .get("processed-by") + } +} + +/// This is a silly workaround for the limited public interface available in +/// [`wit_component::metadata`]. +// TODO: Make Bindgen::decode_custom_section public? 
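// Illustrative sketch (not part of this patch): a condensed view of how the
// heuristics collected by `ModuleInfo` feed the `V0_2OrNone` branch handled by
// `componentize_old_module` above. The function name `choose_componentization`
// and its string return value are illustrative only.
fn choose_componentization(module: &[u8]) -> anyhow::Result<&'static str> {
    let info = ModuleInfo::from_module(module)?;
    if info.has_start_export && !info.probably_uses_wit_bindgen() {
        // Likely an old WASI preview 1 command module: refuse it if it probably
        // carries the wasi-libc allocation bug (clang < 15.0.7, i.e. wasi-sdk < 19).
        crate::bugs::WasiLibc377Bug::check(&info)?;
        Ok("componentize_command")
    } else {
        // Otherwise treat it as a module built with old (or no) wit-bindgen.
        Ok("componentize_old_bindgen")
    }
}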
+fn decode_bindgen_custom_section(name: &str, data: &[u8]) -> anyhow::Result { + let mut module = wasm_encoder::Module::new(); + module.section(&wasm_encoder::CustomSection { + name: name.into(), + data: data.into(), + }); + let (_, bindgen) = wit_component::metadata::decode(module.as_slice())?; + Ok(bindgen) +} diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index d0d96c44bb..53ab5f3875 100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -9,25 +9,13 @@ anyhow = "1.0" async-trait = "0.1" tracing = { workspace = true } wasmtime = { workspace = true } -wasmtime-wasi = { workspace = true } -wasmtime-wasi-http = { workspace = true } -wasi-common-preview1 = { workspace = true } -system-interface = { version = "0.27.0", features = ["cap_std_impls"] } -cap-std = "3.0.0" -cap-primitives = "3.0.0" -tokio = "1.0" -bytes = "1.0" -spin-telemetry = { path = "../telemetry" } -http = "1.0" - -[target.'cfg(unix)'.dependencies] -rustix = "0.37.19" - -[target.'cfg(windows)'.dependencies] -io-extras = "0.18.0" [dev-dependencies] -tempfile = "3" -tokio = { version = "1", features = ["macros", "rt", "rt-multi-thread"] } +serde_json = "1" spin-componentize = { workspace = true } -futures = "0.3" +tokio = { version = "1", features = ["macros", "rt", "rt-multi-thread"] } +spin-factor-wasi = { path = "../factor-wasi" } +spin-factors = { path = "../factors" } +spin-factors-test = { path = "../factors-test" } +spin-locked-app = { path = "../locked-app" } +wasmtime-wasi = { workspace = true } \ No newline at end of file diff --git a/crates/core/build.rs b/crates/core/build.rs new file mode 100644 index 0000000000..c96556b06e --- /dev/null +++ b/crates/core/build.rs @@ -0,0 +1,6 @@ +fn main() { + println!("cargo:rerun-if-changed=build.rs"); + // Enable spin-factors-derive to emit expanded macro output. + let out_dir = std::env::var("OUT_DIR").unwrap(); + println!("cargo:rustc-env=SPIN_FACTORS_DERIVE_EXPAND_DIR={out_dir}"); +} diff --git a/crates/core/src/host_component.rs b/crates/core/src/host_component.rs deleted file mode 100644 index a2b428a5fa..0000000000 --- a/crates/core/src/host_component.rs +++ /dev/null @@ -1,305 +0,0 @@ -use std::{ - any::{type_name, Any, TypeId}, - collections::HashMap, - marker::PhantomData, - sync::Arc, -}; - -use anyhow::{bail, Result}; - -use super::{Data, Linker}; - -/// A trait for Spin "host components". -/// -/// A Spin host component is an interface provided to Spin components that is -/// implemented by the host. This trait is designed to be compatible with -/// [`wasmtime::component::bindgen`]'s generated bindings. -/// -/// # Example -/// -/// ```ignore -/// use spin_core::my_interface; -/// -/// #[derive(Default)] -/// struct MyHostComponent { -/// // ... -/// } -/// -/// #[async_trait] -/// impl my_interface::Host for MyHostComponent { -/// // ... -/// } -/// -/// impl HostComponent for MyHostComponent { -/// type Data = Self; -/// -/// fn add_to_linker( -/// linker: &mut Linker, -/// get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, -/// ) -> anyhow::Result<()> { -/// my_interface::add_to_linker(linker, get) -/// } -/// -/// fn build_data(&self) -> Self::Data { -/// Default::default() -/// } -/// } -/// ``` -pub trait HostComponent: Send + Sync + 'static { - /// Host component runtime data. - type Data: Send + Sized + 'static; - - /// Add this component to the given Linker, using the given runtime state-getting handle. 
- /// - /// This function signature mirrors those generated by [`wasmtime::component::bindgen`]. - fn add_to_linker( - linker: &mut Linker, - get: impl Fn(&mut Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> Result<()>; - - /// Builds new host component runtime data for [`HostComponentsData`]. - fn build_data(&self) -> Self::Data; -} - -impl HostComponent for Arc { - type Data = HC::Data; - - fn add_to_linker( - linker: &mut Linker, - get: impl Fn(&mut Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> Result<()> { - HC::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - (**self).build_data() - } -} - -/// An opaque handle which can be passed to [`HostComponentsData`] to access -/// host component data. -#[derive(Clone, Copy)] -pub struct AnyHostComponentDataHandle(usize); - -impl From> for AnyHostComponentDataHandle { - fn from(value: HostComponentDataHandle) -> Self { - value.inner - } -} - -/// An opaque handle returned by [`crate::EngineBuilder::add_host_component`] -/// which can be passed to [`HostComponentsData`] to access or set associated -/// [`HostComponent::Data`]. -pub struct HostComponentDataHandle { - inner: AnyHostComponentDataHandle, - _phantom: PhantomData HC::Data>, -} - -impl HostComponentDataHandle { - fn from_any(handle: AnyHostComponentDataHandle) -> Self { - Self { - inner: handle, - _phantom: PhantomData, - } - } -} - -impl Clone for HostComponentDataHandle { - fn clone(&self) -> Self { - *self - } -} - -impl Copy for HostComponentDataHandle {} - -impl From>> for HostComponentDataHandle { - fn from(value: HostComponentDataHandle>) -> Self { - Self::from_any(value.inner) - } -} - -#[doc(hidden)] -pub trait DynSafeHostComponent { - fn build_data_box(&self) -> AnyData; -} - -impl DynSafeHostComponent for T -where - T::Data: Any + Send, -{ - fn build_data_box(&self) -> AnyData { - Box::new(self.build_data()) - } -} - -type BoxHostComponent = Box; - -#[derive(Default)] -pub struct HostComponentsBuilder { - handles: HashMap, - host_components: Vec, -} - -impl HostComponentsBuilder { - pub fn add_host_component( - &mut self, - linker: &mut Linker, - host_component: HC, - ) -> Result> { - let type_id = TypeId::of::(); - if self.handles.contains_key(&type_id) { - bail!( - "already have a host component of type {}", - type_name::() - ) - } - - let handle = AnyHostComponentDataHandle(self.host_components.len()); - self.handles.insert(type_id, handle); - - self.host_components.push(Box::new(host_component)); - HC::add_to_linker(linker, move |data| { - data.host_components_data - .get_or_insert_any(handle) - .downcast_mut() - .unwrap() - })?; - Ok(HostComponentDataHandle:: { - inner: handle, - _phantom: PhantomData, - }) - } - - pub fn build(self) -> HostComponents { - HostComponents { - handles: self.handles, - host_components: Arc::new(self.host_components), - } - } -} - -pub struct HostComponents { - handles: HashMap, - host_components: Arc>, -} - -impl HostComponents { - pub fn builder() -> HostComponentsBuilder { - Default::default() - } - - pub fn new_data(&self) -> HostComponentsData { - // Fill with `None` - let data = std::iter::repeat_with(Default::default) - .take(self.host_components.len()) - .collect(); - HostComponentsData { - data, - host_components: self.host_components.clone(), - } - } - - pub fn find_handle(&self) -> Option> { - self.handles - .get(&TypeId::of::()) - .map(|handle| HostComponentDataHandle::from_any(*handle)) - } -} - -type AnyData = Box; - -/// Holds a heterogenous set of 
[`HostComponent::Data`]s. -pub struct HostComponentsData { - data: Vec>, - host_components: Arc>, -} - -impl HostComponentsData { - /// Sets the [`HostComponent::Data`] for the given `handle`. - pub fn set(&mut self, handle: HostComponentDataHandle, data: HC::Data) { - self.data[handle.inner.0] = Some(Box::new(data)); - } - - /// Retrieves a mutable reference to [`HostComponent::Data`] for the given `handle`. - /// - /// If unset, the data will be initialized with [`HostComponent::build_data`]. - /// - /// # Panics - /// - /// If the given handle was not obtained from the same [`HostComponentsBuilder`] that - /// was the source of this [`HostComponentsData`], this function may panic. - pub fn get_or_insert( - &mut self, - handle: HostComponentDataHandle, - ) -> &mut HC::Data { - let data = self.get_or_insert_any(handle.inner); - data.downcast_mut().unwrap() - } - - /// Retrieves a mutable reference to [`HostComponent::Data`] for the given `handle`. - /// - /// If unset, the data will be initialized with [`HostComponent::build_data`]. - /// - /// # Panics - /// - /// If the given handle was not obtained from the same [`HostComponentsBuilder`] that - /// was the source of this [`HostComponentsData`], this function may panic. - pub fn get_or_insert_any(&mut self, handle: AnyHostComponentDataHandle) -> &mut AnyData { - let idx = handle.0; - self.data[idx].get_or_insert_with(|| self.host_components[idx].build_data_box()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - struct TestHC; - - impl HostComponent for TestHC { - type Data = u8; - - fn add_to_linker( - _linker: &mut Linker, - _get: impl Fn(&mut Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> Result<()> { - Ok(()) - } - - fn build_data(&self) -> Self::Data { - 0 - } - } - - #[test] - fn host_components_data() { - let engine = wasmtime::Engine::default(); - let mut linker: crate::Linker<()> = crate::Linker::new(&engine); - - let mut builder = HostComponents::builder(); - let handle1 = builder - .add_host_component(&mut linker, Arc::new(TestHC)) - .unwrap(); - let handle2 = builder.add_host_component(&mut linker, TestHC).unwrap(); - let host_components = builder.build(); - let mut hc_data = host_components.new_data(); - - assert_eq!(hc_data.get_or_insert(handle1), &0); - - hc_data.set(handle2, 1); - assert_eq!(hc_data.get_or_insert(handle2), &1); - } - - #[test] - fn find_handle() { - let engine = wasmtime::Engine::default(); - let mut linker: crate::Linker<()> = crate::Linker::new(&engine); - - let mut builder = HostComponents::builder(); - builder.add_host_component(&mut linker, TestHC).unwrap(); - let host_components = builder.build(); - let handle = host_components.find_handle::().unwrap(); - let mut hc_data = host_components.new_data(); - assert_eq!(hc_data.get_or_insert(handle), &0); - } -} diff --git a/crates/core/src/io.rs b/crates/core/src/io.rs deleted file mode 100644 index a8bb7bd3dd..0000000000 --- a/crates/core/src/io.rs +++ /dev/null @@ -1,34 +0,0 @@ -use wasmtime_wasi::{pipe::MemoryOutputPipe, HostOutputStream}; - -/// An in-memory stdio output buffer. -#[derive(Clone)] -pub struct OutputBuffer(MemoryOutputPipe); - -impl OutputBuffer { - /// Clones the buffered output from this buffer. 
- pub fn contents(&self) -> bytes::Bytes { - self.0.contents() - } - - pub(crate) fn writer(&self) -> impl HostOutputStream { - self.0.clone() - } -} - -impl Default for OutputBuffer { - fn default() -> Self { - Self(MemoryOutputPipe::new(usize::MAX)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[tokio::test] - async fn take_what_you_write() { - let buf = OutputBuffer::default(); - buf.writer().write(b"foo".to_vec().into()).unwrap(); - assert_eq!(buf.contents().as_ref(), b"foo"); - } -} diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index c98704f094..cb7fb53419 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -1,46 +1,30 @@ //! Spin core execution engine //! -//! This crate provides low-level Wasm and WASI functionality required by Spin. -//! Most of this functionality consists of wrappers around [`wasmtime`] and -//! [`wasi_common`] that narrows the flexibility of `wasmtime` to the set of -//! features used by Spin (such as only supporting `wasmtime`'s async calling style). +//! This crate provides low-level Wasm functionality required by Spin. Most of +//! this functionality consists of wrappers around [`wasmtime`] that narrow the +//! flexibility of `wasmtime` to the set of features used by Spin (such as only +//! supporting `wasmtime`'s async calling style). #![deny(missing_docs)] -mod host_component; -mod io; mod limits; -mod preview1; mod store; -pub mod wasi_2023_10_18; -pub mod wasi_2023_11_10; use std::sync::OnceLock; use std::{path::PathBuf, time::Duration}; use anyhow::Result; -use http::Request; -use tracing::{field::Empty, instrument}; +use tracing::instrument; use wasmtime::{InstanceAllocationStrategy, PoolingAllocationConfig}; -use wasmtime_wasi::ResourceTable; -use wasmtime_wasi_http::body::HyperOutgoingBody; -use wasmtime_wasi_http::types::{default_send_request, WasiHttpCtx, WasiHttpView}; - -use self::host_component::{HostComponents, HostComponentsBuilder}; pub use async_trait::async_trait; pub use wasmtime::{ self, - component::{Component, Instance}, + component::{Component, Instance, InstancePre, Linker}, Instance as ModuleInstance, Module, Trap, }; -pub use wasmtime_wasi::I32Exit; -pub use host_component::{ - AnyHostComponentDataHandle, HostComponent, HostComponentDataHandle, HostComponentsData, -}; -pub use io::OutputBuffer; -pub use store::{Store, StoreBuilder, Wasi, WasiVersion}; +pub use store::{AsState, Store, StoreBuilder}; /// The default [`EngineBuilder::epoch_tick_interval`]. pub const DEFAULT_EPOCH_TICK_INTERVAL: Duration = Duration::from_millis(10); @@ -207,183 +191,43 @@ fn use_pooling_allocator_by_default() -> bool { } /// Host state data associated with individual [Store]s and [Instance]s. 
-pub struct Data { - inner: T, - wasi: Wasi, - host_components_data: HostComponentsData, +#[derive(Default)] +pub struct State { store_limits: limits::StoreLimitsAsync, - table: ResourceTable, } -impl Data { +impl State { /// Get the amount of memory in bytes consumed by instances in the store pub fn memory_consumed(&self) -> u64 { self.store_limits.memory_consumed() } } -impl AsRef for Data { - fn as_ref(&self) -> &T { - &self.inner - } -} - -impl AsMut for Data { - fn as_mut(&mut self) -> &mut T { - &mut self.inner - } -} - -impl wasmtime_wasi::WasiView for Data { - fn table(&mut self) -> &mut ResourceTable { - &mut self.table - } - - fn ctx(&mut self) -> &mut wasmtime_wasi::WasiCtx { - match &mut self.wasi { - Wasi::Preview1(_) => panic!("using WASI Preview 1 functions with Preview 2 store"), - Wasi::Preview2 { wasi_ctx, .. } => wasi_ctx, - } - } -} - -impl WasiHttpView for Data { - fn ctx(&mut self) -> &mut WasiHttpCtx { - match &mut self.wasi { - Wasi::Preview1(_) => panic!("using WASI Preview 1 functions with Preview 2 store"), - Wasi::Preview2 { wasi_http_ctx, .. } => wasi_http_ctx, - } - } - - fn table(&mut self) -> &mut ResourceTable { - &mut self.table - } - - #[instrument( - name = "spin_core.send_request", - skip_all, - fields( - otel.kind = "client", - url.full = %request.uri(), - http.request.method = %request.method(), - otel.name = %request.method(), - http.response.status_code = Empty, - server.address = Empty, - server.port = Empty, - ), - )] - fn send_request( - &mut self, - mut request: Request, - config: wasmtime_wasi_http::types::OutgoingRequestConfig, - ) -> wasmtime_wasi_http::HttpResult - where - Self: Sized, - { - spin_telemetry::inject_trace_context(&mut request); - T::send_request(self, request, config) - } -} - -/// Handler for wasi-http based requests -pub trait OutboundWasiHttpHandler { - /// Send the request - fn send_request( - data: &mut Data, - request: Request, - config: wasmtime_wasi_http::types::OutgoingRequestConfig, - ) -> wasmtime_wasi_http::HttpResult - where - Self: Sized; -} - -impl OutboundWasiHttpHandler for () { - fn send_request( - _data: &mut Data, - request: Request, - config: wasmtime_wasi_http::types::OutgoingRequestConfig, - ) -> wasmtime_wasi_http::HttpResult - where - Self: Sized, - { - Ok(default_send_request(request, config)) - } -} - -/// An alias for [`wasmtime::Linker`] specialized to [`Data`]. -pub type ModuleLinker = wasmtime::Linker>; - -/// An alias for [`wasmtime::component::Linker`] specialized to [`Data`]. -pub type Linker = wasmtime::component::Linker>; - /// A builder interface for configuring a new [`Engine`]. /// /// A new [`EngineBuilder`] can be obtained with [`Engine::builder`]. pub struct EngineBuilder { engine: wasmtime::Engine, linker: Linker, - module_linker: ModuleLinker, - host_components_builder: HostComponentsBuilder, epoch_tick_interval: Duration, epoch_ticker_thread: bool, } -impl EngineBuilder { +impl EngineBuilder { fn new(config: &Config) -> Result { let engine = wasmtime::Engine::new(&config.inner)?; let linker: Linker = Linker::new(&engine); - let mut module_linker = ModuleLinker::new(&engine); - - wasi_common_preview1::tokio::add_to_linker(&mut module_linker, |data| { - match &mut data.wasi { - Wasi::Preview1(ctx) => ctx, - Wasi::Preview2 { .. 
} => { - panic!("using WASI Preview 2 functions with Preview 1 store") - } - } - })?; - Ok(Self { engine, linker, - module_linker, - host_components_builder: HostComponents::builder(), epoch_tick_interval: DEFAULT_EPOCH_TICK_INTERVAL, epoch_ticker_thread: true, }) } -} - -impl EngineBuilder { - /// Adds definition(s) to the built [`Engine`]. - /// - /// This method's signature is meant to be used with - /// [`wasmtime::component::bindgen`]'s generated `add_to_linker` functions, e.g.: - /// - /// ```ignore - /// use spin_core::my_interface; - /// // ... - /// let mut builder: EngineBuilder = Engine::builder(); - /// builder.link_import(my_interface::add_to_linker)?; - /// ``` - pub fn link_import( - &mut self, - f: impl FnOnce(&mut Linker, fn(&mut Data) -> &mut T) -> Result<()>, - ) -> Result<()> { - f(&mut self.linker, Data::as_mut) - } - /// Adds a [`HostComponent`] to the built [`Engine`]. - /// - /// Returns a [`HostComponentDataHandle`] which can be passed to - /// [`HostComponentsData`] to access or set associated - /// [`HostComponent::Data`] for an instance. - pub fn add_host_component( - &mut self, - host_component: HC, - ) -> Result> { - self.host_components_builder - .add_host_component(&mut self.linker, host_component) + /// Returns a reference to the [`Linker`] for this [`Engine`]. + pub fn linker(&mut self) -> &mut Linker { + &mut self.linker } /// Sets the epoch tick internal for the built [`Engine`]. @@ -426,14 +270,9 @@ impl EngineBuilder { /// Builds an [`Engine`] from this builder. pub fn build(self) -> Engine { self.maybe_spawn_epoch_ticker(); - - let host_components = self.host_components_builder.build(); - Engine { inner: self.engine, linker: self.linker, - module_linker: self.module_linker, - host_components, epoch_tick_interval: self.epoch_tick_interval, } } @@ -444,48 +283,24 @@ impl EngineBuilder { pub struct Engine { inner: wasmtime::Engine, linker: Linker, - module_linker: ModuleLinker, - host_components: HostComponents, epoch_tick_interval: Duration, } -impl Engine { +impl Engine { /// Creates a new [`EngineBuilder`] with the given [`Config`]. pub fn builder(config: &Config) -> Result> { EngineBuilder::new(config) } /// Creates a new [`StoreBuilder`]. - pub fn store_builder(&self, wasi_version: WasiVersion) -> StoreBuilder { - StoreBuilder::new( - self.inner.clone(), - self.epoch_tick_interval, - &self.host_components, - wasi_version, - ) + pub fn store_builder(&self) -> StoreBuilder { + StoreBuilder::new(self.inner.clone(), self.epoch_tick_interval) } /// Creates a new [`InstancePre`] for the given [`Component`]. #[instrument(skip_all, level = "debug")] pub fn instantiate_pre(&self, component: &Component) -> Result> { - let inner = self.linker.instantiate_pre(component)?; - Ok(InstancePre { inner }) - } - - /// Creates a new [`ModuleInstancePre`] for the given [`Module`]. - #[instrument(skip_all, level = "debug")] - pub fn module_instantiate_pre(&self, module: &Module) -> Result> { - let inner = self.module_linker.instantiate_pre(module)?; - Ok(ModuleInstancePre { inner }) - } - - /// Find the [`HostComponentDataHandle`] for a [`HostComponent`] if configured for this engine. - /// Note: [`DynamicHostComponent`]s are implicitly wrapped in `Arc`s and need to be explicitly - /// typed as such here, e.g. `find_host_component_handle::>()`. 
- pub fn find_host_component_handle( - &self, - ) -> Option> { - self.host_components.find_handle() + self.linker.instantiate_pre(component) } } @@ -494,61 +309,3 @@ impl AsRef for Engine { &self.inner } } - -/// A pre-initialized instance that is ready to be instantiated. -/// -/// See [`wasmtime::component::InstancePre`] for more information. -pub struct InstancePre { - inner: wasmtime::component::InstancePre>, -} - -impl InstancePre { - /// Instantiates this instance with the given [`Store`]. - #[instrument(skip_all, level = "debug")] - pub async fn instantiate_async(&self, store: &mut Store) -> Result { - self.inner.instantiate_async(store).await - } -} - -impl Clone for InstancePre { - fn clone(&self) -> Self { - Self { - inner: self.inner.clone(), - } - } -} - -impl AsRef>> for InstancePre { - fn as_ref(&self) -> &wasmtime::component::InstancePre> { - &self.inner - } -} - -/// A pre-initialized module instance that is ready to be instantiated. -/// -/// See [`wasmtime::InstancePre`] for more information. -pub struct ModuleInstancePre { - inner: wasmtime::InstancePre>, -} - -impl ModuleInstancePre { - /// Instantiates this instance with the given [`Store`]. - #[instrument(skip_all, level = "debug")] - pub async fn instantiate_async(&self, store: &mut Store) -> Result { - self.inner.instantiate_async(store).await - } -} - -impl Clone for ModuleInstancePre { - fn clone(&self) -> Self { - Self { - inner: self.inner.clone(), - } - } -} - -impl AsRef>> for ModuleInstancePre { - fn as_ref(&self) -> &wasmtime::InstancePre> { - &self.inner - } -} diff --git a/crates/core/src/preview1.rs b/crates/core/src/preview1.rs deleted file mode 100644 index 3b1cd967ce..0000000000 --- a/crates/core/src/preview1.rs +++ /dev/null @@ -1,212 +0,0 @@ -//! Ports of `ReadOnlyDir` and `ReadOnlyFile` to Preview 1 API. -//! 
Adapted from https://github.com/bytecodealliance/preview2-prototyping/pull/121 - -use std::{any::Any, path::PathBuf}; - -use wasi_common_preview1::{ - dir::{OpenResult, ReaddirCursor, ReaddirEntity}, - file::{Advice, FdFlags, FileType, Filestat, OFlags}, - Error, ErrorExt, SystemTimeSpec, WasiDir, WasiFile, -}; - -pub struct ReadOnlyDir(pub Box); - -#[async_trait::async_trait] -impl WasiDir for ReadOnlyDir { - fn as_any(&self) -> &dyn Any { - self - } - - async fn open_file( - &self, - symlink_follow: bool, - path: &str, - oflags: OFlags, - read: bool, - write: bool, - fdflags: FdFlags, - ) -> Result { - if write { - Err(Error::perm()) - } else { - let open_result = self - .0 - .open_file(symlink_follow, path, oflags, read, write, fdflags) - .await?; - Ok(match open_result { - OpenResult::File(f) => OpenResult::File(Box::new(ReadOnlyFile(f))), - OpenResult::Dir(d) => OpenResult::Dir(Box::new(ReadOnlyDir(d))), - }) - } - } - - async fn create_dir(&self, _path: &str) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn readdir( - &self, - cursor: ReaddirCursor, - ) -> Result> + Send>, Error> { - self.0.readdir(cursor).await - } - - async fn symlink(&self, _old_path: &str, _new_path: &str) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn remove_dir(&self, _path: &str) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn unlink_file(&self, _path: &str) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn read_link(&self, path: &str) -> Result { - self.0.read_link(path).await - } - - async fn get_filestat(&self) -> Result { - self.0.get_filestat().await - } - - async fn get_path_filestat( - &self, - path: &str, - follow_symlinks: bool, - ) -> Result { - self.0.get_path_filestat(path, follow_symlinks).await - } - - async fn rename( - &self, - _path: &str, - _dest_dir: &dyn WasiDir, - _dest_path: &str, - ) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn hard_link( - &self, - _path: &str, - _target_dir: &dyn WasiDir, - _target_path: &str, - ) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn set_times( - &self, - _path: &str, - _atime: Option, - _mtime: Option, - _follow_symlinks: bool, - ) -> Result<(), Error> { - Err(Error::perm()) - } -} - -pub struct ReadOnlyFile(pub Box); - -#[async_trait::async_trait] -impl WasiFile for ReadOnlyFile { - fn as_any(&self) -> &dyn Any { - self - } - - async fn get_filetype(&self) -> Result { - self.0.get_filetype().await - } - - #[cfg(unix)] - fn pollable(&self) -> Option { - self.0.pollable() - } - - #[cfg(windows)] - fn pollable(&self) -> Option { - self.0.pollable() - } - - fn isatty(&self) -> bool { - self.0.isatty() - } - - async fn datasync(&self) -> Result<(), Error> { - self.0.datasync().await - } - - async fn sync(&self) -> Result<(), Error> { - self.0.sync().await - } - - async fn get_fdflags(&self) -> Result { - self.0.get_fdflags().await - } - - async fn set_fdflags(&mut self, _flags: FdFlags) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn get_filestat(&self) -> Result { - self.0.get_filestat().await - } - - async fn set_filestat_size(&self, _size: u64) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn advise(&self, offset: u64, len: u64, advice: Advice) -> Result<(), Error> { - self.0.advise(offset, len, advice).await - } - - async fn set_times( - &self, - _atime: Option, - _mtime: Option, - ) -> Result<(), Error> { - Err(Error::perm()) - } - - async fn read_vectored<'a>(&self, bufs: &mut [std::io::IoSliceMut<'a>]) -> Result { - self.0.read_vectored(bufs).await - } 
- - async fn read_vectored_at<'a>( - &self, - bufs: &mut [std::io::IoSliceMut<'a>], - offset: u64, - ) -> Result { - self.0.read_vectored_at(bufs, offset).await - } - - async fn write_vectored_at<'a>( - &self, - _bufs: &[std::io::IoSlice<'a>], - _offset: u64, - ) -> Result { - Err(Error::perm()) - } - - async fn seek(&self, pos: std::io::SeekFrom) -> Result { - self.0.seek(pos).await - } - - async fn peek(&self, buf: &mut [u8]) -> Result { - self.0.peek(buf).await - } - - fn num_ready_bytes(&self) -> Result { - self.0.num_ready_bytes() - } - - async fn readable(&self) -> Result<(), Error> { - self.0.readable().await - } - - async fn writable(&self) -> Result<(), Error> { - Err(Error::perm()) - } -} diff --git a/crates/core/src/store.rs b/crates/core/src/store.rs index eeb36006bb..9afbd3cfd3 100644 --- a/crates/core/src/store.rs +++ b/crates/core/src/store.rs @@ -1,70 +1,11 @@ -use anyhow::{anyhow, Result}; -use bytes::Bytes; -use cap_primitives::net::Pool; -use cap_std::ipnet::{IpNet, Ipv4Net, Ipv6Net}; -use std::{ - io::{Read, Write}, - mem, - net::{Ipv4Addr, Ipv6Addr}, - path::{Path, PathBuf}, - sync::{Arc, Mutex}, - time::{Duration, Instant}, -}; -use system_interface::io::ReadReady; -use tokio::io::{AsyncRead, AsyncWrite}; -use wasi_common_preview1 as wasi_preview1; -use wasmtime_wasi::{ - self as wasi_preview2, HostInputStream, HostOutputStream, StdinStream, StdoutStream, - StreamError, StreamResult, Subscribe, -}; -use wasmtime_wasi_http::types::WasiHttpCtx; +use anyhow::Result; +use std::time::{Duration, Instant}; -use crate::{ - async_trait, - host_component::{HostComponents, HostComponentsData}, - io::OutputBuffer, - limits::StoreLimitsAsync, - preview1, Data, -}; +use crate::{limits::StoreLimitsAsync, State}; #[cfg(doc)] use crate::EngineBuilder; -/// Wrapper for the Preview 1 and Preview 2 versions of `WasiCtx`. -/// -/// Currently, only WAGI uses Preview 1, while everything else uses Preview 2 (possibly via an adapter). WAGI is -/// stuck on Preview 1 and modules because there's no reliable way to wrap an arbitrary Preview 1 command in a -/// component -- the Preview 1 -> 2 adapter only works with modules that either export `canonical_abi_realloc` -/// (e.g. native Spin apps) or use a recent version of `wasi-sdk`, which contains patches to allow the adapter to -/// safely allocate memory via `memory.grow`. -/// -/// In theory, someone could build a WAGI app using a new-enough version of `wasi-sdk` and wrap it in a component -/// using the adapter, but that wouldn't add any value beyond leaving it as a module, and any toolchain capable of -/// natively producing components will be capable enough to produce native Spin apps, so we probably won't ever -/// support WAGI components. -/// -// TODO: As of this writing, the plan is to merge the WASI Preview 1 and Preview 2 implementations together, at -// which point we'll be able to avoid all the duplication here and below. -pub enum Wasi { - /// Preview 1 `WasiCtx` - Preview1(wasi_preview1::WasiCtx), - /// Preview 2 `WasiCtx` - Preview2 { - /// `wasi-cli` context - wasi_ctx: wasi_preview2::WasiCtx, - - /// `wasi-http` context - wasi_http_ctx: WasiHttpCtx, - }, -} - -/// The version of Wasi being used -#[allow(missing_docs)] -pub enum WasiVersion { - Preview1, - Preview2, -} - /// A `Store` holds the runtime state of a Spin instance. /// /// In general, a `Store` is expected to live only for the lifetime of a single @@ -72,16 +13,11 @@ pub enum WasiVersion { /// /// A `Store` can be built with a [`StoreBuilder`]. 
pub struct Store { - inner: wasmtime::Store>, + inner: wasmtime::Store, epoch_tick_interval: Duration, } impl Store { - /// Returns a mutable reference to the [`HostComponentsData`] of this [`Store`]. - pub fn host_components_data(&mut self) -> &mut HostComponentsData { - &mut self.inner.data_mut().host_components_data - } - /// Sets the execution deadline. /// /// This is a rough deadline; an instance will trap some time after this @@ -102,22 +38,32 @@ impl Store { }; self.inner.set_epoch_deadline(ticks); } + + /// Provides access to the inner [`wasmtime::Store`]'s data. + pub fn data(&self) -> &T { + self.inner.data() + } + + /// Provides access to the inner [`wasmtime::Store`]'s data. + pub fn data_mut(&mut self) -> &mut T { + self.inner.data_mut() + } } -impl AsRef>> for Store { - fn as_ref(&self) -> &wasmtime::Store> { +impl AsRef> for Store { + fn as_ref(&self) -> &wasmtime::Store { &self.inner } } -impl AsMut>> for Store { - fn as_mut(&mut self) -> &mut wasmtime::Store> { +impl AsMut> for Store { + fn as_mut(&mut self) -> &mut wasmtime::Store { &mut self.inner } } impl wasmtime::AsContext for Store { - type Data = Data; + type Data = T; fn as_context(&self) -> wasmtime::StoreContext<'_, Self::Data> { self.inner.as_context() @@ -136,27 +82,16 @@ impl wasmtime::AsContextMut for Store { pub struct StoreBuilder { engine: wasmtime::Engine, epoch_tick_interval: Duration, - wasi: std::result::Result, - host_components_data: HostComponentsData, store_limits: StoreLimitsAsync, - net_pool: Pool, } impl StoreBuilder { // Called by Engine::store_builder. - pub(crate) fn new( - engine: wasmtime::Engine, - epoch_tick_interval: Duration, - host_components: &HostComponents, - wasi: WasiVersion, - ) -> Self { + pub(crate) fn new(engine: wasmtime::Engine, epoch_tick_interval: Duration) -> Self { Self { engine, epoch_tick_interval, - wasi: Ok(wasi.into()), - host_components_data: host_components.new_data(), store_limits: StoreLimitsAsync::default(), - net_pool: Pool::default(), } } @@ -168,281 +103,15 @@ impl StoreBuilder { self.store_limits = StoreLimitsAsync::new(Some(max_memory_size), None); } - /// Inherit stdin from the host process. - pub fn inherit_stdin(&mut self) { - self.with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(ctx) => { - ctx.set_stdin(Box::new(wasi_common_preview1::tokio::stdio::stdin())) - } - WasiCtxBuilder::Preview2(ctx) => { - ctx.inherit_stdin(); - } - }); - } - - /// Insert IP network with a given port range - pub fn insert_ip_net_port_range( - &mut self, - ip_net: IpNet, - ports_start: u16, - ports_end: Option, - ) { - self.with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(_) => { - panic!("Enabling network only allowed in preview2") - } - WasiCtxBuilder::Preview2(_) => {} - }); - - self.net_pool.insert_ip_net_port_range( - ip_net, - ports_start, - ports_end, - cap_primitives::ambient_authority(), - ); - } - - /// Allow unrestricted outbound access to the host network. - pub fn inherit_network(&mut self) { - self.with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(_) => { - panic!("Enabling network only allowed in preview2") - } - WasiCtxBuilder::Preview2(_) => { - // TODO: ctx.allow_udp(false); - } - }); - - // Allow access to 0.0.0.0/0, i.e. all IPv4 addresses - self.net_pool.insert_ip_net_port_any( - IpNet::V4(Ipv4Net::new(Ipv4Addr::new(0, 0, 0, 0), 0).unwrap()), - cap_primitives::ambient_authority(), - ); - // Allow access to 0:/0, i.e. 
all IPv6 addresses - self.net_pool.insert_ip_net_port_any( - IpNet::V6(Ipv6Net::new(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 0), 0).unwrap()), - cap_primitives::ambient_authority(), - ); - } - - /// Sets the WASI `stdin` descriptor to the given [`Read`]er. - pub fn stdin_pipe( - &mut self, - r: impl AsyncRead + Read + ReadReady + Send + Sync + Unpin + 'static, - ) { - self.with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(ctx) => { - ctx.set_stdin(Box::new(wasi_preview1::pipe::ReadPipe::new(r))) - } - WasiCtxBuilder::Preview2(ctx) => { - ctx.stdin(PipeStdinStream::new(r)); - } - }) - } - - /// Inherit stdin from the host process. - pub fn inherit_stdout(&mut self) { - self.with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(ctx) => { - ctx.set_stdout(Box::new(wasi_common_preview1::tokio::stdio::stdout())) - } - WasiCtxBuilder::Preview2(ctx) => { - ctx.inherit_stdout(); - } - }); - } - - /// Sets the WASI `stdout` descriptor to the given [`Write`]er. - pub fn stdout(&mut self, w: Box) -> Result<()> { - self.try_with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(ctx) => { - ctx.set_stdout(w); - Ok(()) - } - WasiCtxBuilder::Preview2(_) => Err(anyhow!( - "`Store::stdout` only supported with WASI Preview 1" - )), - }) - } - - /// Sets the WASI `stdout` descriptor to the given [`Write`]er. - pub fn stdout_pipe(&mut self, w: impl AsyncWrite + Write + Send + Sync + Unpin + 'static) { - self.with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(ctx) => { - ctx.set_stdout(Box::new(wasi_preview1::pipe::WritePipe::new(w))) - } - WasiCtxBuilder::Preview2(ctx) => { - ctx.stdout(PipeStdoutStream::new(w)); - } - }) - } - - /// Sets the WASI `stdout` descriptor to an in-memory buffer which can be - /// retrieved after execution from the returned [`OutputBuffer`]. - pub fn stdout_buffered(&mut self) -> Result { - let buffer = OutputBuffer::default(); - // This only needs to work with Preview 2 since WAGI does its own thing with Preview 1: - self.try_with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(_) => Err(anyhow!( - "`Store::stdout_buffered` only supported with WASI Preview 2" - )), - WasiCtxBuilder::Preview2(ctx) => { - ctx.stdout(BufferStdoutStream(buffer.clone())); - Ok(()) - } - })?; - Ok(buffer) - } - - /// Inherit stdin from the host process. - pub fn inherit_stderr(&mut self) { - self.with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(ctx) => { - ctx.set_stderr(Box::new(wasi_common_preview1::tokio::stdio::stderr())) - } - WasiCtxBuilder::Preview2(ctx) => { - ctx.inherit_stderr(); - } - }); - } - - /// Sets the WASI `stderr` descriptor to the given [`Write`]er. - pub fn stderr_pipe(&mut self, w: impl AsyncWrite + Write + Send + Sync + Unpin + 'static) { - self.with_wasi(|wasi| match wasi { - WasiCtxBuilder::Preview1(ctx) => { - ctx.set_stderr(Box::new(wasi_preview1::pipe::WritePipe::new(w))) - } - WasiCtxBuilder::Preview2(ctx) => { - ctx.stderr(PipeStdoutStream::new(w)); - } - }) - } - - /// Appends the given strings to the the WASI 'args'. - pub fn args<'b>(&mut self, args: impl IntoIterator) -> Result<()> { - self.try_with_wasi(|wasi| { - for arg in args { - match wasi { - WasiCtxBuilder::Preview1(ctx) => ctx.push_arg(arg)?, - WasiCtxBuilder::Preview2(ctx) => { - ctx.arg(arg); - } - } - } - Ok(()) - }) - } - - /// Sets the given key/value string entries on the the WASI 'env'. 
- pub fn env( - &mut self, - vars: impl IntoIterator, impl AsRef)>, - ) -> Result<()> { - self.try_with_wasi(|wasi| { - for (k, v) in vars { - match wasi { - WasiCtxBuilder::Preview1(ctx) => ctx.push_env(k.as_ref(), v.as_ref())?, - WasiCtxBuilder::Preview2(ctx) => { - ctx.env(k, v); - } - } - } - - Ok(()) - }) - } - - /// "Mounts" the given `host_path` into the WASI filesystem at the given - /// `guest_path` with read-only capabilities. - pub fn read_only_preopened_dir( - &mut self, - host_path: impl AsRef, - guest_path: PathBuf, - ) -> Result<()> { - self.preopened_dir_impl(host_path, guest_path, false) - } - - /// "Mounts" the given `host_path` into the WASI filesystem at the given - /// `guest_path` with read and write capabilities. - pub fn read_write_preopened_dir( - &mut self, - host_path: impl AsRef, - guest_path: PathBuf, - ) -> Result<()> { - self.preopened_dir_impl(host_path, guest_path, true) - } - - fn preopened_dir_impl( - &mut self, - host_path: impl AsRef, - guest_path: PathBuf, - writable: bool, - ) -> Result<()> { - let cap_std_dir = - cap_std::fs::Dir::open_ambient_dir(host_path.as_ref(), cap_std::ambient_authority())?; - let path = guest_path - .to_str() - .ok_or_else(|| anyhow!("non-utf8 path: {}", guest_path.display()))?; - - self.try_with_wasi(|wasi| { - match wasi { - WasiCtxBuilder::Preview1(ctx) => { - let mut dir = - Box::new(wasi_common_preview1::tokio::Dir::from_cap_std(cap_std_dir)) as _; - if !writable { - dir = Box::new(preview1::ReadOnlyDir(dir)); - } - ctx.push_preopened_dir(dir, path)?; - } - WasiCtxBuilder::Preview2(ctx) => { - let dir_perms = if writable { - wasi_preview2::DirPerms::all() - } else { - wasi_preview2::DirPerms::READ - }; - let file_perms = wasi_preview2::FilePerms::all(); - - ctx.preopened_dir(host_path.as_ref(), path, dir_perms, file_perms)?; - } - } - Ok(()) - }) - } - - /// Returns a mutable reference to the built - pub fn host_components_data(&mut self) -> &mut HostComponentsData { - &mut self.host_components_data - } - /// Builds a [`Store`] from this builder with given host state data. /// - /// If `T: Default`, it may be preferable to use [`Store::build`]. - pub fn build_with_data(mut self, inner_data: T) -> Result> { - let net_pool = mem::take(&mut self.net_pool); - self.with_wasi(move |wasi| match wasi { - WasiCtxBuilder::Preview1(_) => {} - WasiCtxBuilder::Preview2(ctx) => { - ctx.socket_addr_check(move |addr, _| { - let net_pool = net_pool.clone(); - Box::pin(async move { net_pool.check_addr(&addr).is_ok() }) - }); - } - }); - - let wasi = self.wasi.map_err(anyhow::Error::msg)?.build(); + /// The `T` parameter must provide access to a [`State`] via `impl + /// AsMut`. + pub fn build(self, mut data: T) -> Result> { + data.as_state().store_limits = self.store_limits; - let mut inner = wasmtime::Store::new( - &self.engine, - Data { - inner: inner_data, - wasi, - host_components_data: self.host_components_data, - store_limits: self.store_limits, - table: wasi_preview2::ResourceTable::new(), - }, - ); - - inner.limiter_async(move |data| &mut data.store_limits); + let mut inner = wasmtime::Store::new(&self.engine, data); + inner.limiter_async(|data| &mut data.as_state().store_limits); // With epoch interruption enabled, there must be _some_ deadline set // or execution will trap immediately. Since this is a delta, we need @@ -455,177 +124,17 @@ impl StoreBuilder { epoch_tick_interval: self.epoch_tick_interval, }) } - - /// Builds a [`Store`] from this builder with `Default` host state data. 
- pub fn build(self) -> Result> { - self.build_with_data(T::default()) - } - - fn with_wasi(&mut self, f: impl FnOnce(&mut WasiCtxBuilder)) { - let _ = self.try_with_wasi(|wasi| { - f(wasi); - Ok(()) - }); - } - - fn try_with_wasi(&mut self, f: impl FnOnce(&mut WasiCtxBuilder) -> Result<()>) -> Result<()> { - let wasi = self - .wasi - .as_mut() - .map_err(|err| anyhow!("StoreBuilder already failed: {}", err))?; - - match f(wasi) { - Ok(()) => Ok(()), - Err(err) => { - self.wasi = Err(err.to_string()); - Err(err) - } - } - } -} - -struct PipeStdinStream { - buffer: Vec, - inner: Arc>, -} - -impl PipeStdinStream { - fn new(inner: T) -> Self { - Self { - buffer: vec![0_u8; 64 * 1024], - inner: Arc::new(Mutex::new(inner)), - } - } -} - -impl Clone for PipeStdinStream { - fn clone(&self) -> Self { - Self { - buffer: vec![0_u8; 64 * 1024], - inner: self.inner.clone(), - } - } -} - -impl HostInputStream for PipeStdinStream { - fn read(&mut self, size: usize) -> StreamResult { - let size = size.min(self.buffer.len()); - - let count = self - .inner - .lock() - .unwrap() - .read(&mut self.buffer[..size]) - .map_err(|e| StreamError::LastOperationFailed(anyhow::anyhow!(e)))?; - - Ok(Bytes::copy_from_slice(&self.buffer[..count])) - } -} - -#[async_trait] -impl Subscribe for PipeStdinStream { - async fn ready(&mut self) {} -} - -impl StdinStream for PipeStdinStream { - fn stream(&self) -> Box { - Box::new(self.clone()) - } - - fn isatty(&self) -> bool { - false - } -} - -struct PipeStdoutStream(Arc>); - -impl Clone for PipeStdoutStream { - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl PipeStdoutStream { - fn new(inner: T) -> Self { - Self(Arc::new(Mutex::new(inner))) - } -} - -impl HostOutputStream for PipeStdoutStream { - fn write(&mut self, bytes: Bytes) -> Result<(), StreamError> { - self.0 - .lock() - .unwrap() - .write_all(&bytes) - .map_err(|e| StreamError::LastOperationFailed(anyhow::anyhow!(e))) - } - - fn flush(&mut self) -> Result<(), StreamError> { - self.0 - .lock() - .unwrap() - .flush() - .map_err(|e| StreamError::LastOperationFailed(anyhow::anyhow!(e))) - } - - fn check_write(&mut self) -> Result { - Ok(1024 * 1024) - } } -impl StdoutStream for PipeStdoutStream { - fn stream(&self) -> Box { - Box::new(self.clone()) - } - - fn isatty(&self) -> bool { - false - } +/// For consumers that need to use a type other than [`State`] as the [`Store`] +/// `data`, this trait must be implemented for that type. +pub trait AsState { + /// Gives access to the inner [`State`]. 
+ fn as_state(&mut self) -> &mut State; } -#[async_trait] -impl Subscribe for PipeStdoutStream { - async fn ready(&mut self) {} -} - -struct BufferStdoutStream(OutputBuffer); - -impl StdoutStream for BufferStdoutStream { - fn stream(&self) -> Box { - Box::new(self.0.writer()) - } - - fn isatty(&self) -> bool { - false - } -} - -/// A builder of a `WasiCtx` for all versions of Wasi -#[allow(clippy::large_enum_variant)] -enum WasiCtxBuilder { - Preview1(wasi_preview1::WasiCtx), - Preview2(wasi_preview2::WasiCtxBuilder), -} - -impl From for WasiCtxBuilder { - fn from(value: WasiVersion) -> Self { - match value { - WasiVersion::Preview1 => { - Self::Preview1(wasi_common_preview1::tokio::WasiCtxBuilder::new().build()) - } - WasiVersion::Preview2 => Self::Preview2(wasi_preview2::WasiCtxBuilder::new()), - } - } -} - -impl WasiCtxBuilder { - fn build(self) -> Wasi { - match self { - WasiCtxBuilder::Preview1(ctx) => Wasi::Preview1(ctx), - WasiCtxBuilder::Preview2(mut b) => Wasi::Preview2 { - wasi_ctx: b.build(), - wasi_http_ctx: WasiHttpCtx::new(), - }, - } +impl AsState for State { + fn as_state(&mut self) -> &mut State { + self } } diff --git a/crates/core/tests/core-wasi-test/src/main.rs b/crates/core/tests/core-wasi-test/src/main.rs index a1bc0ed28d..77a76c2a1d 100644 --- a/crates/core/tests/core-wasi-test/src/main.rs +++ b/crates/core/tests/core-wasi-test/src/main.rs @@ -5,11 +5,6 @@ use std::time::Duration; -wit_bindgen::generate!({ - world: "multiplier", - path: "wit/multiplier.wit" -}); - type Result = std::result::Result<(), Box>; fn main() -> Result { @@ -44,12 +39,6 @@ fn main() -> Result { eprintln!("write {path}"); std::fs::write(path, "content")?; } - "multiply" => { - let input: i32 = args.next().expect("input").parse().expect("i32"); - eprintln!("multiply {input}"); - let output = imports::multiply(input); - println!("{output}"); - } "sleep" => { let duration = Duration::from_millis(args.next().expect("duration_ms").parse().expect("u64")); diff --git a/crates/core/tests/integration_test.rs b/crates/core/tests/integration_test.rs index d8b24ced78..ad47912b4d 100644 --- a/crates/core/tests/integration_test.rs +++ b/crates/core/tests/integration_test.rs @@ -1,86 +1,28 @@ use std::{ - io::Cursor, path::PathBuf, time::{Duration, Instant}, }; use anyhow::Context; -use spin_core::{ - Component, Config, Engine, HostComponent, I32Exit, Store, StoreBuilder, Trap, WasiVersion, -}; -use tempfile::TempDir; +use serde_json::json; +use spin_core::{AsState, Component, Config, Engine, State, Store, StoreBuilder, Trap}; +use spin_factor_wasi::{DummyFilesMounter, WasiFactor}; +use spin_factors::{App, AsInstanceState, RuntimeFactors}; +use spin_locked_app::locked::LockedApp; use tokio::{fs, io::AsyncWrite}; - -#[tokio::test(flavor = "multi_thread")] -async fn test_stdio() { - let stdout = run_core_wasi_test(["echo"], |store_builder| { - store_builder.stdin_pipe(Cursor::new(b"DATA")); - }) - .await - .unwrap(); - - assert_eq!(stdout, "DATA"); -} - -#[tokio::test(flavor = "multi_thread")] -async fn test_read_only_preopened_dir() { - let filename = "test_file"; - let tempdir = TempDir::new().unwrap(); - std::fs::write(tempdir.path().join(filename), "x").unwrap(); - - run_core_wasi_test(["read", filename], |store_builder| { - store_builder - .read_only_preopened_dir(&tempdir, "/".into()) - .unwrap(); - }) - .await - .unwrap(); -} - -#[tokio::test(flavor = "multi_thread")] -async fn test_read_only_preopened_dir_write_fails() { - let filename = "test_file"; - let tempdir = TempDir::new().unwrap(); - 
std::fs::write(tempdir.path().join(filename), "x").unwrap(); - - let err = run_core_wasi_test(["write", filename], |store_builder| { - store_builder - .read_only_preopened_dir(&tempdir, "/".into()) - .unwrap(); - }) - .await - .unwrap_err(); - let trap = err - .root_cause() // The error returned is a backtrace. We need the root cause. - .downcast_ref::() - .expect("trap error was not an I32Exit"); - assert_eq!(trap.0, 1); -} - -#[tokio::test(flavor = "multi_thread")] -async fn test_read_write_preopened_dir() { - let filename = "test_file"; - let tempdir = TempDir::new().unwrap(); - - run_core_wasi_test(["write", filename], |store_builder| { - store_builder - .read_write_preopened_dir(&tempdir, "/".into()) - .unwrap(); - }) - .await - .unwrap(); - - let content = std::fs::read(tempdir.path().join(filename)).unwrap(); - assert_eq!(content, b"content"); -} +use wasmtime_wasi::I32Exit; #[tokio::test(flavor = "multi_thread")] async fn test_max_memory_size_obeyed() { let max = 10_000_000; let alloc = max / 10; - run_core_wasi_test(["alloc", &format!("{alloc}")], |store_builder| { - store_builder.max_memory_size(max); - }) + run_test( + ["alloc", &format!("{alloc}")], + |store_builder| { + store_builder.max_memory_size(max); + }, + |_| {}, + ) .await .unwrap(); } @@ -89,9 +31,13 @@ async fn test_max_memory_size_obeyed() { async fn test_max_memory_size_violated() { let max = 10_000_000; let alloc = max * 2; - let err = run_core_wasi_test(["alloc", &format!("{alloc}")], |store_builder| { - store_builder.max_memory_size(max); - }) + let err = run_test( + ["alloc", &format!("{alloc}")], + |store_builder| { + store_builder.max_memory_size(max); + }, + |_| {}, + ) .await .unwrap_err(); let trap = err @@ -101,14 +47,14 @@ async fn test_max_memory_size_violated() { assert_eq!(trap.0, 1); } +// FIXME: racy timing test #[tokio::test(flavor = "multi_thread")] async fn test_set_deadline_obeyed() { - run_core_wasi_test_engine( - &test_engine(), + run_test( ["sleep", "20"], |_| {}, |store| { - store.set_deadline(Instant::now() + Duration::from_millis(1000)); + store.set_deadline(Instant::now() + Duration::from_millis(10000)); }, ) .await @@ -117,8 +63,7 @@ async fn test_set_deadline_obeyed() { #[tokio::test(flavor = "multi_thread")] async fn test_set_deadline_violated() { - let err = run_core_wasi_test_engine( - &test_engine(), + let err = run_test( ["sleep", "100"], |_| {}, |store| { @@ -132,81 +77,79 @@ async fn test_set_deadline_violated() { } #[tokio::test(flavor = "multi_thread")] -async fn test_host_component() { - let stdout = run_core_wasi_test(["multiply", "5"], |_| {}).await.unwrap(); - assert_eq!(stdout, "10"); +async fn test_panic() { + let err = run_test(["panic"], |_| {}, |_| {}).await.unwrap_err(); + let trap = err.downcast::().expect("trap"); + assert_eq!(trap, Trap::UnreachableCodeReached); } -#[tokio::test(flavor = "multi_thread")] -async fn test_host_component_data_update() { - let engine = test_engine(); - let multiplier_handle = engine - .find_host_component_handle::() - .unwrap(); +#[derive(RuntimeFactors)] +struct TestFactors { + wasi: WasiFactor, +} - let stdout = run_core_wasi_test_engine( - &engine, - ["multiply", "5"], - |store_builder| { - store_builder - .host_components_data() - .set(multiplier_handle, Multiplier(100)); - }, - |_| {}, - ) - .await - .unwrap(); - assert_eq!(stdout, "500"); +struct TestState { + core: State, + factors: TestFactorsInstanceState, } -#[tokio::test(flavor = "multi_thread")] -async fn test_panic() { - let err = run_core_wasi_test(["panic"], |_| 
{}).await.unwrap_err(); - let trap = err.downcast::().expect("trap"); - assert_eq!(trap, Trap::UnreachableCodeReached); +impl AsState for TestState { + fn as_state(&mut self) -> &mut State { + &mut self.core + } } -fn test_config() -> Config { +impl AsInstanceState for TestState { + fn as_instance_state(&mut self) -> &mut TestFactorsInstanceState { + &mut self.factors + } +} + +async fn run_test( + args: impl IntoIterator, + update_store_builder: impl FnOnce(&mut StoreBuilder), + update_store: impl FnOnce(&mut Store), +) -> anyhow::Result<()> { + let mut factors = TestFactors { + wasi: WasiFactor::new(DummyFilesMounter), + }; + let mut config = Config::default(); config .wasmtime_config() .wasm_backtrace_details(wasmtime::WasmBacktraceDetails::Enable); - config -} -fn test_engine() -> Engine<()> { - let mut builder = Engine::builder(&test_config()).unwrap(); - builder.add_host_component(MultiplierHostComponent).unwrap(); - builder - .link_import(|l, _| wasmtime_wasi::add_to_linker_async(l)) - .unwrap(); - builder - .link_import(|l, _| spin_core::wasi_2023_10_18::add_to_linker(l)) - .unwrap(); - builder.build() -} + let mut builder = Engine::builder(&config).unwrap(); + factors.init(builder.linker())?; + let engine = builder.build(); -async fn run_core_wasi_test<'a>( - args: impl IntoIterator, - f: impl FnOnce(&mut StoreBuilder), -) -> anyhow::Result { - run_core_wasi_test_engine(&test_engine(), args, f, |_| {}).await -} + let mut store_builder = engine.store_builder(); + update_store_builder(&mut store_builder); -async fn run_core_wasi_test_engine<'a>( - engine: &Engine<()>, - args: impl IntoIterator, - update_store_builder: impl FnOnce(&mut StoreBuilder), - update_store: impl FnOnce(&mut Store<()>), -) -> anyhow::Result { - let mut store_builder: StoreBuilder = engine.store_builder(WasiVersion::Preview2); - let stdout_buf = store_builder.stdout_buffered()?; - store_builder.stderr_pipe(TestWriter(tokio::io::stdout())); - store_builder.args(args)?; + let locked: LockedApp = serde_json::from_value(json!({ + "spin_lock_version": 1, + "triggers": [], + "components": [{ + "id": "test-component", + "source": { + "content_type": "application/wasm", + "content": {}, + }, + }] + }))?; + let app = App::new("test-app", locked); + let configured_app = factors.configure_app(app, Default::default())?; + let mut builders = factors.prepare(&configured_app, "test-component")?; + builders.wasi().args(args); + let instance_state = factors.build_instance_state(builders)?; + let state = TestState { + core: State::default(), + factors: instance_state, + }; - update_store_builder(&mut store_builder); + let mut store = store_builder.build(state)?; + update_store(&mut store); - let mut store = store_builder.build()?; let module_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("../../target/test-programs/core-wasi-test.wasm"); let component = spin_componentize::componentize_command(&fs::read(module_path).await?)?; @@ -221,48 +164,11 @@ async fn run_core_wasi_test_engine<'a>( .context("missing the expected 'wasi:cli/run@0.2.0' instance")?; instance.typed_func::<(), (Result<(), ()>,)>("run")? }; - update_store(&mut store); func.call_async(&mut store, ()) .await? .0 - .map_err(|()| anyhow::anyhow!("command failed"))?; - - let stdout = String::from_utf8(stdout_buf.contents().to_vec())? 
- .trim_end() - .into(); - Ok(stdout) -} - -// Simple test HostComponent; multiplies the input by the configured factor -#[derive(Clone)] -struct MultiplierHostComponent; - -mod multiplier { - wasmtime::component::bindgen!("multiplier" in "tests/core-wasi-test/wit"); -} - -impl HostComponent for MultiplierHostComponent { - type Data = Multiplier; - - fn add_to_linker( - linker: &mut spin_core::Linker, - get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> anyhow::Result<()> { - multiplier::imports::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - Multiplier(2) - } -} - -struct Multiplier(i32); - -impl multiplier::imports::Host for Multiplier { - fn multiply(&mut self, a: i32) -> i32 { - self.0 * a - } + .map_err(|()| anyhow::anyhow!("command failed")) } // Write with `print!`, required for test output capture diff --git a/crates/expressions/src/lib.rs b/crates/expressions/src/lib.rs index 612c0696bb..350914b558 100644 --- a/crates/expressions/src/lib.rs +++ b/crates/expressions/src/lib.rs @@ -5,6 +5,8 @@ use std::{borrow::Cow, collections::HashMap, fmt::Debug}; use spin_locked_app::Variable; +pub use async_trait; + pub use provider::Provider; use template::Part; pub use template::Template; @@ -251,13 +253,21 @@ impl<'a> Key<'a> { } } +impl<'a> TryFrom<&'a str> for Key<'a> { + type Error = Error; + + fn try_from(value: &'a str) -> std::prelude::v1::Result { + Self::new(value) + } +} + impl<'a> AsRef for Key<'a> { fn as_ref(&self) -> &str { self.0 } } -type Result = std::result::Result; +pub type Result = std::result::Result; /// A variable resolution error. #[derive(Debug, thiserror::Error)] diff --git a/crates/factor-key-value-azure/Cargo.toml b/crates/factor-key-value-azure/Cargo.toml new file mode 100644 index 0000000000..318855ff98 --- /dev/null +++ b/crates/factor-key-value-azure/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "spin-factor-key-value-azure" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +rust-version.workspace = true + +[dependencies] +anyhow = "1.0" +serde = { version = "1.0", features = ["rc"] } +spin-factor-key-value = { path = "../factor-key-value" } +# TODO: merge with this crate +spin-key-value-azure = { path = "../key-value-azure" } + +[lints] +workspace = true diff --git a/crates/factor-key-value-azure/src/lib.rs b/crates/factor-key-value-azure/src/lib.rs new file mode 100644 index 0000000000..cad2da1ee8 --- /dev/null +++ b/crates/factor-key-value-azure/src/lib.rs @@ -0,0 +1,58 @@ +use serde::Deserialize; +use spin_factor_key_value::runtime_config::spin::MakeKeyValueStore; +use spin_key_value_azure::{ + KeyValueAzureCosmos, KeyValueAzureCosmosAuthOptions, KeyValueAzureCosmosRuntimeConfigOptions, +}; + +/// A key-value store that uses Azure Cosmos as the backend. +#[derive(Default)] +pub struct AzureKeyValueStore { + _priv: (), +} + +impl AzureKeyValueStore { + /// Creates a new `AzureKeyValueStore`. + pub fn new() -> Self { + Self::default() + } +} + +/// Runtime configuration for the Azure Cosmos key-value store. +#[derive(Deserialize)] +pub struct AzureCosmosKeyValueRuntimeConfig { + /// The authorization token for the Azure Cosmos DB account. + key: Option, + /// The Azure Cosmos DB account name. + account: String, + /// The Azure Cosmos DB database. + database: String, + /// The Azure Cosmos DB container where data is stored. 
+ /// The CosmosDB container must be created with the default partition key, /id + container: String, +} + +impl MakeKeyValueStore for AzureKeyValueStore { + const RUNTIME_CONFIG_TYPE: &'static str = "azure_cosmos"; + + type RuntimeConfig = AzureCosmosKeyValueRuntimeConfig; + + type StoreManager = KeyValueAzureCosmos; + + fn make_store( + &self, + runtime_config: Self::RuntimeConfig, + ) -> anyhow::Result { + let auth_options = match runtime_config.key { + Some(key) => KeyValueAzureCosmosAuthOptions::RuntimeConfigValues( + KeyValueAzureCosmosRuntimeConfigOptions::new(key), + ), + None => KeyValueAzureCosmosAuthOptions::Environmental, + }; + KeyValueAzureCosmos::new( + runtime_config.account, + runtime_config.database, + runtime_config.container, + auth_options, + ) + } +} diff --git a/crates/factor-key-value-redis/Cargo.toml b/crates/factor-key-value-redis/Cargo.toml new file mode 100644 index 0000000000..1c19c58ff5 --- /dev/null +++ b/crates/factor-key-value-redis/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "spin-factor-key-value-redis" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +anyhow = "1.0" +serde = { version = "1.0", features = ["rc"] } +spin-factor-key-value = { path = "../factor-key-value" } +# TODO: merge with this crate +spin-key-value-redis = { path = "../key-value-redis" } + +[lints] +workspace = true diff --git a/crates/factor-key-value-redis/src/lib.rs b/crates/factor-key-value-redis/src/lib.rs new file mode 100644 index 0000000000..67d71ac3e5 --- /dev/null +++ b/crates/factor-key-value-redis/src/lib.rs @@ -0,0 +1,38 @@ +use serde::Deserialize; +use spin_factor_key_value::runtime_config::spin::MakeKeyValueStore; +use spin_key_value_redis::KeyValueRedis; + +/// A key-value store that uses Redis as the backend. +#[derive(Default)] +pub struct RedisKeyValueStore { + _priv: (), +} + +impl RedisKeyValueStore { + /// Creates a new `RedisKeyValueStore`. + pub fn new() -> Self { + Self::default() + } +} + +/// Runtime configuration for the Redis key-value store. +#[derive(Deserialize)] +pub struct RedisKeyValueRuntimeConfig { + /// The URL of the Redis server. 
+ url: String, +} + +impl MakeKeyValueStore for RedisKeyValueStore { + const RUNTIME_CONFIG_TYPE: &'static str = "redis"; + + type RuntimeConfig = RedisKeyValueRuntimeConfig; + + type StoreManager = KeyValueRedis; + + fn make_store( + &self, + runtime_config: Self::RuntimeConfig, + ) -> anyhow::Result { + KeyValueRedis::new(runtime_config.url) + } +} diff --git a/crates/factor-key-value-spin/Cargo.toml b/crates/factor-key-value-spin/Cargo.toml new file mode 100644 index 0000000000..29ca47c3f3 --- /dev/null +++ b/crates/factor-key-value-spin/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "spin-factor-key-value-spin" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +anyhow = "1.0" +serde = { version = "1.0", features = ["rc"] } +spin-factor-key-value = { path = "../factor-key-value" } +# TODO: merge with this crate +spin-key-value-sqlite = { path = "../key-value-sqlite" } + +[lints] +workspace = true diff --git a/crates/factor-key-value-spin/src/lib.rs b/crates/factor-key-value-spin/src/lib.rs new file mode 100644 index 0000000000..636b6c491a --- /dev/null +++ b/crates/factor-key-value-spin/src/lib.rs @@ -0,0 +1,91 @@ +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use anyhow::{bail, Context}; +use serde::{Deserialize, Serialize}; +use spin_factor_key_value::runtime_config::spin::MakeKeyValueStore; +use spin_key_value_sqlite::{DatabaseLocation, KeyValueSqlite}; + +/// A key-value store that uses SQLite as the backend. +pub struct SpinKeyValueStore { + /// The base path or directory for the SQLite database file. + base_path: Option, +} + +impl SpinKeyValueStore { + /// Create a new SpinKeyValueStore with the given base path. + /// + /// If the database directory is None, the database will always be in-memory. + /// If it's `Some`, the database will be stored at the combined `base_path` and + /// the `path` specified in the runtime configuration. + pub fn new(base_path: Option) -> Self { + Self { base_path } + } +} + +impl MakeKeyValueStore for SpinKeyValueStore { + const RUNTIME_CONFIG_TYPE: &'static str = "spin"; + + type RuntimeConfig = SpinKeyValueRuntimeConfig; + + type StoreManager = KeyValueSqlite; + + fn make_store( + &self, + runtime_config: Self::RuntimeConfig, + ) -> anyhow::Result { + let location = match (&self.base_path, &runtime_config.path) { + // If both the base path and the path are specified, resolve the path against the base path + (Some(base_path), Some(path)) => { + let path = resolve_relative_path(path, base_path); + DatabaseLocation::Path(path) + } + // If the base path is `None` but path is an absolute path, use the absolute path + (None, Some(path)) if path.is_absolute() => DatabaseLocation::Path(path.clone()), + // If the base path is `None` but path is a relative path, error out + (None, Some(path)) => { + bail!( + "key-value store path '{}' is relative, but no base path is set", + path.display() + ) + } + // Otherwise, use an in-memory database + (None | Some(_), None) => DatabaseLocation::InMemory, + }; + if let DatabaseLocation::Path(path) = &location { + // Create the store's parent directory if necessary + if let Some(parent) = path.parent().filter(|p| !p.exists()) { + fs::create_dir_all(parent) + .context("Failed to create key value store's parent directory")?; + } + } + Ok(KeyValueSqlite::new(location)) + } +} + +/// The serialized runtime configuration for the SQLite key-value store. 
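// Illustrative only (not part of this change set): with the "spin" store type
// registered, a `[key_value_store]` runtime-config entry such as the following
// (also exercised by the factor tests later in this diff) deserializes into
// the SQLite store's runtime config; a relative `path` is resolved against the
// store's base path in `make_store`:
//
//     [key_value_store.custom]
//     type = "spin"
//     path = "custom.db"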
+#[derive(Deserialize, Serialize)] +pub struct SpinKeyValueRuntimeConfig { + /// The path to the SQLite database file. + path: Option, +} + +impl SpinKeyValueRuntimeConfig { + /// Create a new SpinKeyValueRuntimeConfig with the given parent directory + /// where the key-value store will live. + pub fn new(path: Option) -> Self { + Self { path } + } +} + +/// Resolve a relative path against a base dir. +/// +/// If the path is absolute, it is returned as is. Otherwise, it is resolved against the base dir. +fn resolve_relative_path(path: &Path, base_dir: &Path) -> PathBuf { + if path.is_absolute() { + return path.to_owned(); + } + base_dir.join(path) +} diff --git a/crates/factor-key-value/Cargo.toml b/crates/factor-key-value/Cargo.toml new file mode 100644 index 0000000000..64df403528 --- /dev/null +++ b/crates/factor-key-value/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "spin-factor-key-value" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +anyhow = "1.0" +serde = { version = "1.0", features = ["rc"] } +spin-factors = { path = "../factors" } +# TODO: merge with this crate +spin-key-value = { path = "../key-value" } +spin-world = { path = "../world" } +toml = "0.8" + +[dev-dependencies] +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } +spin-factor-key-value-spin = { path = "../factor-key-value-spin" } +spin-factor-key-value-redis = { path = "../factor-key-value-redis" } +tempfile = "3.12.0" + + +[lints] +workspace = true diff --git a/crates/factor-key-value/src/lib.rs b/crates/factor-key-value/src/lib.rs new file mode 100644 index 0000000000..1e71bb86af --- /dev/null +++ b/crates/factor-key-value/src/lib.rs @@ -0,0 +1,159 @@ +pub mod runtime_config; + +use std::{ + collections::{HashMap, HashSet}, + sync::Arc, +}; + +use anyhow::ensure; +use spin_factors::{ + ConfigureAppContext, Factor, FactorInstanceBuilder, InitContext, InstanceBuilders, + PrepareContext, RuntimeFactors, +}; +use spin_key_value::{ + CachingStoreManager, DefaultManagerGetter, DelegatingStoreManager, KeyValueDispatch, + StoreManager, KEY_VALUE_STORES_KEY, +}; + +pub use runtime_config::RuntimeConfig; + +/// A factor that provides key-value storage. +pub struct KeyValueFactor { + default_label_resolver: Arc, +} + +impl KeyValueFactor { + /// Create a new KeyValueFactor. + /// + /// The `default_label_resolver` is used to resolve store managers for labels that + /// are not defined in the runtime configuration. 
+ pub fn new(default_label_resolver: impl DefaultLabelResolver + 'static) -> Self { + Self { + default_label_resolver: Arc::new(default_label_resolver), + } + } +} + +impl Factor for KeyValueFactor { + type RuntimeConfig = RuntimeConfig; + type AppState = AppState; + type InstanceBuilder = InstanceBuilder; + + fn init(&mut self, mut ctx: InitContext) -> anyhow::Result<()> { + ctx.link_bindings(spin_world::v1::key_value::add_to_linker)?; + ctx.link_bindings(spin_world::v2::key_value::add_to_linker)?; + Ok(()) + } + + fn configure_app( + &self, + mut ctx: ConfigureAppContext, + ) -> anyhow::Result { + let store_managers = ctx.take_runtime_config().unwrap_or_default(); + let default_label_resolver = self.default_label_resolver.clone(); + let default_fn: DefaultManagerGetter = + Arc::new(move |label| default_label_resolver.default(label)); + + let delegating_manager = DelegatingStoreManager::new(store_managers, default_fn); + let caching_manager = CachingStoreManager::new(delegating_manager); + let store_manager_manager = Arc::new(caching_manager); + + // Build component -> allowed stores map + let mut component_allowed_stores = HashMap::new(); + for component in ctx.app().components() { + let component_id = component.id().to_string(); + let key_value_stores = component + .get_metadata(KEY_VALUE_STORES_KEY)? + .unwrap_or_default() + .into_iter() + .collect::>(); + for label in &key_value_stores { + // TODO: port nicer errors from KeyValueComponent (via error type?) + ensure!( + store_manager_manager.is_defined(label) + || self.default_label_resolver.default(label).is_some(), + "unknown key_value_stores label {label:?} for component {component_id:?}" + ); + } + component_allowed_stores.insert(component_id, key_value_stores); + // TODO: warn (?) on unused store? + } + + Ok(AppState { + store_manager: store_manager_manager, + component_allowed_stores, + }) + } + + fn prepare( + &self, + ctx: PrepareContext, + _builders: &mut InstanceBuilders, + ) -> anyhow::Result { + let app_state = ctx.app_state(); + let allowed_stores = app_state + .component_allowed_stores + .get(ctx.app_component().id()) + .expect("component should be in component_stores") + .clone(); + Ok(InstanceBuilder { + store_manager: app_state.store_manager.clone(), + allowed_stores, + }) + } +} + +type AppStoreManager = CachingStoreManager; + +pub struct AppState { + /// The store manager for the app. + /// + /// This is a cache around a delegating store manager. For `get` requests, + /// first checks the cache before delegating to the underlying store + /// manager. + store_manager: Arc, + /// The allowed stores for each component. + /// + /// This is a map from component ID to the set of store labels that the + /// component is allowed to use. + component_allowed_stores: HashMap>, +} + +pub struct InstanceBuilder { + /// The store manager for the app. + /// + /// This is a cache around a delegating store manager. For `get` requests, + /// first checks the cache before delegating to the underlying store + /// manager. + store_manager: Arc, + /// The allowed stores for this component instance. + allowed_stores: HashSet, +} + +impl FactorInstanceBuilder for InstanceBuilder { + type InstanceState = KeyValueDispatch; + + fn build(self) -> anyhow::Result { + let Self { + store_manager, + allowed_stores, + } = self; + let mut dispatch = KeyValueDispatch::new_with_capacity(u32::MAX); + dispatch.init(allowed_stores, store_manager); + Ok(dispatch) + } +} + +/// Resolves a label to a default [`StoreManager`]. 
+pub trait DefaultLabelResolver: Send + Sync { + /// If there is no runtime configuration for a given store label, return a default store manager. + /// + /// If `Option::None` is returned, the store is not allowed. + fn default(&self, label: &str) -> Option>; +} + +impl DefaultLabelResolver for Arc { + fn default(&self, label: &str) -> Option> { + self.as_ref().default(label) + } +} diff --git a/crates/factor-key-value/src/runtime_config.rs b/crates/factor-key-value/src/runtime_config.rs new file mode 100644 index 0000000000..0c83243a31 --- /dev/null +++ b/crates/factor-key-value/src/runtime_config.rs @@ -0,0 +1,30 @@ +pub mod spin; + +use std::{collections::HashMap, sync::Arc}; + +use spin_key_value::StoreManager; + +/// Runtime configuration for all key value stores. +#[derive(Default, Clone)] +pub struct RuntimeConfig { + /// Map of store names to store managers. + store_managers: HashMap>, +} + +impl RuntimeConfig { + /// Adds a store manager for the store with the given label to the runtime configuration. + /// + /// If a store manager already exists for the given label, it will be replaced. + pub fn add_store_manager(&mut self, label: String, store_manager: Arc) { + self.store_managers.insert(label, store_manager); + } +} + +impl IntoIterator for RuntimeConfig { + type Item = (String, Arc); + type IntoIter = std::collections::hash_map::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.store_managers.into_iter() + } +} diff --git a/crates/factor-key-value/src/runtime_config/spin.rs b/crates/factor-key-value/src/runtime_config/spin.rs new file mode 100644 index 0000000000..64c4e1d57f --- /dev/null +++ b/crates/factor-key-value/src/runtime_config/spin.rs @@ -0,0 +1,164 @@ +//! Runtime configuration implementation used by Spin CLI. + +use crate::{DefaultLabelResolver, RuntimeConfig}; +use anyhow::Context as _; +use serde::de::DeserializeOwned; +use serde::{Deserialize, Serialize}; +use spin_factors::runtime_config::toml::GetTomlValue; +use spin_key_value::StoreManager; +use std::{collections::HashMap, sync::Arc}; + +/// Defines the construction of a key value store from a serialized runtime config. +pub trait MakeKeyValueStore: 'static + Send + Sync { + /// Unique type identifier for the store. + const RUNTIME_CONFIG_TYPE: &'static str; + /// Runtime configuration for the store. + type RuntimeConfig: DeserializeOwned; + /// The store manager for the store. + type StoreManager: StoreManager; + + /// Creates a new store manager from the runtime configuration. + fn make_store(&self, runtime_config: Self::RuntimeConfig) + -> anyhow::Result; +} + +/// A function that creates a store manager from a TOML table. +type StoreFromToml = + Arc anyhow::Result> + Send + Sync>; + +/// Creates a `StoreFromToml` function from a `MakeKeyValueStore` implementation. +fn store_from_toml_fn(provider_type: T) -> StoreFromToml { + Arc::new(move |table| { + let runtime_config: T::RuntimeConfig = table + .try_into() + .context("could not parse key-value runtime config")?; + let provider = provider_type + .make_store(runtime_config) + .context("could not make key-value store from runtime config")?; + Ok(Arc::new(provider)) + }) +} + +/// Converts from toml based runtime configuration into a [`RuntimeConfig`]. +/// +/// Also acts as [`DefaultLabelResolver`]. +/// +/// The various store types (i.e., the "type" field in the toml field) are registered with the +/// resolver using `add_store_type`. The default store for a label is registered using `add_default_store`. 
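// Illustrative only (not part of this change set): a minimal wiring of the
// resolver into the factor, mirroring the factor tests later in this diff
// (`SpinKeyValueStore` and `SpinKeyValueRuntimeConfig` come from
// spin-factor-key-value-spin):
//
//     let mut resolver = RuntimeConfigResolver::new();
//     resolver.register_store_type(SpinKeyValueStore::new(None))?;
//     resolver.add_default_store::<SpinKeyValueStore>(
//         "default",
//         SpinKeyValueRuntimeConfig::new(None),
//     )?;
//     let factor = KeyValueFactor::new(resolver);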
+#[derive(Default, Clone)] +pub struct RuntimeConfigResolver { + /// A map of store types to a function that returns the appropriate store + /// manager from runtime config TOML. + store_types: HashMap<&'static str, StoreFromToml>, + /// A map of default store configurations for a label. + defaults: HashMap<&'static str, StoreConfig>, +} + +impl RuntimeConfigResolver { + /// Create a new RuntimeConfigResolver. + pub fn new() -> Self { + ::default() + } + + /// Adds a default store configuration for a label. + /// + /// Users must ensure that the store type for `config` has been registered with + /// the resolver using [`Self::register_store_type`]. + pub fn add_default_store( + &mut self, + label: &'static str, + config: T::RuntimeConfig, + ) -> anyhow::Result<()> + where + T: MakeKeyValueStore, + T::RuntimeConfig: Serialize, + { + self.defaults.insert( + label, + StoreConfig::new(T::RUNTIME_CONFIG_TYPE.to_owned(), config)?, + ); + Ok(()) + } + + /// Registers a store type to the resolver. + pub fn register_store_type( + &mut self, + store_type: T, + ) -> anyhow::Result<()> { + if self + .store_types + .insert(T::RUNTIME_CONFIG_TYPE, store_from_toml_fn(store_type)) + .is_some() + { + anyhow::bail!( + "duplicate key value store type {:?}", + T::RUNTIME_CONFIG_TYPE + ); + } + Ok(()) + } + + /// Resolves a toml table into a runtime config. + pub fn resolve_from_toml( + &self, + table: Option<&impl GetTomlValue>, + ) -> anyhow::Result> { + let Some(table) = table.and_then(|t| t.get("key_value_store")) else { + return Ok(None); + }; + let table: HashMap = table.clone().try_into()?; + + let mut runtime_config = RuntimeConfig::default(); + for (label, config) in table { + let store_manager = self.store_manager_from_config(config).with_context(|| { + format!("could not configure key-value store with label '{label}'") + })?; + runtime_config.add_store_manager(label.clone(), store_manager); + } + Ok(Some(runtime_config)) + } + + /// Given a [`StoreConfig`], returns a store manager. + /// + /// Errors if there is no [`MakeKeyValueStore`] registered for the store config's type + /// or if the store manager cannot be created from the config. + fn store_manager_from_config( + &self, + config: StoreConfig, + ) -> anyhow::Result> { + let config_type = config.type_.as_str(); + let maker = self.store_types.get(config_type).with_context(|| { + format!("the store type '{config_type}' was not registered with the config resolver") + })?; + maker(config.config) + } +} + +impl DefaultLabelResolver for RuntimeConfigResolver { + fn default(&self, label: &str) -> Option> { + let config = self.defaults.get(label)?; + // TODO(rylev): The unwrap here is not ideal. We should return a Result instead. + // Piping that through `DefaultLabelResolver` is a bit awkward, though. 
+ Some(self.store_manager_from_config(config.clone()).unwrap()) + } +} + +#[derive(Deserialize, Clone)] +pub struct StoreConfig { + #[serde(rename = "type")] + pub type_: String, + #[serde(flatten)] + pub config: toml::Table, +} + +impl StoreConfig { + pub fn new(type_: String, config: T) -> anyhow::Result + where + T: Serialize, + { + Ok(Self { + type_, + config: toml::value::Table::try_from(config)?, + }) + } +} diff --git a/crates/factor-key-value/tests/factor_test.rs b/crates/factor-key-value/tests/factor_test.rs new file mode 100644 index 0000000000..890e95ca9c --- /dev/null +++ b/crates/factor-key-value/tests/factor_test.rs @@ -0,0 +1,274 @@ +use anyhow::Context as _; +use spin_factor_key_value::{ + runtime_config::spin::{MakeKeyValueStore, RuntimeConfigResolver}, + KeyValueFactor, RuntimeConfig, +}; +use spin_factor_key_value_redis::RedisKeyValueStore; +use spin_factor_key_value_spin::{SpinKeyValueRuntimeConfig, SpinKeyValueStore}; +use spin_factors::{FactorRuntimeConfigSource, RuntimeConfigSourceFinalizer, RuntimeFactors}; +use spin_factors_test::{toml, TestEnvironment}; +use spin_world::v2::key_value::HostStore; +use std::{collections::HashSet, sync::Arc}; + +#[derive(RuntimeFactors)] +struct TestFactors { + key_value: KeyValueFactor, +} + +#[tokio::test] +async fn default_key_value_works() -> anyhow::Result<()> { + let mut test_resolver = RuntimeConfigResolver::new(); + test_resolver.register_store_type(SpinKeyValueStore::new(None))?; + test_resolver + .add_default_store::("default", SpinKeyValueRuntimeConfig::new(None))?; + let factors = TestFactors { + key_value: KeyValueFactor::new(test_resolver), + }; + let env = TestEnvironment::new(factors).extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + key_value_stores = ["default"] + }); + let state = env.build_instance_state().await?; + + assert_eq!( + state.key_value.allowed_stores(), + &["default".into()].into_iter().collect::>() + ); + Ok(()) +} + +async fn run_test_with_config_and_stores_for_label( + runtime_config: Option, + store_types: Vec, + labels: Vec<&str>, +) -> anyhow::Result { + let mut test_resolver = RuntimeConfigResolver::new(); + for store_type in store_types { + test_resolver.register_store_type(store_type)?; + } + let test_resolver = Arc::new(test_resolver); + let factors = TestFactors { + key_value: KeyValueFactor::new(test_resolver.clone()), + }; + let labels_clone = labels.clone(); + let env = TestEnvironment::new(factors) + .extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + key_value_stores = labels_clone + }) + .runtime_config(TomlConfig::new(test_resolver, runtime_config))?; + let state = env.build_instance_state().await?; + assert_eq!( + labels, + state.key_value.allowed_stores().iter().collect::>() + ); + + Ok(state) +} + +#[tokio::test] +async fn overridden_default_key_value_works() -> anyhow::Result<()> { + let runtime_config = toml::toml! { + [key_value_store.default] + type = "redis" + url = "redis://localhost:6379" + }; + run_test_with_config_and_stores_for_label( + Some(runtime_config), + vec![RedisKeyValueStore::new()], + vec!["default"], + ) + .await?; + Ok(()) +} + +#[tokio::test] +async fn custom_spin_key_value_works() -> anyhow::Result<()> { + let runtime_config = toml::toml! 
{ + [key_value_store.custom] + type = "spin" + }; + run_test_with_config_and_stores_for_label( + Some(runtime_config), + vec![SpinKeyValueStore::new(None)], + vec!["custom"], + ) + .await?; + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 1)] +async fn custom_spin_key_value_works_with_absolute_path() -> anyhow::Result<()> { + let tmp_dir = tempfile::TempDir::with_prefix("example")?; + let db_path = tmp_dir.path().join("foo/custom.db"); + // Check that the db does not exist yet - it will exist by the end of the test + assert!(!db_path.exists()); + + let path_str = db_path.to_str().unwrap(); + let runtime_config = toml::toml! { + [key_value_store.custom] + type = "spin" + path = path_str + }; + let mut state = run_test_with_config_and_stores_for_label( + Some(runtime_config), + vec![SpinKeyValueStore::new(Some( + std::env::current_dir().context("failed to get current directory")?, + ))], + vec!["custom"], + ) + .await?; + + // Actually et a key since store creation is lazy + let store = state.key_value.open("custom".to_owned()).await??; + let _ = state.key_value.get(store, "foo".to_owned()).await??; + + // Check that the parent has been created + assert!(db_path.exists()); + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 1)] +async fn custom_spin_key_value_works_with_relative_path() -> anyhow::Result<()> { + let tmp_dir = tempfile::TempDir::with_prefix("example")?; + let db_path = tmp_dir.path().join("custom.db"); + // Check that the db does not exist yet - it will exist by the end of the test + assert!(!db_path.exists()); + + let runtime_config = toml::toml! { + [key_value_store.custom] + type = "spin" + path = "custom.db" + }; + let mut state = run_test_with_config_and_stores_for_label( + Some(runtime_config), + vec![SpinKeyValueStore::new(Some(tmp_dir.path().to_owned()))], + vec!["custom"], + ) + .await?; + + // Actually et a key since store creation is lazy + let store = state.key_value.open("custom".to_owned()).await??; + let _ = state.key_value.get(store, "foo".to_owned()).await??; + + // Check that the correct store in the config was chosen by verifying the existence of the DB + assert!(db_path.exists()); + Ok(()) +} + +#[tokio::test] +async fn custom_redis_key_value_works() -> anyhow::Result<()> { + let runtime_config = toml::toml! { + [key_value_store.custom] + type = "redis" + url = "redis://localhost:6379" + }; + run_test_with_config_and_stores_for_label( + Some(runtime_config), + vec![RedisKeyValueStore::new()], + vec!["custom"], + ) + .await?; + Ok(()) +} + +#[tokio::test] +async fn misconfigured_spin_key_value_fails() -> anyhow::Result<()> { + let tmp_dir = tempfile::TempDir::with_prefix("example")?; + let runtime_config = toml::toml! { + [key_value_store.custom] + type = "spin" + path = "/$$&/bad/path/foo.db" + }; + let result = run_test_with_config_and_stores_for_label( + Some(runtime_config), + vec![SpinKeyValueStore::new(Some(tmp_dir.path().to_owned()))], + vec!["custom"], + ) + .await; + // TODO(rylev): This only fails on my machine due to a read-only file system error. + // We should consider adding a check for the error message. + assert!(result.is_err()); + Ok(()) +} + +#[tokio::test(flavor = "multi_thread", worker_threads = 1)] +// TODO(rylev): consider removing this test as it is really only a consequence of +// toml deserialization and not a feature of the key-value store. 
+async fn multiple_custom_key_value_uses_second_store() -> anyhow::Result<()> { + let tmp_dir = tempfile::TempDir::with_prefix("example")?; + let db_path = tmp_dir.path().join("custom.db"); + // Check that the db does not exist yet - it will exist by the end of the test + assert!(!db_path.exists()); + + let mut test_resolver = RuntimeConfigResolver::new(); + test_resolver.register_store_type(RedisKeyValueStore::new())?; + test_resolver.register_store_type(SpinKeyValueStore::new(Some(tmp_dir.path().to_owned())))?; + let test_resolver = Arc::new(test_resolver); + let factors = TestFactors { + key_value: KeyValueFactor::new(test_resolver.clone()), + }; + let runtime_config = toml::toml! { + [key_value_store.custom] + type = "redis" + url = "redis://localhost:6379" + + [key_value_store.custom] + type = "spin" + path = "custom.db" + + }; + let env = TestEnvironment::new(factors) + .extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + key_value_stores = ["custom"] + }) + .runtime_config(TomlConfig::new(test_resolver, Some(runtime_config)))?; + let mut state = env.build_instance_state().await?; + + // Actually et a key since store creation is lazy + let store = state.key_value.open("custom".to_owned()).await??; + let _ = state.key_value.get(store, "foo".to_owned()).await??; + + assert_eq!( + state.key_value.allowed_stores(), + &["custom".into()].into_iter().collect::>() + ); + // Check that the correct store in the config was chosen by verifying the existence of the DB + assert!(db_path.exists()); + Ok(()) +} + +struct TomlConfig { + resolver: Arc, + toml: Option, +} + +impl TomlConfig { + fn new(resolver: Arc, toml: Option) -> Self { + Self { resolver, toml } + } +} + +impl TryFrom for TestFactorsRuntimeConfig { + type Error = anyhow::Error; + + fn try_from(value: TomlConfig) -> Result { + Self::from_source(value) + } +} + +impl FactorRuntimeConfigSource for TomlConfig { + fn get_runtime_config(&mut self) -> anyhow::Result> { + self.resolver.resolve_from_toml(self.toml.as_ref()) + } +} + +impl RuntimeConfigSourceFinalizer for TomlConfig { + fn finalize(&mut self) -> anyhow::Result<()> { + Ok(()) + } +} diff --git a/crates/factor-llm/Cargo.toml b/crates/factor-llm/Cargo.toml new file mode 100644 index 0000000000..b7f0e4107a --- /dev/null +++ b/crates/factor-llm/Cargo.toml @@ -0,0 +1,35 @@ +[package] +name = "spin-factor-llm" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +rust-version.workspace = true + +[features] +llm = ["spin-llm-local"] +llm-metal = ["llm", "spin-llm-local/metal"] +llm-cublas = ["llm", "spin-llm-local/cublas"] + +[dependencies] +anyhow = "1.0" +async-trait = "0.1" +serde = "1.0" +spin-factors = { path = "../factors" } +spin-llm-local = { path = "../llm-local", optional = true } +spin-llm-remote-http = { path = "../llm-remote-http" } +spin-locked-app = { path = "../locked-app" } +spin-world = { path = "../world" } +tracing = { workspace = true } +tokio = { version = "1", features = ["sync"] } +toml = "0.8" +url = { version = "2", features = ["serde"] } + +[dev-dependencies] +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } + +[lints] +workspace = true diff --git a/crates/llm/src/lib.rs b/crates/factor-llm/src/host.rs similarity index 71% rename from crates/llm/src/lib.rs rename to crates/factor-llm/src/host.rs index 399c2fbcaf..af980ad7e1 100644 --- a/crates/llm/src/lib.rs +++ 
b/crates/factor-llm/src/host.rs @@ -1,39 +1,11 @@ -pub mod host_component; - -use spin_app::MetadataKey; -use spin_core::async_trait; +use async_trait::async_trait; use spin_world::v1::llm::{self as v1}; use spin_world::v2::llm::{self as v2}; -use std::collections::HashSet; - -pub use crate::host_component::LlmComponent; - -pub const MODEL_ALL_MINILM_L6_V2: &str = "all-minilm-l6-v2"; -pub const AI_MODELS_KEY: MetadataKey> = MetadataKey::new("ai_models"); - -#[async_trait] -pub trait LlmEngine: Send + Sync { - async fn infer( - &mut self, - model: v1::InferencingModel, - prompt: String, - params: v2::InferencingParams, - ) -> Result; - - async fn generate_embeddings( - &mut self, - model: v2::EmbeddingModel, - data: Vec, - ) -> Result; -} -pub struct LlmDispatch { - engine: Box, - allowed_models: HashSet, -} +use crate::InstanceState; #[async_trait] -impl v2::Host for LlmDispatch { +impl v2::Host for InstanceState { async fn infer( &mut self, model: v2::InferencingModel, @@ -44,6 +16,8 @@ impl v2::Host for LlmDispatch { return Err(access_denied_error(&model)); } self.engine + .lock() + .await .infer( model, prompt, @@ -67,7 +41,7 @@ impl v2::Host for LlmDispatch { if !self.allowed_models.contains(&m) { return Err(access_denied_error(&m)); } - self.engine.generate_embeddings(m, data).await + self.engine.lock().await.generate_embeddings(m, data).await } fn convert_error(&mut self, error: v2::Error) -> anyhow::Result { @@ -76,7 +50,7 @@ impl v2::Host for LlmDispatch { } #[async_trait] -impl v1::Host for LlmDispatch { +impl v1::Host for InstanceState { async fn infer( &mut self, model: v1::InferencingModel, diff --git a/crates/factor-llm/src/lib.rs b/crates/factor-llm/src/lib.rs new file mode 100644 index 0000000000..543e59b613 --- /dev/null +++ b/crates/factor-llm/src/lib.rs @@ -0,0 +1,146 @@ +mod host; +pub mod spin; + +use std::collections::{HashMap, HashSet}; +use std::sync::Arc; + +use async_trait::async_trait; +use spin_factors::{ + ConfigureAppContext, Factor, InstanceBuilders, PrepareContext, RuntimeFactors, + SelfInstanceBuilder, +}; +use spin_locked_app::MetadataKey; +use spin_world::v1::llm::{self as v1}; +use spin_world::v2::llm::{self as v2}; +use tokio::sync::Mutex; + +pub const ALLOWED_MODELS_KEY: MetadataKey> = MetadataKey::new("ai_models"); + +/// The factor for LLMs. +pub struct LlmFactor { + default_engine_creator: Box, +} + +impl LlmFactor { + /// Creates a new LLM factor with the given default engine creator. + /// + /// The default engine creator is used to create the engine if no runtime configuration is provided. + pub fn new(default_engine_creator: F) -> Self { + Self { + default_engine_creator: Box::new(default_engine_creator), + } + } +} + +impl Factor for LlmFactor { + type RuntimeConfig = RuntimeConfig; + type AppState = AppState; + type InstanceBuilder = InstanceState; + + fn init( + &mut self, + mut ctx: spin_factors::InitContext, + ) -> anyhow::Result<()> { + ctx.link_bindings(spin_world::v1::llm::add_to_linker)?; + ctx.link_bindings(spin_world::v2::llm::add_to_linker)?; + Ok(()) + } + + fn configure_app( + &self, + mut ctx: ConfigureAppContext, + ) -> anyhow::Result { + let component_allowed_models = ctx + .app() + .components() + .map(|component| { + Ok(( + component.id().to_string(), + component + .get_metadata(ALLOWED_MODELS_KEY)? 
+ .unwrap_or_default() + .into_iter() + .collect::>() + .into(), + )) + }) + .collect::>()?; + let engine = ctx + .take_runtime_config() + .map(|c| c.engine) + .unwrap_or_else(|| self.default_engine_creator.create()); + Ok(AppState { + engine, + component_allowed_models, + }) + } + + fn prepare( + &self, + ctx: PrepareContext, + _builders: &mut InstanceBuilders, + ) -> anyhow::Result { + let allowed_models = ctx + .app_state() + .component_allowed_models + .get(ctx.app_component().id()) + .cloned() + .unwrap_or_default(); + let engine = ctx.app_state().engine.clone(); + + Ok(InstanceState { + engine, + allowed_models, + }) + } +} + +/// The application state for the LLM factor. +pub struct AppState { + engine: Arc>, + component_allowed_models: HashMap>>, +} + +/// The instance state for the LLM factor. +pub struct InstanceState { + engine: Arc>, + pub allowed_models: Arc>, +} + +/// The runtime configuration for the LLM factor. +pub struct RuntimeConfig { + engine: Arc>, +} + +impl SelfInstanceBuilder for InstanceState {} + +/// The interface for a language model engine. +#[async_trait] +pub trait LlmEngine: Send + Sync { + async fn infer( + &mut self, + model: v1::InferencingModel, + prompt: String, + params: v2::InferencingParams, + ) -> Result; + + async fn generate_embeddings( + &mut self, + model: v2::EmbeddingModel, + data: Vec, + ) -> Result; +} + +/// A creator for an LLM engine. +pub trait LlmEngineCreator: Send + Sync { + fn create(&self) -> Arc>; +} + +impl LlmEngineCreator for F +where + F: Fn() -> Arc> + Send + Sync, +{ + fn create(&self) -> Arc> { + self() + } +} diff --git a/crates/factor-llm/src/spin.rs b/crates/factor-llm/src/spin.rs new file mode 100644 index 0000000000..ab3a167fa3 --- /dev/null +++ b/crates/factor-llm/src/spin.rs @@ -0,0 +1,165 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use spin_factors::runtime_config::toml::GetTomlValue; +use spin_llm_remote_http::RemoteHttpLlmEngine; +use spin_world::async_trait; +use spin_world::v1::llm::{self as v1}; +use spin_world::v2::llm::{self as v2}; +use tokio::sync::Mutex; +use url::Url; + +use crate::{LlmEngine, LlmEngineCreator, RuntimeConfig}; + +#[cfg(feature = "llm")] +mod local { + use super::*; + pub use spin_llm_local::LocalLlmEngine; + + #[async_trait] + impl LlmEngine for LocalLlmEngine { + async fn infer( + &mut self, + model: v2::InferencingModel, + prompt: String, + params: v2::InferencingParams, + ) -> Result { + self.infer(model, prompt, params).await + } + + async fn generate_embeddings( + &mut self, + model: v2::EmbeddingModel, + data: Vec, + ) -> Result { + self.generate_embeddings(model, data).await + } + } +} + +/// The default engine creator for the LLM factor when used in the Spin CLI. 
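// Illustrative only (not part of this change set): any closure of type
// `Fn() -> Arc<Mutex<dyn LlmEngine>> + Send + Sync` satisfies `LlmEngineCreator`,
// so a factor can be wired to, for example, the remote HTTP engine shown below
// (`url` and `auth_token` are placeholders):
//
//     let engine = Arc::new(Mutex::new(RemoteHttpLlmEngine::new(url, auth_token)))
//         as Arc<Mutex<dyn LlmEngine>>;
//     let factor = LlmFactor::new(move || engine.clone());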
+pub fn default_engine_creator( + state_dir: Option, + use_gpu: bool, +) -> anyhow::Result { + #[cfg(feature = "llm")] + let engine = { + use anyhow::Context as _; + let models_dir_parent = match state_dir { + Some(ref dir) => dir.clone(), + None => std::env::current_dir().context("failed to get current working directory")?, + }; + spin_llm_local::LocalLlmEngine::new(models_dir_parent.join("ai-models"), use_gpu) + }; + #[cfg(not(feature = "llm"))] + let engine = { + let _ = (state_dir, use_gpu); + noop::NoopLlmEngine + }; + let engine = Arc::new(Mutex::new(engine)) as Arc>; + Ok(move || engine.clone()) +} + +#[async_trait] +impl LlmEngine for RemoteHttpLlmEngine { + async fn infer( + &mut self, + model: v1::InferencingModel, + prompt: String, + params: v2::InferencingParams, + ) -> Result { + self.infer(model, prompt, params).await + } + + async fn generate_embeddings( + &mut self, + model: v2::EmbeddingModel, + data: Vec, + ) -> Result { + self.generate_embeddings(model, data).await + } +} + +pub fn runtime_config_from_toml( + table: &impl GetTomlValue, + state_dir: Option, + use_gpu: bool, +) -> anyhow::Result> { + let Some(value) = table.get("llm_compute") else { + return Ok(None); + }; + let config: LlmCompute = value.clone().try_into()?; + + Ok(Some(RuntimeConfig { + engine: config.into_engine(state_dir, use_gpu)?, + })) +} + +#[derive(Debug, serde::Deserialize)] +#[serde(rename_all = "snake_case", tag = "type")] +pub enum LlmCompute { + Spin, + RemoteHttp(RemoteHttpCompute), +} + +impl LlmCompute { + fn into_engine( + self, + state_dir: Option, + use_gpu: bool, + ) -> anyhow::Result>> { + let engine: Arc> = match self { + #[cfg(not(feature = "llm"))] + LlmCompute::Spin => { + let _ = (state_dir, use_gpu); + Arc::new(Mutex::new(noop::NoopLlmEngine)) + } + #[cfg(feature = "llm")] + LlmCompute::Spin => default_engine_creator(state_dir, use_gpu)?.create(), + LlmCompute::RemoteHttp(config) => Arc::new(Mutex::new(RemoteHttpLlmEngine::new( + config.url, + config.auth_token, + ))), + }; + Ok(engine) + } +} + +#[derive(Debug, serde::Deserialize)] +pub struct RemoteHttpCompute { + url: Url, + auth_token: String, +} + +/// A noop engine used when the local engine feature is disabled. 
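// Illustrative only (not part of this change set): `runtime_config_from_toml`
// above reads a top-level `llm_compute` table; with `tag = "type"` and
// snake_case renaming on `LlmCompute`, a remote-HTTP configuration would look
// roughly like the following (values are placeholders):
//
//     [llm_compute]
//     type = "remote_http"
//     url = "https://llm.example.com"
//     auth_token = "<token>"
//
// while `type = "spin"` selects the local engine (or the noop engine when the
// `llm` feature is disabled).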
+#[cfg(not(feature = "llm"))] +mod noop { + use super::*; + + #[derive(Clone, Copy)] + pub(super) struct NoopLlmEngine; + + #[async_trait] + impl LlmEngine for NoopLlmEngine { + async fn infer( + &mut self, + _model: v2::InferencingModel, + _prompt: String, + _params: v2::InferencingParams, + ) -> Result { + Err(v2::Error::RuntimeError( + "Local LLM operations are not supported in this version of Spin.".into(), + )) + } + + async fn generate_embeddings( + &mut self, + _model: v2::EmbeddingModel, + _data: Vec, + ) -> Result { + Err(v2::Error::RuntimeError( + "Local LLM operations are not supported in this version of Spin.".into(), + )) + } + } +} diff --git a/crates/factor-llm/tests/factor_test.rs b/crates/factor-llm/tests/factor_test.rs new file mode 100644 index 0000000000..a0c4e988a6 --- /dev/null +++ b/crates/factor-llm/tests/factor_test.rs @@ -0,0 +1,140 @@ +use std::collections::HashSet; +use std::sync::Arc; + +use spin_factor_llm::{LlmEngine, LlmFactor}; +use spin_factors::{anyhow, RuntimeFactors}; +use spin_factors_test::{toml, TestEnvironment}; +use spin_world::v1::llm::{self as v1}; +use spin_world::v2::llm::{self as v2, Host}; +use tokio::sync::Mutex; + +#[derive(RuntimeFactors)] +struct TestFactors { + llm: LlmFactor, +} + +#[tokio::test] +async fn llm_works() -> anyhow::Result<()> { + let handle = Box::new(|op| match op { + Operation::Inference { + model, + prompt, + params, + } => { + assert_eq!(model, "llama2-chat"); + assert_eq!(prompt, "some prompt"); + assert_eq!(params.max_tokens, 100); + Ok(v2::InferencingResult { + text: "response".to_owned(), + usage: v2::InferencingUsage { + prompt_token_count: 1, + generated_token_count: 1, + }, + } + .into()) + } + Operation::Embedding { .. } => { + todo!("add test for embeddings") + } + }); + let factors = TestFactors { + llm: LlmFactor::new(move || { + Arc::new(Mutex::new(FakeLLm { + handle: handle.clone(), + })) as _ + }), + }; + let env = TestEnvironment::new(factors).extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + ai_models = ["llama2-chat"] + }); + let mut state = env.build_instance_state().await?; + + assert_eq!( + &*state.llm.allowed_models, + &["llama2-chat".to_owned()] + .into_iter() + .collect::>() + ); + + assert!(matches!( + state + .llm + .infer("unknown-model".into(), "some prompt".into(), Default::default()) + .await, + Err(v2::Error::InvalidInput(msg)) if msg.contains("The component does not have access to use") + )); + + state + .llm + .infer("llama2-chat".into(), "some prompt".into(), None) + .await?; + Ok(()) +} + +struct FakeLLm { + handle: Box Result + Sync + Send>, +} + +#[allow(dead_code)] +enum Operation { + Inference { + model: v1::InferencingModel, + prompt: String, + params: v2::InferencingParams, + }, + Embedding { + model: v2::EmbeddingModel, + data: Vec, + }, +} + +enum OperationResult { + Inferencing(v2::InferencingResult), + Embeddings(v2::EmbeddingsResult), +} + +impl From for OperationResult { + fn from(e: v2::EmbeddingsResult) -> Self { + OperationResult::Embeddings(e) + } +} + +impl From for OperationResult { + fn from(i: v2::InferencingResult) -> Self { + OperationResult::Inferencing(i) + } +} + +#[async_trait::async_trait] +impl LlmEngine for FakeLLm { + async fn infer( + &mut self, + model: v1::InferencingModel, + prompt: String, + params: v2::InferencingParams, + ) -> Result { + let OperationResult::Inferencing(i) = (self.handle)(Operation::Inference { + model, + prompt, + params, + })? + else { + panic!("test incorrectly configured. 
inferencing operation returned embeddings result") + }; + Ok(i) + } + + async fn generate_embeddings( + &mut self, + model: v2::EmbeddingModel, + data: Vec, + ) -> Result { + let OperationResult::Embeddings(e) = (self.handle)(Operation::Embedding { model, data })? + else { + panic!("test incorrectly configured. embeddings operation returned inferencing result") + }; + Ok(e) + } +} diff --git a/crates/factor-outbound-http/Cargo.toml b/crates/factor-outbound-http/Cargo.toml new file mode 100644 index 0000000000..9d22b59084 --- /dev/null +++ b/crates/factor-outbound-http/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "spin-factor-outbound-http" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +anyhow = "1.0" +http = "1.1.0" +http-body-util = "0.1" +hyper = "1.4.1" +reqwest = { version = "0.11", features = ["gzip"] } +rustls = { version = "0.23", default-features = false, features = ["ring", "std"] } +spin-factor-outbound-networking = { path = "../factor-outbound-networking" } +spin-factors = { path = "../factors" } +spin-telemetry = { path = "../telemetry" } +spin-world = { path = "../world" } +terminal = { path = "../terminal" } +tokio = { version = "1", features = ["macros", "rt"] } +tokio-rustls = { version = "0.26", default-features = false, features = ["logging", "tls12"] } +tracing = { workspace = true } +wasmtime = { workspace = true } +wasmtime-wasi = { workspace = true } +wasmtime-wasi-http = { workspace = true } + +[dev-dependencies] +spin-factor-variables = { path = "../factor-variables" } +spin-factors-test = { path = "../factors-test" } + +[lints] +workspace = true diff --git a/crates/factor-outbound-http/src/lib.rs b/crates/factor-outbound-http/src/lib.rs new file mode 100644 index 0000000000..739be2ab9a --- /dev/null +++ b/crates/factor-outbound-http/src/lib.rs @@ -0,0 +1,195 @@ +mod spin; +mod wasi; +pub mod wasi_2023_10_18; +pub mod wasi_2023_11_10; + +use std::net::SocketAddr; + +use anyhow::Context; +use http::{ + uri::{Authority, Parts, PathAndQuery, Scheme}, + HeaderValue, Uri, +}; +use spin_factor_outbound_networking::{ + ComponentTlsConfigs, OutboundAllowedHosts, OutboundNetworkingFactor, +}; +use spin_factors::{ + anyhow, ConfigureAppContext, Factor, InstanceBuilders, PrepareContext, RuntimeFactors, + SelfInstanceBuilder, +}; +use wasmtime_wasi_http::WasiHttpCtx; + +pub use wasmtime_wasi_http::{ + body::HyperOutgoingBody, + types::{HostFutureIncomingResponse, OutgoingRequestConfig}, + HttpResult, +}; + +#[derive(Default)] +pub struct OutboundHttpFactor { + _priv: (), +} + +impl OutboundHttpFactor { + pub fn new() -> Self { + Self::default() + } +} + +impl Factor for OutboundHttpFactor { + type RuntimeConfig = (); + type AppState = (); + type InstanceBuilder = InstanceState; + + fn init( + &mut self, + mut ctx: spin_factors::InitContext, + ) -> anyhow::Result<()> { + ctx.link_bindings(spin_world::v1::http::add_to_linker)?; + wasi::add_to_linker::(&mut ctx)?; + Ok(()) + } + + fn configure_app( + &self, + _ctx: ConfigureAppContext, + ) -> anyhow::Result { + Ok(()) + } + + fn prepare( + &self, + _ctx: PrepareContext, + builders: &mut InstanceBuilders, + ) -> anyhow::Result { + let outbound_networking = builders.get_mut::()?; + let allowed_hosts = outbound_networking.allowed_hosts(); + let component_tls_configs = outbound_networking.component_tls_configs().clone(); + Ok(InstanceState { + wasi_http_ctx: WasiHttpCtx::new(), + allowed_hosts, + component_tls_configs, + self_request_origin: None, + 
request_interceptor: None, + spin_http_client: None, + }) + } +} + +pub struct InstanceState { + wasi_http_ctx: WasiHttpCtx, + allowed_hosts: OutboundAllowedHosts, + component_tls_configs: ComponentTlsConfigs, + self_request_origin: Option, + request_interceptor: Option>, + // Connection-pooling client for 'fermyon:spin/http' interface + spin_http_client: Option, +} + +impl InstanceState { + /// Sets the [`SelfRequestOrigin`] for this instance. + /// + /// This is used to handle outbound requests to relative URLs. If unset, + /// those requests will fail. + pub fn set_self_request_origin(&mut self, origin: SelfRequestOrigin) { + self.self_request_origin = Some(origin); + } + + /// Sets a [`OutboundHttpInterceptor`] for this instance. + /// + /// Returns an error if it has already been called for this instance. + pub fn set_request_interceptor( + &mut self, + interceptor: impl OutboundHttpInterceptor + 'static, + ) -> anyhow::Result<()> { + if self.request_interceptor.is_some() { + anyhow::bail!("set_request_interceptor can only be called once"); + } + self.request_interceptor = Some(Box::new(interceptor)); + Ok(()) + } +} + +impl SelfInstanceBuilder for InstanceState {} + +pub type Request = http::Request; + +/// SelfRequestOrigin indicates the base URI to use for "self" requests. +/// +/// This is meant to be set on [`Request::extensions_mut`] in appropriate +/// contexts such as an incoming request handler. +#[derive(Clone, Debug)] +pub struct SelfRequestOrigin { + pub scheme: Scheme, + pub authority: Authority, +} + +impl SelfRequestOrigin { + pub fn create(scheme: Scheme, addr: &SocketAddr) -> anyhow::Result { + Ok(SelfRequestOrigin { + scheme, + authority: addr + .to_string() + .parse() + .with_context(|| format!("address '{addr}' is not a valid authority"))?, + }) + } + + pub fn from_uri(uri: &Uri) -> anyhow::Result { + Ok(Self { + scheme: uri.scheme().context("URI missing scheme")?.clone(), + authority: uri.authority().context("URI missing authority")?.clone(), + }) + } + + fn into_uri(self, path_and_query: Option) -> Uri { + let mut parts = Parts::default(); + parts.scheme = Some(self.scheme); + parts.authority = Some(self.authority); + parts.path_and_query = path_and_query; + Uri::from_parts(parts).unwrap() + } + + fn use_tls(&self) -> bool { + self.scheme == Scheme::HTTPS + } + + fn host_header(&self) -> HeaderValue { + HeaderValue::from_str(self.authority.as_str()).unwrap() + } +} + +impl std::fmt::Display for SelfRequestOrigin { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}://{}", self.scheme, self.authority) + } +} + +/// An outbound HTTP request interceptor to be used with +/// [`InstanceState::set_request_interceptor`]. +pub trait OutboundHttpInterceptor: Send + Sync { + /// Intercept an outgoing HTTP request. + /// + /// If this method returns [`InterceptedResponse::Continue`], the (possibly + /// updated) request and config will be passed on to the default outgoing + /// request handler. + /// + /// If this method returns [`InterceptedResponse::Intercepted`], the inner + /// result will be returned as the result of the request, bypassing the + /// default handler. + fn intercept( + &self, + request: &mut Request, + config: &mut OutgoingRequestConfig, + ) -> InterceptOutcome; +} + +/// The type returned by an [`OutboundHttpInterceptor`]. +pub enum InterceptOutcome { + /// The intercepted request will be passed on to the default outgoing + /// request handler. 
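+    // A minimal sketch (hypothetical name `AddHeaderInterceptor`) of an interceptor
+    // that tags every outbound request and then defers to the default handler:
+    //
+    //     struct AddHeaderInterceptor;
+    //     impl OutboundHttpInterceptor for AddHeaderInterceptor {
+    //         fn intercept(&self, req: &mut Request, _cfg: &mut OutgoingRequestConfig) -> InterceptOutcome {
+    //             req.headers_mut()
+    //                 .insert("x-example-intercepted", http::HeaderValue::from_static("1"));
+    //             InterceptOutcome::Continue
+    //         }
+    //     }
+    //
+    // It would be installed on an instance with `state.set_request_interceptor(AddHeaderInterceptor)?`.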
+ Continue, + /// The given result will be returned as the result of the intercepted + /// request, bypassing the default handler. + Complete(HttpResult), +} diff --git a/crates/factor-outbound-http/src/spin.rs b/crates/factor-outbound-http/src/spin.rs new file mode 100644 index 0000000000..633df727d9 --- /dev/null +++ b/crates/factor-outbound-http/src/spin.rs @@ -0,0 +1,179 @@ +use spin_world::{ + async_trait, + v1::{ + http as spin_http, + http_types::{self, HttpError, Method, Request, Response}, + }, +}; +use tracing::{field::Empty, instrument, Level, Span}; + +#[async_trait] +impl spin_http::Host for crate::InstanceState { + #[instrument(name = "spin_outbound_http.send_request", skip_all, err(level = Level::INFO), + fields(otel.kind = "client", url.full = Empty, http.request.method = Empty, + http.response.status_code = Empty, otel.name = Empty, server.address = Empty, server.port = Empty))] + async fn send_request(&mut self, req: Request) -> Result { + let span = Span::current(); + record_request_fields(&span, &req); + + let uri = req.uri; + tracing::trace!("Sending outbound HTTP to {uri:?}"); + + let abs_url = if !uri.starts_with('/') { + // Absolute URI + let is_allowed = self + .allowed_hosts + .check_url(&uri, "https") + .await + .unwrap_or(false); + if !is_allowed { + return Err(HttpError::DestinationNotAllowed); + } + uri + } else { + // Relative URI ("self" request) + let is_allowed = self + .allowed_hosts + .check_relative_url(&["http", "https"]) + .await + .unwrap_or(false); + if !is_allowed { + return Err(HttpError::DestinationNotAllowed); + } + + let Some(origin) = &self.self_request_origin else { + tracing::error!( + "Couldn't handle outbound HTTP request to relative URI; no origin set" + ); + return Err(HttpError::InvalidUrl); + }; + format!("{origin}{uri}") + }; + let req_url = reqwest::Url::parse(&abs_url).map_err(|_| HttpError::InvalidUrl)?; + + if !req.params.is_empty() { + tracing::warn!("HTTP params field is deprecated"); + } + + // Allow reuse of Client's internal connection pool for multiple requests + // in a single component execution + let client = self.spin_http_client.get_or_insert_with(Default::default); + + let mut req = { + let mut builder = client.request(reqwest_method(req.method), req_url); + for (key, val) in req.headers { + builder = builder.header(key, val); + } + builder + .body(req.body.unwrap_or_default()) + .build() + .map_err(|err| { + tracing::error!("Error building outbound request: {err}"); + HttpError::RuntimeError + })? 
+ }; + spin_telemetry::inject_trace_context(req.headers_mut()); + + let resp = client.execute(req).await.map_err(log_reqwest_error)?; + + tracing::trace!("Returning response from outbound request to {abs_url}"); + span.record("http.response.status_code", resp.status().as_u16()); + response_from_reqwest(resp).await + } +} + +impl http_types::Host for crate::InstanceState { + fn convert_http_error(&mut self, err: HttpError) -> anyhow::Result { + Ok(err) + } +} + +fn record_request_fields(span: &Span, req: &Request) { + let method = match req.method { + Method::Get => "GET", + Method::Post => "POST", + Method::Put => "PUT", + Method::Delete => "DELETE", + Method::Patch => "PATCH", + Method::Head => "HEAD", + Method::Options => "OPTIONS", + }; + span.record("otel.name", method) + .record("http.request.method", method) + .record("url.full", req.uri.clone()); + if let Ok(uri) = req.uri.parse::() { + if let Some(authority) = uri.authority() { + span.record("server.address", authority.host()); + if let Some(port) = authority.port() { + span.record("server.port", port.as_u16()); + } + } + } +} + +fn reqwest_method(m: Method) -> reqwest::Method { + match m { + Method::Get => reqwest::Method::GET, + Method::Post => reqwest::Method::POST, + Method::Put => reqwest::Method::PUT, + Method::Delete => reqwest::Method::DELETE, + Method::Patch => reqwest::Method::PATCH, + Method::Head => reqwest::Method::HEAD, + Method::Options => reqwest::Method::OPTIONS, + } +} + +fn log_reqwest_error(err: reqwest::Error) -> HttpError { + let error_desc = if err.is_timeout() { + "timeout error" + } else if err.is_connect() { + "connection error" + } else if err.is_body() || err.is_decode() { + "message body error" + } else if err.is_request() { + "request error" + } else { + "error" + }; + tracing::warn!( + "Outbound HTTP {}: URL {}, error detail {:?}", + error_desc, + err.url() + .map(|u| u.to_string()) + .unwrap_or_else(|| "".to_owned()), + err + ); + HttpError::RuntimeError +} + +async fn response_from_reqwest(res: reqwest::Response) -> Result { + let status = res.status().as_u16(); + + let headers = res + .headers() + .into_iter() + .map(|(key, val)| { + Ok(( + key.to_string(), + val.to_str() + .map_err(|_| { + tracing::error!("Non-ascii response header {key} = {val:?}"); + HttpError::RuntimeError + })? + .to_string(), + )) + }) + .collect::, _>>()?; + + let body = res + .bytes() + .await + .map_err(|_| HttpError::RuntimeError)? 
+ .to_vec(); + + Ok(Response { + status, + headers: Some(headers), + body: Some(body), + }) +} diff --git a/crates/factor-outbound-http/src/wasi.rs b/crates/factor-outbound-http/src/wasi.rs new file mode 100644 index 0000000000..8d49bad2ab --- /dev/null +++ b/crates/factor-outbound-http/src/wasi.rs @@ -0,0 +1,339 @@ +use std::{error::Error, sync::Arc}; + +use anyhow::Context; +use http::{header::HOST, Request}; +use http_body_util::BodyExt; +use rustls::ClientConfig; +use spin_factor_outbound_networking::OutboundAllowedHosts; +use spin_factors::{wasmtime::component::ResourceTable, RuntimeFactorsInstanceState}; +use tokio::{net::TcpStream, time::timeout}; +use tracing::{field::Empty, instrument, Instrument}; +use wasmtime_wasi_http::{ + bindings::http::types::ErrorCode, + body::HyperOutgoingBody, + io::TokioIo, + types::{HostFutureIncomingResponse, IncomingResponse}, + WasiHttpCtx, WasiHttpImpl, WasiHttpView, +}; + +use crate::{ + wasi_2023_10_18, wasi_2023_11_10, InstanceState, InterceptOutcome, OutboundHttpFactor, + SelfRequestOrigin, +}; + +pub(crate) fn add_to_linker( + ctx: &mut spin_factors::InitContext, +) -> anyhow::Result<()> { + fn type_annotate(f: F) -> F + where + F: Fn(&mut T) -> WasiHttpImpl, + { + f + } + let get_data_with_table = ctx.get_data_with_table_fn(); + let closure = type_annotate(move |data| { + let (state, table) = get_data_with_table(data); + WasiHttpImpl(WasiHttpImplInner { state, table }) + }); + let linker = ctx.linker(); + wasmtime_wasi_http::bindings::http::outgoing_handler::add_to_linker_get_host(linker, closure)?; + wasmtime_wasi_http::bindings::http::types::add_to_linker_get_host(linker, closure)?; + + wasi_2023_10_18::add_to_linker(linker, closure)?; + wasi_2023_11_10::add_to_linker(linker, closure)?; + + Ok(()) +} + +impl OutboundHttpFactor { + pub fn get_wasi_http_impl( + runtime_instance_state: &mut impl RuntimeFactorsInstanceState, + ) -> Option> { + let (state, table) = runtime_instance_state.get_with_table::()?; + Some(WasiHttpImpl(WasiHttpImplInner { state, table })) + } +} + +pub(crate) struct WasiHttpImplInner<'a> { + state: &'a mut InstanceState, + table: &'a mut ResourceTable, +} + +impl<'a> WasiHttpView for WasiHttpImplInner<'a> { + fn ctx(&mut self) -> &mut WasiHttpCtx { + &mut self.state.wasi_http_ctx + } + + fn table(&mut self) -> &mut ResourceTable { + self.table + } + + #[instrument( + name = "spin_outbound_http.send_request", + skip_all, + fields( + otel.kind = "client", + url.full = %request.uri(), + http.request.method = %request.method(), + otel.name = %request.method(), + http.response.status_code = Empty, + server.address = Empty, + server.port = Empty, + ), + )] + fn send_request( + &mut self, + mut request: Request, + mut config: wasmtime_wasi_http::types::OutgoingRequestConfig, + ) -> wasmtime_wasi_http::HttpResult { + // wasmtime-wasi-http fills in scheme and authority for relative URLs + // (e.g. https://:443/), which makes them hard to reason about. + // Undo that here. 
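+        // e.g. a guest request for `/self-request` may show up here as
+        // `https://:443/self-request`; stripping the empty authority below restores
+        // the bare path so it is treated as a "self" request against the configured
+        // origin rather than as an absolute URL.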
+ let uri = request.uri_mut(); + if uri + .authority() + .is_some_and(|authority| authority.host().is_empty()) + { + let mut builder = http::uri::Builder::new(); + if let Some(paq) = uri.path_and_query() { + builder = builder.path_and_query(paq.clone()); + } + *uri = builder.build().unwrap(); + } + + if let Some(interceptor) = &self.state.request_interceptor { + match interceptor.intercept(&mut request, &mut config) { + InterceptOutcome::Continue => (), + InterceptOutcome::Complete(res) => return res, + } + } + + let host = request.uri().host().unwrap_or_default(); + let tls_client_config = self + .state + .component_tls_configs + .get_client_config(host) + .clone(); + + Ok(HostFutureIncomingResponse::Pending( + wasmtime_wasi::runtime::spawn( + send_request_impl( + request, + config, + self.state.allowed_hosts.clone(), + self.state.self_request_origin.clone(), + tls_client_config, + ) + .in_current_span(), + ), + )) + } +} + +async fn send_request_impl( + mut request: Request, + mut config: wasmtime_wasi_http::types::OutgoingRequestConfig, + outbound_allowed_hosts: OutboundAllowedHosts, + self_request_origin: Option, + tls_client_config: Arc, +) -> anyhow::Result> { + if request.uri().authority().is_some() { + // Absolute URI + let is_allowed = outbound_allowed_hosts + .check_url(&request.uri().to_string(), "https") + .await + .unwrap_or(false); + if !is_allowed { + return Ok(Err(ErrorCode::HttpRequestDenied)); + } + } else { + // Relative URI ("self" request) + let is_allowed = outbound_allowed_hosts + .check_relative_url(&["http", "https"]) + .await + .unwrap_or(false); + if !is_allowed { + return Ok(Err(ErrorCode::HttpRequestDenied)); + } + + let Some(origin) = self_request_origin else { + tracing::error!("Couldn't handle outbound HTTP request to relative URI; no origin set"); + return Ok(Err(ErrorCode::HttpRequestUriInvalid)); + }; + + config.use_tls = origin.use_tls(); + + request.headers_mut().insert(HOST, origin.host_header()); + spin_telemetry::inject_trace_context(&mut request); + + let path_and_query = request.uri().path_and_query().cloned(); + *request.uri_mut() = origin.into_uri(path_and_query); + } + + let authority = request.uri().authority().context("authority not set")?; + let current_span = tracing::Span::current(); + current_span.record("server.address", authority.host()); + if let Some(port) = authority.port() { + current_span.record("server.port", port.as_u16()); + } + + Ok(send_request_handler(request, config, tls_client_config).await) +} + +/// This is a fork of wasmtime_wasi_http::default_send_request_handler function +/// forked from bytecodealliance/wasmtime commit-sha 29a76b68200fcfa69c8fb18ce6c850754279a05b +/// This fork provides the ability to configure client cert auth for mTLS +async fn send_request_handler( + mut request: http::Request, + wasmtime_wasi_http::types::OutgoingRequestConfig { + use_tls, + connect_timeout, + first_byte_timeout, + between_bytes_timeout, + }: wasmtime_wasi_http::types::OutgoingRequestConfig, + tls_client_config: Arc, +) -> Result { + let authority_str = if let Some(authority) = request.uri().authority() { + if authority.port().is_some() { + authority.to_string() + } else { + let port = if use_tls { 443 } else { 80 }; + format!("{}:{port}", authority) + } + } else { + return Err(ErrorCode::HttpRequestUriInvalid); + }; + + let tcp_stream = timeout(connect_timeout, TcpStream::connect(&authority_str)) + .await + .map_err(|_| ErrorCode::ConnectionTimeout)? 
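+        // Two error layers: the outer `timeout` elapsing maps to `ConnectionTimeout`
+        // above, while the inner `io::Error` from `TcpStream::connect` is inspected
+        // below to distinguish DNS failures from refused connections.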
+ .map_err(|err| match err.kind() { + std::io::ErrorKind::AddrNotAvailable => { + dns_error("address not available".to_string(), 0) + } + _ => { + if err + .to_string() + .starts_with("failed to lookup address information") + { + dns_error("address not available".to_string(), 0) + } else { + ErrorCode::ConnectionRefused + } + } + })?; + + let (mut sender, worker) = if use_tls { + #[cfg(any(target_arch = "riscv64", target_arch = "s390x"))] + { + return Err(ErrorCode::InternalError(Some( + "unsupported architecture for SSL".to_string(), + ))); + } + + #[cfg(not(any(target_arch = "riscv64", target_arch = "s390x")))] + { + use rustls::pki_types::ServerName; + let connector = tokio_rustls::TlsConnector::from(tls_client_config); + let mut parts = authority_str.split(':'); + let host = parts.next().unwrap_or(&authority_str); + let domain = ServerName::try_from(host) + .map_err(|e| { + tracing::warn!("dns lookup error: {e:?}"); + dns_error("invalid dns name".to_string(), 0) + })? + .to_owned(); + let stream = connector.connect(domain, tcp_stream).await.map_err(|e| { + tracing::warn!("tls protocol error: {e:?}"); + ErrorCode::TlsProtocolError + })?; + let stream = TokioIo::new(stream); + + let (sender, conn) = timeout( + connect_timeout, + hyper::client::conn::http1::handshake(stream), + ) + .await + .map_err(|_| ErrorCode::ConnectionTimeout)? + .map_err(hyper_request_error)?; + + let worker = wasmtime_wasi::runtime::spawn(async move { + match conn.await { + Ok(()) => {} + // TODO: shouldn't throw away this error and ideally should + // surface somewhere. + Err(e) => tracing::warn!("dropping error {e}"), + } + }); + + (sender, worker) + } + } else { + let tcp_stream = TokioIo::new(tcp_stream); + let (sender, conn) = timeout( + connect_timeout, + // TODO: we should plumb the builder through the http context, and use it here + hyper::client::conn::http1::handshake(tcp_stream), + ) + .await + .map_err(|_| ErrorCode::ConnectionTimeout)? + .map_err(hyper_request_error)?; + + let worker = wasmtime_wasi::runtime::spawn(async move { + match conn.await { + Ok(()) => {} + // TODO: same as above, shouldn't throw this error away. + Err(e) => tracing::warn!("dropping error {e}"), + } + }); + + (sender, worker) + }; + + // at this point, the request contains the scheme and the authority, but + // the http packet should only include those if addressing a proxy, so + // remove them here, since SendRequest::send_request does not do it for us + *request.uri_mut() = http::Uri::builder() + .path_and_query( + request + .uri() + .path_and_query() + .map(|p| p.as_str()) + .unwrap_or("/"), + ) + .build() + .expect("comes from valid request"); + + let resp = timeout(first_byte_timeout, sender.send_request(request)) + .await + .map_err(|_| ErrorCode::ConnectionReadTimeout)? + .map_err(hyper_request_error)? + .map(|body| body.map_err(hyper_request_error).boxed()); + + Ok(wasmtime_wasi_http::types::IncomingResponse { + resp, + worker: Some(worker), + between_bytes_timeout, + }) +} + +/// Translate a [`hyper::Error`] to a wasi-http `ErrorCode` in the context of a request. +fn hyper_request_error(err: hyper::Error) -> ErrorCode { + // If there's a source, we might be able to extract a wasi-http error from it. 
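+    // e.g. an error raised while streaming the outgoing body may arrive here wrapped
+    // by hyper with the original wasi-http `ErrorCode` as its source; downcasting
+    // recovers that code instead of the generic `HttpProtocolError` fallback below.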
+ if let Some(cause) = err.source() { + if let Some(err) = cause.downcast_ref::() { + return err.clone(); + } + } + + tracing::warn!("hyper request error: {err:?}"); + + ErrorCode::HttpProtocolError +} + +fn dns_error(rcode: String, info_code: u16) -> ErrorCode { + ErrorCode::DnsError(wasmtime_wasi_http::bindings::http::types::DnsErrorPayload { + rcode: Some(rcode), + info_code: Some(info_code), + }) +} diff --git a/crates/factor-outbound-http/src/wasi_2023_10_18.rs b/crates/factor-outbound-http/src/wasi_2023_10_18.rs new file mode 100644 index 0000000000..891853ec6c --- /dev/null +++ b/crates/factor-outbound-http/src/wasi_2023_10_18.rs @@ -0,0 +1,639 @@ +use anyhow::Result; +use wasmtime::component::{Linker, Resource}; +use wasmtime_wasi_http::{WasiHttpImpl, WasiHttpView}; + +mod latest { + pub use wasmtime_wasi_http::bindings::wasi::*; + pub mod http { + pub use wasmtime_wasi_http::bindings::http::*; + } +} + +mod bindings { + use super::latest; + + wasmtime::component::bindgen!({ + path: "../../wit", + interfaces: r#" + include wasi:http/proxy@0.2.0-rc-2023-10-18; + "#, + async: { + // Only need async exports + only_imports: [], + }, + with: { + "wasi:io/poll/pollable": latest::io::poll::Pollable, + "wasi:io/streams/input-stream": latest::io::streams::InputStream, + "wasi:io/streams/output-stream": latest::io::streams::OutputStream, + "wasi:io/streams/error": latest::io::streams::Error, + "wasi:http/types/incoming-response": latest::http::types::IncomingResponse, + "wasi:http/types/incoming-request": latest::http::types::IncomingRequest, + "wasi:http/types/incoming-body": latest::http::types::IncomingBody, + "wasi:http/types/outgoing-response": latest::http::types::OutgoingResponse, + "wasi:http/types/outgoing-request": latest::http::types::OutgoingRequest, + "wasi:http/types/outgoing-body": latest::http::types::OutgoingBody, + "wasi:http/types/fields": latest::http::types::Fields, + "wasi:http/types/response-outparam": latest::http::types::ResponseOutparam, + "wasi:http/types/future-incoming-response": latest::http::types::FutureIncomingResponse, + "wasi:http/types/future-trailers": latest::http::types::FutureTrailers, + }, + trappable_imports: true, + }); +} + +mod wasi { + pub use super::bindings::wasi::{http0_2_0_rc_2023_10_18 as http, io0_2_0_rc_2023_10_18 as io}; +} + +pub mod exports { + pub mod wasi { + pub use super::super::bindings::exports::wasi::http0_2_0_rc_2023_10_18 as http; + } +} + +use wasi::http::types::{ + Error as HttpError, Fields, FutureIncomingResponse, FutureTrailers, Headers, IncomingBody, + IncomingRequest, IncomingResponse, Method, OutgoingBody, OutgoingRequest, OutgoingResponse, + RequestOptions, ResponseOutparam, Scheme, StatusCode, Trailers, +}; +use wasi::io::poll::Pollable; +use wasi::io::streams::{InputStream, OutputStream}; + +use crate::wasi::WasiHttpImplInner; + +pub(crate) fn add_to_linker(linker: &mut Linker, closure: F) -> Result<()> +where + T: Send, + F: Fn(&mut T) -> WasiHttpImpl + Send + Sync + Copy + 'static, +{ + wasi::http::types::add_to_linker_get_host(linker, closure)?; + wasi::http::outgoing_handler::add_to_linker_get_host(linker, closure)?; + Ok(()) +} + +impl wasi::http::types::Host for WasiHttpImpl where T: WasiHttpView + Send {} + +impl wasi::http::types::HostFields for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn new( + &mut self, + entries: Vec<(String, Vec)>, + ) -> wasmtime::Result> { + match latest::http::types::HostFields::from_list(self, entries)? 
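+        // The 2023-10-18 `fields` constructor takes the entry list directly and has
+        // no error variant, so any `HeaderError` from the latest `from_list` is
+        // converted into a trap (`anyhow` error) below rather than returned to the
+        // guest.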
{ + Ok(fields) => Ok(fields), + Err(e) => Err(e.into()), + } + } + + fn get( + &mut self, + self_: wasmtime::component::Resource, + name: String, + ) -> wasmtime::Result>> { + latest::http::types::HostFields::get(self, self_, name) + } + + fn set( + &mut self, + self_: wasmtime::component::Resource, + name: String, + value: Vec>, + ) -> wasmtime::Result<()> { + latest::http::types::HostFields::set(self, self_, name, value)??; + Ok(()) + } + + fn delete( + &mut self, + self_: wasmtime::component::Resource, + name: String, + ) -> wasmtime::Result<()> { + latest::http::types::HostFields::delete(self, self_, name)??; + Ok(()) + } + + fn append( + &mut self, + self_: wasmtime::component::Resource, + name: String, + value: Vec, + ) -> wasmtime::Result<()> { + latest::http::types::HostFields::append(self, self_, name, value)??; + Ok(()) + } + + fn entries( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result)>> { + latest::http::types::HostFields::entries(self, self_) + } + + fn clone( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostFields::clone(self, self_) + } + + fn drop(&mut self, rep: wasmtime::component::Resource) -> wasmtime::Result<()> { + latest::http::types::HostFields::drop(self, rep) + } +} + +impl wasi::http::types::HostIncomingRequest for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn method( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result { + latest::http::types::HostIncomingRequest::method(self, self_).map(|e| e.into()) + } + + fn path_with_query( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostIncomingRequest::path_with_query(self, self_) + } + + fn scheme( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostIncomingRequest::scheme(self, self_).map(|e| e.map(|e| e.into())) + } + + fn authority( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostIncomingRequest::authority(self, self_) + } + + fn headers( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostIncomingRequest::headers(self, self_) + } + + fn consume( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostIncomingRequest::consume(self, self_) + } + + fn drop( + &mut self, + rep: wasmtime::component::Resource, + ) -> wasmtime::Result<()> { + latest::http::types::HostIncomingRequest::drop(self, rep) + } +} + +impl wasi::http::types::HostIncomingResponse for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn status( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result { + latest::http::types::HostIncomingResponse::status(self, self_) + } + + fn headers( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostIncomingResponse::headers(self, self_) + } + + fn consume( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostIncomingResponse::consume(self, self_) + } + + fn drop( + &mut self, + rep: wasmtime::component::Resource, + ) -> wasmtime::Result<()> { + latest::http::types::HostIncomingResponse::drop(self, rep) + } +} + +impl wasi::http::types::HostIncomingBody for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn stream( + &mut self, + self_: wasmtime::component::Resource, + ) -> 
wasmtime::Result, ()>> { + latest::http::types::HostIncomingBody::stream(self, self_) + } + + fn finish( + &mut self, + this: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostIncomingBody::finish(self, this) + } + + fn drop(&mut self, rep: wasmtime::component::Resource) -> wasmtime::Result<()> { + latest::http::types::HostIncomingBody::drop(self, rep) + } +} + +impl wasi::http::types::HostOutgoingRequest for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn new( + &mut self, + method: Method, + path_with_query: Option, + scheme: Option, + authority: Option, + headers: wasmtime::component::Resource, + ) -> wasmtime::Result> { + let headers = latest::http::types::HostFields::clone(self, headers)?; + let request = latest::http::types::HostOutgoingRequest::new(self, headers)?; + let borrow = || Resource::new_borrow(request.rep()); + + if let Err(()) = + latest::http::types::HostOutgoingRequest::set_method(self, borrow(), method.into())? + { + latest::http::types::HostOutgoingRequest::drop(self, request)?; + anyhow::bail!("invalid method supplied"); + } + + if let Err(()) = latest::http::types::HostOutgoingRequest::set_path_with_query( + self, + borrow(), + path_with_query, + )? { + latest::http::types::HostOutgoingRequest::drop(self, request)?; + anyhow::bail!("invalid path-with-query supplied"); + } + + // Historical WASI would fill in an empty authority with a port which + // got just enough working to get things through. Current WASI requires + // the authority, though, so perform the translation manually here. + let authority = authority.unwrap_or_else(|| match &scheme { + Some(Scheme::Http) | Some(Scheme::Other(_)) => ":80".to_string(), + Some(Scheme::Https) | None => ":443".to_string(), + }); + if let Err(()) = latest::http::types::HostOutgoingRequest::set_scheme( + self, + borrow(), + scheme.map(|s| s.into()), + )? { + latest::http::types::HostOutgoingRequest::drop(self, request)?; + anyhow::bail!("invalid scheme supplied"); + } + + if let Err(()) = latest::http::types::HostOutgoingRequest::set_authority( + self, + borrow(), + Some(authority), + )? { + latest::http::types::HostOutgoingRequest::drop(self, request)?; + anyhow::bail!("invalid authority supplied"); + } + + Ok(request) + } + + fn write( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostOutgoingRequest::body(self, self_) + } + + fn drop( + &mut self, + rep: wasmtime::component::Resource, + ) -> wasmtime::Result<()> { + latest::http::types::HostOutgoingRequest::drop(self, rep) + } +} + +impl wasi::http::types::HostOutgoingResponse for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn new( + &mut self, + status_code: StatusCode, + headers: wasmtime::component::Resource, + ) -> wasmtime::Result> { + let headers = latest::http::types::HostFields::clone(self, headers)?; + let response = latest::http::types::HostOutgoingResponse::new(self, headers)?; + let borrow = || Resource::new_borrow(response.rep()); + + if let Err(()) = + latest::http::types::HostOutgoingResponse::set_status_code(self, borrow(), status_code)? 
+ { + latest::http::types::HostOutgoingResponse::drop(self, response)?; + anyhow::bail!("invalid status code supplied"); + } + + Ok(response) + } + + fn write( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostOutgoingResponse::body(self, self_) + } + + fn drop( + &mut self, + rep: wasmtime::component::Resource, + ) -> wasmtime::Result<()> { + latest::http::types::HostOutgoingResponse::drop(self, rep) + } +} + +impl wasi::http::types::HostOutgoingBody for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn write( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostOutgoingBody::write(self, self_) + } + + fn finish( + &mut self, + this: wasmtime::component::Resource, + trailers: Option>, + ) -> wasmtime::Result<()> { + latest::http::types::HostOutgoingBody::finish(self, this, trailers)?; + Ok(()) + } + + fn drop(&mut self, rep: wasmtime::component::Resource) -> wasmtime::Result<()> { + latest::http::types::HostOutgoingBody::drop(self, rep) + } +} + +impl wasi::http::types::HostResponseOutparam for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn set( + &mut self, + param: wasmtime::component::Resource, + response: Result, HttpError>, + ) -> wasmtime::Result<()> { + let response = response.map_err(|err| { + // TODO: probably need to figure out a better mapping between + // errors, but that seems like it would require string matching, + // which also seems not great. + let msg = match err { + HttpError::InvalidUrl(s) => format!("invalid url: {s}"), + HttpError::TimeoutError(s) => format!("timeout: {s}"), + HttpError::ProtocolError(s) => format!("protocol error: {s}"), + HttpError::UnexpectedError(s) => format!("unexpected error: {s}"), + }; + latest::http::types::ErrorCode::InternalError(Some(msg)) + }); + latest::http::types::HostResponseOutparam::set(self, param, response) + } + + fn drop( + &mut self, + rep: wasmtime::component::Resource, + ) -> wasmtime::Result<()> { + latest::http::types::HostResponseOutparam::drop(self, rep) + } +} + +impl wasi::http::types::HostFutureTrailers for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn subscribe( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostFutureTrailers::subscribe(self, self_) + } + + fn get( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result, HttpError>>> { + match latest::http::types::HostFutureTrailers::get(self, self_)? { + Some(Ok(Ok(Some(trailers)))) => Ok(Some(Ok(trailers))), + // Return an empty trailers if no trailers popped out since this + // version of WASI couldn't represent the lack of trailers. + Some(Ok(Ok(None))) => Ok(Some(Ok(latest::http::types::HostFields::new(self)?))), + Some(Ok(Err(e))) => Ok(Some(Err(e.into()))), + Some(Err(())) => Err(anyhow::anyhow!("trailers have already been retrieved")), + None => Ok(None), + } + } + + fn drop(&mut self, rep: wasmtime::component::Resource) -> wasmtime::Result<()> { + latest::http::types::HostFutureTrailers::drop(self, rep) + } +} + +impl wasi::http::types::HostFutureIncomingResponse for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn get( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result< + Option, HttpError>, ()>>, + > { + match latest::http::types::HostFutureIncomingResponse::get(self, self_)? 
{ + None => Ok(None), + Some(Ok(Ok(response))) => Ok(Some(Ok(Ok(response)))), + Some(Ok(Err(e))) => Ok(Some(Ok(Err(e.into())))), + Some(Err(())) => Ok(Some(Err(()))), + } + } + + fn subscribe( + &mut self, + self_: wasmtime::component::Resource, + ) -> wasmtime::Result> { + latest::http::types::HostFutureIncomingResponse::subscribe(self, self_) + } + + fn drop( + &mut self, + rep: wasmtime::component::Resource, + ) -> wasmtime::Result<()> { + latest::http::types::HostFutureIncomingResponse::drop(self, rep) + } +} + +impl wasi::http::outgoing_handler::Host for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn handle( + &mut self, + request: wasmtime::component::Resource, + options: Option, + ) -> wasmtime::Result, HttpError>> + { + let options = match options { + Some(RequestOptions { + connect_timeout_ms, + first_byte_timeout_ms, + between_bytes_timeout_ms, + }) => { + let options = latest::http::types::HostRequestOptions::new(self)?; + let borrow = || Resource::new_borrow(request.rep()); + + if let Some(ms) = connect_timeout_ms { + if let Err(()) = latest::http::types::HostRequestOptions::set_connect_timeout( + self, + borrow(), + Some(ms.into()), + )? { + latest::http::types::HostRequestOptions::drop(self, options)?; + anyhow::bail!("invalid connect timeout supplied"); + } + } + + if let Some(ms) = first_byte_timeout_ms { + if let Err(()) = + latest::http::types::HostRequestOptions::set_first_byte_timeout( + self, + borrow(), + Some(ms.into()), + )? + { + latest::http::types::HostRequestOptions::drop(self, options)?; + anyhow::bail!("invalid first byte timeout supplied"); + } + } + + if let Some(ms) = between_bytes_timeout_ms { + if let Err(()) = + latest::http::types::HostRequestOptions::set_between_bytes_timeout( + self, + borrow(), + Some(ms.into()), + )? + { + latest::http::types::HostRequestOptions::drop(self, options)?; + anyhow::bail!("invalid between bytes timeout supplied"); + } + } + + Some(options) + } + None => None, + }; + match latest::http::outgoing_handler::Host::handle(self, request, options) { + Ok(resp) => Ok(Ok(resp)), + Err(e) => Ok(Err(e.downcast()?.into())), + } + } +} + +macro_rules! convert { + () => {}; + ($kind:ident $from:path [<=>] $to:path { $($body:tt)* } $($rest:tt)*) => { + convert!($kind $from => $to { $($body)* }); + convert!($kind $to => $from { $($body)* }); + + convert!($($rest)*); + }; + (struct $from:ty => $to:path { $($field:ident,)* } $($rest:tt)*) => { + impl From<$from> for $to { + fn from(e: $from) -> $to { + $to { + $( $field: e.$field.into(), )* + } + } + } + + convert!($($rest)*); + }; + (enum $from:path => $to:path { $($variant:ident $(($e:ident))?,)* } $($rest:tt)*) => { + impl From<$from> for $to { + fn from(e: $from) -> $to { + use $from as A; + use $to as B; + match e { + $( + A::$variant $(($e))? => B::$variant $(($e.into()))?, + )* + } + } + } + + convert!($($rest)*); + }; + (flags $from:path => $to:path { $($flag:ident,)* } $($rest:tt)*) => { + impl From<$from> for $to { + fn from(e: $from) -> $to { + use $from as A; + use $to as B; + let mut out = B::empty(); + $( + if e.contains(A::$flag) { + out |= B::$flag; + } + )* + out + } + } + + convert!($($rest)*); + }; +} + +pub(crate) use convert; + +convert! 
{ + enum latest::http::types::Method [<=>] Method { + Get, + Head, + Post, + Put, + Delete, + Connect, + Options, + Trace, + Patch, + Other(e), + } + + enum latest::http::types::Scheme [<=>] Scheme { + Http, + Https, + Other(e), + } +} + +impl From for HttpError { + fn from(e: latest::http::types::ErrorCode) -> HttpError { + // TODO: should probably categorize this better given the typed info + // we have in `e`. + HttpError::UnexpectedError(e.to_string()) + } +} diff --git a/crates/factor-outbound-http/src/wasi_2023_11_10.rs b/crates/factor-outbound-http/src/wasi_2023_11_10.rs new file mode 100644 index 0000000000..0a878cfcfb --- /dev/null +++ b/crates/factor-outbound-http/src/wasi_2023_11_10.rs @@ -0,0 +1,774 @@ +#![doc(hidden)] // internal implementation detail used in tests and spin-trigger + +use super::wasi_2023_10_18::convert; +use anyhow::Result; +use wasmtime::component::{Linker, Resource}; +use wasmtime_wasi_http::{WasiHttpImpl, WasiHttpView}; + +mod latest { + pub use wasmtime_wasi_http::bindings::wasi::*; + pub mod http { + pub use wasmtime_wasi_http::bindings::http::*; + } +} + +mod bindings { + use super::latest; + + wasmtime::component::bindgen!({ + path: "../../wit", + interfaces: r#" + include wasi:http/proxy@0.2.0-rc-2023-11-10; + "#, + async: { + // Only need async exports + only_imports: [], + }, + with: { + "wasi:io/poll/pollable": latest::io::poll::Pollable, + "wasi:io/streams/input-stream": latest::io::streams::InputStream, + "wasi:io/streams/output-stream": latest::io::streams::OutputStream, + "wasi:io/error/error": latest::io::error::Error, + "wasi:http/types/incoming-response": latest::http::types::IncomingResponse, + "wasi:http/types/incoming-request": latest::http::types::IncomingRequest, + "wasi:http/types/incoming-body": latest::http::types::IncomingBody, + "wasi:http/types/outgoing-response": latest::http::types::OutgoingResponse, + "wasi:http/types/outgoing-request": latest::http::types::OutgoingRequest, + "wasi:http/types/outgoing-body": latest::http::types::OutgoingBody, + "wasi:http/types/fields": latest::http::types::Fields, + "wasi:http/types/response-outparam": latest::http::types::ResponseOutparam, + "wasi:http/types/future-incoming-response": latest::http::types::FutureIncomingResponse, + "wasi:http/types/future-trailers": latest::http::types::FutureTrailers, + "wasi:http/types/request-options": latest::http::types::RequestOptions, + }, + trappable_imports: true, + }); +} + +mod wasi { + pub use super::bindings::wasi::{http0_2_0_rc_2023_11_10 as http, io0_2_0_rc_2023_11_10 as io}; +} + +pub mod exports { + pub mod wasi { + pub use super::super::bindings::exports::wasi::http0_2_0_rc_2023_11_10 as http; + } +} + +use wasi::http::types::{ + DnsErrorPayload, ErrorCode as HttpErrorCode, FieldSizePayload, Fields, FutureIncomingResponse, + FutureTrailers, HeaderError, Headers, IncomingBody, IncomingRequest, IncomingResponse, Method, + OutgoingBody, OutgoingRequest, OutgoingResponse, RequestOptions, ResponseOutparam, Scheme, + StatusCode, TlsAlertReceivedPayload, Trailers, +}; +use wasi::io::poll::Pollable; +use wasi::io::streams::{Error as IoError, InputStream, OutputStream}; + +use crate::wasi::WasiHttpImplInner; + +pub(crate) fn add_to_linker(linker: &mut Linker, closure: F) -> Result<()> +where + T: Send, + F: Fn(&mut T) -> WasiHttpImpl + Send + Sync + Copy + 'static, +{ + wasi::http::types::add_to_linker_get_host(linker, closure)?; + wasi::http::outgoing_handler::add_to_linker_get_host(linker, closure)?; + Ok(()) +} + +impl wasi::http::types::Host for 
WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn http_error_code( + &mut self, + error: Resource, + ) -> wasmtime::Result> { + latest::http::types::Host::http_error_code(self, error).map(|e| e.map(|e| e.into())) + } +} + +impl wasi::http::types::HostRequestOptions for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn new(&mut self) -> wasmtime::Result> { + latest::http::types::HostRequestOptions::new(self) + } + + fn connect_timeout_ms( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostRequestOptions::connect_timeout(self, self_) + } + + fn set_connect_timeout_ms( + &mut self, + self_: Resource, + duration: Option, + ) -> wasmtime::Result> { + latest::http::types::HostRequestOptions::set_connect_timeout(self, self_, duration) + } + + fn first_byte_timeout_ms( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostRequestOptions::first_byte_timeout(self, self_) + } + + fn set_first_byte_timeout_ms( + &mut self, + self_: Resource, + duration: Option, + ) -> wasmtime::Result> { + latest::http::types::HostRequestOptions::set_first_byte_timeout(self, self_, duration) + } + + fn between_bytes_timeout_ms( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostRequestOptions::between_bytes_timeout(self, self_) + } + + fn set_between_bytes_timeout_ms( + &mut self, + self_: Resource, + duration: Option, + ) -> wasmtime::Result> { + latest::http::types::HostRequestOptions::set_between_bytes_timeout(self, self_, duration) + } + + fn drop(&mut self, self_: Resource) -> wasmtime::Result<()> { + latest::http::types::HostRequestOptions::drop(self, self_) + } +} + +impl wasi::http::types::HostFields for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn new(&mut self) -> wasmtime::Result> { + latest::http::types::HostFields::new(self) + } + + fn from_list( + &mut self, + entries: Vec<(String, Vec)>, + ) -> wasmtime::Result, HeaderError>> { + latest::http::types::HostFields::from_list(self, entries).map(|r| r.map_err(|e| e.into())) + } + + fn get(&mut self, self_: Resource, name: String) -> wasmtime::Result>> { + latest::http::types::HostFields::get(self, self_, name) + } + + fn set( + &mut self, + self_: Resource, + name: String, + value: Vec>, + ) -> wasmtime::Result> { + latest::http::types::HostFields::set(self, self_, name, value) + .map(|r| r.map_err(|e| e.into())) + } + + fn delete( + &mut self, + self_: Resource, + name: String, + ) -> wasmtime::Result> { + latest::http::types::HostFields::delete(self, self_, name).map(|r| r.map_err(|e| e.into())) + } + + fn append( + &mut self, + self_: Resource, + name: String, + value: Vec, + ) -> wasmtime::Result> { + latest::http::types::HostFields::append(self, self_, name, value) + .map(|r| r.map_err(|e| e.into())) + } + + fn entries(&mut self, self_: Resource) -> wasmtime::Result)>> { + latest::http::types::HostFields::entries(self, self_) + } + + fn clone(&mut self, self_: Resource) -> wasmtime::Result> { + latest::http::types::HostFields::clone(self, self_) + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostFields::drop(self, rep) + } +} + +impl wasi::http::types::HostIncomingRequest for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn method(&mut self, self_: Resource) -> wasmtime::Result { + latest::http::types::HostIncomingRequest::method(self, self_).map(|e| e.into()) + } + + fn path_with_query( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + 
latest::http::types::HostIncomingRequest::path_with_query(self, self_) + } + + fn scheme(&mut self, self_: Resource) -> wasmtime::Result> { + latest::http::types::HostIncomingRequest::scheme(self, self_).map(|e| e.map(|e| e.into())) + } + + fn authority(&mut self, self_: Resource) -> wasmtime::Result> { + latest::http::types::HostIncomingRequest::authority(self, self_) + } + + fn headers(&mut self, self_: Resource) -> wasmtime::Result> { + latest::http::types::HostIncomingRequest::headers(self, self_) + } + + fn consume( + &mut self, + self_: Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostIncomingRequest::consume(self, self_) + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostIncomingRequest::drop(self, rep) + } +} + +impl wasi::http::types::HostIncomingResponse for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn status(&mut self, self_: Resource) -> wasmtime::Result { + latest::http::types::HostIncomingResponse::status(self, self_) + } + + fn headers( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostIncomingResponse::headers(self, self_) + } + + fn consume( + &mut self, + self_: Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostIncomingResponse::consume(self, self_) + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostIncomingResponse::drop(self, rep) + } +} + +impl wasi::http::types::HostIncomingBody for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn stream( + &mut self, + self_: Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostIncomingBody::stream(self, self_) + } + + fn finish( + &mut self, + this: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostIncomingBody::finish(self, this) + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostIncomingBody::drop(self, rep) + } +} + +impl wasi::http::types::HostOutgoingRequest for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn new(&mut self, headers: Resource) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::new(self, headers) + } + + fn method(&mut self, self_: Resource) -> wasmtime::Result { + latest::http::types::HostOutgoingRequest::method(self, self_).map(|m| m.into()) + } + + fn set_method( + &mut self, + self_: Resource, + method: Method, + ) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::set_method(self, self_, method.into()) + } + + fn path_with_query( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::path_with_query(self, self_) + } + + fn set_path_with_query( + &mut self, + self_: Resource, + path_with_query: Option, + ) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::set_path_with_query(self, self_, path_with_query) + } + + fn scheme(&mut self, self_: Resource) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::scheme(self, self_).map(|s| s.map(|s| s.into())) + } + + fn set_scheme( + &mut self, + self_: Resource, + scheme: Option, + ) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::set_scheme(self, self_, scheme.map(|s| s.into())) + } + + fn authority(&mut self, self_: Resource) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::authority(self, self_) + } + + fn set_authority( + &mut self, + self_: Resource, + authority: Option, + ) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::set_authority(self, 
self_, authority) + } + + fn headers(&mut self, self_: Resource) -> wasmtime::Result> { + latest::http::types::HostOutgoingRequest::headers(self, self_) + } + + fn body( + &mut self, + self_: Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostOutgoingRequest::body(self, self_) + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostOutgoingRequest::drop(self, rep) + } +} + +impl wasi::http::types::HostOutgoingResponse for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn new(&mut self, headers: Resource) -> wasmtime::Result> { + let headers = latest::http::types::HostFields::clone(self, headers)?; + latest::http::types::HostOutgoingResponse::new(self, headers) + } + + fn status_code(&mut self, self_: Resource) -> wasmtime::Result { + latest::http::types::HostOutgoingResponse::status_code(self, self_) + } + + fn set_status_code( + &mut self, + self_: Resource, + status_code: StatusCode, + ) -> wasmtime::Result> { + latest::http::types::HostOutgoingResponse::set_status_code(self, self_, status_code) + } + + fn headers( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostOutgoingResponse::headers(self, self_) + } + + fn body( + &mut self, + self_: Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostOutgoingResponse::body(self, self_) + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostOutgoingResponse::drop(self, rep) + } +} + +impl wasi::http::types::HostOutgoingBody for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn write( + &mut self, + self_: Resource, + ) -> wasmtime::Result, ()>> { + latest::http::types::HostOutgoingBody::write(self, self_) + } + + fn finish( + &mut self, + this: Resource, + trailers: Option>, + ) -> wasmtime::Result> { + match latest::http::types::HostOutgoingBody::finish(self, this, trailers) { + Ok(()) => Ok(Ok(())), + Err(e) => Ok(Err(e.downcast()?.into())), + } + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostOutgoingBody::drop(self, rep) + } +} + +impl wasi::http::types::HostResponseOutparam for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn set( + &mut self, + param: Resource, + response: Result, HttpErrorCode>, + ) -> wasmtime::Result<()> { + latest::http::types::HostResponseOutparam::set(self, param, response.map_err(|e| e.into())) + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostResponseOutparam::drop(self, rep) + } +} + +impl wasi::http::types::HostFutureTrailers for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn subscribe( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostFutureTrailers::subscribe(self, self_) + } + + fn get( + &mut self, + self_: Resource, + ) -> wasmtime::Result>, HttpErrorCode>>> { + match latest::http::types::HostFutureTrailers::get(self, self_)? 
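+        // Unlike the 2023-10-18 adapter above, this snapshot models trailers as
+        // `option<trailers>`, so a missing-trailers result can be forwarded as-is
+        // instead of being replaced with an empty `fields` resource.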
{ + Some(Ok(Ok(trailers))) => Ok(Some(Ok(trailers))), + Some(Ok(Err(e))) => Ok(Some(Err(e.into()))), + Some(Err(())) => Err(anyhow::anyhow!("trailers have already been retrieved")), + None => Ok(None), + } + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostFutureTrailers::drop(self, rep) + } +} + +impl wasi::http::types::HostFutureIncomingResponse for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn get( + &mut self, + self_: Resource, + ) -> wasmtime::Result, HttpErrorCode>, ()>>> + { + match latest::http::types::HostFutureIncomingResponse::get(self, self_)? { + None => Ok(None), + Some(Ok(Ok(response))) => Ok(Some(Ok(Ok(response)))), + Some(Ok(Err(e))) => Ok(Some(Ok(Err(e.into())))), + Some(Err(())) => Ok(Some(Err(()))), + } + } + + fn subscribe( + &mut self, + self_: Resource, + ) -> wasmtime::Result> { + latest::http::types::HostFutureIncomingResponse::subscribe(self, self_) + } + + fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { + latest::http::types::HostFutureIncomingResponse::drop(self, rep) + } +} + +impl wasi::http::outgoing_handler::Host for WasiHttpImpl +where + T: WasiHttpView + Send, +{ + fn handle( + &mut self, + request: Resource, + options: Option>, + ) -> wasmtime::Result, HttpErrorCode>> { + match latest::http::outgoing_handler::Host::handle(self, request, options) { + Ok(resp) => Ok(Ok(resp)), + Err(e) => Ok(Err(e.downcast()?.into())), + } + } +} + +convert! { + enum latest::http::types::Method [<=>] Method { + Get, + Head, + Post, + Put, + Delete, + Connect, + Options, + Trace, + Patch, + Other(e), + } + + enum latest::http::types::Scheme [<=>] Scheme { + Http, + Https, + Other(e), + } + + enum latest::http::types::HeaderError => HeaderError { + InvalidSyntax, + Forbidden, + Immutable, + } + + struct latest::http::types::DnsErrorPayload [<=>] DnsErrorPayload { + rcode, + info_code, + } + + struct latest::http::types::TlsAlertReceivedPayload [<=>] TlsAlertReceivedPayload { + alert_id, + alert_message, + } + + struct latest::http::types::FieldSizePayload [<=>] FieldSizePayload { + field_name, + field_size, + } +} + +impl From for HttpErrorCode { + fn from(e: latest::http::types::ErrorCode) -> Self { + match e { + latest::http::types::ErrorCode::DnsTimeout => HttpErrorCode::DnsTimeout, + latest::http::types::ErrorCode::DnsError(e) => HttpErrorCode::DnsError(e.into()), + latest::http::types::ErrorCode::DestinationNotFound => { + HttpErrorCode::DestinationNotFound + } + latest::http::types::ErrorCode::DestinationUnavailable => { + HttpErrorCode::DestinationUnavailable + } + latest::http::types::ErrorCode::DestinationIpProhibited => { + HttpErrorCode::DestinationIpProhibited + } + latest::http::types::ErrorCode::DestinationIpUnroutable => { + HttpErrorCode::DestinationIpUnroutable + } + latest::http::types::ErrorCode::ConnectionRefused => HttpErrorCode::ConnectionRefused, + latest::http::types::ErrorCode::ConnectionTerminated => { + HttpErrorCode::ConnectionTerminated + } + latest::http::types::ErrorCode::ConnectionTimeout => HttpErrorCode::ConnectionTimeout, + latest::http::types::ErrorCode::ConnectionReadTimeout => { + HttpErrorCode::ConnectionReadTimeout + } + latest::http::types::ErrorCode::ConnectionWriteTimeout => { + HttpErrorCode::ConnectionWriteTimeout + } + latest::http::types::ErrorCode::ConnectionLimitReached => { + HttpErrorCode::ConnectionLimitReached + } + latest::http::types::ErrorCode::TlsProtocolError => HttpErrorCode::TlsProtocolError, + latest::http::types::ErrorCode::TlsCertificateError => { + 
HttpErrorCode::TlsCertificateError + } + latest::http::types::ErrorCode::TlsAlertReceived(e) => { + HttpErrorCode::TlsAlertReceived(e.into()) + } + latest::http::types::ErrorCode::HttpRequestDenied => HttpErrorCode::HttpRequestDenied, + latest::http::types::ErrorCode::HttpRequestLengthRequired => { + HttpErrorCode::HttpRequestLengthRequired + } + latest::http::types::ErrorCode::HttpRequestBodySize(e) => { + HttpErrorCode::HttpRequestBodySize(e) + } + latest::http::types::ErrorCode::HttpRequestMethodInvalid => { + HttpErrorCode::HttpRequestMethodInvalid + } + latest::http::types::ErrorCode::HttpRequestUriInvalid => { + HttpErrorCode::HttpRequestUriInvalid + } + latest::http::types::ErrorCode::HttpRequestUriTooLong => { + HttpErrorCode::HttpRequestUriTooLong + } + latest::http::types::ErrorCode::HttpRequestHeaderSectionSize(e) => { + HttpErrorCode::HttpRequestHeaderSectionSize(e) + } + latest::http::types::ErrorCode::HttpRequestHeaderSize(e) => { + HttpErrorCode::HttpRequestHeaderSize(e.map(|e| e.into())) + } + latest::http::types::ErrorCode::HttpRequestTrailerSectionSize(e) => { + HttpErrorCode::HttpRequestTrailerSectionSize(e) + } + latest::http::types::ErrorCode::HttpRequestTrailerSize(e) => { + HttpErrorCode::HttpRequestTrailerSize(e.into()) + } + latest::http::types::ErrorCode::HttpResponseIncomplete => { + HttpErrorCode::HttpResponseIncomplete + } + latest::http::types::ErrorCode::HttpResponseHeaderSectionSize(e) => { + HttpErrorCode::HttpResponseHeaderSectionSize(e) + } + latest::http::types::ErrorCode::HttpResponseHeaderSize(e) => { + HttpErrorCode::HttpResponseHeaderSize(e.into()) + } + latest::http::types::ErrorCode::HttpResponseBodySize(e) => { + HttpErrorCode::HttpResponseBodySize(e) + } + latest::http::types::ErrorCode::HttpResponseTrailerSectionSize(e) => { + HttpErrorCode::HttpResponseTrailerSectionSize(e) + } + latest::http::types::ErrorCode::HttpResponseTrailerSize(e) => { + HttpErrorCode::HttpResponseTrailerSize(e.into()) + } + latest::http::types::ErrorCode::HttpResponseTransferCoding(e) => { + HttpErrorCode::HttpResponseTransferCoding(e) + } + latest::http::types::ErrorCode::HttpResponseContentCoding(e) => { + HttpErrorCode::HttpResponseContentCoding(e) + } + latest::http::types::ErrorCode::HttpResponseTimeout => { + HttpErrorCode::HttpResponseTimeout + } + latest::http::types::ErrorCode::HttpUpgradeFailed => HttpErrorCode::HttpUpgradeFailed, + latest::http::types::ErrorCode::HttpProtocolError => HttpErrorCode::HttpProtocolError, + latest::http::types::ErrorCode::LoopDetected => HttpErrorCode::LoopDetected, + latest::http::types::ErrorCode::ConfigurationError => HttpErrorCode::ConfigurationError, + latest::http::types::ErrorCode::InternalError(e) => HttpErrorCode::InternalError(e), + } + } +} + +impl From for latest::http::types::ErrorCode { + fn from(e: HttpErrorCode) -> Self { + match e { + HttpErrorCode::DnsTimeout => latest::http::types::ErrorCode::DnsTimeout, + HttpErrorCode::DnsError(e) => latest::http::types::ErrorCode::DnsError(e.into()), + HttpErrorCode::DestinationNotFound => { + latest::http::types::ErrorCode::DestinationNotFound + } + HttpErrorCode::DestinationUnavailable => { + latest::http::types::ErrorCode::DestinationUnavailable + } + HttpErrorCode::DestinationIpProhibited => { + latest::http::types::ErrorCode::DestinationIpProhibited + } + HttpErrorCode::DestinationIpUnroutable => { + latest::http::types::ErrorCode::DestinationIpUnroutable + } + HttpErrorCode::ConnectionRefused => latest::http::types::ErrorCode::ConnectionRefused, + 
HttpErrorCode::ConnectionTerminated => { + latest::http::types::ErrorCode::ConnectionTerminated + } + HttpErrorCode::ConnectionTimeout => latest::http::types::ErrorCode::ConnectionTimeout, + HttpErrorCode::ConnectionReadTimeout => { + latest::http::types::ErrorCode::ConnectionReadTimeout + } + HttpErrorCode::ConnectionWriteTimeout => { + latest::http::types::ErrorCode::ConnectionWriteTimeout + } + HttpErrorCode::ConnectionLimitReached => { + latest::http::types::ErrorCode::ConnectionLimitReached + } + HttpErrorCode::TlsProtocolError => latest::http::types::ErrorCode::TlsProtocolError, + HttpErrorCode::TlsCertificateError => { + latest::http::types::ErrorCode::TlsCertificateError + } + HttpErrorCode::TlsAlertReceived(e) => { + latest::http::types::ErrorCode::TlsAlertReceived(e.into()) + } + HttpErrorCode::HttpRequestDenied => latest::http::types::ErrorCode::HttpRequestDenied, + HttpErrorCode::HttpRequestLengthRequired => { + latest::http::types::ErrorCode::HttpRequestLengthRequired + } + HttpErrorCode::HttpRequestBodySize(e) => { + latest::http::types::ErrorCode::HttpRequestBodySize(e) + } + HttpErrorCode::HttpRequestMethodInvalid => { + latest::http::types::ErrorCode::HttpRequestMethodInvalid + } + HttpErrorCode::HttpRequestUriInvalid => { + latest::http::types::ErrorCode::HttpRequestUriInvalid + } + HttpErrorCode::HttpRequestUriTooLong => { + latest::http::types::ErrorCode::HttpRequestUriTooLong + } + HttpErrorCode::HttpRequestHeaderSectionSize(e) => { + latest::http::types::ErrorCode::HttpRequestHeaderSectionSize(e) + } + HttpErrorCode::HttpRequestHeaderSize(e) => { + latest::http::types::ErrorCode::HttpRequestHeaderSize(e.map(|e| e.into())) + } + HttpErrorCode::HttpRequestTrailerSectionSize(e) => { + latest::http::types::ErrorCode::HttpRequestTrailerSectionSize(e) + } + HttpErrorCode::HttpRequestTrailerSize(e) => { + latest::http::types::ErrorCode::HttpRequestTrailerSize(e.into()) + } + HttpErrorCode::HttpResponseIncomplete => { + latest::http::types::ErrorCode::HttpResponseIncomplete + } + HttpErrorCode::HttpResponseHeaderSectionSize(e) => { + latest::http::types::ErrorCode::HttpResponseHeaderSectionSize(e) + } + HttpErrorCode::HttpResponseHeaderSize(e) => { + latest::http::types::ErrorCode::HttpResponseHeaderSize(e.into()) + } + HttpErrorCode::HttpResponseBodySize(e) => { + latest::http::types::ErrorCode::HttpResponseBodySize(e) + } + HttpErrorCode::HttpResponseTrailerSectionSize(e) => { + latest::http::types::ErrorCode::HttpResponseTrailerSectionSize(e) + } + HttpErrorCode::HttpResponseTrailerSize(e) => { + latest::http::types::ErrorCode::HttpResponseTrailerSize(e.into()) + } + HttpErrorCode::HttpResponseTransferCoding(e) => { + latest::http::types::ErrorCode::HttpResponseTransferCoding(e) + } + HttpErrorCode::HttpResponseContentCoding(e) => { + latest::http::types::ErrorCode::HttpResponseContentCoding(e) + } + HttpErrorCode::HttpResponseTimeout => { + latest::http::types::ErrorCode::HttpResponseTimeout + } + HttpErrorCode::HttpUpgradeFailed => latest::http::types::ErrorCode::HttpUpgradeFailed, + HttpErrorCode::HttpProtocolError => latest::http::types::ErrorCode::HttpProtocolError, + HttpErrorCode::LoopDetected => latest::http::types::ErrorCode::LoopDetected, + HttpErrorCode::ConfigurationError => latest::http::types::ErrorCode::ConfigurationError, + HttpErrorCode::InternalError(e) => latest::http::types::ErrorCode::InternalError(e), + } + } +} diff --git a/crates/factor-outbound-http/tests/factor_test.rs b/crates/factor-outbound-http/tests/factor_test.rs new file mode 100644 index 
0000000000..42bfe7c5ac --- /dev/null +++ b/crates/factor-outbound-http/tests/factor_test.rs @@ -0,0 +1,97 @@ +use std::time::Duration; + +use anyhow::bail; +use http::{Request, Uri}; +use spin_factor_outbound_http::{OutboundHttpFactor, SelfRequestOrigin}; +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factor_variables::VariablesFactor; +use spin_factors::{anyhow, RuntimeFactors}; +use spin_factors_test::{toml, TestEnvironment}; +use wasmtime_wasi::Subscribe; +use wasmtime_wasi_http::{ + bindings::http::types::ErrorCode, types::OutgoingRequestConfig, WasiHttpView, +}; + +#[derive(RuntimeFactors)] +struct TestFactors { + variables: VariablesFactor, + networking: OutboundNetworkingFactor, + http: OutboundHttpFactor, +} + +#[tokio::test] +async fn allowed_host_is_allowed() -> anyhow::Result<()> { + let mut state = test_instance_state("https://*").await?; + let mut wasi_http = OutboundHttpFactor::get_wasi_http_impl(&mut state).unwrap(); + + // [100::] is an IPv6 "black hole", which should always fail + let req = Request::get("https://[100::1]:443").body(Default::default())?; + let mut future_resp = wasi_http.send_request(req, test_request_config())?; + future_resp.ready().await; + + // We don't want to make an actual network request, so treat "connection refused" as success + match future_resp.unwrap_ready().unwrap() { + Ok(_) => bail!("expected Err, got Ok"), + Err(err) => assert!(matches!(err, ErrorCode::ConnectionRefused), "{err:?}"), + }; + Ok(()) +} + +#[tokio::test] +async fn self_request_smoke_test() -> anyhow::Result<()> { + let mut state = test_instance_state("http://self").await?; + let origin = SelfRequestOrigin::from_uri(&Uri::from_static("http://[100::1]"))?; + state.http.set_self_request_origin(origin); + + let mut wasi_http = OutboundHttpFactor::get_wasi_http_impl(&mut state).unwrap(); + let req = Request::get("/self-request").body(Default::default())?; + let mut future_resp = wasi_http.send_request(req, test_request_config())?; + future_resp.ready().await; + + // We don't want to make an actual network request, so treat "connection refused" as success + match future_resp.unwrap_ready().unwrap() { + Ok(_) => bail!("expected Err, got Ok"), + Err(err) => assert!(matches!(err, ErrorCode::ConnectionRefused), "{err:?}"), + }; + Ok(()) +} + +#[tokio::test] +async fn disallowed_host_fails() -> anyhow::Result<()> { + let mut state = test_instance_state("https://allowed.test").await?; + let mut wasi_http = OutboundHttpFactor::get_wasi_http_impl(&mut state).unwrap(); + + let req = Request::get("https://denied.test").body(Default::default())?; + let mut future_resp = wasi_http.send_request(req, test_request_config())?; + future_resp.ready().await; + match future_resp.unwrap_ready().unwrap() { + Ok(_) => bail!("expected Err, got Ok"), + Err(err) => assert!(matches!(err, ErrorCode::HttpRequestDenied)), + }; + Ok(()) +} + +async fn test_instance_state( + allowed_outbound_hosts: &str, +) -> anyhow::Result { + let factors = TestFactors { + variables: VariablesFactor::default(), + networking: OutboundNetworkingFactor::new(), + http: OutboundHttpFactor::new(), + }; + let env = TestEnvironment::new(factors).extend_manifest(toml! 
{ + [component.test-component] + source = "does-not-exist.wasm" + allowed_outbound_hosts = [allowed_outbound_hosts] + }); + env.build_instance_state().await +} + +fn test_request_config() -> OutgoingRequestConfig { + OutgoingRequestConfig { + use_tls: false, + connect_timeout: Duration::from_secs(60), + first_byte_timeout: Duration::from_secs(60), + between_bytes_timeout: Duration::from_secs(60), + } +} diff --git a/crates/factor-outbound-mqtt/Cargo.toml b/crates/factor-outbound-mqtt/Cargo.toml new file mode 100644 index 0000000000..95d7dce534 --- /dev/null +++ b/crates/factor-outbound-mqtt/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "spin-factor-outbound-mqtt" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +anyhow = "1.0" +rumqttc = { version = "0.24", features = ["url"] } +spin-factor-outbound-networking = { path = "../factor-outbound-networking" } +spin-factors = { path = "../factors" } +spin-core = { path = "../core" } +spin-world = { path = "../world" } +table = { path = "../table" } +tokio = { version = "1.0", features = ["sync"] } +tracing = { workspace = true } + +[dev-dependencies] +spin-factor-variables = { path = "../factor-variables" } +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } + +[lints] +workspace = true diff --git a/crates/factor-outbound-mqtt/src/host.rs b/crates/factor-outbound-mqtt/src/host.rs new file mode 100644 index 0000000000..460b888c16 --- /dev/null +++ b/crates/factor-outbound-mqtt/src/host.rs @@ -0,0 +1,124 @@ +use std::{sync::Arc, time::Duration}; + +use anyhow::Result; +use spin_core::{async_trait, wasmtime::component::Resource}; +use spin_factor_outbound_networking::OutboundAllowedHosts; +use spin_world::v2::mqtt::{self as v2, Connection, Error, Qos}; +use tracing::{instrument, Level}; + +use crate::ClientCreator; + +pub struct InstanceState { + allowed_hosts: OutboundAllowedHosts, + connections: table::Table>, + create_client: Arc, +} + +impl InstanceState { + pub fn new(allowed_hosts: OutboundAllowedHosts, create_client: Arc) -> Self { + Self { + allowed_hosts, + create_client, + connections: table::Table::new(1024), + } + } +} + +#[async_trait] +pub trait MqttClient: Send + Sync { + async fn publish_bytes(&self, topic: String, qos: Qos, payload: Vec) -> Result<(), Error>; +} + +impl InstanceState { + async fn is_address_allowed(&self, address: &str) -> Result { + self.allowed_hosts.check_url(address, "mqtt").await + } + + async fn establish_connection( + &mut self, + address: String, + username: String, + password: String, + keep_alive_interval: Duration, + ) -> Result, Error> { + self.connections + .push((self.create_client).create(address, username, password, keep_alive_interval)?) 
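+            // `Table::push` returns the new slot index on success, which becomes the
+            // `connection` resource handle; a full table (capacity 1024, set above)
+            // surfaces as `Error::TooManyConnections`.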
+ .map(Resource::new_own) + .map_err(|_| Error::TooManyConnections) + } + + async fn get_conn(&self, connection: Resource) -> Result<&dyn MqttClient, Error> { + self.connections + .get(connection.rep()) + .ok_or(Error::Other( + "could not find connection for resource".into(), + )) + .map(|c| c.as_ref()) + } +} + +impl v2::Host for InstanceState { + fn convert_error(&mut self, error: Error) -> Result { + Ok(error) + } +} + +#[async_trait] +impl v2::HostConnection for InstanceState { + #[instrument(name = "spin_outbound_mqtt.open_connection", skip(self, password), err(level = Level::INFO), fields(otel.kind = "client"))] + async fn open( + &mut self, + address: String, + username: String, + password: String, + keep_alive_interval: u64, + ) -> Result, Error> { + if !self + .is_address_allowed(&address) + .await + .map_err(|e| v2::Error::Other(e.to_string()))? + { + return Err(v2::Error::ConnectionFailed(format!( + "address {address} is not permitted" + ))); + } + self.establish_connection( + address, + username, + password, + Duration::from_secs(keep_alive_interval), + ) + .await + } + + /// Publish a message to the MQTT broker. + /// + /// OTEL trace propagation is not directly supported in MQTT V3. You will need to embed the + /// current trace context into the payload yourself. + /// https://w3c.github.io/trace-context-mqtt/#mqtt-v3-recommendation. + #[instrument(name = "spin_outbound_mqtt.publish", skip(self, connection, payload), err(level = Level::INFO), + fields(otel.kind = "producer", otel.name = format!("{} publish", topic), messaging.operation = "publish", + messaging.system = "mqtt"))] + async fn publish( + &mut self, + connection: Resource, + topic: String, + payload: Vec, + qos: Qos, + ) -> Result<(), Error> { + let conn = self.get_conn(connection).await.map_err(other_error)?; + + conn.publish_bytes(topic, qos, payload).await?; + + Ok(()) + } + + fn drop(&mut self, connection: Resource) -> anyhow::Result<()> { + self.connections.remove(connection.rep()); + Ok(()) + } +} + +pub fn other_error(e: impl std::fmt::Display) -> Error { + Error::Other(e.to_string()) +} diff --git a/crates/factor-outbound-mqtt/src/lib.rs b/crates/factor-outbound-mqtt/src/lib.rs new file mode 100644 index 0000000000..21e4242f96 --- /dev/null +++ b/crates/factor-outbound-mqtt/src/lib.rs @@ -0,0 +1,169 @@ +mod host; + +use std::sync::Arc; +use std::time::Duration; + +use host::other_error; +use host::InstanceState; +use rumqttc::{AsyncClient, Event, Incoming, Outgoing, QoS}; +use spin_core::async_trait; +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factors::{ + ConfigureAppContext, Factor, InstanceBuilders, PrepareContext, RuntimeFactors, + SelfInstanceBuilder, +}; +use spin_world::v2::mqtt::{self as v2, Error, Qos}; +use tokio::sync::Mutex; + +pub use host::MqttClient; + +pub struct OutboundMqttFactor { + create_client: Arc, +} + +impl OutboundMqttFactor { + pub fn new(create_client: Arc) -> Self { + Self { create_client } + } +} + +impl Factor for OutboundMqttFactor { + type RuntimeConfig = (); + type AppState = (); + type InstanceBuilder = InstanceState; + + fn init( + &mut self, + mut ctx: spin_factors::InitContext, + ) -> anyhow::Result<()> { + ctx.link_bindings(spin_world::v2::mqtt::add_to_linker)?; + Ok(()) + } + + fn configure_app( + &self, + _ctx: ConfigureAppContext, + ) -> anyhow::Result { + Ok(()) + } + + fn prepare( + &self, + _ctx: PrepareContext, + builders: &mut InstanceBuilders, + ) -> anyhow::Result { + let allowed_hosts = builders + .get_mut::()? 
+ .allowed_hosts(); + Ok(InstanceState::new( + allowed_hosts, + self.create_client.clone(), + )) + } +} + +impl SelfInstanceBuilder for InstanceState {} + +// This is a concrete implementation of the MQTT client using rumqttc. +pub struct NetworkedMqttClient { + inner: rumqttc::AsyncClient, + event_loop: Mutex, +} + +const MQTT_CHANNEL_CAP: usize = 1000; + +impl NetworkedMqttClient { + /// Create a [`ClientCreator`] that creates a [`NetworkedMqttClient`]. + pub fn creator() -> Arc { + Arc::new(|address, username, password, keep_alive_interval| { + Ok(Arc::new(NetworkedMqttClient::create( + address, + username, + password, + keep_alive_interval, + )?) as _) + }) + } + + /// Create a new [`NetworkedMqttClient`] with the given address, username, password, and keep alive interval. + pub fn create( + address: String, + username: String, + password: String, + keep_alive_interval: Duration, + ) -> Result { + let mut conn_opts = rumqttc::MqttOptions::parse_url(address).map_err(|e| { + tracing::error!("MQTT URL parse error: {e:?}"); + Error::InvalidAddress + })?; + conn_opts.set_credentials(username, password); + conn_opts.set_keep_alive(keep_alive_interval); + let (client, event_loop) = AsyncClient::new(conn_opts, MQTT_CHANNEL_CAP); + Ok(Self { + inner: client, + event_loop: Mutex::new(event_loop), + }) + } +} + +#[async_trait] +impl MqttClient for NetworkedMqttClient { + async fn publish_bytes(&self, topic: String, qos: Qos, payload: Vec) -> Result<(), Error> { + let qos = match qos { + Qos::AtMostOnce => rumqttc::QoS::AtMostOnce, + Qos::AtLeastOnce => rumqttc::QoS::AtLeastOnce, + Qos::ExactlyOnce => rumqttc::QoS::ExactlyOnce, + }; + // Message published to EventLoop (not MQTT Broker) + self.inner + .publish_bytes(topic, qos, false, payload.into()) + .await + .map_err(other_error)?; + + // Poll event loop until outgoing publish event is iterated over to send the message to MQTT broker or capture/throw error. + // We may revisit this later to manage long running connections, high throughput use cases and their issues in the connection pool. + let mut lock = self.event_loop.lock().await; + loop { + let event = lock + .poll() + .await + .map_err(|err| v2::Error::ConnectionFailed(err.to_string()))?; + + match (qos, event) { + (QoS::AtMostOnce, Event::Outgoing(Outgoing::Publish(_))) + | (QoS::AtLeastOnce, Event::Incoming(Incoming::PubAck(_))) + | (QoS::ExactlyOnce, Event::Incoming(Incoming::PubComp(_))) => break, + + (_, _) => continue, + } + } + Ok(()) + } +} + +/// A trait for creating MQTT client. 
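+///
+/// The creator is what [`OutboundMqttFactor`] calls to open connections, which lets
+/// tests inject a mock client. Any `Send + Sync` closure with the matching signature
+/// also qualifies via the blanket impl below. An illustrative sketch (types spelled
+/// out for clarity; `MockMqttClient` stands in for any [`MqttClient`] impl):
+///
+/// ```ignore
+/// let creator: Arc<dyn ClientCreator> = Arc::new(
+///     |_addr: String, _user: String, _pass: String, _keep_alive: Duration|
+///         -> Result<Arc<dyn MqttClient>, Error> { Ok(Arc::new(MockMqttClient {})) },
+/// );
+/// let factor = OutboundMqttFactor::new(creator);
+/// ```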
+#[async_trait] +pub trait ClientCreator: Send + Sync { + fn create( + &self, + address: String, + username: String, + password: String, + keep_alive_interval: Duration, + ) -> Result, Error>; +} + +impl ClientCreator for F +where + F: Fn(String, String, String, Duration) -> Result, Error> + Send + Sync, +{ + fn create( + &self, + address: String, + username: String, + password: String, + keep_alive_interval: Duration, + ) -> Result, Error> { + self(address, username, password, keep_alive_interval) + } +} diff --git a/crates/factor-outbound-mqtt/tests/factor_test.rs b/crates/factor-outbound-mqtt/tests/factor_test.rs new file mode 100644 index 0000000000..e507d65207 --- /dev/null +++ b/crates/factor-outbound-mqtt/tests/factor_test.rs @@ -0,0 +1,132 @@ +use std::sync::Arc; +use std::time::Duration; + +use anyhow::{bail, Result}; +use spin_core::async_trait; +use spin_factor_outbound_mqtt::{ClientCreator, MqttClient, OutboundMqttFactor}; +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factor_variables::VariablesFactor; +use spin_factors::{anyhow, RuntimeFactors}; +use spin_factors_test::{toml, TestEnvironment}; +use spin_world::v2::mqtt::{self as v2, Error, HostConnection, Qos}; + +pub struct MockMqttClient {} + +#[async_trait] +impl MqttClient for MockMqttClient { + async fn publish_bytes( + &self, + _topic: String, + _qos: Qos, + _payload: Vec, + ) -> Result<(), Error> { + Ok(()) + } +} + +impl ClientCreator for MockMqttClient { + fn create( + &self, + _address: String, + _username: String, + _password: String, + _keep_alive_interval: Duration, + ) -> Result, Error> { + Ok(Arc::new(MockMqttClient {})) + } +} + +#[derive(RuntimeFactors)] +struct TestFactors { + variables: VariablesFactor, + networking: OutboundNetworkingFactor, + mqtt: OutboundMqttFactor, +} + +fn factors() -> TestFactors { + TestFactors { + variables: VariablesFactor::default(), + networking: OutboundNetworkingFactor::new(), + mqtt: OutboundMqttFactor::new(Arc::new(MockMqttClient {})), + } +} + +fn test_env() -> TestEnvironment { + TestEnvironment::new(factors()).extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + allowed_outbound_hosts = ["mqtt://*:*"] + }) +} + +#[tokio::test] +async fn disallowed_host_fails() -> anyhow::Result<()> { + let env = TestEnvironment::new(factors()).extend_manifest(toml! 
{ + [component.test-component] + source = "does-not-exist.wasm" + }); + let mut state = env.build_instance_state().await?; + + let res = state + .mqtt + .open( + "mqtt://mqtt.test:1883".to_string(), + "username".to_string(), + "password".to_string(), + 1, + ) + .await; + let Err(err) = res else { + bail!("expected Err, got Ok"); + }; + assert!(matches!(err, v2::Error::ConnectionFailed(_))); + + Ok(()) +} + +#[tokio::test] +async fn allowed_host_succeeds() -> anyhow::Result<()> { + let mut state = test_env().build_instance_state().await?; + + let res = state + .mqtt + .open( + "mqtt://mqtt.test:1883".to_string(), + "username".to_string(), + "password".to_string(), + 1, + ) + .await; + let Ok(_) = res else { + bail!("expected Ok, got Err"); + }; + + Ok(()) +} + +#[tokio::test] +async fn exercise_publish() -> anyhow::Result<()> { + let mut state = test_env().build_instance_state().await?; + + let res = state + .mqtt + .open( + "mqtt://mqtt.test:1883".to_string(), + "username".to_string(), + "password".to_string(), + 1, + ) + .await?; + + state + .mqtt + .publish( + res, + "message".to_string(), + b"test message".to_vec(), + Qos::ExactlyOnce, + ) + .await?; + + Ok(()) +} diff --git a/crates/outbound-mysql/Cargo.toml b/crates/factor-outbound-mysql/Cargo.toml similarity index 75% rename from crates/outbound-mysql/Cargo.toml rename to crates/factor-outbound-mysql/Cargo.toml index bfc49cd676..da9324eabe 100644 --- a/crates/outbound-mysql/Cargo.toml +++ b/crates/factor-outbound-mysql/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "outbound-mysql" +name = "spin-factor-outbound-mysql" version = { workspace = true } authors = { workspace = true } edition = { workspace = true } @@ -19,6 +19,8 @@ mysql_common = { version = "0.31.0", default-features = false } spin-app = { path = "../app" } spin-core = { path = "../core" } spin-expressions = { path = "../expressions" } +spin-factor-outbound-networking = { path = "../factor-outbound-networking" } +spin-factors = { path = "../factors"} spin-outbound-networking = { path = "../outbound-networking" } spin-world = { path = "../world" } table = { path = "../table" } @@ -26,5 +28,9 @@ tokio = { version = "1", features = ["rt-multi-thread"] } tracing = { version = "0.1", features = ["log"] } url = "2.3.1" +[dev-dependencies] +spin-factor-variables = { path = "../factor-variables" } +spin-factors-test = { path = "../factors-test" } + [lints] workspace = true diff --git a/crates/outbound-mysql/src/lib.rs b/crates/factor-outbound-mysql/src/client.rs similarity index 63% rename from crates/outbound-mysql/src/lib.rs rename to crates/factor-outbound-mysql/src/client.rs index 78f369f47c..335074d68d 100644 --- a/crates/outbound-mysql/src/lib.rs +++ b/crates/factor-outbound-mysql/src/client.rs @@ -1,133 +1,72 @@ -use anyhow::{Context, Result}; -use mysql_async::{consts::ColumnType, from_value_opt, prelude::*, Opts, OptsBuilder, SslOpts}; -use spin_app::DynamicHostComponent; -use spin_core::wasmtime::component::Resource; -use spin_core::{async_trait, HostComponent}; -use spin_world::v1::mysql as v1; -use spin_world::v2::mysql::{self as v2, Connection}; -use spin_world::v2::rdbms_types as v2_types; -use spin_world::v2::rdbms_types::{Column, DbDataType, DbValue, ParameterValue}; use std::sync::Arc; -use tracing::{instrument, Level}; -use url::Url; - -/// A simple implementation to support outbound mysql connection -pub struct OutboundMysqlComponent { - pub resolver: spin_expressions::SharedPreparedResolver, -} -#[derive(Default)] -pub struct OutboundMysql { - allowed_hosts: 
spin_outbound_networking::AllowedHostsConfig, - pub connections: table::Table, -} +use anyhow::{anyhow, Result}; +use mysql_async::consts::ColumnType; +use mysql_async::prelude::{FromValue, Queryable as _}; +use mysql_async::{from_value_opt, Conn as MysqlClient, Opts, OptsBuilder, SslOpts}; +use spin_core::async_trait; +use spin_world::v2::mysql::{self as v2}; +use spin_world::v2::rdbms_types::{ + self as v2_types, Column, DbDataType, DbValue, ParameterValue, RowSet, +}; +use url::Url; -impl OutboundMysql { - async fn open_connection(&mut self, address: &str) -> Result, v2::Error> { - self.connections - .push( - build_conn(address) - .await - .map_err(|e| v2::Error::ConnectionFailed(format!("{e:?}")))?, - ) - .map_err(|_| v2::Error::ConnectionFailed("too many connections".into())) - .map(Resource::new_own) - } +#[async_trait] +pub trait Client: Send + Sync + 'static { + async fn build_client(address: &str) -> Result + where + Self: Sized; - async fn get_conn( + async fn execute( &mut self, - connection: Resource, - ) -> Result<&mut mysql_async::Conn, v2::Error> { - self.connections - .get_mut(connection.rep()) - .ok_or_else(|| v2::Error::ConnectionFailed("no connection found".into())) - } + statement: String, + params: Vec, + ) -> Result<(), v2::Error>; - fn is_address_allowed(&self, address: &str) -> bool { - spin_outbound_networking::check_url(address, "mysql", &self.allowed_hosts) - } + async fn query( + &mut self, + statement: String, + params: Vec, + ) -> Result; } -impl HostComponent for OutboundMysqlComponent { - type Data = OutboundMysql; - - fn add_to_linker( - linker: &mut spin_core::Linker, - get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> anyhow::Result<()> { - v2::add_to_linker(linker, get)?; - v1::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - Default::default() - } -} +#[async_trait] +impl Client for MysqlClient { + async fn build_client(address: &str) -> Result + where + Self: Sized, + { + tracing::debug!("Build new connection: {}", address); -impl DynamicHostComponent for OutboundMysqlComponent { - fn update_data( - &self, - data: &mut Self::Data, - component: &spin_app::AppComponent, - ) -> anyhow::Result<()> { - let hosts = component - .get_metadata(spin_outbound_networking::ALLOWED_HOSTS_KEY)? 
- .unwrap_or_default(); - data.allowed_hosts = spin_outbound_networking::AllowedHostsConfig::parse( - &hosts, - self.resolver.get().unwrap(), - ) - .context("`allowed_outbound_hosts` contained an invalid url")?; - Ok(()) - } -} + let opts = build_opts(address)?; -impl v2::Host for OutboundMysql {} + let connection_pool = mysql_async::Pool::new(opts); -#[async_trait] -impl v2::HostConnection for OutboundMysql { - #[instrument(name = "spin_outbound_mysql.open_connection", skip(self), err(level = Level::INFO), fields(otel.kind = "client", db.system = "mysql"))] - async fn open(&mut self, address: String) -> Result, v2::Error> { - if !self.is_address_allowed(&address) { - return Err(v2::Error::ConnectionFailed(format!( - "address {address} is not permitted" - ))); - } - self.open_connection(&address).await + connection_pool.get_conn().await.map_err(|e| anyhow!(e)) } - #[instrument(name = "spin_outbound_mysql.execute", skip(self, connection), err(level = Level::INFO), fields(otel.kind = "client", db.system = "mysql", otel.name = statement))] async fn execute( &mut self, - connection: Resource, statement: String, params: Vec, ) -> Result<(), v2::Error> { let db_params = params.into_iter().map(to_sql_parameter).collect::>(); let parameters = mysql_async::Params::Positional(db_params); - self.get_conn(connection) - .await? - .exec_batch(&statement, &[parameters]) + self.exec_batch(&statement, &[parameters]) .await - .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e)))?; - - Ok(()) + .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e))) } - #[instrument(name = "spin_outbound_mysql.query", skip(self, connection), err(level = Level::INFO), fields(otel.kind = "client", db.system = "mysql", otel.name = statement))] async fn query( &mut self, - connection: Resource, statement: String, params: Vec, - ) -> Result { + ) -> Result { let db_params = params.into_iter().map(to_sql_parameter).collect::>(); let parameters = mysql_async::Params::Positional(db_params); let mut query_result = self - .get_conn(connection) - .await? .exec_iter(&statement, parameters) .await .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e)))?; @@ -147,69 +86,6 @@ impl v2::HostConnection for OutboundMysql { } } } - - fn drop(&mut self, connection: Resource) -> Result<()> { - self.connections.remove(connection.rep()); - Ok(()) - } -} - -impl v2_types::Host for OutboundMysql { - fn convert_error(&mut self, error: v2::Error) -> Result { - Ok(error) - } -} - -/// Delegate a function call to the v2::HostConnection implementation -macro_rules! 
delegate { - ($self:ident.$name:ident($address:expr, $($arg:expr),*)) => {{ - if !$self.is_address_allowed(&$address) { - return Err(v1::MysqlError::ConnectionFailed(format!( - "address {} is not permitted", $address - ))); - } - let connection = match $self.open_connection(&$address).await { - Ok(c) => c, - Err(e) => return Err(e.into()), - }; - ::$name($self, connection, $($arg),*) - .await - .map_err(Into::into) - }}; -} - -#[async_trait] -impl v1::Host for OutboundMysql { - async fn execute( - &mut self, - address: String, - statement: String, - params: Vec, - ) -> Result<(), v1::MysqlError> { - delegate!(self.execute( - address, - statement, - params.into_iter().map(Into::into).collect() - )) - } - - async fn query( - &mut self, - address: String, - statement: String, - params: Vec, - ) -> Result { - delegate!(self.query( - address, - statement, - params.into_iter().map(Into::into).collect() - )) - .map(Into::into) - } - - fn convert_mysql_error(&mut self, error: v1::MysqlError) -> Result { - Ok(error) - } } fn to_sql_parameter(value: ParameterValue) -> mysql_async::Value { @@ -347,16 +223,6 @@ fn convert_value(value: mysql_async::Value, column: &Column) -> Result Result { - tracing::debug!("Build new connection: {}", address); - - let opts = build_opts(address)?; - - let connection_pool = mysql_async::Pool::new(opts); - - connection_pool.get_conn().await -} - fn is_ssl_param(s: &str) -> bool { ["ssl-mode", "sslmode"].contains(&s.to_lowercase().as_str()) } diff --git a/crates/factor-outbound-mysql/src/host.rs b/crates/factor-outbound-mysql/src/host.rs new file mode 100644 index 0000000000..b28a340a8f --- /dev/null +++ b/crates/factor-outbound-mysql/src/host.rs @@ -0,0 +1,145 @@ +use anyhow::Result; +use spin_core::async_trait; +use spin_core::wasmtime::component::Resource; +use spin_world::v1::mysql as v1; +use spin_world::v2::mysql::{self as v2, Connection}; +use spin_world::v2::rdbms_types as v2_types; +use spin_world::v2::rdbms_types::ParameterValue; +use tracing::{instrument, Level}; + +use crate::client::Client; +use crate::InstanceState; + +impl InstanceState { + async fn open_connection(&mut self, address: &str) -> Result, v2::Error> { + self.connections + .push( + C::build_client(address) + .await + .map_err(|e| v2::Error::ConnectionFailed(format!("{e:?}")))?, + ) + .map_err(|_| v2::Error::ConnectionFailed("too many connections".into())) + .map(Resource::new_own) + } + + async fn get_client(&mut self, connection: Resource) -> Result<&mut C, v2::Error> { + self.connections + .get_mut(connection.rep()) + .ok_or_else(|| v2::Error::ConnectionFailed("no connection found".into())) + } + + async fn is_address_allowed(&self, address: &str) -> Result { + self.allowed_hosts.check_url(address, "mysql").await + } +} + +#[async_trait] +impl v2::Host for InstanceState {} + +#[async_trait] +impl v2::HostConnection for InstanceState { + #[instrument(name = "spin_outbound_mysql.open_connection", skip(self), err(level = Level::INFO), fields(otel.kind = "client", db.system = "mysql"))] + async fn open(&mut self, address: String) -> Result, v2::Error> { + if !self + .is_address_allowed(&address) + .await + .map_err(|e| v2::Error::Other(e.to_string()))? 
+ { + return Err(v2::Error::ConnectionFailed(format!( + "address {address} is not permitted" + ))); + } + self.open_connection(&address).await + } + + #[instrument(name = "spin_outbound_mysql.execute", skip(self, connection), err(level = Level::INFO), fields(otel.kind = "client", db.system = "mysql", otel.name = statement))] + async fn execute( + &mut self, + connection: Resource, + statement: String, + params: Vec, + ) -> Result<(), v2::Error> { + Ok(self + .get_client(connection) + .await? + .execute(statement, params) + .await?) + } + + #[instrument(name = "spin_outbound_mysql.query", skip(self, connection), err(level = Level::INFO), fields(otel.kind = "client", db.system = "mysql", otel.name = statement))] + async fn query( + &mut self, + connection: Resource, + statement: String, + params: Vec, + ) -> Result { + Ok(self + .get_client(connection) + .await? + .query(statement, params) + .await?) + } + + fn drop(&mut self, connection: Resource) -> Result<()> { + self.connections.remove(connection.rep()); + Ok(()) + } +} + +impl v2_types::Host for InstanceState { + fn convert_error(&mut self, error: v2::Error) -> Result { + Ok(error) + } +} + +/// Delegate a function call to the v2::HostConnection implementation +macro_rules! delegate { + ($self:ident.$name:ident($address:expr, $($arg:expr),*)) => {{ + if !$self.is_address_allowed(&$address).await.map_err(|e| v2::Error::Other(e.to_string()))? { + return Err(v1::MysqlError::ConnectionFailed(format!( + "address {} is not permitted", $address + ))); + } + let connection = match $self.open_connection(&$address).await { + Ok(c) => c, + Err(e) => return Err(e.into()), + }; + ::$name($self, connection, $($arg),*) + .await + .map_err(Into::into) + }}; +} + +#[async_trait] +impl v1::Host for InstanceState { + async fn execute( + &mut self, + address: String, + statement: String, + params: Vec, + ) -> Result<(), v1::MysqlError> { + delegate!(self.execute( + address, + statement, + params.into_iter().map(Into::into).collect() + )) + } + + async fn query( + &mut self, + address: String, + statement: String, + params: Vec, + ) -> Result { + delegate!(self.query( + address, + statement, + params.into_iter().map(Into::into).collect() + )) + .map(Into::into) + } + + fn convert_mysql_error(&mut self, error: v1::MysqlError) -> Result { + Ok(error) + } +} diff --git a/crates/factor-outbound-mysql/src/lib.rs b/crates/factor-outbound-mysql/src/lib.rs new file mode 100644 index 0000000000..123620052c --- /dev/null +++ b/crates/factor-outbound-mysql/src/lib.rs @@ -0,0 +1,67 @@ +pub mod client; +mod host; + +use client::Client; +use mysql_async::Conn as MysqlClient; +use spin_factor_outbound_networking::{OutboundAllowedHosts, OutboundNetworkingFactor}; +use spin_factors::{Factor, InitContext, RuntimeFactors, SelfInstanceBuilder}; +use spin_world::v1::mysql as v1; +use spin_world::v2::mysql::{self as v2}; + +pub struct OutboundMysqlFactor { + _phantom: std::marker::PhantomData, +} + +impl Factor for OutboundMysqlFactor { + type RuntimeConfig = (); + type AppState = (); + type InstanceBuilder = InstanceState; + + fn init(&mut self, mut ctx: InitContext) -> anyhow::Result<()> { + ctx.link_bindings(v1::add_to_linker)?; + ctx.link_bindings(v2::add_to_linker)?; + Ok(()) + } + + fn configure_app( + &self, + _ctx: spin_factors::ConfigureAppContext, + ) -> anyhow::Result { + Ok(()) + } + + fn prepare( + &self, + _ctx: spin_factors::PrepareContext, + builders: &mut spin_factors::InstanceBuilders, + ) -> anyhow::Result { + let allowed_hosts = builders + .get_mut::()? 
+ .allowed_hosts(); + Ok(InstanceState { + allowed_hosts, + connections: Default::default(), + }) + } +} + +impl Default for OutboundMysqlFactor { + fn default() -> Self { + Self { + _phantom: Default::default(), + } + } +} + +impl OutboundMysqlFactor { + pub fn new() -> Self { + Self::default() + } +} + +pub struct InstanceState { + allowed_hosts: OutboundAllowedHosts, + connections: table::Table, +} + +impl SelfInstanceBuilder for InstanceState {} diff --git a/crates/factor-outbound-mysql/tests/factor_test.rs b/crates/factor-outbound-mysql/tests/factor_test.rs new file mode 100644 index 0000000000..b9261377b2 --- /dev/null +++ b/crates/factor-outbound-mysql/tests/factor_test.rs @@ -0,0 +1,135 @@ +use anyhow::{bail, Result}; +use spin_factor_outbound_mysql::client::Client; +use spin_factor_outbound_mysql::OutboundMysqlFactor; +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factor_variables::VariablesFactor; +use spin_factors::{anyhow, RuntimeFactors}; +use spin_factors_test::{toml, TestEnvironment}; +use spin_world::async_trait; +use spin_world::v2::mysql::HostConnection; +use spin_world::v2::mysql::{self as v2}; +use spin_world::v2::rdbms_types::{ParameterValue, RowSet}; + +#[derive(RuntimeFactors)] +struct TestFactors { + variables: VariablesFactor, + networking: OutboundNetworkingFactor, + mysql: OutboundMysqlFactor, +} + +fn factors() -> TestFactors { + TestFactors { + variables: VariablesFactor::default(), + networking: OutboundNetworkingFactor::new(), + mysql: OutboundMysqlFactor::::new(), + } +} + +fn test_env() -> TestEnvironment { + TestEnvironment::new(factors()).extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + allowed_outbound_hosts = ["mysql://*:*"] + }) +} + +#[tokio::test] +async fn disallowed_host_fails() -> anyhow::Result<()> { + let env = TestEnvironment::new(factors()).extend_manifest(toml! 
{ + [component.test-component] + source = "does-not-exist.wasm" + }); + let mut state = env.build_instance_state().await?; + + let res = state + .mysql + .open("mysql://user:pass@mysql.test:3306/test".to_string()) + .await; + let Err(err) = res else { + bail!("expected Err, got Ok"); + }; + assert!(matches!(err, v2::Error::ConnectionFailed(_))); + + Ok(()) +} + +#[tokio::test] +async fn allowed_host_succeeds() -> anyhow::Result<()> { + let mut state = test_env().build_instance_state().await?; + + let res = state + .mysql + .open("mysql://user:pass@localhost:3306/test".to_string()) + .await; + let Ok(_) = res else { + bail!("expected Ok, got Err"); + }; + + Ok(()) +} + +#[tokio::test] +async fn exercise_execute() -> anyhow::Result<()> { + let mut state = test_env().build_instance_state().await?; + + let connection = state + .mysql + .open("mysql://user:pass@localhost:3306/test".to_string()) + .await?; + + state + .mysql + .execute(connection, "SELECT * FROM test".to_string(), vec![]) + .await?; + + Ok(()) +} + +#[tokio::test] +async fn exercise_query() -> anyhow::Result<()> { + let mut state = test_env().build_instance_state().await?; + + let connection = state + .mysql + .open("mysql://user:pass@localhost:3306/test".to_string()) + .await?; + + state + .mysql + .query(connection, "SELECT * FROM test".to_string(), vec![]) + .await?; + + Ok(()) +} + +// TODO: We can expand this mock to track calls and simulate return values +pub struct MockClient {} + +#[async_trait] +impl Client for MockClient { + async fn build_client(_address: &str) -> anyhow::Result + where + Self: Sized, + { + Ok(MockClient {}) + } + + async fn execute( + &mut self, + _statement: String, + _params: Vec, + ) -> Result<(), v2::Error> { + Ok(()) + } + + async fn query( + &mut self, + _statement: String, + _params: Vec, + ) -> Result { + Ok(RowSet { + columns: vec![], + rows: vec![], + }) + } +} diff --git a/crates/factor-outbound-networking/Cargo.toml b/crates/factor-outbound-networking/Cargo.toml new file mode 100644 index 0000000000..03fd55e4c6 --- /dev/null +++ b/crates/factor-outbound-networking/Cargo.toml @@ -0,0 +1,39 @@ +[package] +name = "spin-factor-outbound-networking" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +anyhow = "1" +futures-util = "0.3" +http = "1.1.0" +ipnet = "2.9.0" +rustls = { version = "0.23", default-features = false, features = ["ring", "std", "tls12"] } +rustls-pemfile = { version = "2.1.2", optional = true } +rustls-pki-types = "1.7.0" +serde = { version = "1", features = ["derive"] } +spin-factor-variables = { path = "../factor-variables" } +spin-factor-wasi = { path = "../factor-wasi" } +spin-factors = { path = "../factors" } +# TODO: merge with this crate +spin-outbound-networking = { path = "../outbound-networking" } +spin-serde = { path = "../serde" } +tracing = { workspace = true } +webpki-roots = "0.26" + +[dev-dependencies] +spin-factors-test = { path = "../factors-test" } +tempfile = "3.10.1" +tokio = { version = "1", features = ["macros", "rt"] } +toml = "0.8" +wasmtime-wasi = { workspace = true } + +[features] +default = ["spin-cli"] +# Includes the runtime configuration handling used by the Spin CLI +spin-cli = [ + "dep:rustls-pemfile", +] +[lints] +workspace = true diff --git a/crates/factor-outbound-networking/src/lib.rs b/crates/factor-outbound-networking/src/lib.rs new file mode 100644 index 0000000000..7af5f34f14 --- /dev/null +++ b/crates/factor-outbound-networking/src/lib.rs @@ -0,0 +1,238 @@ +pub mod 
runtime_config; + +use std::{collections::HashMap, sync::Arc}; + +use futures_util::{ + future::{BoxFuture, Shared}, + FutureExt, +}; +use runtime_config::RuntimeConfig; +use spin_factor_variables::VariablesFactor; +use spin_factor_wasi::{SocketAddrUse, WasiFactor}; +use spin_factors::{ + anyhow::{self, Context}, + ConfigureAppContext, Error, Factor, FactorInstanceBuilder, InstanceBuilders, PrepareContext, + RuntimeFactors, +}; +use spin_outbound_networking::{AllowedHostsConfig, ALLOWED_HOSTS_KEY}; + +pub use spin_outbound_networking::OutboundUrl; + +pub use runtime_config::ComponentTlsConfigs; + +pub type SharedFutureResult = Shared, Arc>>>; + +#[derive(Default)] +pub struct OutboundNetworkingFactor { + disallowed_host_handler: Option>, +} + +impl OutboundNetworkingFactor { + pub fn new() -> Self { + Self::default() + } + + /// Sets a handler to be called when a request is disallowed by an + /// instance's configured `allowed_outbound_hosts`. + pub fn set_disallowed_host_handler(&mut self, handler: impl DisallowedHostHandler + 'static) { + self.disallowed_host_handler = Some(Arc::new(handler)); + } +} + +impl Factor for OutboundNetworkingFactor { + type RuntimeConfig = RuntimeConfig; + type AppState = AppState; + type InstanceBuilder = InstanceBuilder; + + fn configure_app( + &self, + mut ctx: ConfigureAppContext, + ) -> anyhow::Result { + // Extract allowed_outbound_hosts for all components + let component_allowed_hosts = ctx + .app() + .components() + .map(|component| { + Ok(( + component.id().to_string(), + component + .get_metadata(ALLOWED_HOSTS_KEY)? + .unwrap_or_default() + .into_boxed_slice() + .into(), + )) + }) + .collect::>()?; + + let runtime_config = match ctx.take_runtime_config() { + Some(cfg) => cfg, + // The default RuntimeConfig provides default TLS client configs + None => RuntimeConfig::new([])?, + }; + + Ok(AppState { + component_allowed_hosts, + runtime_config, + }) + } + + fn prepare( + &self, + ctx: PrepareContext, + builders: &mut InstanceBuilders, + ) -> anyhow::Result { + let hosts = ctx + .app_state() + .component_allowed_hosts + .get(ctx.app_component().id()) + .cloned() + .context("missing component allowed hosts")?; + let resolver = builders + .get_mut::()? + .expression_resolver() + .clone(); + let allowed_hosts_future = async move { + let prepared = resolver.prepare().await?; + AllowedHostsConfig::parse(&hosts, &prepared) + } + .map(|res| res.map(Arc::new).map_err(Arc::new)) + .boxed() + .shared(); + + match builders.get_mut::() { + Ok(wasi_builder) => { + // Update Wasi socket allowed ports + let allowed_hosts = OutboundAllowedHosts { + allowed_hosts_future: allowed_hosts_future.clone(), + disallowed_host_handler: self.disallowed_host_handler.clone(), + }; + wasi_builder.outbound_socket_addr_check(move |addr, addr_use| { + let allowed_hosts = allowed_hosts.clone(); + async move { + // TODO: validate against existing spin-core behavior + let scheme = match addr_use { + SocketAddrUse::TcpBind => return false, + SocketAddrUse::TcpConnect => "tcp", + SocketAddrUse::UdpBind | SocketAddrUse::UdpConnect | SocketAddrUse::UdpOutgoingDatagram => "udp", + }; + allowed_hosts.check_url(&addr.to_string(), scheme).await.unwrap_or_else(|err| { + // TODO: should this trap (somehow)? 
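+                            // Failing closed: if the allowed-hosts config can't be
+                            // resolved, log the error and deny the socket operation.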
+ tracing::error!(%err, "allowed_outbound_hosts variable resolution failed"); + false + }) + } + }); + } + Err(Error::NoSuchFactor(_)) => (), // no WasiFactor to configure; that's OK + Err(err) => return Err(err.into()), + } + + let component_tls_configs = ctx + .app_state() + .runtime_config + .get_component_tls_configs(ctx.app_component().id()); + + Ok(InstanceBuilder { + allowed_hosts_future, + component_tls_configs, + disallowed_host_handler: self.disallowed_host_handler.clone(), + }) + } +} + +pub struct AppState { + component_allowed_hosts: HashMap>, + runtime_config: RuntimeConfig, +} + +pub struct InstanceBuilder { + allowed_hosts_future: SharedFutureResult, + component_tls_configs: ComponentTlsConfigs, + disallowed_host_handler: Option>, +} + +impl InstanceBuilder { + pub fn allowed_hosts(&self) -> OutboundAllowedHosts { + OutboundAllowedHosts { + allowed_hosts_future: self.allowed_hosts_future.clone(), + disallowed_host_handler: self.disallowed_host_handler.clone(), + } + } + + pub fn component_tls_configs(&self) -> &ComponentTlsConfigs { + &self.component_tls_configs + } +} + +impl FactorInstanceBuilder for InstanceBuilder { + type InstanceState = (); + + fn build(self) -> anyhow::Result { + Ok(()) + } +} + +// TODO: Refactor w/ spin-outbound-networking crate to simplify +#[derive(Clone)] +pub struct OutboundAllowedHosts { + allowed_hosts_future: SharedFutureResult, + disallowed_host_handler: Option>, +} + +impl OutboundAllowedHosts { + /// Checks address against allowed hosts + /// + /// Calls the [`DisallowedHostCallback`] if set and URL is disallowed. + pub async fn check_url(&self, url: &str, scheme: &str) -> anyhow::Result { + let Ok(url) = OutboundUrl::parse(url, scheme) else { + tracing::warn!( + "A component tried to make a request to a url that could not be parsed: {url}", + ); + return Ok(false); + }; + + let allowed_hosts = self.resolve().await?; + let is_allowed = allowed_hosts.allows(&url); + if !is_allowed { + self.report_disallowed_host(url.scheme(), &url.authority()); + } + Ok(is_allowed) + } + + /// Checks if allowed hosts permit relative requests + /// + /// Calls the [`DisallowedHostCallback`] if set and relative requests are + /// disallowed. 
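+    ///
+    /// Illustrative call site (the scheme list here is an assumption; real callers
+    /// live in the outbound factors):
+    ///
+    /// ```ignore
+    /// if !allowed_hosts.check_relative_url(&["http", "https"]).await? {
+    ///     // reject the outbound request to `self`
+    /// }
+    /// ```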
+ pub async fn check_relative_url(&self, schemes: &[&str]) -> anyhow::Result { + let allowed_hosts = self.resolve().await?; + let is_allowed = allowed_hosts.allows_relative_url(schemes); + if !is_allowed { + let scheme = schemes.first().unwrap_or(&""); + self.report_disallowed_host(scheme, "self"); + } + Ok(is_allowed) + } + + async fn resolve(&self) -> anyhow::Result> { + self.allowed_hosts_future.clone().await.map_err(|err| { + tracing::error!("Error resolving allowed_outbound_hosts variables: {err}"); + anyhow::Error::msg(err) + }) + } + + fn report_disallowed_host(&self, scheme: &str, authority: &str) { + if let Some(handler) = &self.disallowed_host_handler { + handler.handle_disallowed_host(scheme, authority); + } + } +} + +pub trait DisallowedHostHandler: Send + Sync { + fn handle_disallowed_host(&self, scheme: &str, authority: &str); +} + +impl DisallowedHostHandler for F { + fn handle_disallowed_host(&self, scheme: &str, authority: &str) { + self(scheme, authority); + } +} diff --git a/crates/factor-outbound-networking/src/runtime_config.rs b/crates/factor-outbound-networking/src/runtime_config.rs new file mode 100644 index 0000000000..fad06edd1d --- /dev/null +++ b/crates/factor-outbound-networking/src/runtime_config.rs @@ -0,0 +1,269 @@ +#[cfg(feature = "spin-cli")] +pub mod spin; + +use std::{collections::HashMap, str::FromStr, sync::Arc}; + +use anyhow::{ensure, Context}; +use rustls::{ClientConfig, RootCertStore}; +use rustls_pki_types::{CertificateDer, PrivateKeyDer}; + +/// Runtime configuration for outbound networking. +#[derive(Debug)] +pub struct RuntimeConfig { + /// Maps component ID -> HostClientConfigs + component_host_client_configs: HashMap, + /// The default [`ClientConfig`] for a host if one is not explicitly configured for it. + default_client_config: Arc, +} + +// Maps host authority -> ClientConfig +type HostClientConfigs = Arc>>; + +impl RuntimeConfig { + /// Returns runtime config with the given list of [`TlsConfig`]s. The first + /// [`TlsConfig`] to match an outgoing request (based on + /// [`TlsConfig::components`] and [`TlsConfig::hosts`]) will be used. + pub fn new(tls_configs: impl IntoIterator) -> anyhow::Result { + let mut component_host_client_configs = HashMap::::new(); + for tls_config in tls_configs { + ensure!( + !tls_config.components.is_empty(), + "client TLS 'components' list may not be empty" + ); + ensure!( + !tls_config.hosts.is_empty(), + "client TLS 'hosts' list may not be empty" + ); + let client_config = Arc::new( + tls_config + .to_client_config() + .context("error building TLS client config")?, + ); + for component in &tls_config.components { + let host_configs = component_host_client_configs + .entry(component.clone()) + .or_default(); + for host in &tls_config.hosts { + validate_host(host)?; + // First matching (component, host) pair wins + Arc::get_mut(host_configs) + .unwrap() + .entry(host.clone()) + .or_insert_with(|| client_config.clone()); + } + } + } + + let default_client_config = Arc::new(TlsConfig::default().to_client_config()?); + + Ok(Self { + component_host_client_configs, + default_client_config, + }) + } + + /// Returns [`ComponentTlsConfigs`] for the given component. 
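+    ///
+    /// Components with no explicit `[[client_tls]]` entry still get a usable value:
+    /// host lookups on the returned [`ComponentTlsConfigs`] fall back to the default
+    /// client config.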
+ pub fn get_component_tls_configs(&self, component_id: &str) -> ComponentTlsConfigs { + let host_client_configs = self + .component_host_client_configs + .get(component_id) + .cloned(); + ComponentTlsConfigs { + host_client_configs, + default_client_config: self.default_client_config.clone(), + } + } + + /// Returns a [`ClientConfig`] for the given component and host authority. + /// + /// This is a convenience method, equivalent to: + /// `.get_client_config(component_id).get_client_config(host)` + pub fn get_client_config(&self, component_id: &str, host: &str) -> Arc { + let component_config = self.get_component_tls_configs(component_id); + component_config.get_client_config(host).clone() + } +} + +pub(crate) fn validate_host(host: &str) -> anyhow::Result<()> { + // Validate hostname + let authority = http::uri::Authority::from_str(host) + .with_context(|| format!("invalid TLS 'host' {host:?}"))?; + ensure!( + authority.port().is_none(), + "invalid TLS 'host' {host:?}; ports not currently supported" + ); + Ok(()) +} + +/// TLS configurations for a specific component. +#[derive(Clone)] +pub struct ComponentTlsConfigs { + host_client_configs: Option, + default_client_config: Arc, +} + +impl ComponentTlsConfigs { + /// Returns a [`ClientConfig`] for the given host authority. + pub fn get_client_config(&self, host: &str) -> &Arc { + self.host_client_configs + .as_ref() + .and_then(|configs| configs.get(host)) + .unwrap_or(&self.default_client_config) + } +} + +#[derive(Debug)] +pub struct ClientCertConfig { + cert_chain: Vec>, + key_der: PrivateKeyDer<'static>, +} + +/// TLS configuration for one or more component(s) and host(s). +#[derive(Debug)] +pub struct TlsConfig { + /// The component(s) this configuration applies to. + pub components: Vec, + /// The host(s) this configuration applies to. + pub hosts: Vec, + /// A set of CA certs that should be considered valid roots. + pub root_certificates: Vec>, + /// If true, the "standard" CA certs defined by `webpki-roots` crate will be + /// considered valid roots in addition to `root_certificates`. + pub use_webpki_roots: bool, + /// A certificate and private key to be used as the client certificate for + /// "mutual TLS" (mTLS). + pub client_cert: Option, +} + +impl Default for TlsConfig { + fn default() -> Self { + Self { + components: vec![], + hosts: vec![], + root_certificates: vec![], + // Use webpki roots by default + use_webpki_roots: true, + client_cert: None, + } + } +} + +impl TlsConfig { + fn to_client_config(&self) -> anyhow::Result { + let mut root_store = RootCertStore::empty(); + if self.use_webpki_roots { + root_store.extend(webpki_roots::TLS_SERVER_ROOTS.iter().cloned()); + } + for ca in &self.root_certificates { + root_store.add(ca.clone())?; + } + + let builder = ClientConfig::builder().with_root_certificates(root_store); + + if let Some(ClientCertConfig { + cert_chain, + key_der, + }) = &self.client_cert + { + Ok(builder.with_client_auth_cert(cert_chain.clone(), key_der.clone_key())?) 
+ } else { + Ok(builder.with_no_client_auth()) + } + } +} + +#[cfg(test)] +mod tests { + use std::{io::BufReader, path::Path}; + + use anyhow::Context; + + use super::*; + + #[test] + fn test_empty_config() -> anyhow::Result<()> { + let runtime_config = RuntimeConfig::new([])?; + // Just make sure the default path doesn't panic + runtime_config.get_client_config("foo", "bar"); + Ok(()) + } + + #[test] + fn test_minimal_config() -> anyhow::Result<()> { + let runtime_config = RuntimeConfig::new([TlsConfig { + components: vec!["test-component".into()], + hosts: vec!["test-host".into()], + root_certificates: vec![], + use_webpki_roots: false, + client_cert: None, + }])?; + let client_config = runtime_config.get_client_config("test-component", "test-host"); + // Check that we didn't just get the default + let default_config = runtime_config.get_client_config("other_component", "test-host"); + assert!(!Arc::ptr_eq(&client_config, &default_config)); + Ok(()) + } + + #[test] + fn test_maximal_config() -> anyhow::Result<()> { + let test_certs = test_certs()?; + let test_key = test_key()?; + let runtime_config = RuntimeConfig::new([TlsConfig { + components: vec!["test-component".into()], + hosts: vec!["test-host".into()], + root_certificates: vec![test_certs[0].clone()], + use_webpki_roots: false, + client_cert: Some(ClientCertConfig { + cert_chain: test_certs, + key_der: test_key, + }), + }])?; + let client_config = runtime_config.get_client_config("test-component", "test-host"); + assert!(client_config.client_auth_cert_resolver.has_certs()); + Ok(()) + } + + #[test] + fn test_config_overrides() -> anyhow::Result<()> { + let test_certs = test_certs()?; + let test_key = test_key()?; + let runtime_config = RuntimeConfig::new([ + TlsConfig { + components: vec!["test-component1".into()], + hosts: vec!["test-host".into()], + client_cert: Some(ClientCertConfig { + cert_chain: test_certs, + key_der: test_key, + }), + ..Default::default() + }, + TlsConfig { + components: vec!["test-component1".into(), "test-component2".into()], + hosts: vec!["test-host".into()], + ..Default::default() + }, + ])?; + // First match wins + let client_config1 = runtime_config.get_client_config("test-component1", "test-host"); + assert!(client_config1.client_auth_cert_resolver.has_certs()); + + // Correctly select by differing component ID + let client_config2 = runtime_config.get_client_config("test-component-2", "test-host"); + assert!(!client_config2.client_auth_cert_resolver.has_certs()); + Ok(()) + } + + const TESTDATA_DIR: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/testdata"); + + fn test_certs() -> anyhow::Result>> { + let file = std::fs::File::open(Path::new(TESTDATA_DIR).join("valid-cert.pem"))?; + rustls_pemfile::certs(&mut BufReader::new(file)) + .map(|res| res.map_err(Into::into)) + .collect() + } + + fn test_key() -> anyhow::Result> { + let file = std::fs::File::open(Path::new(TESTDATA_DIR).join("valid-private-key.pem"))?; + rustls_pemfile::private_key(&mut BufReader::new(file))?.context("no private key") + } +} diff --git a/crates/factor-outbound-networking/src/runtime_config/spin.rs b/crates/factor-outbound-networking/src/runtime_config/spin.rs new file mode 100644 index 0000000000..863ebed7fe --- /dev/null +++ b/crates/factor-outbound-networking/src/runtime_config/spin.rs @@ -0,0 +1,280 @@ +use anyhow::{bail, ensure, Context}; +use serde::{Deserialize, Deserializer}; +use spin_factors::runtime_config::toml::GetTomlValue; +use std::io; +use std::{ + fs, + path::{Path, PathBuf}, +}; + +use super::{validate_host, 
TlsConfig}; + +/// Spin's default handling of the runtime configuration for outbound TLS. +pub struct SpinTlsRuntimeConfig { + runtime_config_dir: PathBuf, +} + +impl SpinTlsRuntimeConfig { + /// Creates a new `SpinTlsRuntimeConfig`. + /// + /// The given `runtime_config_dir` will be used as the root to resolve any + /// relative paths. + pub fn new(runtime_config_dir: impl Into) -> Self { + Self { + runtime_config_dir: runtime_config_dir.into(), + } + } + + /// Get the runtime configuration for client TLS from a TOML table. + /// + /// Expects table to be in the format: + /// ````toml + /// [[client_tls]] + /// component_ids = ["example-component"] + /// hosts = ["example.com"] + /// ca_use_webpki_roots = true + /// ca_roots_file = "path/to/roots.crt" + /// client_cert_file = "path/to/client.crt" + /// client_private_key_file = "path/to/client.key" + /// ``` + pub fn config_from_table( + &self, + table: &impl GetTomlValue, + ) -> anyhow::Result> { + let Some(tls_configs) = self.tls_configs_from_table(table)? else { + return Ok(None); + }; + let runtime_config = super::RuntimeConfig::new(tls_configs)?; + Ok(Some(runtime_config)) + } + + fn tls_configs_from_table( + &self, + table: &T, + ) -> anyhow::Result>> { + let Some(array) = table.get("client_tls") else { + return Ok(None); + }; + let toml_configs: Vec = array.clone().try_into()?; + + let tls_configs = toml_configs + .into_iter() + .map(|toml_config| self.load_tls_config(toml_config)) + .collect::>>() + .context("failed to parse TLS configs from TOML")?; + Ok(Some(tls_configs)) + } + + fn load_tls_config(&self, toml_config: RuntimeConfigToml) -> anyhow::Result { + let RuntimeConfigToml { + component_ids, + hosts, + ca_use_webpki_roots, + ca_roots_file, + client_cert_file, + client_private_key_file, + } = toml_config; + ensure!( + !component_ids.is_empty(), + "[[client_tls]] 'component_ids' list may not be empty" + ); + ensure!( + !hosts.is_empty(), + "[[client_tls]] 'hosts' list may not be empty" + ); + + let components = component_ids.into_iter().map(Into::into).collect(); + + let hosts = hosts + .iter() + .map(|host| { + host.parse() + .map_err(|err| anyhow::anyhow!("invalid host {host:?}: {err:?}")) + }) + .collect::>>()?; + + let use_webpki_roots = if let Some(ca_use_webpki_roots) = ca_use_webpki_roots { + ca_use_webpki_roots + } else { + // Use webpki roots by default *unless* explicit roots were given + ca_roots_file.is_none() + }; + + let root_certificates = ca_roots_file + .map(|path| self.load_certs(path)) + .transpose()? 
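+            // An absent `ca_roots_file` just means no explicit roots; whether the
+            // webpki roots apply is decided by `use_webpki_roots` above.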
+ .unwrap_or_default(); + + let client_cert = match (client_cert_file, client_private_key_file) { + (Some(cert_path), Some(key_path)) => Some(super::ClientCertConfig { + cert_chain: self.load_certs(cert_path)?, + key_der: self.load_key(key_path)?, + }), + (None, None) => None, + (Some(_), None) => bail!("client_cert_file specified without client_private_key_file"), + (None, Some(_)) => bail!("client_private_key_file specified without client_cert_file"), + }; + + Ok(TlsConfig { + components, + hosts, + root_certificates, + use_webpki_roots, + client_cert, + }) + } + + // Parse certs from the provided file + fn load_certs( + &self, + path: impl AsRef, + ) -> io::Result>> { + let path = self.runtime_config_dir.join(path); + rustls_pemfile::certs(&mut io::BufReader::new(fs::File::open(path).map_err( + |err| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!("failed to read cert file {:?}", err), + ) + }, + )?)) + .collect() + } + + // Parse a private key from the provided file + fn load_key( + &self, + path: impl AsRef, + ) -> anyhow::Result> { + let path = self.runtime_config_dir.join(path); + let file = fs::File::open(&path) + .with_context(|| format!("failed to read private key from '{}'", path.display()))?; + Ok(rustls_pemfile::private_key(&mut io::BufReader::new(file)) + .with_context(|| format!("failed to parse private key from '{}'", path.display()))? + .ok_or_else(|| { + io::Error::new( + io::ErrorKind::InvalidInput, + format!( + "private key file '{}' contains no private keys", + path.display() + ), + ) + })?) + } +} + +#[derive(Debug, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct RuntimeConfigToml { + component_ids: Vec, + #[serde(deserialize_with = "deserialize_hosts")] + hosts: Vec, + ca_use_webpki_roots: Option, + ca_roots_file: Option, + client_cert_file: Option, + client_private_key_file: Option, +} + +fn deserialize_hosts<'de, D: Deserializer<'de>>(deserializer: D) -> Result, D::Error> { + let hosts = Vec::::deserialize(deserializer)?; + for host in &hosts { + validate_host(host).map_err(serde::de::Error::custom)?; + } + Ok(hosts) +} + +#[cfg(test)] +mod tests { + use super::*; + + const TESTDATA_DIR: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/testdata"); + + #[test] + fn test_min_config() -> anyhow::Result<()> { + let config = SpinTlsRuntimeConfig::new("/doesnt-matter"); + + let tls_configs = config + .tls_configs_from_table(&toml::toml! { + [[client_tls]] + component_ids = ["test-component"] + hosts = ["test-host"] + + })? + .context("missing config section")?; + assert_eq!(tls_configs.len(), 1); + + assert_eq!(tls_configs[0].components, ["test-component"]); + assert_eq!(tls_configs[0].hosts[0].as_str(), "test-host"); + assert!(tls_configs[0].use_webpki_roots); + Ok(()) + } + + #[test] + fn test_max_config() -> anyhow::Result<()> { + let config = SpinTlsRuntimeConfig::new(TESTDATA_DIR); + + let tls_configs = config + .tls_configs_from_table(&toml::toml! { + [[client_tls]] + component_ids = ["test-component"] + hosts = ["test-host"] + ca_use_webpki_roots = true + ca_roots_file = "valid-cert.pem" + client_cert_file = "valid-cert.pem" + client_private_key_file = "valid-private-key.pem" + })? 
+ .context("missing config section")?; + assert_eq!(tls_configs.len(), 1); + + assert!(tls_configs[0].use_webpki_roots); + assert_eq!(tls_configs[0].root_certificates.len(), 2); + assert!(tls_configs[0].client_cert.is_some()); + Ok(()) + } + + #[test] + fn test_use_webpki_roots_default_with_explicit_roots() -> anyhow::Result<()> { + let config = SpinTlsRuntimeConfig::new(TESTDATA_DIR); + + let tls_configs = config + .tls_configs_from_table(&toml::toml! { + [[client_tls]] + component_ids = ["test-component"] + hosts = ["test-host"] + ca_roots_file = "valid-cert.pem" + })? + .context("missing config section")?; + + assert!(!tls_configs[0].use_webpki_roots); + Ok(()) + } + + #[test] + fn test_invalid_cert() { + let config = SpinTlsRuntimeConfig::new(TESTDATA_DIR); + + config + .tls_configs_from_table(&toml::toml! { + [[client_tls]] + component_ids = ["test-component"] + hosts = ["test-host"] + ca_roots_file = "invalid-cert.pem" + }) + .unwrap_err(); + } + + #[test] + fn test_invalid_private_key() { + let config = SpinTlsRuntimeConfig::new(TESTDATA_DIR); + + config + .tls_configs_from_table(&toml::toml! { + [[client_tls]] + component_ids = ["test-component"] + hosts = ["test-host"] + client_cert_file = "valid-cert.pem" + client_private_key_file = "invalid-key.pem" + }) + .unwrap_err(); + } +} diff --git a/crates/trigger-http/src/testdata/invalid-cert.pem b/crates/factor-outbound-networking/testdata/invalid-cert.pem similarity index 100% rename from crates/trigger-http/src/testdata/invalid-cert.pem rename to crates/factor-outbound-networking/testdata/invalid-cert.pem diff --git a/crates/trigger-http/src/testdata/invalid-private-key.pem b/crates/factor-outbound-networking/testdata/invalid-private-key.pem similarity index 100% rename from crates/trigger-http/src/testdata/invalid-private-key.pem rename to crates/factor-outbound-networking/testdata/invalid-private-key.pem diff --git a/crates/trigger-http/src/testdata/valid-cert.pem b/crates/factor-outbound-networking/testdata/valid-cert.pem similarity index 100% rename from crates/trigger-http/src/testdata/valid-cert.pem rename to crates/factor-outbound-networking/testdata/valid-cert.pem diff --git a/crates/trigger-http/src/testdata/valid-private-key.pem b/crates/factor-outbound-networking/testdata/valid-private-key.pem similarity index 100% rename from crates/trigger-http/src/testdata/valid-private-key.pem rename to crates/factor-outbound-networking/testdata/valid-private-key.pem diff --git a/crates/factor-outbound-networking/tests/factor_test.rs b/crates/factor-outbound-networking/tests/factor_test.rs new file mode 100644 index 0000000000..d4bd1a51c8 --- /dev/null +++ b/crates/factor-outbound-networking/tests/factor_test.rs @@ -0,0 +1,66 @@ +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factor_variables::VariablesFactor; +use spin_factor_wasi::{DummyFilesMounter, WasiFactor}; +use spin_factors::{anyhow, RuntimeFactors}; +use spin_factors_test::{toml, TestEnvironment}; +use wasmtime_wasi::{bindings::sockets::instance_network::Host, SocketAddrUse, WasiView}; + +#[derive(RuntimeFactors)] +struct TestFactors { + wasi: WasiFactor, + variables: VariablesFactor, + networking: OutboundNetworkingFactor, +} + +#[tokio::test] +async fn configures_wasi_socket_addr_check() -> anyhow::Result<()> { + let factors = TestFactors { + wasi: WasiFactor::new(DummyFilesMounter), + variables: VariablesFactor::default(), + networking: OutboundNetworkingFactor::new(), + }; + let env = TestEnvironment::new(factors).extend_manifest(toml! 
{ + [component.test-component] + source = "does-not-exist.wasm" + allowed_outbound_hosts = ["*://192.0.2.1:12345"] + }); + let mut state = env.build_instance_state().await?; + let mut wasi = WasiFactor::get_wasi_impl(&mut state).unwrap(); + + let network_resource = wasi.instance_network()?; + let network = wasi.table().get(&network_resource)?; + + network + .check_socket_addr( + "192.0.2.1:12345".parse().unwrap(), + SocketAddrUse::TcpConnect, + ) + .await?; + for not_allowed in ["192.0.2.1:25", "192.0.2.2:12345"] { + assert_eq!( + network + .check_socket_addr(not_allowed.parse().unwrap(), SocketAddrUse::TcpConnect) + .await + .unwrap_err() + .kind(), + std::io::ErrorKind::PermissionDenied + ); + } + Ok(()) +} + +#[tokio::test] +async fn wasi_factor_is_optional() -> anyhow::Result<()> { + #[derive(RuntimeFactors)] + struct WithoutWasi { + variables: VariablesFactor, + networking: OutboundNetworkingFactor, + } + TestEnvironment::new(WithoutWasi { + variables: VariablesFactor::default(), + networking: OutboundNetworkingFactor::new(), + }) + .build_instance_state() + .await?; + Ok(()) +} diff --git a/crates/outbound-pg/Cargo.toml b/crates/factor-outbound-pg/Cargo.toml similarity index 53% rename from crates/outbound-pg/Cargo.toml rename to crates/factor-outbound-pg/Cargo.toml index d12580978f..cd8681a4a2 100644 --- a/crates/outbound-pg/Cargo.toml +++ b/crates/factor-outbound-pg/Cargo.toml @@ -1,25 +1,26 @@ [package] -name = "outbound-pg" +name = "spin-factor-outbound-pg" version = { workspace = true } authors = { workspace = true } edition = { workspace = true } -[lib] -doctest = false - [dependencies] anyhow = "1.0" native-tls = "0.2.11" postgres-native-tls = "0.5.0" -spin-app = { path = "../app" } spin-core = { path = "../core" } -spin-expressions = { path = "../expressions" } -spin-outbound-networking = { path = "../outbound-networking" } +spin-factor-outbound-networking = { path = "../factor-outbound-networking" } +spin-factors = { path = "../factors" } spin-world = { path = "../world" } table = { path = "../table" } tokio = { version = "1", features = ["rt-multi-thread"] } -tokio-postgres = { version = "0.7.7" } +tokio-postgres = "0.7.7" tracing = { workspace = true } +[dev-dependencies] +spin-factor-variables = { path = "../factor-variables" } +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } + [lints] workspace = true diff --git a/crates/factor-outbound-pg/src/client.rs b/crates/factor-outbound-pg/src/client.rs new file mode 100644 index 0000000000..06a93a6311 --- /dev/null +++ b/crates/factor-outbound-pg/src/client.rs @@ -0,0 +1,284 @@ +use anyhow::{anyhow, Result}; +use native_tls::TlsConnector; +use postgres_native_tls::MakeTlsConnector; +use spin_world::async_trait; +use spin_world::v2::postgres::{self as v2}; +use spin_world::v2::rdbms_types::{Column, DbDataType, DbValue, ParameterValue, RowSet}; +use tokio_postgres::types::Type; +use tokio_postgres::{config::SslMode, types::ToSql, Row}; +use tokio_postgres::{Client as TokioClient, NoTls, Socket}; + +#[async_trait] +pub trait Client { + async fn build_client(address: &str) -> Result + where + Self: Sized; + + async fn execute( + &self, + statement: String, + params: Vec, + ) -> Result; + + async fn query( + &self, + statement: String, + params: Vec, + ) -> Result; +} + +#[async_trait] +impl Client for TokioClient { + async fn build_client(address: &str) -> Result + where + Self: Sized, + { + let config = address.parse::()?; + + tracing::debug!("Build new connection: {}", 
address); + + if config.get_ssl_mode() == SslMode::Disable { + let (client, connection) = config.connect(NoTls).await?; + spawn_connection(connection); + Ok(client) + } else { + let builder = TlsConnector::builder(); + let connector = MakeTlsConnector::new(builder.build()?); + let (client, connection) = config.connect(connector).await?; + spawn_connection(connection); + Ok(client) + } + } + + async fn execute( + &self, + statement: String, + params: Vec, + ) -> Result { + let params: Vec<&(dyn ToSql + Sync)> = params + .iter() + .map(to_sql_parameter) + .collect::>>() + .map_err(|e| v2::Error::ValueConversionFailed(format!("{:?}", e)))?; + + self.execute(&statement, params.as_slice()) + .await + .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e))) + } + + async fn query( + &self, + statement: String, + params: Vec, + ) -> Result { + let params: Vec<&(dyn ToSql + Sync)> = params + .iter() + .map(to_sql_parameter) + .collect::>>() + .map_err(|e| v2::Error::BadParameter(format!("{:?}", e)))?; + + let results = self + .query(&statement, params.as_slice()) + .await + .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e)))?; + + if results.is_empty() { + return Ok(RowSet { + columns: vec![], + rows: vec![], + }); + } + + let columns = infer_columns(&results[0]); + let rows = results + .iter() + .map(convert_row) + .collect::, _>>() + .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e)))?; + + Ok(RowSet { columns, rows }) + } +} + +fn spawn_connection(connection: tokio_postgres::Connection) +where + T: tokio_postgres::tls::TlsStream + std::marker::Unpin + std::marker::Send + 'static, +{ + tokio::spawn(async move { + if let Err(e) = connection.await { + tracing::error!("Postgres connection error: {}", e); + } + }); +} + +fn to_sql_parameter(value: &ParameterValue) -> Result<&(dyn ToSql + Sync)> { + match value { + ParameterValue::Boolean(v) => Ok(v), + ParameterValue::Int32(v) => Ok(v), + ParameterValue::Int64(v) => Ok(v), + ParameterValue::Int8(v) => Ok(v), + ParameterValue::Int16(v) => Ok(v), + ParameterValue::Floating32(v) => Ok(v), + ParameterValue::Floating64(v) => Ok(v), + ParameterValue::Uint8(_) + | ParameterValue::Uint16(_) + | ParameterValue::Uint32(_) + | ParameterValue::Uint64(_) => Err(anyhow!("Postgres does not support unsigned integers")), + ParameterValue::Str(v) => Ok(v), + ParameterValue::Binary(v) => Ok(v), + ParameterValue::DbNull => Ok(&PgNull), + } +} + +fn infer_columns(row: &Row) -> Vec { + let mut result = Vec::with_capacity(row.len()); + for index in 0..row.len() { + result.push(infer_column(row, index)); + } + result +} + +fn infer_column(row: &Row, index: usize) -> Column { + let column = &row.columns()[index]; + let name = column.name().to_owned(); + let data_type = convert_data_type(column.type_()); + Column { name, data_type } +} + +fn convert_data_type(pg_type: &Type) -> DbDataType { + match *pg_type { + Type::BOOL => DbDataType::Boolean, + Type::BYTEA => DbDataType::Binary, + Type::FLOAT4 => DbDataType::Floating32, + Type::FLOAT8 => DbDataType::Floating64, + Type::INT2 => DbDataType::Int16, + Type::INT4 => DbDataType::Int32, + Type::INT8 => DbDataType::Int64, + Type::TEXT | Type::VARCHAR | Type::BPCHAR => DbDataType::Str, + _ => { + tracing::debug!("Couldn't convert Postgres type {} to WIT", pg_type.name(),); + DbDataType::Other + } + } +} + +fn convert_row(row: &Row) -> Result, tokio_postgres::Error> { + let mut result = Vec::with_capacity(row.len()); + for index in 0..row.len() { + result.push(convert_entry(row, index)?); + } + Ok(result) +} + +fn 
convert_entry(row: &Row, index: usize) -> Result<DbValue, tokio_postgres::Error> {
+    let column = &row.columns()[index];
+    let value = match column.type_() {
+        &Type::BOOL => {
+            let value: Option<bool> = row.try_get(index)?;
+            match value {
+                Some(v) => DbValue::Boolean(v),
+                None => DbValue::DbNull,
+            }
+        }
+        &Type::BYTEA => {
+            let value: Option<Vec<u8>> = row.try_get(index)?;
+            match value {
+                Some(v) => DbValue::Binary(v),
+                None => DbValue::DbNull,
+            }
+        }
+        &Type::FLOAT4 => {
+            let value: Option<f32> = row.try_get(index)?;
+            match value {
+                Some(v) => DbValue::Floating32(v),
+                None => DbValue::DbNull,
+            }
+        }
+        &Type::FLOAT8 => {
+            let value: Option<f64> = row.try_get(index)?;
+            match value {
+                Some(v) => DbValue::Floating64(v),
+                None => DbValue::DbNull,
+            }
+        }
+        &Type::INT2 => {
+            let value: Option<i16> = row.try_get(index)?;
+            match value {
+                Some(v) => DbValue::Int16(v),
+                None => DbValue::DbNull,
+            }
+        }
+        &Type::INT4 => {
+            let value: Option<i32> = row.try_get(index)?;
+            match value {
+                Some(v) => DbValue::Int32(v),
+                None => DbValue::DbNull,
+            }
+        }
+        &Type::INT8 => {
+            let value: Option<i64> = row.try_get(index)?;
+            match value {
+                Some(v) => DbValue::Int64(v),
+                None => DbValue::DbNull,
+            }
+        }
+        &Type::TEXT | &Type::VARCHAR | &Type::BPCHAR => {
+            let value: Option<String> = row.try_get(index)?;
+            match value {
+                Some(v) => DbValue::Str(v),
+                None => DbValue::DbNull,
+            }
+        }
+        t => {
+            tracing::debug!(
+                "Couldn't convert Postgres type {} in column {}",
+                t.name(),
+                column.name()
+            );
+            DbValue::Unsupported
+        }
+    };
+    Ok(value)
+}
+
+/// Although the Postgres crate converts Rust Option::None to Postgres NULL,
+/// it enforces the type of the Option as it does so. (For example, trying to
+/// pass an Option::<i32>::None to a VARCHAR column fails conversion.) As we
+/// do not know expected column types, we instead use a "neutral" custom type
+/// which allows conversion to any type but always tells the Postgres crate to
+/// treat it as a SQL NULL.
+struct PgNull;
+
+impl ToSql for PgNull {
+    fn to_sql(
+        &self,
+        _ty: &Type,
+        _out: &mut tokio_postgres::types::private::BytesMut,
+    ) -> Result<tokio_postgres::types::IsNull, Box<dyn std::error::Error + Sync + Send>>
+    where
+        Self: Sized,
+    {
+        Ok(tokio_postgres::types::IsNull::Yes)
+    }
+
+    fn accepts(_ty: &Type) -> bool
+    where
+        Self: Sized,
+    {
+        true
+    }
+
+    fn to_sql_checked(
+        &self,
+        _ty: &Type,
+        _out: &mut tokio_postgres::types::private::BytesMut,
+    ) -> Result<tokio_postgres::types::IsNull, Box<dyn std::error::Error + Sync + Send>> {
+        Ok(tokio_postgres::types::IsNull::Yes)
+    }
+}
+
+impl std::fmt::Debug for PgNull {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("NULL").finish()
+    }
+}
diff --git a/crates/factor-outbound-pg/src/host.rs b/crates/factor-outbound-pg/src/host.rs
new file mode 100644
index 0000000000..1f7be3570b
--- /dev/null
+++ b/crates/factor-outbound-pg/src/host.rs
@@ -0,0 +1,170 @@
+use anyhow::Result;
+use spin_core::{async_trait, wasmtime::component::Resource};
+use spin_world::v1::postgres as v1;
+use spin_world::v1::rdbms_types as v1_types;
+use spin_world::v2::postgres::{self as v2, Connection};
+use spin_world::v2::rdbms_types;
+use spin_world::v2::rdbms_types::{ParameterValue, RowSet};
+use tracing::instrument;
+use tracing::Level;
+
+use crate::client::Client;
+use crate::InstanceState;
+
+impl<C: Client> InstanceState<C> {
+    async fn open_connection(&mut self, address: &str) -> Result<Resource<Connection>, v2::Error> {
+        self.connections
+            .push(
+                C::build_client(address)
+                    .await
+                    .map_err(|e| v2::Error::ConnectionFailed(format!("{e:?}")))?,
+            )
+            .map_err(|_| v2::Error::ConnectionFailed("too many connections".into()))
+            .map(Resource::new_own)
+    }
+
+    async fn get_client(&mut self, connection: Resource<Connection>) -> Result<&C, v2::Error> {
+        self.connections
+            .get(connection.rep())
+            .ok_or_else(|| v2::Error::ConnectionFailed("no connection found".into()))
+    }
+
+    async fn is_address_allowed(&self, address: &str) -> Result<bool> {
+        let Ok(config) = address.parse::<tokio_postgres::Config>() else {
+            return Ok(false);
+        };
+        for (i, host) in config.get_hosts().iter().enumerate() {
+            match host {
+                tokio_postgres::config::Host::Tcp(address) => {
+                    let ports = config.get_ports();
+                    // The port we use is either:
+                    // * The port at the same index as the host
+                    // * The first port if there is only one port
+                    let port =
+                        ports
+                            .get(i)
+                            .or_else(|| if ports.len() == 1 { ports.get(0) } else { None });
+                    let port_str = port.map(|p| format!(":{}", p)).unwrap_or_default();
+                    let url = format!("{address}{port_str}");
+                    if !self.allowed_hosts.check_url(&url, "postgres").await? {
+                        return Ok(false);
+                    }
+                }
+                #[cfg(unix)]
+                tokio_postgres::config::Host::Unix(_) => return Ok(false),
+            }
+        }
+        Ok(true)
+    }
+}
+
+#[async_trait]
+impl<C: Client + Send + Sync> v2::Host for InstanceState<C> {}
+
+#[async_trait]
+impl<C: Client + Send + Sync> v2::HostConnection for InstanceState<C> {
+    #[instrument(name = "spin_outbound_pg.open_connection", skip(self), err(level = Level::INFO), fields(otel.kind = "client", db.system = "postgresql"))]
+    async fn open(&mut self, address: String) -> Result<Resource<Connection>, v2::Error> {
+        if !self
+            .is_address_allowed(&address)
+            .await
+            .map_err(|e| v2::Error::Other(e.to_string()))?
+        {
+            return Err(v2::Error::ConnectionFailed(format!(
+                "address {address} is not permitted"
+            )));
+        }
+        self.open_connection(&address).await
+    }
+
+    #[instrument(name = "spin_outbound_pg.execute", skip(self, connection), err(level = Level::INFO), fields(otel.kind = "client", db.system = "postgresql", otel.name = statement))]
+    async fn execute(
+        &mut self,
+        connection: Resource<Connection>,
+        statement: String,
+        params: Vec<ParameterValue>,
+    ) -> Result<u64, v2::Error> {
+        Ok(self
+            .get_client(connection)
+            .await?
+            .execute(statement, params)
+            .await?)
+    }
+
+    #[instrument(name = "spin_outbound_pg.query", skip(self, connection), err(level = Level::INFO), fields(otel.kind = "client", db.system = "postgresql", otel.name = statement))]
+    async fn query(
+        &mut self,
+        connection: Resource<Connection>,
+        statement: String,
+        params: Vec<ParameterValue>,
+    ) -> Result<RowSet, v2::Error> {
+        Ok(self
+            .get_client(connection)
+            .await?
+            .query(statement, params)
+            .await?)
+    }
+
+    fn drop(&mut self, connection: Resource<Connection>) -> anyhow::Result<()> {
+        self.connections.remove(connection.rep());
+        Ok(())
+    }
+}
+
+impl<C: Client + Send + Sync> rdbms_types::Host for InstanceState<C> {
+    fn convert_error(&mut self, error: v2::Error) -> Result<v2::Error> {
+        Ok(error)
+    }
+}
+
+/// Delegate a function call to the v2::HostConnection implementation
+macro_rules! delegate {
+    ($self:ident.$name:ident($address:expr, $($arg:expr),*)) => {{
+        if !$self.is_address_allowed(&$address).await.map_err(|e| v2::Error::Other(e.to_string()))? {
+            return Err(v1::PgError::ConnectionFailed(format!(
+                "address {} is not permitted", $address
+            )));
+        }
+        let connection = match $self.open_connection(&$address).await {
+            Ok(c) => c,
+            Err(e) => return Err(e.into()),
+        };
+        <Self as v2::HostConnection>::$name($self, connection, $($arg),*)
+            .await
+            .map_err(|e| e.into())
+    }};
+}
+
+#[async_trait]
+impl<C: Client + Send + Sync> v1::Host for InstanceState<C> {
+    async fn execute(
+        &mut self,
+        address: String,
+        statement: String,
+        params: Vec<v1_types::ParameterValue>,
+    ) -> Result<u64, v1::PgError> {
+        delegate!(self.execute(
+            address,
+            statement,
+            params.into_iter().map(Into::into).collect()
+        ))
+    }
+
+    async fn query(
+        &mut self,
+        address: String,
+        statement: String,
+        params: Vec<v1_types::ParameterValue>,
+    ) -> Result<v1_types::RowSet, v1::PgError> {
+        delegate!(self.query(
+            address,
+            statement,
+            params.into_iter().map(Into::into).collect()
+        ))
+        .map(Into::into)
+    }
+
+    fn convert_pg_error(&mut self, error: v1::PgError) -> Result<v1::PgError> {
+        Ok(error)
+    }
+}
diff --git a/crates/factor-outbound-pg/src/lib.rs b/crates/factor-outbound-pg/src/lib.rs
new file mode 100644
index 0000000000..46efdb5679
--- /dev/null
+++ b/crates/factor-outbound-pg/src/lib.rs
@@ -0,0 +1,71 @@
+pub mod client;
+mod host;
+
+use client::Client;
+use spin_factor_outbound_networking::{OutboundAllowedHosts, OutboundNetworkingFactor};
+use spin_factors::{
+    anyhow, ConfigureAppContext, Factor, InstanceBuilders, PrepareContext, RuntimeFactors,
+    SelfInstanceBuilder,
+};
+use tokio_postgres::Client as PgClient;
+
+pub struct OutboundPgFactor<C = PgClient> {
+    _phantom: std::marker::PhantomData<C>,
+}
+
+impl<C: Client + Send + Sync + 'static> Factor for OutboundPgFactor<C> {
+    type RuntimeConfig = ();
+    type AppState = ();
+    type InstanceBuilder = InstanceState<C>;
+
+    fn init<T: Send + 'static>(
+        &mut self,
+        mut ctx: spin_factors::InitContext<T, Self>,
+    ) -> anyhow::Result<()> {
+        ctx.link_bindings(spin_world::v1::postgres::add_to_linker)?;
+        ctx.link_bindings(spin_world::v2::postgres::add_to_linker)?;
+        Ok(())
+    }
+
+    fn configure_app<T: RuntimeFactors>(
+        &self,
+        _ctx: ConfigureAppContext<T, Self>,
+    ) -> anyhow::Result<Self::AppState> {
+        Ok(())
+    }
+
+    fn prepare<T: RuntimeFactors>(
+        &self,
+        _ctx: PrepareContext<Self>,
+        builders: &mut InstanceBuilders<T>,
+    ) -> anyhow::Result<Self::InstanceBuilder> {
+        let allowed_hosts = builders
+            .get_mut::<OutboundNetworkingFactor>()?
+            .allowed_hosts();
+        Ok(InstanceState {
+            allowed_hosts,
+            connections: Default::default(),
+        })
+    }
+}
+
+impl<C> Default for OutboundPgFactor<C> {
+    fn default() -> Self {
+        Self {
+            _phantom: Default::default(),
+        }
+    }
+}
+
+impl<C> OutboundPgFactor<C> {
+    pub fn new() -> Self {
+        Self::default()
+    }
+}
+
+pub struct InstanceState<C> {
+    allowed_hosts: OutboundAllowedHosts,
+    connections: table::Table<C>,
+}
+
+impl<C: Send + 'static> SelfInstanceBuilder for InstanceState<C> {}
diff --git a/crates/factor-outbound-pg/tests/factor_test.rs b/crates/factor-outbound-pg/tests/factor_test.rs
new file mode 100644
index 0000000000..b765d805f6
--- /dev/null
+++ b/crates/factor-outbound-pg/tests/factor_test.rs
@@ -0,0 +1,136 @@
+use anyhow::{bail, Result};
+use spin_factor_outbound_networking::OutboundNetworkingFactor;
+use spin_factor_outbound_pg::client::Client;
+use spin_factor_outbound_pg::OutboundPgFactor;
+use spin_factor_variables::VariablesFactor;
+use spin_factors::{anyhow, RuntimeFactors};
+use spin_factors_test::{toml, TestEnvironment};
+use spin_world::async_trait;
+use spin_world::v2::postgres::HostConnection;
+use spin_world::v2::postgres::{self as v2};
+use spin_world::v2::rdbms_types::Error as PgError;
+use spin_world::v2::rdbms_types::{ParameterValue, RowSet};
+
+#[derive(RuntimeFactors)]
+struct TestFactors {
+    variables: VariablesFactor,
+    networking: OutboundNetworkingFactor,
+    pg: OutboundPgFactor<MockClient>,
+}
+
+fn factors() -> TestFactors {
+    TestFactors {
+        variables: VariablesFactor::default(),
+        networking: OutboundNetworkingFactor::new(),
+        pg: OutboundPgFactor::<MockClient>::new(),
+    }
+}
+
+fn test_env() -> TestEnvironment<TestFactors> {
+    TestEnvironment::new(factors()).extend_manifest(toml! {
+        [component.test-component]
+        source = "does-not-exist.wasm"
+        allowed_outbound_hosts = ["postgres://*:*"]
+    })
+}
+
+#[tokio::test]
+async fn disallowed_host_fails() -> anyhow::Result<()> {
+    let env = TestEnvironment::new(factors()).extend_manifest(toml!
{ + [component.test-component] + source = "does-not-exist.wasm" + }); + let mut state = env.build_instance_state().await?; + + let res = state + .pg + .open("postgres://postgres.test:5432/test".to_string()) + .await; + let Err(err) = res else { + bail!("expected Err, got Ok"); + }; + assert!(matches!(err, PgError::ConnectionFailed(_))); + + Ok(()) +} + +#[tokio::test] +async fn allowed_host_succeeds() -> anyhow::Result<()> { + let mut state = test_env().build_instance_state().await?; + + let res = state + .pg + .open("postgres://localhost:5432/test".to_string()) + .await; + let Ok(_) = res else { + bail!("expected Ok, got Err"); + }; + + Ok(()) +} + +#[tokio::test] +async fn exercise_execute() -> anyhow::Result<()> { + let mut state = test_env().build_instance_state().await?; + + let connection = state + .pg + .open("postgres://localhost:5432/test".to_string()) + .await?; + + state + .pg + .execute(connection, "SELECT * FROM test".to_string(), vec![]) + .await?; + + Ok(()) +} + +#[tokio::test] +async fn exercise_query() -> anyhow::Result<()> { + let mut state = test_env().build_instance_state().await?; + + let connection = state + .pg + .open("postgres://localhost:5432/test".to_string()) + .await?; + + state + .pg + .query(connection, "SELECT * FROM test".to_string(), vec![]) + .await?; + + Ok(()) +} + +// TODO: We can expand this mock to track calls and simulate return values +pub struct MockClient {} + +#[async_trait] +impl Client for MockClient { + async fn build_client(_address: &str) -> anyhow::Result + where + Self: Sized, + { + Ok(MockClient {}) + } + + async fn execute( + &self, + _statement: String, + _params: Vec, + ) -> Result { + Ok(0) + } + + async fn query( + &self, + _statement: String, + _params: Vec, + ) -> Result { + Ok(RowSet { + columns: vec![], + rows: vec![], + }) + } +} diff --git a/crates/factor-outbound-redis/Cargo.toml b/crates/factor-outbound-redis/Cargo.toml new file mode 100644 index 0000000000..0dffe6ea41 --- /dev/null +++ b/crates/factor-outbound-redis/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "spin-factor-outbound-redis" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +anyhow = "1.0" +spin-factor-outbound-networking = { path = "../factor-outbound-networking" } +spin-factors = { path = "../factors" } +spin-core = { path = "../core" } +spin-world = { path = "../world" } +tracing = { workspace = true } +table = { path = "../table" } +redis = { version = "0.21", features = ["tokio-comp", "tokio-native-tls-comp", "aio"] } + + +[dev-dependencies] +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } +spin-factor-variables = { path = "../factor-variables" } + +# wasmtime-wasi-http = { workspace = true } +[lints] +workspace = true diff --git a/crates/outbound-redis/src/lib.rs b/crates/factor-outbound-redis/src/host.rs similarity index 92% rename from crates/outbound-redis/src/lib.rs rename to crates/factor-outbound-redis/src/host.rs index e895efd628..baf3bb3c91 100644 --- a/crates/outbound-redis/src/lib.rs +++ b/crates/factor-outbound-redis/src/host.rs @@ -1,53 +1,21 @@ -mod host_component; - use anyhow::Result; use redis::{aio::Connection, AsyncCommands, FromRedisValue, Value}; use spin_core::{async_trait, wasmtime::component::Resource}; +use spin_factor_outbound_networking::OutboundAllowedHosts; use spin_world::v1::{redis as v1, redis_types}; use spin_world::v2::redis::{ self as v2, Connection as RedisConnection, Error, RedisParameter, 
RedisResult, }; - -pub use host_component::OutboundRedisComponent; use tracing::{instrument, Level}; -struct RedisResults(Vec); - -impl FromRedisValue for RedisResults { - fn from_redis_value(value: &Value) -> redis::RedisResult { - fn append(values: &mut Vec, value: &Value) { - match value { - Value::Nil | Value::Okay => (), - Value::Int(v) => values.push(RedisResult::Int64(*v)), - Value::Data(bytes) => values.push(RedisResult::Binary(bytes.to_owned())), - Value::Bulk(bulk) => bulk.iter().for_each(|value| append(values, value)), - Value::Status(message) => values.push(RedisResult::Status(message.to_owned())), - } - } - - let mut values = Vec::new(); - append(&mut values, value); - Ok(RedisResults(values)) - } -} - -pub struct OutboundRedis { - allowed_hosts: spin_outbound_networking::AllowedHostsConfig, - connections: table::Table, -} - -impl Default for OutboundRedis { - fn default() -> Self { - Self { - allowed_hosts: Default::default(), - connections: table::Table::new(1024), - } - } +pub struct InstanceState { + pub allowed_hosts: OutboundAllowedHosts, + pub connections: table::Table, } -impl OutboundRedis { - fn is_address_allowed(&self, address: &str) -> bool { - spin_outbound_networking::check_url(address, "redis", &self.allowed_hosts) +impl InstanceState { + async fn is_address_allowed(&self, address: &str) -> Result { + self.allowed_hosts.check_url(address, "redis").await } async fn establish_connection( @@ -64,19 +32,34 @@ impl OutboundRedis { .map(Resource::new_own) .map_err(|_| Error::TooManyConnections) } + + async fn get_conn( + &mut self, + connection: Resource, + ) -> Result<&mut Connection, Error> { + self.connections + .get_mut(connection.rep()) + .ok_or(Error::Other( + "could not find connection for resource".into(), + )) + } } -impl v2::Host for OutboundRedis { +impl v2::Host for crate::InstanceState { fn convert_error(&mut self, error: Error) -> Result { Ok(error) } } #[async_trait] -impl v2::HostConnection for OutboundRedis { +impl v2::HostConnection for crate::InstanceState { #[instrument(name = "spin_outbound_redis.open_connection", skip(self), err(level = Level::INFO), fields(otel.kind = "client", db.system = "redis"))] async fn open(&mut self, address: String) -> Result, Error> { - if !self.is_address_allowed(&address) { + if !self + .is_address_allowed(&address) + .await + .map_err(|e| v2::Error::Other(e.to_string()))? + { return Err(Error::InvalidAddress); } @@ -220,7 +203,7 @@ fn other_error(e: impl std::fmt::Display) -> Error { /// Delegate a function call to the v2::HostConnection implementation macro_rules! delegate { ($self:ident.$name:ident($address:expr, $($arg:expr),*)) => {{ - if !$self.is_address_allowed(&$address) { + if !$self.is_address_allowed(&$address).await.map_err(|_| v1::Error::Error)? { return Err(v1::Error::Error); } let connection = match $self.establish_connection($address).await { @@ -234,7 +217,7 @@ macro_rules! 
delegate { } #[async_trait] -impl v1::Host for OutboundRedis { +impl v1::Host for crate::InstanceState { async fn publish( &mut self, address: String, @@ -297,21 +280,28 @@ impl v1::Host for OutboundRedis { } } -impl redis_types::Host for OutboundRedis { +impl redis_types::Host for crate::InstanceState { fn convert_error(&mut self, error: redis_types::Error) -> Result { Ok(error) } } -impl OutboundRedis { - async fn get_conn( - &mut self, - connection: Resource, - ) -> Result<&mut Connection, Error> { - self.connections - .get_mut(connection.rep()) - .ok_or(Error::Other( - "could not find connection for resource".into(), - )) +struct RedisResults(Vec); + +impl FromRedisValue for RedisResults { + fn from_redis_value(value: &Value) -> redis::RedisResult { + fn append(values: &mut Vec, value: &Value) { + match value { + Value::Nil | Value::Okay => (), + Value::Int(v) => values.push(RedisResult::Int64(*v)), + Value::Data(bytes) => values.push(RedisResult::Binary(bytes.to_owned())), + Value::Bulk(bulk) => bulk.iter().for_each(|value| append(values, value)), + Value::Status(message) => values.push(RedisResult::Status(message.to_owned())), + } + } + + let mut values = Vec::new(); + append(&mut values, value); + Ok(RedisResults(values)) } } diff --git a/crates/factor-outbound-redis/src/lib.rs b/crates/factor-outbound-redis/src/lib.rs new file mode 100644 index 0000000000..1c9f137525 --- /dev/null +++ b/crates/factor-outbound-redis/src/lib.rs @@ -0,0 +1,58 @@ +mod host; + +use host::InstanceState; +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factors::{ + anyhow, ConfigureAppContext, Factor, InstanceBuilders, PrepareContext, RuntimeFactors, + SelfInstanceBuilder, +}; + +/// The [`Factor`] for `fermyon:spin/outbound-redis`. +#[derive(Default)] +pub struct OutboundRedisFactor { + _priv: (), +} + +impl OutboundRedisFactor { + pub fn new() -> Self { + Self::default() + } +} + +impl Factor for OutboundRedisFactor { + type RuntimeConfig = (); + type AppState = (); + type InstanceBuilder = InstanceState; + + fn init( + &mut self, + mut ctx: spin_factors::InitContext, + ) -> anyhow::Result<()> { + ctx.link_bindings(spin_world::v1::redis::add_to_linker)?; + ctx.link_bindings(spin_world::v2::redis::add_to_linker)?; + Ok(()) + } + + fn configure_app( + &self, + _ctx: ConfigureAppContext, + ) -> anyhow::Result { + Ok(()) + } + + fn prepare( + &self, + _ctx: PrepareContext, + builders: &mut InstanceBuilders, + ) -> anyhow::Result { + let allowed_hosts = builders + .get_mut::()? 
+ .allowed_hosts(); + Ok(InstanceState { + allowed_hosts, + connections: table::Table::new(1024), + }) + } +} + +impl SelfInstanceBuilder for InstanceState {} diff --git a/crates/factor-outbound-redis/tests/factor_test.rs b/crates/factor-outbound-redis/tests/factor_test.rs new file mode 100644 index 0000000000..6f2f7b051a --- /dev/null +++ b/crates/factor-outbound-redis/tests/factor_test.rs @@ -0,0 +1,43 @@ +use anyhow::bail; +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factor_outbound_redis::OutboundRedisFactor; +use spin_factor_variables::VariablesFactor; +use spin_factors::{anyhow, RuntimeFactors}; +use spin_factors_test::{toml, TestEnvironment}; +use spin_world::v2::redis::{Error, HostConnection}; + +#[derive(RuntimeFactors)] +struct TestFactors { + variables: VariablesFactor, + networking: OutboundNetworkingFactor, + redis: OutboundRedisFactor, +} + +#[tokio::test] +async fn no_outbound_hosts_fails() -> anyhow::Result<()> { + let factors = TestFactors { + variables: VariablesFactor::default(), + networking: OutboundNetworkingFactor::new(), + redis: OutboundRedisFactor::new(), + }; + let env = TestEnvironment::new(factors).extend_manifest(toml! { + spin_manifest_version = 2 + application.name = "test-app" + [[trigger.test]] + + [component.test-component] + source = "does-not-exist.wasm" + }); + let mut state = env.build_instance_state().await?; + let connection = state + .redis + .open("redis://redis.test:8080".to_string()) + .await; + + let Err(err) = connection else { + bail!("expected Error, got Ok"); + }; + + assert!(matches!(err, Error::InvalidAddress)); + Ok(()) +} diff --git a/crates/factor-sqlite/Cargo.toml b/crates/factor-sqlite/Cargo.toml new file mode 100644 index 0000000000..3b45dcf3ca --- /dev/null +++ b/crates/factor-sqlite/Cargo.toml @@ -0,0 +1,38 @@ +[package] +name = "spin-factor-sqlite" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +rust-version.workspace = true + +[dependencies] +async-trait = "0.1" +serde = { version = "1.0", features = ["rc"] } +spin-factors = { path = "../factors" } +spin-locked-app = { path = "../locked-app" } +spin-world = { path = "../world" } +table = { path = "../table" } +tokio = "1" +toml = "0.8" +spin-sqlite = { path = "../sqlite", optional = true } +spin-sqlite-inproc = { path = "../sqlite-inproc", optional = true } +spin-sqlite-libsql = { path = "../sqlite-libsql", optional = true } + +[dev-dependencies] +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } + +[features] +default = ["spin-cli"] +# Includes the runtime configuration handling used by the Spin CLI +spin-cli = [ + "dep:spin-sqlite", + "dep:spin-sqlite-inproc", + "dep:spin-sqlite-libsql", +] + +[lints] +workspace = true diff --git a/crates/factor-sqlite/src/host.rs b/crates/factor-sqlite/src/host.rs new file mode 100644 index 0000000000..92d4b146dc --- /dev/null +++ b/crates/factor-sqlite/src/host.rs @@ -0,0 +1,174 @@ +use std::collections::HashSet; +use std::sync::Arc; + +use async_trait::async_trait; + +use spin_factors::wasmtime::component::Resource; +use spin_factors::{anyhow, SelfInstanceBuilder}; +use spin_world::v1::sqlite as v1; +use spin_world::v2::sqlite as v2; + +use crate::{Connection, ConnectionCreator}; + +pub struct InstanceState { + allowed_databases: Arc>, + connections: table::Table>, + get_connection_creator: ConnectionCreatorGetter, +} + +impl InstanceState { + pub 
fn allowed_databases(&self) -> &HashSet { + &self.allowed_databases + } +} + +/// A function that takes a database label and returns a connection creator, if one exists. +pub type ConnectionCreatorGetter = + Arc Option> + Send + Sync>; + +impl InstanceState { + /// Create a new `InstanceState` + /// + /// Takes the list of allowed databases, and a function for getting a connection creator given a database label. + pub fn new( + allowed_databases: Arc>, + get_connection_creator: ConnectionCreatorGetter, + ) -> Self { + Self { + allowed_databases, + connections: table::Table::new(256), + get_connection_creator, + } + } + + fn get_connection( + &self, + connection: Resource, + ) -> Result<&dyn Connection, v2::Error> { + self.connections + .get(connection.rep()) + .map(|conn| conn.as_ref()) + .ok_or(v2::Error::InvalidConnection) + } +} + +impl SelfInstanceBuilder for InstanceState {} + +impl v2::Host for InstanceState { + fn convert_error(&mut self, error: v2::Error) -> anyhow::Result { + Ok(error) + } +} + +#[async_trait] +impl v2::HostConnection for InstanceState { + async fn open(&mut self, database: String) -> Result, v2::Error> { + if !self.allowed_databases.contains(&database) { + return Err(v2::Error::AccessDenied); + } + (self.get_connection_creator)(&database) + .ok_or(v2::Error::NoSuchDatabase)? + .create_connection() + .await + .and_then(|conn| { + self.connections + .push(conn) + .map_err(|()| v2::Error::Io("too many connections opened".to_string())) + }) + .map(Resource::new_own) + } + + async fn execute( + &mut self, + connection: Resource, + query: String, + parameters: Vec, + ) -> Result { + let conn = match self.get_connection(connection) { + Ok(c) => c, + Err(err) => return Err(err), + }; + conn.query(&query, parameters).await + } + + fn drop(&mut self, connection: Resource) -> anyhow::Result<()> { + let _ = self.connections.remove(connection.rep()); + Ok(()) + } +} + +#[async_trait] +impl v1::Host for InstanceState { + async fn open(&mut self, database: String) -> Result { + let result = ::open(self, database).await; + result.map_err(to_legacy_error).map(|s| s.rep()) + } + + async fn execute( + &mut self, + connection: u32, + query: String, + parameters: Vec, + ) -> Result { + let this = Resource::new_borrow(connection); + let result = ::execute( + self, + this, + query, + parameters.into_iter().map(from_legacy_value).collect(), + ) + .await; + result.map_err(to_legacy_error).map(to_legacy_query_result) + } + + async fn close(&mut self, connection: u32) -> anyhow::Result<()> { + ::drop(self, Resource::new_own(connection)) + } + + fn convert_error(&mut self, error: v1::Error) -> anyhow::Result { + Ok(error) + } +} + +fn to_legacy_error(error: v2::Error) -> v1::Error { + match error { + v2::Error::NoSuchDatabase => v1::Error::NoSuchDatabase, + v2::Error::AccessDenied => v1::Error::AccessDenied, + v2::Error::InvalidConnection => v1::Error::InvalidConnection, + v2::Error::DatabaseFull => v1::Error::DatabaseFull, + v2::Error::Io(s) => v1::Error::Io(s), + } +} + +fn to_legacy_query_result(result: v2::QueryResult) -> v1::QueryResult { + v1::QueryResult { + columns: result.columns, + rows: result.rows.into_iter().map(to_legacy_row_result).collect(), + } +} + +fn to_legacy_row_result(result: v2::RowResult) -> v1::RowResult { + v1::RowResult { + values: result.values.into_iter().map(to_legacy_value).collect(), + } +} + +fn to_legacy_value(value: v2::Value) -> v1::Value { + match value { + v2::Value::Integer(i) => v1::Value::Integer(i), + v2::Value::Real(r) => v1::Value::Real(r), + 
v2::Value::Text(t) => v1::Value::Text(t), + v2::Value::Blob(b) => v1::Value::Blob(b), + v2::Value::Null => v1::Value::Null, + } +} + +fn from_legacy_value(value: v1::Value) -> v2::Value { + match value { + v1::Value::Integer(i) => v2::Value::Integer(i), + v1::Value::Real(r) => v2::Value::Real(r), + v1::Value::Text(t) => v2::Value::Text(t), + v1::Value::Blob(b) => v2::Value::Blob(b), + v1::Value::Null => v2::Value::Null, + } +} diff --git a/crates/factor-sqlite/src/lib.rs b/crates/factor-sqlite/src/lib.rs new file mode 100644 index 0000000000..4d90a1c64a --- /dev/null +++ b/crates/factor-sqlite/src/lib.rs @@ -0,0 +1,188 @@ +mod host; +pub mod runtime_config; + +use std::collections::{HashMap, HashSet}; +use std::sync::Arc; + +use host::InstanceState; + +use async_trait::async_trait; +use spin_factors::{anyhow, Factor}; +use spin_locked_app::MetadataKey; +use spin_world::v1::sqlite as v1; +use spin_world::v2::sqlite as v2; + +pub use runtime_config::RuntimeConfig; + +pub struct SqliteFactor { + default_label_resolver: Arc, +} + +impl SqliteFactor { + /// Create a new `SqliteFactor` + /// + /// Takes a `default_label_resolver` for how to handle when a database label doesn't + /// have a corresponding runtime configuration. + pub fn new(default_label_resolver: impl DefaultLabelResolver + 'static) -> Self { + Self { + default_label_resolver: Arc::new(default_label_resolver), + } + } +} + +impl Factor for SqliteFactor { + type RuntimeConfig = RuntimeConfig; + type AppState = AppState; + type InstanceBuilder = InstanceState; + + fn init( + &mut self, + mut ctx: spin_factors::InitContext, + ) -> anyhow::Result<()> { + ctx.link_bindings(v1::add_to_linker)?; + ctx.link_bindings(v2::add_to_linker)?; + Ok(()) + } + + fn configure_app( + &self, + mut ctx: spin_factors::ConfigureAppContext, + ) -> anyhow::Result { + let connection_creators = ctx + .take_runtime_config() + .map(|r| r.connection_creators) + .unwrap_or_default(); + + let allowed_databases = ctx + .app() + .components() + .map(|component| { + Ok(( + component.id().to_string(), + Arc::new( + component + .get_metadata(ALLOWED_DATABASES_KEY)? 
+ .unwrap_or_default() + .into_iter() + .collect::>(), + ), + )) + }) + .collect::>>()?; + let resolver = self.default_label_resolver.clone(); + let get_connection_creator: host::ConnectionCreatorGetter = Arc::new(move |label| { + connection_creators + .get(label) + .cloned() + .or_else(|| resolver.default(label)) + }); + + ensure_allowed_databases_are_configured(&allowed_databases, |label| { + get_connection_creator(label).is_some() + })?; + + Ok(AppState { + allowed_databases, + get_connection_creator, + }) + } + + fn prepare( + &self, + ctx: spin_factors::PrepareContext, + _builders: &mut spin_factors::InstanceBuilders, + ) -> spin_factors::anyhow::Result { + let allowed_databases = ctx + .app_state() + .allowed_databases + .get(ctx.app_component().id()) + .cloned() + .unwrap_or_default(); + let get_connection_creator = ctx.app_state().get_connection_creator.clone(); + Ok(InstanceState::new( + allowed_databases, + get_connection_creator, + )) + } +} + +/// Ensure that all the databases in the allowed databases list for each component are configured +fn ensure_allowed_databases_are_configured( + allowed_databases: &HashMap>>, + is_configured: impl Fn(&str) -> bool, +) -> anyhow::Result<()> { + let mut errors = Vec::new(); + for (component_id, allowed_dbs) in allowed_databases { + for allowed in allowed_dbs.iter() { + if !is_configured(allowed) { + errors.push(format!( + "- Component {component_id} uses database '{allowed}'" + )); + } + } + } + + if !errors.is_empty() { + let prologue = vec![ + "One or more components use SQLite databases which are not defined.", + "Check the spelling, or pass a runtime configuration file that defines these stores.", + "See https://developer.fermyon.com/spin/dynamic-configuration#sqlite-storage-runtime-configuration", + "Details:", + ]; + let lines: Vec<_> = prologue + .into_iter() + .map(|s| s.to_owned()) + .chain(errors) + .collect(); + return Err(anyhow::anyhow!(lines.join("\n"))); + } + Ok(()) +} + +pub const ALLOWED_DATABASES_KEY: MetadataKey> = MetadataKey::new("databases"); + +/// Resolves a label to a default connection creator. +pub trait DefaultLabelResolver: Send + Sync { + /// If there is no runtime configuration for a given database label, return a default connection creator. + /// + /// If `Option::None` is returned, the database is not allowed. + fn default(&self, label: &str) -> Option>; +} + +pub struct AppState { + /// A map from component id to a set of allowed database labels. + allowed_databases: HashMap>>, + /// A function for mapping from database name to a connection creator. + get_connection_creator: host::ConnectionCreatorGetter, +} + +/// A creator of a connections for a particular SQLite database. +#[async_trait] +pub trait ConnectionCreator: Send + Sync { + /// Get a *new* [`Connection`] + /// + /// The connection should be a new connection, not a reused one. 
+ async fn create_connection(&self) -> Result, v2::Error>; +} + +#[async_trait::async_trait] +impl ConnectionCreator for F +where + F: Fn() -> anyhow::Result> + Send + Sync + 'static, +{ + async fn create_connection(&self) -> Result, v2::Error> { + (self)().map_err(|_| v2::Error::InvalidConnection) + } +} + +/// A trait abstracting over operations to a SQLite database +#[async_trait] +pub trait Connection: Send + Sync { + async fn query( + &self, + query: &str, + parameters: Vec, + ) -> Result; + + async fn execute_batch(&self, statements: &str) -> anyhow::Result<()>; +} diff --git a/crates/factor-sqlite/src/runtime_config.rs b/crates/factor-sqlite/src/runtime_config.rs new file mode 100644 index 0000000000..10eb8e871e --- /dev/null +++ b/crates/factor-sqlite/src/runtime_config.rs @@ -0,0 +1,13 @@ +#[cfg(feature = "spin-cli")] +pub mod spin; + +use std::{collections::HashMap, sync::Arc}; + +use crate::ConnectionCreator; + +/// A runtime configuration for SQLite databases. +/// +/// Maps database labels to connection creators. +pub struct RuntimeConfig { + pub connection_creators: HashMap>, +} diff --git a/crates/factor-sqlite/src/runtime_config/spin.rs b/crates/factor-sqlite/src/runtime_config/spin.rs new file mode 100644 index 0000000000..ef84ca1606 --- /dev/null +++ b/crates/factor-sqlite/src/runtime_config/spin.rs @@ -0,0 +1,262 @@ +//! Spin's default handling of the runtime configuration for SQLite databases. + +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; + +use serde::Deserialize; +use spin_factors::{ + anyhow::{self, Context as _}, + runtime_config::toml::GetTomlValue, +}; +use spin_sqlite_inproc::InProcDatabaseLocation; +use spin_world::v2::sqlite as v2; +use tokio::sync::OnceCell; + +use crate::{Connection, ConnectionCreator, DefaultLabelResolver}; + +/// Spin's default resolution of runtime configuration for SQLite databases. +/// +/// This type implements how Spin CLI's SQLite implementation is configured +/// through the runtime config toml as well as the behavior of the "default" label. +pub struct RuntimeConfigResolver { + default_database_dir: Option, + local_database_dir: PathBuf, +} + +impl RuntimeConfigResolver { + /// Create a new `SpinSqliteRuntimeConfig` + /// + /// This takes as arguments: + /// * the directory to use as the default location for SQLite databases. + /// Usually this will be the path to the `.spin` state directory. If + /// `None`, the default database will be in-memory. + /// * the path to the directory from which relative paths to + /// local SQLite databases are resolved. (this should most likely be the + /// path to the runtime-config file or the current working dir). + pub fn new(default_database_dir: Option, local_database_dir: PathBuf) -> Self { + Self { + default_database_dir, + local_database_dir, + } + } + + /// Get the runtime configuration for SQLite databases from a TOML table. + /// + /// Expects table to be in the format: + /// ````toml + /// [sqlite_database.$database-label] + /// type = "$database-type" + /// ... extra type specific configuration ... 
+ /// ``` + pub fn resolve_from_toml( + &self, + table: &impl GetTomlValue, + ) -> anyhow::Result> { + let Some(table) = table.get("sqlite_database") else { + return Ok(None); + }; + let config: std::collections::HashMap = table.clone().try_into()?; + let connection_creators = config + .into_iter() + .map(|(k, v)| Ok((k, self.get_connection_creator(v)?))) + .collect::>()?; + Ok(Some(super::RuntimeConfig { + connection_creators, + })) + } + + /// Get a connection creator for a given runtime configuration. + pub fn get_connection_creator( + &self, + config: RuntimeConfig, + ) -> anyhow::Result> { + let database_kind = config.type_.as_str(); + match database_kind { + "spin" => { + let config: LocalDatabase = config.config.try_into()?; + Ok(Arc::new( + config.connection_creator(&self.local_database_dir)?, + )) + } + "libsql" => { + let config: LibSqlDatabase = config.config.try_into()?; + Ok(Arc::new(config.connection_creator()?)) + } + _ => anyhow::bail!("Unknown database kind: {database_kind}"), + } + } +} + +#[derive(Deserialize)] +pub struct RuntimeConfig { + #[serde(rename = "type")] + pub type_: String, + #[serde(flatten)] + pub config: toml::Table, +} + +impl DefaultLabelResolver for RuntimeConfigResolver { + fn default(&self, label: &str) -> Option> { + // Only default the database labeled "default". + if label != "default" { + return None; + } + + let path = self + .default_database_dir + .as_deref() + .map(|p| p.join(DEFAULT_SQLITE_DB_FILENAME)); + let factory = move || { + let location = InProcDatabaseLocation::from_path(path.clone())?; + let connection = spin_sqlite_inproc::InProcConnection::new(location)?; + Ok(Box::new(connection) as _) + }; + Some(Arc::new(factory)) + } +} + +const DEFAULT_SQLITE_DB_FILENAME: &str = "sqlite_db.db"; + +#[async_trait::async_trait] +impl Connection for spin_sqlite_inproc::InProcConnection { + async fn query( + &self, + query: &str, + parameters: Vec, + ) -> Result { + ::query(self, query, parameters).await + } + + async fn execute_batch(&self, statements: &str) -> anyhow::Result<()> { + ::execute_batch(self, statements).await + } +} + +/// A wrapper around a libSQL connection that implements the [`Connection`] trait. +struct LibSqlConnection { + url: String, + token: String, + // Since the libSQL client can only be created asynchronously, we wait until + // we're in the `Connection` implementation to create. Since we only want to do + // this once, we use a `OnceCell` to store it. + inner: OnceCell, +} + +impl LibSqlConnection { + fn new(url: String, token: String) -> Self { + Self { + url, + token, + inner: OnceCell::new(), + } + } + + async fn get_client(&self) -> Result<&spin_sqlite_libsql::LibsqlClient, v2::Error> { + self.inner + .get_or_try_init(|| async { + spin_sqlite_libsql::LibsqlClient::create(self.url.clone(), self.token.clone()) + .await + .context("failed to create SQLite client") + }) + .await + .map_err(|_| v2::Error::InvalidConnection) + } +} + +#[async_trait::async_trait] +impl Connection for LibSqlConnection { + async fn query( + &self, + query: &str, + parameters: Vec, + ) -> Result { + let client = self.get_client().await?; + ::query( + client, query, parameters, + ) + .await + } + + async fn execute_batch(&self, statements: &str) -> anyhow::Result<()> { + let client = self.get_client().await?; + ::execute_batch( + client, statements, + ) + .await + } +} + +/// Configuration for a local SQLite database. 
+#[derive(Clone, Debug, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct LocalDatabase { + pub path: Option, +} + +impl LocalDatabase { + /// Get a new connection creator for a local database. + /// + /// `base_dir` is the base directory path from which `path` is resolved if it is a relative path. + fn connection_creator(self, base_dir: &Path) -> anyhow::Result { + let path = self + .path + .as_ref() + .map(|p| resolve_relative_path(p, base_dir)); + let location = InProcDatabaseLocation::from_path(path)?; + let factory = move || { + let connection = spin_sqlite_inproc::InProcConnection::new(location.clone())?; + Ok(Box::new(connection) as _) + }; + Ok(factory) + } +} + +/// Resolve a relative path against a base dir. +/// +/// If the path is absolute, it is returned as is. Otherwise, it is resolved against the base dir. +fn resolve_relative_path(path: &Path, base_dir: &Path) -> PathBuf { + if path.is_absolute() { + return path.to_owned(); + } + base_dir.join(path) +} + +/// Configuration for a libSQL database. +#[derive(Clone, Debug, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct LibSqlDatabase { + url: String, + token: String, +} + +impl LibSqlDatabase { + /// Get a new connection creator for a libSQL database. + fn connection_creator(self) -> anyhow::Result { + let url = check_url(&self.url) + .with_context(|| { + format!( + "unexpected libSQL URL '{}' in runtime config file ", + self.url + ) + })? + .to_owned(); + let factory = move || { + let connection = LibSqlConnection::new(url.clone(), self.token.clone()); + Ok(Box::new(connection) as _) + }; + Ok(factory) + } +} + +// Checks an incoming url is in the shape we expect +fn check_url(url: &str) -> anyhow::Result<&str> { + if url.starts_with("https://") || url.starts_with("http://") { + Ok(url) + } else { + Err(anyhow::anyhow!( + "URL does not start with 'https://' or 'http://'. Spin currently only supports talking to libSQL databases over HTTP(S)" + )) + } +} diff --git a/crates/factor-sqlite/tests/factor_test.rs b/crates/factor-sqlite/tests/factor_test.rs new file mode 100644 index 0000000000..b668bf343d --- /dev/null +++ b/crates/factor-sqlite/tests/factor_test.rs @@ -0,0 +1,153 @@ +use std::{collections::HashSet, sync::Arc}; + +use spin_factor_sqlite::{runtime_config::spin::RuntimeConfigResolver, SqliteFactor}; +use spin_factors::{ + anyhow::{self, bail, Context}, + runtime_config::toml::TomlKeyTracker, + Factor, FactorRuntimeConfigSource, RuntimeConfigSourceFinalizer, RuntimeFactors, +}; +use spin_factors_test::{toml, TestEnvironment}; + +#[derive(RuntimeFactors)] +struct TestFactors { + sqlite: SqliteFactor, +} + +#[tokio::test] +async fn sqlite_works() -> anyhow::Result<()> { + let test_resolver = DefaultLabelResolver::new(Some("default")); + let factors = TestFactors { + sqlite: SqliteFactor::new(test_resolver), + }; + let env = TestEnvironment::new(factors).extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + sqlite_databases = ["default"] + }); + let state = env.build_instance_state().await?; + + assert_eq!( + state.sqlite.allowed_databases(), + &["default".into()].into_iter().collect::>() + ); + + Ok(()) +} + +#[tokio::test] +async fn errors_when_non_configured_database_used() -> anyhow::Result<()> { + let test_resolver = DefaultLabelResolver::new(None); + let factors = TestFactors { + sqlite: SqliteFactor::new(test_resolver), + }; + let env = TestEnvironment::new(factors).extend_manifest(toml! 
{ + [component.test-component] + source = "does-not-exist.wasm" + sqlite_databases = ["foo"] + }); + let Err(err) = env.build_instance_state().await else { + bail!("Expected build_instance_state to error but it did not"); + }; + + assert!(err + .to_string() + .contains("One or more components use SQLite databases which are not defined.")); + + Ok(()) +} + +#[tokio::test] +async fn no_error_when_database_is_configured() -> anyhow::Result<()> { + let test_resolver = DefaultLabelResolver::new(None); + let factors = TestFactors { + sqlite: SqliteFactor::new(test_resolver), + }; + let runtime_config = toml! { + [sqlite_database.foo] + type = "spin" + }; + let sqlite_config = RuntimeConfigResolver::new(None, "/".into()); + let env = TestEnvironment::new(factors) + .extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + sqlite_databases = ["foo"] + }) + .runtime_config(TomlRuntimeSource::new(&runtime_config, sqlite_config))?; + env.build_instance_state() + .await + .context("build_instance_state failed")?; + Ok(()) +} + +struct TomlRuntimeSource<'a> { + table: TomlKeyTracker<'a>, + runtime_config_resolver: RuntimeConfigResolver, +} + +impl<'a> TomlRuntimeSource<'a> { + fn new(table: &'a toml::Table, runtime_config_resolver: RuntimeConfigResolver) -> Self { + Self { + table: TomlKeyTracker::new(table), + runtime_config_resolver, + } + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeSource<'_> { + fn get_runtime_config( + &mut self, + ) -> anyhow::Result::RuntimeConfig>> { + self.runtime_config_resolver.resolve_from_toml(&self.table) + } +} + +impl RuntimeConfigSourceFinalizer for TomlRuntimeSource<'_> { + fn finalize(&mut self) -> anyhow::Result<()> { + self.table.validate_all_keys_used()?; + Ok(()) + } +} + +impl TryFrom> for TestFactorsRuntimeConfig { + type Error = anyhow::Error; + + fn try_from(value: TomlRuntimeSource<'_>) -> Result { + Self::from_source(value) + } +} + +/// Will return an `InvalidConnectionCreator` for the supplied default database. +struct DefaultLabelResolver { + default: Option, +} + +impl DefaultLabelResolver { + fn new(default: Option<&str>) -> Self { + Self { + default: default.map(Into::into), + } + } +} + +impl spin_factor_sqlite::DefaultLabelResolver for DefaultLabelResolver { + fn default(&self, label: &str) -> Option> { + let Some(default) = &self.default else { + return None; + }; + (default == label).then_some(Arc::new(InvalidConnectionCreator)) + } +} + +/// A connection creator that always returns an error. 
+struct InvalidConnectionCreator; + +#[async_trait::async_trait] +impl spin_factor_sqlite::ConnectionCreator for InvalidConnectionCreator { + async fn create_connection( + &self, + ) -> Result, spin_world::v2::sqlite::Error> + { + Err(spin_world::v2::sqlite::Error::InvalidConnection) + } +} diff --git a/crates/variables/Cargo.toml b/crates/factor-variables/Cargo.toml similarity index 57% rename from crates/variables/Cargo.toml rename to crates/factor-variables/Cargo.toml index f5de219e41..60e0f507b7 100644 --- a/crates/variables/Cargo.toml +++ b/crates/factor-variables/Cargo.toml @@ -1,29 +1,26 @@ [package] -name = "spin-variables" +name = "spin-factor-variables" version = { workspace = true } authors = { workspace = true } edition = { workspace = true } [dependencies] -anyhow = "1.0" -async-trait = "0.1" +azure_security_keyvault = { git = "https://github.com/azure/azure-sdk-for-rust", rev = "8c4caa251c3903d5eae848b41bb1d02a4d65231c" } +azure_core = { git = "https://github.com/azure/azure-sdk-for-rust", rev = "8c4caa251c3903d5eae848b41bb1d02a4d65231c" } +azure_identity = { git = "https://github.com/azure/azure-sdk-for-rust", rev = "8c4caa251c3903d5eae848b41bb1d02a4d65231c" } dotenvy = "0.15" -once_cell = "1" -spin-app = { path = "../app" } -spin-core = { path = "../core" } +serde = { version = "1.0", features = ["rc"] } spin-expressions = { path = "../expressions" } +spin-factors = { path = "../factors" } spin-world = { path = "../world" } -thiserror = "1" tokio = { version = "1", features = ["rt-multi-thread"] } -vaultrs = "0.6.2" -serde = "1.0.188" +toml = "0.8" tracing = { workspace = true } -azure_security_keyvault = { git = "https://github.com/azure/azure-sdk-for-rust.git", rev = "8c4caa251c3903d5eae848b41bb1d02a4d65231c" } -azure_core = { git = "https://github.com/azure/azure-sdk-for-rust.git", rev = "8c4caa251c3903d5eae848b41bb1d02a4d65231c" } -azure_identity = { git = "https://github.com/azure/azure-sdk-for-rust.git", rev = "8c4caa251c3903d5eae848b41bb1d02a4d65231c" } +vaultrs = "0.6.2" [dev-dependencies] -toml = "0.5" +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } [lints] workspace = true diff --git a/crates/factor-variables/build.rs b/crates/factor-variables/build.rs new file mode 100644 index 0000000000..c96556b06e --- /dev/null +++ b/crates/factor-variables/build.rs @@ -0,0 +1,6 @@ +fn main() { + println!("cargo:rerun-if-changed=build.rs"); + // Enable spin-factors-derive to emit expanded macro output. 
+ let out_dir = std::env::var("OUT_DIR").unwrap(); + println!("cargo:rustc-env=SPIN_FACTORS_DERIVE_EXPAND_DIR={out_dir}"); +} diff --git a/crates/factor-variables/src/host.rs b/crates/factor-variables/src/host.rs new file mode 100644 index 0000000000..aa1d70f34f --- /dev/null +++ b/crates/factor-variables/src/host.rs @@ -0,0 +1,46 @@ +use spin_factors::anyhow; +use spin_world::{async_trait, v1, v2::variables}; + +use crate::InstanceState; + +#[async_trait] +impl variables::Host for InstanceState { + async fn get(&mut self, key: String) -> Result { + let key = spin_expressions::Key::new(&key).map_err(expressions_to_variables_err)?; + self.expression_resolver + .resolve(&self.component_id, key) + .await + .map_err(expressions_to_variables_err) + } + + fn convert_error(&mut self, error: variables::Error) -> anyhow::Result { + Ok(error) + } +} + +#[async_trait] +impl v1::config::Host for InstanceState { + async fn get_config(&mut self, key: String) -> Result { + ::get(self, key) + .await + .map_err(|err| match err { + variables::Error::InvalidName(msg) => v1::config::Error::InvalidKey(msg), + variables::Error::Undefined(msg) => v1::config::Error::Provider(msg), + other => v1::config::Error::Other(format!("{other}")), + }) + } + + fn convert_error(&mut self, err: v1::config::Error) -> anyhow::Result { + Ok(err) + } +} + +fn expressions_to_variables_err(err: spin_expressions::Error) -> variables::Error { + use spin_expressions::Error; + match err { + Error::InvalidName(msg) => variables::Error::InvalidName(msg), + Error::Undefined(msg) => variables::Error::Undefined(msg), + Error::Provider(err) => variables::Error::Provider(err.to_string()), + other => variables::Error::Other(format!("{other}")), + } +} diff --git a/crates/factor-variables/src/lib.rs b/crates/factor-variables/src/lib.rs new file mode 100644 index 0000000000..3c9b6603aa --- /dev/null +++ b/crates/factor-variables/src/lib.rs @@ -0,0 +1,94 @@ +mod host; +pub mod runtime_config; +pub mod spin_cli; + +use std::sync::Arc; + +use runtime_config::RuntimeConfig; +use spin_expressions::{ProviderResolver as ExpressionResolver, Template}; +use spin_factors::{ + anyhow, ConfigureAppContext, Factor, InitContext, InstanceBuilders, PrepareContext, + RuntimeFactors, SelfInstanceBuilder, +}; + +/// A factor for providing variables to components. 
+#[derive(Default)] +pub struct VariablesFactor { + _priv: (), +} + +impl Factor for VariablesFactor { + type RuntimeConfig = RuntimeConfig; + type AppState = AppState; + type InstanceBuilder = InstanceState; + + fn init(&mut self, mut ctx: InitContext) -> anyhow::Result<()> { + ctx.link_bindings(spin_world::v1::config::add_to_linker)?; + ctx.link_bindings(spin_world::v2::variables::add_to_linker)?; + Ok(()) + } + + fn configure_app( + &self, + mut ctx: ConfigureAppContext, + ) -> anyhow::Result { + let app = ctx.app(); + let mut expression_resolver = + ExpressionResolver::new(app.variables().map(|(key, val)| (key.clone(), val.clone())))?; + + for component in app.components() { + expression_resolver.add_component_variables( + component.id(), + component.config().map(|(k, v)| (k.into(), v.into())), + )?; + } + + for provider in ctx.take_runtime_config().unwrap_or_default() { + expression_resolver.add_provider(provider); + } + + Ok(AppState { + expression_resolver: Arc::new(expression_resolver), + }) + } + + fn prepare( + &self, + ctx: PrepareContext, + _builders: &mut InstanceBuilders, + ) -> anyhow::Result { + let component_id = ctx.app_component().id().to_string(); + let expression_resolver = ctx.app_state().expression_resolver.clone(); + Ok(InstanceState { + component_id, + expression_resolver, + }) + } +} + +pub struct AppState { + expression_resolver: Arc, +} + +impl AppState { + pub async fn resolve_expression( + &self, + expr: impl Into>, + ) -> spin_expressions::Result { + let template = Template::new(expr)?; + self.expression_resolver.resolve_template(&template).await + } +} + +pub struct InstanceState { + component_id: String, + expression_resolver: Arc, +} + +impl InstanceState { + pub fn expression_resolver(&self) -> &Arc { + &self.expression_resolver + } +} + +impl SelfInstanceBuilder for InstanceState {} diff --git a/crates/factor-variables/src/runtime_config.rs b/crates/factor-variables/src/runtime_config.rs new file mode 100644 index 0000000000..aaa7eb8da6 --- /dev/null +++ b/crates/factor-variables/src/runtime_config.rs @@ -0,0 +1,16 @@ +use spin_expressions::Provider; + +/// The runtime configuration for the variables factor. 
+#[derive(Default)] +pub struct RuntimeConfig { + pub providers: Vec>, +} + +impl IntoIterator for RuntimeConfig { + type Item = Box; + type IntoIter = std::vec::IntoIter>; + + fn into_iter(self) -> Self::IntoIter { + self.providers.into_iter() + } +} diff --git a/crates/variables/src/provider/azure_key_vault.rs b/crates/factor-variables/src/spin_cli/azure_key_vault.rs similarity index 64% rename from crates/variables/src/provider/azure_key_vault.rs rename to crates/factor-variables/src/spin_cli/azure_key_vault.rs index c9009a52fd..54fe15bbcc 100644 --- a/crates/variables/src/provider/azure_key_vault.rs +++ b/crates/factor-variables/src/spin_cli/azure_key_vault.rs @@ -1,35 +1,52 @@ use std::sync::Arc; -use anyhow::{Context, Result}; -use async_trait::async_trait; -use azure_core::auth::TokenCredential; -use azure_core::Url; +use anyhow::Context as _; +use azure_core::{auth::TokenCredential, Url}; use azure_security_keyvault::SecretClient; use serde::Deserialize; use spin_expressions::{Key, Provider}; +use spin_factors::anyhow; +use spin_world::async_trait; use tracing::{instrument, Level}; /// Azure KeyVault runtime config literal options for authentication -#[derive(Clone, Debug)] -pub struct AzureKeyVaultRuntimeConfigOptions { - client_id: String, - client_secret: String, - tenant_id: String, - authority_host: AzureAuthorityHost, +/// +/// Some of these fields are optional. Whether they are set determines whether environmental variables +/// will be used to resolve the information instead. +#[derive(Clone, Debug, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct AzureKeyVaultVariablesConfig { + pub vault_url: String, + pub client_id: Option, + pub client_secret: Option, + pub tenant_id: Option, + pub authority_host: Option, } -impl AzureKeyVaultRuntimeConfigOptions { - pub fn new( - client_id: String, - client_secret: String, - tenant_id: String, - authority_host: Option, - ) -> Self { - Self { - client_id, - client_secret, - tenant_id, - authority_host: authority_host.unwrap_or_default(), +#[derive(Debug, Copy, Clone, Deserialize, Default)] +pub enum AzureAuthorityHost { + #[default] + AzurePublicCloud, + AzureChina, + AzureGermany, + AzureGovernment, +} + +impl TryFrom for AzureKeyVaultAuthOptions { + type Error = anyhow::Error; + + fn try_from(value: AzureKeyVaultVariablesConfig) -> Result { + match (value.client_id, value.tenant_id, value.client_secret) { + (Some(client_id), Some(tenant_id), Some(client_secret)) => Ok( + AzureKeyVaultAuthOptions::RuntimeConfigValues{ + client_id, + client_secret, + tenant_id, + authority_host: value.authority_host.unwrap_or_default(), + } + ), + (None, None, None) => Ok(AzureKeyVaultAuthOptions::Environmental), + _ => anyhow::bail!("The current runtime config specifies some but not all of the Azure KeyVault 'client_id', 'client_secret', and 'tenant_id' values. Provide the missing values to authenticate to Azure KeyVault with the given service principal, or remove all these values to authenticate using ambient authentication (e.g. 
env vars, Azure CLI, Managed Identity, Workload Identity).") } } } @@ -38,7 +55,12 @@ impl AzureKeyVaultRuntimeConfigOptions { #[derive(Clone, Debug)] pub enum AzureKeyVaultAuthOptions { /// Runtime Config values indicates the service principal credentials have been supplied - RuntimeConfigValues(AzureKeyVaultRuntimeConfigOptions), + RuntimeConfigValues { + client_id: String, + client_secret: String, + tenant_id: String, + authority_host: AzureAuthorityHost, + }, /// Environmental indicates that the environment variables of the process should be used to /// create the TokenCredential for the Cosmos client. This will use the Azure Rust SDK's /// DefaultCredentialChain to derive the TokenCredential based on what environment variables @@ -68,29 +90,36 @@ pub enum AzureKeyVaultAuthOptions { /// /// Common across each: /// - `AZURE_AUTHORITY_HOST`: (optional) the host for the identity provider. For example, for Azure public cloud the host defaults to "https://login.microsoftonline.com". + /// /// See also: https://github.com/Azure/azure-sdk-for-rust/blob/main/sdk/identity/README.md Environmental, } +/// A provider that fetches variables from Azure Key Vault. #[derive(Debug)] pub struct AzureKeyVaultProvider { secret_client: SecretClient, } impl AzureKeyVaultProvider { - pub fn new( + pub fn create( vault_url: impl Into, auth_options: AzureKeyVaultAuthOptions, - ) -> Result { + ) -> anyhow::Result { let http_client = azure_core::new_http_client(); - let token_credential = match auth_options.clone() { - AzureKeyVaultAuthOptions::RuntimeConfigValues(config) => { + let token_credential = match auth_options { + AzureKeyVaultAuthOptions::RuntimeConfigValues { + client_id, + client_secret, + tenant_id, + authority_host, + } => { let credential = azure_identity::ClientSecretCredential::new( http_client, - config.authority_host.into(), - config.tenant_id.to_string(), - config.client_id.to_string(), - config.client_secret.to_string(), + authority_host.into(), + tenant_id, + client_id, + client_secret, ); Arc::new(credential) as Arc } @@ -106,7 +135,7 @@ impl AzureKeyVaultProvider { #[async_trait] impl Provider for AzureKeyVaultProvider { #[instrument(name = "spin_variables.get_from_azure_key_vault", skip(self), err(level = Level::INFO), fields(otel.kind = "client"))] - async fn get(&self, key: &Key) -> Result> { + async fn get(&self, key: &Key) -> anyhow::Result> { let secret = self .secret_client .get(key.as_str()) @@ -116,20 +145,6 @@ impl Provider for AzureKeyVaultProvider { } } -#[derive(Debug, Copy, Clone, Deserialize)] -pub enum AzureAuthorityHost { - AzurePublicCloud, - AzureChina, - AzureGermany, - AzureGovernment, -} - -impl Default for AzureAuthorityHost { - fn default() -> Self { - Self::AzurePublicCloud - } -} - impl From for Url { fn from(value: AzureAuthorityHost) -> Self { let url = match value { diff --git a/crates/factor-variables/src/spin_cli/env.rs b/crates/factor-variables/src/spin_cli/env.rs new file mode 100644 index 0000000000..781140a1b8 --- /dev/null +++ b/crates/factor-variables/src/spin_cli/env.rs @@ -0,0 +1,208 @@ +use std::{ + collections::HashMap, + env::VarError, + path::{Path, PathBuf}, + sync::OnceLock, +}; + +use serde::Deserialize; +use spin_expressions::{Key, Provider}; +use spin_factors::anyhow::{self, Context as _}; +use spin_world::async_trait; +use tracing::{instrument, Level}; + +/// Configuration for the environment variables provider. 
+#[derive(Debug, Default, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct EnvVariablesConfig { + /// A prefix to add to variable names when resolving from the environment. + /// + /// Unless empty, joined to the variable name with an underscore. + #[serde(default)] + pub prefix: Option, + /// Optional path to a 'dotenv' file which will be merged into the environment. + #[serde(default)] + pub dotenv_path: Option, +} + +const DEFAULT_ENV_PREFIX: &str = "SPIN_VARIABLE"; + +type EnvFetcherFn = Box Result + Send + Sync>; + +/// A [`Provider`] that uses environment variables. +pub struct EnvVariablesProvider { + prefix: Option, + env_fetcher: EnvFetcherFn, + dotenv_path: Option, + dotenv_cache: OnceLock>, +} + +impl Default for EnvVariablesProvider { + fn default() -> Self { + Self { + prefix: None, + env_fetcher: Box::new(|s| std::env::var(s)), + dotenv_path: Some(".env".into()), + dotenv_cache: Default::default(), + } + } +} + +impl EnvVariablesProvider { + /// Creates a new EnvProvider. + /// + /// * `prefix` - The string prefix to use to distinguish an environment variable that should be used. + /// If not set, the default prefix is used. + /// * `env_fetcher` - The function to use to fetch an environment variable. + /// * `dotenv_path` - The path to the .env file to load environment variables from. If not set, + /// no .env file is loaded. + pub fn new( + prefix: Option>, + env_fetcher: impl Fn(&str) -> Result + Send + Sync + 'static, + dotenv_path: Option, + ) -> Self { + Self { + prefix: prefix.map(Into::into), + dotenv_path, + env_fetcher: Box::new(env_fetcher), + dotenv_cache: Default::default(), + } + } + + /// Gets the value of a variable from the environment. + fn get_sync(&self, key: &Key) -> anyhow::Result> { + let prefix = self + .prefix + .clone() + .unwrap_or_else(|| DEFAULT_ENV_PREFIX.to_string()); + + let upper_key = key.as_ref().to_ascii_uppercase(); + let env_key = format!("{prefix}_{upper_key}"); + + self.query_env(&env_key) + } + + /// Queries the environment for a variable defaulting to dotenv. + fn query_env(&self, env_key: &str) -> anyhow::Result> { + match (self.env_fetcher)(env_key) { + Err(std::env::VarError::NotPresent) => self.get_dotenv(env_key), + other => other + .map(Some) + .with_context(|| format!("failed to resolve env var {env_key}")), + } + } + + fn get_dotenv(&self, key: &str) -> anyhow::Result> { + let Some(dotenv_path) = self.dotenv_path.as_deref() else { + return Ok(None); + }; + let cache = match self.dotenv_cache.get() { + Some(cache) => cache, + None => { + let cache = load_dotenv(dotenv_path)?; + let _ = self.dotenv_cache.set(cache); + // Safe to unwrap because we just set the cache. + // Ensures we always get the first value set. + self.dotenv_cache.get().unwrap() + } + }; + Ok(cache.get(key).cloned()) + } +} + +impl std::fmt::Debug for EnvVariablesProvider { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("EnvProvider") + .field("prefix", &self.prefix) + .field("dotenv_path", &self.dotenv_path) + .finish() + } +} + +fn load_dotenv(dotenv_path: &Path) -> anyhow::Result> { + Ok(dotenvy::from_path_iter(dotenv_path) + .into_iter() + .flatten() + .collect::, _>>()?) 
+} + +#[async_trait] +impl Provider for EnvVariablesProvider { + #[instrument(name = "spin_variables.get_from_env", skip(self), err(level = Level::INFO))] + async fn get(&self, key: &Key) -> anyhow::Result> { + tokio::task::block_in_place(|| self.get_sync(key)) + } +} + +#[cfg(test)] +mod test { + use std::env::temp_dir; + + use super::*; + + struct TestEnv { + map: HashMap, + } + + impl TestEnv { + fn new() -> Self { + Self { + map: Default::default(), + } + } + + fn insert(&mut self, key: &str, value: &str) { + self.map.insert(key.to_string(), value.to_string()); + } + + fn get(&self, key: &str) -> Result { + self.map.get(key).cloned().ok_or(VarError::NotPresent) + } + } + + #[test] + fn provider_get() { + let mut env = TestEnv::new(); + env.insert("TESTING_SPIN_ENV_KEY1", "val"); + let key1 = Key::new("env_key1").unwrap(); + assert_eq!( + EnvVariablesProvider::new(Some("TESTING_SPIN"), move |key| env.get(key), None) + .get_sync(&key1) + .unwrap(), + Some("val".to_string()) + ); + } + + #[test] + fn provider_get_dotenv() { + let dotenv_path = temp_dir().join("spin-env-provider-test"); + std::fs::write(&dotenv_path, b"TESTING_SPIN_ENV_KEY2=dotenv_val").unwrap(); + + let key = Key::new("env_key2").unwrap(); + assert_eq!( + EnvVariablesProvider::new( + Some("TESTING_SPIN"), + |_| Err(VarError::NotPresent), + Some(dotenv_path) + ) + .get_sync(&key) + .unwrap(), + Some("dotenv_val".to_string()) + ); + } + + #[test] + fn provider_get_missing() { + let key = Key::new("definitely_not_set").unwrap(); + assert_eq!( + EnvVariablesProvider::new( + Some("TESTING_SPIN"), + |_| Err(VarError::NotPresent), + Default::default() + ) + .get_sync(&key) + .unwrap(), + None + ); + } +} diff --git a/crates/factor-variables/src/spin_cli/mod.rs b/crates/factor-variables/src/spin_cli/mod.rs new file mode 100644 index 0000000000..6a31a5b09f --- /dev/null +++ b/crates/factor-variables/src/spin_cli/mod.rs @@ -0,0 +1,70 @@ +//! The runtime configuration for the variables factor used in the Spin CLI. + +mod azure_key_vault; +mod env; +mod statik; +mod vault; + +pub use azure_key_vault::*; +pub use env::*; +pub use statik::*; +pub use vault::*; + +use serde::Deserialize; +use spin_expressions::Provider; +use spin_factors::{anyhow, runtime_config::toml::GetTomlValue}; + +use crate::runtime_config::RuntimeConfig; + +/// Resolves a runtime configuration for the variables factor from a TOML table. +pub fn runtime_config_from_toml(table: &impl GetTomlValue) -> anyhow::Result { + // Always include the environment variable provider. + let mut providers = vec![Box::::default() as _]; + let value = table + .get("variables_provider") + .or_else(|| table.get("config_provider")); + let Some(array) = value else { + return Ok(RuntimeConfig { providers }); + }; + + let provider_configs: Vec = array.clone().try_into()?; + let new_providers = provider_configs + .into_iter() + .map(VariableProviderConfiguration::into_provider) + .collect::>>()?; + providers.extend(new_providers); + Ok(RuntimeConfig { providers }) +} + +/// A runtime configuration used in the Spin CLI for one type of variable provider. +#[derive(Debug, Deserialize)] +#[serde(rename_all = "snake_case", tag = "type")] +pub enum VariableProviderConfiguration { + /// A provider that uses Azure Key Vault. + AzureKeyVault(AzureKeyVaultVariablesConfig), + /// A static provider of variables. + Static(StaticVariablesProvider), + /// A provider that uses HashiCorp Vault. + Vault(VaultVariablesProvider), + /// An environment variable provider. 
+ Env(EnvVariablesConfig), +} + +impl VariableProviderConfiguration { + /// Returns the provider for the configuration. + pub fn into_provider(self) -> anyhow::Result> { + let provider: Box = match self { + VariableProviderConfiguration::Static(provider) => Box::new(provider), + VariableProviderConfiguration::Env(config) => Box::new(env::EnvVariablesProvider::new( + config.prefix, + |s| std::env::var(s), + config.dotenv_path, + )), + VariableProviderConfiguration::Vault(provider) => Box::new(provider), + VariableProviderConfiguration::AzureKeyVault(config) => Box::new( + AzureKeyVaultProvider::create(config.vault_url.clone(), config.try_into()?)?, + ), + }; + Ok(provider) + } +} diff --git a/crates/factor-variables/src/spin_cli/statik.rs b/crates/factor-variables/src/spin_cli/statik.rs new file mode 100644 index 0000000000..d596c3e56c --- /dev/null +++ b/crates/factor-variables/src/spin_cli/statik.rs @@ -0,0 +1,18 @@ +use std::{collections::HashMap, sync::Arc}; + +use serde::Deserialize; +use spin_expressions::{async_trait::async_trait, Key, Provider}; +use spin_factors::anyhow; + +/// A [`Provider`] that reads variables from an static map. +#[derive(Debug, Deserialize, Clone)] +pub struct StaticVariablesProvider { + values: Arc>, +} + +#[async_trait] +impl Provider for StaticVariablesProvider { + async fn get(&self, key: &Key) -> anyhow::Result> { + Ok(self.values.get(key.as_str()).cloned()) + } +} diff --git a/crates/variables/src/provider/vault.rs b/crates/factor-variables/src/spin_cli/vault.rs similarity index 67% rename from crates/variables/src/provider/vault.rs rename to crates/factor-variables/src/spin_cli/vault.rs index d48505c38c..9008ce3c5f 100644 --- a/crates/variables/src/provider/vault.rs +++ b/crates/factor-variables/src/spin_cli/vault.rs @@ -1,6 +1,6 @@ -use anyhow::{Context, Result}; -use async_trait::async_trait; use serde::{Deserialize, Serialize}; +use spin_expressions::async_trait::async_trait; +use spin_factors::anyhow::{self, Context as _}; use tracing::{instrument, Level}; use vaultrs::{ client::{VaultClient, VaultClientSettingsBuilder}, @@ -10,40 +10,25 @@ use vaultrs::{ use spin_expressions::{Key, Provider}; +#[derive(Debug, Default, Deserialize)] +#[serde(deny_unknown_fields)] /// A config Provider that uses HashiCorp Vault. -#[derive(Debug)] -pub struct VaultProvider { +pub struct VaultVariablesProvider { + /// The URL of the Vault server. url: String, + /// The token to authenticate with. token: String, + /// The mount point of the KV engine. mount: String, + /// The optional prefix to use for all keys. 
+ #[serde(default)] prefix: Option, } -impl VaultProvider { - pub fn new( - url: impl Into, - token: impl Into, - mount: impl Into, - prefix: Option>, - ) -> Self { - Self { - url: url.into(), - token: token.into(), - mount: mount.into(), - prefix: prefix.map(Into::into), - } - } -} - -#[derive(Deserialize, Serialize)] -struct Secret { - value: String, -} - #[async_trait] -impl Provider for VaultProvider { +impl Provider for VaultVariablesProvider { #[instrument(name = "spin_variables.get_from_vault", skip(self), err(level = Level::INFO), fields(otel.kind = "client"))] - async fn get(&self, key: &Key) -> Result> { + async fn get(&self, key: &Key) -> anyhow::Result> { let client = VaultClient::new( VaultClientSettingsBuilder::default() .address(&self.url) @@ -54,6 +39,11 @@ impl Provider for VaultProvider { Some(prefix) => format!("{}/{}", prefix, key.as_str()), None => key.as_str().to_string(), }; + + #[derive(Deserialize, Serialize)] + struct Secret { + value: String, + } match kv2::read::(&client, &self.mount, &path).await { Ok(secret) => Ok(Some(secret.value)), // Vault doesn't have this entry so pass along the chain diff --git a/crates/factor-variables/tests/factor_test.rs b/crates/factor-variables/tests/factor_test.rs new file mode 100644 index 0000000000..33df9f1b2f --- /dev/null +++ b/crates/factor-variables/tests/factor_test.rs @@ -0,0 +1,69 @@ +use spin_factor_variables::{spin_cli, VariablesFactor}; +use spin_factors::{ + anyhow, Factor, FactorRuntimeConfigSource, RuntimeConfigSourceFinalizer, RuntimeFactors, +}; +use spin_factors_test::{toml, TestEnvironment}; +use spin_world::v2::variables::Host; + +#[derive(RuntimeFactors)] +struct TestFactors { + variables: VariablesFactor, +} + +#[tokio::test(flavor = "multi_thread")] +async fn static_provider_works() -> anyhow::Result<()> { + let factors = TestFactors { + variables: VariablesFactor::default(), + }; + let env = TestEnvironment::new(factors) + .extend_manifest(toml! { + [variables] + foo = { required = true } + + [component.test-component] + source = "does-not-exist.wasm" + variables = { baz = "<{{ foo }}>" } + }) + .runtime_config(TomlConfig::new(toml! 
{ + [[variables_provider]] + type = "static" + values = { foo = "bar" } + }))?; + + let mut state = env.build_instance_state().await?; + let val = state.variables.get("baz".into()).await?; + assert_eq!(val, ""); + Ok(()) +} + +struct TomlConfig { + table: toml::Table, +} + +impl TomlConfig { + fn new(table: toml::Table) -> Self { + Self { table } + } +} + +impl TryFrom for TestFactorsRuntimeConfig { + type Error = anyhow::Error; + + fn try_from(value: TomlConfig) -> Result { + Self::from_source(value) + } +} + +impl FactorRuntimeConfigSource for TomlConfig { + fn get_runtime_config( + &mut self, + ) -> anyhow::Result::RuntimeConfig>> { + spin_cli::runtime_config_from_toml(&self.table).map(Some) + } +} + +impl RuntimeConfigSourceFinalizer for TomlConfig { + fn finalize(&mut self) -> anyhow::Result<()> { + Ok(()) + } +} diff --git a/crates/factor-wasi/Cargo.toml b/crates/factor-wasi/Cargo.toml new file mode 100644 index 0000000000..6201ac2724 --- /dev/null +++ b/crates/factor-wasi/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "spin-factor-wasi" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +async-trait = "0.1" +bytes = "1.0" +cap-primitives = "3.0.0" +spin-common = { path = "../common" } +spin-factors = { path = "../factors" } +tokio = { version = "1" } +wasmtime = { workspace = true } +wasmtime-wasi = { workspace = true } + +[dev-dependencies] +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } + +[lints] +workspace = true diff --git a/crates/factor-wasi/build.rs b/crates/factor-wasi/build.rs new file mode 100644 index 0000000000..c96556b06e --- /dev/null +++ b/crates/factor-wasi/build.rs @@ -0,0 +1,6 @@ +fn main() { + println!("cargo:rerun-if-changed=build.rs"); + // Enable spin-factors-derive to emit expanded macro output. + let out_dir = std::env::var("OUT_DIR").unwrap(); + println!("cargo:rustc-env=SPIN_FACTORS_DERIVE_EXPAND_DIR={out_dir}"); +} diff --git a/crates/factor-wasi/src/io.rs b/crates/factor-wasi/src/io.rs new file mode 100644 index 0000000000..f5dd0c2b5a --- /dev/null +++ b/crates/factor-wasi/src/io.rs @@ -0,0 +1,126 @@ +use std::io::{Read, Write}; +use std::sync::{Arc, Mutex}; + +use async_trait::async_trait; +use spin_factors::anyhow; +use wasmtime_wasi::{ + HostInputStream, HostOutputStream, StdinStream, StdoutStream, StreamError, Subscribe, +}; + +/// A [`HostOutputStream`] that writes to a `Write` type. +/// +/// `StdinStream::stream` and `StdoutStream::new` can be called more than once in components +/// which are composed of multiple subcomponents, since each subcomponent will potentially want +/// its own handle. This means the streams need to be shareable. The easiest way to do that is +/// provide cloneable implementations of streams which operate synchronously. +/// +/// Note that this amounts to doing synchronous I/O in an asynchronous context, which we'd normally +/// prefer to avoid, but the properly asynchronous implementations Host{In|Out}putStream based on +/// `AsyncRead`/`AsyncWrite`` are quite hairy and probably not worth it for "normal" stdio streams in +/// Spin. If this does prove to be a performance bottleneck, though, we can certainly revisit it. 
+pub struct PipedWriteStream(Arc>); + +impl PipedWriteStream { + pub fn new(inner: T) -> Self { + Self(Arc::new(Mutex::new(inner))) + } +} + +impl Clone for PipedWriteStream { + fn clone(&self) -> Self { + Self(self.0.clone()) + } +} + +impl HostOutputStream for PipedWriteStream { + fn write(&mut self, bytes: bytes::Bytes) -> Result<(), StreamError> { + self.0 + .lock() + .unwrap() + .write_all(&bytes) + .map_err(|e| StreamError::LastOperationFailed(anyhow::anyhow!(e))) + } + + fn flush(&mut self) -> Result<(), StreamError> { + self.0 + .lock() + .unwrap() + .flush() + .map_err(|e| StreamError::LastOperationFailed(anyhow::anyhow!(e))) + } + + fn check_write(&mut self) -> Result { + Ok(1024 * 1024) + } +} + +impl StdoutStream for PipedWriteStream { + fn stream(&self) -> Box { + Box::new(self.clone()) + } + + fn isatty(&self) -> bool { + false + } +} + +#[async_trait] +impl Subscribe for PipedWriteStream { + async fn ready(&mut self) {} +} + +/// A [`HostInputStream`] that reads to a `Read` type. +/// +/// See [`PipedWriteStream`] for more information on why this is synchronous. +pub struct PipeReadStream { + buffer: Vec, + inner: Arc>, +} + +impl PipeReadStream { + pub fn new(inner: T) -> Self { + Self { + buffer: vec![0_u8; 64 * 1024], + inner: Arc::new(Mutex::new(inner)), + } + } +} + +impl Clone for PipeReadStream { + fn clone(&self) -> Self { + Self { + buffer: vec![0_u8; 64 * 1024], + inner: self.inner.clone(), + } + } +} + +impl HostInputStream for PipeReadStream { + fn read(&mut self, size: usize) -> wasmtime_wasi::StreamResult { + let size = size.min(self.buffer.len()); + + let count = self + .inner + .lock() + .unwrap() + .read(&mut self.buffer[..size]) + .map_err(|e| StreamError::LastOperationFailed(anyhow::anyhow!(e)))?; + + Ok(bytes::Bytes::copy_from_slice(&self.buffer[..count])) + } +} + +#[async_trait] +impl Subscribe for PipeReadStream { + async fn ready(&mut self) {} +} + +impl StdinStream for PipeReadStream { + fn stream(&self) -> Box { + Box::new(self.clone()) + } + + fn isatty(&self) -> bool { + false + } +} diff --git a/crates/factor-wasi/src/lib.rs b/crates/factor-wasi/src/lib.rs new file mode 100644 index 0000000000..885f96a791 --- /dev/null +++ b/crates/factor-wasi/src/lib.rs @@ -0,0 +1,296 @@ +mod io; +pub mod spin; +mod wasi_2023_10_18; +mod wasi_2023_11_10; + +use std::{ + future::Future, + io::{Read, Write}, + net::SocketAddr, + path::Path, +}; + +use io::{PipeReadStream, PipedWriteStream}; +use spin_factors::{ + anyhow, AppComponent, Factor, FactorInstanceBuilder, InitContext, InstanceBuilders, + PrepareContext, RuntimeFactors, RuntimeFactorsInstanceState, +}; +use wasmtime_wasi::{ + DirPerms, FilePerms, ResourceTable, StdinStream, StdoutStream, WasiCtx, WasiCtxBuilder, + WasiImpl, WasiView, +}; + +pub use wasmtime_wasi::SocketAddrUse; + +pub struct WasiFactor { + files_mounter: Box, +} + +impl WasiFactor { + pub fn new(files_mounter: impl FilesMounter + 'static) -> Self { + Self { + files_mounter: Box::new(files_mounter), + } + } + + pub fn get_wasi_impl( + runtime_instance_state: &mut impl RuntimeFactorsInstanceState, + ) -> Option> { + let (state, table) = runtime_instance_state.get_with_table::()?; + Some(WasiImpl(WasiImplInner { + ctx: &mut state.ctx, + table, + })) + } +} + +impl Factor for WasiFactor { + type RuntimeConfig = (); + type AppState = (); + type InstanceBuilder = InstanceBuilder; + + fn init(&mut self, mut ctx: InitContext) -> anyhow::Result<()> { + fn type_annotate(f: F) -> F + where + F: Fn(&mut T) -> WasiImpl, + { + f + } + let 
get_data_with_table = ctx.get_data_with_table_fn(); + let closure = type_annotate(move |data| { + let (state, table) = get_data_with_table(data); + WasiImpl(WasiImplInner { + ctx: &mut state.ctx, + table, + }) + }); + let linker = ctx.linker(); + use wasmtime_wasi::bindings; + bindings::clocks::wall_clock::add_to_linker_get_host(linker, closure)?; + bindings::clocks::monotonic_clock::add_to_linker_get_host(linker, closure)?; + bindings::filesystem::types::add_to_linker_get_host(linker, closure)?; + bindings::filesystem::preopens::add_to_linker_get_host(linker, closure)?; + bindings::io::error::add_to_linker_get_host(linker, closure)?; + bindings::io::poll::add_to_linker_get_host(linker, closure)?; + bindings::io::streams::add_to_linker_get_host(linker, closure)?; + bindings::random::random::add_to_linker_get_host(linker, closure)?; + bindings::random::insecure::add_to_linker_get_host(linker, closure)?; + bindings::random::insecure_seed::add_to_linker_get_host(linker, closure)?; + bindings::cli::exit::add_to_linker_get_host(linker, closure)?; + bindings::cli::environment::add_to_linker_get_host(linker, closure)?; + bindings::cli::stdin::add_to_linker_get_host(linker, closure)?; + bindings::cli::stdout::add_to_linker_get_host(linker, closure)?; + bindings::cli::stderr::add_to_linker_get_host(linker, closure)?; + bindings::cli::terminal_input::add_to_linker_get_host(linker, closure)?; + bindings::cli::terminal_output::add_to_linker_get_host(linker, closure)?; + bindings::cli::terminal_stdin::add_to_linker_get_host(linker, closure)?; + bindings::cli::terminal_stdout::add_to_linker_get_host(linker, closure)?; + bindings::cli::terminal_stderr::add_to_linker_get_host(linker, closure)?; + bindings::sockets::tcp::add_to_linker_get_host(linker, closure)?; + bindings::sockets::tcp_create_socket::add_to_linker_get_host(linker, closure)?; + bindings::sockets::udp::add_to_linker_get_host(linker, closure)?; + bindings::sockets::udp_create_socket::add_to_linker_get_host(linker, closure)?; + bindings::sockets::instance_network::add_to_linker_get_host(linker, closure)?; + bindings::sockets::network::add_to_linker_get_host(linker, closure)?; + bindings::sockets::ip_name_lookup::add_to_linker_get_host(linker, closure)?; + + wasi_2023_10_18::add_to_linker(linker, closure)?; + wasi_2023_11_10::add_to_linker(linker, closure)?; + + Ok(()) + } + + fn configure_app( + &self, + _ctx: spin_factors::ConfigureAppContext, + ) -> anyhow::Result { + Ok(()) + } + + fn prepare( + &self, + ctx: PrepareContext, + _builders: &mut InstanceBuilders, + ) -> anyhow::Result { + let mut wasi_ctx = WasiCtxBuilder::new(); + + // Mount files + let mount_ctx = MountFilesContext { ctx: &mut wasi_ctx }; + self.files_mounter + .mount_files(ctx.app_component(), mount_ctx)?; + + let mut builder = InstanceBuilder { ctx: wasi_ctx }; + + // Apply environment variables + builder.env(ctx.app_component().environment()); + + Ok(builder) + } +} + +pub trait FilesMounter: Send + Sync { + fn mount_files( + &self, + app_component: &AppComponent, + ctx: MountFilesContext, + ) -> anyhow::Result<()>; +} + +pub struct DummyFilesMounter; + +impl FilesMounter for DummyFilesMounter { + fn mount_files( + &self, + app_component: &AppComponent, + _ctx: MountFilesContext, + ) -> anyhow::Result<()> { + anyhow::ensure!( + app_component.files().next().is_none(), + "DummyFilesMounter can't actually mount files" + ); + Ok(()) + } +} + +pub struct MountFilesContext<'a> { + ctx: &'a mut WasiCtxBuilder, +} + +impl<'a> MountFilesContext<'a> { + pub fn preopened_dir( + 
&mut self, + host_path: impl AsRef, + guest_path: impl AsRef, + writable: bool, + ) -> anyhow::Result<()> { + let (dir_perms, file_perms) = if writable { + (DirPerms::all(), FilePerms::all()) + } else { + (DirPerms::READ, FilePerms::READ) + }; + self.ctx + .preopened_dir(host_path, guest_path, dir_perms, file_perms)?; + Ok(()) + } +} + +pub struct InstanceBuilder { + ctx: WasiCtxBuilder, +} + +impl InstanceBuilder { + /// Sets the WASI `stdin` descriptor to the given [`StdinStream`]. + pub fn stdin(&mut self, stdin: impl StdinStream + 'static) { + self.ctx.stdin(stdin); + } + + /// Sets the WASI `stdin` descriptor to the given [`Read`]er. + pub fn stdin_pipe(&mut self, r: impl Read + Send + Sync + Unpin + 'static) { + self.stdin(PipeReadStream::new(r)); + } + + /// Sets the WASI `stdout` descriptor to the given [`StdoutStream`]. + pub fn stdout(&mut self, stdout: impl StdoutStream + 'static) { + self.ctx.stdout(stdout); + } + + /// Sets the WASI `stdout` descriptor to the given [`Write`]r. + pub fn stdout_pipe(&mut self, w: impl Write + Send + Sync + Unpin + 'static) { + self.stdout(PipedWriteStream::new(w)); + } + + /// Sets the WASI `stderr` descriptor to the given [`StdoutStream`]. + pub fn stderr(&mut self, stderr: impl StdoutStream + 'static) { + self.ctx.stderr(stderr); + } + + /// Sets the WASI `stderr` descriptor to the given [`Write`]r. + pub fn stderr_pipe(&mut self, w: impl Write + Send + Sync + Unpin + 'static) { + self.stderr(PipedWriteStream::new(w)); + } + + /// Appends the given strings to the WASI 'args'. + pub fn args(&mut self, args: impl IntoIterator>) { + for arg in args { + self.ctx.arg(arg); + } + } + + /// Sets the given key/value string entries on the WASI 'env'. + pub fn env(&mut self, vars: impl IntoIterator, impl AsRef)>) { + for (k, v) in vars { + self.ctx.env(k, v); + } + } + + /// "Mounts" the given `host_path` into the WASI filesystem at the given + /// `guest_path`. 
+ pub fn preopened_dir( + &mut self, + host_path: impl AsRef, + guest_path: impl AsRef, + writable: bool, + ) -> anyhow::Result<()> { + let (dir_perms, file_perms) = if writable { + (DirPerms::all(), FilePerms::all()) + } else { + (DirPerms::READ, FilePerms::READ) + }; + self.ctx + .preopened_dir(host_path, guest_path, dir_perms, file_perms)?; + Ok(()) + } +} + +impl FactorInstanceBuilder for InstanceBuilder { + type InstanceState = InstanceState; + + fn build(self) -> anyhow::Result { + let InstanceBuilder { ctx: mut wasi_ctx } = self; + Ok(InstanceState { + ctx: wasi_ctx.build(), + }) + } +} + +impl InstanceBuilder { + pub fn outbound_socket_addr_check(&mut self, check: F) + where + F: Fn(SocketAddr, SocketAddrUse) -> Fut + Send + Sync + Clone + 'static, + Fut: Future + Send + Sync, + { + self.ctx.socket_addr_check(move |addr, addr_use| { + let check = check.clone(); + Box::pin(async move { + match addr_use { + wasmtime_wasi::SocketAddrUse::TcpBind => false, + wasmtime_wasi::SocketAddrUse::TcpConnect + | wasmtime_wasi::SocketAddrUse::UdpBind + | wasmtime_wasi::SocketAddrUse::UdpConnect + | wasmtime_wasi::SocketAddrUse::UdpOutgoingDatagram => { + check(addr, addr_use).await + } + } + }) + }); + } +} + +pub struct InstanceState { + ctx: WasiCtx, +} + +struct WasiImplInner<'a> { + ctx: &'a mut WasiCtx, + table: &'a mut ResourceTable, +} + +impl<'a> WasiView for WasiImplInner<'a> { + fn ctx(&mut self) -> &mut WasiCtx { + self.ctx + } + + fn table(&mut self) -> &mut ResourceTable { + self.table + } +} diff --git a/crates/factor-wasi/src/spin.rs b/crates/factor-wasi/src/spin.rs new file mode 100644 index 0000000000..25de63b5e0 --- /dev/null +++ b/crates/factor-wasi/src/spin.rs @@ -0,0 +1,48 @@ +use std::path::PathBuf; + +use spin_common::{ui::quoted_path, url::parse_file_url}; +use spin_factors::anyhow::{ensure, Context}; + +use crate::FilesMounter; + +pub struct SpinFilesMounter { + working_dir: PathBuf, + allow_transient_writes: bool, +} + +impl SpinFilesMounter { + pub fn new(working_dir: impl Into, allow_transient_writes: bool) -> Self { + Self { + working_dir: working_dir.into(), + allow_transient_writes, + } + } +} + +impl FilesMounter for SpinFilesMounter { + fn mount_files( + &self, + app_component: &spin_factors::AppComponent, + mut ctx: crate::MountFilesContext, + ) -> spin_factors::anyhow::Result<()> { + for content_dir in app_component.files() { + let source_uri = content_dir + .content + .source + .as_deref() + .with_context(|| format!("Missing 'source' on files mount {content_dir:?}"))?; + let source_path = self.working_dir.join(parse_file_url(source_uri)?); + ensure!( + source_path.is_dir(), + "SpinFilesMounter only supports directory mounts; {} is not a directory", + quoted_path(&source_path), + ); + let guest_path = &content_dir.path; + let guest_path = guest_path + .to_str() + .with_context(|| format!("guest path {guest_path:?} not valid UTF-8"))?; + ctx.preopened_dir(source_path, guest_path, self.allow_transient_writes)?; + } + Ok(()) + } +} diff --git a/crates/core/src/wasi_2023_10_18.rs b/crates/factor-wasi/src/wasi_2023_10_18.rs similarity index 75% rename from crates/core/src/wasi_2023_10_18.rs rename to crates/factor-wasi/src/wasi_2023_10_18.rs index 9918f5a5ed..386da6e722 100644 --- a/crates/core/src/wasi_2023_10_18.rs +++ b/crates/factor-wasi/src/wasi_2023_10_18.rs @@ -1,17 +1,11 @@ -#![doc(hidden)] // internal implementation detail used in tests and spin-trigger - -use anyhow::Result; use async_trait::async_trait; +use spin_factors::anyhow::{self, Result}; use 
std::mem; use wasmtime::component::{Linker, Resource}; -use wasmtime_wasi::{TrappableError, WasiImpl, WasiView}; -use wasmtime_wasi_http::{WasiHttpImpl, WasiHttpView}; +use wasmtime_wasi::{Pollable, TrappableError, WasiImpl, WasiView}; mod latest { pub use wasmtime_wasi::bindings::*; - pub mod http { - pub use wasmtime_wasi_http::bindings::http::*; - } } mod bindings { @@ -21,8 +15,6 @@ mod bindings { wasmtime::component::bindgen!({ path: "../../wit", interfaces: r#" - include wasi:http/proxy@0.2.0-rc-2023-10-18; - // NB: this is handling the historical behavior where Spin supported // more than "just" this snaphsot of the proxy world but additionally // other CLI-related interfaces. @@ -100,16 +92,6 @@ mod bindings { "wasi:sockets/udp/udp-socket": UdpSocket, "wasi:sockets/network/network": latest::sockets::network::Network, "wasi:sockets/ip-name-lookup/resolve-address-stream": latest::sockets::ip_name_lookup::ResolveAddressStream, - "wasi:http/types/incoming-response": latest::http::types::IncomingResponse, - "wasi:http/types/incoming-request": latest::http::types::IncomingRequest, - "wasi:http/types/incoming-body": latest::http::types::IncomingBody, - "wasi:http/types/outgoing-response": latest::http::types::OutgoingResponse, - "wasi:http/types/outgoing-request": latest::http::types::OutgoingRequest, - "wasi:http/types/outgoing-body": latest::http::types::OutgoingBody, - "wasi:http/types/fields": latest::http::types::Fields, - "wasi:http/types/response-outparam": latest::http::types::ResponseOutparam, - "wasi:http/types/future-incoming-response": latest::http::types::FutureIncomingResponse, - "wasi:http/types/future-trailers": latest::http::types::FutureTrailers, }, trappable_imports: true, }); @@ -118,18 +100,11 @@ mod bindings { mod wasi { pub use super::bindings::wasi::{ cli0_2_0_rc_2023_10_18 as cli, clocks0_2_0_rc_2023_10_18 as clocks, - filesystem0_2_0_rc_2023_10_18 as filesystem, http0_2_0_rc_2023_10_18 as http, - io0_2_0_rc_2023_10_18 as io, random0_2_0_rc_2023_10_18 as random, - sockets0_2_0_rc_2023_10_18 as sockets, + filesystem0_2_0_rc_2023_10_18 as filesystem, io0_2_0_rc_2023_10_18 as io, + random0_2_0_rc_2023_10_18 as random, sockets0_2_0_rc_2023_10_18 as sockets, }; } -pub mod exports { - pub mod wasi { - pub use super::super::bindings::exports::wasi::http0_2_0_rc_2023_10_18 as http; - } -} - use wasi::cli::terminal_input::TerminalInput; use wasi::cli::terminal_output::TerminalOutput; use wasi::clocks::monotonic_clock::Instant; @@ -139,12 +114,6 @@ use wasi::filesystem::types::{ DirectoryEntry, DirectoryEntryStream, Error, ErrorCode as FsErrorCode, Filesize, MetadataHashValue, Modes, NewTimestamp, OpenFlags, PathFlags, }; -use wasi::http::types::{ - Error as HttpError, Fields, FutureIncomingResponse, FutureTrailers, Headers, IncomingBody, - IncomingRequest, IncomingResponse, Method, OutgoingBody, OutgoingRequest, OutgoingResponse, - RequestOptions, ResponseOutparam, Scheme, StatusCode, Trailers, -}; -use wasi::io::poll::Pollable; use wasi::io::streams::{InputStream, OutputStream, StreamError}; use wasi::sockets::ip_name_lookup::{IpAddress, ResolveAddressStream}; use wasi::sockets::network::{Ipv4SocketAddress, Ipv6SocketAddress}; @@ -154,18 +123,13 @@ use wasi::sockets::tcp::{ }; use wasi::sockets::udp::Datagram; -pub fn add_to_linker(linker: &mut Linker) -> Result<()> +use crate::WasiImplInner; + +pub fn add_to_linker(linker: &mut Linker, closure: F) -> Result<()> where - T: WasiView + WasiHttpView, + T: Send, + F: Fn(&mut T) -> WasiImpl + Send + Sync + Copy + 'static, { - 
// interfaces from the "command" world - fn type_annotate_wasi(f: F) -> F - where - F: Fn(&mut T) -> WasiImpl<&mut T>, - { - f - } - let closure = type_annotate_wasi::(|t| WasiImpl(t)); wasi::clocks::monotonic_clock::add_to_linker_get_host(linker, closure)?; wasi::clocks::wall_clock::add_to_linker_get_host(linker, closure)?; wasi::filesystem::types::add_to_linker_get_host(linker, closure)?; @@ -192,16 +156,6 @@ where wasi::sockets::instance_network::add_to_linker_get_host(linker, closure)?; wasi::sockets::network::add_to_linker_get_host(linker, closure)?; wasi::sockets::ip_name_lookup::add_to_linker_get_host(linker, closure)?; - - fn type_annotate_http(f: F) -> F - where - F: Fn(&mut T) -> WasiHttpImpl<&mut T>, - { - f - } - let closure = type_annotate_http::(|t| WasiHttpImpl(t)); - wasi::http::types::add_to_linker_get_host(linker, closure)?; - wasi::http::outgoing_handler::add_to_linker_get_host(linker, closure)?; Ok(()) } @@ -1689,489 +1643,6 @@ where } } -impl wasi::http::types::Host for WasiHttpImpl where T: WasiHttpView + Send {} - -impl wasi::http::types::HostFields for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn new( - &mut self, - entries: Vec<(String, Vec)>, - ) -> wasmtime::Result> { - match latest::http::types::HostFields::from_list(self, entries)? { - Ok(fields) => Ok(fields), - Err(e) => Err(e.into()), - } - } - - fn get( - &mut self, - self_: wasmtime::component::Resource, - name: String, - ) -> wasmtime::Result>> { - latest::http::types::HostFields::get(self, self_, name) - } - - fn set( - &mut self, - self_: wasmtime::component::Resource, - name: String, - value: Vec>, - ) -> wasmtime::Result<()> { - latest::http::types::HostFields::set(self, self_, name, value)??; - Ok(()) - } - - fn delete( - &mut self, - self_: wasmtime::component::Resource, - name: String, - ) -> wasmtime::Result<()> { - latest::http::types::HostFields::delete(self, self_, name)??; - Ok(()) - } - - fn append( - &mut self, - self_: wasmtime::component::Resource, - name: String, - value: Vec, - ) -> wasmtime::Result<()> { - latest::http::types::HostFields::append(self, self_, name, value)??; - Ok(()) - } - - fn entries( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result)>> { - latest::http::types::HostFields::entries(self, self_) - } - - fn clone( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result> { - latest::http::types::HostFields::clone(self, self_) - } - - fn drop(&mut self, rep: wasmtime::component::Resource) -> wasmtime::Result<()> { - latest::http::types::HostFields::drop(self, rep) - } -} - -impl wasi::http::types::HostIncomingRequest for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn method( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result { - latest::http::types::HostIncomingRequest::method(self, self_).map(|e| e.into()) - } - - fn path_with_query( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result> { - latest::http::types::HostIncomingRequest::path_with_query(self, self_) - } - - fn scheme( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result> { - latest::http::types::HostIncomingRequest::scheme(self, self_).map(|e| e.map(|e| e.into())) - } - - fn authority( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result> { - latest::http::types::HostIncomingRequest::authority(self, self_) - } - - fn headers( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result> { - 
latest::http::types::HostIncomingRequest::headers(self, self_) - } - - fn consume( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostIncomingRequest::consume(self, self_) - } - - fn drop( - &mut self, - rep: wasmtime::component::Resource, - ) -> wasmtime::Result<()> { - latest::http::types::HostIncomingRequest::drop(self, rep) - } -} - -impl wasi::http::types::HostIncomingResponse for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn status( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result { - latest::http::types::HostIncomingResponse::status(self, self_) - } - - fn headers( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result> { - latest::http::types::HostIncomingResponse::headers(self, self_) - } - - fn consume( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostIncomingResponse::consume(self, self_) - } - - fn drop( - &mut self, - rep: wasmtime::component::Resource, - ) -> wasmtime::Result<()> { - latest::http::types::HostIncomingResponse::drop(self, rep) - } -} - -impl wasi::http::types::HostIncomingBody for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn stream( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostIncomingBody::stream(self, self_) - } - - fn finish( - &mut self, - this: wasmtime::component::Resource, - ) -> wasmtime::Result> { - latest::http::types::HostIncomingBody::finish(self, this) - } - - fn drop(&mut self, rep: wasmtime::component::Resource) -> wasmtime::Result<()> { - latest::http::types::HostIncomingBody::drop(self, rep) - } -} - -impl wasi::http::types::HostOutgoingRequest for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn new( - &mut self, - method: Method, - path_with_query: Option, - scheme: Option, - authority: Option, - headers: wasmtime::component::Resource, - ) -> wasmtime::Result> { - let headers = latest::http::types::HostFields::clone(self, headers)?; - let request = latest::http::types::HostOutgoingRequest::new(self, headers)?; - let borrow = || Resource::new_borrow(request.rep()); - - if let Err(()) = - latest::http::types::HostOutgoingRequest::set_method(self, borrow(), method.into())? - { - latest::http::types::HostOutgoingRequest::drop(self, request)?; - anyhow::bail!("invalid method supplied"); - } - - if let Err(()) = latest::http::types::HostOutgoingRequest::set_path_with_query( - self, - borrow(), - path_with_query, - )? { - latest::http::types::HostOutgoingRequest::drop(self, request)?; - anyhow::bail!("invalid path-with-query supplied"); - } - - // Historical WASI would fill in an empty authority with a port which - // got just enough working to get things through. Current WASI requires - // the authority, though, so perform the translation manually here. - let authority = authority.unwrap_or_else(|| match &scheme { - Some(Scheme::Http) | Some(Scheme::Other(_)) => ":80".to_string(), - Some(Scheme::Https) | None => ":443".to_string(), - }); - if let Err(()) = latest::http::types::HostOutgoingRequest::set_scheme( - self, - borrow(), - scheme.map(|s| s.into()), - )? { - latest::http::types::HostOutgoingRequest::drop(self, request)?; - anyhow::bail!("invalid scheme supplied"); - } - - if let Err(()) = latest::http::types::HostOutgoingRequest::set_authority( - self, - borrow(), - Some(authority), - )? 
{ - latest::http::types::HostOutgoingRequest::drop(self, request)?; - anyhow::bail!("invalid authority supplied"); - } - - Ok(request) - } - - fn write( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostOutgoingRequest::body(self, self_) - } - - fn drop( - &mut self, - rep: wasmtime::component::Resource, - ) -> wasmtime::Result<()> { - latest::http::types::HostOutgoingRequest::drop(self, rep) - } -} - -impl wasi::http::types::HostOutgoingResponse for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn new( - &mut self, - status_code: StatusCode, - headers: wasmtime::component::Resource, - ) -> wasmtime::Result> { - let headers = latest::http::types::HostFields::clone(self, headers)?; - let response = latest::http::types::HostOutgoingResponse::new(self, headers)?; - let borrow = || Resource::new_borrow(response.rep()); - - if let Err(()) = - latest::http::types::HostOutgoingResponse::set_status_code(self, borrow(), status_code)? - { - latest::http::types::HostOutgoingResponse::drop(self, response)?; - anyhow::bail!("invalid status code supplied"); - } - - Ok(response) - } - - fn write( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostOutgoingResponse::body(self, self_) - } - - fn drop( - &mut self, - rep: wasmtime::component::Resource, - ) -> wasmtime::Result<()> { - latest::http::types::HostOutgoingResponse::drop(self, rep) - } -} - -impl wasi::http::types::HostOutgoingBody for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn write( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostOutgoingBody::write(self, self_) - } - - fn finish( - &mut self, - this: wasmtime::component::Resource, - trailers: Option>, - ) -> wasmtime::Result<()> { - latest::http::types::HostOutgoingBody::finish(self, this, trailers)?; - Ok(()) - } - - fn drop(&mut self, rep: wasmtime::component::Resource) -> wasmtime::Result<()> { - latest::http::types::HostOutgoingBody::drop(self, rep) - } -} - -impl wasi::http::types::HostResponseOutparam for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn set( - &mut self, - param: wasmtime::component::Resource, - response: Result, HttpError>, - ) -> wasmtime::Result<()> { - let response = response.map_err(|err| { - // TODO: probably need to figure out a better mapping between - // errors, but that seems like it would require string matching, - // which also seems not great. - let msg = match err { - HttpError::InvalidUrl(s) => format!("invalid url: {s}"), - HttpError::TimeoutError(s) => format!("timeout: {s}"), - HttpError::ProtocolError(s) => format!("protocol error: {s}"), - HttpError::UnexpectedError(s) => format!("unexpected error: {s}"), - }; - latest::http::types::ErrorCode::InternalError(Some(msg)) - }); - latest::http::types::HostResponseOutparam::set(self, param, response) - } - - fn drop( - &mut self, - rep: wasmtime::component::Resource, - ) -> wasmtime::Result<()> { - latest::http::types::HostResponseOutparam::drop(self, rep) - } -} - -impl wasi::http::types::HostFutureTrailers for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn subscribe( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result> { - latest::http::types::HostFutureTrailers::subscribe(self, self_) - } - - fn get( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result, HttpError>>> { - match latest::http::types::HostFutureTrailers::get(self, self_)? 
{ - Some(Ok(Ok(Some(trailers)))) => Ok(Some(Ok(trailers))), - // Return an empty trailers if no trailers popped out since this - // version of WASI couldn't represent the lack of trailers. - Some(Ok(Ok(None))) => Ok(Some(Ok(latest::http::types::HostFields::new(self)?))), - Some(Ok(Err(e))) => Ok(Some(Err(e.into()))), - Some(Err(())) => Err(anyhow::anyhow!("trailers have already been retrieved")), - None => Ok(None), - } - } - - fn drop(&mut self, rep: wasmtime::component::Resource) -> wasmtime::Result<()> { - latest::http::types::HostFutureTrailers::drop(self, rep) - } -} - -impl wasi::http::types::HostFutureIncomingResponse for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn get( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result< - Option, HttpError>, ()>>, - > { - match latest::http::types::HostFutureIncomingResponse::get(self, self_)? { - None => Ok(None), - Some(Ok(Ok(response))) => Ok(Some(Ok(Ok(response)))), - Some(Ok(Err(e))) => Ok(Some(Ok(Err(e.into())))), - Some(Err(())) => Ok(Some(Err(()))), - } - } - - fn subscribe( - &mut self, - self_: wasmtime::component::Resource, - ) -> wasmtime::Result> { - latest::http::types::HostFutureIncomingResponse::subscribe(self, self_) - } - - fn drop( - &mut self, - rep: wasmtime::component::Resource, - ) -> wasmtime::Result<()> { - latest::http::types::HostFutureIncomingResponse::drop(self, rep) - } -} - -impl wasi::http::outgoing_handler::Host for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn handle( - &mut self, - request: wasmtime::component::Resource, - options: Option, - ) -> wasmtime::Result, HttpError>> - { - let options = match options { - Some(RequestOptions { - connect_timeout_ms, - first_byte_timeout_ms, - between_bytes_timeout_ms, - }) => { - let options = latest::http::types::HostRequestOptions::new(self)?; - let borrow = || Resource::new_borrow(request.rep()); - - if let Some(ms) = connect_timeout_ms { - if let Err(()) = latest::http::types::HostRequestOptions::set_connect_timeout( - self, - borrow(), - Some(ms.into()), - )? { - latest::http::types::HostRequestOptions::drop(self, options)?; - anyhow::bail!("invalid connect timeout supplied"); - } - } - - if let Some(ms) = first_byte_timeout_ms { - if let Err(()) = - latest::http::types::HostRequestOptions::set_first_byte_timeout( - self, - borrow(), - Some(ms.into()), - )? - { - latest::http::types::HostRequestOptions::drop(self, options)?; - anyhow::bail!("invalid first byte timeout supplied"); - } - } - - if let Some(ms) = between_bytes_timeout_ms { - if let Err(()) = - latest::http::types::HostRequestOptions::set_between_bytes_timeout( - self, - borrow(), - Some(ms.into()), - )? - { - latest::http::types::HostRequestOptions::drop(self, options)?; - anyhow::bail!("invalid between bytes timeout supplied"); - } - } - - Some(options) - } - None => None, - }; - match latest::http::outgoing_handler::Host::handle(self, request, options) { - Ok(resp) => Ok(Ok(resp)), - Err(e) => Ok(Err(e.downcast()?.into())), - } - } -} - pub fn convert_result( result: Result>, ) -> wasmtime::Result> @@ -2422,25 +1893,6 @@ convert! 
{ data, remote_address, } - - enum latest::http::types::Method [<=>] Method { - Get, - Head, - Post, - Put, - Delete, - Connect, - Options, - Trace, - Patch, - Other(e), - } - - enum latest::http::types::Scheme [<=>] Scheme { - Http, - Https, - Other(e), - } } impl From for DescriptorStat { @@ -2455,11 +1907,3 @@ impl From for DescriptorStat { } } } - -impl From for HttpError { - fn from(e: latest::http::types::ErrorCode) -> HttpError { - // TODO: should probably categorize this better given the typed info - // we have in `e`. - HttpError::UnexpectedError(e.to_string()) - } -} diff --git a/crates/core/src/wasi_2023_11_10.rs b/crates/factor-wasi/src/wasi_2023_11_10.rs similarity index 64% rename from crates/core/src/wasi_2023_11_10.rs rename to crates/factor-wasi/src/wasi_2023_11_10.rs index a7d554debe..a98687885d 100644 --- a/crates/core/src/wasi_2023_11_10.rs +++ b/crates/factor-wasi/src/wasi_2023_11_10.rs @@ -1,17 +1,11 @@ -#![doc(hidden)] // internal implementation detail used in tests and spin-trigger - use super::wasi_2023_10_18::{convert, convert_result}; -use anyhow::Result; use async_trait::async_trait; +use spin_factors::anyhow::{self, Result}; use wasmtime::component::{Linker, Resource}; use wasmtime_wasi::{WasiImpl, WasiView}; -use wasmtime_wasi_http::{WasiHttpImpl, WasiHttpView}; mod latest { pub use wasmtime_wasi::bindings::*; - pub mod http { - pub use wasmtime_wasi_http::bindings::http::*; - } } mod bindings { @@ -20,8 +14,6 @@ mod bindings { wasmtime::component::bindgen!({ path: "../../wit", interfaces: r#" - include wasi:http/proxy@0.2.0-rc-2023-11-10; - // NB: this is handling the historical behavior where Spin supported // more than "just" this snapshot of the proxy world but additionally // other CLI-related interfaces. @@ -90,17 +82,6 @@ mod bindings { "wasi:sockets/udp/incoming-datagram-stream": latest::sockets::udp::IncomingDatagramStream, "wasi:sockets/network/network": latest::sockets::network::Network, "wasi:sockets/ip-name-lookup/resolve-address-stream": latest::sockets::ip_name_lookup::ResolveAddressStream, - "wasi:http/types/incoming-response": latest::http::types::IncomingResponse, - "wasi:http/types/incoming-request": latest::http::types::IncomingRequest, - "wasi:http/types/incoming-body": latest::http::types::IncomingBody, - "wasi:http/types/outgoing-response": latest::http::types::OutgoingResponse, - "wasi:http/types/outgoing-request": latest::http::types::OutgoingRequest, - "wasi:http/types/outgoing-body": latest::http::types::OutgoingBody, - "wasi:http/types/fields": latest::http::types::Fields, - "wasi:http/types/response-outparam": latest::http::types::ResponseOutparam, - "wasi:http/types/future-incoming-response": latest::http::types::FutureIncomingResponse, - "wasi:http/types/future-trailers": latest::http::types::FutureTrailers, - "wasi:http/types/request-options": latest::http::types::RequestOptions, }, trappable_imports: true, }); @@ -109,18 +90,11 @@ mod bindings { mod wasi { pub use super::bindings::wasi::{ cli0_2_0_rc_2023_11_10 as cli, clocks0_2_0_rc_2023_11_10 as clocks, - filesystem0_2_0_rc_2023_11_10 as filesystem, http0_2_0_rc_2023_11_10 as http, - io0_2_0_rc_2023_11_10 as io, random0_2_0_rc_2023_11_10 as random, - sockets0_2_0_rc_2023_11_10 as sockets, + filesystem0_2_0_rc_2023_11_10 as filesystem, io0_2_0_rc_2023_11_10 as io, + random0_2_0_rc_2023_11_10 as random, sockets0_2_0_rc_2023_11_10 as sockets, }; } -pub mod exports { - pub mod wasi { - pub use super::super::bindings::exports::wasi::http0_2_0_rc_2023_11_10 as http; - } -} - use 
wasi::cli::terminal_input::TerminalInput; use wasi::cli::terminal_output::TerminalOutput; use wasi::clocks::monotonic_clock::{Duration, Instant}; @@ -130,12 +104,6 @@ use wasi::filesystem::types::{ DirectoryEntryStream, ErrorCode as FsErrorCode, Filesize, MetadataHashValue, NewTimestamp, OpenFlags, PathFlags, }; -use wasi::http::types::{ - DnsErrorPayload, ErrorCode as HttpErrorCode, FieldSizePayload, Fields, FutureIncomingResponse, - FutureTrailers, HeaderError, Headers, IncomingBody, IncomingRequest, IncomingResponse, Method, - OutgoingBody, OutgoingRequest, OutgoingResponse, RequestOptions, ResponseOutparam, Scheme, - StatusCode, TlsAlertReceivedPayload, Trailers, -}; use wasi::io::poll::Pollable; use wasi::io::streams::{Error as IoError, InputStream, OutputStream, StreamError}; use wasi::sockets::ip_name_lookup::{IpAddress, ResolveAddressStream}; @@ -148,18 +116,13 @@ use wasi::sockets::udp::{ IncomingDatagram, IncomingDatagramStream, OutgoingDatagram, OutgoingDatagramStream, UdpSocket, }; -pub fn add_to_linker(linker: &mut Linker) -> Result<()> +use crate::WasiImplInner; + +pub fn add_to_linker(linker: &mut Linker, closure: F) -> Result<()> where - T: WasiView + WasiHttpView, + T: Send, + F: Fn(&mut T) -> WasiImpl + Send + Sync + Copy + 'static, { - // interfaces from the "command" world - fn type_annotate_wasi(f: F) -> F - where - F: Fn(&mut T) -> WasiImpl<&mut T>, - { - f - } - let closure = type_annotate_wasi::(|t| WasiImpl(t)); wasi::clocks::monotonic_clock::add_to_linker_get_host(linker, closure)?; wasi::clocks::wall_clock::add_to_linker_get_host(linker, closure)?; wasi::filesystem::types::add_to_linker_get_host(linker, closure)?; @@ -187,16 +150,6 @@ where wasi::sockets::instance_network::add_to_linker_get_host(linker, closure)?; wasi::sockets::network::add_to_linker_get_host(linker, closure)?; wasi::sockets::ip_name_lookup::add_to_linker_get_host(linker, closure)?; - - fn type_annotate_http(f: F) -> F - where - F: Fn(&mut T) -> WasiHttpImpl<&mut T>, - { - f - } - let closure = type_annotate_http::(|t| WasiHttpImpl(t)); - wasi::http::types::add_to_linker_get_host(linker, closure)?; - wasi::http::outgoing_handler::add_to_linker_get_host(linker, closure)?; Ok(()) } @@ -1571,457 +1524,6 @@ where } } -impl wasi::http::types::Host for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn http_error_code( - &mut self, - error: Resource, - ) -> wasmtime::Result> { - latest::http::types::Host::http_error_code(self, error).map(|e| e.map(|e| e.into())) - } -} - -impl wasi::http::types::HostRequestOptions for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn new(&mut self) -> wasmtime::Result> { - latest::http::types::HostRequestOptions::new(self) - } - - fn connect_timeout_ms( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostRequestOptions::connect_timeout(self, self_) - } - - fn set_connect_timeout_ms( - &mut self, - self_: Resource, - duration: Option, - ) -> wasmtime::Result> { - latest::http::types::HostRequestOptions::set_connect_timeout(self, self_, duration) - } - - fn first_byte_timeout_ms( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostRequestOptions::first_byte_timeout(self, self_) - } - - fn set_first_byte_timeout_ms( - &mut self, - self_: Resource, - duration: Option, - ) -> wasmtime::Result> { - latest::http::types::HostRequestOptions::set_first_byte_timeout(self, self_, duration) - } - - fn between_bytes_timeout_ms( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - 
latest::http::types::HostRequestOptions::between_bytes_timeout(self, self_) - } - - fn set_between_bytes_timeout_ms( - &mut self, - self_: Resource, - duration: Option, - ) -> wasmtime::Result> { - latest::http::types::HostRequestOptions::set_between_bytes_timeout(self, self_, duration) - } - - fn drop(&mut self, self_: Resource) -> wasmtime::Result<()> { - latest::http::types::HostRequestOptions::drop(self, self_) - } -} - -impl wasi::http::types::HostFields for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn new(&mut self) -> wasmtime::Result> { - latest::http::types::HostFields::new(self) - } - - fn from_list( - &mut self, - entries: Vec<(String, Vec)>, - ) -> wasmtime::Result, HeaderError>> { - latest::http::types::HostFields::from_list(self, entries).map(|r| r.map_err(|e| e.into())) - } - - fn get(&mut self, self_: Resource, name: String) -> wasmtime::Result>> { - latest::http::types::HostFields::get(self, self_, name) - } - - fn set( - &mut self, - self_: Resource, - name: String, - value: Vec>, - ) -> wasmtime::Result> { - latest::http::types::HostFields::set(self, self_, name, value) - .map(|r| r.map_err(|e| e.into())) - } - - fn delete( - &mut self, - self_: Resource, - name: String, - ) -> wasmtime::Result> { - latest::http::types::HostFields::delete(self, self_, name).map(|r| r.map_err(|e| e.into())) - } - - fn append( - &mut self, - self_: Resource, - name: String, - value: Vec, - ) -> wasmtime::Result> { - latest::http::types::HostFields::append(self, self_, name, value) - .map(|r| r.map_err(|e| e.into())) - } - - fn entries(&mut self, self_: Resource) -> wasmtime::Result)>> { - latest::http::types::HostFields::entries(self, self_) - } - - fn clone(&mut self, self_: Resource) -> wasmtime::Result> { - latest::http::types::HostFields::clone(self, self_) - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostFields::drop(self, rep) - } -} - -impl wasi::http::types::HostIncomingRequest for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn method(&mut self, self_: Resource) -> wasmtime::Result { - latest::http::types::HostIncomingRequest::method(self, self_).map(|e| e.into()) - } - - fn path_with_query( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostIncomingRequest::path_with_query(self, self_) - } - - fn scheme(&mut self, self_: Resource) -> wasmtime::Result> { - latest::http::types::HostIncomingRequest::scheme(self, self_).map(|e| e.map(|e| e.into())) - } - - fn authority(&mut self, self_: Resource) -> wasmtime::Result> { - latest::http::types::HostIncomingRequest::authority(self, self_) - } - - fn headers(&mut self, self_: Resource) -> wasmtime::Result> { - latest::http::types::HostIncomingRequest::headers(self, self_) - } - - fn consume( - &mut self, - self_: Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostIncomingRequest::consume(self, self_) - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostIncomingRequest::drop(self, rep) - } -} - -impl wasi::http::types::HostIncomingResponse for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn status(&mut self, self_: Resource) -> wasmtime::Result { - latest::http::types::HostIncomingResponse::status(self, self_) - } - - fn headers( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostIncomingResponse::headers(self, self_) - } - - fn consume( - &mut self, - self_: Resource, - ) -> wasmtime::Result, ()>> { - 
latest::http::types::HostIncomingResponse::consume(self, self_) - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostIncomingResponse::drop(self, rep) - } -} - -impl wasi::http::types::HostIncomingBody for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn stream( - &mut self, - self_: Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostIncomingBody::stream(self, self_) - } - - fn finish( - &mut self, - this: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostIncomingBody::finish(self, this) - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostIncomingBody::drop(self, rep) - } -} - -impl wasi::http::types::HostOutgoingRequest for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn new(&mut self, headers: Resource) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::new(self, headers) - } - - fn method(&mut self, self_: Resource) -> wasmtime::Result { - latest::http::types::HostOutgoingRequest::method(self, self_).map(|m| m.into()) - } - - fn set_method( - &mut self, - self_: Resource, - method: Method, - ) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::set_method(self, self_, method.into()) - } - - fn path_with_query( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::path_with_query(self, self_) - } - - fn set_path_with_query( - &mut self, - self_: Resource, - path_with_query: Option, - ) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::set_path_with_query(self, self_, path_with_query) - } - - fn scheme(&mut self, self_: Resource) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::scheme(self, self_).map(|s| s.map(|s| s.into())) - } - - fn set_scheme( - &mut self, - self_: Resource, - scheme: Option, - ) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::set_scheme(self, self_, scheme.map(|s| s.into())) - } - - fn authority(&mut self, self_: Resource) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::authority(self, self_) - } - - fn set_authority( - &mut self, - self_: Resource, - authority: Option, - ) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::set_authority(self, self_, authority) - } - - fn headers(&mut self, self_: Resource) -> wasmtime::Result> { - latest::http::types::HostOutgoingRequest::headers(self, self_) - } - - fn body( - &mut self, - self_: Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostOutgoingRequest::body(self, self_) - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostOutgoingRequest::drop(self, rep) - } -} - -impl wasi::http::types::HostOutgoingResponse for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn new(&mut self, headers: Resource) -> wasmtime::Result> { - let headers = latest::http::types::HostFields::clone(self, headers)?; - latest::http::types::HostOutgoingResponse::new(self, headers) - } - - fn status_code(&mut self, self_: Resource) -> wasmtime::Result { - latest::http::types::HostOutgoingResponse::status_code(self, self_) - } - - fn set_status_code( - &mut self, - self_: Resource, - status_code: StatusCode, - ) -> wasmtime::Result> { - latest::http::types::HostOutgoingResponse::set_status_code(self, self_, status_code) - } - - fn headers( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostOutgoingResponse::headers(self, self_) - } - - fn body( - &mut self, - 
self_: Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostOutgoingResponse::body(self, self_) - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostOutgoingResponse::drop(self, rep) - } -} - -impl wasi::http::types::HostOutgoingBody for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn write( - &mut self, - self_: Resource, - ) -> wasmtime::Result, ()>> { - latest::http::types::HostOutgoingBody::write(self, self_) - } - - fn finish( - &mut self, - this: Resource, - trailers: Option>, - ) -> wasmtime::Result> { - match latest::http::types::HostOutgoingBody::finish(self, this, trailers) { - Ok(()) => Ok(Ok(())), - Err(e) => Ok(Err(e.downcast()?.into())), - } - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostOutgoingBody::drop(self, rep) - } -} - -impl wasi::http::types::HostResponseOutparam for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn set( - &mut self, - param: Resource, - response: Result, HttpErrorCode>, - ) -> wasmtime::Result<()> { - latest::http::types::HostResponseOutparam::set(self, param, response.map_err(|e| e.into())) - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostResponseOutparam::drop(self, rep) - } -} - -impl wasi::http::types::HostFutureTrailers for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn subscribe( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostFutureTrailers::subscribe(self, self_) - } - - fn get( - &mut self, - self_: Resource, - ) -> wasmtime::Result>, HttpErrorCode>>> { - match latest::http::types::HostFutureTrailers::get(self, self_)? { - Some(Ok(Ok(trailers))) => Ok(Some(Ok(trailers))), - Some(Ok(Err(e))) => Ok(Some(Err(e.into()))), - Some(Err(())) => Err(anyhow::anyhow!("trailers have already been retrieved")), - None => Ok(None), - } - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostFutureTrailers::drop(self, rep) - } -} - -impl wasi::http::types::HostFutureIncomingResponse for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn get( - &mut self, - self_: Resource, - ) -> wasmtime::Result, HttpErrorCode>, ()>>> - { - match latest::http::types::HostFutureIncomingResponse::get(self, self_)? { - None => Ok(None), - Some(Ok(Ok(response))) => Ok(Some(Ok(Ok(response)))), - Some(Ok(Err(e))) => Ok(Some(Ok(Err(e.into())))), - Some(Err(())) => Ok(Some(Err(()))), - } - } - - fn subscribe( - &mut self, - self_: Resource, - ) -> wasmtime::Result> { - latest::http::types::HostFutureIncomingResponse::subscribe(self, self_) - } - - fn drop(&mut self, rep: Resource) -> wasmtime::Result<()> { - latest::http::types::HostFutureIncomingResponse::drop(self, rep) - } -} - -impl wasi::http::outgoing_handler::Host for WasiHttpImpl -where - T: WasiHttpView + Send, -{ - fn handle( - &mut self, - request: Resource, - options: Option>, - ) -> wasmtime::Result, HttpErrorCode>> { - match latest::http::outgoing_handler::Host::handle(self, request, options) { - Ok(resp) => Ok(Ok(resp)), - Err(e) => Ok(Err(e.downcast()?.into())), - } - } -} - convert! { struct latest::clocks::wall_clock::Datetime [<=>] Datetime { seconds, @@ -2186,46 +1688,6 @@ convert! 
{ data, remote_address, } - - enum latest::http::types::Method [<=>] Method { - Get, - Head, - Post, - Put, - Delete, - Connect, - Options, - Trace, - Patch, - Other(e), - } - - enum latest::http::types::Scheme [<=>] Scheme { - Http, - Https, - Other(e), - } - - enum latest::http::types::HeaderError => HeaderError { - InvalidSyntax, - Forbidden, - Immutable, - } - - struct latest::http::types::DnsErrorPayload [<=>] DnsErrorPayload { - rcode, - info_code, - } - - struct latest::http::types::TlsAlertReceivedPayload [<=>] TlsAlertReceivedPayload { - alert_id, - alert_message, - } - - struct latest::http::types::FieldSizePayload [<=>] FieldSizePayload { - field_name, - field_size, - } } impl From for DescriptorStat { @@ -2249,207 +1711,3 @@ impl From for latest::sockets::udp::OutgoingDatagram { } } } - -impl From for HttpErrorCode { - fn from(e: latest::http::types::ErrorCode) -> Self { - match e { - latest::http::types::ErrorCode::DnsTimeout => HttpErrorCode::DnsTimeout, - latest::http::types::ErrorCode::DnsError(e) => HttpErrorCode::DnsError(e.into()), - latest::http::types::ErrorCode::DestinationNotFound => { - HttpErrorCode::DestinationNotFound - } - latest::http::types::ErrorCode::DestinationUnavailable => { - HttpErrorCode::DestinationUnavailable - } - latest::http::types::ErrorCode::DestinationIpProhibited => { - HttpErrorCode::DestinationIpProhibited - } - latest::http::types::ErrorCode::DestinationIpUnroutable => { - HttpErrorCode::DestinationIpUnroutable - } - latest::http::types::ErrorCode::ConnectionRefused => HttpErrorCode::ConnectionRefused, - latest::http::types::ErrorCode::ConnectionTerminated => { - HttpErrorCode::ConnectionTerminated - } - latest::http::types::ErrorCode::ConnectionTimeout => HttpErrorCode::ConnectionTimeout, - latest::http::types::ErrorCode::ConnectionReadTimeout => { - HttpErrorCode::ConnectionReadTimeout - } - latest::http::types::ErrorCode::ConnectionWriteTimeout => { - HttpErrorCode::ConnectionWriteTimeout - } - latest::http::types::ErrorCode::ConnectionLimitReached => { - HttpErrorCode::ConnectionLimitReached - } - latest::http::types::ErrorCode::TlsProtocolError => HttpErrorCode::TlsProtocolError, - latest::http::types::ErrorCode::TlsCertificateError => { - HttpErrorCode::TlsCertificateError - } - latest::http::types::ErrorCode::TlsAlertReceived(e) => { - HttpErrorCode::TlsAlertReceived(e.into()) - } - latest::http::types::ErrorCode::HttpRequestDenied => HttpErrorCode::HttpRequestDenied, - latest::http::types::ErrorCode::HttpRequestLengthRequired => { - HttpErrorCode::HttpRequestLengthRequired - } - latest::http::types::ErrorCode::HttpRequestBodySize(e) => { - HttpErrorCode::HttpRequestBodySize(e) - } - latest::http::types::ErrorCode::HttpRequestMethodInvalid => { - HttpErrorCode::HttpRequestMethodInvalid - } - latest::http::types::ErrorCode::HttpRequestUriInvalid => { - HttpErrorCode::HttpRequestUriInvalid - } - latest::http::types::ErrorCode::HttpRequestUriTooLong => { - HttpErrorCode::HttpRequestUriTooLong - } - latest::http::types::ErrorCode::HttpRequestHeaderSectionSize(e) => { - HttpErrorCode::HttpRequestHeaderSectionSize(e) - } - latest::http::types::ErrorCode::HttpRequestHeaderSize(e) => { - HttpErrorCode::HttpRequestHeaderSize(e.map(|e| e.into())) - } - latest::http::types::ErrorCode::HttpRequestTrailerSectionSize(e) => { - HttpErrorCode::HttpRequestTrailerSectionSize(e) - } - latest::http::types::ErrorCode::HttpRequestTrailerSize(e) => { - HttpErrorCode::HttpRequestTrailerSize(e.into()) - } - 
latest::http::types::ErrorCode::HttpResponseIncomplete => { - HttpErrorCode::HttpResponseIncomplete - } - latest::http::types::ErrorCode::HttpResponseHeaderSectionSize(e) => { - HttpErrorCode::HttpResponseHeaderSectionSize(e) - } - latest::http::types::ErrorCode::HttpResponseHeaderSize(e) => { - HttpErrorCode::HttpResponseHeaderSize(e.into()) - } - latest::http::types::ErrorCode::HttpResponseBodySize(e) => { - HttpErrorCode::HttpResponseBodySize(e) - } - latest::http::types::ErrorCode::HttpResponseTrailerSectionSize(e) => { - HttpErrorCode::HttpResponseTrailerSectionSize(e) - } - latest::http::types::ErrorCode::HttpResponseTrailerSize(e) => { - HttpErrorCode::HttpResponseTrailerSize(e.into()) - } - latest::http::types::ErrorCode::HttpResponseTransferCoding(e) => { - HttpErrorCode::HttpResponseTransferCoding(e) - } - latest::http::types::ErrorCode::HttpResponseContentCoding(e) => { - HttpErrorCode::HttpResponseContentCoding(e) - } - latest::http::types::ErrorCode::HttpResponseTimeout => { - HttpErrorCode::HttpResponseTimeout - } - latest::http::types::ErrorCode::HttpUpgradeFailed => HttpErrorCode::HttpUpgradeFailed, - latest::http::types::ErrorCode::HttpProtocolError => HttpErrorCode::HttpProtocolError, - latest::http::types::ErrorCode::LoopDetected => HttpErrorCode::LoopDetected, - latest::http::types::ErrorCode::ConfigurationError => HttpErrorCode::ConfigurationError, - latest::http::types::ErrorCode::InternalError(e) => HttpErrorCode::InternalError(e), - } - } -} - -impl From for latest::http::types::ErrorCode { - fn from(e: HttpErrorCode) -> Self { - match e { - HttpErrorCode::DnsTimeout => latest::http::types::ErrorCode::DnsTimeout, - HttpErrorCode::DnsError(e) => latest::http::types::ErrorCode::DnsError(e.into()), - HttpErrorCode::DestinationNotFound => { - latest::http::types::ErrorCode::DestinationNotFound - } - HttpErrorCode::DestinationUnavailable => { - latest::http::types::ErrorCode::DestinationUnavailable - } - HttpErrorCode::DestinationIpProhibited => { - latest::http::types::ErrorCode::DestinationIpProhibited - } - HttpErrorCode::DestinationIpUnroutable => { - latest::http::types::ErrorCode::DestinationIpUnroutable - } - HttpErrorCode::ConnectionRefused => latest::http::types::ErrorCode::ConnectionRefused, - HttpErrorCode::ConnectionTerminated => { - latest::http::types::ErrorCode::ConnectionTerminated - } - HttpErrorCode::ConnectionTimeout => latest::http::types::ErrorCode::ConnectionTimeout, - HttpErrorCode::ConnectionReadTimeout => { - latest::http::types::ErrorCode::ConnectionReadTimeout - } - HttpErrorCode::ConnectionWriteTimeout => { - latest::http::types::ErrorCode::ConnectionWriteTimeout - } - HttpErrorCode::ConnectionLimitReached => { - latest::http::types::ErrorCode::ConnectionLimitReached - } - HttpErrorCode::TlsProtocolError => latest::http::types::ErrorCode::TlsProtocolError, - HttpErrorCode::TlsCertificateError => { - latest::http::types::ErrorCode::TlsCertificateError - } - HttpErrorCode::TlsAlertReceived(e) => { - latest::http::types::ErrorCode::TlsAlertReceived(e.into()) - } - HttpErrorCode::HttpRequestDenied => latest::http::types::ErrorCode::HttpRequestDenied, - HttpErrorCode::HttpRequestLengthRequired => { - latest::http::types::ErrorCode::HttpRequestLengthRequired - } - HttpErrorCode::HttpRequestBodySize(e) => { - latest::http::types::ErrorCode::HttpRequestBodySize(e) - } - HttpErrorCode::HttpRequestMethodInvalid => { - latest::http::types::ErrorCode::HttpRequestMethodInvalid - } - HttpErrorCode::HttpRequestUriInvalid => { - 
latest::http::types::ErrorCode::HttpRequestUriInvalid - } - HttpErrorCode::HttpRequestUriTooLong => { - latest::http::types::ErrorCode::HttpRequestUriTooLong - } - HttpErrorCode::HttpRequestHeaderSectionSize(e) => { - latest::http::types::ErrorCode::HttpRequestHeaderSectionSize(e) - } - HttpErrorCode::HttpRequestHeaderSize(e) => { - latest::http::types::ErrorCode::HttpRequestHeaderSize(e.map(|e| e.into())) - } - HttpErrorCode::HttpRequestTrailerSectionSize(e) => { - latest::http::types::ErrorCode::HttpRequestTrailerSectionSize(e) - } - HttpErrorCode::HttpRequestTrailerSize(e) => { - latest::http::types::ErrorCode::HttpRequestTrailerSize(e.into()) - } - HttpErrorCode::HttpResponseIncomplete => { - latest::http::types::ErrorCode::HttpResponseIncomplete - } - HttpErrorCode::HttpResponseHeaderSectionSize(e) => { - latest::http::types::ErrorCode::HttpResponseHeaderSectionSize(e) - } - HttpErrorCode::HttpResponseHeaderSize(e) => { - latest::http::types::ErrorCode::HttpResponseHeaderSize(e.into()) - } - HttpErrorCode::HttpResponseBodySize(e) => { - latest::http::types::ErrorCode::HttpResponseBodySize(e) - } - HttpErrorCode::HttpResponseTrailerSectionSize(e) => { - latest::http::types::ErrorCode::HttpResponseTrailerSectionSize(e) - } - HttpErrorCode::HttpResponseTrailerSize(e) => { - latest::http::types::ErrorCode::HttpResponseTrailerSize(e.into()) - } - HttpErrorCode::HttpResponseTransferCoding(e) => { - latest::http::types::ErrorCode::HttpResponseTransferCoding(e) - } - HttpErrorCode::HttpResponseContentCoding(e) => { - latest::http::types::ErrorCode::HttpResponseContentCoding(e) - } - HttpErrorCode::HttpResponseTimeout => { - latest::http::types::ErrorCode::HttpResponseTimeout - } - HttpErrorCode::HttpUpgradeFailed => latest::http::types::ErrorCode::HttpUpgradeFailed, - HttpErrorCode::HttpProtocolError => latest::http::types::ErrorCode::HttpProtocolError, - HttpErrorCode::LoopDetected => latest::http::types::ErrorCode::LoopDetected, - HttpErrorCode::ConfigurationError => latest::http::types::ErrorCode::ConfigurationError, - HttpErrorCode::InternalError(e) => latest::http::types::ErrorCode::InternalError(e), - } - } -} diff --git a/crates/factor-wasi/tests/factor_test.rs b/crates/factor-wasi/tests/factor_test.rs new file mode 100644 index 0000000000..e5f1c7f29a --- /dev/null +++ b/crates/factor-wasi/tests/factor_test.rs @@ -0,0 +1,30 @@ +use spin_factor_wasi::{DummyFilesMounter, WasiFactor}; +use spin_factors::{anyhow, RuntimeFactors}; +use spin_factors_test::{toml, TestEnvironment}; +use wasmtime_wasi::bindings::cli::environment::Host; + +#[derive(RuntimeFactors)] +struct TestFactors { + wasi: WasiFactor, +} + +#[tokio::test] +async fn environment_works() -> anyhow::Result<()> { + let factors = TestFactors { + wasi: WasiFactor::new(DummyFilesMounter), + }; + let env = TestEnvironment::new(factors).extend_manifest(toml! { + [component.test-component] + source = "does-not-exist.wasm" + environment = { FOO = "bar" } + }); + let mut state = env.build_instance_state().await?; + let mut wasi = WasiFactor::get_wasi_impl(&mut state).unwrap(); + + let val = wasi + .get_environment()? 
+ .into_iter() + .find_map(|(key, val)| (key == "FOO").then_some(val)); + assert_eq!(val.as_deref(), Some("bar")); + Ok(()) +} diff --git a/crates/factors-derive/Cargo.toml b/crates/factors-derive/Cargo.toml new file mode 100644 index 0000000000..387276149b --- /dev/null +++ b/crates/factors-derive/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "spin-factors-derive" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[lib] +proc-macro = true + +[features] +expander = ["dep:expander"] + +[dependencies] +expander = { version = "2.2.1", optional = true } +proc-macro2 = "1.0.79" +quote = "1.0.35" +syn = "2.0.52" + +[lints] +workspace = true diff --git a/crates/factors-derive/src/lib.rs b/crates/factors-derive/src/lib.rs new file mode 100644 index 0000000000..df97924482 --- /dev/null +++ b/crates/factors-derive/src/lib.rs @@ -0,0 +1,282 @@ +use proc_macro2::TokenStream; +use quote::{format_ident, quote}; +use syn::{parse_macro_input, Data, DeriveInput, Error}; + +#[proc_macro_derive(RuntimeFactors)] +pub fn derive_factors(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let expanded = expand_factors(&input).unwrap_or_else(|err| err.into_compile_error()); + + #[cfg(feature = "expander")] + let expanded = if let Some(dest_dir) = std::env::var_os("SPIN_FACTORS_DERIVE_EXPAND_DIR") { + expander::Expander::new("factors") + .write_to(expanded, std::path::Path::new(&dest_dir)) + .unwrap() + } else { + expanded + }; + + expanded.into() +} + +#[allow(non_snake_case)] +fn expand_factors(input: &DeriveInput) -> syn::Result { + let name = &input.ident; + let vis = &input.vis; + + let app_state_name = format_ident!("{name}AppState"); + let builders_name = format_ident!("{name}InstanceBuilders"); + let state_name = format_ident!("{name}InstanceState"); + let runtime_config_name = format_ident!("{name}RuntimeConfig"); + + if !input.generics.params.is_empty() { + return Err(Error::new_spanned( + input, + "cannot derive Factors for generic structs", + )); + } + + // Get struct fields + let fields = match &input.data { + Data::Struct(struct_data) => &struct_data.fields, + _ => { + return Err(Error::new_spanned( + input, + "can only derive Factors for structs", + )) + } + }; + let mut factor_names = Vec::with_capacity(fields.len()); + let mut factor_types = Vec::with_capacity(fields.len()); + for field in fields.iter() { + factor_names.push( + field + .ident + .as_ref() + .ok_or_else(|| Error::new_spanned(input, "tuple structs are not supported"))?, + ); + factor_types.push(&field.ty); + } + + let Any = quote!(::std::any::Any); + let Send = quote!(::std::marker::Send); + let TypeId = quote!(::std::any::TypeId); + let factors_crate = format_ident!("spin_factors"); + let factors_path = quote!(::#factors_crate); + let wasmtime = quote!(#factors_path::wasmtime); + let ResourceTable = quote!(#wasmtime::component::ResourceTable); + let Result = quote!(#factors_path::Result); + let Error = quote!(#factors_path::Error); + let Factor = quote!(#factors_path::Factor); + let ConfiguredApp = quote!(#factors_path::ConfiguredApp); + let FactorInstanceBuilder = quote!(#factors_path::FactorInstanceBuilder); + + Ok(quote! 
{ + impl #factors_path::RuntimeFactors for #name { + type AppState = #app_state_name; + type InstanceBuilders = #builders_name; + type InstanceState = #state_name; + type RuntimeConfig = #runtime_config_name; + + fn init + Send + 'static>( + &mut self, + linker: &mut #wasmtime::component::Linker, + ) -> #Result<()> { + let factor_type_ids = [#( + (stringify!(#factor_types), #TypeId::of::<(<#factor_types as #Factor>::InstanceBuilder, <#factor_types as #Factor>::AppState)>()), + )*]; + + let mut unique = ::std::collections::HashSet::new(); + for (name, type_id) in factor_type_ids { + if !unique.insert(type_id) { + return Err(#Error::DuplicateFactorTypes(name.to_owned())); + } + } + + #( + #Factor::init::( + &mut self.#factor_names, + #factors_path::InitContext::::new( + linker, + |data| &mut data.as_instance_state().#factor_names, + |data| { + let state = data.as_instance_state(); + (&mut state.#factor_names, &mut state.__table) + }, + ) + ).map_err(#Error::factor_init_error::<#factor_types>)?; + )* + Ok(()) + } + + fn configure_app( + &self, + app: #factors_path::App, + runtime_config: Self::RuntimeConfig, + ) -> #Result<#ConfiguredApp> { + let mut app_state = #app_state_name { + #( #factor_names: None, )* + }; + #( + app_state.#factor_names = Some( + #Factor::configure_app( + &self.#factor_names, + #factors_path::ConfigureAppContext::::new( + &app, + &app_state, + runtime_config.#factor_names, + )?, + ).map_err(#Error::factor_configure_app_error::<#factor_types>)? + ); + )* + Ok(#ConfiguredApp::new(app, app_state)) + } + + fn prepare( + &self, configured_app: &#ConfiguredApp, + component_id: &str, + ) -> #Result { + let app_component = configured_app.app().get_component(component_id).ok_or_else(|| { + #factors_path::Error::UnknownComponent(component_id.to_string()) + })?; + let mut builders = #builders_name { + #( #factor_names: None, )* + }; + #( + builders.#factor_names = Some( + #Factor::prepare::( + &self.#factor_names, + #factors_path::PrepareContext::new( + configured_app.app_state::<#factor_types>().unwrap(), + &app_component, + ), + &mut #factors_path::InstanceBuilders::new(&mut builders), + ).map_err(#Error::factor_prepare_error::<#factor_types>)? 
+ ); + )* + Ok(builders) + } + + fn build_instance_state( + &self, + builders: Self::InstanceBuilders, + ) -> #Result { + Ok(#state_name { + __table: #ResourceTable::new(), + #( + #factor_names: #FactorInstanceBuilder::build( + builders.#factor_names.unwrap() + ).map_err(#Error::factor_build_error::<#factor_types>)?, + )* + }) + } + + fn app_state(app_state: &Self::AppState) -> Option<&F::AppState> { + #( + if let Some(state) = &app_state.#factor_names { + if let Some(state) = ::downcast_ref(state) { + return Some(state) + } + } + )* + None + } + + fn instance_builder_mut( + builders: &mut Self::InstanceBuilders, + ) -> Option> { + let type_id = #TypeId::of::<(F::InstanceBuilder, F::AppState)>(); + #( + if type_id == #TypeId::of::<(<#factor_types as #Factor>::InstanceBuilder, <#factor_types as #Factor>::AppState)>() { + return Some( + builders.#factor_names.as_mut().map(|builder| { + ::downcast_mut(builder).unwrap() + }) + ); + } + )* + None + } + } + + #vis struct #app_state_name { + #( + pub #factor_names: Option<<#factor_types as #Factor>::AppState>, + )* + } + + #vis struct #builders_name { + #( + #factor_names: Option<<#factor_types as #Factor>::InstanceBuilder>, + )* + } + + #[allow(dead_code)] + impl #builders_name { + #( + pub fn #factor_names(&mut self) -> &mut <#factor_types as #Factor>::InstanceBuilder { + self.#factor_names.as_mut().unwrap() + } + )* + } + + #vis struct #state_name { + __table: #ResourceTable, + #( + pub #factor_names: #factors_path::FactorInstanceState<#factor_types>, + )* + } + + impl #factors_path::RuntimeFactorsInstanceState for #state_name { + fn get_with_table( + &mut self + ) -> ::std::option::Option<(&mut #factors_path::FactorInstanceState, &mut #ResourceTable)> { + #( + if let Some(state) = (&mut self.#factor_names as &mut (dyn #Any + #Send)).downcast_mut() { + return Some((state, &mut self.__table)) + } + )* + None + } + + fn table(&self) -> &#ResourceTable { + &self.__table + } + + fn table_mut(&mut self) -> &mut #ResourceTable { + &mut self.__table + } + } + + impl #factors_path::AsInstanceState<#state_name> for #state_name { + fn as_instance_state(&mut self) -> &mut Self { + self + } + } + + #[derive(Default)] + #vis struct #runtime_config_name { + #( + pub #factor_names: Option<<#factor_types as #Factor>::RuntimeConfig>, + )* + } + + impl #runtime_config_name { + /// Get the runtime configuration from the given source. 
+ #[allow(dead_code)] + pub fn from_source(mut source: T) -> anyhow::Result + where T: #(#factors_path::FactorRuntimeConfigSource<#factor_types> +)* #factors_path::RuntimeConfigSourceFinalizer + { + #( + let #factor_names = >::get_runtime_config(&mut source)?; + )* + source.finalize()?; + Ok(#runtime_config_name { + #( + #factor_names, + )* + }) + } + } + }) +} diff --git a/crates/factors-executor/Cargo.toml b/crates/factors-executor/Cargo.toml new file mode 100644 index 0000000000..14500c6245 --- /dev/null +++ b/crates/factors-executor/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "spin-factors-executor" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +rust-version.workspace = true + +[dependencies] +anyhow = "1" +spin-app = { path = "../app" } +spin-core = { path = "../core" } +spin-factors = { path = "../factors" } + +[dev-dependencies] +spin-factor-wasi = { path = "../factor-wasi" } +spin-factors-test = { path = "../factors-test" } +tokio = { version = "1", features = ["macros", "rt"] } + +[lints] +workspace = true diff --git a/crates/factors-executor/src/lib.rs b/crates/factors-executor/src/lib.rs new file mode 100644 index 0000000000..f204dd1a77 --- /dev/null +++ b/crates/factors-executor/src/lib.rs @@ -0,0 +1,305 @@ +use std::collections::HashMap; + +use anyhow::Context; +use spin_app::{App, AppComponent}; +use spin_core::Component; +use spin_factors::{AsInstanceState, ConfiguredApp, RuntimeFactors, RuntimeFactorsInstanceState}; + +/// A FactorsExecutor manages execution of a Spin app. +/// +/// `Factors` is the executor's [`RuntimeFactors`]. `ExecutorInstanceState` +/// holds any other per-instance state needed by the caller. +pub struct FactorsExecutor { + core_engine: spin_core::Engine>, + factors: T, + hooks: Vec>>, +} + +impl FactorsExecutor { + /// Constructs a new executor. + pub fn new( + mut core_engine_builder: spin_core::EngineBuilder< + InstanceState<::InstanceState, U>, + >, + mut factors: T, + ) -> anyhow::Result { + factors + .init(core_engine_builder.linker()) + .context("failed to initialize factors")?; + Ok(Self { + factors, + core_engine: core_engine_builder.build(), + hooks: Default::default(), + }) + } + + /// Adds the given [`ExecutorHooks`] to this executor. + /// + /// Hooks are run in the order they are added. + pub fn add_hooks(&mut self, hooks: impl ExecutorHooks + 'static) { + self.hooks.push(Box::new(hooks)); + } + + /// Loads a [`FactorsApp`] with this executor. + pub fn load_app( + mut self, + app: App, + runtime_config: T::RuntimeConfig, + mut component_loader: impl ComponentLoader, + ) -> anyhow::Result> { + let configured_app = self + .factors + .configure_app(app, runtime_config) + .context("failed to configure app")?; + + for hooks in &mut self.hooks { + hooks.configure_app(&configured_app)?; + } + + let component_instance_pres = configured_app + .app() + .components() + .map(|app_component| { + let component = + component_loader.load_component(self.core_engine.as_ref(), &app_component)?; + let instance_pre = self.core_engine.instantiate_pre(&component)?; + Ok((app_component.id().to_string(), instance_pre)) + }) + .collect::>>()?; + + Ok(FactorsExecutorApp { + executor: self, + configured_app, + component_instance_pres, + }) + } +} + +pub trait ExecutorHooks: Send + Sync { + /// Configure app hooks run immediately after [`RuntimeFactors::configure_app`]. 
+ fn configure_app(&mut self, configured_app: &ConfiguredApp) -> anyhow::Result<()> { + let _ = configured_app; + Ok(()) + } + + /// Prepare instance hooks run immediately before [`FactorsExecutor::prepare`] returns. + fn prepare_instance(&self, builder: &mut FactorsInstanceBuilder) -> anyhow::Result<()> { + let _ = builder; + Ok(()) + } +} + +/// A ComponentLoader is responsible for loading Wasmtime [`Component`]s. +pub trait ComponentLoader { + /// Loads a [`Component`] for the given [`AppComponent`]. + fn load_component( + &mut self, + engine: &spin_core::wasmtime::Engine, + component: &AppComponent, + ) -> anyhow::Result; +} + +type InstancePre = + spin_core::InstancePre::InstanceState, U>>; + +/// A FactorsExecutorApp represents a loaded Spin app, ready for instantiation. +pub struct FactorsExecutorApp { + executor: FactorsExecutor, + configured_app: ConfiguredApp, + // Maps component IDs -> InstancePres + component_instance_pres: HashMap>, +} + +impl FactorsExecutorApp { + pub fn engine(&self) -> &spin_core::Engine> { + &self.executor.core_engine + } + + pub fn configured_app(&self) -> &ConfiguredApp { + &self.configured_app + } + + pub fn app(&self) -> &App { + self.configured_app.app() + } + + pub fn get_component(&self, component_id: &str) -> anyhow::Result<&Component> { + let instance_pre = self + .component_instance_pres + .get(component_id) + .with_context(|| format!("no such component {component_id:?}"))?; + Ok(instance_pre.component()) + } + + /// Returns an instance builder for the given component ID. + pub fn prepare(&self, component_id: &str) -> anyhow::Result> { + let app_component = self + .configured_app + .app() + .get_component(component_id) + .with_context(|| format!("no such component {component_id:?}"))?; + + let instance_pre = self.component_instance_pres.get(component_id).unwrap(); + + let factor_builders = self + .executor + .factors + .prepare(&self.configured_app, component_id)?; + + let store_builder = self.executor.core_engine.store_builder(); + + let mut builder = FactorsInstanceBuilder { + store_builder, + factor_builders, + instance_pre, + app_component, + factors: &self.executor.factors, + }; + + for hooks in &self.executor.hooks { + hooks.prepare_instance(&mut builder)?; + } + + Ok(builder) + } +} + +/// A FactorsInstanceBuilder manages the instantiation of a Spin component +/// instance. +pub struct FactorsInstanceBuilder<'a, T: RuntimeFactors, U> { + app_component: AppComponent<'a>, + store_builder: spin_core::StoreBuilder, + factor_builders: T::InstanceBuilders, + instance_pre: &'a InstancePre, + factors: &'a T, +} + +impl<'a, T: RuntimeFactors, U> FactorsInstanceBuilder<'a, T, U> { + /// Returns the app component for the instance. + pub fn app_component(&self) -> &AppComponent { + &self.app_component + } + + /// Returns the store builder for the instance. + pub fn store_builder(&mut self) -> &mut spin_core::StoreBuilder { + &mut self.store_builder + } + + /// Returns the factor instance builders for the instance. 
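// Sketch of an ExecutorHooks implementation, assuming the hook trait is generic
// over the factors type `T` and the executor instance state `U`. The hook name
// and the memory cap are illustrative, not Spin defaults.
use spin_factors::{anyhow, RuntimeFactors};
use spin_factors_executor::{ExecutorHooks, FactorsInstanceBuilder};

struct MemoryLimitHooks {
    max_memory_size: usize,
}

impl<T: RuntimeFactors, U> ExecutorHooks<T, U> for MemoryLimitHooks {
    fn prepare_instance(&self, builder: &mut FactorsInstanceBuilder<T, U>) -> anyhow::Result<()> {
        // Runs just before `FactorsExecutorApp::prepare` hands the builder back
        // to the caller; caps this instance's memory via the store builder.
        builder.store_builder().max_memory_size(self.max_memory_size);
        Ok(())
    }
    // `configure_app` keeps its default no-op implementation.
}

// Registered once on the executor before loading an app, e.g.:
//     executor.add_hooks(MemoryLimitHooks { max_memory_size: 64 << 20 });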
+ pub fn factor_builders(&mut self) -> &mut T::InstanceBuilders { + &mut self.factor_builders + } +} + +impl<'a, T: RuntimeFactors, U: Send> FactorsInstanceBuilder<'a, T, U> { + /// Instantiates the instance with the given executor instance state + pub async fn instantiate( + self, + executor_instance_state: U, + ) -> anyhow::Result<( + spin_core::Instance, + spin_core::Store>, + )> { + let instance_state = InstanceState { + core: Default::default(), + factors: self.factors.build_instance_state(self.factor_builders)?, + executor: executor_instance_state, + }; + let mut store = self.store_builder.build(instance_state)?; + let instance = self.instance_pre.instantiate_async(&mut store).await?; + Ok((instance, store)) + } +} + +/// InstanceState is the [`spin_core::Store`] `data` for an instance. +pub struct InstanceState { + core: spin_core::State, + factors: T, + executor: U, +} + +impl InstanceState { + /// Provides access to the [`spin_core::State`]. + pub fn core_state(&self) -> &spin_core::State { + &self.core + } + + /// Provides access to the [`RuntimeFactors::InstanceState`]. + pub fn factors_instance_state(&mut self) -> &mut T { + &mut self.factors + } + + /// Provides access to the `Self::ExecutorInstanceState`. + pub fn executor_instance_state(&mut self) -> &mut U { + &mut self.executor + } +} + +impl spin_core::AsState for InstanceState { + fn as_state(&mut self) -> &mut spin_core::State { + &mut self.core + } +} + +impl AsInstanceState for InstanceState { + fn as_instance_state(&mut self) -> &mut T { + &mut self.factors + } +} + +#[cfg(test)] +mod tests { + use spin_factor_wasi::{DummyFilesMounter, WasiFactor}; + use spin_factors::RuntimeFactors; + use spin_factors_test::TestEnvironment; + + use super::*; + + #[derive(RuntimeFactors)] + struct TestFactors { + wasi: WasiFactor, + } + + #[tokio::test] + async fn instance_builder_works() -> anyhow::Result<()> { + let factors = TestFactors { + wasi: WasiFactor::new(DummyFilesMounter), + }; + let env = TestEnvironment::new(factors); + let locked = env.build_locked_app().await?; + let app = App::new("test-app", locked); + + let engine_builder = spin_core::Engine::builder(&Default::default())?; + let executor = FactorsExecutor::new(engine_builder, env.factors)?; + + let factors_app = executor.load_app(app, Default::default(), DummyComponentLoader)?; + + let mut instance_builder = factors_app.prepare("empty")?; + + assert_eq!(instance_builder.app_component().id(), "empty"); + + instance_builder.store_builder().max_memory_size(1_000_000); + + instance_builder + .factor_builders() + .wasi + .as_mut() + .unwrap() + .args(["foo"]); + + let (_instance, _store) = instance_builder.instantiate(()).await?; + Ok(()) + } + + struct DummyComponentLoader; + + impl ComponentLoader for DummyComponentLoader { + fn load_component( + &mut self, + engine: &spin_core::wasmtime::Engine, + _component: &AppComponent, + ) -> anyhow::Result { + Component::new(engine, "(component)") + } + } +} diff --git a/crates/factors-test/Cargo.toml b/crates/factors-test/Cargo.toml new file mode 100644 index 0000000000..1516b851a1 --- /dev/null +++ b/crates/factors-test/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "spin-factors-test" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +serde = "1.0" +spin-app = { path = "../app" } +spin-factors = { path = "../factors" } +spin-factors-derive = { path = "../factors-derive", features = ["expander"] } +spin-loader = { path = "../loader" } +tempfile = "3.10.1" +toml = 
"0.8.14" + +[lints] +workspace = true diff --git a/crates/factors-test/src/lib.rs b/crates/factors-test/src/lib.rs new file mode 100644 index 0000000000..344153fbee --- /dev/null +++ b/crates/factors-test/src/lib.rs @@ -0,0 +1,100 @@ +use spin_app::locked::LockedApp; +use spin_factors::{ + anyhow::{self, Context}, + wasmtime::{component::Linker, Config, Engine}, + App, RuntimeFactors, +}; +use spin_loader::FilesMountStrategy; + +pub use toml::toml; + +/// A test environment for building [`RuntimeFactors`] instances. +pub struct TestEnvironment { + /// The RuntimeFactors under test. + pub factors: T, + /// The `spin.toml` manifest. + pub manifest: toml::Table, + /// Runtime configuration for the factors. + pub runtime_config: T::RuntimeConfig, +} + +impl TestEnvironment { + /// Creates a new test environment by initializing the given + /// [`RuntimeFactors`]. + pub fn new(mut factors: T) -> Self { + let engine = Engine::new(Config::new().async_support(true)) + .expect("wasmtime engine failed to initialize"); + let mut linker = Linker::::new(&engine); + factors + .init(&mut linker) + .expect("RuntimeFactors::init failed"); + + let manifest = toml! { + spin_manifest_version = 2 + + [application] + name = "test-app" + + [[trigger.test-trigger]] + + [component.empty] + source = "does-not-exist.wasm" + }; + Self { + factors, + manifest, + runtime_config: Default::default(), + } + } + + /// Extends the manifest with the given TOML. + /// + /// The default manifest includes boilerplate like the + /// `spin_manifest_version` and `[application]` section, so you typically + /// need to pass only a `[component.test-component]` section. + pub fn extend_manifest(mut self, manifest_merge: toml::Table) -> Self { + self.manifest.extend(manifest_merge); + self + } + + /// Sets the runtime config. + pub fn runtime_config(mut self, runtime_config: C) -> anyhow::Result + where + C: TryInto, + E: Into, + { + self.runtime_config = runtime_config + .try_into() + .map_err(Into::into) + .context("failed to build runtime config")?; + Ok(self) + } + + /// Run through the [`Factor`]s' lifecycle(s) to build a + /// [`RuntimeFactors::InstanceState`] for the last component defined in the + /// manifest. + pub async fn build_instance_state(self) -> anyhow::Result { + let locked_app = self + .build_locked_app() + .await + .context("failed to build locked app")?; + let app = App::new("test-app", locked_app); + let configured_app = self.factors.configure_app(app, self.runtime_config)?; + + let component = + configured_app.app().components().last().context( + "expected configured app to have at least one component, but it did not", + )?; + let builders = self.factors.prepare(&configured_app, component.id())?; + + Ok(self.factors.build_instance_state(builders)?) 
+ } + + pub async fn build_locked_app(&self) -> anyhow::Result { + let toml_str = toml::to_string(&self.manifest).context("failed serializing manifest")?; + let dir = tempfile::tempdir().context("failed creating tempdir")?; + let path = dir.path().join("spin.toml"); + std::fs::write(&path, toml_str).context("failed writing manifest")?; + spin_loader::from_file(&path, FilesMountStrategy::Direct, None).await + } +} diff --git a/crates/factors/Cargo.toml b/crates/factors/Cargo.toml new file mode 100644 index 0000000000..075e9a2118 --- /dev/null +++ b/crates/factors/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "spin-factors" +version = { workspace = true } +authors = { workspace = true } +edition = { workspace = true } + +[dependencies] +anyhow = "1.0" +serde = "1.0" +spin-app = { path = "../app" } +spin-factors-derive = { path = "../factors-derive" } +thiserror = "1.0" +# TODO: make this optional and behind a feature flag +toml = "0.8" +tracing = { workspace = true } +wasmtime = { workspace = true } + +[lints] +workspace = true diff --git a/crates/factors/build.rs b/crates/factors/build.rs new file mode 100644 index 0000000000..c96556b06e --- /dev/null +++ b/crates/factors/build.rs @@ -0,0 +1,6 @@ +fn main() { + println!("cargo:rerun-if-changed=build.rs"); + // Enable spin-factors-derive to emit expanded macro output. + let out_dir = std::env::var("OUT_DIR").unwrap(); + println!("cargo:rustc-env=SPIN_FACTORS_DERIVE_EXPAND_DIR={out_dir}"); +} diff --git a/crates/factors/src/factor.rs b/crates/factors/src/factor.rs new file mode 100644 index 0000000000..06e8a996d6 --- /dev/null +++ b/crates/factors/src/factor.rs @@ -0,0 +1,193 @@ +use std::any::Any; + +use wasmtime::component::{Linker, ResourceTable}; + +use crate::{ + prepare::FactorInstanceBuilder, App, Error, InstanceBuilders, PrepareContext, RuntimeFactors, +}; + +/// A contained (i.e., "factored") piece of runtime functionality. +pub trait Factor: Any + Sized { + /// The particular runtime configuration relevant to this factor. + /// + /// Runtime configuration allows for user-provided customization of the + /// factor's behavior on a per-app basis. + type RuntimeConfig; + + /// The application state of this factor. + /// + /// This state *may* be cached by the runtime across multiple requests. + type AppState; + + /// The builder of instance state for this factor. + type InstanceBuilder: FactorInstanceBuilder; + + /// Initializes this `Factor` for a runtime once at runtime startup. + /// + /// This will be called at most once, before any call to + /// [`Factor::prepare`]. `InitContext` provides access to a wasmtime + /// `Linker`, so this is where any bindgen `add_to_linker` calls go. + /// + /// The type parameter `T` here is the same as the [`wasmtime::Store`] type + /// parameter `T`, which will contain the [`RuntimeFactors::InstanceState`]. + fn init(&mut self, mut ctx: InitContext) -> anyhow::Result<()> { + _ = &mut ctx; + Ok(()) + } + + /// Performs factor-specific validation and configuration for the given + /// [`App`]. + /// + /// `ConfigureAppContext` gives access to: + /// - The `spin_app::App` + /// - This factors's `RuntimeConfig` + /// - The `AppState` for any factors configured before this one + /// + /// A runtime may - but is not required to - reuse the returned config + /// across multiple instances. Because this method may be called + /// per-instantiation, it should avoid any blocking operations that could + /// unnecessarily delay execution. 
+ /// + /// This method may be called without any call to `init` or `prepare` in + /// cases where only validation is needed (e.g., `spin doctor`). + fn configure_app( + &self, + ctx: ConfigureAppContext, + ) -> anyhow::Result; + + /// Creates a new `FactorInstanceBuilder`, which will later build + /// per-instance state for this factor. + /// + /// This method is given access to the app component being instantiated and + /// to any other factors' instance builders that have already been prepared. + /// As such, this is the primary place for inter-factor dependencies to be + /// used. + fn prepare( + &self, + ctx: PrepareContext, + _builders: &mut InstanceBuilders, + ) -> anyhow::Result; +} + +/// The instance state of the given [`Factor`] `F`. +pub type FactorInstanceState = + <::InstanceBuilder as FactorInstanceBuilder>::InstanceState; + +pub(crate) type GetDataFn = fn(&mut T) -> &mut FactorInstanceState; + +pub(crate) type GetDataWithTableFn = + fn(&mut T) -> (&mut FactorInstanceState, &mut ResourceTable); + +/// An InitContext is passed to [`Factor::init`], giving access to the global +/// common [`wasmtime::component::Linker`]. +pub struct InitContext<'a, T, U: Factor> { + pub(crate) linker: &'a mut Linker, + pub(crate) get_data: GetDataFn, + pub(crate) get_data_with_table: GetDataWithTableFn, +} + +impl<'a, T, U: Factor> InitContext<'a, T, U> { + #[doc(hidden)] + pub fn new( + linker: &'a mut Linker, + get_data: GetDataFn, + get_data_with_table: GetDataWithTableFn, + ) -> Self { + Self { + linker, + get_data, + get_data_with_table, + } + } + + /// Returns a mutable reference to the [`wasmtime::component::Linker`]. + pub fn linker(&mut self) -> &mut Linker { + self.linker + } + + /// Returns a function that can be used to get the instance state for this factor. + pub fn get_data_fn(&self) -> GetDataFn { + self.get_data + } + + /// Returns a function that can be used to get the instance state for this + /// factor along with the instance's [`ResourceTable`]. + pub fn get_data_with_table_fn(&self) -> GetDataWithTableFn { + self.get_data_with_table + } + + /// Convenience method to link a binding to the linker. + pub fn link_bindings( + &mut self, + add_to_linker: impl Fn( + &mut Linker, + fn(&mut T) -> &mut FactorInstanceState, + ) -> anyhow::Result<()>, + ) -> anyhow::Result<()> { + add_to_linker(self.linker, self.get_data) + } +} + +pub struct ConfigureAppContext<'a, T: RuntimeFactors, F: Factor> { + app: &'a App, + app_state: &'a T::AppState, + runtime_config: Option, +} + +impl<'a, T: RuntimeFactors, F: Factor> ConfigureAppContext<'a, T, F> { + #[doc(hidden)] + pub fn new( + app: &'a App, + app_state: &'a T::AppState, + runtime_config: Option, + ) -> crate::Result { + Ok(Self { + app, + app_state, + runtime_config, + }) + } + + /// Get the [`App`] being configured. + pub fn app(&self) -> &App { + self.app + } + + /// Get the app state related to the given factor. + pub fn app_state(&self) -> crate::Result<&U::AppState> { + T::app_state::(self.app_state).ok_or(Error::no_such_factor::()) + } + + /// Get a reference to the runtime configuration for the given factor. + pub fn runtime_config(&self) -> Option<&F::RuntimeConfig> { + self.runtime_config.as_ref() + } + + /// Take ownership of the runtime configuration for the given factor. 
+ pub fn take_runtime_config(&mut self) -> Option { + self.runtime_config.take() + } +} + +#[doc(hidden)] +pub struct ConfiguredApp { + app: App, + app_state: T::AppState, +} + +impl ConfiguredApp { + #[doc(hidden)] + pub fn new(app: App, app_state: T::AppState) -> Self { + Self { app, app_state } + } + + /// Get the configured [`App`]. + pub fn app(&self) -> &App { + &self.app + } + + /// Get the configured app's state related to the given factor. + pub fn app_state(&self) -> crate::Result<&U::AppState> { + T::app_state::(&self.app_state).ok_or(Error::no_such_factor::()) + } +} diff --git a/crates/factors/src/lib.rs b/crates/factors/src/lib.rs new file mode 100644 index 0000000000..d73435f215 --- /dev/null +++ b/crates/factors/src/lib.rs @@ -0,0 +1,98 @@ +mod factor; +mod prepare; +pub mod runtime_config; +mod runtime_factors; + +pub use anyhow; +pub use serde; +pub use wasmtime; + +pub use spin_app::{App, AppComponent}; +pub use spin_factors_derive::RuntimeFactors; + +pub use crate::{ + factor::{ConfigureAppContext, ConfiguredApp, Factor, FactorInstanceState, InitContext}, + prepare::{FactorInstanceBuilder, InstanceBuilders, PrepareContext, SelfInstanceBuilder}, + runtime_config::{FactorRuntimeConfigSource, RuntimeConfigSourceFinalizer}, + runtime_factors::{AsInstanceState, RuntimeFactors, RuntimeFactorsInstanceState}, +}; + +/// Result wrapper type defaulting to use [`Error`]. +pub type Result = std::result::Result; + +#[derive(Debug, thiserror::Error)] +pub enum Error { + #[error("two or more factors share the same type: {0}")] + DuplicateFactorTypes(String), + #[error("factor dependency ordering error: {0}")] + DependencyOrderingError(String), + #[error("{factor}::InstanceBuilder::build failed: {source}")] + FactorBuildError { + factor: &'static str, + source: anyhow::Error, + }, + #[error("{factor}::configure_app failed: {source}")] + FactorConfigureAppError { + factor: &'static str, + source: anyhow::Error, + }, + #[error("{factor}::init failed: {source}")] + FactorInitError { + factor: &'static str, + source: anyhow::Error, + }, + #[error("{factor}::prepare failed: {source}")] + FactorPrepareError { + factor: &'static str, + source: anyhow::Error, + }, + #[error("no such factor: {0}")] + NoSuchFactor(&'static str), + #[error("{factor} requested already-consumed key {key:?}")] + RuntimeConfigReusedKey { factor: &'static str, key: String }, + #[error("runtime config error: {0}")] + RuntimeConfigSource(#[source] anyhow::Error), + #[error("unused runtime config key(s): {}", keys.join(", "))] + RuntimeConfigUnusedKeys { keys: Vec }, + #[error("unknown component: {0}")] + UnknownComponent(String), +} + +impl Error { + fn no_such_factor() -> Self { + Self::NoSuchFactor(std::any::type_name::()) + } + + pub fn runtime_config_reused_key(key: impl Into) -> Self { + Self::RuntimeConfigReusedKey { + factor: std::any::type_name::(), + key: key.into(), + } + } + + // These helpers are used by factors-derive + + #[doc(hidden)] + pub fn factor_init_error(source: anyhow::Error) -> Self { + let factor = std::any::type_name::(); + Self::FactorInitError { factor, source } + } + + #[doc(hidden)] + pub fn factor_configure_app_error(source: anyhow::Error) -> Self { + let factor = std::any::type_name::(); + Self::FactorConfigureAppError { factor, source } + } + + #[doc(hidden)] + pub fn factor_prepare_error(source: anyhow::Error) -> Self { + let factor = std::any::type_name::(); + Self::FactorPrepareError { factor, source } + } + + #[doc(hidden)] + pub fn factor_build_error(source: anyhow::Error) -> Self { 
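// A minimal Factor implementation sketched against the trait above: no runtime
// configuration, no linker bindings (the default `init` no-op is kept), and an
// instance state that doubles as its own builder. `GreetingFactor` and its state
// are illustrative names; the `T: RuntimeFactors` parameters on
// `configure_app`/`prepare` are assumed from how the derive macro invokes them.
use spin_factors::{
    anyhow, ConfigureAppContext, Factor, InstanceBuilders, PrepareContext, RuntimeFactors,
    SelfInstanceBuilder,
};

pub struct GreetingFactor;

pub struct GreetingState {
    pub greeting: String,
}

// `SelfInstanceBuilder` lets `GreetingState` serve as both the instance builder
// and the per-instance state stored in the wasmtime `Store`.
impl SelfInstanceBuilder for GreetingState {}

impl Factor for GreetingFactor {
    type RuntimeConfig = ();
    type AppState = ();
    type InstanceBuilder = GreetingState;

    // `init` is left as the default no-op; a factor with WIT bindings would
    // call `ctx.link_bindings(...)` there instead.

    fn configure_app<T: RuntimeFactors>(
        &self,
        _ctx: ConfigureAppContext<T, Self>,
    ) -> anyhow::Result<Self::AppState> {
        // Per-app validation and configuration would happen here.
        Ok(())
    }

    fn prepare<T: RuntimeFactors>(
        &self,
        _ctx: PrepareContext<Self>,
        _builders: &mut InstanceBuilders<T>,
    ) -> anyhow::Result<Self::InstanceBuilder> {
        Ok(GreetingState {
            greeting: "hello".to_string(),
        })
    }
}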
+ let factor = std::any::type_name::(); + Self::FactorBuildError { factor, source } + } +} diff --git a/crates/factors/src/prepare.rs b/crates/factors/src/prepare.rs new file mode 100644 index 0000000000..318bb15b0e --- /dev/null +++ b/crates/factors/src/prepare.rs @@ -0,0 +1,95 @@ +use std::any::Any; + +use spin_app::AppComponent; + +use crate::{Error, Factor, RuntimeFactors}; + +/// A builder for a [`Factor`]'s per instance state. +pub trait FactorInstanceBuilder: Any { + /// The per instance state of the factor. + /// + /// This is equivalent to the existing `HostComponent::Data` and ends up + /// being stored in the `wasmtime::Store`. Any `bindgen` traits for this + /// factor will be implemented on this type. + type InstanceState: Send + 'static; + + /// Build the per instance state of the factor. + fn build(self) -> anyhow::Result; +} + +impl FactorInstanceBuilder for () { + type InstanceState = (); + + fn build(self) -> anyhow::Result { + Ok(()) + } +} + +/// A helper trait for when the type implementing [`FactorInstanceBuilder`] is also the instance state. +pub trait SelfInstanceBuilder: Send + 'static {} + +impl FactorInstanceBuilder for T { + type InstanceState = Self; + + fn build(self) -> anyhow::Result { + Ok(self) + } +} + +/// A PrepareContext is passed to [`Factor::prepare`]. +/// +/// This gives the factor access to app state and the app component. +pub struct PrepareContext<'a, F: Factor> { + pub(crate) app_state: &'a F::AppState, + pub(crate) app_component: &'a AppComponent<'a>, +} + +impl<'a, F: Factor> PrepareContext<'a, F> { + #[doc(hidden)] + pub fn new(app_state: &'a F::AppState, app_component: &'a AppComponent) -> Self { + Self { + app_state, + app_component, + } + } + + /// Get the app state related to the factor. + pub fn app_state(&self) -> &F::AppState { + self.app_state + } + + /// Get the app component. + pub fn app_component(&self) -> &AppComponent { + self.app_component + } +} + +/// The collection of all the already prepared `InstanceBuilder`s. +/// +/// Use `InstanceBuilders::get_mut` to get a mutable reference to a specific factor's instance builder. +pub struct InstanceBuilders<'a, T: RuntimeFactors> { + pub(crate) inner: &'a mut T::InstanceBuilders, +} + +impl<'a, T: RuntimeFactors> InstanceBuilders<'a, T> { + #[doc(hidden)] + pub fn new(inner: &'a mut T::InstanceBuilders) -> Self { + Self { inner } + } + + /// Returns the prepared [`FactorInstanceBuilder`] for the given [`Factor`]. + /// + /// Fails if the current [`RuntimeFactors`] does not include the given + /// [`Factor`] or if the given [`Factor`]'s builder has not been prepared + /// yet (because it is sequenced after this factor). + pub fn get_mut(&mut self) -> crate::Result<&mut U::InstanceBuilder> { + T::instance_builder_mut::(self.inner) + .ok_or(Error::no_such_factor::())? + .ok_or_else(|| { + Error::DependencyOrderingError(format!( + "{factor} builder requested before it was prepared", + factor = std::any::type_name::() + )) + }) + } +} diff --git a/crates/factors/src/runtime_config.rs b/crates/factors/src/runtime_config.rs new file mode 100644 index 0000000000..a11fc4583e --- /dev/null +++ b/crates/factors/src/runtime_config.rs @@ -0,0 +1,27 @@ +pub mod toml; + +use crate::Factor; + +/// The source of runtime configuration for a particular [`Factor`]. +pub trait FactorRuntimeConfigSource { + /// Get the runtime configuration for the factor. 
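// Sketch of an inter-factor dependency, as described for `Factor::prepare` and
// `InstanceBuilders::get_mut` above: during prepare, this factor injects an
// argument into the already-prepared WASI instance builder. It assumes
// `WasiFactor` appears before this factor in the `#[derive(RuntimeFactors)]`
// struct, and reuses the `args` builder method exercised by the executor test.
use spin_factor_wasi::WasiFactor;
use spin_factors::{
    anyhow, ConfigureAppContext, Factor, InstanceBuilders, PrepareContext, RuntimeFactors,
};

pub struct ArgInjectorFactor;

impl Factor for ArgInjectorFactor {
    type RuntimeConfig = ();
    type AppState = ();
    type InstanceBuilder = ();

    fn configure_app<T: RuntimeFactors>(
        &self,
        _ctx: ConfigureAppContext<T, Self>,
    ) -> anyhow::Result<Self::AppState> {
        Ok(())
    }

    fn prepare<T: RuntimeFactors>(
        &self,
        _ctx: PrepareContext<Self>,
        builders: &mut InstanceBuilders<T>,
    ) -> anyhow::Result<Self::InstanceBuilder> {
        // Fails with `Error::DependencyOrderingError` if WasiFactor is sequenced
        // after this factor, and with `Error::NoSuchFactor` if it is not part of
        // the collection at all.
        let wasi_builder = builders.get_mut::<WasiFactor>()?;
        wasi_builder.args(["--injected"]);
        Ok(())
    }
}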
+ fn get_runtime_config(&mut self) -> anyhow::Result>; +} + +impl FactorRuntimeConfigSource for () { + fn get_runtime_config(&mut self) -> anyhow::Result::RuntimeConfig>> { + Ok(None) + } +} + +/// Run some finalization logic on a [`RuntimeConfigSource`]. +pub trait RuntimeConfigSourceFinalizer { + /// Finalize the runtime config source. + fn finalize(&mut self) -> anyhow::Result<()>; +} + +impl RuntimeConfigSourceFinalizer for () { + fn finalize(&mut self) -> anyhow::Result<()> { + Ok(()) + } +} diff --git a/crates/factors/src/runtime_config/toml.rs b/crates/factors/src/runtime_config/toml.rs new file mode 100644 index 0000000000..23e3ef1e24 --- /dev/null +++ b/crates/factors/src/runtime_config/toml.rs @@ -0,0 +1,57 @@ +//! Helpers for reading runtime configuration from a TOML file. + +use std::{cell::RefCell, collections::HashSet}; + +/// A trait for getting a TOML value by key. +pub trait GetTomlValue { + fn get(&self, key: &str) -> Option<&toml::Value>; +} + +impl GetTomlValue for toml::Table { + fn get(&self, key: &str) -> Option<&toml::Value> { + self.get(key) + } +} + +#[derive(Debug, Clone)] +/// A helper for tracking which keys have been used in a TOML table. +pub struct TomlKeyTracker<'a> { + unused_keys: RefCell>, + table: &'a toml::Table, +} + +impl<'a> TomlKeyTracker<'a> { + pub fn new(table: &'a toml::Table) -> Self { + Self { + unused_keys: RefCell::new(table.keys().map(String::as_str).collect()), + table, + } + } + + pub fn validate_all_keys_used(&self) -> crate::Result<()> { + if !self.unused_keys.borrow().is_empty() { + return Err(crate::Error::RuntimeConfigUnusedKeys { + keys: self + .unused_keys + .borrow() + .iter() + .map(|s| (*s).to_owned()) + .collect(), + }); + } + Ok(()) + } +} + +impl GetTomlValue for TomlKeyTracker<'_> { + fn get(&self, key: &str) -> Option<&toml::Value> { + self.unused_keys.borrow_mut().remove(key); + self.table.get(key) + } +} + +impl AsRef for TomlKeyTracker<'_> { + fn as_ref(&self) -> &toml::Table { + self.table + } +} diff --git a/crates/factors/src/runtime_factors.rs b/crates/factors/src/runtime_factors.rs new file mode 100644 index 0000000000..e18a66c171 --- /dev/null +++ b/crates/factors/src/runtime_factors.rs @@ -0,0 +1,103 @@ +use wasmtime::component::{Linker, ResourceTable}; + +use crate::{factor::FactorInstanceState, App, ConfiguredApp, Factor}; + +/// A collection of `Factor`s that are initialized and configured together. +/// +/// Implemented by `#[derive(RuntimeFactors)]` and should not be implemented manually. +/// +/// # Example +/// +/// A typical usage of `RuntimeFactors` would look something like the following pseudo-code: +/// +/// ```ignore +/// #[derive(RuntimeFactors)] +/// struct MyFactors { +/// // ... +/// } +/// // Initialize the factors collection +/// let factors = MyFactors { /* .. 
*/ }; +/// // Initialize each factor with a linker +/// factors.init(&mut linker)?; +/// // Configure the factors with an app and runtime config +/// let configured_app = factors.configure_app(app, runtime_config)?; +/// // Prepare instance state builders +/// let builders = factors.prepare(&configured_app, "component-id")?; +/// // Build the instance state for the factors +/// let data = factors.build_instance_state(builders)?; +/// // Initialize a `wasmtime` store with the instance state +/// let mut store = wasmtime::Store::new(&engine, data); +/// // Instantiate the component +/// let instance = linker.instantiate_async(&mut store, &component).await?; +/// ``` +pub trait RuntimeFactors: Sized + 'static { + /// The per application state of all the factors. + type AppState; + /// The per instance state of the factors. + type InstanceState: RuntimeFactorsInstanceState; + /// The collection of all the `InstanceBuilder`s of the factors. + type InstanceBuilders; + /// The runtime configuration of all the factors. + type RuntimeConfig: Default; + + /// Initialize the factors with the given linker. + /// + /// Each factor's `init` is called in turn. Must be called once before + /// [`RuntimeFactors::prepare`]. + fn init + Send + 'static>( + &mut self, + linker: &mut Linker, + ) -> crate::Result<()>; + + /// Configure the factors with the given app and runtime config. + fn configure_app( + &self, + app: App, + runtime_config: Self::RuntimeConfig, + ) -> crate::Result>; + + /// Prepare the factors' instance state builders. + fn prepare( + &self, + configured_app: &ConfiguredApp, + component_id: &str, + ) -> crate::Result; + + /// Build the instance state for the factors. + fn build_instance_state( + &self, + builders: Self::InstanceBuilders, + ) -> crate::Result; + + /// Get the app state related to a particular factor. + fn app_state(app_state: &Self::AppState) -> Option<&F::AppState>; + + /// Get the instance builder of a particular factor. + /// + /// The outer `Option` is `None` if the factor has not been registered with this `Factors` collection, + /// and the inner `Option` is `None` if the factor has not been prepared yet. + fn instance_builder_mut( + builders: &mut Self::InstanceBuilders, + ) -> Option>; +} + +/// Get the state of a particular Factor from the overall InstanceState +/// +/// Implemented by `#[derive(RuntimeFactors)]` +pub trait RuntimeFactorsInstanceState: AsInstanceState + Send + 'static { + fn get_with_table( + &mut self, + ) -> Option<(&mut FactorInstanceState, &mut ResourceTable)>; + + fn get(&mut self) -> Option<&mut FactorInstanceState> { + self.get_with_table::().map(|(state, _)| state) + } + + fn table(&self) -> &ResourceTable; + + fn table_mut(&mut self) -> &mut ResourceTable; +} + +pub trait AsInstanceState { + fn as_instance_state(&mut self) -> &mut T; +} diff --git a/crates/http/src/config.rs b/crates/http/src/config.rs index 2c675f1ca2..5ccee8e577 100644 --- a/crates/http/src/config.rs +++ b/crates/http/src/config.rs @@ -63,7 +63,8 @@ pub enum HttpExecutorType { #[derive(Clone, Debug, Deserialize, Serialize)] #[serde(default, deny_unknown_fields)] pub struct WagiTriggerConfig { - /// The name of the entrypoint. + /// The name of the entrypoint. (DEPRECATED) + #[serde(skip_serializing)] pub entrypoint: String, /// A string representation of the argv array. 
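To make the `runtime_config/toml.rs` helpers above concrete, here is a hedged sketch of the key-tracking pattern a TOML-backed `FactorRuntimeConfigSource`/`RuntimeConfigSourceFinalizer` implementation would follow; the module path, the section names, and the conversion of the crate's `Error` into `anyhow::Error` are assumptions:

```rust
use spin_factors::runtime_config::toml::{GetTomlValue as _, TomlKeyTracker};

fn check_runtime_config_keys() -> anyhow::Result<()> {
    // A made-up runtime-config table: one recognized section, one stray key.
    let table: toml::Table = toml::from_str(
        r#"
        [key_value_store.default]
        type = "spin"

        [typo_section]
        oops = true
        "#,
    )?;

    let tracker = TomlKeyTracker::new(&table);
    // Reading a key through `get` removes it from the unused set.
    let _kv = tracker.get("key_value_store");
    // "typo_section" was never read, so this reports it via RuntimeConfigUnusedKeys.
    tracker.validate_all_keys_used()?;
    Ok(())
}
```

A real source would hand each fetched section to the corresponding factor's `RuntimeConfig` deserializer rather than discarding it.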
diff --git a/crates/http/src/trigger.rs b/crates/http/src/trigger.rs index ca53b33544..37a030ed0d 100644 --- a/crates/http/src/trigger.rs +++ b/crates/http/src/trigger.rs @@ -1,8 +1,4 @@ use serde::{Deserialize, Serialize}; -use spin_locked_app::MetadataKey; - -/// Http trigger metadata key -pub const METADATA_KEY: MetadataKey = MetadataKey::new("trigger"); #[derive(Clone, Debug, Default, Deserialize, Serialize)] #[serde(deny_unknown_fields)] diff --git a/crates/key-value-sqlite/src/lib.rs b/crates/key-value-sqlite/src/lib.rs index d84887ab7e..152da5ba38 100644 --- a/crates/key-value-sqlite/src/lib.rs +++ b/crates/key-value-sqlite/src/lib.rs @@ -10,6 +10,7 @@ use std::{ use tokio::task; use tracing::{instrument, Level}; +#[derive(Clone, Debug)] pub enum DatabaseLocation { InMemory, Path(PathBuf), @@ -159,10 +160,13 @@ mod test { .into_iter() .map(ToOwned::to_owned) .collect(), - Arc::new(DelegatingStoreManager::new([( - "default".to_owned(), - Arc::new(KeyValueSqlite::new(DatabaseLocation::InMemory)) as _, - )])), + Arc::new(DelegatingStoreManager::new( + [( + "default".to_owned(), + Arc::new(KeyValueSqlite::new(DatabaseLocation::InMemory)) as _, + )], + Arc::new(|_: &str| -> Option> { None }), + )), ); assert!(matches!( diff --git a/crates/key-value/Cargo.toml b/crates/key-value/Cargo.toml index fa8a440731..e5b53aec5a 100644 --- a/crates/key-value/Cargo.toml +++ b/crates/key-value/Cargo.toml @@ -9,7 +9,7 @@ doctest = false [dependencies] anyhow = "1.0" -tokio = { version = "1", features = ["macros", "sync"] } +tokio = { version = "1", features = ["macros", "sync", "rt"] } spin-app = { path = "../app" } spin-core = { path = "../core" } spin-world = { path = "../world" } diff --git a/crates/key-value/src/lib.rs b/crates/key-value/src/lib.rs index dc84fe7978..c9d2b6354d 100644 --- a/crates/key-value/src/lib.rs +++ b/crates/key-value/src/lib.rs @@ -5,11 +5,13 @@ use spin_world::v2::key_value; use std::{collections::HashSet, sync::Arc}; use table::Table; -mod host_component; +// TODO(factors): Code left for reference; remove after migration to factors +// mod host_component; mod util; -pub use host_component::{manager, KeyValueComponent}; -pub use util::{CachingStoreManager, DelegatingStoreManager, EmptyStoreManager}; +pub use util::{ + CachingStoreManager, DefaultManagerGetter, DelegatingStoreManager, EmptyStoreManager, +}; pub const KEY_VALUE_STORES_KEY: MetadataKey> = MetadataKey::new("key_value_stores"); @@ -59,6 +61,10 @@ impl KeyValueDispatch { pub fn get_store(&self, store: Resource) -> anyhow::Result<&Arc> { self.stores.get(store.rep()).context("invalid store") } + + pub fn allowed_stores(&self) -> &HashSet { + &self.allowed_stores + } } impl Default for KeyValueDispatch { diff --git a/crates/key-value/src/util.rs b/crates/key-value/src/util.rs index f689e0f215..c2aef7e4d3 100644 --- a/crates/key-value/src/util.rs +++ b/crates/key-value/src/util.rs @@ -28,25 +28,39 @@ impl StoreManager for EmptyStoreManager { } } +/// A function that takes a store label and returns the default store manager, if one exists. 
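The `util.rs` hunk that follows wires an optional fallback into `DelegatingStoreManager`; a rough sketch of how a host might construct one, mirroring the `key-value-sqlite` test change above (the function and the exact re-export paths are assumptions):

```rust
use std::sync::Arc;

use spin_key_value::{DefaultManagerGetter, DelegatingStoreManager, StoreManager};

// Labels with an explicit delegate use it; any other label falls back to the
// shared default store manager supplied by the getter.
fn delegating_manager(
    explicit: Vec<(String, Arc<dyn StoreManager>)>,
    default: Arc<dyn StoreManager>,
) -> DelegatingStoreManager {
    let default_getter: DefaultManagerGetter =
        Arc::new(move |_label: &str| Some(default.clone()));
    DelegatingStoreManager::new(explicit, default_getter)
}
```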
+pub type DefaultManagerGetter = Arc Option> + Send + Sync>; + pub struct DelegatingStoreManager { delegates: HashMap>, + default_manager: DefaultManagerGetter, } impl DelegatingStoreManager { - pub fn new(delegates: impl IntoIterator)>) -> Self { + pub fn new( + delegates: impl IntoIterator)>, + default_manager: DefaultManagerGetter, + ) -> Self { let delegates = delegates.into_iter().collect(); - Self { delegates } + Self { + delegates, + default_manager, + } } } #[async_trait] impl StoreManager for DelegatingStoreManager { async fn get(&self, name: &str) -> Result, Error> { - self.delegates - .get(name) - .ok_or(Error::NoSuchStore)? - .get(name) - .await + match self.delegates.get(name) { + Some(store) => store.get(name).await, + None => { + (self.default_manager)(name) + .ok_or(Error::NoSuchStore)? + .get(name) + .await + } + } } fn is_defined(&self, store_name: &str) -> bool { diff --git a/crates/llm-local/Cargo.toml b/crates/llm-local/Cargo.toml index b0d4ea3972..5b73316423 100644 --- a/crates/llm-local/Cargo.toml +++ b/crates/llm-local/Cargo.toml @@ -20,7 +20,6 @@ safetensors = "0.3.3" serde = { version = "1.0.150", features = ["derive"] } spin-common = { path = "../common" } spin-core = { path = "../core" } -spin-llm = { path = "../llm" } spin-world = { path = "../world" } terminal = { path = "../terminal" } tokenizers = "0.13.4" diff --git a/crates/llm-local/src/lib.rs b/crates/llm-local/src/lib.rs index f5d00c7a1b..cf0b9f9924 100644 --- a/crates/llm-local/src/lib.rs +++ b/crates/llm-local/src/lib.rs @@ -10,8 +10,6 @@ use llm::{ }; use rand::SeedableRng; use spin_common::ui::quoted_path; -use spin_core::async_trait; -use spin_llm::{LlmEngine, MODEL_ALL_MINILM_L6_V2}; use spin_world::v2::llm::{self as wasi_llm}; use std::{ collections::hash_map::Entry, @@ -23,6 +21,8 @@ use std::{ use tokenizers::PaddingParams; use tracing::{instrument, Level}; +const MODEL_ALL_MINILM_L6_V2: &str = "all-minilm-l6-v2"; + #[derive(Clone)] pub struct LocalLlmEngine { registry: PathBuf, @@ -31,10 +31,9 @@ pub struct LocalLlmEngine { embeddings_models: HashMap>, } -#[async_trait] -impl LlmEngine for LocalLlmEngine { +impl LocalLlmEngine { #[instrument(name = "spin_llm_local.infer", skip(self, prompt), err(level = Level::INFO))] - async fn infer( + pub async fn infer( &mut self, model: wasi_llm::InferencingModel, prompt: String, @@ -94,7 +93,7 @@ impl LlmEngine for LocalLlmEngine { } #[instrument(name = "spin_llm_local.generate_embeddings", skip(self, data), err(level = Level::INFO))] - async fn generate_embeddings( + pub async fn generate_embeddings( &mut self, model: wasi_llm::EmbeddingModel, data: Vec, @@ -107,18 +106,13 @@ impl LlmEngine for LocalLlmEngine { } impl LocalLlmEngine { - pub async fn new(registry: PathBuf, use_gpu: bool) -> Self { - let mut engine = Self { + pub fn new(registry: PathBuf, use_gpu: bool) -> Self { + Self { registry, use_gpu, inferencing_models: Default::default(), embeddings_models: Default::default(), - }; - - let _ = engine.inferencing_model("llama2-chat".into()).await; - let _ = engine.embeddings_model(MODEL_ALL_MINILM_L6_V2.into()).await; - - engine + } } /// Get embeddings model from cache or load from disk diff --git a/crates/llm-remote-http/Cargo.toml b/crates/llm-remote-http/Cargo.toml index 3a9bb8e12b..af05459e56 100644 --- a/crates/llm-remote-http/Cargo.toml +++ b/crates/llm-remote-http/Cargo.toml @@ -7,11 +7,8 @@ edition = { workspace = true } [dependencies] anyhow = "1.0" http = "0.2" -llm = { git = "https://github.com/rustformers/llm", rev = 
"2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663", default-features = false } serde = { version = "1.0.150", features = ["derive"] } serde_json = "1.0" -spin-core = { path = "../core" } -spin-llm = { path = "../llm" } spin-telemetry = { path = "../telemetry" } spin-world = { path = "../world" } reqwest = { version = "0.11", features = ["gzip", "json"] } diff --git a/crates/llm-remote-http/src/lib.rs b/crates/llm-remote-http/src/lib.rs index 4987b14b08..4a00395392 100644 --- a/crates/llm-remote-http/src/lib.rs +++ b/crates/llm-remote-http/src/lib.rs @@ -5,8 +5,6 @@ use reqwest::{ }; use serde::{Deserialize, Serialize}; use serde_json::json; -use spin_core::async_trait; -use spin_llm::LlmEngine; use spin_world::v2::llm::{self as wasi_llm}; use tracing::{instrument, Level}; @@ -53,10 +51,9 @@ struct EmbeddingResponseBody { usage: EmbeddingUsage, } -#[async_trait] -impl LlmEngine for RemoteHttpLlmEngine { +impl RemoteHttpLlmEngine { #[instrument(name = "spin_llm_remote_http.infer", skip(self, prompt), err(level = Level::INFO), fields(otel.kind = "client"))] - async fn infer( + pub async fn infer( &mut self, model: wasi_llm::InferencingModel, prompt: String, @@ -119,7 +116,7 @@ impl LlmEngine for RemoteHttpLlmEngine { } #[instrument(name = "spin_llm_remote_http.generate_embeddings", skip(self, data), err(level = Level::INFO), fields(otel.kind = "client"))] - async fn generate_embeddings( + pub async fn generate_embeddings( &mut self, model: wasi_llm::EmbeddingModel, data: Vec, diff --git a/crates/llm/Cargo.toml b/crates/llm/Cargo.toml deleted file mode 100644 index 08c193e0b6..0000000000 --- a/crates/llm/Cargo.toml +++ /dev/null @@ -1,16 +0,0 @@ -[package] -name = "spin-llm" -version = { workspace = true } -authors = { workspace = true } -edition = { workspace = true } - -[dependencies] -anyhow = "1.0" -bytesize = "1.1" -llm = { git = "https://github.com/rustformers/llm", rev = "2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663", features = [ - "tokenizers-remote", - "models", -], default-features = false } -spin-app = { path = "../app" } -spin-core = { path = "../core" } -spin-world = { path = "../world" } diff --git a/crates/llm/src/host_component.rs b/crates/llm/src/host_component.rs deleted file mode 100644 index 8574e6bb0e..0000000000 --- a/crates/llm/src/host_component.rs +++ /dev/null @@ -1,49 +0,0 @@ -use spin_app::DynamicHostComponent; -use spin_core::HostComponent; - -use crate::{LlmDispatch, LlmEngine, AI_MODELS_KEY}; - -pub struct LlmComponent { - create_engine: Box Box + Send + Sync>, -} - -impl LlmComponent { - pub fn new(create_engine: F) -> Self - where - F: Fn() -> Box + Send + Sync + 'static, - { - Self { - create_engine: Box::new(create_engine), - } - } -} - -impl HostComponent for LlmComponent { - type Data = LlmDispatch; - - fn add_to_linker( - linker: &mut spin_core::Linker, - get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> anyhow::Result<()> { - spin_world::v1::llm::add_to_linker(linker, get)?; - spin_world::v2::llm::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - LlmDispatch { - engine: (self.create_engine)(), - allowed_models: Default::default(), - } - } -} - -impl DynamicHostComponent for LlmComponent { - fn update_data( - &self, - data: &mut Self::Data, - component: &spin_app::AppComponent, - ) -> anyhow::Result<()> { - data.allowed_models = component.get_metadata(AI_MODELS_KEY)?.unwrap_or_default(); - Ok(()) - } -} diff --git a/crates/loader/Cargo.toml b/crates/loader/Cargo.toml index 9173ec0eb9..74680ca8d7 
100644 --- a/crates/loader/Cargo.toml +++ b/crates/loader/Cargo.toml @@ -16,7 +16,6 @@ indexmap = { version = "1" } itertools = "0.10.3" lazy_static = "1.4.0" mime_guess = { version = "2.0" } -outbound-http = { path = "../outbound-http", default-features = false } spin-outbound-networking = { path = "../outbound-networking" } path-absolutize = { version = "3.0.11", features = ["use_unix_paths_on_wasm"] } regex = "1.5.4" diff --git a/crates/locked-app/Cargo.toml b/crates/locked-app/Cargo.toml index 44b25fee5f..d0531c52a6 100644 --- a/crates/locked-app/Cargo.toml +++ b/crates/locked-app/Cargo.toml @@ -7,7 +7,6 @@ edition = { workspace = true } [dependencies] anyhow = "1.0" async-trait = "0.1" -ouroboros = "0.18.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" spin-serde = { path = "../serde" } diff --git a/crates/oci/Cargo.toml b/crates/oci/Cargo.toml index 54e3e03e10..92ac7ae6e3 100644 --- a/crates/oci/Cargo.toml +++ b/crates/oci/Cargo.toml @@ -6,7 +6,7 @@ edition = { workspace = true } [dependencies] anyhow = "1.0" -async-compression = "0.4.3" +async-compression = { version = "0.4.3", features = ["gzip", "tokio"] } # Fork with nested async-std dependency bumped to satisfy Windows build; branch/revision is protected async-tar = { git = "https://github.com/vdice/async-tar", rev = "71e037f9652971e7a55b412a8e47a37b06f9c29d" } base64 = "0.21" @@ -31,6 +31,3 @@ tokio = { version = "1", features = ["fs"] } tokio-util = { version = "0.7.9", features = ["compat"] } tracing = { workspace = true } walkdir = "2.3" - -[dev-dependencies] -spin-testing = { path = "../testing" } diff --git a/crates/oci/src/client.rs b/crates/oci/src/client.rs index 92015b9458..218b4cb666 100644 --- a/crates/oci/src/client.rs +++ b/crates/oci/src/client.rs @@ -824,252 +824,252 @@ mod test { } } - #[tokio::test] - async fn can_assemble_layers() { - use spin_locked_app::locked::LockedComponent; - use tokio::io::AsyncWriteExt; - - let working_dir = tempfile::tempdir().unwrap(); - - // Set up component/file directory tree - // - // create component1 and component2 dirs - let _ = tokio::fs::create_dir(working_dir.path().join("component1").as_path()).await; - let _ = tokio::fs::create_dir(working_dir.path().join("component2").as_path()).await; - - // create component "wasm" files - let mut c1 = tokio::fs::File::create(working_dir.path().join("component1.wasm")) - .await - .expect("should create component wasm file"); - c1.write_all(b"c1") - .await - .expect("should write component wasm contents"); - let mut c2 = tokio::fs::File::create(working_dir.path().join("component2.wasm")) - .await - .expect("should create component wasm file"); - c2.write_all(b"c2") - .await - .expect("should write component wasm contents"); - - // component1 files - let mut c1f1 = tokio::fs::File::create(working_dir.path().join("component1").join("bar")) - .await - .expect("should create component file"); - c1f1.write_all(b"bar") - .await - .expect("should write file contents"); - let mut c1f2 = tokio::fs::File::create(working_dir.path().join("component1").join("baz")) - .await - .expect("should create component file"); - c1f2.write_all(b"baz") - .await - .expect("should write file contents"); - - // component2 files - let mut c2f1 = tokio::fs::File::create(working_dir.path().join("component2").join("baz")) - .await - .expect("should create component file"); - c2f1.write_all(b"baz") - .await - .expect("should write file contents"); - - #[derive(Clone)] - struct TestCase { - name: &'static str, - opts: Option, - locked_components: 
Vec, - expected_layer_count: usize, - expected_error: Option<&'static str>, - } - - let tests: Vec = [ - TestCase { - name: "Two component layers", - opts: None, - locked_components: spin_testing::from_json!([{ - "id": "component1", - "source": { - "content_type": "application/wasm", - "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), - "digest": "digest", - }}, - { - "id": "component2", - "source": { - "content_type": "application/wasm", - "source": format!("file://{}", working_dir.path().join("component2.wasm").to_str().unwrap()), - "digest": "digest", - }}]), - expected_layer_count: 2, - expected_error: None, - }, - TestCase { - name: "One component layer and two file layers", - opts: Some(ClientOpts{content_ref_inline_max_size: 0}), - locked_components: spin_testing::from_json!([{ - "id": "component1", - "source": { - "content_type": "application/wasm", - "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), - "digest": "digest", - }, - "files": [ - { - "source": format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), - "path": working_dir.path().join("component1").join("bar").to_str().unwrap() - }, - { - "source": format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), - "path": working_dir.path().join("component1").join("baz").to_str().unwrap() - } - ] - }]), - expected_layer_count: 3, - expected_error: None, - }, - TestCase { - name: "One component layer and one file with inlined content", - opts: None, - locked_components: spin_testing::from_json!([{ - "id": "component1", - "source": { - "content_type": "application/wasm", - "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), - "digest": "digest", - }, - "files": [ - { - "source": format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), - "path": working_dir.path().join("component1").join("bar").to_str().unwrap() - } - ] - }]), - expected_layer_count: 1, - expected_error: None, - }, - TestCase { - name: "Component has no source", - opts: None, - locked_components: spin_testing::from_json!([{ - "id": "component1", - "source": { - "content_type": "application/wasm", - "source": "", - "digest": "digest", - } - }]), - expected_layer_count: 0, - expected_error: Some("Invalid URL: \"\""), - }, - TestCase { - name: "Duplicate component sources", - opts: None, - locked_components: spin_testing::from_json!([{ - "id": "component1", - "source": { - "content_type": "application/wasm", - "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), - "digest": "digest", - }}, - { - "id": "component2", - "source": { - "content_type": "application/wasm", - "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), - "digest": "digest", - }}]), - expected_layer_count: 1, - expected_error: None, - }, - TestCase { - name: "Duplicate file paths", - opts: Some(ClientOpts{content_ref_inline_max_size: 0}), - locked_components: spin_testing::from_json!([{ - "id": "component1", - "source": { - "content_type": "application/wasm", - "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), - "digest": "digest", - }, - "files": [ - { - "source": format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), - "path": working_dir.path().join("component1").join("bar").to_str().unwrap() - }, - { - "source": format!("file://{}", 
working_dir.path().join("component1").to_str().unwrap()), - "path": working_dir.path().join("component1").join("baz").to_str().unwrap() - } - ]}, - { - "id": "component2", - "source": { - "content_type": "application/wasm", - "source": format!("file://{}", working_dir.path().join("component2.wasm").to_str().unwrap()), - "digest": "digest", - }, - "files": [ - { - "source": format!("file://{}", working_dir.path().join("component2").to_str().unwrap()), - "path": working_dir.path().join("component2").join("baz").to_str().unwrap() - } - ] - }]), - expected_layer_count: 4, - expected_error: None, - }, - ] - .to_vec(); - - for tc in tests { - let triggers = Default::default(); - let metadata = Default::default(); - let variables = Default::default(); - let mut locked = LockedApp { - spin_lock_version: Default::default(), - components: tc.locked_components, - triggers, - metadata, - variables, - must_understand: Default::default(), - host_requirements: Default::default(), - }; - - let mut client = Client::new(false, Some(working_dir.path().to_path_buf())) - .await - .expect("should create new client"); - if let Some(o) = tc.opts { - client.opts = o; - } - - match tc.expected_error { - Some(e) => { - assert_eq!( - e, - client - .assemble_layers(&mut locked, AssemblyMode::Simple) - .await - .unwrap_err() - .to_string(), - "{}", - tc.name - ) - } - None => { - assert_eq!( - tc.expected_layer_count, - client - .assemble_layers(&mut locked, AssemblyMode::Simple) - .await - .unwrap() - .len(), - "{}", - tc.name - ) - } - } - } - } + // #[tokio::test] + // async fn can_assemble_layers() { + // use spin_locked_app::locked::LockedComponent; + // use tokio::io::AsyncWriteExt; + + // let working_dir = tempfile::tempdir().unwrap(); + + // // Set up component/file directory tree + // // + // // create component1 and component2 dirs + // let _ = tokio::fs::create_dir(working_dir.path().join("component1").as_path()).await; + // let _ = tokio::fs::create_dir(working_dir.path().join("component2").as_path()).await; + + // // create component "wasm" files + // let mut c1 = tokio::fs::File::create(working_dir.path().join("component1.wasm")) + // .await + // .expect("should create component wasm file"); + // c1.write_all(b"c1") + // .await + // .expect("should write component wasm contents"); + // let mut c2 = tokio::fs::File::create(working_dir.path().join("component2.wasm")) + // .await + // .expect("should create component wasm file"); + // c2.write_all(b"c2") + // .await + // .expect("should write component wasm contents"); + + // // component1 files + // let mut c1f1 = tokio::fs::File::create(working_dir.path().join("component1").join("bar")) + // .await + // .expect("should create component file"); + // c1f1.write_all(b"bar") + // .await + // .expect("should write file contents"); + // let mut c1f2 = tokio::fs::File::create(working_dir.path().join("component1").join("baz")) + // .await + // .expect("should create component file"); + // c1f2.write_all(b"baz") + // .await + // .expect("should write file contents"); + + // // component2 files + // let mut c2f1 = tokio::fs::File::create(working_dir.path().join("component2").join("baz")) + // .await + // .expect("should create component file"); + // c2f1.write_all(b"baz") + // .await + // .expect("should write file contents"); + + // #[derive(Clone)] + // struct TestCase { + // name: &'static str, + // opts: Option, + // locked_components: Vec, + // expected_layer_count: usize, + // expected_error: Option<&'static str>, + // } + + // let tests: Vec = [ + // TestCase 
{ + // name: "Two component layers", + // opts: None, + // locked_components: spin_testing::from_json!([{ + // "id": "component1", + // "source": { + // "content_type": "application/wasm", + // "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), + // "digest": "digest", + // }}, + // { + // "id": "component2", + // "source": { + // "content_type": "application/wasm", + // "source": format!("file://{}", working_dir.path().join("component2.wasm").to_str().unwrap()), + // "digest": "digest", + // }}]), + // expected_layer_count: 2, + // expected_error: None, + // }, + // TestCase { + // name: "One component layer and two file layers", + // opts: Some(ClientOpts{content_ref_inline_max_size: 0}), + // locked_components: spin_testing::from_json!([{ + // "id": "component1", + // "source": { + // "content_type": "application/wasm", + // "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), + // "digest": "digest", + // }, + // "files": [ + // { + // "source": format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), + // "path": working_dir.path().join("component1").join("bar").to_str().unwrap() + // }, + // { + // "source": format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), + // "path": working_dir.path().join("component1").join("baz").to_str().unwrap() + // } + // ] + // }]), + // expected_layer_count: 3, + // expected_error: None, + // }, + // TestCase { + // name: "One component layer and one file with inlined content", + // opts: None, + // locked_components: spin_testing::from_json!([{ + // "id": "component1", + // "source": { + // "content_type": "application/wasm", + // "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), + // "digest": "digest", + // }, + // "files": [ + // { + // "source": format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), + // "path": working_dir.path().join("component1").join("bar").to_str().unwrap() + // } + // ] + // }]), + // expected_layer_count: 1, + // expected_error: None, + // }, + // TestCase { + // name: "Component has no source", + // opts: None, + // locked_components: spin_testing::from_json!([{ + // "id": "component1", + // "source": { + // "content_type": "application/wasm", + // "source": "", + // "digest": "digest", + // } + // }]), + // expected_layer_count: 0, + // expected_error: Some("Invalid URL: \"\""), + // }, + // TestCase { + // name: "Duplicate component sources", + // opts: None, + // locked_components: spin_testing::from_json!([{ + // "id": "component1", + // "source": { + // "content_type": "application/wasm", + // "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), + // "digest": "digest", + // }}, + // { + // "id": "component2", + // "source": { + // "content_type": "application/wasm", + // "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), + // "digest": "digest", + // }}]), + // expected_layer_count: 1, + // expected_error: None, + // }, + // TestCase { + // name: "Duplicate file paths", + // opts: Some(ClientOpts{content_ref_inline_max_size: 0}), + // locked_components: spin_testing::from_json!([{ + // "id": "component1", + // "source": { + // "content_type": "application/wasm", + // "source": format!("file://{}", working_dir.path().join("component1.wasm").to_str().unwrap()), + // "digest": "digest", + // }, + // "files": [ + // { + // "source": 
format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), + // "path": working_dir.path().join("component1").join("bar").to_str().unwrap() + // }, + // { + // "source": format!("file://{}", working_dir.path().join("component1").to_str().unwrap()), + // "path": working_dir.path().join("component1").join("baz").to_str().unwrap() + // } + // ]}, + // { + // "id": "component2", + // "source": { + // "content_type": "application/wasm", + // "source": format!("file://{}", working_dir.path().join("component2.wasm").to_str().unwrap()), + // "digest": "digest", + // }, + // "files": [ + // { + // "source": format!("file://{}", working_dir.path().join("component2").to_str().unwrap()), + // "path": working_dir.path().join("component2").join("baz").to_str().unwrap() + // } + // ] + // }]), + // expected_layer_count: 4, + // expected_error: None, + // }, + // ] + // .to_vec(); + + // for tc in tests { + // let triggers = Default::default(); + // let metadata = Default::default(); + // let variables = Default::default(); + // let mut locked = LockedApp { + // spin_lock_version: Default::default(), + // components: tc.locked_components, + // triggers, + // metadata, + // variables, + // must_understand: Default::default(), + // host_requirements: Default::default(), + // }; + + // let mut client = Client::new(false, Some(working_dir.path().to_path_buf())) + // .await + // .expect("should create new client"); + // if let Some(o) = tc.opts { + // client.opts = o; + // } + + // match tc.expected_error { + // Some(e) => { + // assert_eq!( + // e, + // client + // .assemble_layers(&mut locked, AssemblyMode::Simple) + // .await + // .unwrap_err() + // .to_string(), + // "{}", + // tc.name + // ) + // } + // None => { + // assert_eq!( + // tc.expected_layer_count, + // client + // .assemble_layers(&mut locked, AssemblyMode::Simple) + // .await + // .unwrap() + // .len(), + // "{}", + // tc.name + // ) + // } + // } + // } + // } fn annotatable_app() -> LockedApp { let mut meta_builder = spin_locked_app::values::ValuesMapBuilder::new(); diff --git a/crates/outbound-http/Cargo.toml b/crates/outbound-http/Cargo.toml deleted file mode 100644 index 73617a08c9..0000000000 --- a/crates/outbound-http/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "outbound-http" -version = { workspace = true } -authors = { workspace = true } -edition = { workspace = true } - -[lib] -doctest = false - -[dependencies] -anyhow = "1.0" -http = "0.2" -reqwest = { version = "0.11", features = ["gzip"] } -spin-app = { path = "../app", optional = true } -spin-core = { path = "../core", optional = true } -spin-expressions = { path = "../expressions", optional = true } -spin-locked-app = { path = "../locked-app" } -spin-outbound-networking = { path = "../outbound-networking" } -spin-world = { path = "../world", optional = true } -spin-telemetry = { path = "../telemetry" } -terminal = { path = "../terminal" } -tracing = { workspace = true } -url = "2.2.1" - -[features] -default = ["runtime"] -runtime = ["dep:spin-app", "dep:spin-core", "dep:spin-expressions", "dep:spin-world"] - -[lints] -workspace = true diff --git a/crates/outbound-http/src/host_component.rs b/crates/outbound-http/src/host_component.rs deleted file mode 100644 index 0fd60a05d9..0000000000 --- a/crates/outbound-http/src/host_component.rs +++ /dev/null @@ -1,41 +0,0 @@ -use anyhow::Result; - -use spin_app::DynamicHostComponent; -use spin_core::{Data, HostComponent, Linker}; -use spin_outbound_networking::{AllowedHostsConfig, ALLOWED_HOSTS_KEY}; 
-use spin_world::v1::http; - -use crate::host_impl::OutboundHttp; - -pub struct OutboundHttpComponent { - pub resolver: spin_expressions::SharedPreparedResolver, -} - -impl HostComponent for OutboundHttpComponent { - type Data = OutboundHttp; - - fn add_to_linker( - linker: &mut Linker, - get: impl Fn(&mut Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> Result<()> { - http::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - Default::default() - } -} - -impl DynamicHostComponent for OutboundHttpComponent { - fn update_data( - &self, - data: &mut Self::Data, - component: &spin_app::AppComponent, - ) -> anyhow::Result<()> { - let hosts = component - .get_metadata(ALLOWED_HOSTS_KEY)? - .unwrap_or_default(); - data.allowed_hosts = AllowedHostsConfig::parse(&hosts, self.resolver.get().unwrap())?; - Ok(()) - } -} diff --git a/crates/outbound-http/src/host_impl.rs b/crates/outbound-http/src/host_impl.rs deleted file mode 100644 index cffdc5cc19..0000000000 --- a/crates/outbound-http/src/host_impl.rs +++ /dev/null @@ -1,208 +0,0 @@ -use anyhow::Result; -use http::{HeaderMap, Uri}; -use reqwest::Client; -use spin_core::async_trait; -use spin_outbound_networking::{AllowedHostsConfig, OutboundUrl}; -use spin_world::v1::{ - http as outbound_http, - http_types::{self, Headers, HttpError, Method, Request, Response}, -}; -use tracing::{field::Empty, instrument, Level}; - -/// A very simple implementation for outbound HTTP requests. -#[derive(Default, Clone)] -pub struct OutboundHttp { - /// List of hosts guest modules are allowed to make requests to. - pub allowed_hosts: AllowedHostsConfig, - /// During an incoming HTTP request, origin is set to the host of that incoming HTTP request. - /// This is used to direct outbound requests to the same host when allowed. - pub origin: String, - client: Option, -} - -impl OutboundHttp { - /// Check if guest module is allowed to send request to URL, based on the list of - /// allowed hosts defined by the runtime. If the url passed in is a relative path, - /// only allow if allowed_hosts contains `self`. If the list of allowed hosts contains - /// `insecure:allow-all`, then all hosts are allowed. - /// If `None` is passed, the guest module is not allowed to send the request. 
- fn is_allowed(&mut self, url: &str) -> Result { - if url.starts_with('/') { - return Ok(self.allowed_hosts.allows_relative_url(&["http", "https"])); - } - - Ok(OutboundUrl::parse(url, "https") - .map(|u| self.allowed_hosts.allows(&u)) - .unwrap_or_default()) - } -} - -#[async_trait] -impl outbound_http::Host for OutboundHttp { - #[instrument(name = "spin_outbound_http.send_request", skip_all, err(level = Level::INFO), - fields(otel.kind = "client", url.full = Empty, http.request.method = Empty, - http.response.status_code = Empty, otel.name = Empty, server.address = Empty, server.port = Empty))] - async fn send_request(&mut self, req: Request) -> Result { - let current_span = tracing::Span::current(); - let method = format!("{:?}", req.method) - .strip_prefix("Method::") - .unwrap_or("_OTHER") - .to_uppercase(); - current_span.record("otel.name", method.clone()); - current_span.record("url.full", req.uri.clone()); - current_span.record("http.request.method", method); - if let Ok(uri) = req.uri.parse::() { - if let Some(authority) = uri.authority() { - current_span.record("server.address", authority.host()); - if let Some(port) = authority.port() { - current_span.record("server.port", port.as_u16()); - } - } - } - - tracing::trace!("Attempting to send outbound HTTP request to {}", req.uri); - if !self - .is_allowed(&req.uri) - .map_err(|_| HttpError::RuntimeError)? - { - tracing::info!("Destination not allowed: {}", req.uri); - if let Some((scheme, host_and_port)) = scheme_host_and_port(&req.uri) { - terminal::warn!("A component tried to make a HTTP request to non-allowed host '{host_and_port}'."); - eprintln!("To allow requests, add 'allowed_outbound_hosts = [\"{scheme}://{host_and_port}\"]' to the manifest component section."); - } - return Err(HttpError::DestinationNotAllowed); - } - - let method = method_from(req.method); - - let abs_url = if req.uri.starts_with('/') { - format!("{}{}", self.origin, req.uri) - } else { - req.uri.clone() - }; - - let req_url = reqwest::Url::parse(&abs_url).map_err(|_| HttpError::InvalidUrl)?; - - let mut headers = request_headers(req.headers).map_err(|_| HttpError::RuntimeError)?; - spin_telemetry::inject_trace_context(&mut headers); - let body = req.body.unwrap_or_default().to_vec(); - - if !req.params.is_empty() { - tracing::warn!("HTTP params field is deprecated"); - } - - // Allow reuse of Client's internal connection pool for multiple requests - // in a single component execution - let client = self.client.get_or_insert_with(Default::default); - - let resp = client - .request(method, req_url) - .headers(headers) - .body(body) - .send() - .await - .map_err(log_reqwest_error)?; - tracing::trace!("Returning response from outbound request to {}", req.uri); - current_span.record("http.response.status_code", resp.status().as_u16()); - response_from_reqwest(resp).await - } -} - -impl http_types::Host for OutboundHttp { - fn convert_http_error(&mut self, error: HttpError) -> Result { - Ok(error) - } -} - -fn log_reqwest_error(err: reqwest::Error) -> HttpError { - let error_desc = if err.is_timeout() { - "timeout error" - } else if err.is_connect() { - "connection error" - } else if err.is_body() || err.is_decode() { - "message body error" - } else if err.is_request() { - "request error" - } else { - "error" - }; - tracing::warn!( - "Outbound HTTP {}: URL {}, error detail {:?}", - error_desc, - err.url() - .map(|u| u.to_string()) - .unwrap_or_else(|| "".to_owned()), - err - ); - HttpError::RuntimeError -} - -fn method_from(m: Method) -> http::Method { - 
match m { - Method::Get => http::Method::GET, - Method::Post => http::Method::POST, - Method::Put => http::Method::PUT, - Method::Delete => http::Method::DELETE, - Method::Patch => http::Method::PATCH, - Method::Head => http::Method::HEAD, - Method::Options => http::Method::OPTIONS, - } -} - -async fn response_from_reqwest(res: reqwest::Response) -> Result { - let status = res.status().as_u16(); - let headers = response_headers(res.headers()).map_err(|_| HttpError::RuntimeError)?; - - let body = Some( - res.bytes() - .await - .map_err(|_| HttpError::RuntimeError)? - .to_vec(), - ); - - Ok(Response { - status, - headers, - body, - }) -} - -fn request_headers(h: Headers) -> anyhow::Result { - let mut res = HeaderMap::new(); - for (k, v) in h { - res.insert( - http::header::HeaderName::try_from(k)?, - http::header::HeaderValue::try_from(v)?, - ); - } - Ok(res) -} - -fn response_headers(h: &HeaderMap) -> anyhow::Result>> { - let mut res: Vec<(String, String)> = vec![]; - - for (k, v) in h { - res.push(( - k.to_string(), - std::str::from_utf8(v.as_bytes())?.to_string(), - )); - } - - Ok(Some(res)) -} - -/// Returns both the scheme and the `$HOST:$PORT` for the url string -/// -/// Returns `None` if the url cannot be parsed or if it does not contain a host -fn scheme_host_and_port(url: &str) -> Option<(String, String)> { - url::Url::parse(url).ok().and_then(|u| { - u.host_str().map(|h| { - let mut host = h.to_owned(); - if let Some(p) = u.port() { - use std::fmt::Write; - write!(&mut host, ":{p}").unwrap(); - } - (u.scheme().to_owned(), host) - }) - }) -} diff --git a/crates/outbound-http/src/lib.rs b/crates/outbound-http/src/lib.rs deleted file mode 100644 index 33a726f34b..0000000000 --- a/crates/outbound-http/src/lib.rs +++ /dev/null @@ -1,11 +0,0 @@ -#[cfg(feature = "runtime")] -mod host_component; -#[cfg(feature = "runtime")] -mod host_impl; - -#[cfg(feature = "runtime")] -pub use host_component::OutboundHttpComponent; - -use spin_locked_app::MetadataKey; - -pub const ALLOWED_HTTP_HOSTS_KEY: MetadataKey> = MetadataKey::new("allowed_http_hosts"); diff --git a/crates/outbound-mqtt/Cargo.toml b/crates/outbound-mqtt/Cargo.toml deleted file mode 100644 index 385bdfb117..0000000000 --- a/crates/outbound-mqtt/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "outbound-mqtt" -version = { workspace = true } -authors = { workspace = true } -edition = { workspace = true } - -[lib] -doctest = false - -[dependencies] -anyhow = "1.0" -rumqttc = { version = "0.24", features = ["url"] } -spin-app = { path = "../app" } -spin-core = { path = "../core" } -spin-expressions = { path = "../expressions" } -spin-world = { path = "../world" } -spin-outbound-networking = { path = "../outbound-networking" } -table = { path = "../table" } -tokio = { version = "1", features = ["sync"] } -tracing = { workspace = true } - -[lints] -workspace = true diff --git a/crates/outbound-mqtt/src/host_component.rs b/crates/outbound-mqtt/src/host_component.rs deleted file mode 100644 index df242a54ba..0000000000 --- a/crates/outbound-mqtt/src/host_component.rs +++ /dev/null @@ -1,41 +0,0 @@ -use anyhow::Context; -use spin_app::DynamicHostComponent; -use spin_core::HostComponent; - -use crate::OutboundMqtt; - -pub struct OutboundMqttComponent { - pub resolver: spin_expressions::SharedPreparedResolver, -} - -impl HostComponent for OutboundMqttComponent { - type Data = OutboundMqtt; - fn add_to_linker( - linker: &mut spin_core::Linker, - get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - 
) -> anyhow::Result<()> { - spin_world::v2::mqtt::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - Default::default() - } -} - -impl DynamicHostComponent for OutboundMqttComponent { - fn update_data( - &self, - data: &mut Self::Data, - component: &spin_app::AppComponent, - ) -> anyhow::Result<()> { - let hosts = component - .get_metadata(spin_outbound_networking::ALLOWED_HOSTS_KEY)? - .unwrap_or_default(); - data.allowed_hosts = spin_outbound_networking::AllowedHostsConfig::parse( - &hosts, - self.resolver.get().unwrap(), - ) - .context("`allowed_outbound_hosts` contained an invalid url")?; - Ok(()) - } -} diff --git a/crates/outbound-mqtt/src/lib.rs b/crates/outbound-mqtt/src/lib.rs deleted file mode 100644 index b17d110556..0000000000 --- a/crates/outbound-mqtt/src/lib.rs +++ /dev/null @@ -1,158 +0,0 @@ -mod host_component; - -use std::time::Duration; - -use anyhow::Result; -use rumqttc::{AsyncClient, Event, Incoming, Outgoing, QoS}; -use spin_core::{async_trait, wasmtime::component::Resource}; -use spin_world::v2::mqtt::{self as v2, Connection as MqttConnection, Error, Qos}; - -pub use host_component::OutboundMqttComponent; -use tracing::{instrument, Level}; - -pub struct OutboundMqtt { - allowed_hosts: spin_outbound_networking::AllowedHostsConfig, - connections: table::Table<(AsyncClient, rumqttc::EventLoop)>, -} - -impl Default for OutboundMqtt { - fn default() -> Self { - Self { - allowed_hosts: Default::default(), - connections: table::Table::new(1024), - } - } -} - -const MQTT_CHANNEL_CAP: usize = 1000; - -impl OutboundMqtt { - fn is_address_allowed(&self, address: &str) -> bool { - spin_outbound_networking::check_url(address, "mqtt", &self.allowed_hosts) - } - - async fn establish_connection( - &mut self, - address: String, - username: String, - password: String, - keep_alive_interval: Duration, - ) -> Result, Error> { - let mut conn_opts = rumqttc::MqttOptions::parse_url(address).map_err(|e| { - tracing::error!("MQTT URL parse error: {e:?}"); - Error::InvalidAddress - })?; - conn_opts.set_credentials(username, password); - conn_opts.set_keep_alive(keep_alive_interval); - let (client, event_loop) = AsyncClient::new(conn_opts, MQTT_CHANNEL_CAP); - - self.connections - .push((client, event_loop)) - .map(Resource::new_own) - .map_err(|_| Error::TooManyConnections) - } -} - -impl v2::Host for OutboundMqtt { - fn convert_error(&mut self, error: Error) -> Result { - Ok(error) - } -} - -#[async_trait] -impl v2::HostConnection for OutboundMqtt { - #[instrument(name = "spin_outbound_mqtt.open_connection", skip(self, password), err(level = Level::INFO), fields(otel.kind = "client"))] - async fn open( - &mut self, - address: String, - username: String, - password: String, - keep_alive_interval: u64, - ) -> Result, Error> { - if !self.is_address_allowed(&address) { - return Err(v2::Error::ConnectionFailed(format!( - "address {address} is not permitted" - ))); - } - self.establish_connection( - address, - username, - password, - Duration::from_secs(keep_alive_interval), - ) - .await - } - - /// Publish a message to the MQTT broker. - /// - /// OTEL trace propagation is not directly supported in MQTT V3. You will need to embed the - /// current trace context into the payload yourself. - /// https://w3c.github.io/trace-context-mqtt/#mqtt-v3-recommendation. 
- #[instrument(name = "spin_outbound_mqtt.publish", skip(self, connection, payload), err(level = Level::INFO), - fields(otel.kind = "producer", otel.name = format!("{} publish", topic), messaging.operation = "publish", - messaging.system = "mqtt"))] - async fn publish( - &mut self, - connection: Resource, - topic: String, - payload: Vec, - qos: Qos, - ) -> Result<(), Error> { - let (client, eventloop) = self.get_conn(connection).await.map_err(other_error)?; - let qos = convert_to_mqtt_qos_value(qos); - - // Message published to EventLoop (not MQTT Broker) - client - .publish_bytes(topic, qos, false, payload.into()) - .await - .map_err(other_error)?; - - // Poll event loop until outgoing publish event is iterated over to send the message to MQTT broker or capture/throw error. - // We may revisit this later to manage long running connections, high throughput use cases and their issues in the connection pool. - loop { - let event = eventloop - .poll() - .await - .map_err(|err| v2::Error::ConnectionFailed(err.to_string()))?; - - match (qos, event) { - (QoS::AtMostOnce, Event::Outgoing(Outgoing::Publish(_))) - | (QoS::AtLeastOnce, Event::Incoming(Incoming::PubAck(_))) - | (QoS::ExactlyOnce, Event::Incoming(Incoming::PubComp(_))) => break, - - (_, _) => continue, - } - } - Ok(()) - } - - fn drop(&mut self, connection: Resource) -> anyhow::Result<()> { - self.connections.remove(connection.rep()); - Ok(()) - } -} - -fn convert_to_mqtt_qos_value(qos: Qos) -> rumqttc::QoS { - match qos { - Qos::AtMostOnce => rumqttc::QoS::AtMostOnce, - Qos::AtLeastOnce => rumqttc::QoS::AtLeastOnce, - Qos::ExactlyOnce => rumqttc::QoS::ExactlyOnce, - } -} - -fn other_error(e: impl std::fmt::Display) -> Error { - Error::Other(e.to_string()) -} - -impl OutboundMqtt { - async fn get_conn( - &mut self, - connection: Resource, - ) -> Result<&mut (AsyncClient, rumqttc::EventLoop), Error> { - self.connections - .get_mut(connection.rep()) - .ok_or(Error::Other( - "could not find connection for resource".into(), - )) - } -} diff --git a/crates/outbound-networking/src/lib.rs b/crates/outbound-networking/src/lib.rs index c2c3095b85..a2be724711 100644 --- a/crates/outbound-networking/src/lib.rs +++ b/crates/outbound-networking/src/lib.rs @@ -8,31 +8,6 @@ pub const ALLOWED_HOSTS_KEY: MetadataKey> = MetadataKey::new("allowe pub const SERVICE_CHAINING_DOMAIN: &str = "spin.internal"; pub const SERVICE_CHAINING_DOMAIN_SUFFIX: &str = ".spin.internal"; -/// Checks address against allowed hosts -/// -/// Emits several warnings -pub fn check_url(url: &str, scheme: &str, allowed_hosts: &AllowedHostsConfig) -> bool { - let Ok(url) = OutboundUrl::parse(url, scheme) else { - terminal::warn!( - "A component tried to make a request to an url that could not be parsed {url}.", - ); - return false; - }; - let is_allowed = allowed_hosts.allows(&url); - - if !is_allowed { - terminal::warn!("A component tried to make a request to non-allowed url '{url}'."); - let (scheme, host, port) = (url.scheme, url.host, url.port); - let msg = if let Some(port) = port { - format!("`allowed_outbound_hosts = [\"{scheme}://{host}:{port}\"]`") - } else { - format!("`allowed_outbound_hosts = [\"{scheme}://{host}:$PORT\"]` (where $PORT is the correct port number)") - }; - eprintln!("To allow requests, add {msg} to the manifest component section."); - } - is_allowed -} - /// An address is a url-like string that contains a host, a port, and an optional scheme #[derive(Eq, Debug, Clone)] pub struct AllowedHostConfig { @@ -200,7 +175,7 @@ impl HostConfig { 
HostConfig::List(l) => l.iter().any(|h| h.as_str() == host), HostConfig::ToSelf => false, HostConfig::Cidr(c) => { - let Ok(ip) = host.parse::() else { + let Ok(ip) = host.parse::() else { return false; }; c.contains(&ip) @@ -431,6 +406,18 @@ impl OutboundUrl { original, }) } + + pub fn scheme(&self) -> &str { + &self.scheme + } + + pub fn authority(&self) -> String { + if let Some(port) = self.port { + format!("{}:{port}", self.host) + } else { + self.host.clone() + } + } } impl std::fmt::Display for OutboundUrl { @@ -789,4 +776,11 @@ mod test { .allows(&OutboundUrl::parse("mysql://user%3Apass%23word@xyz.com", "mysql").unwrap())); assert!(allowed.allows(&OutboundUrl::parse("user%3Apass%23word@xyz.com", "mysql").unwrap())); } + + #[test] + fn test_cidr() { + let allowed = + AllowedHostsConfig::parse(&["*://127.0.0.1/24:63551"], &dummy_resolver()).unwrap(); + assert!(allowed.allows(&OutboundUrl::parse("tcp://127.0.0.1:63551", "tcp").unwrap())); + } } diff --git a/crates/outbound-pg/src/lib.rs b/crates/outbound-pg/src/lib.rs deleted file mode 100644 index 23f01c1917..0000000000 --- a/crates/outbound-pg/src/lib.rs +++ /dev/null @@ -1,461 +0,0 @@ -use anyhow::{anyhow, Context, Result}; -use native_tls::TlsConnector; -use postgres_native_tls::MakeTlsConnector; -use spin_app::DynamicHostComponent; -use spin_core::{async_trait, wasmtime::component::Resource, HostComponent}; -use spin_world::v1::postgres as v1; -use spin_world::v1::rdbms_types as v1_types; -use spin_world::v2::postgres::{self as v2, Connection}; -use spin_world::v2::rdbms_types; -use spin_world::v2::rdbms_types::{Column, DbDataType, DbValue, ParameterValue, RowSet}; -use tokio_postgres::{ - config::SslMode, - types::{ToSql, Type}, - Client, NoTls, Row, Socket, -}; -use tracing::instrument; -use tracing::Level; - -pub struct OutboundPgComponent { - pub resolver: spin_expressions::SharedPreparedResolver, -} - -/// A simple implementation to support outbound pg connection -#[derive(Default)] -pub struct OutboundPg { - allowed_hosts: spin_outbound_networking::AllowedHostsConfig, - pub connections: table::Table, -} - -impl OutboundPg { - async fn open_connection(&mut self, address: &str) -> Result, v2::Error> { - self.connections - .push( - build_client(address) - .await - .map_err(|e| v2::Error::ConnectionFailed(format!("{e:?}")))?, - ) - .map_err(|_| v2::Error::ConnectionFailed("too many connections".into())) - .map(Resource::new_own) - } - - async fn get_client(&mut self, connection: Resource) -> Result<&Client, v2::Error> { - self.connections - .get(connection.rep()) - .ok_or_else(|| v2::Error::ConnectionFailed("no connection found".into())) - } - - fn is_address_allowed(&self, address: &str) -> bool { - let Ok(config) = address.parse::() else { - return false; - }; - for (i, host) in config.get_hosts().iter().enumerate() { - match host { - tokio_postgres::config::Host::Tcp(address) => { - let ports = config.get_ports(); - // The port we use is either: - // * The port at the same index as the host - // * The first port if there is only one port - let port = - ports - .get(i) - .or_else(|| if ports.len() == 1 { ports.get(1) } else { None }); - let port_str = port.map(|p| format!(":{}", p)).unwrap_or_default(); - let url = format!("{address}{port_str}"); - if !spin_outbound_networking::check_url(&url, "postgres", &self.allowed_hosts) { - return false; - } - } - #[cfg(unix)] - tokio_postgres::config::Host::Unix(_) => return false, - } - } - true - } -} - -impl HostComponent for OutboundPgComponent { - type Data = OutboundPg; - - fn 
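With `check_url` removed from `outbound-networking` in the hunk above, callers now perform the check themselves, and the new `scheme()`/`authority()` accessors make it possible to rebuild the user-facing hint. A hedged sketch; the function itself is hypothetical, while `OutboundUrl::parse`, `AllowedHostsConfig::allows`, and the accessors are taken from this diff:

```rust
use spin_outbound_networking::{AllowedHostsConfig, OutboundUrl};

fn is_url_allowed(url: &str, scheme: &str, allowed: &AllowedHostsConfig) -> bool {
    let Ok(url) = OutboundUrl::parse(url, scheme) else {
        return false;
    };
    let is_allowed = allowed.allows(&url);
    if !is_allowed {
        eprintln!(
            "To allow requests, add `allowed_outbound_hosts = [\"{}://{}\"]` to the manifest component section.",
            url.scheme(),
            url.authority()
        );
    }
    is_allowed
}
```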
add_to_linker( - linker: &mut spin_core::Linker, - get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> anyhow::Result<()> { - v1::add_to_linker(linker, get)?; - v2::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - Default::default() - } -} - -impl DynamicHostComponent for OutboundPgComponent { - fn update_data( - &self, - data: &mut Self::Data, - component: &spin_app::AppComponent, - ) -> anyhow::Result<()> { - let hosts = component - .get_metadata(spin_outbound_networking::ALLOWED_HOSTS_KEY)? - .unwrap_or_default(); - data.allowed_hosts = spin_outbound_networking::AllowedHostsConfig::parse( - &hosts, - self.resolver.get().unwrap(), - ) - .context("`allowed_outbound_hosts` contained an invalid url")?; - Ok(()) - } -} - -#[async_trait] -impl v2::Host for OutboundPg {} - -#[async_trait] -impl v2::HostConnection for OutboundPg { - #[instrument(name = "spin_outbound_pg.open_connection", skip(self), err(level = Level::INFO), fields(otel.kind = "client", db.system = "postgresql"))] - async fn open(&mut self, address: String) -> Result, v2::Error> { - if !self.is_address_allowed(&address) { - return Err(v2::Error::ConnectionFailed(format!( - "address {address} is not permitted" - ))); - } - self.open_connection(&address).await - } - - #[instrument(name = "spin_outbound_pg.execute", skip(self, connection), err(level = Level::INFO), fields(otel.kind = "client", db.system = "postgresql", otel.name = statement))] - async fn execute( - &mut self, - connection: Resource, - statement: String, - params: Vec, - ) -> Result { - let params: Vec<&(dyn ToSql + Sync)> = params - .iter() - .map(to_sql_parameter) - .collect::>>() - .map_err(|e| v2::Error::ValueConversionFailed(format!("{:?}", e)))?; - - let nrow = self - .get_client(connection) - .await? - .execute(&statement, params.as_slice()) - .await - .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e)))?; - - Ok(nrow) - } - - #[instrument(name = "spin_outbound_pg.query", skip(self, connection), err(level = Level::INFO), fields(otel.kind = "client", db.system = "postgresql", otel.name = statement))] - async fn query( - &mut self, - connection: Resource, - statement: String, - params: Vec, - ) -> Result { - let params: Vec<&(dyn ToSql + Sync)> = params - .iter() - .map(to_sql_parameter) - .collect::>>() - .map_err(|e| v2::Error::BadParameter(format!("{:?}", e)))?; - - let results = self - .get_client(connection) - .await? 
- .query(&statement, params.as_slice()) - .await - .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e)))?; - - if results.is_empty() { - return Ok(RowSet { - columns: vec![], - rows: vec![], - }); - } - - let columns = infer_columns(&results[0]); - let rows = results - .iter() - .map(convert_row) - .collect::, _>>() - .map_err(|e| v2::Error::QueryFailed(format!("{:?}", e)))?; - - Ok(RowSet { columns, rows }) - } - - fn drop(&mut self, connection: Resource) -> anyhow::Result<()> { - self.connections.remove(connection.rep()); - Ok(()) - } -} - -impl rdbms_types::Host for OutboundPg { - fn convert_error(&mut self, error: v2::Error) -> Result { - Ok(error) - } -} - -fn to_sql_parameter(value: &ParameterValue) -> anyhow::Result<&(dyn ToSql + Sync)> { - match value { - ParameterValue::Boolean(v) => Ok(v), - ParameterValue::Int32(v) => Ok(v), - ParameterValue::Int64(v) => Ok(v), - ParameterValue::Int8(v) => Ok(v), - ParameterValue::Int16(v) => Ok(v), - ParameterValue::Floating32(v) => Ok(v), - ParameterValue::Floating64(v) => Ok(v), - ParameterValue::Uint8(_) - | ParameterValue::Uint16(_) - | ParameterValue::Uint32(_) - | ParameterValue::Uint64(_) => Err(anyhow!("Postgres does not support unsigned integers")), - ParameterValue::Str(v) => Ok(v), - ParameterValue::Binary(v) => Ok(v), - ParameterValue::DbNull => Ok(&PgNull), - } -} - -fn infer_columns(row: &Row) -> Vec { - let mut result = Vec::with_capacity(row.len()); - for index in 0..row.len() { - result.push(infer_column(row, index)); - } - result -} - -fn infer_column(row: &Row, index: usize) -> Column { - let column = &row.columns()[index]; - let name = column.name().to_owned(); - let data_type = convert_data_type(column.type_()); - Column { name, data_type } -} - -fn convert_data_type(pg_type: &Type) -> DbDataType { - match *pg_type { - Type::BOOL => DbDataType::Boolean, - Type::BYTEA => DbDataType::Binary, - Type::FLOAT4 => DbDataType::Floating32, - Type::FLOAT8 => DbDataType::Floating64, - Type::INT2 => DbDataType::Int16, - Type::INT4 => DbDataType::Int32, - Type::INT8 => DbDataType::Int64, - Type::TEXT | Type::VARCHAR | Type::BPCHAR => DbDataType::Str, - _ => { - tracing::debug!("Couldn't convert Postgres type {} to WIT", pg_type.name(),); - DbDataType::Other - } - } -} - -fn convert_row(row: &Row) -> Result, tokio_postgres::Error> { - let mut result = Vec::with_capacity(row.len()); - for index in 0..row.len() { - result.push(convert_entry(row, index)?); - } - Ok(result) -} - -fn convert_entry(row: &Row, index: usize) -> Result { - let column = &row.columns()[index]; - let value = match column.type_() { - &Type::BOOL => { - let value: Option = row.try_get(index)?; - match value { - Some(v) => DbValue::Boolean(v), - None => DbValue::DbNull, - } - } - &Type::BYTEA => { - let value: Option> = row.try_get(index)?; - match value { - Some(v) => DbValue::Binary(v), - None => DbValue::DbNull, - } - } - &Type::FLOAT4 => { - let value: Option = row.try_get(index)?; - match value { - Some(v) => DbValue::Floating32(v), - None => DbValue::DbNull, - } - } - &Type::FLOAT8 => { - let value: Option = row.try_get(index)?; - match value { - Some(v) => DbValue::Floating64(v), - None => DbValue::DbNull, - } - } - &Type::INT2 => { - let value: Option = row.try_get(index)?; - match value { - Some(v) => DbValue::Int16(v), - None => DbValue::DbNull, - } - } - &Type::INT4 => { - let value: Option = row.try_get(index)?; - match value { - Some(v) => DbValue::Int32(v), - None => DbValue::DbNull, - } - } - &Type::INT8 => { - let value: Option = row.try_get(index)?; 
- match value { - Some(v) => DbValue::Int64(v), - None => DbValue::DbNull, - } - } - &Type::TEXT | &Type::VARCHAR | &Type::BPCHAR => { - let value: Option = row.try_get(index)?; - match value { - Some(v) => DbValue::Str(v), - None => DbValue::DbNull, - } - } - t => { - tracing::debug!( - "Couldn't convert Postgres type {} in column {}", - t.name(), - column.name() - ); - DbValue::Unsupported - } - }; - Ok(value) -} - -async fn build_client(address: &str) -> anyhow::Result { - let config = address.parse::()?; - - tracing::debug!("Build new connection: {}", address); - - if config.get_ssl_mode() == SslMode::Disable { - connect(config).await - } else { - connect_tls(config).await - } -} - -async fn connect(config: tokio_postgres::Config) -> anyhow::Result { - let (client, connection) = config.connect(NoTls).await?; - - spawn(connection); - - Ok(client) -} - -async fn connect_tls(config: tokio_postgres::Config) -> anyhow::Result { - let builder = TlsConnector::builder(); - let connector = MakeTlsConnector::new(builder.build()?); - let (client, connection) = config.connect(connector).await?; - - spawn(connection); - - Ok(client) -} - -fn spawn(connection: tokio_postgres::Connection) -where - T: tokio_postgres::tls::TlsStream + std::marker::Unpin + std::marker::Send + 'static, -{ - tokio::spawn(async move { - if let Err(e) = connection.await { - tracing::error!("Postgres connection error: {}", e); - } - }); -} - -/// Although the Postgres crate converts Rust Option::None to Postgres NULL, -/// it enforces the type of the Option as it does so. (For example, trying to -/// pass an Option::::None to a VARCHAR column fails conversion.) As we -/// do not know expected column types, we instead use a "neutral" custom type -/// which allows conversion to any type but always tells the Postgres crate to -/// treat it as a SQL NULL. -struct PgNull; - -impl ToSql for PgNull { - fn to_sql( - &self, - _ty: &Type, - _out: &mut tokio_postgres::types::private::BytesMut, - ) -> Result> - where - Self: Sized, - { - Ok(tokio_postgres::types::IsNull::Yes) - } - - fn accepts(_ty: &Type) -> bool - where - Self: Sized, - { - true - } - - fn to_sql_checked( - &self, - _ty: &Type, - _out: &mut tokio_postgres::types::private::BytesMut, - ) -> Result> { - Ok(tokio_postgres::types::IsNull::Yes) - } -} - -impl std::fmt::Debug for PgNull { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("NULL").finish() - } -} - -/// Delegate a function call to the v2::HostConnection implementation -macro_rules! 
delegate { - ($self:ident.$name:ident($address:expr, $($arg:expr),*)) => {{ - if !$self.is_address_allowed(&$address) { - return Err(v1::PgError::ConnectionFailed(format!( - "address {} is not permitted", $address - ))); - } - let connection = match $self.open_connection(&$address).await { - Ok(c) => c, - Err(e) => return Err(e.into()), - }; - ::$name($self, connection, $($arg),*) - .await - .map_err(|e| e.into()) - }}; -} - -#[async_trait] -impl v1::Host for OutboundPg { - async fn execute( - &mut self, - address: String, - statement: String, - params: Vec, - ) -> Result { - delegate!(self.execute( - address, - statement, - params.into_iter().map(Into::into).collect() - )) - } - - async fn query( - &mut self, - address: String, - statement: String, - params: Vec, - ) -> Result { - delegate!(self.query( - address, - statement, - params.into_iter().map(Into::into).collect() - )) - .map(Into::into) - } - - fn convert_pg_error(&mut self, error: v1::PgError) -> Result { - Ok(error) - } -} diff --git a/crates/outbound-redis/Cargo.toml b/crates/outbound-redis/Cargo.toml deleted file mode 100644 index 83080c6071..0000000000 --- a/crates/outbound-redis/Cargo.toml +++ /dev/null @@ -1,23 +0,0 @@ -[package] -name = "outbound-redis" -version = { workspace = true } -authors = { workspace = true } -edition = { workspace = true } - -[lib] -doctest = false - -[dependencies] -anyhow = "1.0" -redis = { version = "0.21", features = ["tokio-comp", "tokio-native-tls-comp"] } -spin-app = { path = "../app" } -spin-core = { path = "../core" } -spin-expressions = { path = "../expressions" } -spin-world = { path = "../world" } -spin-outbound-networking = { path = "../outbound-networking" } -table = { path = "../table" } -tokio = { version = "1", features = ["sync"] } -tracing = { workspace = true } - -[lints] -workspace = true diff --git a/crates/outbound-redis/src/host_component.rs b/crates/outbound-redis/src/host_component.rs deleted file mode 100644 index 464e1712d2..0000000000 --- a/crates/outbound-redis/src/host_component.rs +++ /dev/null @@ -1,42 +0,0 @@ -use anyhow::Context; -use spin_app::DynamicHostComponent; -use spin_core::HostComponent; - -use crate::OutboundRedis; - -pub struct OutboundRedisComponent { - pub resolver: spin_expressions::SharedPreparedResolver, -} - -impl HostComponent for OutboundRedisComponent { - type Data = OutboundRedis; - fn add_to_linker( - linker: &mut spin_core::Linker, - get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> anyhow::Result<()> { - spin_world::v1::redis::add_to_linker(linker, get)?; - spin_world::v2::redis::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - Default::default() - } -} - -impl DynamicHostComponent for OutboundRedisComponent { - fn update_data( - &self, - data: &mut Self::Data, - component: &spin_app::AppComponent, - ) -> anyhow::Result<()> { - let hosts = component - .get_metadata(spin_outbound_networking::ALLOWED_HOSTS_KEY)? 
- .unwrap_or_default(); - data.allowed_hosts = spin_outbound_networking::AllowedHostsConfig::parse( - &hosts, - self.resolver.get().unwrap(), - ) - .context("`allowed_outbound_hosts` contained an invalid url")?; - Ok(()) - } -} diff --git a/crates/runtime-config/Cargo.toml b/crates/runtime-config/Cargo.toml new file mode 100644 index 0000000000..267dfdec79 --- /dev/null +++ b/crates/runtime-config/Cargo.toml @@ -0,0 +1,31 @@ +[package] +name = "spin-runtime-config" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +rust-version.workspace = true + +[dependencies] +anyhow = { workspace = true } +spin-factors = { path = "../factors" } +spin-factor-key-value = { path = "../factor-key-value" } +spin-factor-key-value-spin = { path = "../factor-key-value-spin" } +spin-factor-key-value-redis = { path = "../factor-key-value-redis" } +spin-factor-key-value-azure = { path = "../factor-key-value-azure" } +spin-factor-llm = { path = "../factor-llm" } +spin-factor-outbound-http = { path = "../factor-outbound-http" } +spin-factor-outbound-mqtt = { path = "../factor-outbound-mqtt" } +spin-factor-outbound-networking = { path = "../factor-outbound-networking" } +spin-factor-outbound-pg = { path = "../factor-outbound-pg" } +spin-factor-outbound-mysql = { path = "../factor-outbound-mysql" } +spin-factor-outbound-redis = { path = "../factor-outbound-redis" } +spin-factor-sqlite = { path = "../factor-sqlite" } +spin-factor-variables = { path = "../factor-variables" } +spin-factor-wasi = { path = "../factor-wasi" } +toml = "0.8" + +[lints] +workspace = true diff --git a/crates/runtime-config/src/lib.rs b/crates/runtime-config/src/lib.rs new file mode 100644 index 0000000000..362812c093 --- /dev/null +++ b/crates/runtime-config/src/lib.rs @@ -0,0 +1,457 @@ +use std::path::{Path, PathBuf}; + +use anyhow::Context as _; +use spin_factor_key_value::runtime_config::spin::{self as key_value}; +use spin_factor_key_value::{DefaultLabelResolver as _, KeyValueFactor}; +use spin_factor_key_value_spin::{SpinKeyValueRuntimeConfig, SpinKeyValueStore}; +use spin_factor_llm::{spin as llm, LlmFactor}; +use spin_factor_outbound_http::OutboundHttpFactor; +use spin_factor_outbound_mqtt::OutboundMqttFactor; +use spin_factor_outbound_mysql::OutboundMysqlFactor; +use spin_factor_outbound_networking::runtime_config::spin::SpinTlsRuntimeConfig; +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factor_outbound_pg::OutboundPgFactor; +use spin_factor_outbound_redis::OutboundRedisFactor; +use spin_factor_sqlite::runtime_config::spin as sqlite; +use spin_factor_sqlite::SqliteFactor; +use spin_factor_variables::{spin_cli as variables, VariablesFactor}; +use spin_factor_wasi::WasiFactor; +use spin_factors::runtime_config::toml::GetTomlValue as _; +use spin_factors::{ + runtime_config::toml::TomlKeyTracker, FactorRuntimeConfigSource, RuntimeConfigSourceFinalizer, +}; + +/// The default state directory for the trigger. +pub const DEFAULT_STATE_DIR: &str = ".spin"; + +/// A runtime configuration which has been resolved from a runtime config source. +/// +/// Includes other pieces of configuration that are used to resolve the runtime configuration. +pub struct ResolvedRuntimeConfig { + /// The resolved runtime configuration. + pub runtime_config: T, + /// The resolver used to resolve key-value stores from runtime configuration. 
+ pub key_value_resolver: key_value::RuntimeConfigResolver, + /// The resolver used to resolve sqlite databases from runtime configuration. + pub sqlite_resolver: sqlite::RuntimeConfigResolver, + /// The fully resolved state directory. + /// + /// `None` is used for an "unset" state directory which each factor will treat differently. + pub state_dir: Option, + /// The fully resolved log directory. + /// + /// `None` is used for an "unset" log directory. + pub log_dir: Option, +} + +impl ResolvedRuntimeConfig +where + T: for<'a, 'b> TryFrom>, + for<'a, 'b> >>::Error: Into, +{ + /// Creates a new resolved runtime configuration from an optional runtime config source TOML file. + pub fn from_optional_file( + runtime_config_path: Option<&Path>, + local_app_dir: Option, + provided_state_dir: UserProvidedPath, + provided_log_dir: UserProvidedPath, + use_gpu: bool, + ) -> anyhow::Result { + match runtime_config_path { + Some(runtime_config_path) => Self::from_file( + runtime_config_path, + local_app_dir, + provided_state_dir, + provided_log_dir, + use_gpu, + ), + None => Self::new( + Default::default(), + None, + local_app_dir, + provided_state_dir, + provided_log_dir, + use_gpu, + ), + } + } + + /// Creates a new resolved runtime configuration from a runtime config source TOML file. + /// + /// `provided_state_dir` is the explicitly provided state directory, if any. + pub fn from_file( + runtime_config_path: &Path, + local_app_dir: Option, + provided_state_dir: UserProvidedPath, + provided_log_dir: UserProvidedPath, + use_gpu: bool, + ) -> anyhow::Result { + let file = std::fs::read_to_string(runtime_config_path).with_context(|| { + format!( + "failed to read runtime config file '{}'", + runtime_config_path.display() + ) + })?; + let toml = toml::from_str(&file).with_context(|| { + format!( + "failed to parse runtime config file '{}' as toml", + runtime_config_path.display() + ) + })?; + + Self::new( + toml, + Some(runtime_config_path), + local_app_dir, + provided_state_dir, + provided_log_dir, + use_gpu, + ) + } + + /// Creates a new resolved runtime configuration from a TOML table. + pub fn new( + toml: toml::Table, + runtime_config_path: Option<&Path>, + local_app_dir: Option, + provided_state_dir: UserProvidedPath, + provided_log_dir: UserProvidedPath, + use_gpu: bool, + ) -> anyhow::Result { + let toml_resolver = + TomlResolver::new(&toml, local_app_dir, provided_state_dir, provided_log_dir); + let tls_resolver = runtime_config_path.map(SpinTlsRuntimeConfig::new); + let key_value_config_resolver = key_value_config_resolver(toml_resolver.state_dir()?); + let sqlite_config_resolver = sqlite_config_resolver(toml_resolver.state_dir()?) + .context("failed to resolve sqlite runtime config")?; + + let source = TomlRuntimeConfigSource::new( + toml_resolver.clone(), + &key_value_config_resolver, + tls_resolver.as_ref(), + &sqlite_config_resolver, + use_gpu, + ); + let runtime_config: T = source.try_into().map_err(Into::into)?; + + Ok(Self { + runtime_config, + key_value_resolver: key_value_config_resolver, + sqlite_resolver: sqlite_config_resolver, + state_dir: toml_resolver.state_dir()?, + log_dir: toml_resolver.log_dir()?, + }) + } + + /// Set initial key-value pairs supplied in the CLI arguments in the default store. 
+    pub async fn set_initial_key_values(
+        &self,
+        initial_key_values: impl IntoIterator<Item = &(String, String)>,
+    ) -> anyhow::Result<()> {
+        // We don't want to unnecessarily interact with the default store
+        let mut iter = initial_key_values.into_iter().peekable();
+        if iter.peek().is_none() {
+            return Ok(());
+        }
+
+        let store = self
+            .key_value_resolver
+            .default(DEFAULT_KEY_VALUE_STORE_LABEL)
+            .expect("trigger was misconfigured and lacks a default store")
+            .get(DEFAULT_KEY_VALUE_STORE_LABEL)
+            .await
+            .expect("trigger was misconfigured and lacks a default store");
+        for (key, value) in iter {
+            store
+                .set(key, value.as_bytes())
+                .await
+                .context("failed to set key-value pair")?;
+        }
+        Ok(())
+    }
+
+    /// The fully resolved state directory.
+    pub fn state_dir(&self) -> Option<PathBuf> {
+        self.state_dir.clone()
+    }
+
+    /// The fully resolved log directory.
+    pub fn log_dir(&self) -> Option<PathBuf> {
+        self.log_dir.clone()
+    }
+}
+
+#[derive(Clone, Debug)]
+/// Resolves runtime configuration from a TOML file.
+pub struct TomlResolver<'a> {
+    table: TomlKeyTracker<'a>,
+    /// The local app directory.
+    local_app_dir: Option<PathBuf>,
+    /// Explicitly provided state directory.
+    state_dir: UserProvidedPath,
+    /// Explicitly provided log directory.
+    log_dir: UserProvidedPath,
+}
+
+impl<'a> TomlResolver<'a> {
+    /// Create a new TOML resolver.
+    pub fn new(
+        table: &'a toml::Table,
+        local_app_dir: Option<PathBuf>,
+        state_dir: UserProvidedPath,
+        log_dir: UserProvidedPath,
+    ) -> Self {
+        Self {
+            table: TomlKeyTracker::new(table),
+            local_app_dir,
+            state_dir,
+            log_dir,
+        }
+    }
+
+    /// Get the configured state_directory.
+    ///
+    /// Errors if the path cannot be converted to an absolute path.
+    pub fn state_dir(&self) -> std::io::Result<Option<PathBuf>> {
+        let mut state_dir = self.state_dir.clone();
+        // If the state_dir is not explicitly provided, check the toml.
+        if matches!(state_dir, UserProvidedPath::Default) {
+            let from_toml =
+                self.table
+                    .get("state_dir")
+                    .and_then(|v| v.as_str())
+                    .map(|toml_value| {
+                        if toml_value.is_empty() {
+                            // If the toml value is empty, treat it as unset.
+                            UserProvidedPath::Unset
+                        } else {
+                            // Otherwise, treat the toml value as a provided path.
+                            UserProvidedPath::Provided(PathBuf::from(toml_value))
+                        }
+                    });
+            // If toml value is not provided, use the original value after all.
+            state_dir = from_toml.unwrap_or(state_dir);
+        }
+
+        match (state_dir, &self.local_app_dir) {
+            (UserProvidedPath::Provided(p), _) => Ok(Some(std::path::absolute(p)?)),
+            (UserProvidedPath::Default, Some(local_app_dir)) => {
+                Ok(Some(local_app_dir.join(".spin")))
+            }
+            (UserProvidedPath::Default | UserProvidedPath::Unset, _) => Ok(None),
+        }
+    }
+
+    /// Get the configured log directory.
+    ///
+    /// Errors if the path cannot be converted to an absolute path.
+    pub fn log_dir(&self) -> std::io::Result<Option<PathBuf>> {
+        let mut log_dir = self.log_dir.clone();
+        // If the log_dir is not explicitly provided, check the toml.
+        if matches!(log_dir, UserProvidedPath::Default) {
+            let from_toml = self
+                .table
+                .get("log_dir")
+                .and_then(|v| v.as_str())
+                .map(|toml_value| {
+                    if toml_value.is_empty() {
+                        // If the toml value is empty, treat it as unset.
+                        UserProvidedPath::Unset
+                    } else {
+                        // Otherwise, treat the toml value as a provided path.
+                        UserProvidedPath::Provided(PathBuf::from(toml_value))
+                    }
+                });
+            // If toml value is not provided, use the original value after all.
+ log_dir = from_toml.unwrap_or(log_dir); + } + + match log_dir { + UserProvidedPath::Provided(p) => Ok(Some(std::path::absolute(p)?)), + UserProvidedPath::Default => Ok(self.state_dir()?.map(|p| p.join("logs"))), + UserProvidedPath::Unset => Ok(None), + } + } + + /// Validate that all keys in the TOML file have been used. + pub fn validate_all_keys_used(&self) -> spin_factors::Result<()> { + self.table.validate_all_keys_used() + } +} + +/// The TOML based runtime configuration source Spin CLI. +pub struct TomlRuntimeConfigSource<'a, 'b> { + toml: TomlResolver<'b>, + key_value: &'a key_value::RuntimeConfigResolver, + tls: Option<&'a SpinTlsRuntimeConfig>, + sqlite: &'a sqlite::RuntimeConfigResolver, + use_gpu: bool, +} + +impl<'a, 'b> TomlRuntimeConfigSource<'a, 'b> { + pub fn new( + toml_resolver: TomlResolver<'b>, + key_value: &'a key_value::RuntimeConfigResolver, + tls: Option<&'a SpinTlsRuntimeConfig>, + sqlite: &'a sqlite::RuntimeConfigResolver, + use_gpu: bool, + ) -> Self { + Self { + toml: toml_resolver, + key_value, + tls, + sqlite, + use_gpu, + } + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config( + &mut self, + ) -> anyhow::Result> { + self.key_value.resolve_from_toml(Some(&self.toml.table)) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config( + &mut self, + ) -> anyhow::Result::RuntimeConfig>> + { + let Some(tls) = self.tls else { + return Ok(None); + }; + tls.config_from_table(&self.toml.table) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config( + &mut self, + ) -> anyhow::Result::RuntimeConfig>> { + Ok(Some(variables::runtime_config_from_toml(&self.toml.table)?)) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config(&mut self) -> anyhow::Result> { + Ok(None) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config(&mut self) -> anyhow::Result> { + Ok(None) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config(&mut self) -> anyhow::Result> { + llm::runtime_config_from_toml(&self.toml.table, self.toml.state_dir()?, self.use_gpu) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config(&mut self) -> anyhow::Result> { + Ok(None) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config(&mut self) -> anyhow::Result> { + Ok(None) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config(&mut self) -> anyhow::Result> { + Ok(None) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config(&mut self) -> anyhow::Result> { + Ok(None) + } +} + +impl FactorRuntimeConfigSource for TomlRuntimeConfigSource<'_, '_> { + fn get_runtime_config(&mut self) -> anyhow::Result> { + self.sqlite.resolve_from_toml(&self.toml.table) + } +} + +impl RuntimeConfigSourceFinalizer for TomlRuntimeConfigSource<'_, '_> { + fn finalize(&mut self) -> anyhow::Result<()> { + Ok(self.toml.validate_all_keys_used()?) + } +} + +const DEFAULT_KEY_VALUE_STORE_LABEL: &str = "default"; + +/// The key-value runtime configuration resolver. +/// +/// Takes a base path that all local key-value stores which are configured with +/// relative paths will be relative to. 
+pub fn key_value_config_resolver(
+    local_store_base_path: Option<PathBuf>,
+) -> key_value::RuntimeConfigResolver {
+    let mut key_value = key_value::RuntimeConfigResolver::new();
+
+    // Register the supported store types.
+    // Unwraps are safe because the store types are known to not overlap.
+    key_value
+        .register_store_type(spin_factor_key_value_spin::SpinKeyValueStore::new(
+            local_store_base_path.clone(),
+        ))
+        .unwrap();
+    key_value
+        .register_store_type(spin_factor_key_value_redis::RedisKeyValueStore::new())
+        .unwrap();
+    key_value
+        .register_store_type(spin_factor_key_value_azure::AzureKeyValueStore::new())
+        .unwrap();
+
+    // Add handling of "default" store.
+    // Unwraps are safe because the store is known to be serializable as toml.
+    key_value
+        .add_default_store::<SpinKeyValueStore>(
+            DEFAULT_KEY_VALUE_STORE_LABEL,
+            SpinKeyValueRuntimeConfig::new(
+                local_store_base_path.map(|p| p.join(DEFAULT_SPIN_STORE_FILENAME)),
+            ),
+        )
+        .unwrap();
+
+    key_value
+}
+
+/// The default filename for the SQLite database backing the default key-value store.
+const DEFAULT_SPIN_STORE_FILENAME: &str = "sqlite_key_value.db";
+
+/// The sqlite runtime configuration resolver.
+///
+/// Takes a path to the directory where the default database should be stored.
+/// If the path is `None`, the default database will be in-memory.
+fn sqlite_config_resolver(
+    default_database_dir: Option<PathBuf>,
+) -> anyhow::Result<sqlite::RuntimeConfigResolver> {
+    let local_database_dir =
+        std::env::current_dir().context("failed to get current working directory")?;
+    Ok(sqlite::RuntimeConfigResolver::new(
+        default_database_dir,
+        local_database_dir,
+    ))
+}
+
+/// A user-provided path which can either be explicitly provided, defaulted, or explicitly unset.
+#[derive(Clone, Debug)]
+pub enum UserProvidedPath {
+    /// Use the explicitly provided directory.
+    Provided(PathBuf),
+    /// Use the default.
+    Default,
+    /// Explicitly unset.
+    Unset,
+}
diff --git a/crates/sqlite-inproc/src/lib.rs b/crates/sqlite-inproc/src/lib.rs
index 77877325d7..63d24d8fb7 100644
--- a/crates/sqlite-inproc/src/lib.rs
+++ b/crates/sqlite-inproc/src/lib.rs
@@ -16,6 +16,29 @@ pub enum InProcDatabaseLocation {
     Path(PathBuf),
 }
 
+impl InProcDatabaseLocation {
+    /// Convert an optional path to a database location.
+    ///
+    /// Ensures that the parent directory of the database exists.
+ pub fn from_path(path: Option) -> anyhow::Result { + match path { + Some(path) => { + // Create the store's parent directory if necessary + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent).with_context(|| { + format!( + "failed to create sqlite database directory '{}'", + parent.display() + ) + })?; + } + Ok(Self::Path(path)) + } + None => Ok(Self::InMemory), + } + } +} + /// A connection to a sqlite database pub struct InProcConnection { location: InProcDatabaseLocation, diff --git a/crates/sqlite/src/lib.rs b/crates/sqlite/src/lib.rs index a9f589bf69..110dfaca0a 100644 --- a/crates/sqlite/src/lib.rs +++ b/crates/sqlite/src/lib.rs @@ -1,13 +1,13 @@ -mod host_component; +// TODO(factors): Code left for reference; remove after migration to factors +// mod host_component; -use spin_app::{async_trait, MetadataKey}; +use spin_app::MetadataKey; use spin_core::wasmtime::component::Resource; +use spin_world::async_trait; use spin_world::v1::sqlite::Error as V1SqliteError; use spin_world::v2::sqlite; use std::{collections::HashSet, sync::Arc}; -pub use host_component::SqliteComponent; - pub const DATABASES_KEY: MetadataKey> = MetadataKey::new("databases"); /// A store of connections for all accessible databases for an application diff --git a/crates/testing/Cargo.toml b/crates/testing/Cargo.toml deleted file mode 100644 index 45204df15d..0000000000 --- a/crates/testing/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "spin-testing" -version = { workspace = true } -authors = { workspace = true } -edition = { workspace = true } - -[dependencies] -anyhow = "1.0" -http = "1.0.0" -hyper = "1.0.0" -serde = "1.0.188" -serde_json = "1" -spin-app = { path = "../app" } -spin-core = { path = "../core" } -spin-http = { path = "../http" } -spin-trigger = { path = "../trigger" } -tokio = { version = "1", features = ["macros", "rt"] } -tracing-subscriber = "0.3" -spin-componentize = { workspace = true } diff --git a/crates/testing/src/lib.rs b/crates/testing/src/lib.rs deleted file mode 100644 index 48b56bf589..0000000000 --- a/crates/testing/src/lib.rs +++ /dev/null @@ -1,244 +0,0 @@ -//! This crates contains common code for use in tests. Many methods will panic -//! in the slightest breeze, so DO NOT USE IN NON-TEST CODE. - -use std::{ - net::SocketAddr, - path::{Path, PathBuf}, - sync::Once, -}; - -use http::Response; -use serde::de::DeserializeOwned; -use serde_json::{json, Value}; -use spin_app::{ - async_trait, - locked::{LockedApp, LockedComponentSource}, - AppComponent, Loader, -}; -use spin_core::{Component, StoreBuilder}; -use spin_http::config::{ - HttpExecutorType, HttpTriggerConfig, HttpTriggerRouteConfig, WagiTriggerConfig, -}; -use spin_trigger::{HostComponentInitData, RuntimeConfig, TriggerExecutor, TriggerExecutorBuilder}; -use tokio::fs; - -pub use tokio; - -// Built by build.rs -const TEST_PROGRAM_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/../../target/test-programs"); - -/// Initialize a test writer for `tracing`, making its output compatible with libtest -pub fn init_tracing() { - static ONCE: Once = Once::new(); - ONCE.call_once(|| { - tracing_subscriber::fmt() - // Cranelift is very verbose at INFO, so let's tone that down: - .with_max_level(tracing_subscriber::filter::LevelFilter::WARN) - .with_test_writer() - .init(); - }) -} - -// Convenience wrapper for deserializing from literal JSON -#[macro_export] -macro_rules! 
from_json { - ($($json:tt)+) => { - serde_json::from_value(serde_json::json!($($json)+)).expect("valid json") - }; -} - -#[derive(Default)] -pub struct HttpTestConfig { - module_path: Option, - http_trigger_config: HttpTriggerConfig, -} - -#[derive(Default)] -pub struct RedisTestConfig { - module_path: Option, - redis_channel: String, -} - -impl HttpTestConfig { - pub fn module_path(&mut self, path: impl Into) -> &mut Self { - init_tracing(); - self.module_path = Some(path.into()); - self - } - - pub fn test_program(&mut self, name: impl AsRef) -> &mut Self { - self.module_path(Path::new(TEST_PROGRAM_PATH).join(name)) - } - - pub fn http_spin_trigger(&mut self, route: impl Into) -> &mut Self { - self.http_trigger_config = HttpTriggerConfig { - component: "test-component".to_string(), - route: route.into(), - executor: None, - }; - self - } - - pub fn http_wagi_trigger( - &mut self, - route: impl Into, - wagi_config: WagiTriggerConfig, - ) -> &mut Self { - self.http_trigger_config = HttpTriggerConfig { - component: "test-component".to_string(), - route: route.into(), - executor: Some(HttpExecutorType::Wagi(wagi_config)), - }; - self - } - - pub fn build_loader(&self) -> impl Loader { - init_tracing(); - TestLoader { - module_path: self.module_path.clone().expect("module path to be set"), - trigger_type: "http".into(), - app_trigger_metadata: json!({"base": "/"}), - trigger_config: serde_json::to_value(&self.http_trigger_config).unwrap(), - } - } - - pub async fn build_trigger(&self) -> Executor - where - Executor::TriggerConfig: DeserializeOwned, - { - TriggerExecutorBuilder::new(self.build_loader()) - .build( - TEST_APP_URI.to_string(), - RuntimeConfig::default(), - HostComponentInitData::default(), - ) - .await - .unwrap() - } -} - -impl RedisTestConfig { - pub fn module_path(&mut self, path: impl Into) -> &mut Self { - init_tracing(); - self.module_path = Some(path.into()); - self - } - - pub fn test_program(&mut self, name: impl AsRef) -> &mut Self { - self.module_path(Path::new(TEST_PROGRAM_PATH).join(name)) - } - - pub fn build_loader(&self) -> impl Loader { - TestLoader { - module_path: self.module_path.clone().expect("module path to be set"), - trigger_type: "redis".into(), - app_trigger_metadata: json!({"address": "test-redis-host"}), - trigger_config: json!({ - "component": "test-component", - "channel": self.redis_channel, - }), - } - } - - pub async fn build_trigger(&mut self, channel: &str) -> Executor - where - Executor::TriggerConfig: DeserializeOwned, - { - self.redis_channel = channel.into(); - - TriggerExecutorBuilder::new(self.build_loader()) - .build( - TEST_APP_URI.to_string(), - RuntimeConfig::default(), - HostComponentInitData::default(), - ) - .await - .unwrap() - } -} - -const TEST_APP_URI: &str = "spin-test:"; - -struct TestLoader { - module_path: PathBuf, - trigger_type: String, - app_trigger_metadata: Value, - trigger_config: Value, -} - -#[async_trait] -impl Loader for TestLoader { - async fn load_app(&self, uri: &str) -> anyhow::Result { - assert_eq!(uri, TEST_APP_URI); - let components = from_json!([{ - "id": "test-component", - "source": { - "content_type": "application/wasm", - "digest": "test-source", - }, - }]); - let triggers = from_json!([ - { - "id": "trigger--test-app", - "trigger_type": self.trigger_type, - "trigger_config": self.trigger_config, - }, - ]); - let mut trigger_meta = self.app_trigger_metadata.clone(); - trigger_meta - .as_object_mut() - .unwrap() - .insert("type".into(), self.trigger_type.clone().into()); - let metadata = 
from_json!({"name": "test-app", "trigger": trigger_meta}); - let variables = Default::default(); - Ok(LockedApp { - spin_lock_version: Default::default(), - components, - triggers, - metadata, - variables, - must_understand: Default::default(), - host_requirements: Default::default(), - }) - } - - async fn load_component( - &self, - engine: &spin_core::wasmtime::Engine, - source: &LockedComponentSource, - ) -> anyhow::Result { - assert_eq!(source.content.digest.as_deref(), Some("test-source")); - Component::new( - engine, - spin_componentize::componentize_if_necessary(&fs::read(&self.module_path).await?)?, - ) - } - - async fn load_module( - &self, - engine: &spin_core::wasmtime::Engine, - source: &LockedComponentSource, - ) -> anyhow::Result { - assert_eq!(source.content.digest.as_deref(), Some("test-source")); - spin_core::Module::from_file(engine, &self.module_path) - } - - async fn mount_files( - &self, - _store_builder: &mut StoreBuilder, - component: &AppComponent, - ) -> anyhow::Result<()> { - assert_eq!(component.files().len(), 0, "files testing not implemented"); - Ok(()) - } -} - -pub fn test_socket_addr() -> SocketAddr { - "127.0.0.1:55555".parse().unwrap() -} - -pub fn assert_http_response_success(resp: &Response) { - if !resp.status().is_success() { - panic!("non-success response {}: {:?}", resp.status(), resp.body()); - } -} diff --git a/crates/trigger-http/Cargo.toml b/crates/trigger-http/Cargo.toml index 79ce6c1288..65030acc6a 100644 --- a/crates/trigger-http/Cargo.toml +++ b/crates/trigger-http/Cargo.toml @@ -7,6 +7,11 @@ edition = { workspace = true } [lib] doctest = false +[features] +llm = ["spin-trigger/llm"] +llm-metal = ["spin-trigger/llm-metal"] +llm-cublas = ["spin-trigger/llm-cublas"] + [dependencies] anyhow = "1.0" async-trait = "0.1" @@ -18,7 +23,6 @@ hyper = { workspace = true } hyper-util = { version = "0.1.2", features = ["tokio"] } http-body-util = { workspace = true } indexmap = "1" -outbound-http = { path = "../outbound-http" } percent-encoding = "2" rustls = { version = "0.22.4" } rustls-pemfile = "2.1.2" @@ -27,6 +31,8 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1" spin-app = { path = "../app" } spin-core = { path = "../core" } +spin-factor-outbound-http = { path = "../factor-outbound-http" } +spin-factor-wasi = { path = "../factor-wasi" } spin-http = { path = "../http" } spin-outbound-networking = { path = "../outbound-networking" } spin-telemetry = { path = "../telemetry" } @@ -41,22 +47,7 @@ tracing = { workspace = true } wasmtime = { workspace = true } wasmtime-wasi = { workspace = true } wasmtime-wasi-http = { workspace = true } -wasi-common-preview1 = { workspace = true } webpki-roots = { version = "0.26.0" } -[dev-dependencies] -criterion = { version = "0.3.5", features = ["async_tokio"] } -num_cpus = "1" -spin-testing = { path = "../testing" } - -[[bench]] -name = "baseline" -harness = false - -[features] -llm = ["spin-trigger/llm"] -llm-metal = ["llm", "spin-trigger/llm-metal"] -llm-cublas = ["llm", "spin-trigger/llm-cublas"] - [lints] workspace = true diff --git a/crates/trigger-http/benches/baseline.rs b/crates/trigger-http/benches/baseline.rs deleted file mode 100644 index 2eac1aa6b0..0000000000 --- a/crates/trigger-http/benches/baseline.rs +++ /dev/null @@ -1,126 +0,0 @@ -use std::future::Future; -use std::sync::atomic::{AtomicBool, Ordering::Relaxed}; -use std::sync::Arc; - -use criterion::{criterion_group, criterion_main, Criterion}; - -use http::uri::Scheme; -use http::Request; -use 
spin_testing::{assert_http_response_success, HttpTestConfig}; -use spin_trigger_http::HttpTrigger; -use tokio::runtime::Runtime; - -criterion_main!(benches); -criterion_group!( - benches, - bench_startup, - bench_spin_concurrency_minimal, - bench_wagi_concurrency_minimal, -); - -async fn spin_trigger() -> Arc { - Arc::new( - HttpTestConfig::default() - .test_program("spin-http-benchmark.wasm") - .http_spin_trigger("/") - .build_trigger() - .await, - ) -} - -async fn wagi_trigger() -> Arc { - Arc::new( - HttpTestConfig::default() - .test_program("wagi-benchmark.wasm") - .http_wagi_trigger("/", Default::default()) - .build_trigger() - .await, - ) -} - -// Benchmark time to start and process one request -fn bench_startup(c: &mut Criterion) { - let async_runtime = Runtime::new().unwrap(); - - let mut group = c.benchmark_group("startup"); - group.bench_function("spin-executor", |b| { - b.to_async(&async_runtime).iter(|| async { - let trigger = spin_trigger().await; - run(&trigger, "/").await; - }); - }); - group.bench_function("spin-wagi-executor", |b| { - b.to_async(&async_runtime).iter(|| async { - let trigger = wagi_trigger().await; - run(&trigger, "/").await; - }); - }); -} - -fn bench_spin_concurrency_minimal(c: &mut Criterion) { - bench_concurrency_minimal(c, "spin-executor", spin_trigger); -} -fn bench_wagi_concurrency_minimal(c: &mut Criterion) { - bench_concurrency_minimal(c, "spin-wagi-executor", wagi_trigger); -} - -fn bench_concurrency_minimal>>( - c: &mut Criterion, - name: &str, - mk: fn() -> F, -) { - let async_runtime = Runtime::new().unwrap(); - let trigger = async_runtime.block_on(mk()); - - for task in ["/?sleep=1", "/?noop", "/?cpu=1"] { - let mut group = c.benchmark_group(format!("{name}{task}")); - for concurrency in concurrency_steps() { - group.bench_function(format!("concurrency-{}", concurrency), |b| { - let done = Arc::new(AtomicBool::new(false)); - let background = (0..concurrency - 1) - .map(|_| { - let trigger = trigger.clone(); - let done = done.clone(); - async_runtime.spawn(async move { - while !done.load(Relaxed) { - run(&trigger, task).await; - } - }) - }) - .collect::>(); - b.to_async(&async_runtime).iter(|| run(&trigger, task)); - done.store(true, Relaxed); - for task in background { - async_runtime.block_on(task).unwrap(); - } - }); - } - } -} - -// Helpers - -fn concurrency_steps() -> [u32; 3] { - let cpus = num_cpus::get() as u32; - if cpus > 1 { - [1, cpus, cpus * 4] - } else { - [1, 2, 4] - } -} - -async fn run(trigger: &HttpTrigger, path: &str) { - let req = Request::get(path.to_string()) - .body(Default::default()) - .unwrap(); - let resp = trigger - .handle( - req, - Scheme::HTTP, - "127.0.0.1:3000".parse().unwrap(), - "127.0.0.1:55555".parse().unwrap(), - ) - .await - .unwrap(); - assert_http_response_success(&resp); -} diff --git a/crates/trigger-http/benches/readme.md b/crates/trigger-http/benches/readme.md deleted file mode 100644 index 18c9ef2298..0000000000 --- a/crates/trigger-http/benches/readme.md +++ /dev/null @@ -1,8 +0,0 @@ -These benchmarks use [criterion.rs](https://github.com/bheisler/criterion.rs); the recommended way to run them is with the [cargo-criterion](https://github.com/bheisler/cargo-criterion) tool: - -```sh -$ cargo install cargo-criterion -$ cargo criterion --workspace -``` - -HTML reports will be written to `target/criterion/reports` \ No newline at end of file diff --git a/crates/trigger-http/benches/spin-http-benchmark/.cargo/config.toml b/crates/trigger-http/benches/spin-http-benchmark/.cargo/config.toml deleted file 
mode 100644 index 6b77899cb3..0000000000 --- a/crates/trigger-http/benches/spin-http-benchmark/.cargo/config.toml +++ /dev/null @@ -1,2 +0,0 @@ -[build] -target = "wasm32-wasi" diff --git a/crates/trigger-http/benches/spin-http-benchmark/Cargo.toml b/crates/trigger-http/benches/spin-http-benchmark/Cargo.toml deleted file mode 100644 index dae8080366..0000000000 --- a/crates/trigger-http/benches/spin-http-benchmark/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "spin-http-benchmark" -version = "0.2.0" -edition = "2021" - -[lib] -crate-type = ["cdylib"] - -[dependencies] -wit-bindgen = "0.13.0" -url = "2.4.1" - -[workspace] diff --git a/crates/trigger-http/benches/spin-http-benchmark/src/lib.rs b/crates/trigger-http/benches/spin-http-benchmark/src/lib.rs deleted file mode 100644 index 52e8835297..0000000000 --- a/crates/trigger-http/benches/spin-http-benchmark/src/lib.rs +++ /dev/null @@ -1,57 +0,0 @@ -wit_bindgen::generate!({ - world: "http-trigger", - path: "../../../../wit/deps/spin@unversioned", - exports: { - "fermyon:spin/inbound-http": SpinHttp, - } -}); - -use exports::fermyon::spin::inbound_http; - -struct SpinHttp; - -impl inbound_http::Guest for SpinHttp { - fn handle_request(req: inbound_http::Request) -> inbound_http::Response { - let params = req.uri.find('?').map(|i| &req.uri[i + 1..]).unwrap_or(""); - for (key, value) in url::form_urlencoded::parse(params.as_bytes()) { - #[allow(clippy::single_match)] - match &*key { - // sleep= param simulates processing time - "sleep" => { - let ms = value.parse().expect("invalid sleep"); - std::thread::sleep(std::time::Duration::from_millis(ms)); - } - // cpu= param simulates compute time - "cpu" => { - let amt = value.parse().expect("invalid cpu"); - for _ in 0..amt { - do_some_work(); - } - } - _ => (), - } - } - inbound_http::Response { - status: 200, - headers: None, - body: None, - } - } -} - -// According to my computer, which is highly accurate, this is the best way to -// simulate precisely 1.5ms of work. That definitely won't change over time. 
-fn do_some_work() { - const N: usize = 4096; - const AMT: usize = 5_000; - - let mut a = [0u8; N]; - let mut b = [1u8; N]; - - for _ in 0..AMT { - a.copy_from_slice(&b); - std::hint::black_box(&a); - b.copy_from_slice(&a); - std::hint::black_box(&b); - } -} diff --git a/crates/trigger-http/benches/wagi-benchmark/.cargo/config.toml b/crates/trigger-http/benches/wagi-benchmark/.cargo/config.toml deleted file mode 100644 index 6b77899cb3..0000000000 --- a/crates/trigger-http/benches/wagi-benchmark/.cargo/config.toml +++ /dev/null @@ -1,2 +0,0 @@ -[build] -target = "wasm32-wasi" diff --git a/crates/trigger-http/benches/wagi-benchmark/Cargo.toml b/crates/trigger-http/benches/wagi-benchmark/Cargo.toml deleted file mode 100644 index a2b6085cf8..0000000000 --- a/crates/trigger-http/benches/wagi-benchmark/Cargo.toml +++ /dev/null @@ -1,6 +0,0 @@ -[package] -name = "wagi-benchmark" -version = "0.1.0" -edition = "2021" - -[workspace] \ No newline at end of file diff --git a/crates/trigger-http/benches/wagi-benchmark/src/main.rs b/crates/trigger-http/benches/wagi-benchmark/src/main.rs deleted file mode 100644 index a8e17c8ac6..0000000000 --- a/crates/trigger-http/benches/wagi-benchmark/src/main.rs +++ /dev/null @@ -1,11 +0,0 @@ -fn main() { - for arg in std::env::args() { - // sleep= param simulates processing time - if let Some(ms_str) = arg.strip_prefix("sleep=") { - let ms = ms_str.parse().expect("invalid sleep"); - std::thread::sleep(std::time::Duration::from_millis(ms)); - } - } - - println!("Content-Type: text/plain\n"); -} diff --git a/crates/trigger-http/src/handler.rs b/crates/trigger-http/src/handler.rs deleted file mode 100644 index 4ff4aa3980..0000000000 --- a/crates/trigger-http/src/handler.rs +++ /dev/null @@ -1,456 +0,0 @@ -use std::{net::SocketAddr, str, str::FromStr}; - -use crate::{Body, ChainedRequestHandler, HttpExecutor, HttpInstance, HttpTrigger, Store}; -use anyhow::{anyhow, Context, Result}; -use futures::TryFutureExt; -use http::{HeaderName, HeaderValue}; -use http_body_util::BodyExt; -use hyper::{Request, Response}; -use outbound_http::OutboundHttpComponent; -use spin_core::async_trait; -use spin_core::wasi_2023_10_18::exports::wasi::http::incoming_handler::Guest as IncomingHandler2023_10_18; -use spin_core::wasi_2023_11_10::exports::wasi::http::incoming_handler::Guest as IncomingHandler2023_11_10; -use spin_core::{Component, Engine, Instance}; -use spin_http::body; -use spin_http::routes::RouteMatch; -use spin_trigger::TriggerAppEngine; -use spin_world::v1::http_types; -use std::sync::Arc; -use tokio::{sync::oneshot, task}; -use tracing::{instrument, Instrument, Level}; -use wasmtime_wasi_http::{proxy::Proxy, WasiHttpView}; - -#[derive(Clone)] -pub struct HttpHandlerExecutor; - -#[async_trait] -impl HttpExecutor for HttpHandlerExecutor { - #[instrument(name = "spin_trigger_http.execute_wasm", skip_all, err(level = Level::INFO), fields(otel.name = format!("execute_wasm_component {}", route_match.component_id())))] - async fn execute( - &self, - engine: Arc>, - base: &str, - route_match: &RouteMatch, - req: Request, - client_addr: SocketAddr, - ) -> Result> { - let component_id = route_match.component_id(); - - tracing::trace!( - "Executing request using the Spin executor for component {}", - component_id - ); - - let (instance, mut store) = engine.prepare_instance(component_id).await?; - let HttpInstance::Component(instance, ty) = instance else { - unreachable!() - }; - - set_http_origin_from_request(&mut store, engine.clone(), self, &req); - - // set the client tls options 
for the current component_id. - // The OutboundWasiHttpHandler in this file is only used - // when making http-request from a http-trigger component. - // The outbound http requests from other triggers such as Redis - // uses OutboundWasiHttpHandler defined in spin_core crate. - store.as_mut().data_mut().as_mut().client_tls_opts = - engine.get_client_tls_opts(component_id); - - let resp = match ty { - HandlerType::Spin => { - Self::execute_spin(store, instance, base, route_match, req, client_addr) - .await - .map_err(contextualise_err)? - } - _ => { - Self::execute_wasi(store, instance, ty, base, route_match, req, client_addr).await? - } - }; - - tracing::info!( - "Request finished, sending response with status code {}", - resp.status() - ); - Ok(resp) - } -} - -impl HttpHandlerExecutor { - pub async fn execute_spin( - mut store: Store, - instance: Instance, - base: &str, - route_match: &RouteMatch, - req: Request, - client_addr: SocketAddr, - ) -> Result> { - let headers = Self::headers(&req, base, route_match, client_addr)?; - let func = instance - .exports(&mut store) - .instance("fermyon:spin/inbound-http") - // Safe since we have already checked that this instance exists - .expect("no fermyon:spin/inbound-http found") - .typed_func::<(http_types::Request,), (http_types::Response,)>("handle-request")?; - - let (parts, body) = req.into_parts(); - let bytes = body.collect().await?.to_bytes().to_vec(); - - let method = if let Some(method) = Self::method(&parts.method) { - method - } else { - return Ok(Response::builder() - .status(http::StatusCode::METHOD_NOT_ALLOWED) - .body(body::empty())?); - }; - - // Preparing to remove the params field. We are leaving it in place for now - // to avoid breaking the ABI, but no longer pass or accept values in it. - // https://github.com/fermyon/spin/issues/663 - let params = vec![]; - - let uri = match parts.uri.path_and_query() { - Some(u) => u.to_string(), - None => parts.uri.to_string(), - }; - - let req = http_types::Request { - method, - uri, - headers, - params, - body: Some(bytes), - }; - - let (resp,) = func.call_async(&mut store, (req,)).await?; - - if resp.status < 100 || resp.status > 600 { - tracing::error!("malformed HTTP status code"); - return Ok(Response::builder() - .status(http::StatusCode::INTERNAL_SERVER_ERROR) - .body(body::empty())?); - }; - - let mut response = http::Response::builder().status(resp.status); - if let Some(headers) = response.headers_mut() { - Self::append_headers(headers, resp.headers)?; - } - - let body = match resp.body { - Some(b) => body::full(b.into()), - None => body::empty(), - }; - - Ok(response.body(body)?) 
- } - - fn method(m: &http::Method) -> Option { - Some(match *m { - http::Method::GET => http_types::Method::Get, - http::Method::POST => http_types::Method::Post, - http::Method::PUT => http_types::Method::Put, - http::Method::DELETE => http_types::Method::Delete, - http::Method::PATCH => http_types::Method::Patch, - http::Method::HEAD => http_types::Method::Head, - http::Method::OPTIONS => http_types::Method::Options, - _ => return None, - }) - } - - async fn execute_wasi( - mut store: Store, - instance: Instance, - ty: HandlerType, - base: &str, - route_match: &RouteMatch, - mut req: Request, - client_addr: SocketAddr, - ) -> anyhow::Result> { - let headers = Self::headers(&req, base, route_match, client_addr)?; - req.headers_mut().clear(); - req.headers_mut() - .extend(headers.into_iter().filter_map(|(n, v)| { - let Ok(name) = n.parse::() else { - return None; - }; - let Ok(value) = HeaderValue::from_bytes(v.as_bytes()) else { - return None; - }; - Some((name, value)) - })); - let request = store.as_mut().data_mut().new_incoming_request(req)?; - - let (response_tx, response_rx) = oneshot::channel(); - let response = store - .as_mut() - .data_mut() - .new_response_outparam(response_tx)?; - - enum Handler { - Latest(Proxy), - Handler2023_11_10(IncomingHandler2023_11_10), - Handler2023_10_18(IncomingHandler2023_10_18), - } - - let handler = - { - let mut exports = instance.exports(&mut store); - match ty { - HandlerType::Wasi2023_10_18 => { - let mut instance = exports - .instance(WASI_HTTP_EXPORT_2023_10_18) - .ok_or_else(|| { - anyhow!("export of `{WASI_HTTP_EXPORT_2023_10_18}` not an instance") - })?; - Handler::Handler2023_10_18(IncomingHandler2023_10_18::new(&mut instance)?) - } - HandlerType::Wasi2023_11_10 => { - let mut instance = exports - .instance(WASI_HTTP_EXPORT_2023_11_10) - .ok_or_else(|| { - anyhow!("export of `{WASI_HTTP_EXPORT_2023_11_10}` not an instance") - })?; - Handler::Handler2023_11_10(IncomingHandler2023_11_10::new(&mut instance)?) - } - HandlerType::Wasi0_2 => { - drop(exports); - Handler::Latest(Proxy::new(&mut store, &instance)?) - } - HandlerType::Spin => panic!("should have used execute_spin instead"), - } - }; - - let span = tracing::debug_span!("execute_wasi"); - let handle = task::spawn( - async move { - let result = match handler { - Handler::Latest(proxy) => { - proxy - .wasi_http_incoming_handler() - .call_handle(&mut store, request, response) - .instrument(span) - .await - } - Handler::Handler2023_10_18(proxy) => { - proxy - .call_handle(&mut store, request, response) - .instrument(span) - .await - } - Handler::Handler2023_11_10(proxy) => { - proxy - .call_handle(&mut store, request, response) - .instrument(span) - .await - } - }; - - tracing::trace!( - "wasi-http memory consumed: {}", - store.as_ref().data().memory_consumed() - ); - - result - } - .in_current_span(), - ); - - match response_rx.await { - Ok(response) => { - task::spawn( - async move { - handle - .await - .context("guest invocation panicked")? - .context("guest invocation failed")?; - - Ok(()) - } - .map_err(|e: anyhow::Error| { - tracing::warn!("component error after response: {e:?}"); - }), - ); - - Ok(response.context("guest failed to produce a response")?) - } - - Err(_) => { - handle - .await - .context("guest invocation panicked")? 
- .context("guest invocation failed")?; - - Err(anyhow!( - "guest failed to produce a response prior to returning" - )) - } - } - } - - fn headers( - req: &Request, - base: &str, - route_match: &RouteMatch, - client_addr: SocketAddr, - ) -> Result> { - let mut res = Vec::new(); - for (name, value) in req - .headers() - .iter() - .map(|(name, value)| (name.to_string(), std::str::from_utf8(value.as_bytes()))) - { - let value = value?.to_string(); - res.push((name, value)); - } - - let default_host = http::HeaderValue::from_str("localhost")?; - let host = std::str::from_utf8( - req.headers() - .get("host") - .unwrap_or(&default_host) - .as_bytes(), - )?; - - // Set the environment information (path info, base path, etc) as headers. - // In the future, we might want to have this information in a context - // object as opposed to headers. - for (keys, val) in - crate::compute_default_headers(req.uri(), base, host, route_match, client_addr)? - { - res.push((Self::prepare_header_key(&keys[0]), val)); - } - - Ok(res) - } - - fn prepare_header_key(key: &str) -> String { - key.replace('_', "-").to_ascii_lowercase() - } - - fn append_headers(res: &mut http::HeaderMap, src: Option>) -> Result<()> { - if let Some(src) = src { - for (k, v) in src.iter() { - res.insert( - http::header::HeaderName::from_str(k)?, - http::header::HeaderValue::from_str(v)?, - ); - } - }; - - Ok(()) - } -} - -/// Whether this handler uses the custom Spin http handler interface for wasi-http -#[derive(Copy, Clone)] -pub enum HandlerType { - Spin, - Wasi0_2, - Wasi2023_11_10, - Wasi2023_10_18, -} - -const WASI_HTTP_EXPORT_2023_10_18: &str = "wasi:http/incoming-handler@0.2.0-rc-2023-10-18"; -const WASI_HTTP_EXPORT_2023_11_10: &str = "wasi:http/incoming-handler@0.2.0-rc-2023-11-10"; -const WASI_HTTP_EXPORT_0_2_0: &str = "wasi:http/incoming-handler@0.2.0"; - -impl HandlerType { - /// Determine the handler type from the exports of a component - pub fn from_component(engine: &Engine, component: &Component) -> Result { - let mut handler_ty = None; - - let mut set = |ty: HandlerType| { - if handler_ty.is_none() { - handler_ty = Some(ty); - Ok(()) - } else { - Err(anyhow!( - "component exports multiple different handlers but \ - it's expected to export only one" - )) - } - }; - let ty = component.component_type(); - for (name, _) in ty.exports(engine.as_ref()) { - match name { - WASI_HTTP_EXPORT_2023_10_18 => set(HandlerType::Wasi2023_10_18)?, - WASI_HTTP_EXPORT_2023_11_10 => set(HandlerType::Wasi2023_11_10)?, - WASI_HTTP_EXPORT_0_2_0 => set(HandlerType::Wasi0_2)?, - "fermyon:spin/inbound-http" => set(HandlerType::Spin)?, - _ => {} - } - } - - handler_ty.ok_or_else(|| { - anyhow!( - "Expected component to either export `{WASI_HTTP_EXPORT_2023_10_18}`, \ - `{WASI_HTTP_EXPORT_2023_11_10}`, `{WASI_HTTP_EXPORT_0_2_0}`, \ - or `fermyon:spin/inbound-http` but it exported none of those" - ) - }) - } -} - -fn set_http_origin_from_request( - store: &mut Store, - engine: Arc>, - handler: &HttpHandlerExecutor, - req: &Request, -) { - if let Some(authority) = req.uri().authority() { - if let Some(scheme) = req.uri().scheme_str() { - let origin = format!("{}://{}", scheme, authority); - if let Some(outbound_http_handle) = engine - .engine - .find_host_component_handle::>() - { - let outbound_http_data = store - .host_components_data() - .get_or_insert(outbound_http_handle); - - outbound_http_data.origin.clone_from(&origin); - store.as_mut().data_mut().as_mut().allowed_hosts = - outbound_http_data.allowed_hosts.clone(); - } - - let 
chained_request_handler = ChainedRequestHandler {
-                engine: engine.clone(),
-                executor: handler.clone(),
-            };
-            store.as_mut().data_mut().as_mut().origin = Some(origin);
-            store.as_mut().data_mut().as_mut().chained_handler = Some(chained_request_handler);
-        }
-    }
-}
-
-fn contextualise_err(e: anyhow::Error) -> anyhow::Error {
-    if e.to_string()
-        .contains("failed to find function export `canonical_abi_free`")
-    {
-        e.context(
-            "component is not compatible with Spin executor - should this use the Wagi executor?",
-        )
-    } else {
-        e
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_spin_header_keys() {
-        assert_eq!(
-            HttpHandlerExecutor::prepare_header_key("SPIN_FULL_URL"),
-            "spin-full-url".to_string()
-        );
-        assert_eq!(
-            HttpHandlerExecutor::prepare_header_key("SPIN_PATH_INFO"),
-            "spin-path-info".to_string()
-        );
-        assert_eq!(
-            HttpHandlerExecutor::prepare_header_key("SPIN_RAW_COMPONENT_ROUTE"),
-            "spin-raw-component-route".to_string()
-        );
-    }
-}
diff --git a/crates/trigger-http/src/headers.rs b/crates/trigger-http/src/headers.rs
new file mode 100644
index 0000000000..eed6a40754
--- /dev/null
+++ b/crates/trigger-http/src/headers.rs
@@ -0,0 +1,331 @@
+use std::{net::SocketAddr, str, str::FromStr};
+
+use anyhow::Result;
+use http::Uri;
+use hyper::Request;
+use spin_http::routes::RouteMatch;
+use spin_outbound_networking::is_service_chaining_host;
+
+use crate::Body;
+
+// We need to make the following pieces of information available to both executors.
+// While the values we set are identical, the way they are passed to the
+// modules is going to be different, so each executor must use the info
+// in its standardized way (environment variables for the Wagi executor, and custom headers
+// for the Spin HTTP executor).
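For orientation, a minimal, hypothetical sketch of the header-name conversion the Spin HTTP executor applies to the first entry of each key pair defined just below (the Wagi executor instead receives these values as environment variables, as the comment above notes). The function name `to_spin_header_name` is invented here for illustration; the conversion actually shipped in this file is `prepare_header_key`, further down.

// Illustrative sketch only; not part of this diff.
fn to_spin_header_name(key: &str) -> String {
    // e.g. "SPIN_FULL_URL" -> "spin-full-url"
    key.replace('_', "-").to_ascii_lowercase()
}

fn main() {
    assert_eq!(to_spin_header_name("SPIN_FULL_URL"), "spin-full-url");
    assert_eq!(to_spin_header_name("SPIN_MATCHED_ROUTE"), "spin-matched-route");
}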
+pub const FULL_URL: [&str; 2] = ["SPIN_FULL_URL", "X_FULL_URL"]; +pub const PATH_INFO: [&str; 2] = ["SPIN_PATH_INFO", "PATH_INFO"]; +pub const MATCHED_ROUTE: [&str; 2] = ["SPIN_MATCHED_ROUTE", "X_MATCHED_ROUTE"]; +pub const COMPONENT_ROUTE: [&str; 2] = ["SPIN_COMPONENT_ROUTE", "X_COMPONENT_ROUTE"]; +pub const RAW_COMPONENT_ROUTE: [&str; 2] = ["SPIN_RAW_COMPONENT_ROUTE", "X_RAW_COMPONENT_ROUTE"]; +pub const BASE_PATH: [&str; 2] = ["SPIN_BASE_PATH", "X_BASE_PATH"]; +pub const CLIENT_ADDR: [&str; 2] = ["SPIN_CLIENT_ADDR", "X_CLIENT_ADDR"]; + +pub fn compute_default_headers( + uri: &Uri, + host: &str, + route_match: &RouteMatch, + client_addr: SocketAddr, +) -> anyhow::Result> { + fn owned(strs: &[&'static str; 2]) -> [String; 2] { + [strs[0].to_owned(), strs[1].to_owned()] + } + + let owned_full_url: [String; 2] = owned(&FULL_URL); + let owned_path_info: [String; 2] = owned(&PATH_INFO); + let owned_matched_route: [String; 2] = owned(&MATCHED_ROUTE); + let owned_component_route: [String; 2] = owned(&COMPONENT_ROUTE); + let owned_raw_component_route: [String; 2] = owned(&RAW_COMPONENT_ROUTE); + let owned_base_path: [String; 2] = owned(&BASE_PATH); + let owned_client_addr: [String; 2] = owned(&CLIENT_ADDR); + + let mut res = vec![]; + let abs_path = uri + .path_and_query() + .expect("cannot get path and query") + .as_str(); + + let path_info = route_match.trailing_wildcard(); + + let scheme = uri.scheme_str().unwrap_or("http"); + + let full_url = format!("{}://{}{}", scheme, host, abs_path); + + res.push((owned_path_info, path_info)); + res.push((owned_full_url, full_url)); + res.push((owned_matched_route, route_match.based_route().to_string())); + + res.push((owned_base_path, "/".to_string())); + res.push(( + owned_raw_component_route, + route_match.raw_route().to_string(), + )); + res.push((owned_component_route, route_match.raw_route_or_prefix())); + res.push((owned_client_addr, client_addr.to_string())); + + for (wild_name, wild_value) in route_match.named_wildcards() { + let wild_header = format!("SPIN_PATH_MATCH_{}", wild_name.to_ascii_uppercase()); // TODO: safer + let wild_wagi_header = format!("X_PATH_MATCH_{}", wild_name.to_ascii_uppercase()); // TODO: safer + res.push(([wild_header, wild_wagi_header], wild_value.clone())); + } + + Ok(res) +} + +pub fn strip_forbidden_headers(req: &mut Request) { + let headers = req.headers_mut(); + if let Some(host_header) = headers.get("Host") { + if let Ok(host) = host_header.to_str() { + if is_service_chaining_host(host) { + headers.remove("Host"); + } + } + } +} + +pub fn prepare_request_headers( + req: &Request, + route_match: &RouteMatch, + client_addr: SocketAddr, +) -> Result> { + let mut res = Vec::new(); + for (name, value) in req + .headers() + .iter() + .map(|(name, value)| (name.to_string(), std::str::from_utf8(value.as_bytes()))) + { + let value = value?.to_string(); + res.push((name, value)); + } + + let default_host = http::HeaderValue::from_str("localhost")?; + let host = std::str::from_utf8( + req.headers() + .get("host") + .unwrap_or(&default_host) + .as_bytes(), + )?; + + // Set the environment information (path info, base path, etc) as headers. + // In the future, we might want to have this information in a context + // object as opposed to headers. + for (keys, val) in compute_default_headers(req.uri(), host, route_match, client_addr)? 
{ + res.push((prepare_header_key(&keys[0]), val)); + } + + Ok(res) +} + +pub fn append_headers( + map: &mut http::HeaderMap, + headers: Option>, +) -> Result<()> { + if let Some(src) = headers { + for (k, v) in src.iter() { + map.insert( + http::header::HeaderName::from_str(k)?, + http::header::HeaderValue::from_str(v)?, + ); + } + }; + + Ok(()) +} + +fn prepare_header_key(key: &str) -> String { + key.replace('_', "-").to_ascii_lowercase() +} + +#[cfg(test)] +mod tests { + use super::*; + use anyhow::Result; + use spin_http::routes::Router; + + #[test] + fn test_spin_header_keys() { + assert_eq!( + prepare_header_key("SPIN_FULL_URL"), + "spin-full-url".to_string() + ); + assert_eq!( + prepare_header_key("SPIN_PATH_INFO"), + "spin-path-info".to_string() + ); + assert_eq!( + prepare_header_key("SPIN_RAW_COMPONENT_ROUTE"), + "spin-raw-component-route".to_string() + ); + } + + #[test] + fn test_default_headers() -> Result<()> { + let scheme = "https"; + let host = "fermyon.dev"; + let trigger_route = "/foo/..."; + let component_path = "/foo"; + let path_info = "/bar"; + let client_addr: SocketAddr = "127.0.0.1:8777".parse().unwrap(); + + let req_uri = format!( + "{}://{}{}{}?key1=value1&key2=value2", + scheme, host, component_path, path_info + ); + + let req = http::Request::builder() + .method("POST") + .uri(req_uri) + .body("")?; + + let (router, _) = Router::build("/", [("DUMMY", &trigger_route.into())])?; + let route_match = router.route("/foo/bar")?; + + let default_headers = compute_default_headers(req.uri(), host, &route_match, client_addr)?; + + assert_eq!( + search(&FULL_URL, &default_headers).unwrap(), + "https://fermyon.dev/foo/bar?key1=value1&key2=value2".to_string() + ); + assert_eq!( + search(&PATH_INFO, &default_headers).unwrap(), + "/bar".to_string() + ); + assert_eq!( + search(&MATCHED_ROUTE, &default_headers).unwrap(), + "/foo/...".to_string() + ); + assert_eq!( + search(&BASE_PATH, &default_headers).unwrap(), + "/".to_string() + ); + assert_eq!( + search(&RAW_COMPONENT_ROUTE, &default_headers).unwrap(), + "/foo/...".to_string() + ); + assert_eq!( + search(&COMPONENT_ROUTE, &default_headers).unwrap(), + "/foo".to_string() + ); + assert_eq!( + search(&CLIENT_ADDR, &default_headers).unwrap(), + "127.0.0.1:8777".to_string() + ); + + Ok(()) + } + + #[test] + fn test_default_headers_with_named_wildcards() -> Result<()> { + let scheme = "https"; + let host = "fermyon.dev"; + let trigger_route = "/foo/:userid/..."; + let component_path = "/foo"; + let path_info = "/bar"; + let client_addr: SocketAddr = "127.0.0.1:8777".parse().unwrap(); + + let req_uri = format!( + "{}://{}{}/42{}?key1=value1&key2=value2", + scheme, host, component_path, path_info + ); + + let req = http::Request::builder() + .method("POST") + .uri(req_uri) + .body("")?; + + let (router, _) = Router::build("/", [("DUMMY", &trigger_route.into())])?; + let route_match = router.route("/foo/42/bar")?; + + let default_headers = compute_default_headers(req.uri(), host, &route_match, client_addr)?; + + assert_eq!( + search(&FULL_URL, &default_headers).unwrap(), + "https://fermyon.dev/foo/42/bar?key1=value1&key2=value2".to_string() + ); + assert_eq!( + search(&PATH_INFO, &default_headers).unwrap(), + "/bar".to_string() + ); + assert_eq!( + search(&MATCHED_ROUTE, &default_headers).unwrap(), + "/foo/:userid/...".to_string() + ); + assert_eq!( + search(&BASE_PATH, &default_headers).unwrap(), + "/".to_string() + ); + assert_eq!( + search(&RAW_COMPONENT_ROUTE, &default_headers).unwrap(), + "/foo/:userid/...".to_string() + ); + 
assert_eq!( + search(&COMPONENT_ROUTE, &default_headers).unwrap(), + "/foo/:userid".to_string() + ); + assert_eq!( + search(&CLIENT_ADDR, &default_headers).unwrap(), + "127.0.0.1:8777".to_string() + ); + + assert_eq!( + search( + &["SPIN_PATH_MATCH_USERID", "X_PATH_MATCH_USERID"], + &default_headers + ) + .unwrap(), + "42".to_string() + ); + + Ok(()) + } + + #[test] + fn forbidden_headers_are_removed() { + let mut req = Request::get("http://test.spin.internal") + .header("Host", "test.spin.internal") + .header("accept", "text/plain") + .body(Default::default()) + .unwrap(); + + strip_forbidden_headers(&mut req); + + assert_eq!(1, req.headers().len()); + assert!(req.headers().get("Host").is_none()); + + let mut req = Request::get("http://test.spin.internal") + .header("Host", "test.spin.internal:1234") + .header("accept", "text/plain") + .body(Default::default()) + .unwrap(); + + strip_forbidden_headers(&mut req); + + assert_eq!(1, req.headers().len()); + assert!(req.headers().get("Host").is_none()); + } + + #[test] + fn non_forbidden_headers_are_not_removed() { + let mut req = Request::get("http://test.example.com") + .header("Host", "test.example.org") + .header("accept", "text/plain") + .body(Default::default()) + .unwrap(); + + strip_forbidden_headers(&mut req); + + assert_eq!(2, req.headers().len()); + assert!(req.headers().get("Host").is_some()); + } + + fn search(keys: &[&str; 2], headers: &[([String; 2], String)]) -> Option { + let mut res: Option = None; + for (k, v) in headers { + if k[0] == keys[0] && k[1] == keys[1] { + res = Some(v.clone()); + } + } + + res + } +} diff --git a/crates/trigger-http/src/instrument.rs b/crates/trigger-http/src/instrument.rs index 2e74d97aaa..89c5aa69d5 100644 --- a/crates/trigger-http/src/instrument.rs +++ b/crates/trigger-http/src/instrument.rs @@ -1,7 +1,8 @@ use anyhow::Result; use http::Response; use tracing::Level; -use wasmtime_wasi_http::body::HyperIncomingBody; + +use crate::Body; /// Create a span for an HTTP request. macro_rules! http_span { @@ -18,10 +19,10 @@ macro_rules! http_span { "url.scheme" = $request.uri().scheme_str().unwrap_or(""), "client.address" = $request.headers().get("x-forwarded-for").and_then(|val| val.to_str().ok()), // Recorded later - "error.type" = Empty, - "http.response.status_code" = Empty, - "http.route" = Empty, - "otel.name" = Empty, + "error.type" = ::tracing::field::Empty, + "http.response.status_code" = ::tracing::field::Empty, + "http.route" = ::tracing::field::Empty, + "otel.name" = ::tracing::field::Empty, ) }; } @@ -30,12 +31,17 @@ pub(crate) use http_span; /// Finish setting attributes on the HTTP span. 
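The `http_span!` changes above spell out `::tracing::field::Empty` so the macro no longer depends on a `use` at the call site. For orientation, here is a minimal standalone sketch of the deferred-field pattern the macro relies on; the span name and values are illustrative, not taken from this PR:

```rust
use tracing::field::Empty;

// Declare fields as Empty when the span is created, then record them once the
// response status and matched route are known -- the same pattern http_span!
// and finalize_http_span use.
fn record_outcome(status: u16, route: &str) {
    let span = tracing::info_span!(
        "http_request", // illustrative name; the real macro derives its fields from the request
        "http.response.status_code" = Empty,
        "http.route" = Empty,
    );
    span.record("http.response.status_code", status);
    span.record("http.route", route);
}
```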
pub(crate) fn finalize_http_span( - response: Result>, + response: Result>, method: String, -) -> Result> { +) -> Result> { let span = tracing::Span::current(); match response { Ok(response) => { + tracing::info!( + "Request finished, sending response with status code {}", + response.status() + ); + let matched_route = response.extensions().get::(); // Set otel.name and http.route if let Some(MatchedRoute { route }) = matched_route { @@ -74,19 +80,16 @@ pub struct MatchedRoute { } impl MatchedRoute { - pub fn set_response_extension( - resp: &mut Response, - route: impl Into, - ) { + pub fn set_response_extension(resp: &mut Response, route: impl Into) { resp.extensions_mut().insert(MatchedRoute { route: route.into(), }); } pub fn with_response_extension( - mut resp: Response, + mut resp: Response, route: impl Into, - ) -> Response { + ) -> Response { Self::set_response_extension(&mut resp, route); resp } diff --git a/crates/trigger-http/src/lib.rs b/crates/trigger-http/src/lib.rs index c5c5f066c1..636bff940c 100644 --- a/crates/trigger-http/src/lib.rs +++ b/crates/trigger-http/src/lib.rs @@ -1,80 +1,36 @@ //! Implementation for the Spin HTTP engine. -mod handler; +mod headers; mod instrument; +mod outbound_http; +mod server; +mod spin; mod tls; mod wagi; +mod wasi; use std::{ - collections::HashMap, error::Error, - io::IsTerminal, net::{Ipv4Addr, SocketAddr, ToSocketAddrs}, path::PathBuf, - str::FromStr, sync::Arc, }; -use anyhow::{Context, Result}; -use async_trait::async_trait; +use anyhow::{bail, Context}; use clap::Args; -use http::{header::HOST, uri::Authority, uri::Scheme, HeaderValue, StatusCode, Uri}; -use http_body_util::BodyExt; -use hyper::{ - body::{Bytes, Incoming}, - server::conn::http1, - service::service_fn, - Request, Response, -}; -use hyper_util::rt::tokio::TokioIo; -use instrument::{finalize_http_span, http_span}; -use spin_app::{AppComponent, APP_DESCRIPTION_KEY}; -use spin_core::{Engine, OutboundWasiHttpHandler}; -use spin_http::{ - app_info::AppInfo, - body, - config::{HttpExecutorType, HttpTriggerConfig}, - routes::{RouteMatch, Router}, -}; -use spin_outbound_networking::{ - is_service_chaining_host, parse_service_chaining_target, AllowedHostsConfig, OutboundUrl, -}; -use spin_trigger::{ParsedClientTlsOpts, TriggerAppEngine, TriggerExecutor, TriggerInstancePre}; -use tokio::{ - io::{AsyncRead, AsyncWrite}, - net::{TcpListener, TcpStream}, - task, - time::timeout, -}; +use serde::Deserialize; +use spin_app::App; +use spin_trigger::Trigger; +use wasmtime_wasi_http::bindings::wasi::http::types::ErrorCode; -use tracing::{field::Empty, log, Instrument}; -use wasmtime_wasi_http::{ - bindings::wasi::http::{types, types::ErrorCode}, - body::{HyperIncomingBody as Body, HyperOutgoingBody}, - types::HostFutureIncomingResponse, - HttpError, HttpResult, -}; - -use crate::{ - handler::{HandlerType, HttpHandlerExecutor}, - instrument::{instrument_error, MatchedRoute}, - wagi::WagiHttpExecutor, -}; +pub use server::HttpServer; pub use tls::TlsConfig; -pub(crate) type RuntimeData = HttpRuntimeData; -pub(crate) type Store = spin_core::Store; +pub(crate) use wasmtime_wasi_http::body::HyperIncomingBody as Body; -/// The Spin HTTP trigger. -pub struct HttpTrigger { - engine: Arc>, - router: Router, - // Base path for component routes. 
- base: String, - // Component ID -> component trigger config - component_trigger_configs: HashMap, -} +pub(crate) type TriggerApp = spin_trigger::TriggerApp; +pub(crate) type TriggerInstanceBuilder<'a> = spin_trigger::TriggerInstanceBuilder<'a, HttpTrigger>; #[derive(Args)] pub struct CliArgs { @@ -104,85 +60,30 @@ impl CliArgs { } } -pub enum HttpInstancePre { - Component(spin_core::InstancePre, HandlerType), - Module(spin_core::ModuleInstancePre), -} - -pub enum HttpInstance { - Component(spin_core::Instance, HandlerType), - Module(spin_core::ModuleInstance), +/// The Spin HTTP trigger. +pub struct HttpTrigger { + /// The address the server should listen on. + /// + /// Note that this might not be the actual socket address that ends up being bound to. + /// If the port is set to 0, the actual address will be determined by the OS. + listen_addr: SocketAddr, + tls_config: Option, } -#[async_trait] -impl TriggerExecutor for HttpTrigger { - const TRIGGER_TYPE: &'static str = "http"; - type RuntimeData = RuntimeData; - type TriggerConfig = HttpTriggerConfig; - type RunConfig = CliArgs; - type InstancePre = HttpInstancePre; +impl Trigger for HttpTrigger { + const TYPE: &'static str = "http"; - async fn new(engine: TriggerAppEngine) -> Result { - let mut base = engine - .trigger_metadata::()? - .unwrap_or_default() - .base; + type CliArgs = CliArgs; + type InstanceState = (); - if !base.starts_with('/') { - base = format!("/{base}"); - } - - let component_routes = engine - .trigger_configs() - .map(|(_, config)| (config.component.as_str(), &config.route)); - - let (router, duplicate_routes) = Router::build(&base, component_routes)?; - - if !duplicate_routes.is_empty() { - log::error!("The following component routes are duplicates and will never be used:"); - for dup in &duplicate_routes { - log::error!( - " {}: {} (duplicate of {})", - dup.replaced_id, - dup.route(), - dup.effective_id, - ); - } - } - - log::trace!( - "Constructed router for application {}: {:?}", - engine.app_name, - router.routes().collect::>() - ); - - let component_trigger_configs = engine - .trigger_configs() - .map(|(_, config)| (config.component.clone(), config.clone())) - .collect(); - - Ok(Self { - engine: Arc::new(engine), - router, - base, - component_trigger_configs, - }) + fn new(cli_args: Self::CliArgs, app: &spin_app::App) -> anyhow::Result { + Self::new(app, cli_args.address, cli_args.into_tls_config()) } - async fn run(self, config: Self::RunConfig) -> Result<()> { - let listen_addr = config.address; - let tls = config.into_tls_config(); - - let listener = TcpListener::bind(listen_addr) - .await - .with_context(|| format!("Unable to listen on {}", listen_addr))?; + async fn run(self, trigger_app: TriggerApp) -> anyhow::Result<()> { + let server = self.into_server(trigger_app)?; - let self_ = Arc::new(self); - if let Some(tls) = tls { - self_.serve_tls(listener, listen_addr, tls).await? - } else { - self_.serve(listener, listen_addr).await? 
- }; + server.serve().await?; Ok(()) } @@ -192,268 +93,42 @@ impl TriggerExecutor for HttpTrigger { } } -#[async_trait] -impl TriggerInstancePre for HttpInstancePre { - type Instance = HttpInstance; - - async fn instantiate_pre( - engine: &Engine, - component: &AppComponent, - config: &HttpTriggerConfig, - ) -> Result { - if let Some(HttpExecutorType::Wagi(_)) = &config.executor { - let module = component.load_module(engine).await?; - Ok(HttpInstancePre::Module( - engine.module_instantiate_pre(&module)?, - )) - } else { - let comp = component.load_component(engine).await?; - let handler_ty = HandlerType::from_component(engine, &comp)?; - Ok(HttpInstancePre::Component( - engine.instantiate_pre(&comp)?, - handler_ty, - )) - } - } - - async fn instantiate(&self, store: &mut Store) -> Result { - match self { - HttpInstancePre::Component(pre, ty) => Ok(HttpInstance::Component( - pre.instantiate_async(store).await?, - *ty, - )), - HttpInstancePre::Module(pre) => { - pre.instantiate_async(store).await.map(HttpInstance::Module) - } - } - } -} - impl HttpTrigger { - /// Handles incoming requests using an HTTP executor. - pub async fn handle( - &self, - mut req: Request, - scheme: Scheme, - server_addr: SocketAddr, - client_addr: SocketAddr, - ) -> Result> { - set_req_uri(&mut req, scheme, server_addr)?; - strip_forbidden_headers(&mut req); - - spin_telemetry::extract_trace_context(&req); - - log::info!( - "Processing request for application {} on URI {}", - &self.engine.app_name, - req.uri() - ); - - let path = req.uri().path().to_string(); - - // Handle well-known spin paths - if let Some(well_known) = path.strip_prefix(spin_http::WELL_KNOWN_PREFIX) { - return match well_known { - "health" => Ok(MatchedRoute::with_response_extension( - Response::new(body::full(Bytes::from_static(b"OK"))), - path, - )), - "info" => self.app_info(path), - _ => Self::not_found(NotFoundRouteKind::WellKnown), - }; - } - - // Route to app component - match self.router.route(&path) { - Ok(route_match) => { - spin_telemetry::metrics::monotonic_counter!( - spin.request_count = 1, - trigger_type = "http", - app_id = &self.engine.app_name, - component_id = route_match.component_id() - ); - - let component_id = route_match.component_id(); - - let trigger = self.component_trigger_configs.get(component_id).unwrap(); - - let executor = trigger.executor.as_ref().unwrap_or(&HttpExecutorType::Http); - - let res = match executor { - HttpExecutorType::Http => { - HttpHandlerExecutor - .execute( - self.engine.clone(), - &self.base, - &route_match, - req, - client_addr, - ) - .await - } - HttpExecutorType::Wagi(wagi_config) => { - let executor = WagiHttpExecutor { - wagi_config: wagi_config.clone(), - }; - executor - .execute( - self.engine.clone(), - &self.base, - &route_match, - req, - client_addr, - ) - .await - } - }; - match res { - Ok(res) => Ok(MatchedRoute::with_response_extension( - res, - route_match.raw_route(), - )), - Err(e) => { - log::error!("Error processing request: {:?}", e); - instrument_error(&e); - Self::internal_error(None, route_match.raw_route()) - } - } - } - Err(_) => Self::not_found(NotFoundRouteKind::Normal(path.to_string())), - } - } - - /// Returns spin status information. 
- fn app_info(&self, route: String) -> Result> { - let info = AppInfo::new(self.engine.app()); - let body = serde_json::to_vec_pretty(&info)?; - Ok(MatchedRoute::with_response_extension( - Response::builder() - .header("content-type", "application/json") - .body(body::full(body.into()))?, - route, - )) - } - - /// Creates an HTTP 500 response. - fn internal_error(body: Option<&str>, route: impl Into) -> Result> { - let body = match body { - Some(body) => body::full(Bytes::copy_from_slice(body.as_bytes())), - None => body::empty(), - }; - - Ok(MatchedRoute::with_response_extension( - Response::builder() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .body(body)?, - route, - )) - } - - /// Creates an HTTP 404 response. - fn not_found(kind: NotFoundRouteKind) -> Result> { - use std::sync::atomic::{AtomicBool, Ordering}; - static SHOWN_GENERIC_404_WARNING: AtomicBool = AtomicBool::new(false); - if let NotFoundRouteKind::Normal(route) = kind { - if !SHOWN_GENERIC_404_WARNING.fetch_or(true, Ordering::Relaxed) - && std::io::stderr().is_terminal() - { - terminal::warn!("Request to {route} matched no pattern, and received a generic 404 response. To serve a more informative 404 page, add a catch-all (/...) route."); - } - } - Ok(Response::builder() - .status(StatusCode::NOT_FOUND) - .body(body::empty())?) - } - - fn serve_connection( - self: Arc, - stream: S, - server_addr: SocketAddr, - client_addr: SocketAddr, - ) { - task::spawn(async move { - if let Err(e) = http1::Builder::new() - .keep_alive(true) - .serve_connection( - TokioIo::new(stream), - service_fn(move |request| { - self.clone() - .instrumented_service_fn(server_addr, client_addr, request) - }), - ) - .await - { - log::warn!("{e:?}"); - } - }); - } + /// Create a new `HttpTrigger`. + pub fn new( + app: &spin_app::App, + listen_addr: SocketAddr, + tls_config: Option, + ) -> anyhow::Result { + Self::validate_app(app)?; - async fn instrumented_service_fn( - self: Arc, - server_addr: SocketAddr, - client_addr: SocketAddr, - request: Request, - ) -> Result> { - let span = http_span!(request, client_addr); - let method = request.method().to_string(); - async { - let result = self - .handle( - request.map(|body: Incoming| { - body.map_err(wasmtime_wasi_http::hyper_response_error) - .boxed() - }), - Scheme::HTTP, - server_addr, - client_addr, - ) - .await; - finalize_http_span(result, method) - } - .instrument(span) - .await + Ok(Self { + listen_addr, + tls_config, + }) } - async fn serve(self: Arc, listener: TcpListener, listen_addr: SocketAddr) -> Result<()> { - self.print_startup_msgs("http", &listener)?; - loop { - let (stream, client_addr) = listener.accept().await?; - self.clone() - .serve_connection(stream, listen_addr, client_addr); - } + /// Turn this [`HttpTrigger`] into an [`HttpServer`]. 
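Taken together, the new pieces compose into a short lifecycle: construct the trigger, turn it into a server, then serve. The sketch below is hedged: building `spin_app::App` and `TriggerApp` happens in the host outside this crate and is elided, and the listen address is a placeholder.

```rust
// Assumes `app: &spin_app::App` and `trigger_app: TriggerApp` were prepared by
// the host; only the trigger-http calls come from this diff.
async fn run_http_trigger(
    app: &spin_app::App,
    trigger_app: TriggerApp,
) -> anyhow::Result<()> {
    let listen_addr: std::net::SocketAddr = "127.0.0.1:3000".parse()?;
    let trigger = HttpTrigger::new(app, listen_addr, None)?; // None: plain HTTP, no TLS
    let server = trigger.into_server(trigger_app)?; // Arc<HttpServer>
    server.serve().await
}
```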
+ pub fn into_server(self, trigger_app: TriggerApp) -> anyhow::Result> { + let Self { + listen_addr, + tls_config, + } = self; + let server = Arc::new(HttpServer::new(listen_addr, tls_config, trigger_app)?); + Ok(server) } - async fn serve_tls( - self: Arc, - listener: TcpListener, - listen_addr: SocketAddr, - tls: TlsConfig, - ) -> Result<()> { - let acceptor = tls.server_config()?; - self.print_startup_msgs("https", &listener)?; - - loop { - let (stream, addr) = listener.accept().await?; - match acceptor.accept(stream).await { - Ok(stream) => self.clone().serve_connection(stream, listen_addr, addr), - Err(err) => tracing::error!(?err, "Failed to start TLS session"), - } + fn validate_app(app: &App) -> anyhow::Result<()> { + #[derive(Deserialize)] + #[serde(deny_unknown_fields)] + struct TriggerMetadata { + base: Option, } - } - - fn print_startup_msgs(&self, scheme: &str, listener: &TcpListener) -> Result<()> { - let local_addr = listener.local_addr()?; - let base_url = format!("{scheme}://{local_addr:?}"); - terminal::step!("\nServing", "{}", base_url); - log::info!("Serving {}", base_url); - - println!("Available Routes:"); - for (route, component_id) in self.router.routes() { - println!(" {}: {}{}", component_id, base_url, route); - if let Some(component) = self.engine.app().get_component(component_id) { - if let Some(description) = component.get_metadata(APP_DESCRIPTION_KEY)? { - println!(" {}", description); - } + if let Some(TriggerMetadata { base: Some(base) }) = app.get_trigger_metadata("http")? { + if base == "/" { + tracing::warn!("This application has the deprecated trigger 'base' set to the default value '/'. This may be an error in the future!"); + } else { + bail!("This application is using the deprecated trigger 'base' field. The base must be prepended to each [[trigger.http]]'s 'route'.") } } Ok(()) @@ -473,502 +148,12 @@ fn parse_listen_addr(addr: &str) -> anyhow::Result { addrs.into_iter().next().context("couldn't resolve address") } -/// The incoming request's scheme and authority -/// -/// The incoming request's URI is relative to the server, so we need to set the scheme and authority -fn set_req_uri(req: &mut Request, scheme: Scheme, addr: SocketAddr) -> Result<()> { - let uri = req.uri().clone(); - let mut parts = uri.into_parts(); - let authority = format!("{}:{}", addr.ip(), addr.port()).parse().unwrap(); - parts.scheme = Some(scheme); - parts.authority = Some(authority); - *req.uri_mut() = Uri::from_parts(parts).unwrap(); - Ok(()) -} - -fn strip_forbidden_headers(req: &mut Request) { - let headers = req.headers_mut(); - if let Some(host_header) = headers.get("Host") { - if let Ok(host) = host_header.to_str() { - if is_service_chaining_host(host) { - headers.remove("Host"); - } - } - } -} - -// We need to make the following pieces of information available to both executors. -// While the values we set are identical, the way they are passed to the -// modules is going to be different, so each executor must must use the info -// in its standardized way (environment variables for the Wagi executor, and custom headers -// for the Spin HTTP executor). 
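The comment block removed below describes the dual naming that now lives in `headers.rs` at the top of this diff: each value carries a `SPIN_*` name for the Spin executor and an `X_*`/CGI-style name for Wagi. A small self-contained sketch of how the two slots are consumed, reusing the `prepare_header_key` helper shown earlier:

```rust
// Index 0 is lower-cased into a request header for the Spin executor;
// index 1 is passed through verbatim as a Wagi environment variable
// (wagi.rs later in this diff inserts `keys[1]` directly).
fn prepare_header_key(key: &str) -> String {
    key.replace('_', "-").to_ascii_lowercase()
}

fn main() {
    let full_url = ["SPIN_FULL_URL", "X_FULL_URL"];
    assert_eq!(prepare_header_key(full_url[0]), "spin-full-url");
    assert_eq!(full_url[1], "X_FULL_URL");
}
```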
-const FULL_URL: [&str; 2] = ["SPIN_FULL_URL", "X_FULL_URL"]; -const PATH_INFO: [&str; 2] = ["SPIN_PATH_INFO", "PATH_INFO"]; -const MATCHED_ROUTE: [&str; 2] = ["SPIN_MATCHED_ROUTE", "X_MATCHED_ROUTE"]; -const COMPONENT_ROUTE: [&str; 2] = ["SPIN_COMPONENT_ROUTE", "X_COMPONENT_ROUTE"]; -const RAW_COMPONENT_ROUTE: [&str; 2] = ["SPIN_RAW_COMPONENT_ROUTE", "X_RAW_COMPONENT_ROUTE"]; -const BASE_PATH: [&str; 2] = ["SPIN_BASE_PATH", "X_BASE_PATH"]; -const CLIENT_ADDR: [&str; 2] = ["SPIN_CLIENT_ADDR", "X_CLIENT_ADDR"]; - -pub(crate) fn compute_default_headers( - uri: &Uri, - base: &str, - host: &str, - route_match: &RouteMatch, - client_addr: SocketAddr, -) -> Result> { - fn owned(strs: &[&'static str; 2]) -> [String; 2] { - [strs[0].to_owned(), strs[1].to_owned()] - } - - let owned_full_url: [String; 2] = owned(&FULL_URL); - let owned_path_info: [String; 2] = owned(&PATH_INFO); - let owned_matched_route: [String; 2] = owned(&MATCHED_ROUTE); - let owned_component_route: [String; 2] = owned(&COMPONENT_ROUTE); - let owned_raw_component_route: [String; 2] = owned(&RAW_COMPONENT_ROUTE); - let owned_base_path: [String; 2] = owned(&BASE_PATH); - let owned_client_addr: [String; 2] = owned(&CLIENT_ADDR); - - let mut res = vec![]; - let abs_path = uri - .path_and_query() - .expect("cannot get path and query") - .as_str(); - - let path_info = route_match.trailing_wildcard(); - - let scheme = uri.scheme_str().unwrap_or("http"); - - let full_url = format!("{}://{}{}", scheme, host, abs_path); - - res.push((owned_path_info, path_info)); - res.push((owned_full_url, full_url)); - res.push((owned_matched_route, route_match.based_route().to_string())); - - res.push((owned_base_path, base.to_string())); - res.push(( - owned_raw_component_route, - route_match.raw_route().to_string(), - )); - res.push((owned_component_route, route_match.raw_route_or_prefix())); - res.push((owned_client_addr, client_addr.to_string())); - - for (wild_name, wild_value) in route_match.named_wildcards() { - let wild_header = format!("SPIN_PATH_MATCH_{}", wild_name.to_ascii_uppercase()); // TODO: safer - let wild_wagi_header = format!("X_PATH_MATCH_{}", wild_name.to_ascii_uppercase()); // TODO: safer - res.push(([wild_header, wild_wagi_header], wild_value.clone())); - } - - Ok(res) -} - -/// The HTTP executor trait. -/// All HTTP executors must implement this trait. 
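One reason `async_trait` drops out of the imports: the old executor trait removed below needed the macro, while the replacement `HttpExecutor` in `server.rs` (later in this diff) returns `impl Future` directly and its implementors write plain `async fn`. A stripped-down illustration of that pattern with placeholder types rather than the real request/response types:

```rust
use std::future::Future;

// The trait declares a return-position `impl Future`; implementations may
// still write `async fn`, which desugars to the same shape.
trait Executor: Clone + Send + Sync + 'static {
    fn execute(&self, req: String) -> impl Future<Output = anyhow::Result<String>>;
}

#[derive(Clone)]
struct Echo;

impl Executor for Echo {
    async fn execute(&self, req: String) -> anyhow::Result<String> {
        Ok(req)
    }
}
```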
-#[async_trait] -pub(crate) trait HttpExecutor: Clone + Send + Sync + 'static { - async fn execute( - &self, - engine: Arc>, - base: &str, - route_match: &RouteMatch, - req: Request, - client_addr: SocketAddr, - ) -> Result>; -} - -#[derive(Clone)] -struct ChainedRequestHandler { - engine: Arc>, - executor: HttpHandlerExecutor, -} - -#[derive(Default)] -pub struct HttpRuntimeData { - origin: Option, - chained_handler: Option, - // Optional mapping of authority and TLS options for the current component - client_tls_opts: Option>, - /// The hosts this app is allowed to make outbound requests to - allowed_hosts: AllowedHostsConfig, -} - -impl HttpRuntimeData { - fn chain_request( - data: &mut spin_core::Data, - request: Request, - config: wasmtime_wasi_http::types::OutgoingRequestConfig, - component_id: String, - ) -> HttpResult { - use wasmtime_wasi_http::types::IncomingResponse; - - let this = data.as_ref(); - - let chained_handler = - this.chained_handler - .clone() - .ok_or(HttpError::trap(wasmtime::Error::msg( - "Internal error: internal request chaining not prepared (engine not assigned)", - )))?; - - let engine = chained_handler.engine; - let handler = chained_handler.executor; - - let base = "/"; - let route_match = RouteMatch::synthetic(&component_id, request.uri().path()); - - let client_addr = std::net::SocketAddr::from_str("0.0.0.0:0").unwrap(); - - let between_bytes_timeout = config.between_bytes_timeout; - - let resp_fut = async move { - match handler - .execute(engine.clone(), base, &route_match, request, client_addr) - .await - { - Ok(resp) => Ok(Ok(IncomingResponse { - resp, - between_bytes_timeout, - worker: None, - })), - Err(e) => Err(wasmtime::Error::msg(e)), - } - }; - - let handle = wasmtime_wasi::runtime::spawn(resp_fut); - Ok(HostFutureIncomingResponse::Pending(handle)) - } -} - -fn parse_chaining_target(request: &Request) -> Option { - parse_service_chaining_target(request.uri()) -} - -impl OutboundWasiHttpHandler for HttpRuntimeData { - fn send_request( - data: &mut spin_core::Data, - mut request: Request, - mut config: wasmtime_wasi_http::types::OutgoingRequestConfig, - ) -> HttpResult { - let this = data.as_mut(); - - let is_relative_url = request - .uri() - .authority() - .map(|a| a.host().trim() == "") - .unwrap_or_default(); - if is_relative_url { - // Origin must be set in the incoming http handler - let origin = this.origin.clone().unwrap(); - let path_and_query = request - .uri() - .path_and_query() - .map(|p| p.as_str()) - .unwrap_or("/"); - let uri: Uri = format!("{origin}{path_and_query}") - .parse() - // origin together with the path and query must be a valid URI - .unwrap(); - let host = format!("{}:{}", uri.host().unwrap(), uri.port().unwrap()); - let headers = request.headers_mut(); - headers.insert( - HOST, - HeaderValue::from_str(&host).map_err(|_| ErrorCode::HttpProtocolError)?, - ); - - config.use_tls = uri - .scheme() - .map(|s| s == &Scheme::HTTPS) - .unwrap_or_default(); - // We know that `uri` has an authority because we set it above - *request.uri_mut() = uri; - } - - let uri = request.uri(); - let uri_string = uri.to_string(); - let unallowed_relative = - is_relative_url && !this.allowed_hosts.allows_relative_url(&["http", "https"]); - let unallowed_absolute = !is_relative_url - && !this.allowed_hosts.allows( - &OutboundUrl::parse(uri_string, "https") - .map_err(|_| ErrorCode::HttpRequestUriInvalid)?, - ); - if unallowed_relative || unallowed_absolute { - tracing::error!("Destination not allowed: {}", request.uri()); - let host = if 
unallowed_absolute { - // Safe to unwrap because absolute urls have a host by definition. - let host = uri.authority().map(|a| a.host()).unwrap(); - let port = uri.authority().map(|a| a.port()).unwrap(); - let port = match port { - Some(port_str) => port_str.to_string(), - None => uri - .scheme() - .and_then(|s| (s == &Scheme::HTTP).then_some(80)) - .unwrap_or(443) - .to_string(), - }; - terminal::warn!( - "A component tried to make a HTTP request to non-allowed host '{host}'." - ); - let scheme = uri.scheme().unwrap_or(&Scheme::HTTPS); - format!("{scheme}://{host}:{port}") - } else { - terminal::warn!("A component tried to make a HTTP request to the same component but it does not have permission."); - "self".into() - }; - eprintln!("To allow requests, add 'allowed_outbound_hosts = [\"{}\"]' to the manifest component section.", host); - return Err(ErrorCode::HttpRequestDenied.into()); - } - - if let Some(component_id) = parse_chaining_target(&request) { - return Self::chain_request(data, request, config, component_id); - } - - let current_span = tracing::Span::current(); - let uri = request.uri(); - if let Some(authority) = uri.authority() { - current_span.record("server.address", authority.host()); - if let Some(port) = authority.port() { - current_span.record("server.port", port.as_u16()); - } - } - - let client_tls_opts = (data.as_ref()).client_tls_opts.clone(); - - // TODO: This is a temporary workaround to make sure that outbound task is instrumented. - // Once Wasmtime gives us the ability to do the spawn ourselves we can just call .instrument - // and won't have to do this workaround. - let response_handle = async move { - let res = send_request_handler(request, config, client_tls_opts).await; - if let Ok(res) = &res { - tracing::Span::current() - .record("http.response.status_code", res.resp.status().as_u16()); - } - Ok(res) - } - .in_current_span(); - Ok(HostFutureIncomingResponse::Pending( - wasmtime_wasi::runtime::spawn(response_handle), - )) - } -} - #[derive(Debug, PartialEq)] enum NotFoundRouteKind { Normal(String), WellKnown, } -/// This is a fork of wasmtime_wasi_http::default_send_request_handler function -/// forked from bytecodealliance/wasmtime commit-sha 29a76b68200fcfa69c8fb18ce6c850754279a05b -/// This fork provides the ability to configure client cert auth for mTLS -pub async fn send_request_handler( - mut request: hyper::Request, - wasmtime_wasi_http::types::OutgoingRequestConfig { - use_tls, - connect_timeout, - first_byte_timeout, - between_bytes_timeout, - }: wasmtime_wasi_http::types::OutgoingRequestConfig, - client_tls_opts: Option>, -) -> Result { - let authority_str = if let Some(authority) = request.uri().authority() { - if authority.port().is_some() { - authority.to_string() - } else { - let port = if use_tls { 443 } else { 80 }; - format!("{}:{port}", authority) - } - } else { - return Err(types::ErrorCode::HttpRequestUriInvalid); - }; - - let authority = &authority_str.parse::().unwrap(); - - let tcp_stream = timeout(connect_timeout, TcpStream::connect(&authority_str)) - .await - .map_err(|_| types::ErrorCode::ConnectionTimeout)? 
- .map_err(|e| match e.kind() { - std::io::ErrorKind::AddrNotAvailable => { - dns_error("address not available".to_string(), 0) - } - - _ => { - if e.to_string() - .starts_with("failed to lookup address information") - { - dns_error("address not available".to_string(), 0) - } else { - types::ErrorCode::ConnectionRefused - } - } - })?; - - let (mut sender, worker) = if use_tls { - #[cfg(any(target_arch = "riscv64", target_arch = "s390x"))] - { - return Err( - wasmtime_wasi_http::bindings::http::types::ErrorCode::InternalError(Some( - "unsupported architecture for SSL".to_string(), - )), - ); - } - - #[cfg(not(any(target_arch = "riscv64", target_arch = "s390x")))] - { - use rustls::pki_types::ServerName; - let config = - get_client_tls_config_for_authority(authority, client_tls_opts).map_err(|e| { - wasmtime_wasi_http::bindings::http::types::ErrorCode::InternalError(Some( - format!( - "failed to configure client tls config for authority. error: {}", - e - ), - )) - })?; - let connector = tokio_rustls::TlsConnector::from(std::sync::Arc::new(config)); - let mut parts = authority_str.split(':'); - let host = parts.next().unwrap_or(&authority_str); - let domain = ServerName::try_from(host) - .map_err(|e| { - tracing::warn!("dns lookup error: {e:?}"); - dns_error("invalid dns name".to_string(), 0) - })? - .to_owned(); - let stream = connector.connect(domain, tcp_stream).await.map_err(|e| { - tracing::warn!("tls protocol error: {e:?}"); - types::ErrorCode::TlsProtocolError - })?; - let stream = TokioIo::new(stream); - - let (sender, conn) = timeout( - connect_timeout, - hyper::client::conn::http1::handshake(stream), - ) - .await - .map_err(|_| types::ErrorCode::ConnectionTimeout)? - .map_err(hyper_request_error)?; - - let worker = wasmtime_wasi::runtime::spawn(async move { - match conn.await { - Ok(()) => {} - // TODO: shouldn't throw away this error and ideally should - // surface somewhere. - Err(e) => tracing::warn!("dropping error {e}"), - } - }); - - (sender, worker) - } - } else { - let tcp_stream = TokioIo::new(tcp_stream); - let (sender, conn) = timeout( - connect_timeout, - // TODO: we should plumb the builder through the http context, and use it here - hyper::client::conn::http1::handshake(tcp_stream), - ) - .await - .map_err(|_| types::ErrorCode::ConnectionTimeout)? - .map_err(hyper_request_error)?; - - let worker = wasmtime_wasi::runtime::spawn(async move { - match conn.await { - Ok(()) => {} - // TODO: same as above, shouldn't throw this error away. - Err(e) => tracing::warn!("dropping error {e}"), - } - }); - - (sender, worker) - }; - - // at this point, the request contains the scheme and the authority, but - // the http packet should only include those if addressing a proxy, so - // remove them here, since SendRequest::send_request does not do it for us - *request.uri_mut() = http::Uri::builder() - .path_and_query( - request - .uri() - .path_and_query() - .map(|p| p.as_str()) - .unwrap_or("/"), - ) - .build() - .expect("comes from valid request"); - - let resp = timeout(first_byte_timeout, sender.send_request(request)) - .await - .map_err(|_| types::ErrorCode::ConnectionReadTimeout)? - .map_err(hyper_request_error)? 
- .map(|body| body.map_err(hyper_request_error).boxed()); - - Ok(wasmtime_wasi_http::types::IncomingResponse { - resp, - worker: Some(worker), - between_bytes_timeout, - }) -} - -fn get_client_tls_config_for_authority( - authority: &Authority, - client_tls_opts: Option>, -) -> Result { - // derived from https://github.com/tokio-rs/tls/blob/master/tokio-rustls/examples/client/src/main.rs - let ca_webpki_roots = rustls::RootCertStore { - roots: webpki_roots::TLS_SERVER_ROOTS.into(), - }; - - #[allow(clippy::mutable_key_type)] - let client_tls_opts = match client_tls_opts { - Some(opts) => opts, - _ => { - return Ok(rustls::ClientConfig::builder() - .with_root_certificates(ca_webpki_roots) - .with_no_client_auth()); - } - }; - - let client_tls_opts_for_host = match client_tls_opts.get(authority) { - Some(opts) => opts, - _ => { - return Ok(rustls::ClientConfig::builder() - .with_root_certificates(ca_webpki_roots) - .with_no_client_auth()); - } - }; - - let mut root_cert_store = if client_tls_opts_for_host.ca_webpki_roots { - ca_webpki_roots - } else { - rustls::RootCertStore::empty() - }; - - if let Some(custom_root_ca) = &client_tls_opts_for_host.custom_root_ca { - for cer in custom_root_ca { - match root_cert_store.add(cer.to_owned()) { - Ok(_) => {} - Err(e) => { - return Err(anyhow::anyhow!( - "failed to add custom cert to root_cert_store. error: {}", - e - )); - } - } - } - } - - match ( - &client_tls_opts_for_host.cert_chain, - &client_tls_opts_for_host.private_key, - ) { - (Some(cert_chain), Some(private_key)) => Ok(rustls::ClientConfig::builder() - .with_root_certificates(root_cert_store) - .with_client_auth_cert(cert_chain.to_owned(), private_key.clone_key())?), - _ => Ok(rustls::ClientConfig::builder() - .with_root_certificates(root_cert_store) - .with_no_client_auth()), - } -} - /// Translate a [`hyper::Error`] to a wasi-http `ErrorCode` in the context of a request. pub fn hyper_request_error(err: hyper::Error) -> ErrorCode { // If there's a source, we might be able to extract a wasi-http error from it. 
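The mTLS plumbing deleted here (`send_request_handler`, `get_client_tls_config_for_authority`) moves out of this crate. As a reference for the rustls calls involved, the snippet below keeps only the default no-client-auth fallback, simplified from the deleted code:

```rust
use std::sync::Arc;

// Simplified from the deleted get_client_tls_config_for_authority fallback:
// trust the webpki roots and present no client certificate.
fn default_client_tls_config() -> Arc<rustls::ClientConfig> {
    let roots = rustls::RootCertStore {
        roots: webpki_roots::TLS_SERVER_ROOTS.into(),
    };
    Arc::new(
        rustls::ClientConfig::builder()
            .with_root_certificates(roots)
            .with_no_client_auth(),
    )
}
```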
@@ -992,249 +177,12 @@ pub fn dns_error(rcode: String, info_code: u16) -> ErrorCode { #[cfg(test)] mod tests { - use anyhow::Result; - use super::*; - #[test] - fn test_default_headers_with_base_path() -> Result<()> { - let scheme = "https"; - let host = "fermyon.dev"; - let base = "/base"; - let trigger_route = "/foo/..."; - let component_path = "/foo"; - let path_info = "/bar"; - let client_addr: SocketAddr = "127.0.0.1:8777".parse().unwrap(); - - let req_uri = format!( - "{}://{}{}{}{}?key1=value1&key2=value2", - scheme, host, base, component_path, path_info - ); - - let req = http::Request::builder() - .method("POST") - .uri(req_uri) - .body("")?; - - let (router, _) = Router::build(base, [("DUMMY", &trigger_route.into())])?; - let route_match = router.route("/base/foo/bar")?; - - let default_headers = - crate::compute_default_headers(req.uri(), base, host, &route_match, client_addr)?; - - assert_eq!( - search(&FULL_URL, &default_headers).unwrap(), - "https://fermyon.dev/base/foo/bar?key1=value1&key2=value2".to_string() - ); - assert_eq!( - search(&PATH_INFO, &default_headers).unwrap(), - "/bar".to_string() - ); - assert_eq!( - search(&MATCHED_ROUTE, &default_headers).unwrap(), - "/base/foo/...".to_string() - ); - assert_eq!( - search(&BASE_PATH, &default_headers).unwrap(), - "/base".to_string() - ); - assert_eq!( - search(&RAW_COMPONENT_ROUTE, &default_headers).unwrap(), - "/foo/...".to_string() - ); - assert_eq!( - search(&COMPONENT_ROUTE, &default_headers).unwrap(), - "/foo".to_string() - ); - assert_eq!( - search(&CLIENT_ADDR, &default_headers).unwrap(), - "127.0.0.1:8777".to_string() - ); - - Ok(()) - } - - #[test] - fn test_default_headers_without_base_path() -> Result<()> { - let scheme = "https"; - let host = "fermyon.dev"; - let base = "/"; - let trigger_route = "/foo/..."; - let component_path = "/foo"; - let path_info = "/bar"; - let client_addr: SocketAddr = "127.0.0.1:8777".parse().unwrap(); - - let req_uri = format!( - "{}://{}{}{}?key1=value1&key2=value2", - scheme, host, component_path, path_info - ); - - let req = http::Request::builder() - .method("POST") - .uri(req_uri) - .body("")?; - - let (router, _) = Router::build(base, [("DUMMY", &trigger_route.into())])?; - let route_match = router.route("/foo/bar")?; - - let default_headers = - crate::compute_default_headers(req.uri(), base, host, &route_match, client_addr)?; - - // TODO: we currently replace the scheme with HTTP. When TLS is supported, this should be fixed. 
- assert_eq!( - search(&FULL_URL, &default_headers).unwrap(), - "https://fermyon.dev/foo/bar?key1=value1&key2=value2".to_string() - ); - assert_eq!( - search(&PATH_INFO, &default_headers).unwrap(), - "/bar".to_string() - ); - assert_eq!( - search(&MATCHED_ROUTE, &default_headers).unwrap(), - "/foo/...".to_string() - ); - assert_eq!( - search(&BASE_PATH, &default_headers).unwrap(), - "/".to_string() - ); - assert_eq!( - search(&RAW_COMPONENT_ROUTE, &default_headers).unwrap(), - "/foo/...".to_string() - ); - assert_eq!( - search(&COMPONENT_ROUTE, &default_headers).unwrap(), - "/foo".to_string() - ); - assert_eq!( - search(&CLIENT_ADDR, &default_headers).unwrap(), - "127.0.0.1:8777".to_string() - ); - - Ok(()) - } - - #[test] - fn test_default_headers_with_named_wildcards() -> Result<()> { - let scheme = "https"; - let host = "fermyon.dev"; - let base = "/"; - let trigger_route = "/foo/:userid/..."; - let component_path = "/foo"; - let path_info = "/bar"; - let client_addr: SocketAddr = "127.0.0.1:8777".parse().unwrap(); - - let req_uri = format!( - "{}://{}{}/42{}?key1=value1&key2=value2", - scheme, host, component_path, path_info - ); - - let req = http::Request::builder() - .method("POST") - .uri(req_uri) - .body("")?; - - let (router, _) = Router::build(base, [("DUMMY", &trigger_route.into())])?; - let route_match = router.route("/foo/42/bar")?; - - let default_headers = - crate::compute_default_headers(req.uri(), base, host, &route_match, client_addr)?; - - // TODO: we currently replace the scheme with HTTP. When TLS is supported, this should be fixed. - assert_eq!( - search(&FULL_URL, &default_headers).unwrap(), - "https://fermyon.dev/foo/42/bar?key1=value1&key2=value2".to_string() - ); - assert_eq!( - search(&PATH_INFO, &default_headers).unwrap(), - "/bar".to_string() - ); - assert_eq!( - search(&MATCHED_ROUTE, &default_headers).unwrap(), - "/foo/:userid/...".to_string() - ); - assert_eq!( - search(&BASE_PATH, &default_headers).unwrap(), - "/".to_string() - ); - assert_eq!( - search(&RAW_COMPONENT_ROUTE, &default_headers).unwrap(), - "/foo/:userid/...".to_string() - ); - assert_eq!( - search(&COMPONENT_ROUTE, &default_headers).unwrap(), - "/foo/:userid".to_string() - ); - assert_eq!( - search(&CLIENT_ADDR, &default_headers).unwrap(), - "127.0.0.1:8777".to_string() - ); - - assert_eq!( - search( - &["SPIN_PATH_MATCH_USERID", "X_PATH_MATCH_USERID"], - &default_headers - ) - .unwrap(), - "42".to_string() - ); - - Ok(()) - } - - fn search(keys: &[&str; 2], headers: &[([String; 2], String)]) -> Option { - let mut res: Option = None; - for (k, v) in headers { - if k[0] == keys[0] && k[1] == keys[1] { - res = Some(v.clone()); - } - } - - res - } - #[test] fn parse_listen_addr_prefers_ipv4() { let addr = parse_listen_addr("localhost:12345").unwrap(); assert_eq!(addr.ip(), Ipv4Addr::LOCALHOST); assert_eq!(addr.port(), 12345); } - - #[test] - fn forbidden_headers_are_removed() { - let mut req = Request::get("http://test.spin.internal") - .header("Host", "test.spin.internal") - .header("accept", "text/plain") - .body(Default::default()) - .unwrap(); - - strip_forbidden_headers(&mut req); - - assert_eq!(1, req.headers().len()); - assert!(req.headers().get("Host").is_none()); - - let mut req = Request::get("http://test.spin.internal") - .header("Host", "test.spin.internal:1234") - .header("accept", "text/plain") - .body(Default::default()) - .unwrap(); - - strip_forbidden_headers(&mut req); - - assert_eq!(1, req.headers().len()); - assert!(req.headers().get("Host").is_none()); - } - - #[test] - fn 
non_forbidden_headers_are_not_removed() { - let mut req = Request::get("http://test.example.com") - .header("Host", "test.example.org") - .header("accept", "text/plain") - .body(Default::default()) - .unwrap(); - - strip_forbidden_headers(&mut req); - - assert_eq!(2, req.headers().len()); - assert!(req.headers().get("Host").is_some()); - } } diff --git a/crates/trigger-http/src/outbound_http.rs b/crates/trigger-http/src/outbound_http.rs new file mode 100644 index 0000000000..a722bc315d --- /dev/null +++ b/crates/trigger-http/src/outbound_http.rs @@ -0,0 +1,63 @@ +use std::{ + net::{IpAddr, Ipv4Addr, SocketAddr}, + sync::Arc, +}; + +use http::uri::Scheme; +use spin_factor_outbound_http::{ + HostFutureIncomingResponse, InterceptOutcome, OutgoingRequestConfig, Request, +}; +use spin_http::routes::RouteMatch; +use spin_outbound_networking::parse_service_chaining_target; +use wasmtime_wasi_http::types::IncomingResponse; + +use crate::HttpServer; + +/// An outbound HTTP interceptor that handles service chaining requests. +pub struct OutboundHttpInterceptor { + server: Arc, +} + +impl OutboundHttpInterceptor { + pub fn new(server: Arc) -> Self { + Self { server } + } +} + +const CHAINED_CLIENT_ADDR: SocketAddr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), 0); + +impl spin_factor_outbound_http::OutboundHttpInterceptor for OutboundHttpInterceptor { + fn intercept( + &self, + request: &mut Request, + config: &mut OutgoingRequestConfig, + ) -> InterceptOutcome { + let uri = request.uri(); + + // Handle service chaining requests + if let Some(component_id) = parse_service_chaining_target(uri) { + // TODO: look at the rest of chain_request + let route_match = RouteMatch::synthetic(&component_id, uri.path()); + let req = std::mem::take(request); + let between_bytes_timeout = config.between_bytes_timeout; + let server = self.server.clone(); + let resp_fut = async move { + match server + .handle_trigger_route(req, route_match, Scheme::HTTP, CHAINED_CLIENT_ADDR) + .await + { + Ok(resp) => Ok(Ok(IncomingResponse { + resp, + between_bytes_timeout, + worker: None, + })), + Err(e) => Err(wasmtime::Error::msg(e)), + } + }; + let resp = HostFutureIncomingResponse::pending(wasmtime_wasi::runtime::spawn(resp_fut)); + InterceptOutcome::Complete(Ok(resp)) + } else { + InterceptOutcome::Continue + } + } +} diff --git a/crates/trigger-http/src/server.rs b/crates/trigger-http/src/server.rs new file mode 100644 index 0000000000..b9ee135b77 --- /dev/null +++ b/crates/trigger-http/src/server.rs @@ -0,0 +1,514 @@ +use std::{collections::HashMap, future::Future, io::IsTerminal, net::SocketAddr, sync::Arc}; + +use anyhow::{bail, Context}; +use http::{ + uri::{Authority, Scheme}, + Request, Response, StatusCode, Uri, +}; +use http_body_util::BodyExt; +use hyper::{ + body::{Bytes, Incoming}, + server::conn::http1, + service::service_fn, +}; +use hyper_util::rt::TokioIo; +use spin_app::{APP_DESCRIPTION_KEY, APP_NAME_KEY}; +use spin_factor_outbound_http::SelfRequestOrigin; +use spin_http::{ + app_info::AppInfo, + body, + config::{HttpExecutorType, HttpTriggerConfig}, + routes::{RouteMatch, Router}, +}; +use tokio::{ + io::{AsyncRead, AsyncWrite}, + net::TcpListener, + task, +}; +use tracing::Instrument; +use wasmtime::component::Component; +use wasmtime_wasi_http::body::HyperOutgoingBody; + +use crate::{ + headers::strip_forbidden_headers, + instrument::{finalize_http_span, http_span, instrument_error, MatchedRoute}, + outbound_http::OutboundHttpInterceptor, + spin::SpinHttpExecutor, + wagi::WagiHttpExecutor, + 
wasi::WasiHttpExecutor, + Body, NotFoundRouteKind, TlsConfig, TriggerApp, TriggerInstanceBuilder, +}; + +/// An HTTP server which runs Spin apps. +pub struct HttpServer { + /// The address the server is listening on. + listen_addr: SocketAddr, + /// The TLS configuration for the server. + tls_config: Option, + /// Request router. + router: Router, + /// The app being triggered. + trigger_app: TriggerApp, + // Component ID -> component trigger config + component_trigger_configs: HashMap, + // Component ID -> handler type + component_handler_types: HashMap, +} + +impl HttpServer { + /// Create a new [`HttpServer`]. + pub fn new( + listen_addr: SocketAddr, + tls_config: Option, + trigger_app: TriggerApp, + ) -> anyhow::Result { + // This needs to be a vec before building the router to handle duplicate routes + let component_trigger_configs = Vec::from_iter( + trigger_app + .app() + .trigger_configs::("http")? + .into_iter() + .map(|(_, config)| (config.component.clone(), config)), + ); + + // Build router + let component_routes = component_trigger_configs + .iter() + .map(|(component_id, config)| (component_id.as_str(), &config.route)); + let (router, duplicate_routes) = Router::build("/", component_routes)?; + if !duplicate_routes.is_empty() { + tracing::error!( + "The following component routes are duplicates and will never be used:" + ); + for dup in &duplicate_routes { + tracing::error!( + " {}: {} (duplicate of {})", + dup.replaced_id, + dup.route(), + dup.effective_id, + ); + } + } + tracing::trace!( + "Constructed router: {:?}", + router.routes().collect::>() + ); + + // Now that router is built we can merge duplicate routes by component + let component_trigger_configs = HashMap::from_iter(component_trigger_configs); + + let component_handler_types = component_trigger_configs + .iter() + .map(|(component_id, trigger_config)| { + let handler_type = match &trigger_config.executor { + None | Some(HttpExecutorType::Http) => { + let component = trigger_app.get_component(component_id)?; + HandlerType::from_component(trigger_app.engine(), component)? + } + Some(HttpExecutorType::Wagi(wagi_config)) => { + anyhow::ensure!( + wagi_config.entrypoint == "_start", + "Wagi component '{component_id}' cannot use deprecated 'entrypoint' field" + ); + HandlerType::Wagi + } + }; + Ok((component_id.clone(), handler_type)) + }) + .collect::>()?; + Ok(Self { + listen_addr, + tls_config, + router, + trigger_app, + component_trigger_configs, + component_handler_types, + }) + } + + /// Serve incoming requests over the provided [`TcpListener`]. 
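`HttpServer::new` below builds the router up front so duplicate routes are reported once at startup rather than at request time. Here is a sketch of that check in isolation, assuming the same `Router` API the tests at the top of this diff exercise; the component IDs and routes are placeholders:

```rust
use spin_http::routes::Router;

fn report_duplicate_routes() -> anyhow::Result<()> {
    // Two components claiming the same wildcard route; the router keeps one
    // and reports the other as a duplicate.
    let (_router, duplicates) = Router::build(
        "/",
        [
            ("component-a", &"/foo/...".into()),
            ("component-b", &"/foo/...".into()),
        ],
    )?;
    for dup in &duplicates {
        eprintln!(
            "{}: {} (duplicate of {})",
            dup.replaced_id,
            dup.route(),
            dup.effective_id
        );
    }
    Ok(())
}
```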
+ pub async fn serve(self: Arc) -> anyhow::Result<()> { + let listener = TcpListener::bind(self.listen_addr).await.with_context(|| { + format!( + "Unable to listen on {listen_addr}", + listen_addr = self.listen_addr + ) + })?; + if let Some(tls_config) = self.tls_config.clone() { + self.serve_https(listener, tls_config).await?; + } else { + self.serve_http(listener).await?; + } + Ok(()) + } + + async fn serve_http(self: Arc, listener: TcpListener) -> anyhow::Result<()> { + self.print_startup_msgs("http", &listener)?; + loop { + let (stream, client_addr) = listener.accept().await?; + self.clone() + .serve_connection(stream, Scheme::HTTP, client_addr); + } + } + + async fn serve_https( + self: Arc, + listener: TcpListener, + tls_config: TlsConfig, + ) -> anyhow::Result<()> { + self.print_startup_msgs("https", &listener)?; + let acceptor = tls_config.server_config()?; + loop { + let (stream, client_addr) = listener.accept().await?; + match acceptor.accept(stream).await { + Ok(stream) => self + .clone() + .serve_connection(stream, Scheme::HTTPS, client_addr), + Err(err) => tracing::error!(?err, "Failed to start TLS session"), + } + } + } + + /// Handles incoming requests using an HTTP executor. + /// + /// This method handles well known paths and routes requests to the handler when the router + /// matches the requests path. + pub async fn handle( + self: &Arc, + mut req: Request, + server_scheme: Scheme, + client_addr: SocketAddr, + ) -> anyhow::Result> { + strip_forbidden_headers(&mut req); + + spin_telemetry::extract_trace_context(&req); + + let path = req.uri().path().to_string(); + + tracing::info!("Processing request on path '{path}'"); + + // Handle well-known spin paths + if let Some(well_known) = path.strip_prefix(spin_http::WELL_KNOWN_PREFIX) { + return match well_known { + "health" => Ok(MatchedRoute::with_response_extension( + Response::new(body::full(Bytes::from_static(b"OK"))), + path, + )), + "info" => self.app_info(path), + _ => Self::not_found(NotFoundRouteKind::WellKnown), + }; + } + + match self.router.route(&path) { + Ok(route_match) => { + self.handle_trigger_route(req, route_match, server_scheme, client_addr) + .await + } + Err(_) => Self::not_found(NotFoundRouteKind::Normal(path.to_string())), + } + } + + /// Handles a successful route match. + pub async fn handle_trigger_route( + self: &Arc, + mut req: Request, + route_match: RouteMatch, + server_scheme: Scheme, + client_addr: SocketAddr, + ) -> anyhow::Result> { + set_req_uri(&mut req, server_scheme.clone())?; + let app_id = self + .trigger_app + .app() + .get_metadata(APP_NAME_KEY)? 
+ .unwrap_or_else(|| "".into()); + + let component_id = route_match.component_id(); + + spin_telemetry::metrics::monotonic_counter!( + spin.request_count = 1, + trigger_type = "http", + app_id = app_id, + component_id = component_id + ); + + let mut instance_builder = self.trigger_app.prepare(component_id)?; + + // Set up outbound HTTP request origin and service chaining + let outbound_http = instance_builder.factor_builders().outbound_http(); + let origin = SelfRequestOrigin::create(server_scheme, &self.listen_addr)?; + outbound_http.set_self_request_origin(origin); + outbound_http.set_request_interceptor(OutboundHttpInterceptor::new(self.clone()))?; + + // Prepare HTTP executor + let trigger_config = self.component_trigger_configs.get(component_id).unwrap(); + let handler_type = self.component_handler_types.get(component_id).unwrap(); + let executor = trigger_config + .executor + .as_ref() + .unwrap_or(&HttpExecutorType::Http); + + let res = match executor { + HttpExecutorType::Http => match handler_type { + HandlerType::Spin => { + SpinHttpExecutor + .execute(instance_builder, &route_match, req, client_addr) + .await + } + HandlerType::Wasi0_2 + | HandlerType::Wasi2023_11_10 + | HandlerType::Wasi2023_10_18 => { + WasiHttpExecutor { + handler_type: *handler_type, + } + .execute(instance_builder, &route_match, req, client_addr) + .await + } + HandlerType::Wagi => unreachable!(), + }, + HttpExecutorType::Wagi(wagi_config) => { + let executor = WagiHttpExecutor { + wagi_config: wagi_config.clone(), + }; + executor + .execute(instance_builder, &route_match, req, client_addr) + .await + } + }; + match res { + Ok(res) => Ok(MatchedRoute::with_response_extension( + res, + route_match.raw_route(), + )), + Err(err) => { + tracing::error!("Error processing request: {err:?}"); + instrument_error(&err); + Self::internal_error(None, route_match.raw_route()) + } + } + } + + /// Returns spin status information. + fn app_info(&self, route: String) -> anyhow::Result> { + let info = AppInfo::new(self.trigger_app.app()); + let body = serde_json::to_vec_pretty(&info)?; + Ok(MatchedRoute::with_response_extension( + Response::builder() + .header("content-type", "application/json") + .body(body::full(body.into()))?, + route, + )) + } + + /// Creates an HTTP 500 response. + fn internal_error( + body: Option<&str>, + route: impl Into, + ) -> anyhow::Result> { + let body = match body { + Some(body) => body::full(Bytes::copy_from_slice(body.as_bytes())), + None => body::empty(), + }; + + Ok(MatchedRoute::with_response_extension( + Response::builder() + .status(StatusCode::INTERNAL_SERVER_ERROR) + .body(body)?, + route, + )) + } + + /// Creates an HTTP 404 response. + fn not_found(kind: NotFoundRouteKind) -> anyhow::Result> { + use std::sync::atomic::{AtomicBool, Ordering}; + static SHOWN_GENERIC_404_WARNING: AtomicBool = AtomicBool::new(false); + if let NotFoundRouteKind::Normal(route) = kind { + if !SHOWN_GENERIC_404_WARNING.fetch_or(true, Ordering::Relaxed) + && std::io::stderr().is_terminal() + { + terminal::warn!("Request to {route} matched no pattern, and received a generic 404 response. To serve a more informative 404 page, add a catch-all (/...) route."); + } + } + Ok(Response::builder() + .status(StatusCode::NOT_FOUND) + .body(body::empty())?) 
+ } + + fn serve_connection( + self: Arc, + stream: S, + server_scheme: Scheme, + client_addr: SocketAddr, + ) { + task::spawn(async move { + if let Err(err) = http1::Builder::new() + .keep_alive(true) + .serve_connection( + TokioIo::new(stream), + service_fn(move |request| { + self.clone().instrumented_service_fn( + server_scheme.clone(), + client_addr, + request, + ) + }), + ) + .await + { + tracing::warn!("Error serving HTTP connection: {err:?}"); + } + }); + } + + async fn instrumented_service_fn( + self: Arc, + server_scheme: Scheme, + client_addr: SocketAddr, + request: Request, + ) -> anyhow::Result> { + let span = http_span!(request, client_addr); + let method = request.method().to_string(); + async { + let result = self + .handle( + request.map(|body: Incoming| { + body.map_err(wasmtime_wasi_http::hyper_response_error) + .boxed() + }), + server_scheme, + client_addr, + ) + .await; + finalize_http_span(result, method) + } + .instrument(span) + .await + } + + fn print_startup_msgs(&self, scheme: &str, listener: &TcpListener) -> anyhow::Result<()> { + let local_addr = listener.local_addr()?; + let base_url = format!("{scheme}://{local_addr:?}"); + terminal::step!("\nServing", "{base_url}"); + tracing::info!("Serving {base_url}"); + + println!("Available Routes:"); + for (route, component_id) in self.router.routes() { + println!(" {}: {}{}", component_id, base_url, route); + if let Some(component) = self.trigger_app.app().get_component(component_id) { + if let Some(description) = component.get_metadata(APP_DESCRIPTION_KEY)? { + println!(" {}", description); + } + } + } + Ok(()) + } +} + +/// The incoming request's scheme and authority +/// +/// The incoming request's URI is relative to the server, so we need to set the scheme and authority. +/// Either the `Host` header or the request's URI's authority is used as the source of truth for the authority. +/// This function will error if the authority cannot be unambiguously determined. +fn set_req_uri(req: &mut Request, scheme: Scheme) -> anyhow::Result<()> { + let uri = req.uri().clone(); + let mut parts = uri.into_parts(); + let headers = req.headers(); + let header_authority = headers + .get(http::header::HOST) + .map(|h| -> anyhow::Result { + let host_header = h.to_str().context("'Host' header is not valid UTF-8")?; + host_header + .parse() + .context("'Host' header contains an invalid authority") + }) + .transpose()?; + let uri_authority = parts.authority; + + // Get authority either from request URI or from 'Host' header + let authority = match (header_authority, uri_authority) { + (None, None) => bail!("no 'Host' header present in request"), + (None, Some(a)) => a, + (Some(a), None) => a, + (Some(a1), Some(a2)) => { + // Ensure that if `req.authority` is set, it matches what was in the `Host` header + // https://github.com/hyperium/hyper/issues/1612 + if a1 != a2 { + return Err(anyhow::anyhow!( + "authority in 'Host' header does not match authority in URI" + )); + } + a1 + } + }; + parts.scheme = Some(scheme); + parts.authority = Some(authority); + *req.uri_mut() = Uri::from_parts(parts).unwrap(); + Ok(()) +} + +/// An HTTP executor. 
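`set_req_uri` above reconciles the `Host` header with the request URI's authority. The decision table is easier to see in isolation; this standalone sketch uses only the `http` crate and mirrors the match in `set_req_uri`, but it is not the function itself (which also rewrites the URI in place):

```rust
use anyhow::{bail, Context};
use http::{uri::Authority, Request};

fn resolve_authority<B>(req: &Request<B>) -> anyhow::Result<Authority> {
    let from_header = req
        .headers()
        .get(http::header::HOST)
        .map(|h| -> anyhow::Result<Authority> {
            let host = h.to_str().context("'Host' header is not valid UTF-8")?;
            host.parse()
                .context("'Host' header contains an invalid authority")
        })
        .transpose()?;
    let from_uri = req.uri().authority().cloned();

    match (from_header, from_uri) {
        // No information at all: reject, as set_req_uri does.
        (None, None) => bail!("no 'Host' header present in request"),
        // Exactly one source: use it.
        (Some(a), None) | (None, Some(a)) => Ok(a),
        // Both present: they must agree (see hyperium/hyper#1612).
        (Some(a), Some(b)) if a == b => Ok(a),
        _ => bail!("authority in 'Host' header does not match authority in URI"),
    }
}
```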
+pub(crate) trait HttpExecutor: Clone + Send + Sync + 'static { + fn execute( + &self, + instance_builder: TriggerInstanceBuilder, + route_match: &RouteMatch, + req: Request, + client_addr: SocketAddr, + ) -> impl Future>>; +} + +/// Whether this handler uses the custom Spin http handler interface for wasi-http +#[derive(Clone, Copy)] +pub enum HandlerType { + Spin, + Wagi, + Wasi0_2, + Wasi2023_11_10, + Wasi2023_10_18, +} + +pub const WASI_HTTP_EXPORT_2023_10_18: &str = "wasi:http/incoming-handler@0.2.0-rc-2023-10-18"; +pub const WASI_HTTP_EXPORT_2023_11_10: &str = "wasi:http/incoming-handler@0.2.0-rc-2023-11-10"; +pub const WASI_HTTP_EXPORT_0_2_0: &str = "wasi:http/incoming-handler@0.2.0"; + +impl HandlerType { + /// Determine the handler type from the exports of a component + pub fn from_component( + engine: impl AsRef, + component: &Component, + ) -> anyhow::Result { + let mut handler_ty = None; + + let mut set = |ty: HandlerType| { + if handler_ty.is_none() { + handler_ty = Some(ty); + Ok(()) + } else { + Err(anyhow::anyhow!( + "component exports multiple different handlers but \ + it's expected to export only one" + )) + } + }; + let ty = component.component_type(); + for (name, _) in ty.exports(engine.as_ref()) { + match name { + WASI_HTTP_EXPORT_2023_10_18 => set(HandlerType::Wasi2023_10_18)?, + WASI_HTTP_EXPORT_2023_11_10 => set(HandlerType::Wasi2023_11_10)?, + WASI_HTTP_EXPORT_0_2_0 => set(HandlerType::Wasi0_2)?, + "fermyon:spin/inbound-http" => set(HandlerType::Spin)?, + _ => {} + } + } + + handler_ty.ok_or_else(|| { + anyhow::anyhow!( + "Expected component to export one of \ + `{WASI_HTTP_EXPORT_2023_10_18}`, \ + `{WASI_HTTP_EXPORT_2023_11_10}`, \ + `{WASI_HTTP_EXPORT_0_2_0}`, \ + or `fermyon:spin/inbound-http` but it exported none of those" + ) + }) + } +} diff --git a/crates/trigger-http/src/spin.rs b/crates/trigger-http/src/spin.rs new file mode 100644 index 0000000000..10c854b200 --- /dev/null +++ b/crates/trigger-http/src/spin.rs @@ -0,0 +1,107 @@ +use std::net::SocketAddr; + +use anyhow::Result; +use http_body_util::BodyExt; +use hyper::{Request, Response}; +use spin_http::body; +use spin_http::routes::RouteMatch; +use spin_world::v1::http_types; +use tracing::{instrument, Level}; + +use crate::{ + headers::{append_headers, prepare_request_headers}, + server::HttpExecutor, + Body, TriggerInstanceBuilder, +}; + +/// An [`HttpExecutor`] that uses the `fermyon:spin/inbound-http` interface. 
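`HandlerType::from_component` above walks a component's exports and accepts exactly one known handler world. The matching itself is plain string comparison; the restatement below isolates it into a function over export names so the accepted values are easy to scan (this is an illustration, not the wasmtime-based code above):

```rust
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Handler {
    Spin,
    Wasi0_2,
    Wasi2023_11_10,
    Wasi2023_10_18,
}

fn detect_handler(export_names: &[&str]) -> anyhow::Result<Handler> {
    let mut found = None;
    for name in export_names {
        let ty = match *name {
            "wasi:http/incoming-handler@0.2.0" => Handler::Wasi0_2,
            "wasi:http/incoming-handler@0.2.0-rc-2023-11-10" => Handler::Wasi2023_11_10,
            "wasi:http/incoming-handler@0.2.0-rc-2023-10-18" => Handler::Wasi2023_10_18,
            "fermyon:spin/inbound-http" => Handler::Spin,
            _ => continue, // unrelated exports are ignored
        };
        anyhow::ensure!(
            found.is_none(),
            "component exports multiple different handlers but it's expected to export only one"
        );
        found = Some(ty);
    }
    found.ok_or_else(|| anyhow::anyhow!("component exports no supported HTTP handler"))
}
```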
+#[derive(Clone)] +pub struct SpinHttpExecutor; + +impl HttpExecutor for SpinHttpExecutor { + #[instrument(name = "spin_trigger_http.execute_wasm", skip_all, err(level = Level::INFO), fields(otel.name = format!("execute_wasm_component {}", route_match.component_id())))] + async fn execute( + &self, + instance_builder: TriggerInstanceBuilder<'_>, + route_match: &RouteMatch, + req: Request, + client_addr: SocketAddr, + ) -> Result> { + let component_id = route_match.component_id(); + + tracing::trace!("Executing request using the Spin executor for component {component_id}"); + + let (instance, mut store) = instance_builder.instantiate(()).await?; + + let headers = prepare_request_headers(&req, route_match, client_addr)?; + let func = instance + .exports(&mut store) + .instance("fermyon:spin/inbound-http") + // Safe since we have already checked that this instance exists + .expect("no fermyon:spin/inbound-http found") + .typed_func::<(http_types::Request,), (http_types::Response,)>("handle-request")?; + + let (parts, body) = req.into_parts(); + let bytes = body.collect().await?.to_bytes().to_vec(); + + let method = if let Some(method) = convert_method(&parts.method) { + method + } else { + return Ok(Response::builder() + .status(http::StatusCode::METHOD_NOT_ALLOWED) + .body(body::empty())?); + }; + + // Preparing to remove the params field. We are leaving it in place for now + // to avoid breaking the ABI, but no longer pass or accept values in it. + // https://github.com/fermyon/spin/issues/663 + let params = vec![]; + + let uri = match parts.uri.path_and_query() { + Some(u) => u.to_string(), + None => parts.uri.to_string(), + }; + + let req = http_types::Request { + method, + uri, + headers, + params, + body: Some(bytes), + }; + + let (resp,) = func.call_async(&mut store, (req,)).await?; + + if resp.status < 100 || resp.status > 600 { + tracing::error!("malformed HTTP status code"); + return Ok(Response::builder() + .status(http::StatusCode::INTERNAL_SERVER_ERROR) + .body(body::empty())?); + }; + + let mut response = http::Response::builder().status(resp.status); + if let Some(headers) = response.headers_mut() { + append_headers(headers, resp.headers)?; + } + + let body = match resp.body { + Some(b) => body::full(b.into()), + None => body::empty(), + }; + + Ok(response.body(body)?) + } +} + +fn convert_method(m: &http::Method) -> Option { + Some(match *m { + http::Method::GET => http_types::Method::Get, + http::Method::POST => http_types::Method::Post, + http::Method::PUT => http_types::Method::Put, + http::Method::DELETE => http_types::Method::Delete, + http::Method::PATCH => http_types::Method::Patch, + http::Method::HEAD => http_types::Method::Head, + http::Method::OPTIONS => http_types::Method::Options, + _ => return None, + }) +} diff --git a/crates/trigger-http/src/tls.rs b/crates/trigger-http/src/tls.rs index d0486c50e7..0888feed0d 100644 --- a/crates/trigger-http/src/tls.rs +++ b/crates/trigger-http/src/tls.rs @@ -6,6 +6,8 @@ use std::{ }; use tokio_rustls::{rustls, TlsAcceptor}; +// TODO: dedupe with spin-factor-outbound-networking (spin-tls crate?) + /// TLS configuration for the server. 
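The `load_certs`/`load_key` helpers touched below keep their `rustls_pemfile`-based bodies and only lose their `pub`. For orientation, here is a simplified standalone equivalent; error wrapping is trimmed, the key loader returns an `Option` instead of erroring when no key is found, and the `rustls::pki_types` import assumes a rustls version with that re-export (the crate reaches rustls through `tokio_rustls`):

```rust
use std::{fs, io, path::Path};

use rustls::pki_types::{CertificateDer, PrivateKeyDer};

fn load_certs(path: impl AsRef<Path>) -> io::Result<Vec<CertificateDer<'static>>> {
    let mut reader = io::BufReader::new(fs::File::open(path)?);
    // Collects every CERTIFICATE block in the PEM file.
    rustls_pemfile::certs(&mut reader).collect()
}

fn load_key(path: impl AsRef<Path>) -> io::Result<Option<PrivateKeyDer<'static>>> {
    let mut reader = io::BufReader::new(fs::File::open(path)?);
    // Returns the first private key found, if any.
    rustls_pemfile::private_key(&mut reader)
}
```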
#[derive(Clone)] pub struct TlsConfig { @@ -31,7 +33,7 @@ impl TlsConfig { } // load_certs parse and return the certs from the provided file -pub fn load_certs( +fn load_certs( path: impl AsRef, ) -> io::Result>> { rustls_pemfile::certs(&mut io::BufReader::new(fs::File::open(path).map_err( @@ -46,7 +48,7 @@ pub fn load_certs( } // parse and return the first private key from the provided file -pub fn load_key(path: impl AsRef) -> io::Result> { +fn load_key(path: impl AsRef) -> io::Result> { private_key(&mut io::BufReader::new(fs::File::open(path).map_err( |err| { io::Error::new( @@ -69,18 +71,11 @@ pub fn load_key(path: impl AsRef) -> io::Result PathBuf { - let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - path.push("src"); - path.push("testdata"); - - path - } + const TESTDATA_DIR: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/testdata"); #[test] fn test_read_non_existing_cert() { - let mut path = testdatadir(); - path.push("non-existing-file.pem"); + let path = Path::new(TESTDATA_DIR).join("non-existing-file.pem"); let certs = load_certs(path); assert!(certs.is_err()); @@ -89,8 +84,7 @@ mod tests { #[test] fn test_read_invalid_cert() { - let mut path = testdatadir(); - path.push("invalid-cert.pem"); + let path = Path::new(TESTDATA_DIR).join("invalid-cert.pem"); let certs = load_certs(path); assert!(certs.is_err()); @@ -102,8 +96,7 @@ mod tests { #[test] fn test_read_valid_cert() { - let mut path = testdatadir(); - path.push("valid-cert.pem"); + let path = Path::new(TESTDATA_DIR).join("valid-cert.pem"); let certs = load_certs(path); assert!(certs.is_ok()); @@ -112,8 +105,7 @@ mod tests { #[test] fn test_read_non_existing_private_key() { - let mut path = testdatadir(); - path.push("non-existing-file.pem"); + let path = Path::new(TESTDATA_DIR).join("non-existing-file.pem"); let keys = load_key(path); assert!(keys.is_err()); @@ -122,8 +114,7 @@ mod tests { #[test] fn test_read_invalid_private_key() { - let mut path = testdatadir(); - path.push("invalid-private-key.pem"); + let path = Path::new(TESTDATA_DIR).join("invalid-private-key.pem"); let keys = load_key(path); assert!(keys.is_err()); @@ -132,8 +123,7 @@ mod tests { #[test] fn test_read_valid_private_key() { - let mut path = testdatadir(); - path.push("valid-private-key.pem"); + let path = Path::new(TESTDATA_DIR).join("valid-private-key.pem"); let keys = load_key(path); assert!(keys.is_ok()); diff --git a/crates/trigger-http/src/wagi.rs b/crates/trigger-http/src/wagi.rs index 0cb0202006..e447e27e00 100644 --- a/crates/trigger-http/src/wagi.rs +++ b/crates/trigger-http/src/wagi.rs @@ -1,30 +1,25 @@ -use std::{io::Cursor, net::SocketAddr, sync::Arc}; +use std::{io::Cursor, net::SocketAddr}; -use crate::HttpInstance; -use anyhow::{anyhow, ensure, Context, Result}; -use async_trait::async_trait; +use anyhow::{ensure, Result}; use http_body_util::BodyExt; use hyper::{Request, Response}; -use spin_core::WasiVersion; use spin_http::{config::WagiTriggerConfig, routes::RouteMatch, wagi}; -use spin_trigger::TriggerAppEngine; use tracing::{instrument, Level}; -use wasi_common_preview1::{pipe::WritePipe, I32Exit}; +use wasmtime_wasi::pipe::MemoryOutputPipe; +use wasmtime_wasi_http::body::HyperIncomingBody as Body; -use crate::{Body, HttpExecutor, HttpTrigger}; +use crate::{headers::compute_default_headers, server::HttpExecutor, TriggerInstanceBuilder}; #[derive(Clone)] pub struct WagiHttpExecutor { pub wagi_config: WagiTriggerConfig, } -#[async_trait] impl HttpExecutor for WagiHttpExecutor { #[instrument(name = 
"spin_trigger_http.execute_wagi", skip_all, err(level = Level::INFO), fields(otel.name = format!("execute_wagi_component {}", route_match.component_id())))] async fn execute( &self, - engine: Arc>, - base: &str, + mut instance_builder: TriggerInstanceBuilder<'_>, route_match: &RouteMatch, req: Request, client_addr: SocketAddr, @@ -72,55 +67,39 @@ impl HttpExecutor for WagiHttpExecutor { // This sets the current environment variables Wagi expects (such as // `PATH_INFO`, or `X_FULL_URL`). // Note that this overrides any existing headers previously set by Wagi. - for (keys, val) in - crate::compute_default_headers(&parts.uri, base, host, route_match, client_addr)? - { + for (keys, val) in compute_default_headers(&parts.uri, host, route_match, client_addr)? { headers.insert(keys[1].to_string(), val); } - let stdout = WritePipe::new_in_memory(); + let stdout = MemoryOutputPipe::new(usize::MAX); + + let wasi_builder = instance_builder.factor_builders().wasi(); - let mut store_builder = engine.store_builder(component, WasiVersion::Preview1)?; // Set up Wagi environment - store_builder.args(argv.split(' '))?; - store_builder.env(headers)?; - store_builder.stdin_pipe(Cursor::new(body)); - store_builder.stdout(Box::new(stdout.clone()))?; - - let (instance, mut store) = engine - .prepare_instance_with_store(component, store_builder) - .await?; - - let HttpInstance::Module(instance) = instance else { - unreachable!() - }; - - let start = instance - .get_func(&mut store, &self.wagi_config.entrypoint) - .ok_or_else(|| { - anyhow::anyhow!( - "No such function '{}' in {}", - self.wagi_config.entrypoint, - component - ) - })?; + wasi_builder.args(argv.split(' ')); + wasi_builder.env(headers); + wasi_builder.stdin_pipe(Cursor::new(body)); + wasi_builder.stdout(stdout.clone()); + + let (instance, mut store) = instance_builder.instantiate(()).await?; + + let command = wasmtime_wasi::bindings::Command::new(&mut store, &instance)?; + tracing::trace!("Calling Wasm entry point"); - start - .call_async(&mut store, &[], &mut []) + if let Err(()) = command + .wasi_cli_run() + .call_run(&mut store) .await - .or_else(ignore_successful_proc_exit_trap) - .with_context(|| { - anyhow!( - "invoking {} for component {component}", - self.wagi_config.entrypoint - ) - })?; - tracing::info!("Module execution complete"); + .or_else(ignore_successful_proc_exit_trap)? 
+ { + tracing::error!("Wagi main function returned unsuccessful result"); + } + tracing::info!("Wagi execution complete"); // Drop the store so we're left with a unique reference to `stdout`: drop(store); - let stdout = stdout.try_into_inner().unwrap().into_inner(); + let stdout = stdout.try_into_inner().unwrap(); ensure!( !stdout.is_empty(), "The {component:?} component is configured to use the WAGI executor \ @@ -131,10 +110,13 @@ impl HttpExecutor for WagiHttpExecutor { } } -fn ignore_successful_proc_exit_trap(guest_err: anyhow::Error) -> Result<()> { - match guest_err.root_cause().downcast_ref::() { +fn ignore_successful_proc_exit_trap(guest_err: anyhow::Error) -> Result> { + match guest_err + .root_cause() + .downcast_ref::() + { Some(trap) => match trap.0 { - 0 => Ok(()), + 0 => Ok(Ok(())), _ => Err(guest_err), }, None => Err(guest_err), diff --git a/crates/trigger-http/src/wasi.rs b/crates/trigger-http/src/wasi.rs new file mode 100644 index 0000000000..d61f3bdb21 --- /dev/null +++ b/crates/trigger-http/src/wasi.rs @@ -0,0 +1,167 @@ +use std::net::SocketAddr; + +use anyhow::{anyhow, Context, Result}; +use futures::TryFutureExt; +use http::{HeaderName, HeaderValue}; +use hyper::{Request, Response}; +use spin_factor_outbound_http::wasi_2023_10_18::exports::wasi::http::incoming_handler::Guest as IncomingHandler2023_10_18; +use spin_factor_outbound_http::wasi_2023_11_10::exports::wasi::http::incoming_handler::Guest as IncomingHandler2023_11_10; +use spin_http::routes::RouteMatch; +use tokio::{sync::oneshot, task}; +use tracing::{instrument, Instrument, Level}; +use wasmtime_wasi_http::{body::HyperIncomingBody as Body, proxy::Proxy, WasiHttpView}; + +use crate::{ + headers::prepare_request_headers, + server::{HandlerType, HttpExecutor, WASI_HTTP_EXPORT_2023_10_18, WASI_HTTP_EXPORT_2023_11_10}, + TriggerInstanceBuilder, +}; + +/// An [`HttpExecutor`] that uses the `wasi:http/incoming-handler` interface. 
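Review note on the Wagi path above: the component now runs as a `wasi:cli/run` command, and a guest that terminates via `proc_exit(0)` surfaces as a trap that `ignore_successful_proc_exit_trap` converts back into success; the nested `Result` distinguishes a clean exit from `run` itself returning an error. A test-style sketch of that mapping, not part of the patch, assuming `wasmtime_wasi::I32Exit` is the error type the downcast above targets:

```rust
// Reviewer sketch, not part of the patch: intended behavior of
// ignore_successful_proc_exit_trap under the assumption that
// wasmtime_wasi::I32Exit(code) is the proc_exit trap error.
#[cfg(test)]
mod proc_exit_sketch {
    use super::*;

    #[test]
    fn exit_zero_is_treated_as_success() {
        let err = anyhow::Error::new(wasmtime_wasi::I32Exit(0));
        assert!(matches!(ignore_successful_proc_exit_trap(err), Ok(Ok(()))));
    }

    #[test]
    fn nonzero_exit_is_still_an_error() {
        let err = anyhow::Error::new(wasmtime_wasi::I32Exit(7));
        assert!(ignore_successful_proc_exit_trap(err).is_err());
    }
}
```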
+#[derive(Clone)] +pub struct WasiHttpExecutor { + pub handler_type: HandlerType, +} + +impl HttpExecutor for WasiHttpExecutor { + #[instrument(name = "spin_trigger_http.execute_wasm", skip_all, err(level = Level::INFO), fields(otel.name = format!("execute_wasm_component {}", route_match.component_id())))] + async fn execute( + &self, + instance_builder: TriggerInstanceBuilder<'_>, + route_match: &RouteMatch, + mut req: Request, + client_addr: SocketAddr, + ) -> Result> { + let component_id = route_match.component_id(); + + tracing::trace!("Executing request using the Wasi executor for component {component_id}"); + + let (instance, mut store) = instance_builder.instantiate(()).await?; + + let headers = prepare_request_headers(&req, route_match, client_addr)?; + req.headers_mut().clear(); + req.headers_mut() + .extend(headers.into_iter().filter_map(|(n, v)| { + let Ok(name) = n.parse::() else { + return None; + }; + let Ok(value) = HeaderValue::from_bytes(v.as_bytes()) else { + return None; + }; + Some((name, value)) + })); + + let mut wasi_http = spin_factor_outbound_http::OutboundHttpFactor::get_wasi_http_impl( + store.data_mut().factors_instance_state(), + ) + .context("missing OutboundHttpFactor")?; + + let request = wasi_http.new_incoming_request(req)?; + + let (response_tx, response_rx) = oneshot::channel(); + let response = wasi_http.new_response_outparam(response_tx)?; + + drop(wasi_http); + + enum Handler { + Latest(Proxy), + Handler2023_11_10(IncomingHandler2023_11_10), + Handler2023_10_18(IncomingHandler2023_10_18), + } + + let handler = + { + let mut exports = instance.exports(&mut store); + match self.handler_type { + HandlerType::Wasi2023_10_18 => { + let mut instance = exports + .instance(WASI_HTTP_EXPORT_2023_10_18) + .ok_or_else(|| { + anyhow!("export of `{WASI_HTTP_EXPORT_2023_10_18}` not an instance") + })?; + Handler::Handler2023_10_18(IncomingHandler2023_10_18::new(&mut instance)?) + } + HandlerType::Wasi2023_11_10 => { + let mut instance = exports + .instance(WASI_HTTP_EXPORT_2023_11_10) + .ok_or_else(|| { + anyhow!("export of `{WASI_HTTP_EXPORT_2023_11_10}` not an instance") + })?; + Handler::Handler2023_11_10(IncomingHandler2023_11_10::new(&mut instance)?) + } + HandlerType::Wasi0_2 => { + drop(exports); + Handler::Latest(Proxy::new(&mut store, &instance)?) + } + HandlerType::Spin => unreachable!("should have used SpinHttpExecutor"), + HandlerType::Wagi => unreachable!("should have used WagiExecutor instead"), + } + }; + + let span = tracing::debug_span!("execute_wasi"); + let handle = task::spawn( + async move { + let result = match handler { + Handler::Latest(proxy) => { + proxy + .wasi_http_incoming_handler() + .call_handle(&mut store, request, response) + .instrument(span) + .await + } + Handler::Handler2023_10_18(handler) => { + handler + .call_handle(&mut store, request, response) + .instrument(span) + .await + } + Handler::Handler2023_11_10(handler) => { + handler + .call_handle(&mut store, request, response) + .instrument(span) + .await + } + }; + + tracing::trace!( + "wasi-http memory consumed: {}", + store.data().core_state().memory_consumed() + ); + + result + } + .in_current_span(), + ); + + match response_rx.await { + Ok(response) => { + task::spawn( + async move { + handle + .await + .context("guest invocation panicked")? + .context("guest invocation failed")?; + + Ok(()) + } + .map_err(|e: anyhow::Error| { + tracing::warn!("component error after response: {e:?}"); + }), + ); + + Ok(response.context("guest failed to produce a response")?) 
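Review note on the `wasi:http` executor above: the guest call is spawned onto its own task and handed a response outparam backed by a oneshot sender, so the server can return the response as soon as the guest produces one and only log failures that happen afterwards. A standalone sketch of that pattern, not part of the patch, using plain tokio and anyhow stand-ins rather than the wasmtime-wasi-http types:

```rust
// Reviewer sketch, not part of the patch: the oneshot "response outparam"
// pattern reduced to plain tokio types so it runs standalone.
use tokio::{sync::oneshot, task};

#[tokio::main]
async fn main() {
    let (response_tx, response_rx) = oneshot::channel::<String>();

    // Stand-in for the spawned guest invocation: it hands off a response early,
    // then keeps running (e.g. streaming a body or flushing telemetry).
    let guest = task::spawn(async move {
        response_tx.send("HTTP/1.1 200 OK".to_string()).unwrap();
        // ... continue running after the response has been handed off ...
        anyhow::Ok(())
    });

    match response_rx.await {
        // Happy path: a response arrived; guest completion is checked afterwards.
        Ok(resp) => {
            println!("got response: {resp}");
            if let Err(e) = guest.await.expect("guest task panicked") {
                eprintln!("component error after response: {e:?}");
            }
        }
        // Sender dropped without a response: surface the guest's result instead.
        Err(_) => {
            let result = guest.await.expect("guest task panicked");
            eprintln!("guest returned without producing a response: {result:?}");
        }
    }
}
```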
+ } + + Err(_) => { + handle + .await + .context("guest invocation panicked")? + .context("guest invocation failed")?; + + Err(anyhow!( + "guest failed to produce a response prior to returning" + )) + } + } + } +} diff --git a/crates/trigger-http/testdata/invalid-cert.pem b/crates/trigger-http/testdata/invalid-cert.pem new file mode 100644 index 0000000000..f1a952b9c8 --- /dev/null +++ b/crates/trigger-http/testdata/invalid-cert.pem @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIBkjCCATegAwIBAgIIEOURVvWgx1AwCgYIKoZIzj0EAwIwIzEhMB8GA1UEAwwY +azNzLWNsaWVudC1jYUAxNzE3NzgwNTIwMB4XDTI0MDYwNzE3MTUyMFoXDTI1MDYw +NzE3MTUyMFowMDEXMBUGA1UEChMOc3lzdGVtOm1hc3RlcnMxFTATBgNVBAMTDHN5 +c3RlbTphZG1pbjBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABFGE/CVuauj8kmde +i4AagSJ5GYgGnL0eF55ItiXrKSjMmsIf/N8EyeamxQfWPKVk/1xhH7cS9GcQgNe6 +XrRvmLyjSDBGMA4GA1UdDwEB/wQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDAjAf +BgNVHSMEGDAWgBRpihySeW3DafmU1cw6LMnQCQDD4jAKBggqhkjOPQQDAgNJADBG +AiEA/db1wb4mVrqJVctqbPU9xd0bXzJx7cBDzpWgPP9ISfkCIQDNyuskAkXvUMHH +F73/GJnh8Bt2H38qyzThM8nlR9v1eQ== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIBdjCCAR2gAwIBAgIBADAKBggqhkjOPQQDAjAjMSEwHwYDVQQDDBhrM3MtY2xp +ZW50LWNhQDE3MTc3ODA1MjAwHhcNMjQwNjA3MTcxNTIwWhcNMzQwNjA1MTcxNTIw +WjAjMSEwHwYDVQQDDBhrM3MtY2xpZW50LWNhQDE3MTc3ODA1MjAwWTATBgcqhkjO +PQIBBggqhkjOPQMBBwNCAASozciE0YGl8ak3G0Ll1riwXSScfpK0QRle/cFizdlA +HgDowBssBcla0/2a/eWabxqTPzsZH0cVhL7Tialoj8GNo0IwQDAOBgNVHQ8BAf8E +BAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUaYocknltw2n5lNXMOizJ +0AkAw+IwCgYIKoZIzj0EAwIDRwAwRAIgR8YcLA8cH4qAMDRPDsJqLaw4GJFkgjwV +TCrMgyUxSvACIBwyklgm7mgHcC5WM9CqmliAGZJyV0xRPZBK01POrNf0 diff --git a/crates/trigger-http/testdata/invalid-private-key.pem b/crates/trigger-http/testdata/invalid-private-key.pem new file mode 100644 index 0000000000..39d7e59ee6 --- /dev/null +++ b/crates/trigger-http/testdata/invalid-private-key.pem @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIA+FBtmKJbd8wBGOWeuJQfHiCKjjXF8ywEPrvj8S1N3VoAoGCCqGSM49 +AwEHoUQDQgAEUYT8JW5q6PySZ16LgBqBInkZiAacvR4Xnki2JespKMyawh/83wTJ +5qbFB9Y8pWT/XGEftxL0ZxCA17petG+YvA== +-----END EC PRIVATE KEY- \ No newline at end of file diff --git a/crates/trigger-http/testdata/valid-cert.pem b/crates/trigger-http/testdata/valid-cert.pem new file mode 100644 index 0000000000..e75166d0e6 --- /dev/null +++ b/crates/trigger-http/testdata/valid-cert.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIBkjCCATegAwIBAgIIEOURVvWgx1AwCgYIKoZIzj0EAwIwIzEhMB8GA1UEAwwY +azNzLWNsaWVudC1jYUAxNzE3NzgwNTIwMB4XDTI0MDYwNzE3MTUyMFoXDTI1MDYw +NzE3MTUyMFowMDEXMBUGA1UEChMOc3lzdGVtOm1hc3RlcnMxFTATBgNVBAMTDHN5 +c3RlbTphZG1pbjBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABFGE/CVuauj8kmde +i4AagSJ5GYgGnL0eF55ItiXrKSjMmsIf/N8EyeamxQfWPKVk/1xhH7cS9GcQgNe6 +XrRvmLyjSDBGMA4GA1UdDwEB/wQEAwIFoDATBgNVHSUEDDAKBggrBgEFBQcDAjAf +BgNVHSMEGDAWgBRpihySeW3DafmU1cw6LMnQCQDD4jAKBggqhkjOPQQDAgNJADBG +AiEA/db1wb4mVrqJVctqbPU9xd0bXzJx7cBDzpWgPP9ISfkCIQDNyuskAkXvUMHH +F73/GJnh8Bt2H38qyzThM8nlR9v1eQ== +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIBdjCCAR2gAwIBAgIBADAKBggqhkjOPQQDAjAjMSEwHwYDVQQDDBhrM3MtY2xp +ZW50LWNhQDE3MTc3ODA1MjAwHhcNMjQwNjA3MTcxNTIwWhcNMzQwNjA1MTcxNTIw +WjAjMSEwHwYDVQQDDBhrM3MtY2xpZW50LWNhQDE3MTc3ODA1MjAwWTATBgcqhkjO +PQIBBggqhkjOPQMBBwNCAASozciE0YGl8ak3G0Ll1riwXSScfpK0QRle/cFizdlA +HgDowBssBcla0/2a/eWabxqTPzsZH0cVhL7Tialoj8GNo0IwQDAOBgNVHQ8BAf8E +BAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUaYocknltw2n5lNXMOizJ +0AkAw+IwCgYIKoZIzj0EAwIDRwAwRAIgR8YcLA8cH4qAMDRPDsJqLaw4GJFkgjwV +TCrMgyUxSvACIBwyklgm7mgHcC5WM9CqmliAGZJyV0xRPZBK01POrNf0 +-----END 
CERTIFICATE----- \ No newline at end of file diff --git a/crates/trigger-http/testdata/valid-private-key.pem b/crates/trigger-http/testdata/valid-private-key.pem new file mode 100644 index 0000000000..2820fbed26 --- /dev/null +++ b/crates/trigger-http/testdata/valid-private-key.pem @@ -0,0 +1,5 @@ +-----BEGIN EC PRIVATE KEY----- +MHcCAQEEIA+FBtmKJbd8wBGOWeuJQfHiCKjjXF8ywEPrvj8S1N3VoAoGCCqGSM49 +AwEHoUQDQgAEUYT8JW5q6PySZ16LgBqBInkZiAacvR4Xnki2JespKMyawh/83wTJ +5qbFB9Y8pWT/XGEftxL0ZxCA17petG+YvA== +-----END EC PRIVATE KEY----- \ No newline at end of file diff --git a/crates/trigger-http/tests/local.crt.pem b/crates/trigger-http/tests/local.crt.pem deleted file mode 100644 index efd51f6707..0000000000 --- a/crates/trigger-http/tests/local.crt.pem +++ /dev/null @@ -1,17 +0,0 @@ ------BEGIN CERTIFICATE----- -MIICujCCAaICCQClexHj2O4K/TANBgkqhkiG9w0BAQsFADAfMQswCQYDVQQGEwJV -UzEQMA4GA1UECgwHRmVybXlvbjAeFw0yMjAyMjUxNzQ3MTFaFw0yMzAyMjUxNzQ3 -MTFaMB8xCzAJBgNVBAYTAlVTMRAwDgYDVQQKDAdGZXJteW9uMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwMbUZ2eoIaJfgcBJ2fILUViWYApnA9SU+Ruf -nm6DNm9Gy5+YThqxd/0mhbPwYVkfi2/3UddWDl3VPOAYcvYoHDqH0tHm10wo+UzY -DDcNZB9enLRfGCv9Fful4bqNd3Vtx2xNwc8+F0WiljtYeMc+9wp7M5WWbKJqzKPe -VQBADRlfGoG3jCLGaQ2fyVp/73nWdqbbluWJopxHph7v1alb/BxLcDi/tjWKgZut -Vr9ZtBBPDSjRbfjHarn6pibYZAWgzanpfsaSBdbpVNn1MQ/gNXIHmNFwfbsN0V+3 -LN/Z4VNZrkc+C7CjGhJOcBj0xtrSDhoHnOmDS/z+lBUdlNOUrQIDAQABMA0GCSqG -SIb3DQEBCwUAA4IBAQAOnRPnUJoEE8s9+ADUpKkWBXFCiRajtBSBDNDX3phRPwly -q2zG+gXyV+Axx1qvsis9yXQBF9DcD+lx0rEgGzQjYGfmEA45E8Co2Tih2ON7JkCu -bYoT+wMkgfOMci/S2BBOJ+d0LI3K0b1qDfc4KwHe6g3p5ywuEBFOaWKiMemJyywd -zpoD6QmcQ9qlp5/2pf12bNRUIdXe5+vMU3qVIZcWM49u04L2/Swyc6EFXfEtnp/m -6184isfCkc3egMvqEfrKUaf0lgNzCksmRD9sLF8wWaV4lcidzsNDdU47EPFutVMU -3iLgXAhmRuZ+eoBf56QkzVTQWnCYQdlGwZp1Fcoj ------END CERTIFICATE----- diff --git a/crates/trigger-http/tests/local.key.pem b/crates/trigger-http/tests/local.key.pem deleted file mode 100644 index 6b080db693..0000000000 --- a/crates/trigger-http/tests/local.key.pem +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDAxtRnZ6ghol+B -wEnZ8gtRWJZgCmcD1JT5G5+eboM2b0bLn5hOGrF3/SaFs/BhWR+Lb/dR11YOXdU8 -4Bhy9igcOofS0ebXTCj5TNgMNw1kH16ctF8YK/0V+6Xhuo13dW3HbE3Bzz4XRaKW -O1h4xz73CnszlZZsomrMo95VAEANGV8agbeMIsZpDZ/JWn/vedZ2ptuW5YminEem -Hu/VqVv8HEtwOL+2NYqBm61Wv1m0EE8NKNFt+MdqufqmJthkBaDNqel+xpIF1ulU -2fUxD+A1cgeY0XB9uw3RX7cs39nhU1muRz4LsKMaEk5wGPTG2tIOGgec6YNL/P6U -FR2U05StAgMBAAECggEAfpcSjATJp6yUwwOee3wyamyd8tth4mYKnbrCCqvPhkN0 -XeqjfUaSG5UlYs9SntqDmHEiG6AoZq6/hIY0B+oVVNQqtQoZaHAex/bqOLs+E+11 -l7nqaFkajQD/YUe79iIqwLYiKY8J2wZjSfwWkNlmQ5uiY7FrYlMVhuRk77SGWxKW -UbWfgTTMgEWIK6bU77FShQ7b0px5ZIulRPQeRaH8USdx0yktqUMwUakIrNyZ64u+ -Gx9k4ma2bCmbWxGlCEp0EQsYOlWDBeKu3Elq2g48KmADzbjvKlS7S/0fhcVqi2dE -Fj0BrmzxWjPzJwqxA6Z/8tykqzL5Nr6tOm0e6ZhBEQKBgQDhfy83jLfIWLt3rMcx -dFA4TGFSEUVJE9ESV0Za5zriLeGzM66JGut+Bph9Kk7XmDt+q3ewFJv7oDVibhzG -4nit+TakfSMWUAronsf2wQuUvpE6rNoZlWjhd7AE5f/eBZTYhNm5cp7ujGwnEn47 -vmfSVev+1yQcEUeV10OSWWaCrwKBgQDa2pEwps6htnZqiZsJP86LfxbTA1P+BgsV -nFvVkcCT0Uy7V0pSSdA82Ua/1KfcQ3BAJiBkINSL6Sob1+3lSQTeTHLVbXySacnh -c7UDDoayWJxtYNyjJeBzrjlZCDIkipJqz26pGfIhxePwVgbj30O/EB55y44gkxqn -JIvqIWBlYwKBgQDVqR4DI3lMAw92QKbo7A3KmkyoZybgLD+wgjNulKQNhW3Sz4hz -7qbt3bAFAN59l4ff6PZaR9zYWh/bKPxpUlMIfRdSWiOx05vSeAh+fMHNaZfQIdHx -5cjfwfltWsTLCTzUv2RRPBLtcu5TQ0mKsEpNWQ5ohE95rMHIb5ReCgmAjwKBgCb6 -NlGL49E5Re3DhDEphAekItSCCzt6qA65QkHPK5Un+ZqD+WCedM/hgpA3t42rFRrX -r30lu7UPWciLtHrZflx5ERqh3UXWQXY9vUdGFwc8cN+qGKGV5Vu089G/e+62H02W 
-lAbZ8B3DuMzdBW0gHliw7jyS3EVA7cZG5ARW3WwxAoGAW+FkrJKsPyyScHBdu/LD -GeDMGRBRBdthXVbtB7xkzi2Tla4TywlHTm32rK3ywtoBxzvhlxbVnbBODMO/83xZ -DKjq2leuKfXUNsuMEre7uhhs7ezEM6QfiKTTosD/D8Z3S8AA4q1NKu3iEBUjtXcS -FSaIdbf6aHPcvbRB9cDv5ho= ------END PRIVATE KEY----- diff --git a/crates/trigger-redis/Cargo.toml b/crates/trigger-redis/Cargo.toml index 70a90a61d5..0ace95427b 100644 --- a/crates/trigger-redis/Cargo.toml +++ b/crates/trigger-redis/Cargo.toml @@ -12,19 +12,13 @@ anyhow = "1.0" async-trait = "0.1" futures = "0.3" serde = "1.0.188" -spin-app = { path = "../app" } -spin-common = { path = "../common" } -spin-core = { path = "../core" } -spin-expressions = { path = "../expressions" } +spin-factor-variables = { path = "../factor-variables" } +spin-telemetry = { path = "../telemetry" } spin-trigger = { path = "../trigger" } spin-world = { path = "../world" } -redis = { version = "0.21", features = ["tokio-comp"] } +redis = { version = "0.26.1", features = ["tokio-comp"] } tracing = { workspace = true } -tokio = { version = "1.23", features = ["full"] } -spin-telemetry = { path = "../telemetry" } - -[dev-dependencies] -spin-testing = { path = "../testing" } +tokio = { version = "1.39.3", features = ["macros", "rt"] } [lints] workspace = true diff --git a/crates/trigger-redis/src/lib.rs b/crates/trigger-redis/src/lib.rs index 440fc00b22..5932b89392 100644 --- a/crates/trigger-redis/src/lib.rs +++ b/crates/trigger-redis/src/lib.rs @@ -1,203 +1,226 @@ -//! Implementation for the Spin Redis engine. - -mod spin; - -use anyhow::{anyhow, Context, Result}; -use futures::{future::join_all, StreamExt}; -use redis::{Client, ConnectionLike}; -use serde::{de::IgnoredAny, Deserialize, Serialize}; -use spin_common::url::remove_credentials; -use spin_core::{async_trait, InstancePre}; -use spin_trigger::{cli::NoArgs, TriggerAppEngine, TriggerExecutor}; -use std::collections::HashMap; -use std::sync::Arc; +use std::{collections::HashMap, sync::Arc}; + +use anyhow::Context; +use futures::{StreamExt, TryFutureExt}; +use redis::{Client, Msg}; +use serde::Deserialize; +use spin_factor_variables::VariablesFactor; +use spin_trigger::{cli::NoCliArgs, App, Trigger, TriggerApp}; +use spin_world::exports::fermyon::spin::inbound_redis; use tracing::{instrument, Level}; -use crate::spin::SpinRedisExecutor; +pub struct RedisTrigger; -pub(crate) type RuntimeData = (); -pub(crate) type Store = spin_core::Store; - -type ChannelComponents = HashMap>; -/// The Spin Redis trigger. -#[derive(Clone)] -pub struct RedisTrigger { - engine: Arc>, - // Mapping of server url with subscription channel and associated component IDs - server_channels: HashMap, +/// Redis trigger metadata. +#[derive(Clone, Debug, Default, Deserialize)] +#[serde(deny_unknown_fields)] +struct TriggerMetadata { + address: String, } /// Redis trigger configuration. 
-#[derive(Clone, Debug, Default, Deserialize, Serialize)] +#[derive(Clone, Debug, Default, Deserialize)] #[serde(deny_unknown_fields)] -pub struct RedisTriggerConfig { +struct TriggerConfig { /// Component ID to invoke - pub component: String, + component: String, /// Channel to subscribe to - pub channel: String, - /// optional overide address for trigger - pub address: Option, - /// Trigger executor (currently unused) - #[serde(default, skip_serializing)] - pub executor: IgnoredAny, + channel: String, + /// Optionally override address for trigger + address: Option, } -#[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(deny_unknown_fields)] -struct TriggerMetadata { - address: String, -} +impl Trigger for RedisTrigger { + const TYPE: &'static str = "redis"; -#[async_trait] -impl TriggerExecutor for RedisTrigger { - const TRIGGER_TYPE: &'static str = "redis"; - type RuntimeData = RuntimeData; - type TriggerConfig = RedisTriggerConfig; - type RunConfig = NoArgs; - type InstancePre = InstancePre; - - async fn new(engine: TriggerAppEngine) -> Result { - let default_address: String = engine - .trigger_metadata::()? - .unwrap_or_default() - .address; - let default_address_expr = spin_expressions::Template::new(default_address)?; - let default_address = engine.resolve_template(&default_address_expr)?; - - let mut server_channels: HashMap = HashMap::new(); - - for (_, config) in engine.trigger_configs() { - let address = config.address.clone().unwrap_or(default_address.clone()); - let address_expr = spin_expressions::Template::new(address)?; - let address = engine.resolve_template(&address_expr)?; - let server = server_channels.entry(address).or_default(); - let channel_expr = spin_expressions::Template::new(config.channel.as_str())?; - let channel = engine.resolve_template(&channel_expr)?; - server - .entry(channel) - .or_default() - .push(config.component.clone()); - } - Ok(Self { - engine: Arc::new(engine), - server_channels, - }) + type CliArgs = NoCliArgs; + + type InstanceState = (); + + fn new(_cli_args: Self::CliArgs, _app: &App) -> anyhow::Result { + Ok(Self) } - /// Run the Redis trigger indefinitely. - async fn run(self, _config: Self::RunConfig) -> Result<()> { - let tasks: Vec<_> = self - .server_channels - .clone() - .into_iter() - .map(|(server_address, channel_components)| { - let trigger = self.clone(); - tokio::spawn(async move { - trigger - .run_listener(server_address.clone(), channel_components.clone()) - .await - }) - }) - .collect(); + async fn run(self, trigger_app: spin_trigger::TriggerApp) -> anyhow::Result<()> { + let app_variables = trigger_app + .configured_app() + .app_state::() + .context("RedisTrigger depends on VariablesFactor")?; + + let app = trigger_app.app(); + let metadata = app + .get_trigger_metadata::(Self::TYPE)? + .unwrap_or_default(); + let default_address_expr = &metadata.address; + let default_address = app_variables + .resolve_expression(default_address_expr.clone()) + .await + .with_context(|| { + format!("failed to resolve redis trigger default address {default_address_expr:?}") + })?; - // wait for the first handle to be returned and drop the rest - let (result, _, rest) = futures::future::select_all(tasks).await; + // Maps -> -> + let mut server_channel_components: HashMap = HashMap::new(); - drop(rest); + // Resolve trigger configs before starting any subscribers + for (_, config) in app + .trigger_configs::(Self::TYPE)? 
+ .into_iter() + .collect::>() + { + let component_id = config.component; + + let address_expr = config.address.as_ref().unwrap_or(&default_address); + let address = app_variables + .resolve_expression(address_expr.clone()) + .await + .with_context(|| { + format!( + "failed to resolve redis trigger address {address_expr:?} for component {component_id}" + ) + })?; + + let channel_expr = &config.channel; + let channel = app_variables + .resolve_expression(channel_expr.clone()) + .await + .with_context(|| { + format!( + "failed to resolve redis trigger channel {channel_expr:?} for component {component_id}" + ) + })?; + + server_channel_components + .entry(address) + .or_default() + .entry(channel) + .or_default() + .push(component_id); + } + + // Start subscriber(s) + let trigger_app = Arc::new(trigger_app); + let mut subscriber_tasks = Vec::new(); + for (address, channel_components) in server_channel_components { + let subscriber = Subscriber::new(address, trigger_app.clone(), channel_components)?; + let task = tokio::spawn(subscriber.run_listener()); + subscriber_tasks.push(task); + } - result? + // Wait for any task to complete + let (res, _, _) = futures::future::select_all(subscriber_tasks).await; + res? } } -impl RedisTrigger { - // Handle the message. - #[instrument(name = "spin_trigger_redis.handle_message", skip(self, channel_components, msg), - err(level = Level::INFO), fields(otel.name = format!("{} receive", msg.get_channel_name()), - otel.kind = "consumer", messaging.operation = "receive", messaging.system = "redis"))] - async fn handle( - &self, - address: &str, - channel_components: &ChannelComponents, - msg: redis::Msg, - ) -> Result<()> { - let channel = msg.get_channel_name(); - tracing::info!("Received message on channel {address}:{:?}", channel); - - if let Some(component_ids) = channel_components.get(channel) { - let futures = component_ids.iter().map(|id| { - tracing::trace!("Executing Redis component {id:?}"); - SpinRedisExecutor.execute(&self.engine, id, channel, msg.get_payload_bytes()) - }); - let results: Vec<_> = join_all(futures).await.into_iter().collect(); - let errors = results - .into_iter() - .filter_map(|r| r.err()) - .collect::>(); - if !errors.is_empty() { - return Err(anyhow!("{errors:#?}")); - } - } else { - tracing::debug!("No subscription found for {:?}", channel); - } - Ok(()) - } +/// Maps -> +type ChannelComponents = HashMap>; - async fn run_listener( - &self, +/// Subscribes to channels from a single Redis server. +struct Subscriber { + client: Client, + trigger_app: Arc>, + channel_components: ChannelComponents, +} + +impl Subscriber { + fn new( address: String, + trigger_app: Arc>, channel_components: ChannelComponents, - ) -> Result<()> { - tracing::info!("Connecting to Redis server at {}", address); - let mut client = Client::open(address.to_string())?; - let mut pubsub = client - .get_async_connection() + ) -> anyhow::Result { + let client = Client::open(address)?; + Ok(Self { + client, + trigger_app, + channel_components, + }) + } + + async fn run_listener(self) -> anyhow::Result<()> { + let server_addr = &self.client.get_connection_info().addr; + + tracing::info!("Connecting to Redis server at {server_addr}"); + let mut pubsub = self + .client + .get_async_pubsub() .await - .with_context(|| anyhow!("Redis trigger failed to connect to {}", address))? 
- .into_pubsub(); + .with_context(|| format!("Redis trigger failed to connect to {server_addr}"))?; + + println!("Active Channels on {server_addr}:"); - let sanitised_addr = remove_credentials(&address)?; - println!("Active Channels on {sanitised_addr}:"); // Subscribe to channels - for (channel, component) in channel_components.iter() { - tracing::info!("Subscribing component {component:?} to channel {channel:?}"); - pubsub.subscribe(channel).await?; - println!("\t{sanitised_addr}:{channel}: [{}]", component.join(",")); + for (channel, components) in &self.channel_components { + tracing::info!("Subscribing to {channel:?} on {server_addr}"); + pubsub.subscribe(channel).await.with_context(|| { + format!("Redis trigger failed to subscribe to channel {channel:?} on {server_addr}") + })?; + println!("\t{server_addr}/{channel}: [{}]", components.join(",")); } - let mut stream = pubsub.on_message(); - loop { - match stream.next().await { - Some(msg) => { - if let Err(err) = self.handle(&address, &channel_components, msg).await { - tracing::warn!("Error handling message: {err}"); - } - } - None => { - tracing::trace!("Empty message"); - if !client.check_connection() { - tracing::info!("No Redis connection available"); - println!("Disconnected from {address}"); - break; - } - } - }; + let mut message_stream = pubsub.on_message(); + while let Some(msg) = message_stream.next().await { + if let Err(err) = self.handle_message(msg).await { + tracing::error!("Error handling message from {server_addr}: {err}"); + } } + Err(anyhow::anyhow!("disconnected from {server_addr}")) + } + + #[instrument(name = "spin_trigger_redis.handle_message", skip_all, err(level = Level::INFO), fields( + otel.name = format!("{} receive", msg.get_channel_name()), + otel.kind = "consumer", + messaging.operation = "receive", + messaging.system = "redis" + ))] + async fn handle_message(&self, msg: Msg) -> anyhow::Result<()> { + let server_addr = &self.client.get_connection_info().addr; + let channel = msg.get_channel_name(); + tracing::trace!(%server_addr, %channel, "Received message"); + + let Some(component_ids) = self.channel_components.get(channel) else { + anyhow::bail!("message from unexpected channel {channel:?}"); + }; + + let dispatch_futures = component_ids.iter().map(|component_id| { + tracing::trace!("Executing Redis component {component_id}"); + self.dispatch_handler(&msg, component_id) + .inspect_err(move |err| { + tracing::info!("Component {component_id} handler failed: {err}"); + }) + }); + futures::future::join_all(dispatch_futures).await; + Ok(()) } -} -/// The Redis executor trait. -/// All Redis executors must implement this trait. -#[async_trait] -pub(crate) trait RedisExecutor: Clone + Send + Sync + 'static { - async fn execute( - &self, - engine: &TriggerAppEngine, - component_id: &str, - channel: &str, - payload: &[u8], - ) -> Result<()>; + async fn dispatch_handler(&self, msg: &Msg, component_id: &str) -> anyhow::Result<()> { + spin_telemetry::metrics::monotonic_counter!( + spin.request_count = 1, + trigger_type = "redis", + app_id = self.trigger_app.app().id(), + component_id = component_id + ); + + let (instance, mut store) = self + .trigger_app + .prepare(component_id)? + .instantiate(()) + .await?; + + let guest = { + let exports = &mut instance.exports(&mut store); + let mut inbound_redis_export = exports + .instance("fermyon:spin/inbound-redis") + .context("no fermyon:spin/inbound-redis instance found")?; + inbound_redis::Guest::new(&mut inbound_redis_export)? 
+ }; + + let payload = msg.get_payload_bytes().to_vec(); + + guest + .call_handle_message(&mut store, &payload) + .await? + .context("Redis handler returned an error") + } } - -#[cfg(test)] -mod tests; diff --git a/crates/trigger-redis/src/spin.rs b/crates/trigger-redis/src/spin.rs deleted file mode 100644 index 290265a210..0000000000 --- a/crates/trigger-redis/src/spin.rs +++ /dev/null @@ -1,65 +0,0 @@ -use anyhow::{anyhow, Result}; -use async_trait::async_trait; -use spin_core::Instance; -use spin_trigger::TriggerAppEngine; -use spin_world::v1::redis_types::{Error, Payload}; -use tracing::{instrument, Level}; - -use crate::{RedisExecutor, RedisTrigger, Store}; - -#[derive(Clone)] -pub struct SpinRedisExecutor; - -#[async_trait] -impl RedisExecutor for SpinRedisExecutor { - #[instrument(name = "spin_trigger_redis.execute_wasm", skip(self, engine, payload), err(level = Level::INFO), fields(otel.name = format!("execute_wasm_component {}", component_id)))] - async fn execute( - &self, - engine: &TriggerAppEngine, - component_id: &str, - channel: &str, - payload: &[u8], - ) -> Result<()> { - tracing::trace!("Executing request using the Spin executor for component {component_id}"); - - spin_telemetry::metrics::monotonic_counter!( - spin.request_count = 1, - trigger_type = "redis", - app_id = engine.app_name, - component_id = component_id - ); - - let (instance, store) = engine.prepare_instance(component_id).await?; - - match Self::execute_impl(store, instance, channel, payload.to_vec()).await { - Ok(()) => { - tracing::trace!("Request finished OK"); - Ok(()) - } - Err(e) => { - tracing::trace!("Request finished with error from {component_id}: {e}"); - Err(anyhow!("Error from {component_id}: {e}")) - } - } - } -} - -impl SpinRedisExecutor { - pub async fn execute_impl( - mut store: Store, - instance: Instance, - _channel: &str, - payload: Vec, - ) -> Result<()> { - let func = instance - .exports(&mut store) - .instance("fermyon:spin/inbound-redis") - .ok_or_else(|| anyhow!("no fermyon:spin/inbound-redis instance found"))? - .typed_func::<(Payload,), (Result<(), Error>,)>("handle-message")?; - - match func.call_async(store, (payload,)).await? 
{ - (Ok(()) | Err(Error::Success),) => Ok(()), - _ => Err(anyhow!("`handle-message` returned an error")), - } - } -} diff --git a/crates/trigger-redis/src/tests.rs b/crates/trigger-redis/src/tests.rs deleted file mode 100644 index 6e0bb91d8b..0000000000 --- a/crates/trigger-redis/src/tests.rs +++ /dev/null @@ -1,26 +0,0 @@ -use super::*; -use anyhow::Result; -use redis::{Msg, Value}; -use spin_testing::{tokio, RedisTestConfig}; - -fn create_trigger_event(channel: &str, payload: &str) -> redis::Msg { - Msg::from_value(&redis::Value::Bulk(vec![ - Value::Data("message".into()), - Value::Data(channel.into()), - Value::Data(payload.into()), - ])) - .unwrap() -} - -#[tokio::test] -async fn test_pubsub() -> Result<()> { - let trigger: RedisTrigger = RedisTestConfig::default() - .test_program("redis-rust.wasm") - .build_trigger("messages") - .await; - let test = HashMap::new(); - let msg = create_trigger_event("messages", "hello"); - trigger.handle("", &test, msg).await?; - - Ok(()) -} diff --git a/crates/trigger-redis/tests/rust/.cargo/.config b/crates/trigger-redis/tests/rust/.cargo/.config deleted file mode 100644 index 30c83a7906..0000000000 --- a/crates/trigger-redis/tests/rust/.cargo/.config +++ /dev/null @@ -1,2 +0,0 @@ -[build] - target = "wasm32-wasi" diff --git a/crates/trigger-redis/tests/rust/Cargo.lock b/crates/trigger-redis/tests/rust/Cargo.lock deleted file mode 100644 index dd62f57f13..0000000000 --- a/crates/trigger-redis/tests/rust/Cargo.lock +++ /dev/null @@ -1,303 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "anyhow" -version = "1.0.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" - -[[package]] -name = "bitflags" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" - -[[package]] -name = "equivalent" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" - -[[package]] -name = "hashbrown" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" - -[[package]] -name = "heck" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "id-arena" -version = "2.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005" - -[[package]] -name = "indexmap" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" -dependencies = [ - "equivalent", - "hashbrown", - "serde", -] - -[[package]] -name = "itoa" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" - -[[package]] -name = "leb128" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" - -[[package]] -name = "log" -version = "0.4.20" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" - -[[package]] -name = "proc-macro2" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "rust" -version = "0.1.0" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "ryu" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" - -[[package]] -name = "semver" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" - -[[package]] -name = "serde" -version = "1.0.189" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e422a44e74ad4001bdc8eede9a4570ab52f71190e9c076d14369f38b9200537" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.189" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e48d1f918009ce3145511378cf68d613e3b3d9137d67272562080d68a2b32d5" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.107" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" -dependencies = [ - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "smallvec" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" - -[[package]] -name = "spdx" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b19b32ed6d899ab23174302ff105c1577e45a06b08d4fe0a9dd13ce804bbbf71" -dependencies = [ - "smallvec", -] - -[[package]] -name = "syn" -version = "2.0.38" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "unicode-ident" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" - -[[package]] -name = "unicode-segmentation" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" - -[[package]] -name = "unicode-xid" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" - -[[package]] -name = "wasm-encoder" -version = "0.35.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca90ba1b5b0a70d3d49473c5579951f3bddc78d47b59256d2f9d4922b150aca" -dependencies = [ - "leb128", -] - -[[package]] -name = "wasm-metadata" -version = "0.10.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"14abc161bfda5b519aa229758b68f2a52b45a12b993808665c857d1a9a00223c" -dependencies = [ - "anyhow", - "indexmap", - "serde", - "serde_derive", - "serde_json", - "spdx", - "wasm-encoder", - "wasmparser", -] - -[[package]] -name = "wasmparser" -version = "0.115.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e06c0641a4add879ba71ccb3a1e4278fd546f76f1eafb21d8f7b07733b547cd5" -dependencies = [ - "indexmap", - "semver", -] - -[[package]] -name = "wit-bindgen" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7d92ce0ca6b6074059413a9581a637550c3a740581c854f9847ec293c8aed71" -dependencies = [ - "bitflags", - "wit-bindgen-rust-macro", -] - -[[package]] -name = "wit-bindgen-core" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "565b945ae074886071eccf9cdaf8ccd7b959c2b0d624095bea5fe62003e8b3e0" -dependencies = [ - "anyhow", - "wit-component", - "wit-parser", -] - -[[package]] -name = "wit-bindgen-rust" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5695ff4e41873ed9ce56d2787e6b5772bdad9e70e2c1d2d160621d1762257f4f" -dependencies = [ - "anyhow", - "heck", - "wasm-metadata", - "wit-bindgen-core", - "wit-component", -] - -[[package]] -name = "wit-bindgen-rust-macro" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a91835ea4231da1fe7971679d505ba14be7826e192b6357f08465866ef482e08" -dependencies = [ - "anyhow", - "proc-macro2", - "quote", - "syn", - "wit-bindgen-core", - "wit-bindgen-rust", - "wit-component", -] - -[[package]] -name = "wit-component" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e87488b57a08e2cbbd076b325acbe7f8666965af174d69d5929cd373bd54547f" -dependencies = [ - "anyhow", - "bitflags", - "indexmap", - "log", - "serde", - "serde_derive", - "serde_json", - "wasm-encoder", - "wasm-metadata", - "wasmparser", - "wit-parser", -] - -[[package]] -name = "wit-parser" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ace9943d89bbf3dbbc71b966da0e7302057b311f36a4ac3d65ddfef17b52cf" -dependencies = [ - "anyhow", - "id-arena", - "indexmap", - "log", - "semver", - "serde", - "serde_derive", - "serde_json", - "unicode-xid", -] diff --git a/crates/trigger-redis/tests/rust/Cargo.toml b/crates/trigger-redis/tests/rust/Cargo.toml deleted file mode 100644 index ad23592c0b..0000000000 --- a/crates/trigger-redis/tests/rust/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "rust" -version = "0.1.0" -edition = "2021" -authors = ["Radu Matei "] - -[lib] -crate-type = ["cdylib"] - -[dependencies] -wit-bindgen = "0.13.0" - -[workspace] diff --git a/crates/trigger-redis/tests/rust/src/lib.rs b/crates/trigger-redis/tests/rust/src/lib.rs deleted file mode 100644 index 8371614b9e..0000000000 --- a/crates/trigger-redis/tests/rust/src/lib.rs +++ /dev/null @@ -1,25 +0,0 @@ -use std::str::{from_utf8, Utf8Error}; - -wit_bindgen::generate!({ - world: "redis-trigger", - path: "../../../../wit/deps/spin@unversioned", - exports: { - "fermyon:spin/inbound-redis": SpinRedis, - } -}); -use exports::fermyon::spin::inbound_redis::{self, Error, Payload}; - -struct SpinRedis; - -impl inbound_redis::Guest for SpinRedis { - fn handle_message(message: Payload) -> Result<(), Error> { - println!("Message: {:?}", from_utf8(&message)); - Ok(()) - } -} - -impl From for Error { - fn from(_: Utf8Error) -> 
Self { - Self::Error - } -} diff --git a/crates/trigger/Cargo.toml b/crates/trigger/Cargo.toml index 164a265aae..733a335309 100644 --- a/crates/trigger/Cargo.toml +++ b/crates/trigger/Cargo.toml @@ -1,68 +1,48 @@ [package] name = "spin-trigger" -version = { workspace = true } -authors = { workspace = true } -edition = { workspace = true } +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +rust-version.workspace = true [features] -llm = ["spin-llm-local"] -llm-metal = ["llm", "spin-llm-local/metal"] -llm-cublas = ["llm", "spin-llm-local/cublas"] -# Enables loading AOT compiled components, a potentially unsafe operation. See -# `::::enable_loading_aot_compiled_components` -# documentation for more information about the safety risks. -unsafe-aot-compilation = [] +llm = ["spin-factor-llm/llm"] +llm-metal = ["spin-factor-llm/llm-metal"] +llm-cublas = ["spin-factor-llm/llm-cublas"] [dependencies] -anyhow = "1.0" -async-trait = "0.1" -clap = { version = "3.1.15", features = ["derive", "env"] } +anyhow = "1" +clap = { version = "3.1.18", features = ["derive", "env"] } ctrlc = { version = "3.2", features = ["termination"] } -dirs = "4" futures = "0.3" -indexmap = "1" -ipnet = "2.9.0" -http = "1.0.0" -outbound-http = { path = "../outbound-http" } -outbound-redis = { path = "../outbound-redis" } -outbound-mqtt = { path = "../outbound-mqtt" } -outbound-pg = { path = "../outbound-pg" } -outbound-mysql = { path = "../outbound-mysql" } -rustls-pemfile = "2.1.2" -rustls-pki-types = "1.7.0" -spin-common = { path = "../common" } -spin-expressions = { path = "../expressions" } -spin-serde = { path = "../serde" } -spin-key-value = { path = "../key-value" } -spin-key-value-azure = { path = "../key-value-azure" } -spin-key-value-redis = { path = "../key-value-redis" } -spin-key-value-sqlite = { path = "../key-value-sqlite" } -spin-outbound-networking = { path = "../outbound-networking" } -spin-sqlite = { path = "../sqlite" } -spin-sqlite-inproc = { path = "../sqlite-inproc" } -spin-sqlite-libsql = { path = "../sqlite-libsql" } -spin-world = { path = "../world" } -spin-llm = { path = "../llm" } -spin-llm-local = { path = "../llm-local", optional = true } -spin-llm-remote-http = { path = "../llm-remote-http" } -spin-telemetry = { path = "../telemetry" } -sanitize-filename = "0.4" -serde = "1.0.188" -serde_json = "1.0" +spin-runtime-config = { path = "../runtime-config" } +sanitize-filename = "0.5" +serde = { version = "1", features = ["derive"] } +serde_json = "1" spin-app = { path = "../app" } +spin-common = { path = "../common" } +spin-componentize = { path = "../componentize" } spin-core = { path = "../core" } -spin-loader = { path = "../loader" } -spin-manifest = { path = "../manifest" } -spin-variables = { path = "../variables" } -terminal = { path = "../terminal" } +spin-factor-key-value = { path = "../factor-key-value" } +spin-factor-outbound-http = { path = "../factor-outbound-http" } +spin-factor-llm = { path = "../factor-llm" } +spin-factor-outbound-mqtt = { path = "../factor-outbound-mqtt" } +spin-factor-outbound-networking = { path = "../factor-outbound-networking" } +spin-factor-outbound-pg = { path = "../factor-outbound-pg" } +spin-factor-outbound-mysql = { path = "../factor-outbound-mysql" } +spin-factor-outbound-redis = { path = "../factor-outbound-redis" } +spin-factor-sqlite = { path = "../factor-sqlite" } +spin-factor-variables = { path = "../factor-variables" } +spin-factor-wasi = { 
path = "../factor-wasi" } +spin-factors = { path = "../factors" } +spin-factors-executor = { path = "../factors-executor" } +spin-telemetry = { path = "../telemetry" } tokio = { version = "1.23", features = ["fs"] } -toml = "0.5.9" -url = "2" -spin-componentize = { workspace = true } +terminal = { path = "../terminal" } tracing = { workspace = true } -wasmtime = { workspace = true } -wasmtime-wasi = { workspace = true } -wasmtime-wasi-http = { workspace = true } -[dev-dependencies] -tempfile = "3.8.0" \ No newline at end of file +[lints] +workspace = true diff --git a/crates/trigger/src/cli.rs b/crates/trigger/src/cli.rs index ccd63d67c4..74c5be2719 100644 --- a/crates/trigger/src/cli.rs +++ b/crates/trigger/src/cli.rs @@ -1,24 +1,20 @@ -use std::path::PathBuf; +mod launch_metadata; + +use std::future::Future; +use std::path::{Path, PathBuf}; use anyhow::{Context, Result}; use clap::{Args, IntoApp, Parser}; -use serde::de::DeserializeOwned; -use spin_app::Loader; +use spin_app::App; +use spin_common::ui::quoted_path; +use spin_common::url::parse_file_url; use spin_common::{arg_parser::parse_kv, sloth}; +use spin_factors_executor::{ComponentLoader, FactorsExecutor}; +use spin_runtime_config::{ResolvedRuntimeConfig, UserProvidedPath}; -use crate::network::Network; -use crate::runtime_config::llm::LLmOptions; -use crate::runtime_config::sqlite::SqlitePersistenceMessageHook; -use crate::runtime_config::SummariseRuntimeConfigHook; -use crate::stdio::StdioLoggingTriggerHooks; -use crate::{ - loader::TriggerLoader, - runtime_config::{key_value::KeyValuePersistenceMessageHook, RuntimeConfig}, - stdio::FollowComponents, -}; -use crate::{TriggerExecutor, TriggerExecutorBuilder}; - -mod launch_metadata; +use crate::factors::{TriggerFactors, TriggerFactorsRuntimeConfig}; +use crate::stdio::{FollowComponents, StdioLoggingExecutorHooks}; +use crate::{Trigger, TriggerApp}; pub use launch_metadata::LaunchMetadata; pub const APP_LOG_DIR: &str = "APP_LOG_DIR"; @@ -36,12 +32,9 @@ pub const SPIN_WORKING_DIR: &str = "SPIN_WORKING_DIR"; #[derive(Parser, Debug)] #[clap( usage = "spin [COMMAND] [OPTIONS]", - next_help_heading = help_heading::() + next_help_heading = help_heading::() )] -pub struct TriggerExecutorCommand -where - Executor::RunConfig: Args, -{ +pub struct FactorsTriggerCommand { /// Log directory for the stdout and stderr of components. Setting to /// the empty string disables logging to disk. #[clap( @@ -114,7 +107,7 @@ where pub state_dir: Option, #[clap(flatten)] - pub run_config: Executor::RunConfig, + pub trigger_args: T::CliArgs, /// Set a key/value pair (key=value) in the application's /// default store. Any existing value will be overwritten. @@ -137,15 +130,12 @@ where /// An empty implementation of clap::Args to be used as TriggerExecutor::RunConfig /// for executors that do not need additional CLI args. #[derive(Args)] -pub struct NoArgs; +pub struct NoCliArgs; -impl TriggerExecutorCommand -where - Executor::RunConfig: Args, - Executor::TriggerConfig: DeserializeOwned, -{ +impl FactorsTriggerCommand { /// Create a new TriggerExecutorBuilder from this TriggerExecutorCommand. 
pub async fn run(self) -> Result<()> { + // Handle --help-args-only if self.help_args_only { Self::command() .disable_help_flag(true) @@ -154,8 +144,9 @@ where return Ok(()); } + // Handle --launch-metadata-only if self.launch_metadata_only { - let lm = LaunchMetadata::infer::(); + let lm = LaunchMetadata::infer::(); let json = serde_json::to_string_pretty(&lm)?; eprintln!("{json}"); return Ok(()); @@ -164,17 +155,52 @@ where // Required env vars let working_dir = std::env::var(SPIN_WORKING_DIR).context(SPIN_WORKING_DIR)?; let locked_url = std::env::var(SPIN_LOCKED_URL).context(SPIN_LOCKED_URL)?; + let local_app_dir = std::env::var(SPIN_LOCAL_APP_DIR).ok(); + + let follow_components = self.follow_components(); + + // Load App + let app = { + let path = parse_file_url(&locked_url)?; + let contents = std::fs::read(&path) + .with_context(|| format!("failed to read manifest at {}", quoted_path(&path)))?; + let locked = + serde_json::from_slice(&contents).context("failed to parse app lock file JSON")?; + App::new(locked_url, locked) + }; + + // Validate required host features + if let Err(unmet) = app.ensure_needs_only(&T::supported_host_requirements()) { + anyhow::bail!("This application requires the following features that are not available in this version of the '{}' trigger: {unmet}", T::TYPE); + } - let init_data = crate::HostComponentInitData::new( - &*self.key_values, - &*self.sqlite_statements, - LLmOptions { use_gpu: true }, - ); + let trigger = T::new(self.trigger_args, &app)?; + let mut builder = TriggerAppBuilder::new(trigger, PathBuf::from(working_dir)); + let config = builder.engine_config(); - let loader = TriggerLoader::new(working_dir, self.allow_transient_write); - let executor = self.build_executor(loader, locked_url, init_data).await?; + // Apply --cache / --disable-cache + if !self.disable_cache { + config.enable_cache(&self.cache)?; + } - let run_fut = executor.run(self.run_config); + if self.disable_pooling { + config.disable_pooling(); + } + + let run_fut = builder + .run( + app, + TriggerAppOptions { + runtime_config_file: self.runtime_config_file.as_deref(), + state_dir: self.state_dir.as_deref(), + local_app_dir: local_app_dir.as_deref(), + initial_key_values: self.key_values, + allow_transient_write: self.allow_transient_write, + follow_components, + log_dir: self.log, + }, + ) + .await?; let (abortable, abort_handle) = futures::future::abortable(run_fut); ctrlc::set_handler(move || abort_handle.abort())?; @@ -194,43 +220,6 @@ where } } - async fn build_executor( - &self, - loader: impl Loader + Send + Sync + 'static, - locked_url: String, - init_data: crate::HostComponentInitData, - ) -> Result { - let runtime_config = self.build_runtime_config()?; - - let _sloth_guard = warn_if_wasm_build_slothful(); - - let mut builder = TriggerExecutorBuilder::new(loader); - self.update_config(builder.config_mut())?; - - builder.hooks(StdioLoggingTriggerHooks::new(self.follow_components())); - builder.hooks(Network::default()); - builder.hooks(SummariseRuntimeConfigHook::new(&self.runtime_config_file)); - builder.hooks(KeyValuePersistenceMessageHook); - builder.hooks(SqlitePersistenceMessageHook); - - builder.build(locked_url, runtime_config, init_data).await - } - - fn build_runtime_config(&self) -> Result { - let local_app_dir = std::env::var_os(SPIN_LOCAL_APP_DIR); - let mut config = RuntimeConfig::new(local_app_dir.map(Into::into)); - if let Some(state_dir) = &self.state_dir { - config.set_state_dir(state_dir); - } - if let Some(log_dir) = &self.log { - 
config.set_log_dir(log_dir); - } - if let Some(config_file) = &self.runtime_config_file { - config.merge_config_file(config_file)?; - } - Ok(config) - } - fn follow_components(&self) -> FollowComponents { if self.silence_component_logs { FollowComponents::None @@ -241,19 +230,6 @@ where FollowComponents::Named(followed) } } - - fn update_config(&self, config: &mut spin_core::Config) -> Result<()> { - // Apply --cache / --disable-cache - if !self.disable_cache { - config.enable_cache(&self.cache)?; - } - - if self.disable_pooling { - config.disable_pooling(); - } - - Ok(()) - } } const SLOTH_WARNING_DELAY_MILLIS: u64 = 1250; @@ -270,16 +246,168 @@ fn warn_if_wasm_build_slothful() -> sloth::SlothGuard { sloth::warn_if_slothful(SLOTH_WARNING_DELAY_MILLIS, format!("{message}\n")) } -fn help_heading() -> Option<&'static str> { - if E::TRIGGER_TYPE == help::HelpArgsOnlyTrigger::TRIGGER_TYPE { +fn help_heading() -> Option<&'static str> { + if T::TYPE == help::HelpArgsOnlyTrigger::TYPE { Some("TRIGGER OPTIONS") } else { - let heading = format!("{} TRIGGER OPTIONS", E::TRIGGER_TYPE.to_uppercase()); + let heading = format!("{} TRIGGER OPTIONS", T::TYPE.to_uppercase()); let as_str = Box::new(heading).leak(); Some(as_str) } } +/// A builder for a [`TriggerApp`]. +pub struct TriggerAppBuilder { + engine_config: spin_core::Config, + working_dir: PathBuf, + pub trigger: T, +} + +/// Options for building a [`TriggerApp`]. +#[derive(Default)] +pub struct TriggerAppOptions<'a> { + /// Path to the runtime config file. + runtime_config_file: Option<&'a Path>, + /// Path to the state directory. + state_dir: Option<&'a str>, + /// Path to the local app directory. + local_app_dir: Option<&'a str>, + /// Initial key/value pairs to set in the app's default store. + initial_key_values: Vec<(String, String)>, + /// Whether to allow transient writes to mounted files + allow_transient_write: bool, + /// Which components should have their logs followed. + follow_components: FollowComponents, + /// Log directory for component stdout/stderr. + log_dir: Option, +} + +impl TriggerAppBuilder { + pub fn new(trigger: T, working_dir: PathBuf) -> Self { + Self { + engine_config: spin_core::Config::default(), + working_dir, + trigger, + } + } + + pub fn engine_config(&mut self) -> &mut spin_core::Config { + &mut self.engine_config + } + + /// Build a [`TriggerApp`] from the given [`App`] and options. + pub async fn build( + &mut self, + app: App, + options: TriggerAppOptions<'_>, + ) -> anyhow::Result> { + let mut core_engine_builder = { + self.trigger.update_core_config(&mut self.engine_config)?; + + spin_core::Engine::builder(&self.engine_config)? 
+ }; + self.trigger.add_to_linker(core_engine_builder.linker())?; + + let runtime_config_path = options.runtime_config_file; + let local_app_dir = options.local_app_dir.map(PathBuf::from); + let state_dir = match options.state_dir { + // Make sure `--state-dir=""` unsets the state dir + Some("") => UserProvidedPath::Unset, + Some(s) => UserProvidedPath::Provided(PathBuf::from(s)), + None => UserProvidedPath::Default, + }; + let log_dir = match &options.log_dir { + // Make sure `--log-dir=""` unsets the log dir + Some(p) if p.as_os_str().is_empty() => UserProvidedPath::Unset, + Some(p) => UserProvidedPath::Provided(p.clone()), + None => UserProvidedPath::Default, + }; + // Hardcode `use_gpu` to true for now + let use_gpu = true; + let runtime_config = + ResolvedRuntimeConfig::::from_optional_file( + runtime_config_path, + local_app_dir, + state_dir, + log_dir, + use_gpu, + )?; + + runtime_config + .set_initial_key_values(&options.initial_key_values) + .await?; + + let log_dir = runtime_config.log_dir(); + let factors = TriggerFactors::new( + runtime_config.state_dir(), + self.working_dir.clone(), + options.allow_transient_write, + runtime_config.key_value_resolver, + runtime_config.sqlite_resolver, + use_gpu, + ) + .context("failed to create factors")?; + + // TODO(factors): handle: self.sqlite_statements + + // TODO: port the rest of the component loader logic + struct SimpleComponentLoader; + impl ComponentLoader for SimpleComponentLoader { + fn load_component( + &mut self, + engine: &spin_core::wasmtime::Engine, + component: &spin_factors::AppComponent, + ) -> anyhow::Result { + let source = component + .source() + .content + .source + .as_ref() + .context("LockedComponentSource missing source field")?; + let path = parse_file_url(source)?; + let bytes = std::fs::read(&path).with_context(|| { + format!( + "failed to read component source from disk at path {}", + quoted_path(&path) + ) + })?; + let component = spin_componentize::componentize_if_necessary(&bytes) + .with_context(|| format!("preparing wasm {}", quoted_path(&path)))?; + spin_core::Component::new(engine, component) + .with_context(|| format!("compiling wasm {}", quoted_path(&path))) + } + } + + let mut executor = FactorsExecutor::new(core_engine_builder, factors)?; + + executor.add_hooks(StdioLoggingExecutorHooks::new( + options.follow_components, + log_dir, + )); + // TODO: + // builder.hooks(SummariseRuntimeConfigHook::new(&self.runtime_config_file)); + // builder.hooks(KeyValuePersistenceMessageHook); + // builder.hooks(SqlitePersistenceMessageHook); + + let configured_app = { + let _sloth_guard = warn_if_wasm_build_slothful(); + executor.load_app(app, runtime_config.runtime_config, SimpleComponentLoader)? + }; + + Ok(configured_app) + } + + /// Run the [`TriggerApp`] with the given [`App`] and options. + pub async fn run( + mut self, + app: App, + options: TriggerAppOptions<'_>, + ) -> anyhow::Result>> { + let configured_app = self.build(app, options).await?; + Ok(self.trigger.run(configured_app)) + } +} + pub mod help { use super::*; @@ -287,17 +415,22 @@ pub mod help { /// a `spin.toml` file. 
pub struct HelpArgsOnlyTrigger; - #[async_trait::async_trait] - impl TriggerExecutor for HelpArgsOnlyTrigger { - const TRIGGER_TYPE: &'static str = "help-args-only"; - type RuntimeData = (); - type TriggerConfig = (); - type RunConfig = NoArgs; - type InstancePre = spin_core::InstancePre; - async fn new(_: crate::TriggerAppEngine) -> Result { + impl Trigger for HelpArgsOnlyTrigger { + const TYPE: &'static str = "help-args-only"; + type CliArgs = NoCliArgs; + type InstanceState = (); + + fn new(_cli_args: Self::CliArgs, _app: &App) -> anyhow::Result { Ok(Self) } - async fn run(self, _: Self::RunConfig) -> Result<()> { + + async fn run( + self, + _configured_app: spin_factors_executor::FactorsExecutorApp< + TriggerFactors, + Self::InstanceState, + >, + ) -> anyhow::Result<()> { Ok(()) } } diff --git a/crates/trigger/src/cli/launch_metadata.rs b/crates/trigger/src/cli/launch_metadata.rs index 0be69b2824..be78e3634d 100644 --- a/crates/trigger/src/cli/launch_metadata.rs +++ b/crates/trigger/src/cli/launch_metadata.rs @@ -1,8 +1,8 @@ -use clap::{Args, CommandFactory}; +use clap::CommandFactory; use serde::{Deserialize, Serialize}; use std::ffi::OsString; -use crate::{cli::TriggerExecutorCommand, TriggerExecutor}; +use crate::{cli::FactorsTriggerCommand, Trigger}; /// Contains information about the trigger flags (and potentially /// in future configuration) that a consumer (such as `spin up`) @@ -27,11 +27,8 @@ struct LaunchFlag { } impl LaunchMetadata { - pub fn infer() -> Self - where - Executor::RunConfig: Args, - { - let all_flags: Vec<_> = TriggerExecutorCommand::::command() + pub fn infer() -> Self { + let all_flags: Vec<_> = FactorsTriggerCommand::::command() .get_arguments() .map(LaunchFlag::infer) .collect(); diff --git a/crates/trigger/src/factors.rs b/crates/trigger/src/factors.rs new file mode 100644 index 0000000000..3a1b0993a9 --- /dev/null +++ b/crates/trigger/src/factors.rs @@ -0,0 +1,90 @@ +use std::path::PathBuf; + +use anyhow::Context as _; +use spin_factor_key_value::KeyValueFactor; +use spin_factor_llm::LlmFactor; +use spin_factor_outbound_http::OutboundHttpFactor; +use spin_factor_outbound_mqtt::{NetworkedMqttClient, OutboundMqttFactor}; +use spin_factor_outbound_mysql::OutboundMysqlFactor; +use spin_factor_outbound_networking::OutboundNetworkingFactor; +use spin_factor_outbound_pg::OutboundPgFactor; +use spin_factor_outbound_redis::OutboundRedisFactor; +use spin_factor_sqlite::SqliteFactor; +use spin_factor_variables::VariablesFactor; +use spin_factor_wasi::{spin::SpinFilesMounter, WasiFactor}; +use spin_factors::RuntimeFactors; +use spin_runtime_config::TomlRuntimeConfigSource; + +#[derive(RuntimeFactors)] +pub struct TriggerFactors { + pub wasi: WasiFactor, + pub variables: VariablesFactor, + pub key_value: KeyValueFactor, + pub outbound_networking: OutboundNetworkingFactor, + pub outbound_http: OutboundHttpFactor, + pub sqlite: SqliteFactor, + pub redis: OutboundRedisFactor, + pub mqtt: OutboundMqttFactor, + pub pg: OutboundPgFactor, + pub mysql: OutboundMysqlFactor, + pub llm: LlmFactor, +} + +impl TriggerFactors { + pub fn new( + state_dir: Option, + working_dir: impl Into, + allow_transient_writes: bool, + default_key_value_label_resolver: impl spin_factor_key_value::DefaultLabelResolver + 'static, + default_sqlite_label_resolver: impl spin_factor_sqlite::DefaultLabelResolver + 'static, + use_gpu: bool, + ) -> anyhow::Result { + Ok(Self { + wasi: wasi_factor(working_dir, allow_transient_writes), + variables: VariablesFactor::default(), + key_value: 
KeyValueFactor::new(default_key_value_label_resolver), + outbound_networking: outbound_networking_factor(), + outbound_http: OutboundHttpFactor::new(), + sqlite: SqliteFactor::new(default_sqlite_label_resolver), + redis: OutboundRedisFactor::new(), + mqtt: OutboundMqttFactor::new(NetworkedMqttClient::creator()), + pg: OutboundPgFactor::new(), + mysql: OutboundMysqlFactor::new(), + llm: LlmFactor::new( + spin_factor_llm::spin::default_engine_creator(state_dir, use_gpu) + .context("failed to configure LLM factor")?, + ), + }) + } +} + +fn wasi_factor(working_dir: impl Into, allow_transient_writes: bool) -> WasiFactor { + WasiFactor::new(SpinFilesMounter::new(working_dir, allow_transient_writes)) +} + +fn outbound_networking_factor() -> OutboundNetworkingFactor { + fn disallowed_host_handler(scheme: &str, authority: &str) { + let host_pattern = format!("{scheme}://{authority}"); + tracing::error!("Outbound network destination not allowed: {host_pattern}"); + if scheme.starts_with("http") && authority == "self" { + terminal::warn!("A component tried to make an HTTP request to its own app but it does not have permission."); + } else { + terminal::warn!( + "A component tried to make an outbound network connection to disallowed destination '{host_pattern}'." + ); + }; + eprintln!("To allow this request, add 'allowed_outbound_hosts = [\"{host_pattern}\"]' to the manifest component section."); + } + + let mut factor = OutboundNetworkingFactor::new(); + factor.set_disallowed_host_handler(disallowed_host_handler); + factor +} + +impl TryFrom> for TriggerFactorsRuntimeConfig { + type Error = anyhow::Error; + + fn try_from(value: TomlRuntimeConfigSource<'_, '_>) -> Result { + Self::from_source(value) + } +} diff --git a/crates/trigger/src/lib.rs b/crates/trigger/src/lib.rs index 506221c588..b5f1fd3376 100644 --- a/crates/trigger/src/lib.rs +++ b/crates/trigger/src/lib.rs @@ -1,483 +1,64 @@ pub mod cli; -pub mod loader; -pub mod network; -mod runtime_config; +mod factors; mod stdio; -use std::{collections::HashMap, marker::PhantomData}; +use std::future::Future; -use anyhow::{Context, Result}; -pub use async_trait::async_trait; -use http::uri::Authority; -use runtime_config::llm::LLmOptions; -use serde::de::DeserializeOwned; +use clap::Args; +use factors::{TriggerFactors, TriggerFactorsInstanceState}; +use spin_core::Linker; +use spin_factors_executor::{FactorsExecutorApp, FactorsInstanceBuilder}; -use spin_app::{App, AppComponent, AppLoader, AppTrigger, Loader, OwnedApp, APP_NAME_KEY}; -use spin_core::{ - Config, Engine, EngineBuilder, Instance, InstancePre, OutboundWasiHttpHandler, Store, - StoreBuilder, WasiVersion, -}; +pub use spin_app::App; -pub use crate::runtime_config::{ParsedClientTlsOpts, RuntimeConfig}; +/// Type alias for a [`FactorsExecutorApp`] specialized to a [`Trigger`]. +pub type TriggerApp = FactorsExecutorApp::InstanceState>; -#[async_trait] -pub trait TriggerExecutor: Sized + Send + Sync { - const TRIGGER_TYPE: &'static str; - type RuntimeData: OutboundWasiHttpHandler + Default + Send + Sync + 'static; - type TriggerConfig; - type RunConfig; - type InstancePre: TriggerInstancePre; +pub type TriggerInstanceBuilder<'a, T> = + FactorsInstanceBuilder<'a, TriggerFactors, ::InstanceState>; - /// Create a new trigger executor. - async fn new(engine: TriggerAppEngine) -> Result; +pub type Store = spin_core::Store>; - /// Run the trigger executor. 
- async fn run(self, config: Self::RunConfig) -> Result<()>; +type TriggerInstanceState = spin_factors_executor::InstanceState< + TriggerFactorsInstanceState, + ::InstanceState, +>; - /// Make changes to the ExecutionContext using the given Builder. - fn configure_engine(_builder: &mut EngineBuilder) -> Result<()> { - Ok(()) - } - - fn supported_host_requirements() -> Vec<&'static str> { - Vec::new() - } -} - -/// Helper type alias to project the `Instance` of a given `TriggerExecutor`. -pub type ExecutorInstance = <::InstancePre as TriggerInstancePre< - ::RuntimeData, - ::TriggerConfig, ->>::Instance; - -#[async_trait] -pub trait TriggerInstancePre: Sized + Send + Sync -where - T: OutboundWasiHttpHandler + Send + Sync, -{ - type Instance; - - async fn instantiate_pre( - engine: &Engine, - component: &AppComponent, - config: &C, - ) -> Result; +pub trait Trigger: Sized + Send { + const TYPE: &'static str; - async fn instantiate(&self, store: &mut Store) -> Result; -} - -#[async_trait] -impl TriggerInstancePre for InstancePre -where - T: OutboundWasiHttpHandler + Send + Sync, -{ - type Instance = Instance; - - async fn instantiate_pre( - engine: &Engine, - component: &AppComponent, - _config: &C, - ) -> Result { - let comp = component.load_component(engine).await?; - Ok(engine - .instantiate_pre(&comp) - .with_context(|| format!("Failed to instantiate component '{}'", component.id()))?) - } - - async fn instantiate(&self, store: &mut Store) -> Result { - self.instantiate_async(store).await - } -} + type CliArgs: Args; + type InstanceState: Send + 'static; -pub struct TriggerExecutorBuilder { - loader: AppLoader, - config: Config, - hooks: Vec>, - disable_default_host_components: bool, - _phantom: PhantomData, -} - -impl TriggerExecutorBuilder { - /// Create a new TriggerExecutorBuilder with the given Application. - pub fn new(loader: impl Loader + Send + Sync + 'static) -> Self { - Self { - loader: AppLoader::new(loader), - config: Default::default(), - hooks: Default::default(), - disable_default_host_components: false, - _phantom: PhantomData, - } - } + /// Constructs a new trigger. + fn new(cli_args: Self::CliArgs, app: &App) -> anyhow::Result; - /// !!!Warning!!! Using a custom Wasmtime Config is entirely unsupported; - /// many configurations are likely to cause errors or unexpected behavior. + /// Update the [`spin_core::Config`] for this trigger. + /// + /// !!!Warning!!! This is unsupported; many configurations are likely to + /// cause errors or unexpected behavior, especially in future versions. 
#[doc(hidden)] - pub fn config_mut(&mut self) -> &mut spin_core::Config { - &mut self.config - } - - pub fn hooks(&mut self, hooks: impl TriggerHooks + 'static) -> &mut Self { - self.hooks.push(Box::new(hooks)); - self - } - - pub fn disable_default_host_components(&mut self) -> &mut Self { - self.disable_default_host_components = true; - self - } - - pub async fn build( - mut self, - app_uri: String, - runtime_config: runtime_config::RuntimeConfig, - init_data: HostComponentInitData, - ) -> Result - where - Executor::TriggerConfig: DeserializeOwned, - { - let resolver_cell = std::sync::Arc::new(std::sync::OnceLock::new()); - - let engine = { - let mut builder = Engine::builder(&self.config)?; - - if !self.disable_default_host_components { - // Wasmtime 17: WASI@0.2.0 - builder.link_import(|l, _| { - wasmtime_wasi::add_to_linker_async(l)?; - wasmtime_wasi_http::proxy::add_only_http_to_linker(l) - })?; - - // Wasmtime 15: WASI@0.2.0-rc-2023-11-10 - builder.link_import(|l, _| spin_core::wasi_2023_11_10::add_to_linker(l))?; - - // Wasmtime 14: WASI@0.2.0-rc-2023-10-18 - builder.link_import(|l, _| spin_core::wasi_2023_10_18::add_to_linker(l))?; - - self.loader.add_dynamic_host_component( - &mut builder, - outbound_redis::OutboundRedisComponent { - resolver: resolver_cell.clone(), - }, - )?; - self.loader.add_dynamic_host_component( - &mut builder, - outbound_mqtt::OutboundMqttComponent { - resolver: resolver_cell.clone(), - }, - )?; - self.loader.add_dynamic_host_component( - &mut builder, - outbound_mysql::OutboundMysqlComponent { - resolver: resolver_cell.clone(), - }, - )?; - self.loader.add_dynamic_host_component( - &mut builder, - outbound_pg::OutboundPgComponent { - resolver: resolver_cell.clone(), - }, - )?; - self.loader.add_dynamic_host_component( - &mut builder, - runtime_config::llm::build_component(&runtime_config, init_data.llm.use_gpu) - .await, - )?; - self.loader.add_dynamic_host_component( - &mut builder, - runtime_config::key_value::build_key_value_component( - &runtime_config, - &init_data.kv, - ) - .await?, - )?; - self.loader.add_dynamic_host_component( - &mut builder, - runtime_config::sqlite::build_component(&runtime_config, &init_data.sqlite) - .await?, - )?; - self.loader.add_dynamic_host_component( - &mut builder, - outbound_http::OutboundHttpComponent { - resolver: resolver_cell.clone(), - }, - )?; - self.loader.add_dynamic_host_component( - &mut builder, - spin_variables::VariablesHostComponent::new( - runtime_config.variables_providers()?, - ), - )?; - } - - Executor::configure_engine(&mut builder)?; - builder.build() - }; - - let app = self.loader.load_owned_app(app_uri).await?; - - if let Err(unmet) = app - .borrowed() - .ensure_needs_only(&Executor::supported_host_requirements()) - { - anyhow::bail!("This application requires the following features that are not available in this version of the '{}' trigger: {unmet}", Executor::TRIGGER_TYPE); - } - - let app_name = app.borrowed().require_metadata(APP_NAME_KEY)?; - - let resolver = - spin_variables::make_resolver(app.borrowed(), runtime_config.variables_providers()?)?; - let prepared_resolver = std::sync::Arc::new(resolver.prepare().await?); - resolver_cell - .set(prepared_resolver.clone()) - .map_err(|_| anyhow::anyhow!("resolver cell was already set!"))?; - - self.hooks - .iter_mut() - .try_for_each(|h| h.app_loaded(app.borrowed(), &runtime_config, &prepared_resolver))?; - - // Run trigger executor - Executor::new( - TriggerAppEngine::new( - engine, - app_name, - app, - self.hooks, - &prepared_resolver, - 
runtime_config.client_tls_opts()?, - ) - .await?, - ) - .await - } -} - -/// Initialization data for host components. -#[derive(Default)] // TODO: the implementation of Default is only for tests - would like to get rid of -pub struct HostComponentInitData { - kv: Vec<(String, String)>, - sqlite: Vec, - llm: LLmOptions, -} - -impl HostComponentInitData { - /// Create an instance of `HostComponentInitData`. `key_value_init_values` - /// will be added to the default key-value store; `sqlite_init_statements` - /// will be run against the default SQLite database. - pub fn new( - key_value_init_values: impl Into>, - sqlite_init_statements: impl Into>, - llm: LLmOptions, - ) -> Self { - Self { - kv: key_value_init_values.into(), - sqlite: sqlite_init_statements.into(), - llm, - } - } -} - -/// Execution context for a TriggerExecutor executing a particular App. -pub struct TriggerAppEngine { - /// Engine to be used with this executor. - pub engine: Engine, - /// Name of the app for e.g. logging. - pub app_name: String, - // An owned wrapper of the App. - app: OwnedApp, - // Trigger hooks - hooks: Vec>, - // Trigger configs for this trigger type, with order matching `app.triggers_with_type(Executor::TRIGGER_TYPE)` - trigger_configs: Vec, - // Map of {Component ID -> InstancePre} for each component. - component_instance_pres: HashMap, - // Resolver for value template expressions - resolver: std::sync::Arc, - // Map of { Component ID -> Map of { Authority -> ParsedClientTlsOpts} } - client_tls_opts: HashMap>, -} - -impl TriggerAppEngine { - /// Returns a new TriggerAppEngine. May return an error if trigger config validation or - /// component pre-instantiation fails. - pub async fn new( - engine: Engine, - app_name: String, - app: OwnedApp, - hooks: Vec>, - resolver: &std::sync::Arc, - client_tls_opts: HashMap>, - ) -> Result - where - ::TriggerConfig: DeserializeOwned, - { - let trigger_configs = app - .borrowed() - .triggers_with_type(Executor::TRIGGER_TYPE) - .map(|trigger| { - Ok(( - trigger.component()?.id().to_owned(), - trigger.typed_config().with_context(|| { - format!("invalid trigger configuration for {:?}", trigger.id()) - })?, - )) - }) - .collect::>>()?; - - let mut component_instance_pres = HashMap::default(); - for component in app.borrowed().components() { - let id = component.id(); - // There is an issue here for triggers that consider the trigger config during - // preinstantiation. We defer this for now because the only case is the HTTP - // `executor` field and that should not differ from trigger to trigger. - let trigger_config = trigger_configs - .iter() - .find(|(c, _)| c == id) - .map(|(_, cfg)| cfg); - if let Some(config) = trigger_config { - component_instance_pres.insert( - id.to_owned(), - Executor::InstancePre::instantiate_pre(&engine, &component, config) - .await - .with_context(|| format!("Failed to instantiate component '{id}'"))?, - ); - } else { - tracing::warn!( - "component '{id}' is not used by any triggers in app '{app_name}'", - id = id, - app_name = app_name - ) - } - } - - Ok(Self { - engine, - app_name, - app, - hooks, - trigger_configs: trigger_configs.into_iter().map(|(_, v)| v).collect(), - component_instance_pres, - resolver: resolver.clone(), - client_tls_opts, - }) - } - - /// Returns a reference to the App. 
- pub fn app(&self) -> &App { - self.app.borrowed() - } - - pub fn trigger_metadata(&self) -> spin_app::Result> { - self.app().get_trigger_metadata(Executor::TRIGGER_TYPE) - } - - /// Returns AppTriggers and typed TriggerConfigs for this executor type. - pub fn trigger_configs(&self) -> impl Iterator { - self.app() - .triggers_with_type(Executor::TRIGGER_TYPE) - .zip(&self.trigger_configs) - } - - /// Returns a new StoreBuilder for the given component ID. - pub fn store_builder( - &self, - component_id: &str, - wasi_version: WasiVersion, - ) -> Result { - let mut builder = self.engine.store_builder(wasi_version); - let component = self.get_component(component_id)?; - self.hooks - .iter() - .try_for_each(|h| h.component_store_builder(&component, &mut builder))?; - Ok(builder) - } - - /// Returns a new Store and Instance for the given component ID. - pub async fn prepare_instance( - &self, - component_id: &str, - ) -> Result<(ExecutorInstance, Store)> { - let store_builder = self.store_builder(component_id, WasiVersion::Preview2)?; - self.prepare_instance_with_store(component_id, store_builder) - .await - } - - /// Returns a new Store and Instance for the given component ID and StoreBuilder. - pub async fn prepare_instance_with_store( - &self, - component_id: &str, - mut store_builder: StoreBuilder, - ) -> Result<(ExecutorInstance, Store)> { - let component = self.get_component(component_id)?; - - // Build Store - component.apply_store_config(&mut store_builder).await?; - let mut store = store_builder.build()?; - - // Instantiate - let pre = self - .component_instance_pres - .get(component_id) - .expect("component_instance_pres missing valid component_id"); - - let instance = pre.instantiate(&mut store).await.with_context(|| { - format!( - "app {:?} component {:?} instantiation failed", - self.app_name, component_id - ) - })?; - - Ok((instance, store)) - } - - pub fn get_component(&self, component_id: &str) -> Result { - self.app().get_component(component_id).with_context(|| { - format!( - "app {:?} has no component {:?}", - self.app_name, component_id - ) - }) - } - - pub fn get_client_tls_opts( - &self, - component_id: &str, - ) -> Option> { - self.client_tls_opts.get(component_id).cloned() - } - - pub fn resolve_template( - &self, - template: &spin_expressions::Template, - ) -> Result { - self.resolver.resolve_template(template) + fn update_core_config(&mut self, config: &mut spin_core::Config) -> anyhow::Result<()> { + let _ = config; + Ok(()) } -} - -/// TriggerHooks allows a Spin environment to hook into a TriggerAppEngine's -/// configuration and execution processes. -pub trait TriggerHooks: Send + Sync { - #![allow(unused_variables)] - /// Called once, immediately after an App is loaded. - fn app_loaded( + /// Update the [`Linker`] for this trigger. + fn add_to_linker( &mut self, - app: &App, - runtime_config: &RuntimeConfig, - resolver: &std::sync::Arc, - ) -> Result<()> { + linker: &mut Linker>, + ) -> anyhow::Result<()> { + let _ = linker; Ok(()) } - /// Called while an AppComponent is being prepared for execution. - /// Implementations may update the given StoreBuilder to change the - /// environment of the instance to be executed. - fn component_store_builder( - &self, - component: &AppComponent, - store_builder: &mut StoreBuilder, - ) -> Result<()> { - Ok(()) + /// Run this trigger. + fn run(self, trigger_app: TriggerApp) -> impl Future> + Send; + + /// Returns a list of host requirements supported by this trigger specifically. + /// + /// See [`App::ensure_needs_only`]. 
+ fn supported_host_requirements() -> Vec<&'static str> { + Vec::new() } } - -impl TriggerHooks for () {} diff --git a/crates/trigger/src/loader.rs b/crates/trigger/src/loader.rs deleted file mode 100644 index e46fab1044..0000000000 --- a/crates/trigger/src/loader.rs +++ /dev/null @@ -1,256 +0,0 @@ -use std::path::{Path, PathBuf}; - -use anyhow::{ensure, Context, Result}; -use async_trait::async_trait; -use spin_app::{ - locked::{LockedApp, LockedComponentSource}, - AppComponent, Loader, -}; -use spin_componentize::bugs::WasiLibc377Bug; -use spin_core::StoreBuilder; -use tokio::fs; - -use spin_common::{ui::quoted_path, url::parse_file_url}; - -/// Compilation status of all components of a Spin application -pub enum AotCompilationStatus { - /// No components are ahead of time (AOT) compiled. - Disabled, - #[cfg(feature = "unsafe-aot-compilation")] - /// All components are componentized and ahead of time (AOT) compiled to cwasm. - Enabled, -} - -/// Loader for the Spin runtime -pub struct TriggerLoader { - /// Working directory where files for mounting exist. - working_dir: PathBuf, - /// Set the static assets of the components in the temporary directory as writable. - allow_transient_write: bool, - /// Declares the compilation status of all components of a Spin application. - aot_compilation_status: AotCompilationStatus, -} - -impl TriggerLoader { - pub fn new(working_dir: impl Into, allow_transient_write: bool) -> Self { - Self { - working_dir: working_dir.into(), - allow_transient_write, - aot_compilation_status: AotCompilationStatus::Disabled, - } - } - - /// Updates the TriggerLoader to load AOT precompiled components - /// - /// **Warning: This feature may bypass important security guarantees of the - /// Wasmtime - // security sandbox if used incorrectly! Read this documentation - // carefully.** - /// - /// Usually, components are compiled just-in-time from portable Wasm - /// sources. This method causes components to instead be loaded - /// ahead-of-time as Wasmtime-precompiled native executable binaries. - /// Precompiled binaries must be produced with a compatible Wasmtime engine - /// using the same Wasmtime version and compiler target settings - typically - /// by a host with the same processor that will be executing them. See the - /// Wasmtime documentation for more information: - /// https://docs.rs/wasmtime/latest/wasmtime/struct.Module.html#method.deserialize - /// - /// # Safety - /// - /// This method is marked as `unsafe` because it enables potentially unsafe - /// behavior if - // used to load malformed or malicious precompiled binaries. Loading sources - // from an - /// incompatible Wasmtime engine will fail but is otherwise safe. This - /// method is safe if it can be guaranteed that `::load_component` will only ever be called with a trusted - /// `LockedComponentSource`. 
**Precompiled binaries must never be loaded - /// from untrusted sources.** - #[cfg(feature = "unsafe-aot-compilation")] - pub unsafe fn enable_loading_aot_compiled_components(&mut self) { - self.aot_compilation_status = AotCompilationStatus::Enabled; - } -} - -#[async_trait] -impl Loader for TriggerLoader { - async fn load_app(&self, url: &str) -> Result { - let path = parse_file_url(url)?; - let contents = std::fs::read(&path) - .with_context(|| format!("failed to read manifest at {}", quoted_path(&path)))?; - let app = - serde_json::from_slice(&contents).context("failed to parse app lock file JSON")?; - Ok(app) - } - - async fn load_component( - &self, - engine: &spin_core::wasmtime::Engine, - source: &LockedComponentSource, - ) -> Result { - let source = source - .content - .source - .as_ref() - .context("LockedComponentSource missing source field")?; - let path = parse_file_url(source)?; - match self.aot_compilation_status { - #[cfg(feature = "unsafe-aot-compilation")] - AotCompilationStatus::Enabled => { - match engine.detect_precompiled_file(&path)?{ - Some(wasmtime::Precompiled::Component) => { - unsafe { - spin_core::Component::deserialize_file(engine, &path) - .with_context(|| format!("deserializing module {}", quoted_path(&path))) - } - }, - Some(wasmtime::Precompiled::Module) => anyhow::bail!("Spin loader is configured to load only AOT compiled components but an AOT compiled module provided at {}", quoted_path(&path)), - None => { - anyhow::bail!("Spin loader is configured to load only AOT compiled components, but {} is not precompiled", quoted_path(&path)) - } - } - } - AotCompilationStatus::Disabled => { - let bytes = fs::read(&path).await.with_context(|| { - format!( - "failed to read component source from disk at path {}", - quoted_path(&path) - ) - })?; - let component = spin_componentize::componentize_if_necessary(&bytes)?; - spin_core::Component::new(engine, component.as_ref()) - .with_context(|| format!("loading module {}", quoted_path(&path))) - } - } - } - - async fn load_module( - &self, - engine: &spin_core::wasmtime::Engine, - source: &LockedComponentSource, - ) -> Result { - let source = source - .content - .source - .as_ref() - .context("LockedComponentSource missing source field")?; - let path = parse_file_url(source)?; - check_uncomponentizable_module_deprecation(&path); - spin_core::Module::from_file(engine, &path) - .with_context(|| format!("loading module {}", quoted_path(&path))) - } - - async fn mount_files( - &self, - store_builder: &mut StoreBuilder, - component: &AppComponent, - ) -> Result<()> { - for content_dir in component.files() { - let source_uri = content_dir - .content - .source - .as_deref() - .with_context(|| format!("Missing 'source' on files mount {content_dir:?}"))?; - let source_path = self.working_dir.join(parse_file_url(source_uri)?); - ensure!( - source_path.is_dir(), - "TriggerLoader only supports directory mounts; {} is not a directory", - quoted_path(&source_path), - ); - let guest_path = content_dir.path.clone(); - if self.allow_transient_write { - store_builder.read_write_preopened_dir(source_path, guest_path)?; - } else { - store_builder.read_only_preopened_dir(source_path, guest_path)?; - } - } - Ok(()) - } -} - -// Check whether the given module is (likely) susceptible to a wasi-libc bug -// that makes it unsafe to componentize. If so, print a deprecation warning; -// this will turn into a hard error in a future release. 
-fn check_uncomponentizable_module_deprecation(module_path: &Path) { - let module = match std::fs::read(module_path) { - Ok(module) => module, - Err(err) => { - tracing::warn!("Failed to read {module_path:?}: {err:#}"); - return; - } - }; - match WasiLibc377Bug::detect(&module) { - Ok(WasiLibc377Bug::ProbablySafe) => {} - not_safe @ Ok(WasiLibc377Bug::ProbablyUnsafe | WasiLibc377Bug::Unknown) => { - println!( - "\n!!! DEPRECATION WARNING !!!\n\ - The Wasm module at {path}\n\ - {verbs} have been compiled with wasi-sdk version <19 and is likely to\n\ - contain a critical memory safety bug. Spin has deprecated execution of these\n\ - modules; they will stop working in a future release.\n\ - For more information, see: https://github.com/fermyon/spin/issues/2552\n", - path = module_path.display(), - verbs = if not_safe.unwrap() == WasiLibc377Bug::ProbablyUnsafe { - "appears to" - } else { - "may" - } - ); - } - Err(err) => { - tracing::warn!("Failed to apply wasi-libc bug heuristic on {module_path:?}: {err:#}"); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use spin_app::locked::ContentRef; - use std::io::Write; - use tempfile::NamedTempFile; - - fn precompiled_component(file: &mut NamedTempFile) -> LockedComponentSource { - let wasmtime_engine = wasmtime::Engine::default(); - let component = wasmtime::component::Component::new(&wasmtime_engine, "(component)") - .unwrap() - .serialize() - .unwrap(); - let file_uri = format!("file://{}", file.path().to_str().unwrap()); - file.write_all(&component).unwrap(); - LockedComponentSource { - content: ContentRef { - source: Some(file_uri), - ..Default::default() - }, - content_type: "application/wasm".to_string(), - } - } - - #[cfg(feature = "unsafe-aot-compilation")] - #[tokio::test] - async fn load_component_succeeds_for_precompiled_component() { - let mut file = NamedTempFile::new().unwrap(); - let source = precompiled_component(&mut file); - let mut loader = super::TriggerLoader::new("/unreferenced", false); - unsafe { - loader.enable_loading_aot_compiled_components(); - } - loader - .load_component(&spin_core::wasmtime::Engine::default(), &source) - .await - .unwrap(); - } - - #[tokio::test] - async fn load_component_fails_for_precompiled_component() { - let mut file = NamedTempFile::new().unwrap(); - let source = precompiled_component(&mut file); - let loader = super::TriggerLoader::new("/unreferenced", false); - let result = loader - .load_component(&spin_core::wasmtime::Engine::default(), &source) - .await; - assert!(result.is_err()); - } -} diff --git a/crates/trigger/src/network.rs b/crates/trigger/src/network.rs deleted file mode 100644 index ce7af9988b..0000000000 --- a/crates/trigger/src/network.rs +++ /dev/null @@ -1,88 +0,0 @@ -use std::sync::Arc; - -use crate::TriggerHooks; - -#[derive(Default)] -pub struct Network { - resolver: Arc, -} - -impl TriggerHooks for Network { - fn app_loaded( - &mut self, - _app: &spin_app::App, - _runtime_config: &crate::RuntimeConfig, - resolver: &Arc, - ) -> anyhow::Result<()> { - self.resolver = resolver.clone(); - Ok(()) - } - - fn component_store_builder( - &self, - component: &spin_app::AppComponent, - store_builder: &mut spin_core::StoreBuilder, - ) -> anyhow::Result<()> { - let hosts = component - .get_metadata(spin_outbound_networking::ALLOWED_HOSTS_KEY)? 
- .unwrap_or_default(); - let allowed_hosts = - spin_outbound_networking::AllowedHostsConfig::parse(&hosts, &self.resolver)?; - match allowed_hosts { - spin_outbound_networking::AllowedHostsConfig::All => store_builder.inherit_network(), - spin_outbound_networking::AllowedHostsConfig::SpecificHosts(configs) => { - for config in configs { - if config.scheme().allows_any() { - match config.host() { - spin_outbound_networking::HostConfig::Any => { - store_builder.inherit_network() - } - spin_outbound_networking::HostConfig::AnySubdomain(_) => continue, - spin_outbound_networking::HostConfig::ToSelf => {} - spin_outbound_networking::HostConfig::List(hosts) => { - for host in hosts { - let Ok(ip_net) = - // Parse the host as an `IpNet` cidr block and if it fails - // then try parsing again with `/32` appended to the end. - host.parse().or_else(|_| format!("{host}/32").parse()) - else { - continue; - }; - add_ip_net(store_builder, ip_net, config.port()); - } - } - spin_outbound_networking::HostConfig::Cidr(ip_net) => { - add_ip_net(store_builder, *ip_net, config.port()) - } - } - } - } - } - } - Ok(()) - } -} - -fn add_ip_net( - store_builder: &mut spin_core::StoreBuilder, - ip_net: ipnet::IpNet, - port: &spin_outbound_networking::PortConfig, -) { - match port { - spin_outbound_networking::PortConfig::Any => { - store_builder.insert_ip_net_port_range(ip_net, 0, None); - } - spin_outbound_networking::PortConfig::List(ports) => { - for port in ports { - match port { - spin_outbound_networking::IndividualPortConfig::Port(p) => { - store_builder.insert_ip_net_port_range(ip_net, *p, p.checked_add(1)); - } - spin_outbound_networking::IndividualPortConfig::Range(r) => { - store_builder.insert_ip_net_port_range(ip_net, r.start, Some(r.end)) - } - } - } - } - } -} diff --git a/crates/trigger/src/runtime_config.rs b/crates/trigger/src/runtime_config.rs deleted file mode 100644 index 4d22c48bcd..0000000000 --- a/crates/trigger/src/runtime_config.rs +++ /dev/null @@ -1,840 +0,0 @@ -pub mod client_tls; -pub mod key_value; -pub mod llm; -pub mod sqlite; -pub mod variables_provider; - -use std::{ - collections::HashMap, - fs, - path::{Path, PathBuf}, - sync::Arc, -}; - -use anyhow::{Context, Result}; -use http::uri::Authority; -use serde::Deserialize; -use spin_common::ui::quoted_path; -use spin_sqlite::Connection; - -use crate::TriggerHooks; - -use self::{ - client_tls::{load_certs, load_key, ClientTlsOpts}, - key_value::{KeyValueStore, KeyValueStoreOpts}, - llm::LlmComputeOpts, - sqlite::SqliteDatabaseOpts, - variables_provider::{VariablesProvider, VariablesProviderOpts}, -}; - -pub const DEFAULT_STATE_DIR: &str = ".spin"; -const DEFAULT_LOGS_DIR: &str = "logs"; -/// RuntimeConfig allows multiple sources of runtime configuration to be -/// queried uniformly. -#[derive(Debug, Default)] -pub struct RuntimeConfig { - local_app_dir: Option, - files: Vec, - overrides: RuntimeConfigOpts, -} - -impl RuntimeConfig { - // Gives more consistent conditional branches - #![allow(clippy::manual_map)] - - pub fn new(local_app_dir: Option) -> Self { - Self { - local_app_dir, - ..Default::default() - } - } - - /// Load a runtime config file from the given path. Options specified in a - /// later-loaded file take precedence over any earlier-loaded files. 
- pub fn merge_config_file(&mut self, path: impl Into) -> Result<()> { - let path = path.into(); - let mut opts = RuntimeConfigOpts::parse_file(&path)?; - opts.file_path = Some(path); - self.files.push(opts); - Ok(()) - } - - /// Return a Vec of configured [`VariablesProvider`]s. - pub fn variables_providers(&self) -> Result> { - let default_provider = - VariablesProviderOpts::default_provider_opts(self).build_provider()?; - let mut providers: Vec = vec![default_provider]; - for opts in self.opts_layers() { - for var_provider in &opts.variables_providers { - let provider = var_provider.build_provider()?; - providers.push(provider); - } - } - Ok(providers) - } - - /// Return an iterator of named configured [`KeyValueStore`]s. - pub fn key_value_stores(&self) -> Result> { - let mut stores = HashMap::new(); - // Insert explicitly-configured stores - for opts in self.opts_layers() { - for (name, store) in &opts.key_value_stores { - if !stores.contains_key(name) { - let store = store.build_store(opts)?; - stores.insert(name.to_owned(), store); - } - } - } - // Upsert default store - if !stores.contains_key("default") { - let store = KeyValueStoreOpts::default_store_opts(self) - .build_store(&RuntimeConfigOpts::default())?; - stores.insert("default".into(), store); - } - Ok(stores.into_iter()) - } - - // Return the "default" key value store config. - fn default_key_value_opts(&self) -> KeyValueStoreOpts { - self.opts_layers() - .find_map(|opts| opts.key_value_stores.get("default")) - .cloned() - .unwrap_or_else(|| KeyValueStoreOpts::default_store_opts(self)) - } - - // Return the "default" key value store config. - fn default_sqlite_opts(&self) -> SqliteDatabaseOpts { - self.opts_layers() - .find_map(|opts| opts.sqlite_databases.get("default")) - .cloned() - .unwrap_or_else(|| SqliteDatabaseOpts::default(self)) - } - - /// Return an iterator of named configured [`SqliteDatabase`]s. - pub async fn sqlite_databases( - &self, - ) -> Result)>> { - let mut databases = HashMap::new(); - // Insert explicitly-configured databases - for opts in self.opts_layers() { - for (name, database) in &opts.sqlite_databases { - if !databases.contains_key(name) { - let store = database.build(opts).await?; - databases.insert(name.to_owned(), store); - } - } - } - // Upsert default store - if !databases.contains_key("default") { - let store = SqliteDatabaseOpts::default(self) - .build(&RuntimeConfigOpts::default()) - .await?; - databases.insert("default".into(), store); - } - Ok(databases.into_iter()) - } - - /// Set the state dir, overriding any other runtime config source. - pub fn set_state_dir(&mut self, state_dir: impl Into) { - self.overrides.state_dir = Some(state_dir.into()); - } - - /// Return the state dir if set. - pub fn state_dir(&self) -> Option { - if let Some(path_str) = self.find_opt(|opts| &opts.state_dir) { - if path_str.is_empty() { - None // An empty string forces the state dir to be unset - } else { - Some(path_str.into()) - } - } else if let Some(app_dir) = &self.local_app_dir { - // If we're running a local app, return the default state dir - Some(app_dir.join(DEFAULT_STATE_DIR)) - } else { - None - } - } - - /// Set the log dir, overriding any other runtime config source. - pub fn set_log_dir(&mut self, log_dir: impl Into) { - self.overrides.log_dir = Some(log_dir.into()); - } - - /// Return the log dir if set. 
- pub fn log_dir(&self) -> Option { - if let Some(path) = self.find_opt(|opts| &opts.log_dir) { - if path.as_os_str().is_empty() { - // If the log dir is explicitly set to "", disable logging - None - } else { - // If there is an explicit log dir set, return it - Some(path.into()) - } - } else if let Some(state_dir) = self.state_dir() { - // If the state dir is set, build the default path - Some(state_dir.join(DEFAULT_LOGS_DIR)) - } else { - None - } - } - - pub fn llm_compute(&self) -> &LlmComputeOpts { - if let Some(compute) = self.find_opt(|opts| &opts.llm_compute) { - compute - } else { - &LlmComputeOpts::Spin - } - } - - // returns the client tls options in form of nested - // HashMap of { Component ID -> HashMap of { Host -> ParsedClientTlsOpts} } - pub fn client_tls_opts( - &self, - ) -> Result>> { - let mut components_map: HashMap> = - HashMap::new(); - - // if available, use the existing client tls opts value for a given component-id and host-authority - // to ensure first-one wins incase of duplicate options - fn use_existing_if_available( - existing_opts: Option<&HashMap>, - host: Authority, - newopts: ParsedClientTlsOpts, - ) -> (Authority, ParsedClientTlsOpts) { - match existing_opts { - None => (host, newopts), - Some(opts) => match opts.get(&host) { - Some(existing_opts_for_component_and_host) => { - (host, existing_opts_for_component_and_host.to_owned()) - } - None => (host, newopts), - }, - } - } - - for opt_layer in self.opts_layers() { - for opts in &opt_layer.client_tls_opts { - let parsed = parse_client_tls_opts(opts).context("parsing client tls options")?; - for component_id in &opts.component_ids { - let existing_opts_for_component = components_map.get(component_id.as_ref()); - #[allow(clippy::mutable_key_type)] - let hostmap = parsed - .hosts - .clone() - .into_iter() - .map(|host| { - use_existing_if_available( - existing_opts_for_component, - host, - parsed.clone(), - ) - }) - .collect::>(); - components_map.insert(component_id.to_string(), hostmap); - } - } - } - - Ok(components_map) - } - - /// Returns an iterator of RuntimeConfigOpts in order of decreasing precedence - fn opts_layers(&self) -> impl Iterator { - std::iter::once(&self.overrides).chain(self.files.iter().rev()) - } - - /// Returns the highest precedence RuntimeConfigOpts Option that is set - fn find_opt(&self, mut f: impl FnMut(&RuntimeConfigOpts) -> &Option) -> Option<&T> { - self.opts_layers().find_map(|opts| f(opts).as_ref()) - } -} - -#[derive(Debug, Default, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct RuntimeConfigOpts { - #[serde(default)] - pub state_dir: Option, - - #[serde(default)] - pub log_dir: Option, - - #[serde(default)] - pub llm_compute: Option, - - #[serde(rename = "variables_provider", alias = "config_provider", default)] - pub variables_providers: Vec, - - #[serde(rename = "key_value_store", default)] - pub key_value_stores: HashMap, - - #[serde(rename = "sqlite_database", default)] - pub sqlite_databases: HashMap, - - #[serde(skip)] - pub file_path: Option, - - #[serde(rename = "client_tls", default)] - pub client_tls_opts: Vec, -} - -impl RuntimeConfigOpts { - fn parse_file(path: &Path) -> Result { - let contents = fs::read_to_string(path) - .with_context(|| format!("Failed to read runtime config file {}", quoted_path(path)))?; - let ext = path.extension().unwrap_or_default(); - let is_json = ext != "toml" && (ext == "json" || contents.trim_start().starts_with('{')); - if is_json { - serde_json::from_str(&contents).with_context(|| { - format!( - "Failed to parse 
runtime config JSON file {}", - quoted_path(path) - ) - }) - } else { - toml::from_str(&contents).with_context(|| { - format!( - "Failed to parse runtime config TOML file {}", - quoted_path(path) - ) - }) - } - } -} - -fn resolve_config_path(path: &Path, config_opts: &RuntimeConfigOpts) -> Result { - if path.is_absolute() { - return Ok(path.to_owned()); - } - let base_path = match &config_opts.file_path { - Some(file_path) => file_path - .parent() - .with_context(|| { - format!( - "failed to get parent of runtime config file path {}", - quoted_path(file_path) - ) - })? - .to_owned(), - None => std::env::current_dir().context("failed to get current directory")?, - }; - Ok(base_path.join(path)) -} - -pub(crate) struct SummariseRuntimeConfigHook { - runtime_config_file: Option, -} - -impl SummariseRuntimeConfigHook { - pub(crate) fn new(runtime_config_file: &Option) -> Self { - Self { - runtime_config_file: runtime_config_file.clone(), - } - } -} - -impl TriggerHooks for SummariseRuntimeConfigHook { - fn app_loaded( - &mut self, - _app: &spin_app::App, - runtime_config: &RuntimeConfig, - _resolver: &Arc, - ) -> anyhow::Result<()> { - if let Some(path) = &self.runtime_config_file { - let mut opts = vec![]; - for opt in runtime_config.opts_layers() { - for (id, opt) in &opt.key_value_stores { - opts.push(Self::summarise_kv(id, opt)); - } - for (id, opt) in &opt.sqlite_databases { - opts.push(Self::summarise_sqlite(id, opt)); - } - if let Some(opt) = &opt.llm_compute { - opts.push(Self::summarise_llm(opt)); - } - } - if !opts.is_empty() { - let opts_text = opts.join(", "); - println!( - "Using {opts_text} runtime config from {}", - quoted_path(path) - ); - } - } - Ok(()) - } -} - -impl SummariseRuntimeConfigHook { - fn summarise_kv(id: &str, opt: &KeyValueStoreOpts) -> String { - let source = match opt { - KeyValueStoreOpts::Spin(_) => "spin", - KeyValueStoreOpts::Redis(_) => "redis", - KeyValueStoreOpts::AzureCosmos(_) => "cosmos", - }; - format!("[key_value_store.{id}: {}]", source) - } - - fn summarise_sqlite(id: &str, opt: &SqliteDatabaseOpts) -> String { - let source = match opt { - SqliteDatabaseOpts::Spin(_) => "spin", - SqliteDatabaseOpts::Libsql(_) => "libsql", - }; - format!("[sqlite_database.{id}: {}]", source) - } - - fn summarise_llm(opt: &LlmComputeOpts) -> String { - let source = match opt { - LlmComputeOpts::Spin => "spin", - LlmComputeOpts::RemoteHttp(_) => "remote-http", - }; - format!("[llm_compute: {}]", source) - } -} - -#[cfg(test)] -mod tests { - use std::io::Write; - - use tempfile::NamedTempFile; - use toml::toml; - - use super::*; - - #[test] - fn defaults_without_local_app_dir() -> Result<()> { - let config = RuntimeConfig::new(None); - - assert_eq!(config.state_dir(), None); - assert_eq!(config.log_dir(), None); - assert_eq!(default_spin_store_path(&config), None); - - Ok(()) - } - - #[test] - fn defaults_with_local_app_dir() -> Result<()> { - let app_dir = tempfile::tempdir()?; - let config = RuntimeConfig::new(Some(app_dir.path().into())); - - let state_dir = config.state_dir().unwrap(); - assert!(state_dir.starts_with(&app_dir)); - - let log_dir = config.log_dir().unwrap(); - assert!(log_dir.starts_with(&state_dir)); - - let default_db_path = default_spin_store_path(&config).unwrap(); - assert!(default_db_path.starts_with(&state_dir)); - - Ok(()) - } - - #[test] - fn state_dir_force_unset() -> Result<()> { - let app_dir = tempfile::tempdir()?; - let mut config = RuntimeConfig::new(Some(app_dir.path().into())); - assert!(config.state_dir().is_some()); - - 
config.set_state_dir(""); - assert!(config.state_dir().is_none()); - - Ok(()) - } - - #[test] - fn opts_layers_precedence() -> Result<()> { - let mut config = RuntimeConfig::new(None); - - merge_config_toml( - &mut config, - toml! { - state_dir = "file-state-dir" - log_dir = "file-log-dir" - }, - ); - - let state_dir = config.state_dir().unwrap(); - assert_eq!(state_dir.as_os_str(), "file-state-dir"); - - let log_dir = config.log_dir().unwrap(); - assert_eq!(log_dir.as_os_str(), "file-log-dir"); - - config.set_state_dir("override-state-dir"); - config.set_log_dir("override-log-dir"); - - let state_dir = config.state_dir().unwrap(); - assert_eq!(state_dir.as_os_str(), "override-state-dir"); - - let log_dir = config.log_dir().unwrap(); - assert_eq!(log_dir.as_os_str(), "override-log-dir"); - - Ok(()) - } - - #[test] - fn deprecated_config_provider_in_runtime_config_file() -> Result<()> { - let mut config = RuntimeConfig::new(None); - - // One default provider - assert_eq!(config.variables_providers()?.len(), 1); - - merge_config_toml( - &mut config, - toml! { - [[config_provider]] - type = "vault" - url = "http://vault" - token = "secret" - mount = "root" - }, - ); - assert_eq!(config.variables_providers()?.len(), 2); - - Ok(()) - } - - #[test] - fn variables_providers_from_file() -> Result<()> { - let mut config = RuntimeConfig::new(None); - - // One default provider - assert_eq!(config.variables_providers()?.len(), 1); - - merge_config_toml( - &mut config, - toml! { - [[variables_provider]] - type = "vault" - url = "http://vault" - token = "secret" - mount = "root" - }, - ); - assert_eq!(config.variables_providers()?.len(), 2); - - Ok(()) - } - - #[test] - fn key_value_stores_from_file() -> Result<()> { - let mut config = RuntimeConfig::new(None); - - // One default store - assert_eq!(config.key_value_stores().unwrap().into_iter().count(), 1); - - merge_config_toml( - &mut config, - toml! { - [key_value_store.default] - type = "spin" - path = "override.db" - - [key_value_store.other] - type = "spin" - path = "other.db" - }, - ); - assert_eq!(config.key_value_stores().unwrap().into_iter().count(), 2); - - Ok(()) - } - - #[test] - fn default_redis_key_value_store_from_file() -> Result<()> { - let mut config = RuntimeConfig::new(None); - - merge_config_toml( - &mut config, - toml! 
{ - [key_value_store.default] - type = "redis" - url = "redis://127.0.0.1/" - }, - ); - assert_eq!(config.key_value_stores().unwrap().into_iter().count(), 1); - - assert!( - matches!(config.default_key_value_opts(), KeyValueStoreOpts::Redis(_)), - "expected default Redis store", - ); - - Ok(()) - } - - fn to_component_id(inp: &str) -> spin_serde::KebabId { - spin_serde::KebabId::try_from(inp.to_string()).expect("parse component id into kebab id") - } - - #[test] - fn test_parsing_valid_hosts_in_client_tls_opts() { - let input = ClientTlsOpts { - component_ids: vec![to_component_id("component-id-foo")], - hosts: vec!["fermyon.com".to_string(), "fermyon.com:5443".to_string()], - ca_roots_file: None, - cert_chain_file: None, - private_key_file: None, - ca_webpki_roots: None, - }; - - let parsed = parse_client_tls_opts(&input); - assert!(parsed.is_ok()); - assert_eq!(parsed.unwrap().hosts.len(), 2) - } - - #[test] - fn test_parsing_empty_hosts_in_client_tls_opts() { - let input = ClientTlsOpts { - component_ids: vec![to_component_id("component-id-foo")], - hosts: vec!["".to_string(), "fermyon.com:5443".to_string()], - ca_roots_file: None, - cert_chain_file: None, - private_key_file: None, - ca_webpki_roots: None, - }; - - let parsed = parse_client_tls_opts(&input); - assert!(parsed.is_err()); - assert_eq!( - "failed to parse uri ''. error: InvalidUri(Empty)", - parsed.unwrap_err().to_string() - ) - } - - #[test] - fn test_parsing_invalid_hosts_in_client_tls_opts() { - let input = ClientTlsOpts { - component_ids: vec![to_component_id("component-id-foo")], - hosts: vec!["perc%ent:443".to_string(), "fermyon.com:5443".to_string()], - ca_roots_file: None, - cert_chain_file: None, - private_key_file: None, - ca_webpki_roots: None, - }; - - let parsed = parse_client_tls_opts(&input); - assert!(parsed.is_err()); - assert_eq!( - "failed to parse uri 'perc%ent:443'. 
error: InvalidUri(InvalidAuthority)", - parsed.unwrap_err().to_string() - ) - } - - #[test] - fn test_parsing_multiple_client_tls_opts() { - let custom_root_ca = r#" ------BEGIN CERTIFICATE----- -MIIBeDCCAR2gAwIBAgIBADAKBggqhkjOPQQDAjAjMSEwHwYDVQQDDBhrM3Mtc2Vy -dmVyLWNhQDE3MTc3ODA1MjAwHhcNMjQwNjA3MTcxNTIwWhcNMzQwNjA1MTcxNTIw -WjAjMSEwHwYDVQQDDBhrM3Mtc2VydmVyLWNhQDE3MTc3ODA1MjAwWTATBgcqhkjO -PQIBBggqhkjOPQMBBwNCAAQnhGmz/r5E+ZBgkg/kpeSliS4LjMFaeFNM3C0SUksV -cVDbymRZt+D2loVpSIn9PnBHUIiR9kz+cmWJaJDhcY6Ho0IwQDAOBgNVHQ8BAf8E -BAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUzXLACkzCDPAXXERIxQim -NdG07zEwCgYIKoZIzj0EAwIDSQAwRgIhALwsHX2R7a7GXfgmn7h8rNRRvlQwyRaG -9hyv0a1cyJr2AiEA8+2vF0CZ/S0MG6rT0Y6xZ+iqi/vhcDnmBhJCxx2rwAI= ------END CERTIFICATE----- -"#; - let mut custom_root_ca_file = NamedTempFile::new().expect("temp file for custom root ca"); - custom_root_ca_file - .write_all(custom_root_ca.as_bytes()) - .expect("write custom root ca file"); - - let runtimeconfig_data = format!( - r#" -[[client_tls]] -hosts = ["localhost:6551"] -component_ids = ["component-no1"] -[[client_tls]] -hosts = ["localhost:6551"] -component_ids = ["component-no2"] -ca_roots_file = "{}" -"#, - custom_root_ca_file.path().to_str().unwrap() - ); - - let mut config = RuntimeConfig::new(None); - merge_config_toml(&mut config, toml::from_str(&runtimeconfig_data).unwrap()); - - let client_tls_opts = config.client_tls_opts(); - assert!(client_tls_opts.is_ok()); - - //assert that component level mapping works as expected - let client_tls_opts_ok = client_tls_opts.as_ref().unwrap(); - - // assert for component-no1 - assert!(client_tls_opts_ok - .get(&"component-no1".to_string()) - .is_some()); - - #[allow(clippy::mutable_key_type)] - let component_no1_client_tls_opts = client_tls_opts_ok - .get(&"component-no1".to_string()) - .expect("get opts for component-no1"); - assert!(component_no1_client_tls_opts - .get(&"localhost:6551".parse::().unwrap()) - .is_some()); - - let component_no1_host_client_tls_opts = component_no1_client_tls_opts - .get(&"localhost:6551".parse::().unwrap()) - .unwrap(); - assert!(component_no1_host_client_tls_opts.custom_root_ca.is_none()); - - // assert for component-no2 - assert!(client_tls_opts_ok - .get(&"component-no2".to_string()) - .is_some()); - - #[allow(clippy::mutable_key_type)] - let component_no2_client_tls_opts = client_tls_opts_ok - .get(&"component-no2".to_string()) - .expect("get opts for component-no2"); - assert!(component_no2_client_tls_opts - .get(&"localhost:6551".parse::().unwrap()) - .is_some()); - - let component_no2_host_client_tls_opts = component_no2_client_tls_opts - .get(&"localhost:6551".parse::().unwrap()) - .unwrap(); - assert!(component_no2_host_client_tls_opts.custom_root_ca.is_some()) - } - - #[test] - fn test_parsing_multiple_overlapping_client_tls_opts() { - let custom_root_ca = r#" ------BEGIN CERTIFICATE----- -MIIBeDCCAR2gAwIBAgIBADAKBggqhkjOPQQDAjAjMSEwHwYDVQQDDBhrM3Mtc2Vy -dmVyLWNhQDE3MTc3ODA1MjAwHhcNMjQwNjA3MTcxNTIwWhcNMzQwNjA1MTcxNTIw -WjAjMSEwHwYDVQQDDBhrM3Mtc2VydmVyLWNhQDE3MTc3ODA1MjAwWTATBgcqhkjO -PQIBBggqhkjOPQMBBwNCAAQnhGmz/r5E+ZBgkg/kpeSliS4LjMFaeFNM3C0SUksV -cVDbymRZt+D2loVpSIn9PnBHUIiR9kz+cmWJaJDhcY6Ho0IwQDAOBgNVHQ8BAf8E -BAMCAqQwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUzXLACkzCDPAXXERIxQim -NdG07zEwCgYIKoZIzj0EAwIDSQAwRgIhALwsHX2R7a7GXfgmn7h8rNRRvlQwyRaG -9hyv0a1cyJr2AiEA8+2vF0CZ/S0MG6rT0Y6xZ+iqi/vhcDnmBhJCxx2rwAI= ------END CERTIFICATE----- -"#; - let mut custom_root_ca_file = NamedTempFile::new().expect("temp file for custom root ca"); - custom_root_ca_file - 
.write_all(custom_root_ca.as_bytes()) - .expect("write custom root ca file"); - - let runtimeconfig_data = format!( - r#" -[[client_tls]] -hosts = ["localhost:6551"] -component_ids = ["component-no1"] -[[client_tls]] -hosts = ["localhost:6551"] -component_ids = ["component-no1"] -ca_roots_file = "{}" -"#, - custom_root_ca_file.path().to_str().unwrap() - ); - - let mut config = RuntimeConfig::new(None); - merge_config_toml(&mut config, toml::from_str(&runtimeconfig_data).unwrap()); - - let client_tls_opts = config.client_tls_opts(); - assert!(client_tls_opts.is_ok()); - - //assert that component level mapping works as expected - let client_tls_opts_ok = client_tls_opts.as_ref().unwrap(); - - // assert for component-no1 - assert!(client_tls_opts_ok - .get(&"component-no1".to_string()) - .is_some()); - - #[allow(clippy::mutable_key_type)] - let component_no1_client_tls_opts = client_tls_opts_ok - .get(&"component-no1".to_string()) - .expect("get opts for component-no1"); - assert!(component_no1_client_tls_opts - .get(&"localhost:6551".parse::().unwrap()) - .is_some()); - - let component_no1_host_client_tls_opts = component_no1_client_tls_opts - .get(&"localhost:6551".parse::().unwrap()) - .unwrap(); - - // verify that the last client_tls block wins for same component-id and host combination - assert!(component_no1_host_client_tls_opts.custom_root_ca.is_none()); - } - - fn merge_config_toml(config: &mut RuntimeConfig, value: toml::Value) { - let data = toml::to_vec(&value).expect("encode toml"); - let mut file = NamedTempFile::new().expect("temp file"); - file.write_all(&data).expect("write toml"); - config.merge_config_file(file.path()).expect("merge config"); - } - - fn default_spin_store_path(config: &RuntimeConfig) -> Option { - match config.default_key_value_opts() { - KeyValueStoreOpts::Spin(opts) => opts.path, - other => panic!("unexpected default store opts {other:?}"), - } - } -} - -// parsed client tls options -#[derive(Debug, Clone)] -pub struct ParsedClientTlsOpts { - pub components: Vec, - pub hosts: Vec, - pub custom_root_ca: Option>>, - pub cert_chain: Option>>, - pub private_key: Option>>, - pub ca_webpki_roots: bool, -} - -fn parse_client_tls_opts(inp: &ClientTlsOpts) -> Result { - let custom_root_ca = match &inp.ca_roots_file { - Some(path) => Some(load_certs(path).context("loading custom root ca")?), - None => None, - }; - - let cert_chain = match &inp.cert_chain_file { - Some(file) => Some(load_certs(file).context("loading client tls certs")?), - None => None, - }; - - let private_key = match &inp.private_key_file { - Some(file) => { - let privatekey = load_key(file).context("loading private key")?; - Some(Arc::from(privatekey)) - } - None => None, - }; - - let parsed_hosts: Vec = inp - .hosts - .clone() - .into_iter() - .map(|s| { - s.parse::() - .map_err(|e| anyhow::anyhow!("failed to parse uri '{}'. error: {:?}", s, e)) - }) - .collect::, anyhow::Error>>()?; - - let custom_root_ca_provided = custom_root_ca.is_some(); - - // use_ca_webpki_roots is true if - // 1. ca_webpki_roots is explicitly true in runtime config OR - // 2. 
custom_root_ca is not provided - // - // if custom_root_ca is provided, use_ca_webpki_roots defaults to false - let ca_webpki_roots = inp.ca_webpki_roots.unwrap_or(!custom_root_ca_provided); - - let parsed_component_ids: Vec = inp - .component_ids - .clone() - .into_iter() - .map(|s| s.to_string()) - .collect(); - - Ok(ParsedClientTlsOpts { - hosts: parsed_hosts, - components: parsed_component_ids, - custom_root_ca, - cert_chain, - private_key, - ca_webpki_roots, - }) -} diff --git a/crates/trigger/src/runtime_config/client_tls.rs b/crates/trigger/src/runtime_config/client_tls.rs deleted file mode 100644 index 59390841e7..0000000000 --- a/crates/trigger/src/runtime_config/client_tls.rs +++ /dev/null @@ -1,50 +0,0 @@ -use anyhow::Context; -use rustls_pemfile::private_key; -use std::io; -use std::{ - fs, - path::{Path, PathBuf}, -}; - -#[derive(Debug, serde::Deserialize)] -#[serde(rename_all = "snake_case", tag = "type")] -pub struct ClientTlsOpts { - pub component_ids: Vec, - pub hosts: Vec, - pub ca_roots_file: Option, - pub cert_chain_file: Option, - pub private_key_file: Option, - pub ca_webpki_roots: Option, -} - -// load_certs parse and return the certs from the provided file -pub fn load_certs( - path: impl AsRef, -) -> io::Result>> { - rustls_pemfile::certs(&mut io::BufReader::new(fs::File::open(path).map_err( - |err| { - io::Error::new( - io::ErrorKind::InvalidInput, - format!("failed to read cert file {:?}", err), - ) - }, - )?)) - .collect::>>>() -} - -// load_keys parse and return the first private key from the provided file -pub fn load_key( - path: impl AsRef, -) -> anyhow::Result> { - private_key(&mut io::BufReader::new( - fs::File::open(path).context("loading private key")?, - )) - .map_err(|_| anyhow::anyhow!("invalid input")) - .transpose() - .ok_or_else(|| { - io::Error::new( - io::ErrorKind::InvalidInput, - "private key file contains no private keys", - ) - })? -} diff --git a/crates/trigger/src/runtime_config/key_value.rs b/crates/trigger/src/runtime_config/key_value.rs deleted file mode 100644 index 29ab6f191d..0000000000 --- a/crates/trigger/src/runtime_config/key_value.rs +++ /dev/null @@ -1,192 +0,0 @@ -use std::{collections::HashMap, fs, path::PathBuf, sync::Arc}; - -use crate::{runtime_config::RuntimeConfig, TriggerHooks}; -use anyhow::{bail, Context, Result}; -use serde::Deserialize; -use spin_common::ui::quoted_path; -use spin_key_value::{ - CachingStoreManager, DelegatingStoreManager, KeyValueComponent, StoreManager, - KEY_VALUE_STORES_KEY, -}; -use spin_key_value_azure::{ - KeyValueAzureCosmos, KeyValueAzureCosmosAuthOptions, KeyValueAzureCosmosRuntimeConfigOptions, -}; -use spin_key_value_sqlite::{DatabaseLocation, KeyValueSqlite}; - -use super::{resolve_config_path, RuntimeConfigOpts}; - -const DEFAULT_SPIN_STORE_FILENAME: &str = "sqlite_key_value.db"; - -pub type KeyValueStore = Arc; - -/// Builds a [`KeyValueComponent`] from the given [`RuntimeConfig`]. -pub async fn build_key_value_component( - runtime_config: &RuntimeConfig, - init_data: &[(String, String)], -) -> Result { - let stores: HashMap<_, _> = runtime_config - .key_value_stores() - .context("Failed to build key-value component")? - .into_iter() - .collect(); - - // Avoid creating a database as a side-effect if one is not needed. 
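// Aside: the `ca_webpki_roots` defaulting rule from `parse_client_tls_opts` above,
// restated as a tiny self-contained sketch. The function name is illustrative, not
// the Spin API: webpki roots are used when explicitly enabled, or whenever no
// custom root CA is configured; providing a custom root CA alone turns them off.
fn use_webpki_roots(explicit: Option<bool>, custom_root_ca_provided: bool) -> bool {
    explicit.unwrap_or(!custom_root_ca_provided)
}

fn main() {
    assert!(use_webpki_roots(None, false)); // no custom CA: default to webpki roots
    assert!(!use_webpki_roots(None, true)); // custom CA provided: webpki roots off by default
    assert!(use_webpki_roots(Some(true), true)); // explicit opt-in always wins
}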
- if !init_data.is_empty() { - if let Some(manager) = stores.get("default") { - let default_store = manager - .get("default") - .await - .context("Failed to access key-value store to set requested entries")?; - for (key, value) in init_data { - default_store - .set(key, value.as_bytes()) - .await - .with_context(|| { - format!("Failed to set requested entry {key} in key-value store") - })?; - } - } else { - bail!("Failed to access key-value store to set requested entries"); - } - } - - let delegating_manager = DelegatingStoreManager::new(stores); - let caching_manager = Arc::new(CachingStoreManager::new(delegating_manager)); - Ok(KeyValueComponent::new(spin_key_value::manager(move |_| { - caching_manager.clone() - }))) -} - -// Holds deserialized options from a `[key_value_store.]` runtime config section. -#[derive(Clone, Debug, Deserialize)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum KeyValueStoreOpts { - Spin(SpinKeyValueStoreOpts), - Redis(RedisKeyValueStoreOpts), - AzureCosmos(AzureCosmosConfig), -} - -impl KeyValueStoreOpts { - pub fn default_store_opts(runtime_config: &RuntimeConfig) -> Self { - Self::Spin(SpinKeyValueStoreOpts::default_store_opts(runtime_config)) - } - - pub fn build_store(&self, config_opts: &RuntimeConfigOpts) -> Result { - match self { - Self::Spin(opts) => opts.build_store(config_opts), - Self::Redis(opts) => opts.build_store(), - Self::AzureCosmos(opts) => opts.build_store(), - } - } -} - -#[derive(Clone, Debug, Default, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct SpinKeyValueStoreOpts { - pub path: Option, -} - -impl SpinKeyValueStoreOpts { - fn default_store_opts(runtime_config: &RuntimeConfig) -> Self { - // If the state dir is set, build the default path - let path = runtime_config - .state_dir() - .map(|dir| dir.join(DEFAULT_SPIN_STORE_FILENAME)); - Self { path } - } - - fn build_store(&self, config_opts: &RuntimeConfigOpts) -> Result { - let location = match self.path.as_ref() { - Some(path) => { - let path = resolve_config_path(path, config_opts)?; - // Create the store's parent directory if necessary - fs::create_dir_all(path.parent().unwrap()) - .context("Failed to create key value store")?; - DatabaseLocation::Path(path) - } - None => DatabaseLocation::InMemory, - }; - Ok(Arc::new(KeyValueSqlite::new(location))) - } -} - -#[derive(Clone, Debug, Deserialize)] -pub struct RedisKeyValueStoreOpts { - pub url: String, -} - -impl RedisKeyValueStoreOpts { - fn build_store(&self) -> Result { - let kv_redis = spin_key_value_redis::KeyValueRedis::new(self.url.clone())?; - Ok(Arc::new(kv_redis)) - } -} - -#[derive(Clone, Debug, Deserialize)] -pub struct AzureCosmosConfig { - key: Option, - account: String, - database: String, - container: String, -} - -impl AzureCosmosConfig { - pub fn build_store(&self) -> Result> { - let auth_options = match self.key.clone() { - Some(key) => { - tracing::debug!("Azure key value is using key auth."); - let config_values = KeyValueAzureCosmosRuntimeConfigOptions::new(key); - KeyValueAzureCosmosAuthOptions::RuntimeConfigValues(config_values) - } - None => { - tracing::debug!("Azure key value is using environmental auth."); - KeyValueAzureCosmosAuthOptions::Environmental - } - }; - let kv_azure_cosmos = KeyValueAzureCosmos::new( - self.account.clone(), - self.database.clone(), - self.container.clone(), - auth_options, - )?; - Ok(Arc::new(kv_azure_cosmos)) - } -} - -// Prints startup messages about the default key value store config. 
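// Aside: a simplified stand-in for the internally tagged `KeyValueStoreOpts` enum
// above, showing how a `[key_value_store.<label>]` section with `type = "spin"` or
// `type = "redis"` deserializes. Assumes the serde (with derive) and toml crates as
// used elsewhere in this workspace; the enum and values here are illustrative only.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case", tag = "type")]
enum StoreOpts {
    Spin { path: Option<std::path::PathBuf> },
    Redis { url: String },
}

fn main() {
    let opts: StoreOpts = toml::from_str(
        r#"
type = "redis"
url = "redis://localhost:6379"
"#,
    )
    .expect("parse runtime config section");
    println!("{opts:?}"); // Redis { url: "redis://localhost:6379" }
}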
-pub struct KeyValuePersistenceMessageHook; - -impl TriggerHooks for KeyValuePersistenceMessageHook { - fn app_loaded( - &mut self, - app: &spin_app::App, - runtime_config: &RuntimeConfig, - _resolver: &Arc, - ) -> Result<()> { - // Only print if the app actually uses KV - if app.components().all(|c| { - c.get_metadata(KEY_VALUE_STORES_KEY) - .unwrap_or_default() - .unwrap_or_default() - .is_empty() - }) { - return Ok(()); - } - match runtime_config.default_key_value_opts() { - KeyValueStoreOpts::Redis(_store_opts) => { - println!("Storing default key-value data to Redis"); - } - KeyValueStoreOpts::Spin(store_opts) => { - if let Some(path) = &store_opts.path { - println!("Storing default key-value data to {}", quoted_path(path)); - } else { - println!("Using in-memory default key-value store; data will not be saved!"); - } - } - KeyValueStoreOpts::AzureCosmos(store_opts) => { - println!("Storing default key-value data to Azure CosmosDB: account: {}, database: {}, container: {}", store_opts.account, store_opts.database, store_opts.container); - } - } - Ok(()) - } -} diff --git a/crates/trigger/src/runtime_config/llm.rs b/crates/trigger/src/runtime_config/llm.rs deleted file mode 100644 index aed48f639c..0000000000 --- a/crates/trigger/src/runtime_config/llm.rs +++ /dev/null @@ -1,82 +0,0 @@ -use spin_llm_remote_http::RemoteHttpLlmEngine; -use url::Url; - -#[derive(Default)] -pub struct LLmOptions { - pub use_gpu: bool, -} - -pub(crate) async fn build_component( - runtime_config: &crate::RuntimeConfig, - use_gpu: bool, -) -> spin_llm::LlmComponent { - match runtime_config.llm_compute() { - #[cfg(feature = "llm")] - LlmComputeOpts::Spin => { - let path = runtime_config - .state_dir() - .unwrap_or_default() - .join("ai-models"); - let engine = spin_llm_local::LocalLlmEngine::new(path, use_gpu).await; - spin_llm::LlmComponent::new(move || Box::new(engine.clone())) - } - #[cfg(not(feature = "llm"))] - LlmComputeOpts::Spin => { - let _ = use_gpu; - spin_llm::LlmComponent::new(move || Box::new(noop::NoopLlmEngine.clone())) - } - LlmComputeOpts::RemoteHttp(config) => { - tracing::info!("Using remote compute for LLMs"); - let engine = - RemoteHttpLlmEngine::new(config.url.to_owned(), config.auth_token.to_owned()); - spin_llm::LlmComponent::new(move || Box::new(engine.clone())) - } - } -} - -#[derive(Debug, serde::Deserialize)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum LlmComputeOpts { - Spin, - RemoteHttp(RemoteHttpComputeOpts), -} - -#[derive(Debug, serde::Deserialize)] -pub struct RemoteHttpComputeOpts { - url: Url, - auth_token: String, -} - -#[cfg(not(feature = "llm"))] -mod noop { - use async_trait::async_trait; - use spin_llm::LlmEngine; - use spin_world::v2::llm as wasi_llm; - - #[derive(Clone)] - pub(super) struct NoopLlmEngine; - - #[async_trait] - impl LlmEngine for NoopLlmEngine { - async fn infer( - &mut self, - _model: wasi_llm::InferencingModel, - _prompt: String, - _params: wasi_llm::InferencingParams, - ) -> Result { - Err(wasi_llm::Error::RuntimeError( - "Local LLM operations are not supported in this version of Spin.".into(), - )) - } - - async fn generate_embeddings( - &mut self, - _model: wasi_llm::EmbeddingModel, - _data: Vec, - ) -> Result { - Err(wasi_llm::Error::RuntimeError( - "Local LLM operations are not supported in this version of Spin.".into(), - )) - } - } -} diff --git a/crates/trigger/src/runtime_config/sqlite.rs b/crates/trigger/src/runtime_config/sqlite.rs deleted file mode 100644 index 5163791e92..0000000000 --- 
a/crates/trigger/src/runtime_config/sqlite.rs +++ /dev/null @@ -1,240 +0,0 @@ -use std::{collections::HashMap, path::PathBuf, sync::Arc}; - -use crate::{runtime_config::RuntimeConfig, TriggerHooks}; -use anyhow::Context; -use spin_common::ui::quoted_path; -use spin_sqlite::{Connection, ConnectionsStore, SqliteComponent, DATABASES_KEY}; - -use super::RuntimeConfigOpts; - -const DEFAULT_SQLITE_DB_FILENAME: &str = "sqlite_db.db"; - -pub(crate) async fn build_component( - runtime_config: &RuntimeConfig, - sqlite_statements: &[String], -) -> anyhow::Result { - let databases: HashMap<_, _> = runtime_config - .sqlite_databases() - .await - .context("Failed to build sqlite component")? - .into_iter() - .collect(); - execute_statements(sqlite_statements, &databases).await?; - let connections_store = - Arc::new(SimpleConnectionsStore(databases)) as Arc; - Ok(SqliteComponent::new(move |_| connections_store.clone())) -} - -/// A `ConnectionStore` based on a `HashMap` -struct SimpleConnectionsStore(HashMap>); - -#[async_trait::async_trait] -impl ConnectionsStore for SimpleConnectionsStore { - async fn get_connection( - &self, - database: &str, - ) -> Result>, spin_world::v2::sqlite::Error> { - Ok(self.0.get(database).cloned()) - } - - fn has_connection_for(&self, database: &str) -> bool { - self.0.contains_key(database) - } -} - -async fn execute_statements( - statements: &[String], - databases: &HashMap>, -) -> anyhow::Result<()> { - if statements.is_empty() { - return Ok(()); - } - - for m in statements { - if let Some(config) = m.strip_prefix('@') { - let (file, database) = parse_file_and_label(config)?; - let database = databases.get(database).with_context(|| { - format!( - "based on the '@{config}' a registered database named '{database}' was expected but not found. The registered databases are '{:?}'", databases.keys() - ) - })?; - let sql = std::fs::read_to_string(file).with_context(|| { - format!("could not read file '{file}' containing sql statements") - })?; - database - .execute_batch(&sql) - .await - .with_context(|| format!("failed to execute sql from file '{file}'"))?; - } else { - let Some(default) = databases.get("default") else { - debug_assert!(false, "the 'default' sqlite database should always be available but for some reason was not"); - return Ok(()); - }; - default - .query(m, Vec::new()) - .await - .with_context(|| format!("failed to execute statement: '{m}'"))?; - } - } - Ok(()) -} - -/// Parses a @{file:label} sqlite statement -fn parse_file_and_label(config: &str) -> anyhow::Result<(&str, &str)> { - let config = config.trim(); - let (file, label) = match config.split_once(':') { - Some((_, label)) if label.trim().is_empty() => { - anyhow::bail!("database label is empty in the '@{config}' sqlite statement") - } - Some((file, label)) => (file.trim(), label.trim()), - None => (config, "default"), - }; - Ok((file, label)) -} - -// Holds deserialized options from a `[sqlite_database.]` runtime config section. 
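// Aside: usage of the `@{file:label}` statement syntax handled by
// `parse_file_and_label` above, copied here in simplified form so the example is
// self-contained (error type reduced to String; behaviour matches the code above).
fn parse_file_and_label(config: &str) -> Result<(&str, &str), String> {
    let config = config.trim();
    match config.split_once(':') {
        Some((_, label)) if label.trim().is_empty() => {
            Err(format!("database label is empty in the '@{config}' sqlite statement"))
        }
        Some((file, label)) => Ok((file.trim(), label.trim())),
        None => Ok((config, "default")),
    }
}

fn main() {
    // A statement like `@migrations.sql:cache` runs the file against the `cache` database.
    assert_eq!(parse_file_and_label("migrations.sql:cache").unwrap(), ("migrations.sql", "cache"));
    // With no label, the file runs against the `default` database.
    assert_eq!(parse_file_and_label("migrations.sql").unwrap(), ("migrations.sql", "default"));
    // An empty label is rejected.
    assert!(parse_file_and_label("migrations.sql:").is_err());
}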
-#[derive(Clone, Debug, serde::Deserialize)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum SqliteDatabaseOpts { - Spin(SpinSqliteDatabaseOpts), - Libsql(LibsqlOpts), -} - -impl SqliteDatabaseOpts { - pub fn default(runtime_config: &RuntimeConfig) -> Self { - Self::Spin(SpinSqliteDatabaseOpts::default(runtime_config)) - } - - pub async fn build( - &self, - config_opts: &RuntimeConfigOpts, - ) -> anyhow::Result> { - match self { - Self::Spin(opts) => opts.build(config_opts), - Self::Libsql(opts) => opts.build().await, - } - } -} - -#[derive(Clone, Debug, serde::Deserialize)] -#[serde(deny_unknown_fields)] -pub struct SpinSqliteDatabaseOpts { - pub path: Option, -} - -impl SpinSqliteDatabaseOpts { - pub fn default(runtime_config: &RuntimeConfig) -> Self { - let path = runtime_config - .state_dir() - .map(|dir| dir.join(DEFAULT_SQLITE_DB_FILENAME)); - Self { path } - } - - fn build(&self, config_opts: &RuntimeConfigOpts) -> anyhow::Result> { - use spin_sqlite_inproc::{InProcConnection, InProcDatabaseLocation}; - - let location = match self.path.as_ref() { - Some(path) => { - let path = super::resolve_config_path(path, config_opts)?; - // Create the store's parent directory if necessary - std::fs::create_dir_all(path.parent().unwrap()) - .context("Failed to create sqlite database directory")?; - InProcDatabaseLocation::Path(path) - } - None => InProcDatabaseLocation::InMemory, - }; - Ok(Arc::new(InProcConnection::new(location)?)) - } -} - -#[derive(Clone, Debug, serde::Deserialize)] -#[serde(deny_unknown_fields)] -pub struct LibsqlOpts { - url: String, - token: String, -} - -impl LibsqlOpts { - async fn build(&self) -> anyhow::Result> { - let url = check_url(&self.url) - .with_context(|| { - format!( - "unexpected libSQL URL '{}' in runtime config file ", - self.url - ) - })? - .to_owned(); - let client = spin_sqlite_libsql::LibsqlClient::create(url, self.token.clone()) - .await - .context("failed to create SQLite client")?; - Ok(Arc::new(client)) - } -} - -// Checks an incoming url is in the shape we expect -fn check_url(url: &str) -> anyhow::Result<&str> { - if url.starts_with("https://") || url.starts_with("http://") { - Ok(url) - } else { - Err(anyhow::anyhow!( - "URL does not start with 'https://' or 'http://'. 
Spin currently only supports talking to libSQL databases over HTTP(S)" - )) - } -} - -pub struct SqlitePersistenceMessageHook; - -impl TriggerHooks for SqlitePersistenceMessageHook { - fn app_loaded( - &mut self, - app: &spin_app::App, - runtime_config: &RuntimeConfig, - _resolver: &Arc, - ) -> anyhow::Result<()> { - if app.components().all(|c| { - c.get_metadata(DATABASES_KEY) - .unwrap_or_default() - .unwrap_or_default() - .is_empty() - }) { - return Ok(()); - } - - match runtime_config.default_sqlite_opts() { - SqliteDatabaseOpts::Spin(s) => { - if let Some(path) = &s.path { - println!("Storing default SQLite data to {}", quoted_path(path)); - } else { - println!("Using in-memory default SQLite database; data will not be saved!"); - } - } - SqliteDatabaseOpts::Libsql(l) => { - println!( - "Storing default SQLite data to a libsql database at {}", - l.url - ); - } - } - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_parse_file_and_label() { - let config = "file:label"; - let result = parse_file_and_label(config).unwrap(); - assert_eq!(result, ("file", "label")); - - let config = "file:"; - let result = parse_file_and_label(config); - assert!(result.is_err()); - - let config = "file"; - let result = parse_file_and_label(config).unwrap(); - assert_eq!(result, ("file", "default")); - } -} diff --git a/crates/trigger/src/runtime_config/variables_provider.rs b/crates/trigger/src/runtime_config/variables_provider.rs deleted file mode 100644 index 3cfc5c1de5..0000000000 --- a/crates/trigger/src/runtime_config/variables_provider.rs +++ /dev/null @@ -1,134 +0,0 @@ -use std::path::PathBuf; - -use anyhow::{anyhow, Result}; -use serde::Deserialize; -use spin_variables::provider::azure_key_vault::{ - AzureKeyVaultAuthOptions, AzureKeyVaultRuntimeConfigOptions, -}; -use spin_variables::provider::{ - azure_key_vault::{AzureAuthorityHost, AzureKeyVaultProvider}, - env::EnvProvider, - vault::VaultProvider, -}; - -use super::RuntimeConfig; - -pub type VariablesProvider = Box; - -// Holds deserialized options from a `[[config_provider]]` runtime config section. -#[derive(Debug, Deserialize)] -#[serde(rename_all = "snake_case", tag = "type")] -pub enum VariablesProviderOpts { - Env(EnvVariablesProviderOpts), - Vault(VaultVariablesProviderOpts), - AzureKeyVault(AzureKeyVaultVariablesProviderOpts), -} - -impl VariablesProviderOpts { - pub fn default_provider_opts(runtime_config: &RuntimeConfig) -> Self { - Self::Env(EnvVariablesProviderOpts::default_provider_opts( - runtime_config, - )) - } - - pub fn build_provider(&self) -> Result { - match self { - Self::Env(opts) => opts.build_provider(), - Self::Vault(opts) => opts.build_provider(), - Self::AzureKeyVault(opts) => opts.build_provider(), - } - } -} - -#[derive(Debug, Default, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct EnvVariablesProviderOpts { - /// A prefix to add to variable names when resolving from the environment. - /// Unless empty, joined to the variable name with an underscore. - #[serde(default)] - pub prefix: Option, - /// Optional path to a 'dotenv' file which will be merged into the environment. 
- #[serde(default)] - pub dotenv_path: Option, -} - -impl EnvVariablesProviderOpts { - pub fn default_provider_opts(runtime_config: &RuntimeConfig) -> Self { - let dotenv_path = runtime_config - .local_app_dir - .as_deref() - .map(|path| path.join(".env")); - Self { - prefix: None, - dotenv_path, - } - } - - pub fn build_provider(&self) -> Result { - Ok(Box::new(EnvProvider::new( - self.prefix.clone(), - self.dotenv_path.clone(), - ))) - } -} - -#[derive(Debug, Default, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct VaultVariablesProviderOpts { - pub url: String, - pub token: String, - pub mount: String, - #[serde(default)] - pub prefix: Option, -} - -impl VaultVariablesProviderOpts { - pub fn build_provider(&self) -> Result { - Ok(Box::new(VaultProvider::new( - &self.url, - &self.token, - &self.mount, - self.prefix.as_deref(), - ))) - } -} - -#[derive(Debug, Default, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct AzureKeyVaultVariablesProviderOpts { - pub vault_url: String, - pub client_id: Option, - pub client_secret: Option, - pub tenant_id: Option, - pub authority_host: Option, -} - -impl AzureKeyVaultVariablesProviderOpts { - pub fn build_provider(&self) -> Result { - let auth_config_runtime_vars = [&self.client_id, &self.tenant_id, &self.client_secret]; - let any_some = auth_config_runtime_vars.iter().any(|&var| var.is_some()); - let any_none = auth_config_runtime_vars.iter().any(|&var| var.is_none()); - - if any_none && any_some { - // some of the service principal auth options were specified, but not enough to authenticate. - return Err(anyhow!("The current runtime config specifies some but not all of the Azure KeyVault 'client_id', 'client_secret', and 'tenant_id' values. Provide the missing values to authenticate to Azure KeyVault with the given service principal, or remove all these values to authenticate using ambient authentication (e.g. env vars, Azure CLI, Managed Identity, Workload Identity).")); - } - - let auth_options = if any_some { - // all the service principal auth options were specified in the runtime config - AzureKeyVaultAuthOptions::RuntimeConfigValues(AzureKeyVaultRuntimeConfigOptions::new( - self.client_id.clone().unwrap(), - self.client_secret.clone().unwrap(), - self.tenant_id.clone().unwrap(), - self.authority_host, - )) - } else { - AzureKeyVaultAuthOptions::Environmental - }; - - Ok(Box::new(AzureKeyVaultProvider::new( - &self.vault_url, - auth_options, - )?)) - } -} diff --git a/crates/trigger/src/stdio.rs b/crates/trigger/src/stdio.rs index e23a7f8b67..398d59f727 100644 --- a/crates/trigger/src/stdio.rs +++ b/crates/trigger/src/stdio.rs @@ -6,13 +6,15 @@ use std::{ use anyhow::{Context, Result}; use spin_common::ui::quoted_path; +use spin_factors_executor::ExecutorHooks; use tokio::io::AsyncWrite; -use crate::{runtime_config::RuntimeConfig, TriggerHooks}; +use crate::factors::TriggerFactors; /// Which components should have their logs followed on stdout/stderr. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, Default)] pub enum FollowComponents { + #[default] /// No components should have their logs followed. None, /// Only the specified components should have their logs followed. 
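// Aside: the `#[derive(Default)]` + `#[default]` pattern applied to `FollowComponents`
// above relies on the standard derive for enums (stable since Rust 1.62); the variant
// names in this sketch are illustrative, not the exact Spin definition.
#[derive(Clone, Debug, Default)]
enum Follow {
    #[default]
    None,
    Named(Vec<String>),
    All,
}

fn main() {
    // The hand-written `impl Default` removed in the next hunk becomes unnecessary.
    assert!(matches!(Follow::default(), Follow::None));
}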
@@ -32,23 +34,17 @@ impl FollowComponents { } } -impl Default for FollowComponents { - fn default() -> Self { - Self::None - } -} - /// Implements TriggerHooks, writing logs to a log file and (optionally) stderr -pub struct StdioLoggingTriggerHooks { +pub struct StdioLoggingExecutorHooks { follow_components: FollowComponents, log_dir: Option, } -impl StdioLoggingTriggerHooks { - pub fn new(follow_components: FollowComponents) -> Self { +impl StdioLoggingExecutorHooks { + pub fn new(follow_components: FollowComponents, log_dir: Option) -> Self { Self { follow_components, - log_dir: None, + log_dir, } } @@ -91,17 +87,12 @@ impl StdioLoggingTriggerHooks { } } -impl TriggerHooks for StdioLoggingTriggerHooks { - fn app_loaded( +impl ExecutorHooks for StdioLoggingExecutorHooks { + fn configure_app( &mut self, - app: &spin_app::App, - runtime_config: &RuntimeConfig, - _resolver: &std::sync::Arc, + configured_app: &spin_factors::ConfiguredApp, ) -> anyhow::Result<()> { - self.log_dir = runtime_config.log_dir(); - - self.validate_follows(app)?; - + self.validate_follows(configured_app.app())?; if let Some(dir) = &self.log_dir { // Ensure log dir exists if set std::fs::create_dir_all(dir) @@ -109,26 +100,25 @@ impl TriggerHooks for StdioLoggingTriggerHooks { println!("Logging component stdio to {}", quoted_path(dir.join(""))) } - Ok(()) } - fn component_store_builder( + fn prepare_instance( &self, - component: &spin_app::AppComponent, - builder: &mut spin_core::StoreBuilder, + builder: &mut spin_factors_executor::FactorsInstanceBuilder, ) -> anyhow::Result<()> { - builder.stdout_pipe(self.component_stdio_writer( - component.id(), + let component_id = builder.app_component().id().to_string(); + let wasi_builder = builder.factor_builders().wasi(); + wasi_builder.stdout_pipe(self.component_stdio_writer( + &component_id, "stdout", self.log_dir.as_deref(), )?); - builder.stderr_pipe(self.component_stdio_writer( - component.id(), + wasi_builder.stderr_pipe(self.component_stdio_writer( + &component_id, "stderr", self.log_dir.as_deref(), )?); - Ok(()) } } diff --git a/crates/variables/src/host_component.rs b/crates/variables/src/host_component.rs deleted file mode 100644 index 821191add6..0000000000 --- a/crates/variables/src/host_component.rs +++ /dev/null @@ -1,122 +0,0 @@ -use std::sync::{Arc, Mutex}; - -use anyhow::Result; -use once_cell::sync::OnceCell; -use spin_app::{AppComponent, DynamicHostComponent}; -use spin_core::{async_trait, HostComponent}; -use spin_world::v1::config::Error as V1ConfigError; -use spin_world::v2::variables; - -use spin_expressions::{Error, Key, Provider, ProviderResolver}; - -pub struct VariablesHostComponent { - providers: Mutex>>, - resolver: Arc>, -} - -impl VariablesHostComponent { - pub fn new(providers: Vec>) -> Self { - Self { - providers: Mutex::new(providers), - resolver: Default::default(), - } - } -} - -impl HostComponent for VariablesHostComponent { - type Data = ComponentVariables; - - fn add_to_linker( - linker: &mut spin_core::Linker, - get: impl Fn(&mut spin_core::Data) -> &mut Self::Data + Send + Sync + Copy + 'static, - ) -> anyhow::Result<()> { - spin_world::v1::config::add_to_linker(linker, get)?; - variables::add_to_linker(linker, get) - } - - fn build_data(&self) -> Self::Data { - ComponentVariables { - resolver: self.resolver.clone(), - component_id: None, - } - } -} - -impl DynamicHostComponent for VariablesHostComponent { - fn update_data(&self, data: &mut Self::Data, component: &AppComponent) -> anyhow::Result<()> { - self.resolver.get_or_try_init(|| 
{ - make_resolver(component.app, self.providers.lock().unwrap().drain(..)) - })?; - data.component_id = Some(component.id().to_string()); - Ok(()) - } -} - -pub fn make_resolver( - app: &spin_app::App, - providers: impl IntoIterator>, -) -> anyhow::Result { - let mut resolver = - ProviderResolver::new(app.variables().map(|(key, var)| (key.clone(), var.clone())))?; - for component in app.components() { - resolver.add_component_variables( - component.id(), - component.config().map(|(k, v)| (k.into(), v.into())), - )?; - } - for provider in providers { - resolver.add_provider(provider); - } - Ok(resolver) -} - -/// A component variables interface implementation. -pub struct ComponentVariables { - resolver: Arc>, - component_id: Option, -} - -#[async_trait] -impl variables::Host for ComponentVariables { - async fn get(&mut self, key: String) -> Result { - // Set by DynamicHostComponent::update_data - let component_id = self.component_id.as_deref().unwrap(); - let key = Key::new(&key).map_err(as_wit)?; - self.resolver - .get() - .unwrap() - .resolve(component_id, key) - .await - .map_err(as_wit) - } - - fn convert_error(&mut self, error: variables::Error) -> Result { - Ok(error) - } -} - -#[async_trait] -impl spin_world::v1::config::Host for ComponentVariables { - async fn get_config(&mut self, key: String) -> Result { - ::get(self, key) - .await - .map_err(|err| match err { - variables::Error::InvalidName(msg) => V1ConfigError::InvalidKey(msg), - variables::Error::Undefined(msg) => V1ConfigError::Provider(msg), - other => V1ConfigError::Other(format!("{other}")), - }) - } - - fn convert_error(&mut self, error: V1ConfigError) -> Result { - Ok(error) - } -} - -fn as_wit(err: Error) -> variables::Error { - match err { - Error::InvalidName(msg) => variables::Error::InvalidName(msg), - Error::Undefined(msg) => variables::Error::Undefined(msg), - Error::Provider(err) => variables::Error::Provider(err.to_string()), - other => variables::Error::Other(format!("{other}")), - } -} diff --git a/crates/variables/src/lib.rs b/crates/variables/src/lib.rs deleted file mode 100644 index 620e6171c9..0000000000 --- a/crates/variables/src/lib.rs +++ /dev/null @@ -1,4 +0,0 @@ -mod host_component; -pub mod provider; - -pub use host_component::{make_resolver, VariablesHostComponent}; diff --git a/crates/variables/src/provider.rs b/crates/variables/src/provider.rs deleted file mode 100644 index e311b3d014..0000000000 --- a/crates/variables/src/provider.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub mod azure_key_vault; -pub mod env; -pub mod vault; diff --git a/crates/variables/src/provider/env.rs b/crates/variables/src/provider/env.rs deleted file mode 100644 index 90ba581f31..0000000000 --- a/crates/variables/src/provider/env.rs +++ /dev/null @@ -1,141 +0,0 @@ -use std::{collections::HashMap, path::PathBuf, sync::Mutex}; - -use anyhow::{Context, Result}; -use async_trait::async_trait; - -use spin_expressions::{Key, Provider}; -use tracing::{instrument, Level}; - -const DEFAULT_ENV_PREFIX: &str = "SPIN_VARIABLE"; -const LEGACY_ENV_PREFIX: &str = "SPIN_CONFIG"; - -/// A config Provider that uses environment variables. -#[derive(Debug)] -pub struct EnvProvider { - prefix: Option, - dotenv_path: Option, - dotenv_cache: Mutex>>, -} - -impl EnvProvider { - /// Creates a new EnvProvider. 
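// Aside: the lookup rule implemented by `get_sync` below, reduced to just the
// key-to-environment-variable mapping: the prefix defaults to `SPIN_VARIABLE`, and
// the key is upper-cased and joined with an underscore. (When no prefix is set, the
// code below additionally falls back to the legacy `SPIN_CONFIG` prefix.) The helper
// name here is illustrative.
fn env_var_name(prefix: Option<&str>, key: &str) -> String {
    let prefix = prefix.unwrap_or("SPIN_VARIABLE");
    format!("{prefix}_{}", key.to_ascii_uppercase())
}

fn main() {
    assert_eq!(env_var_name(None, "api_token"), "SPIN_VARIABLE_API_TOKEN");
    assert_eq!(env_var_name(Some("MY_APP"), "api_token"), "MY_APP_API_TOKEN");
}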
- pub fn new(prefix: Option>, dotenv_path: Option) -> Self { - Self { - prefix: prefix.map(Into::into), - dotenv_path, - dotenv_cache: Default::default(), - } - } - - fn query_env(&self, env_key: &str) -> Result> { - match std::env::var(env_key) { - Err(std::env::VarError::NotPresent) => self.get_dotenv(env_key), - other => other - .map(Some) - .with_context(|| format!("failed to resolve env var {env_key}")), - } - } - - fn get_sync(&self, key: &Key) -> Result> { - let prefix = self - .prefix - .clone() - .unwrap_or(DEFAULT_ENV_PREFIX.to_string()); - let use_fallback = self.prefix.is_none(); - - let upper_key = key.as_ref().to_ascii_uppercase(); - let env_key = format!("{prefix}_{upper_key}"); - - match self.query_env(&env_key)? { - None if use_fallback => { - let old_key = format!("{LEGACY_ENV_PREFIX}_{upper_key}"); - let result = self.query_env(&old_key); - if let Ok(Some(_)) = &result { - eprintln!("Warning: variable '{key}': {env_key} was not set, so used {old_key}. The {LEGACY_ENV_PREFIX} prefix is deprecated; please switch to the {DEFAULT_ENV_PREFIX} prefix.", key = key.as_ref()); - } - result - } - other => Ok(other), - } - } - - fn get_dotenv(&self, key: &str) -> Result> { - if self.dotenv_path.is_none() { - return Ok(None); - } - let mut maybe_cache = self - .dotenv_cache - .lock() - .expect("dotenv_cache lock poisoned"); - let cache = match maybe_cache.as_mut() { - Some(cache) => cache, - None => maybe_cache.insert(self.load_dotenv()?), - }; - Ok(cache.get(key).cloned()) - } - - fn load_dotenv(&self) -> Result> { - let path = self.dotenv_path.as_deref().unwrap(); - Ok(dotenvy::from_path_iter(path) - .into_iter() - .flatten() - .collect::, _>>()?) - } -} - -#[async_trait] -impl Provider for EnvProvider { - #[instrument(name = "spin_variables.get_from_env", skip(self), err(level = Level::INFO))] - async fn get(&self, key: &Key) -> Result> { - tokio::task::block_in_place(|| self.get_sync(key)) - } -} - -#[cfg(test)] -mod test { - use std::env::temp_dir; - - use super::*; - - #[test] - fn provider_get() { - std::env::set_var("TESTING_SPIN_ENV_KEY1", "val"); - let key1 = Key::new("env_key1").unwrap(); - let mut envs = HashMap::new(); - envs.insert( - "TESTING_SPIN_ENV_KEY1".to_string(), - "dotenv_val".to_string(), - ); - assert_eq!( - EnvProvider::new(Some("TESTING_SPIN"), None) - .get_sync(&key1) - .unwrap(), - Some("val".to_string()) - ); - } - - #[test] - fn provider_get_dotenv() { - let dotenv_path = temp_dir().join("spin-env-provider-test"); - std::fs::write(&dotenv_path, b"TESTING_SPIN_ENV_KEY2=dotenv_val").unwrap(); - - let key = Key::new("env_key2").unwrap(); - assert_eq!( - EnvProvider::new(Some("TESTING_SPIN"), Some(dotenv_path)) - .get_sync(&key) - .unwrap(), - Some("dotenv_val".to_string()) - ); - } - - #[test] - fn provider_get_missing() { - let key = Key::new("please_do_not_ever_set_this_during_tests").unwrap(); - assert_eq!( - EnvProvider::new(Some("TESTING_SPIN"), Default::default()) - .get_sync(&key) - .unwrap(), - None - ); - } -} diff --git a/crates/world/Cargo.toml b/crates/world/Cargo.toml index 3c51d5b247..270b46a174 100644 --- a/crates/world/Cargo.toml +++ b/crates/world/Cargo.toml @@ -5,4 +5,5 @@ authors = { workspace = true } edition = { workspace = true } [dependencies] +async-trait = "0.1" wasmtime = { workspace = true } diff --git a/crates/world/src/lib.rs b/crates/world/src/lib.rs index 42c4d35451..d7adeb19f0 100644 --- a/crates/world/src/lib.rs +++ b/crates/world/src/lib.rs @@ -1,5 +1,7 @@ #![allow(missing_docs)] +pub use async_trait::async_trait; + 
wasmtime::component::bindgen!({ inline: r#" package fermyon:runtime; diff --git a/examples/spin-timer/Cargo.lock b/examples/spin-timer/Cargo.lock index b9d18f476f..900be37f80 100644 --- a/examples/spin-timer/Cargo.lock +++ b/examples/spin-timer/Cargo.lock @@ -18,15 +18,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] -name = "aes" -version = "0.8.4" +name = "adler2" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" @@ -42,33 +37,18 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "0.7.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" -dependencies = [ - "memchr", -] - -[[package]] -name = "aho-corasick" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] -[[package]] -name = "aliasable" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "250f629c0161ad8107cf89319e990051fae62832fd343083bea452d93e2205fd" - [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "ambient-authority" @@ -91,64 +71,6 @@ dependencies = [ "libc", ] -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - -[[package]] -name = "anstream" -version = "0.6.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" - -[[package]] -name = "anstyle-parse" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" -dependencies = [ - "windows-sys 0.52.0", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" -dependencies = [ - "anstyle", - "windows-sys 0.52.0", -] - [[package]] name = "anyhow" 
version = "1.0.79" @@ -161,22 +83,6 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" -[[package]] -name = "arrayvec" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" - -[[package]] -name = "async-broadcast" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c48ccdbf6ca6b121e0f586cbc0e73ae440e56c67c30fa0873b4e110d9c26d2b" -dependencies = [ - "event-listener 2.5.3", - "futures-core", -] - [[package]] name = "async-channel" version = "1.9.0" @@ -195,16 +101,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" dependencies = [ "concurrent-queue", - "event-listener-strategy 0.5.2", + "event-listener-strategy", "futures-core", "pin-project-lite", ] [[package]] name = "async-compression" -version = "0.4.6" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c" +checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa" dependencies = [ "flate2", "futures-core", @@ -213,65 +119,20 @@ dependencies = [ "tokio", ] -[[package]] -name = "async-executor" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8828ec6e544c02b0d6691d21ed9f9218d0384a82542855073c2a3f58304aaf0" -dependencies = [ - "async-task", - "concurrent-queue", - "fastrand 2.0.1", - "futures-lite 2.3.0", - "slab", -] - -[[package]] -name = "async-fs" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "279cf904654eeebfa37ac9bb1598880884924aab82e290aa65c9e77a0e142e06" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "blocking", - "futures-lite 1.13.0", -] - -[[package]] -name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.10", - "waker-fn", -] - [[package]] name = "async-io" -version = "2.3.2" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcccb0f599cfa2f8ace422d3555572f47424da5648a4382a9dd0310ff8210884" +checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" dependencies = [ - "async-lock 3.3.0", + "async-lock", "cfg-if", "concurrent-queue", "futures-io", "futures-lite 2.3.0", "parking", - "polling 3.7.0", - "rustix 0.38.31", + "polling", + "rustix", "slab", "tracing", "windows-sys 0.52.0", @@ -279,88 +140,51 @@ dependencies = [ [[package]] name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", -] - -[[package]] -name = "async-lock" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" +checksum = 
"ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" dependencies = [ - "event-listener 4.0.3", - "event-listener-strategy 0.4.0", + "event-listener 5.3.1", + "event-listener-strategy", "pin-project-lite", ] [[package]] name = "async-process" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" -dependencies = [ - "async-io 1.13.0", - "async-lock 2.8.0", - "async-signal", - "blocking", - "cfg-if", - "event-listener 3.1.0", - "futures-lite 1.13.0", - "rustix 0.38.31", - "windows-sys 0.48.0", -] - -[[package]] -name = "async-process" -version = "2.2.2" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a53fc6301894e04a92cb2584fedde80cb25ba8e02d9dc39d4a87d036e22f397d" +checksum = "f7eda79bbd84e29c2b308d1dc099d7de8dcc7035e48f4bf5dc4a531a44ff5e2a" dependencies = [ "async-channel 2.3.1", - "async-io 2.3.2", - "async-lock 3.3.0", + "async-io", + "async-lock", "async-signal", "async-task", "blocking", "cfg-if", - "event-listener 5.3.0", + "event-listener 5.3.1", "futures-lite 2.3.0", - "rustix 0.38.31", + "rustix", "tracing", "windows-sys 0.52.0", ] -[[package]] -name = "async-recursion" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - [[package]] name = "async-signal" -version = "0.2.5" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e47d90f65a225c4527103a8d747001fc56e375203592b25ad103e1ca13124c5" +checksum = "dfb3634b73397aa844481f814fad23bbf07fdb0eabec10f2eb95e58944b1ec32" dependencies = [ - "async-io 2.3.2", - "async-lock 2.8.0", + "async-io", + "async-lock", "atomic-waker", "cfg-if", "futures-core", "futures-io", - "rustix 0.38.31", + "rustix", "signal-hook-registry", "slab", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -382,7 +206,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -399,7 +223,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -436,9 +260,9 @@ dependencies = [ "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.28", + "hyper 0.14.30", "itoa", "matchit", "memchr", @@ -446,8 +270,8 @@ dependencies = [ "percent-encoding", "pin-project-lite", "rustversion", - "serde 1.0.203", - "sync_wrapper", + "serde", + "sync_wrapper 0.1.2", "tower", "tower-layer", "tower-service", @@ -462,7 +286,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "mime", "rustversion", @@ -473,10 +297,10 @@ dependencies = [ [[package]] name = "azure_core" version = "0.20.0" -source = "git+https://github.com/azure/azure-sdk-for-rust.git?rev=8c4caa251c3903d5eae848b41bb1d02a4d65231c#8c4caa251c3903d5eae848b41bb1d02a4d65231c" +source = "git+https://github.com/azure/azure-sdk-for-rust?rev=8c4caa251c3903d5eae848b41bb1d02a4d65231c#8c4caa251c3903d5eae848b41bb1d02a4d65231c" dependencies = [ "async-trait", - "base64 0.22.0", + "base64 0.22.1", "bytes", "dyn-clone", "futures", @@ -487,9 +311,9 @@ dependencies = [ "paste", "pin-project", "rand 
0.8.5", - "reqwest 0.12.4", + "reqwest 0.12.5", "rustc_version", - "serde 1.0.203", + "serde", "serde_json", "sha2", "time", @@ -501,13 +325,13 @@ dependencies = [ [[package]] name = "azure_data_cosmos" version = "0.20.0" -source = "git+https://github.com/azure/azure-sdk-for-rust.git?rev=8c4caa251c3903d5eae848b41bb1d02a4d65231c#8c4caa251c3903d5eae848b41bb1d02a4d65231c" +source = "git+https://github.com/azure/azure-sdk-for-rust?rev=8c4caa251c3903d5eae848b41bb1d02a4d65231c#8c4caa251c3903d5eae848b41bb1d02a4d65231c" dependencies = [ "async-trait", "azure_core", "bytes", "futures", - "serde 1.0.203", + "serde", "serde_json", "thiserror", "time", @@ -519,16 +343,16 @@ dependencies = [ [[package]] name = "azure_identity" version = "0.20.0" -source = "git+https://github.com/azure/azure-sdk-for-rust.git?rev=8c4caa251c3903d5eae848b41bb1d02a4d65231c#8c4caa251c3903d5eae848b41bb1d02a4d65231c" +source = "git+https://github.com/azure/azure-sdk-for-rust?rev=8c4caa251c3903d5eae848b41bb1d02a4d65231c#8c4caa251c3903d5eae848b41bb1d02a4d65231c" dependencies = [ - "async-lock 3.3.0", - "async-process 2.2.2", + "async-lock", + "async-process", "async-trait", "azure_core", "futures", "oauth2", "pin-project", - "serde 1.0.203", + "serde", "time", "tracing", "tz-rs", @@ -539,12 +363,12 @@ dependencies = [ [[package]] name = "azure_security_keyvault" version = "0.20.0" -source = "git+https://github.com/azure/azure-sdk-for-rust.git?rev=8c4caa251c3903d5eae848b41bb1d02a4d65231c#8c4caa251c3903d5eae848b41bb1d02a4d65231c" +source = "git+https://github.com/azure/azure-sdk-for-rust?rev=8c4caa251c3903d5eae848b41bb1d02a4d65231c#8c4caa251c3903d5eae848b41bb1d02a4d65231c" dependencies = [ "async-trait", "azure_core", "futures", - "serde 1.0.203", + "serde", "serde_json", "time", ] @@ -559,17 +383,11 @@ dependencies = [ "cc", "cfg-if", "libc", - "miniz_oxide", + "miniz_oxide 0.7.2", "object 0.32.2", "rustc-demangle", ] -[[package]] -name = "base16ct" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" - [[package]] name = "base64" version = "0.13.1" @@ -584,40 +402,26 @@ checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" - -[[package]] -name = "base64ct" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" - -[[package]] -name = "beef" -version = "0.5.2" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a8241f3ebb85c056b509d4327ad0358fbbba6ffb340bf388f26350aeda225b1" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "bindgen" -version = "0.69.4" +version = "0.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" +checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f" dependencies = [ "bitflags 2.4.2", "cexpr", "clang-sys", - "itertools 0.12.1", - "lazy_static 1.4.0", - "lazycell", + "itertools", "proc-macro2", "quote", "regex", "rustc-hash", "shlex", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -632,15 +436,6 @@ version = "2.4.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" -[[package]] -name = "bitmaps" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2" -dependencies = [ - "typenum", -] - [[package]] name = "block-buffer" version = "0.10.4" @@ -650,15 +445,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "block-padding" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" -dependencies = [ - "generic-array", -] - [[package]] name = "blocking" version = "1.6.1" @@ -678,7 +464,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dd6407f73a9b8b6162d8a2ef999fe6afd7cc15902ebf42c5cd296addf17e0ad" dependencies = [ - "num-traits 0.2.19", + "num-traits", ] [[package]] @@ -687,12 +473,6 @@ version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" -[[package]] -name = "bytemuck" -version = "1.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b236fc92302c97ed75b38da1f4917b5cdda4984745740f153a5d3059e48d725e" - [[package]] name = "byteorder" version = "1.5.0" @@ -705,56 +485,7 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" dependencies = [ - "serde 1.0.203", -] - -[[package]] -name = "bytesize" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc" - -[[package]] -name = "bzip2" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8" -dependencies = [ - "bzip2-sys", - "libc", -] - -[[package]] -name = "bzip2-sys" -version = "0.1.11+1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc" -dependencies = [ - "cc", - "libc", - "pkg-config", -] - -[[package]] -name = "cached-path" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "097968e38f1319207f057d0f4d76452e4f4f847a5de61c5215379f297fa034f3" -dependencies = [ - "flate2", - "fs2", - "glob", - "indicatif", - "log", - "rand 0.8.5", - "reqwest 0.11.24", - "serde 1.0.203", - "serde_json", - "sha2", - "tar", - "tempfile", - "thiserror", - "zip", + "serde", ] [[package]] @@ -765,7 +496,7 @@ checksum = "769f8cd02eb04d57f14e2e371ebb533f96817f9b2525d73a5c72b61ca7973747" dependencies = [ "cap-primitives", "cap-std", - "io-lifetimes 2.0.3", + "io-lifetimes", "windows-sys 0.52.0", ] @@ -777,7 +508,7 @@ checksum = "59ff6d3fb274292a9af283417e383afe6ded1fe66f6472d2c781216d3d80c218" dependencies = [ "cap-primitives", "cap-std", - "rustix 0.38.31", + "rustix", "smallvec", ] @@ -790,19 +521,19 @@ dependencies = [ "ambient-authority", "fs-set-times", "io-extras", - "io-lifetimes 2.0.3", + "io-lifetimes", "ipnet", "maybe-owned", - "rustix 0.38.31", + "rustix", "windows-sys 0.52.0", "winx", ] [[package]] name = "cap-rand" -version = "3.0.0" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "4327f08daac33a99bb03c54ae18c8f32c3ba31c728a33ddf683c6c6a5043de68" +checksum = "dbcb16a619d8b8211ed61f42bd290d2a1ac71277a69cf8417ec0996fa92f5211" dependencies = [ "ambient-authority", "rand 0.8.5", @@ -816,8 +547,8 @@ checksum = "266626ce180cf9709f317d0bf9754e3a5006359d87f4bf792f06c9c5f1b63c0f" dependencies = [ "cap-primitives", "io-extras", - "io-lifetimes 2.0.3", - "rustix 0.38.31", + "io-lifetimes", + "rustix", ] [[package]] @@ -830,19 +561,10 @@ dependencies = [ "cap-primitives", "iana-time-zone", "once_cell", - "rustix 0.38.31", + "rustix", "winx", ] -[[package]] -name = "cbc" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6" -dependencies = [ - "cipher", -] - [[package]] name = "cc" version = "1.0.99" @@ -860,7 +582,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" dependencies = [ - "nom 7.1.3", + "nom", ] [[package]] @@ -884,27 +606,17 @@ dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", - "num-traits 0.2.19", - "serde 1.0.203", + "num-traits", + "serde", "wasm-bindgen", "windows-targets 0.52.4", ] -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", -] - [[package]] name = "clang-sys" -version = "1.7.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67523a3b4be3ce1989d607a828d036249522dd9c1c8de7f4dd2dae43a37369d1" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" dependencies = [ "glob", "libc", @@ -919,62 +631,28 @@ checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" dependencies = [ "atty", "bitflags 1.3.2", - "clap_derive 3.2.25", - "clap_lex 0.2.4", + "clap_derive", + "clap_lex", "indexmap 1.9.3", "once_cell", - "strsim 0.10.0", + "strsim", "termcolor", "textwrap", ] -[[package]] -name = "clap" -version = "4.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" -dependencies = [ - "clap_builder", - "clap_derive 4.5.4", -] - -[[package]] -name = "clap_builder" -version = "4.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" -dependencies = [ - "anstream", - "anstyle", - "clap_lex 0.7.0", - "strsim 0.11.1", -] - [[package]] name = "clap_derive" version = "3.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" dependencies = [ - "heck 0.4.1", + "heck", "proc-macro-error", "proc-macro2", "quote", "syn 1.0.109", ] -[[package]] -name = "clap_derive" -version = "4.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" -dependencies = [ - "heck 0.5.0", - "proc-macro2", - "quote", - "syn 2.0.48", -] - [[package]] name = "clap_lex" version = "0.2.4" @@ -984,17 +662,11 @@ dependencies = [ "os_str_bytes", ] -[[package]] -name = "clap_lex" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" - [[package]] name = "cmake" -version = "0.1.50" +version = "0.1.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" +checksum = "fb1e43aa7fd152b1f968787f7dbcdeb306d1867ff373c69955211876c053f91a" dependencies = [ "cc", ] @@ -1005,24 +677,18 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" -[[package]] -name = "colorchoice" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" - [[package]] name = "combine" -version = "4.6.6" +version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ "bytes", "futures-core", "memchr", "pin-project-lite", "tokio", - "tokio-util 0.7.10", + "tokio-util", ] [[package]] @@ -1035,71 +701,30 @@ dependencies = [ ] [[package]] -name = "config" -version = "0.11.0" +name = "const_fn" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b1b9d958c2b1368a663f05538fc1b5975adce1e19f435acceae987aceeeb369" -dependencies = [ - "lazy_static 1.4.0", - "nom 5.1.3", - "rust-ini", - "serde 1.0.203", - "serde-hjson", - "serde_json", - "toml 0.5.11", - "yaml-rust", -] +checksum = "373e9fafaa20882876db20562275ff58d50e0caa2590077fe7ce7bef90211d0d" [[package]] -name = "console" -version = "0.15.8" +name = "core-foundation" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ - "encode_unicode", - "lazy_static 1.4.0", + "core-foundation-sys", "libc", - "unicode-width", - "windows-sys 0.52.0", ] [[package]] -name = "const-oid" -version = "0.9.6" +name = "core-foundation-sys" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] -name = "const_fn" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "373e9fafaa20882876db20562275ff58d50e0caa2590077fe7ce7bef90211d0d" - -[[package]] -name = "constant_time_eq" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc" - -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" - -[[package]] -name = "cpp_demangle" -version = "0.4.3" +name = "cpp_demangle" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7e8227005286ec39567949b33df9896bcadfa6051bccca2488129f108ca23119" dependencies = [ @@ -1176,7 +801,7 @@ version = "0.109.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8cfdc315e5d18997093e040a8d234bea1ac1e118a716d3e30f40d449e78207b" dependencies = [ - "serde 1.0.203", + "serde", "serde_derive", ] @@ -1218,7 +843,7 @@ dependencies = [ "cranelift-codegen", "cranelift-entity", "cranelift-frontend", - "itertools 0.12.1", + "itertools", "log", "smallvec", "wasmparser 0.209.1", @@ -1249,9 +874,9 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.12" +version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" dependencies = [ "crossbeam-utils", ] @@ -1290,24 +915,6 @@ version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" -[[package]] -name = "crunchy" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" - -[[package]] -name = "crypto-bigint" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" -dependencies = [ - "generic-array", - "rand_core 0.6.4", - "subtle", - "zeroize", -] - [[package]] name = "crypto-common" version = "0.1.6" @@ -1324,7 +931,7 @@ version = "3.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "672465ae37dc1bc6380a6547a8883d5dd397b0f1faaad4f265726cc7042a5345" dependencies = [ - "nix 0.28.0", + "nix", "windows-sys 0.52.0", ] @@ -1334,18 +941,8 @@ version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" dependencies = [ - "darling_core 0.14.4", - "darling_macro 0.14.4", -] - -[[package]] -name = "darling" -version = "0.20.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83b2eb4d90d12bdda5ed17de686c2acb4c57914f8f921b8da7e112b5a36f3fe1" -dependencies = [ - "darling_core 0.20.9", - "darling_macro 0.20.9", + "darling_core", + "darling_macro", ] [[package]] @@ -1358,46 +955,21 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim 0.10.0", + "strsim", "syn 1.0.109", ] -[[package]] -name = "darling_core" -version = "0.20.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.11.1", - "syn 2.0.48", -] - [[package]] name = "darling_macro" version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" dependencies = [ - "darling_core 0.14.4", + "darling_core", "quote", "syn 1.0.109", ] -[[package]] -name = "darling_macro" -version = "0.20.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" -dependencies = [ - "darling_core 0.20.9", - "quote", - "syn 2.0.48", -] - [[package]] name = "debugid" version = "0.8.0" @@ -1407,17 +979,6 @@ dependencies = [ "uuid", ] -[[package]] -name = "der" 
-version = "0.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" -dependencies = [ - "const-oid", - "pem-rfc7468", - "zeroize", -] - [[package]] name = "deranged" version = "0.3.11" @@ -1425,18 +986,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", - "serde 1.0.203", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", + "serde", ] [[package]] @@ -1445,16 +995,7 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d07adf7be193b71cc36b193d0f5fe60b918a3a9db4dad0449f57bcfd519704a3" dependencies = [ - "derive_builder_macro 0.11.2", -] - -[[package]] -name = "derive_builder" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d67778784b508018359cbc8696edb3db78160bab2c2a28ba7f56ef6932997f8" -dependencies = [ - "derive_builder_macro 0.12.0", + "derive_builder_macro", ] [[package]] @@ -1463,19 +1004,7 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f91d4cfa921f1c05904dc3c57b4a32c38aed3340cce209f3a6fd1478babafc4" dependencies = [ - "darling 0.14.4", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "derive_builder_core" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c11bdc11a0c47bc7d37d582b5285da6849c96681023680b906673c5707af7b0f" -dependencies = [ - "darling 0.14.4", + "darling", "proc-macro2", "quote", "syn 1.0.109", @@ -1487,33 +1016,10 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f0314b72bed045f3a68671b3c86328386762c93f82d98c65c3cb5e5f573dd68" dependencies = [ - "derive_builder_core 0.11.2", + "derive_builder_core", "syn 1.0.109", ] -[[package]] -name = "derive_builder_macro" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebcda35c7a396850a55ffeac740804b40ffec779b98fffbb1738f4033f0ee79e" -dependencies = [ - "derive_builder_core 0.12.0", - "syn 1.0.109", -] - -[[package]] -name = "dialoguer" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de" -dependencies = [ - "console", - "shell-words", - "tempfile", - "thiserror", - "zeroize", -] - [[package]] name = "digest" version = "0.10.7" @@ -1521,20 +1027,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer", - "const-oid", "crypto-common", "subtle", ] -[[package]] -name = "directories" -version = "4.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210" -dependencies = [ - "dirs-sys 0.3.7", -] - [[package]] name = "directories-next" version = "2.0.0" @@ -1551,16 +1047,7 @@ version = "4.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059" dependencies = [ - "dirs-sys 0.3.7", -] - 
-[[package]] -name = "dirs" -version = "5.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" -dependencies = [ - "dirs-sys 0.4.1", + "dirs-sys", ] [[package]] @@ -1574,18 +1061,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "dirs-sys" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.48.0", -] - [[package]] name = "dirs-sys-next" version = "0.1.2" @@ -1597,87 +1072,30 @@ dependencies = [ "winapi", ] -[[package]] -name = "docker_credential" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31951f49556e34d90ed28342e1df7e1cb7a229c4cab0aecc627b5d91edd41d07" -dependencies = [ - "base64 0.21.7", - "serde 1.0.203", - "serde_json", -] - [[package]] name = "dotenvy" version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" -[[package]] -name = "dunce" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" - [[package]] name = "dyn-clone" version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" -[[package]] -name = "ecdsa" -version = "0.16.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" -dependencies = [ - "der", - "digest", - "elliptic-curve", - "rfc6979", - "signature", - "spki", -] - [[package]] name = "either" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" -[[package]] -name = "elliptic-curve" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" -dependencies = [ - "base16ct", - "crypto-bigint", - "digest", - "ff", - "generic-array", - "group", - "pem-rfc7468", - "pkcs8", - "rand_core 0.6.4", - "sec1", - "subtle", - "zeroize", -] - [[package]] name = "embedded-io" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" -[[package]] -name = "encode_unicode" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" - [[package]] name = "encoding_rs" version = "0.8.33" @@ -1687,27 +1105,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "enumflags2" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d232db7f5956f3f14313dc2f87985c58bd2c695ce124c8cdd984e08e15ac133d" -dependencies = [ - "enumflags2_derive", - "serde 1.0.203", -] - -[[package]] -name = "enumflags2_derive" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de0d48a183585823424a4ce1aa132d174a6a81bd540895822eb4c8373a8e49e8" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - [[package]] name = "equivalent" version = "1.0.1" @@ -1724,12 +1121,6 @@ 
dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "esaxx-rs" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d817e038c30374a4bcb22f94d0a8a0e216958d4c3dcde369b1439fec4bdda6e6" - [[package]] name = "event-listener" version = "2.5.3" @@ -1738,54 +1129,22 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "event-listener" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener" -version = "4.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b215c49b2b248c855fb73579eb1f4f26c38ffdc12973e20e07b91d78d5646e" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener" -version = "5.3.0" +version = "5.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d9944b8ca13534cdfb2800775f8dd4902ff3fc75a50101466decadfdf322a24" +checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" dependencies = [ "concurrent-queue", "parking", "pin-project-lite", ] -[[package]] -name = "event-listener-strategy" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "958e4d70b6d5e81971bebec42271ec641e7ff4e170a6fa605f2b8a8b65cb97d3" -dependencies = [ - "event-listener 4.0.3", - "pin-project-lite", -] - [[package]] name = "event-listener-strategy" version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ - "event-listener 5.3.0", + "event-listener 5.3.1", "pin-project-lite", ] @@ -1818,9 +1177,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "fd-lock" @@ -1829,52 +1188,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7e5768da2206272c81ef0b5e951a41862938a6070da63bcea197899942d3b947" dependencies = [ "cfg-if", - "rustix 0.38.31", - "windows-sys 0.52.0", -] - -[[package]] -name = "ff" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" -dependencies = [ - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "filetime" -version = "0.2.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", + "rustix", "windows-sys 0.52.0", ] -[[package]] -name = "finl_unicode" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" - -[[package]] -name = "fixedbitset" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" - [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.32" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "9c0596c1eac1f9e04ed902702e9878208b336edc9d6fddc8a48387349bab3666" dependencies = [ "crc32fast", - "miniz_oxide", + "miniz_oxide 0.8.0", ] [[package]] @@ -1924,21 +1249,11 @@ version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "033b337d725b97690d86893f9de22b67b80dcc4e9ad815f348254c38119db8fb" dependencies = [ - "io-lifetimes 2.0.3", - "rustix 0.38.31", + "io-lifetimes", + "rustix", "windows-sys 0.52.0", ] -[[package]] -name = "fs2" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "futures" version = "0.3.30" @@ -2008,7 +1323,7 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" dependencies = [ - "fastrand 2.0.1", + "fastrand 2.1.0", "futures-core", "futures-io", "parking", @@ -2023,7 +1338,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -2074,7 +1389,7 @@ dependencies = [ "bitflags 2.4.2", "debugid", "fxhash", - "serde 1.0.203", + "serde", "serde_json", ] @@ -2086,7 +1401,6 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", - "zeroize", ] [[package]] @@ -2113,24 +1427,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "ggml" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "ggml-sys", - "memmap2", - "thiserror", -] - -[[package]] -name = "ggml-sys" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "cc", -] - [[package]] name = "gimli" version = "0.28.1" @@ -2148,17 +1444,6 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" -[[package]] -name = "group" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" -dependencies = [ - "ff", - "rand_core 0.6.4", - "subtle", -] - [[package]] name = "h2" version = "0.3.26" @@ -2170,46 +1455,36 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.11", + "http 0.2.12", "indexmap 2.2.6", "slab", "tokio", - "tokio-util 0.7.10", + "tokio-util", "tracing", ] [[package]] name = "h2" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "816ec7294445779408f36fe57bc5b7fc1cf59664059096c65f905c1c61f58069" +checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205" dependencies = [ + "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "futures-util", "http 1.1.0", "indexmap 2.2.6", "slab", "tokio", - "tokio-util 0.7.10", + "tokio-util", "tracing", ] [[package]] -name = "half" -version = "2.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bc52e53916c08643f1b56ec082790d1e86a32e58dc5268f897f313fbae7b4872" -dependencies = [ - "cfg-if", - "crunchy", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" +name = "hashbrown" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" @@ -2230,7 +1505,7 @@ checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ "ahash", "allocator-api2", - "serde 1.0.203", + "serde", ] [[package]] @@ -2248,12 +1523,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - [[package]] name = "hermit-abi" version = "0.1.19" @@ -2270,19 +1539,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hkdf" -version = "0.12.4" +name = "hermit-abi" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" -dependencies = [ - "hmac", -] +checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" [[package]] name = "hmac" @@ -2295,9 +1555,9 @@ dependencies = [ [[package]] name = "http" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -2315,15 +1575,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http-auth" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "643c9bbf6a4ea8a656d6b4cd53d34f79e3f841ad5203c1a55fb7d761923bc255" -dependencies = [ - "memchr", -] - [[package]] name = "http-body" version = "0.4.6" @@ -2331,15 +1582,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.11", + "http 0.2.12", "pin-project-lite", ] [[package]] name = "http-body" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http 1.1.0", @@ -2347,14 +1598,14 @@ dependencies = [ [[package]] name = "http-body-util" -version = "0.1.0" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "pin-project-lite", ] @@ -2371,7 +1622,7 @@ dependencies = [ "infer", "pin-project-lite", "rand 0.7.3", - "serde 1.0.203", + "serde", "serde_json", 
"serde_qs", "serde_urlencoded", @@ -2380,9 +1631,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.8.0" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" [[package]] name = "httpdate" @@ -2392,22 +1643,22 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.28" +version = "0.14.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" +checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", "h2 0.3.26", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.5", + "socket2", "tokio", "tower-service", "tracing", @@ -2416,16 +1667,16 @@ dependencies = [ [[package]] name = "hyper" -version = "1.3.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe575dd17d0862a9a33781c8c4696a55c320909004a67a00fb286ba8b1bc496d" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.4", + "h2 0.4.6", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "httparse", "httpdate", "itoa", @@ -2442,9 +1693,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", - "http 0.2.11", - "hyper 0.14.28", - "rustls 0.21.10", + "http 0.2.12", + "hyper 0.14.30", + "rustls 0.21.12", "tokio", "tokio-rustls 0.24.1", ] @@ -2456,15 +1707,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "399c78f9338483cb7e630c8474b07268983c6bd5acee012e4211f9f7bb21b070" dependencies = [ "futures-util", - "http 0.2.11", - "hyper 0.14.28", + "http 0.2.12", + "hyper 0.14.30", "log", "rustls 0.22.4", "rustls-native-certs", "rustls-pki-types", "tokio", "tokio-rustls 0.25.0", - "webpki-roots 0.26.1", + "webpki-roots 0.26.3", ] [[package]] @@ -2473,7 +1724,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper 0.14.28", + "hyper 0.14.30", "pin-project-lite", "tokio", "tokio-io-timeout", @@ -2486,7 +1737,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "hyper 0.14.28", + "hyper 0.14.30", "native-tls", "tokio", "tokio-native-tls", @@ -2500,7 +1751,7 @@ checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", "http-body-util", - "hyper 1.3.1", + "hyper 1.4.1", "hyper-util", "native-tls", "tokio", @@ -2510,18 +1761,18 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.3" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9" dependencies = [ "bytes", "futures-channel", "futures-util", "http 1.1.0", - "http-body 1.0.0", - "hyper 
1.3.1", + "http-body 1.0.1", + "hyper 1.4.1", "pin-project-lite", - "socket2 0.5.5", + "socket2", "tokio", "tower", "tower-service", @@ -2573,20 +1824,6 @@ dependencies = [ "unicode-normalization", ] -[[package]] -name = "im-rc" -version = "15.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af1955a75fa080c677d3972822ec4bad316169ab1cfc6c257a942c2265dbe5fe" -dependencies = [ - "bitmaps", - "rand_core 0.6.4", - "rand_xoshiro", - "sized-chunks", - "typenum", - "version_check", -] - [[package]] name = "indexmap" version = "1.9.3" @@ -2595,7 +1832,6 @@ checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", - "serde 1.0.203", ] [[package]] @@ -2606,19 +1842,7 @@ checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown 0.14.3", - "serde 1.0.203", -] - -[[package]] -name = "indicatif" -version = "0.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d207dc617c7a380ab07ff572a6e52fa202a2a8f355860ac9c38e23f8196be1b" -dependencies = [ - "console", - "lazy_static 1.4.0", - "number_prefix", - "regex", + "serde", ] [[package]] @@ -2627,46 +1851,25 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64e9829a50b42bb782c1df523f78d332fe371b10c661e78b7a3c34b0198e9fac" -[[package]] -name = "inout" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" -dependencies = [ - "block-padding", - "generic-array", -] - [[package]] name = "instant" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ "cfg-if", ] [[package]] name = "io-extras" -version = "0.18.1" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c301e73fb90e8a29e600a9f402d095765f74310d582916a952f618836a1bd1ed" +checksum = "c9f046b9af244f13b3bd939f55d16830ac3a201e8a9ba9661bfcb03e2be72b9b" dependencies = [ - "io-lifetimes 2.0.3", + "io-lifetimes", "windows-sys 0.52.0", ] -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.9", - "libc", - "windows-sys 0.48.0", -] - [[package]] name = "io-lifetimes" version = "2.0.3" @@ -2679,48 +1882,6 @@ version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" -[[package]] -name = "is_terminal_polyfill" -version = "1.70.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" - -[[package]] -name = "itertools" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f56a2d0bc861f9165be4eb3442afd3c236d8a98afd426f65d92324ae1091a484" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" -dependencies = [ - "either", 
-] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" -dependencies = [ - "either", -] - [[package]] name = "itertools" version = "0.12.1" @@ -2767,28 +1928,13 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.69" +version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +checksum = "1868808506b929d7b0cfa8f75951347aa71bb21144b7791bae35d9bccfcfe37a" dependencies = [ "wasm-bindgen", ] -[[package]] -name = "jwt" -version = "0.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6204285f77fe7d9784db3fdc449ecce1a0114927a51d5a41c4c7a292011c015f" -dependencies = [ - "base64 0.13.1", - "crypto-common", - "digest", - "hmac", - "serde 1.0.203", - "serde_json", - "sha2", -] - [[package]] name = "keyed_priority_queue" version = "0.4.2" @@ -2798,37 +1944,11 @@ dependencies = [ "indexmap 2.2.6", ] -[[package]] -name = "keyring" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "363387f0019d714aa60cc30ab4fe501a747f4c08fc58f069dd14be971bd495a0" -dependencies = [ - "byteorder", - "lazy_static 1.4.0", - "linux-keyutils", - "secret-service", - "security-framework", - "windows-sys 0.52.0", -] - -[[package]] -name = "lazy_static" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" - [[package]] name = "lazy_static" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" - -[[package]] -name = "lazycell" -version = "1.3.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "leb128" @@ -2836,19 +1956,6 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" -[[package]] -name = "lexical-core" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" -dependencies = [ - "arrayvec", - "bitflags 1.3.2", - "cfg-if", - "ryu", - "static_assertions", -] - [[package]] name = "libc" version = "0.2.153" @@ -2857,12 +1964,12 @@ checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libloading" -version = "0.8.1" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c571b676ddfc9a8c12f1f3d3085a7b163966a8fd8098a90640953ce5f6170161" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - "windows-sys 0.48.0", + "windows-targets 0.52.4", ] [[package]] @@ -2883,9 +1990,9 @@ dependencies = [ [[package]] name = "libsql" -version = "0.3.2" +version = "0.3.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3879a4ed80a245fd4dd8c8fa139245653e86184ed3ab97a6d6ea592045d25793" +checksum = "1bd17bcc143f2a5be449680dc63b91327d953bcabebe34a69c549fca8934ec9d" dependencies = [ "async-stream", "async-trait", @@ -2894,30 +2001,30 @@ dependencies = [ "bytes", "fallible-iterator 0.3.0", "futures", - "http 0.2.11", - "hyper 0.14.28", + "http 0.2.12", + "hyper 0.14.30", "hyper-rustls 0.25.0", "libsql-hrana", "libsql-sqlite3-parser", - "serde 1.0.203", + "serde", "serde_json", "thiserror", "tokio", - "tokio-util 0.7.10", + "tokio-util", "tower", "tracing", ] [[package]] name = "libsql-hrana" -version = "0.1.0" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40f256c5c98e84808e067133253471d6f5961c670f0127150694210fb8e6116a" +checksum = "220a925fe6d49dbfa7523b20f5a5391f579b5d9dcf9dd1225606d00929fcab3a" dependencies = [ "base64 0.21.7", "bytes", "prost", - "serde 1.0.203", + "serde", ] [[package]] @@ -2951,387 +2058,144 @@ dependencies = [ ] [[package]] -name = "linked-hash-map" -version = "0.5.6" +name = "linux-raw-sys" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] -name = "linux-keyutils" -version = "0.2.4" +name = "lock_api" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "761e49ec5fd8a5a463f9b84e877c373d888935b71c6be78f3767fe2ae6bed18e" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ - "bitflags 2.4.2", - "libc", + "autocfg", + "scopeguard", ] [[package]] -name = "linux-raw-sys" -version = "0.3.8" +name = "log" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] -name = "linux-raw-sys" -version = "0.4.13" +name = "lru" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" - -[[package]] -name = "llm" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" +checksum = "71e7d46de488603ffdd5f30afbc64fbba2378214a2c3a2fb83abf3d33126df17" dependencies = [ - "llm-base", - "llm-bloom", - "llm-gpt2", - "llm-gptj", - "llm-gptneox", - "llm-llama", - "llm-mpt", - "serde 1.0.203", - "tracing", + "hashbrown 0.13.2", ] [[package]] -name = "llm-base" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" +name = "lru" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37ee39891760e7d94734f6f63fedc29a2e4a152f836120753a72503f09fcf904" dependencies = [ - "bytemuck", - "ggml", - "half", - "llm-samplers", - "memmap2", - "partial_sort", - "rand 0.8.5", - "regex", - "serde 1.0.203", - "serde_bytes", - "thiserror", - "tokenizers", - "tracing", + "hashbrown 0.14.3", ] [[package]] -name = "llm-bloom" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" 
+name = "mach2" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" dependencies = [ - "llm-base", + "libc", ] [[package]] -name = "llm-gpt2" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" dependencies = [ - "bytemuck", - "llm-base", + "regex-automata 0.1.10", ] [[package]] -name = "llm-gptj" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "llm-base", -] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] -name = "llm-gptneox" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "llm-base", -] +name = "maybe-owned" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4facc753ae494aeb6e3c22f839b158aebd4f9270f55cd3c79906c45476c47ab4" [[package]] -name = "llm-llama" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" +name = "md-5" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ - "llm-base", - "tracing", + "cfg-if", + "digest", ] [[package]] -name = "llm-mpt" -version = "0.2.0-dev" -source = "git+https://github.com/rustformers/llm?rev=2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663#2f6ffd4435799ceaa1d1bcb5a8790e5b3e0c5663" -dependencies = [ - "llm-base", -] +name = "memchr" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" [[package]] -name = "llm-samplers" -version = "0.0.6" +name = "memfd" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7553f60d113c9cdc6a5402456a31cd9a273bef79f6f16d8a4f7b4bedf5f754b2" +checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" dependencies = [ - "anyhow", - "num-traits 0.2.19", - "rand 0.8.5", - "thiserror", + "rustix", ] [[package]] -name = "lock_api" -version = "0.4.11" +name = "memoffset" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ "autocfg", - "scopeguard", ] [[package]] -name = "log" -version = "0.4.20" +name = "mime" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" [[package]] -name = "logos" -version = "0.13.0" +name = "minimal-lexical" +version = "0.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c000ca4d908ff18ac99b93a062cb8958d331c3220719c52e77cb19cc6ac5d2c1" -dependencies = [ - "logos-derive 0.13.0", -] +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] -name = "logos" -version = "0.14.0" +name = "miniz_oxide" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "161971eb88a0da7ae0c333e1063467c5b5727e7fb6b710b8db4814eade3a42e8" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" dependencies = [ - "logos-derive 0.14.0", + "adler", ] [[package]] -name = "logos-codegen" -version = "0.13.0" +name = "miniz_oxide" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc487311295e0002e452025d6b580b77bb17286de87b57138f3b5db711cded68" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ - "beef", - "fnv", - "proc-macro2", - "quote", - "regex-syntax 0.6.29", - "syn 2.0.48", + "adler2", ] [[package]] -name = "logos-codegen" -version = "0.14.0" +name = "mio" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e31badd9de5131fdf4921f6473d457e3dd85b11b7f091ceb50e4df7c3eeb12a" -dependencies = [ - "beef", - "fnv", - "lazy_static 1.4.0", - "proc-macro2", - "quote", - "regex-syntax 0.8.2", - "syn 2.0.48", -] - -[[package]] -name = "logos-derive" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfc0d229f1f42d790440136d941afd806bc9e949e2bcb8faa813b0f00d1267e" -dependencies = [ - "logos-codegen 0.13.0", -] - -[[package]] -name = "logos-derive" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c2a69b3eb68d5bd595107c9ee58d7e07fe2bb5e360cc85b0f084dedac80de0a" -dependencies = [ - "logos-codegen 0.14.0", -] - -[[package]] -name = "lru" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e7d46de488603ffdd5f30afbc64fbba2378214a2c3a2fb83abf3d33126df17" -dependencies = [ - "hashbrown 0.13.2", -] - -[[package]] -name = "lru" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc" -dependencies = [ - "hashbrown 0.14.3", -] - -[[package]] -name = "mach2" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b955cdeb2a02b9117f121ce63aa52d08ade45de53e48fe6a38b39c10f6f709" -dependencies = [ - "libc", -] - -[[package]] -name = "macro_rules_attribute" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf0c9b980bf4f3a37fd7b1c066941dd1b1d0152ce6ee6e8fe8c49b9f6810d862" -dependencies = [ - "macro_rules_attribute-proc_macro", - "paste", -] - -[[package]] -name = "macro_rules_attribute-proc_macro" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58093314a45e00c77d5c508f76e77c3396afbbc0d01506e7fae47b018bac2b1d" - -[[package]] -name = "matchers" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" -dependencies = [ - "regex-automata 0.1.10", -] - -[[package]] -name = "matchit" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" - -[[package]] -name = "maybe-owned" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4facc753ae494aeb6e3c22f839b158aebd4f9270f55cd3c79906c45476c47ab4" - -[[package]] -name = "md-5" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" -dependencies = [ - "cfg-if", - "digest", -] - -[[package]] -name = "memchr" -version = "2.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" - -[[package]] -name = "memfd" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" -dependencies = [ - "rustix 0.38.31", -] - -[[package]] -name = "memmap2" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83faa42c0a078c393f6b29d5db232d8be22776a891f8f56e5284faee4a20b327" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4" -dependencies = [ - "autocfg", -] - -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg", -] - -[[package]] -name = "miette" -version = "7.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4edc8853320c2a0dab800fbda86253c8938f6ea88510dc92c5f1ed20e794afc1" -dependencies = [ - "cfg-if", - "miette-derive", - "thiserror", - "unicode-width", -] - -[[package]] -name = "miette-derive" -version = "7.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf09caffaac8068c346b6df2a7fc27a177fd20b39421a39ce0a211bde679a6c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "mime_guess" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef" -dependencies = [ - "mime", - "unicase", -] - -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "miniz_oxide" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" -dependencies = [ - "adler", -] - -[[package]] -name = "mio" -version = "0.8.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", @@ -3339,33 +2203,6 @@ dependencies = [ "windows-sys 0.48.0", ] -[[package]] -name = "monostate" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "878c2a1f1c70e5724fa28f101ca787b6a7e8ad5c5e4ae4ca3b0fa4a419fa9075" -dependencies = [ - "monostate-impl", - "serde 1.0.203", -] - -[[package]] -name = "monostate-impl" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f686d68a09079e63b1d2c64aa305095887ce50565f00a922ebfaeeee0d9ba6ce" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - -[[package]] -name = "multimap" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" - [[package]] name = "mysql_async" version = "0.33.0" @@ -3379,8 +2216,8 @@ dependencies = [ "futures-sink", "futures-util", "keyed_priority_queue", - "lazy_static 1.4.0", - "lru 0.12.3", + "lazy_static", + "lru 0.12.4", "mio", "mysql_common", "native-tls", @@ -3389,13 +2226,13 @@ dependencies = [ "percent-encoding", "pin-project", "rand 0.8.5", - "serde 1.0.203", + "serde", "serde_json", - "socket2 0.5.5", + "socket2", "thiserror", "tokio", "tokio-native-tls", - "tokio-util 0.7.10", + "tokio-util", "twox-hash", "url", ] @@ -3416,13 +2253,13 @@ dependencies = [ "cmake", "crc32fast", "flate2", - "lazy_static 1.4.0", + "lazy_static", "num-bigint", - "num-traits 0.2.19", + "num-traits", "rand 0.8.5", "regex", "saturating", - "serde 1.0.203", + "serde", "serde_json", "sha1 0.10.6", "sha2", @@ -3435,11 +2272,10 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" dependencies = [ - "lazy_static 1.4.0", "libc", "log", "openssl", @@ -3451,18 +2287,6 @@ dependencies = [ "tempfile", ] -[[package]] -name = "nix" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" -dependencies = [ - "bitflags 1.3.2", - "cfg-if", - "libc", - "memoffset 0.7.1", -] - [[package]] name = "nix" version = "0.28.0" @@ -3475,17 +2299,6 @@ dependencies = [ "libc", ] -[[package]] -name = "nom" -version = "5.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08959a387a676302eebf4ddbcbc611da04285579f76f88ee0506c63b1a61dd4b" -dependencies = [ - "lexical-core", - "memchr", - "version_check", -] - [[package]] name = "nom" version = "7.1.3" @@ -3496,15 +2309,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "normpath" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5831952a9476f2fed74b77d74182fa5ddc4d21c72ec45a333b250e3ed0272804" -dependencies = [ - "windows-sys 0.52.0", -] - [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -3515,37 +2319,14 @@ dependencies = [ "winapi", ] -[[package]] -name = "num" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" -dependencies = [ - "num-bigint", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits 0.2.19", -] - [[package]] name = "num-bigint" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c165a9ab64cf766f73521c0dd2cfdff64f488b8f0b3e621face3462d3db536d7" -dependencies = [ - "num-integer", - "num-traits 0.2.19", -] - -[[package]] -name = 
"num-complex" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "num-traits 0.2.19", + "num-integer", + "num-traits", ] [[package]] @@ -3560,38 +2341,7 @@ version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "num-traits 0.2.19", -] - -[[package]] -name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits 0.2.19", -] - -[[package]] -name = "num-rational" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" -dependencies = [ - "num-bigint", - "num-integer", - "num-traits 0.2.19", -] - -[[package]] -name = "num-traits" -version = "0.1.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31" -dependencies = [ - "num-traits 0.2.19", + "num-traits", ] [[package]] @@ -3615,19 +2365,13 @@ dependencies = [ [[package]] name = "num_threads" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" dependencies = [ "libc", ] -[[package]] -name = "number_prefix" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" - [[package]] name = "oauth2" version = "4.4.2" @@ -3637,9 +2381,9 @@ dependencies = [ "base64 0.13.1", "chrono", "getrandom 0.2.12", - "http 0.2.11", + "http 0.2.12", "rand 0.8.5", - "serde 1.0.203", + "serde", "serde_json", "serde_path_to_error", "sha2", @@ -3669,91 +2413,16 @@ dependencies = [ ] [[package]] -name = "oci-distribution" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b95a2c51531af0cb93761f66094044ca6ea879320bccd35ab747ff3fcab3f422" -dependencies = [ - "bytes", - "chrono", - "futures-util", - "http 1.1.0", - "http-auth", - "jwt", - "lazy_static 1.4.0", - "olpc-cjson", - "regex", - "reqwest 0.12.4", - "serde 1.0.203", - "serde_json", - "sha2", - "thiserror", - "tokio", - "tracing", - "unicase", -] - -[[package]] -name = "oci-wasm" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a91502e5352f927156f2b6a28d2558cc59558b1f441b681df3f706ced6937e07" -dependencies = [ - "anyhow", - "chrono", - "oci-distribution", - "serde 1.0.203", - "serde_json", - "sha2", - "tokio", - "wit-component 0.209.1", - "wit-parser 0.209.1", -] - -[[package]] -name = "olpc-cjson" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d637c9c15b639ccff597da8f4fa968300651ad2f1e968aefc3b4927a6fb2027a" -dependencies = [ - "serde 1.0.203", - "serde_json", - "unicode-normalization", -] - -[[package]] -name = "once_cell" -version = "1.19.0" +name = "once_cell" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" -[[package]] -name = "onig" -version = "6.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c4b31c8722ad9171c6d77d3557db078cab2bd50afcc9d09c8b315c59df8ca4f" -dependencies = [ - "bitflags 1.3.2", - "libc", - "once_cell", - "onig_sys", -] - -[[package]] -name = "onig_sys" -version = "69.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b829e3d7e9cc74c7e315ee8edb185bf4190da5acde74afd7fc59c35b1f086e7" -dependencies = [ - "cc", - "pkg-config", -] - [[package]] name = "openssl" -version = "0.10.64" +version = "0.10.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" dependencies = [ "bitflags 2.4.2", "cfg-if", @@ -3772,7 +2441,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -3783,9 +2452,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.102" +version = "0.9.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" dependencies = [ "cc", "libc", @@ -3816,9 +2485,9 @@ checksum = "7690dc77bf776713848c4faa6501157469017eaf332baccd4eb1cea928743d94" dependencies = [ "async-trait", "bytes", - "http 0.2.11", + "http 0.2.12", "opentelemetry", - "reqwest 0.11.24", + "reqwest 0.11.27", ] [[package]] @@ -3829,14 +2498,14 @@ checksum = "1a016b8d9495c639af2145ac22387dcb88e44118e45320d9238fbf4e7889abcb" dependencies = [ "async-trait", "futures-core", - "http 0.2.11", + "http 0.2.12", "opentelemetry", "opentelemetry-http", "opentelemetry-proto", "opentelemetry-semantic-conventions", "opentelemetry_sdk", "prost", - "reqwest 0.11.24", + "reqwest 0.11.27", "thiserror", "tokio", "tonic", @@ -3874,7 +2543,7 @@ dependencies = [ "glob", "once_cell", "opentelemetry", - "ordered-float 4.2.0", + "ordered-float", "percent-encoding", "rand 0.8.5", "serde_json", @@ -3883,38 +2552,13 @@ dependencies = [ "tokio-stream", ] -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "ordered-float" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" -dependencies = [ - "num-traits 0.2.19", -] - [[package]] name = "ordered-float" -version = "4.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76df7075c7d4d01fdcb46c912dd17fba5b60c78ea480b475f2b6ab6f666584e" -dependencies = [ - "num-traits 0.2.19", -] - -[[package]] -name = "ordered-stream" -version = "0.2.0" +version = "4.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aa2b01e1d916879f73a53d01d1d6cee68adbb31d6d9177a8cfce093cced1d50" +checksum = "4a91171844676f8c7990ce64959210cd2eaef32c2612c50f9fae9f8aaa6065a6" dependencies = [ - "futures-core", - "pin-project-lite", + "num-traits", ] [[package]] @@ -3923,137 +2567,12 @@ version = "6.6.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" -[[package]] -name = "ouroboros" -version = "0.18.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97b7be5a8a3462b752f4be3ff2b2bf2f7f1d00834902e46be2a4d68b87b0573c" -dependencies = [ - "aliasable", - "ouroboros_macro", - "static_assertions", -] - -[[package]] -name = "ouroboros_macro" -version = "0.18.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b645dcde5f119c2c454a92d0dfa271a2a3b205da92e4292a68ead4bdbfde1f33" -dependencies = [ - "heck 0.4.1", - "itertools 0.12.1", - "proc-macro2", - "proc-macro2-diagnostics", - "quote", - "syn 2.0.48", -] - -[[package]] -name = "outbound-http" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "http 0.2.11", - "reqwest 0.11.24", - "spin-app", - "spin-core", - "spin-expressions", - "spin-locked-app", - "spin-outbound-networking", - "spin-telemetry", - "spin-world", - "terminal", - "tracing", - "url", -] - -[[package]] -name = "outbound-mqtt" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "rumqttc", - "spin-app", - "spin-core", - "spin-expressions", - "spin-outbound-networking", - "spin-world", - "table", - "tokio", - "tracing", -] - -[[package]] -name = "outbound-mysql" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "flate2", - "mysql_async", - "mysql_common", - "spin-app", - "spin-core", - "spin-expressions", - "spin-outbound-networking", - "spin-world", - "table", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "outbound-pg" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "native-tls", - "postgres-native-tls", - "spin-app", - "spin-core", - "spin-expressions", - "spin-outbound-networking", - "spin-world", - "table", - "tokio", - "tokio-postgres", - "tracing", -] - -[[package]] -name = "outbound-redis" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "redis", - "spin-app", - "spin-core", - "spin-expressions", - "spin-outbound-networking", - "spin-world", - "table", - "tokio", - "tracing", -] - [[package]] name = "overload" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" -[[package]] -name = "p256" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" -dependencies = [ - "ecdsa", - "elliptic-curve", - "primeorder", - "sha2", -] - [[package]] name = "parking" version = "2.2.0" @@ -4083,119 +2602,20 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "partial_sort" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7924d1d0ad836f665c9065e26d016c673ece3993f30d340068b16f282afc1156" - -[[package]] -name = "password-hash" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7676374caaee8a325c9e7a2ae557f216c5563a171d6997b0ef8a65af35147700" -dependencies = [ - "base64ct", - "rand_core 0.6.4", - "subtle", -] - [[package]] name = "paste" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" -[[package]] -name = "path-absolutize" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e4af381fe79fa195b4909485d99f73a80792331df0625188e707854f0b3383f5" -dependencies = [ - "path-dedot", -] - -[[package]] -name = "path-dedot" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07ba0ad7e047712414213ff67533e6dd477af0a4e1d14fb52343e53d30ea9397" -dependencies = [ - "once_cell", -] - -[[package]] -name = "pathdiff" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8835116a5c179084a830efb3adc117ab007512b535bc1a21c991d3b32a6b44dd" - -[[package]] -name = "pbjson" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1030c719b0ec2a2d25a5df729d6cff1acf3cc230bf766f4f97833591f7577b90" -dependencies = [ - "base64 0.21.7", - "serde 1.0.203", -] - -[[package]] -name = "pbjson-build" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2580e33f2292d34be285c5bc3dba5259542b083cfad6037b6d70345f24dcb735" -dependencies = [ - "heck 0.4.1", - "itertools 0.11.0", - "prost", - "prost-types", -] - -[[package]] -name = "pbjson-types" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18f596653ba4ac51bdecbb4ef6773bc7f56042dc13927910de1684ad3d32aa12" -dependencies = [ - "bytes", - "chrono", - "pbjson", - "pbjson-build", - "prost", - "prost-build", - "serde 1.0.203", -] - -[[package]] -name = "pbkdf2" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83a0692ec44e4cf1ef28ca317f14f8f07da2d95ec3fa01f86e4467b725e60917" -dependencies = [ - "digest", - "hmac", - "password-hash", - "sha2", -] - [[package]] name = "pem" -version = "3.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8fcc794035347fb64beda2d3b462595dd2753e3f268d89c5aae77e8cf2c310" -dependencies = [ - "base64 0.21.7", - "serde 1.0.203", -] - -[[package]] -name = "pem-rfc7468" -version = "0.7.0" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" dependencies = [ - "base64ct", + "base64 0.22.1", + "serde", ] [[package]] @@ -4204,16 +2624,6 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" -[[package]] -name = "petgraph" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" -dependencies = [ - "fixedbitset", - "indexmap 2.2.6", -] - [[package]] name = "phf" version = "0.11.2" @@ -4270,7 +2680,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -4287,25 +2697,15 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "piper" -version = "0.2.1" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668d31b1c4eba19242f2088b2bf3316b82ca31082a8335764db4e083db7485d4" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" dependencies = [ "atomic-waker", - "fastrand 2.0.1", + "fastrand 2.1.0", "futures-io", ] -[[package]] -name = "pkcs8" -version = "0.10.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - [[package]] name = "pkg-config" version = "0.3.29" @@ -4314,33 +2714,17 @@ checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" [[package]] name = "polling" -version = "2.8.0" +version = "3.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" +checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" dependencies = [ - "autocfg", - "bitflags 1.3.2", "cfg-if", "concurrent-queue", - "libc", - "log", + "hermit-abi 0.4.0", "pin-project-lite", - "windows-sys 0.48.0", -] - -[[package]] -name = "polling" -version = "3.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645493cf344456ef24219d02a768cf1fb92ddf8c92161679ae3d91b91a637be3" -dependencies = [ - "cfg-if", - "concurrent-queue", - "hermit-abi 0.3.9", - "pin-project-lite", - "rustix 0.38.31", - "tracing", - "windows-sys 0.52.0", + "rustix", + "tracing", + "windows-sys 0.52.0", ] [[package]] @@ -4351,7 +2735,7 @@ checksum = "a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8" dependencies = [ "cobs", "embedded-io", - "serde 1.0.203", + "serde", ] [[package]] @@ -4369,11 +2753,11 @@ dependencies = [ [[package]] name = "postgres-protocol" -version = "0.6.6" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" +checksum = "acda0ebdebc28befa84bee35e651e4c5f09073d668c7aed4cf7e23c3cda84b23" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "byteorder", "bytes", "fallible-iterator 0.2.0", @@ -4387,9 +2771,9 @@ dependencies = [ [[package]] name = "postgres-types" -version = "0.2.6" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" +checksum = "02048d9e032fb3cc3413bbf7b83a15d84a5d419778e2628751896d856498eee9" dependencies = [ "bytes", "fallible-iterator 0.2.0", @@ -4404,37 +2788,11 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" - -[[package]] -name = "prettyplease" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d3928fb5db768cb86f891ff014f0144589297e3c6a1aba6ed7cecfdace270c7" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" dependencies = [ - "proc-macro2", - "syn 2.0.48", -] - -[[package]] -name = "primeorder" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" -dependencies = [ - "elliptic-curve", -] - -[[package]] -name = "proc-macro-crate" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" -dependencies = [ - "once_cell", - "toml_edit 0.19.15", + "zerocopy", ] [[package]] @@ -4463,26 +2821,13 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.78" +version = "1.0.86" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] -[[package]] -name = "proc-macro2-diagnostics" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", - "version_check", - "yansi", -] - [[package]] name = "prost" version = "0.12.6" @@ -4493,27 +2838,6 @@ dependencies = [ "prost-derive", ] -[[package]] -name = "prost-build" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" -dependencies = [ - "bytes", - "heck 0.5.0", - "itertools 0.12.1", - "log", - "multimap", - "once_cell", - "petgraph", - "prettyplease", - "prost", - "prost-types", - "regex", - "syn 2.0.48", - "tempfile", -] - [[package]] name = "prost-derive" version = "0.12.6" @@ -4521,59 +2845,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.12.1", + "itertools", "proc-macro2", "quote", - "syn 2.0.48", -] - -[[package]] -name = "prost-reflect" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f5eec97d5d34bdd17ad2db2219aabf46b054c6c41bd5529767c9ce55be5898f" -dependencies = [ - "logos 0.14.0", - "miette", - "once_cell", - "prost", - "prost-types", -] - -[[package]] -name = "prost-types" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" -dependencies = [ - "prost", -] - -[[package]] -name = "protox" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a29b3c5596eb23a849deba860b53ffd468199d9ad5fe4402a7d55379e16aa2d2" -dependencies = [ - "bytes", - "miette", - "prost", - "prost-reflect", - "prost-types", - "protox-parse", - "thiserror", -] - -[[package]] -name = "protox-parse" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "033b939d76d358f7c32120c86c71f515bae45e64f2bde455200356557276276c" -dependencies = [ - "logos 0.13.0", - "miette", - "prost-types", - "thiserror", + "syn 2.0.75", ] [[package]] @@ -4585,22 +2860,6 @@ dependencies = [ "cc", ] -[[package]] -name = "ptree" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0de80796b316aec75344095a6d2ef68ec9b8f573b9e7adc821149ba3598e270" -dependencies = [ - "ansi_term", - "atty", - "config", - "directories", - "petgraph", - "serde 1.0.203", - "serde-value", - "tint", -] - [[package]] name = "quote" version = "1.0.35" @@ -4681,15 +2940,6 @@ dependencies = [ "rand_core 0.5.1", ] -[[package]] -name = "rand_xoshiro" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" -dependencies = [ - "rand_core 0.6.4", -] - [[package]] name = "rayon" version = "1.8.1" @@ -4700,17 +2950,6 @@ dependencies = [ "rayon-core", ] -[[package]] -name = "rayon-cond" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fd1259362c9065e5ea39a789ef40b1e3fd934c94beb7b5ab3ac6629d3b5e7cb7" -dependencies = [ - "either", - "itertools 0.8.2", - "rayon", -] - [[package]] name = "rayon-core" version = "1.12.1" @@ -4739,7 +2978,7 @@ dependencies = [ "sha1 0.6.1", "tokio", "tokio-native-tls", - "tokio-util 0.7.10", + "tokio-util", "url", ] @@ -4778,14 +3017,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.5" +version = "1.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" +checksum = "4219d74c6b67a3654a9fbebc4b419e22126d13d2f3c4a07ee0cb61ff79a79619" dependencies = [ - "aho-corasick 1.1.2", + "aho-corasick", "memchr", "regex-automata 0.4.7", - "regex-syntax 0.8.2", + "regex-syntax 0.8.4", ] [[package]] @@ -4803,9 +3042,9 @@ version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ - "aho-corasick 1.1.2", + "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.8.4", ] [[package]] @@ -4816,21 +3055,15 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" - -[[package]] -name = "regex-syntax" -version = "0.8.2" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "reqwest" -version = "0.11.24" +version = "0.11.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6920094eb85afde5e4a138be3f2de8bbdf28000f0029e72c45025a56b042251" +checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" dependencies = [ "async-compression", "base64 0.21.7", @@ -4839,9 +3072,9 @@ dependencies = [ "futures-core", "futures-util", "h2 0.3.26", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.28", + "hyper 0.14.30", "hyper-rustls 0.24.2", "hyper-tls 0.5.0", "ipnet", @@ -4852,17 +3085,17 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.10", + "rustls 0.21.12", "rustls-pemfile 1.0.4", - "serde 1.0.203", + "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 0.1.2", "system-configuration", "tokio", "tokio-native-tls", "tokio-rustls 0.24.1", - "tokio-util 0.7.10", + "tokio-util", "tower-service", "url", "wasm-bindgen", @@ -4874,20 +3107,18 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.12.4" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "566cafdd92868e0939d3fb961bd0dc25fcfaaed179291093b3d43e6b3150ea10" +checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" dependencies = [ - "base64 0.22.0", + "base64 0.22.1", "bytes", - "encoding_rs", "futures-core", "futures-util", - "h2 0.4.4", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.3.1", + "hyper 1.4.1", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -4898,16 +3129,14 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile 2.1.2", - "serde 1.0.203", + "rustls-pemfile 2.1.3", + "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", - 
"system-configuration", + "sync_wrapper 1.0.1", "tokio", "tokio-native-tls", - "tokio-socks", - "tokio-util 0.7.10", + "tokio-util", "tower-service", "url", "wasm-bindgen", @@ -4917,16 +3146,6 @@ dependencies = [ "winreg 0.52.0", ] -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - [[package]] name = "ring" version = "0.17.8" @@ -4953,8 +3172,8 @@ dependencies = [ "futures-util", "log", "rustls-native-certs", - "rustls-pemfile 2.1.2", - "rustls-webpki 0.102.2", + "rustls-pemfile 2.1.3", + "rustls-webpki 0.102.6", "thiserror", "tokio", "tokio-rustls 0.25.0", @@ -4975,12 +3194,6 @@ dependencies = [ "smallvec", ] -[[package]] -name = "rust-ini" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e52c148ef37f8c375d49d5a73aa70713125b7f19095948a923f80afdeb22ec2" - [[package]] name = "rustc-demangle" version = "0.1.23" @@ -5011,10 +3224,10 @@ dependencies = [ "anyhow", "async-trait", "bytes", - "http 0.2.11", - "reqwest 0.11.24", + "http 0.2.12", + "reqwest 0.11.27", "rustify_derive", - "serde 1.0.203", + "serde", "serde_json", "serde_urlencoded", "thiserror", @@ -5024,9 +3237,9 @@ dependencies = [ [[package]] name = "rustify_derive" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58135536c18c04f4634bedad182a3f41baf33ef811cc38a3ec7b7061c57134c8" +checksum = "7345f32672da54338227b727bd578c897859ddfaad8952e0b0d787fb4e58f07d" dependencies = [ "proc-macro2", "quote", @@ -5036,20 +3249,6 @@ dependencies = [ "synstructure", ] -[[package]] -name = "rustix" -version = "0.37.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes 1.0.11", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - [[package]] name = "rustix" version = "0.38.31" @@ -5060,16 +3259,16 @@ dependencies = [ "errno", "itoa", "libc", - "linux-raw-sys 0.4.13", + "linux-raw-sys", "once_cell", "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.21.10" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", "ring", @@ -5086,19 +3285,34 @@ dependencies = [ "log", "ring", "rustls-pki-types", - "rustls-webpki 0.102.2", + "rustls-webpki 0.102.6", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls" +version = "0.23.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" +dependencies = [ + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.6", "subtle", "zeroize", ] [[package]] name = "rustls-native-certs" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" +checksum = "04182dffc9091a404e0fc069ea5cd60e5b866c3adf881eff99a32d048242dffa" dependencies = [ "openssl-probe", - "rustls-pemfile 2.1.2", + "rustls-pemfile 2.1.3", "rustls-pki-types", "schannel", "security-framework", @@ -5115,19 +3329,19 @@ 
dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.1.2" +version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" dependencies = [ - "base64 0.22.0", + "base64 0.22.1", "rustls-pki-types", ] [[package]] name = "rustls-pki-types" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" [[package]] name = "rustls-webpki" @@ -5141,9 +3355,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.102.2" +version = "0.102.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" dependencies = [ "ring", "rustls-pki-types", @@ -5152,9 +3366,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.15" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "ryu" @@ -5162,22 +3376,13 @@ version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - [[package]] name = "sanitize-filename" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08c502bdb638f1396509467cb0580ef3b29aa2a45c5d43e5d84928241280296c" +checksum = "2ed72fbaf78e6f2d41744923916966c4fbe3d7c74e3037a8ee482f1115572603" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", "regex", ] @@ -5212,54 +3417,11 @@ dependencies = [ "untrusted", ] -[[package]] -name = "sec1" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" -dependencies = [ - "base16ct", - "der", - "generic-array", - "pkcs8", - "subtle", - "zeroize", -] - -[[package]] -name = "secrecy" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9bd1c54ea06cfd2f6b63219704de0b9b4f72dcc2b8fdef820be6cd799780e91e" -dependencies = [ - "serde 1.0.203", - "zeroize", -] - -[[package]] -name = "secret-service" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5204d39df37f06d1944935232fd2dfe05008def7ca599bf28c0800366c8a8f9" -dependencies = [ - "aes", - "cbc", - "futures-util", - "generic-array", - "hkdf", - "num", - "once_cell", - "rand 0.8.5", - "serde 1.0.203", - "sha2", - "zbus", -] - [[package]] name = "security-framework" -version = "2.9.2" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" dependencies = [ 
"bitflags 1.3.2", "core-foundation", @@ -5270,9 +3432,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.1" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" dependencies = [ "core-foundation-sys", "libc", @@ -5283,15 +3445,6 @@ name = "semver" version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" -dependencies = [ - "serde 1.0.203", -] - -[[package]] -name = "serde" -version = "0.8.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dad3f759919b92c3068c696c15c3d17238234498bbdcc80f2c469606f948ac8" [[package]] name = "serde" @@ -5302,37 +3455,6 @@ dependencies = [ "serde_derive", ] -[[package]] -name = "serde-hjson" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a3a4e0ea8a88553209f6cc6cfe8724ecad22e1acf372793c27d995290fe74f8" -dependencies = [ - "lazy_static 1.4.0", - "num-traits 0.1.43", - "regex", - "serde 0.8.23", -] - -[[package]] -name = "serde-value" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" -dependencies = [ - "ordered-float 2.10.1", - "serde 1.0.203", -] - -[[package]] -name = "serde_bytes" -version = "0.11.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" -dependencies = [ - "serde 1.0.203", -] - [[package]] name = "serde_derive" version = "1.0.203" @@ -5341,7 +3463,7 @@ checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -5352,7 +3474,7 @@ checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" dependencies = [ "itoa", "ryu", - "serde 1.0.203", + "serde", ] [[package]] @@ -5362,7 +3484,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" dependencies = [ "itoa", - "serde 1.0.203", + "serde", ] [[package]] @@ -5372,28 +3494,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" dependencies = [ "percent-encoding", - "serde 1.0.203", + "serde", "thiserror", ] -[[package]] -name = "serde_repr" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.48", -] - [[package]] name = "serde_spanned" version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "79e674e01f999af37c49f70a6ede167a8a60b2503e56c5599532a65baa5969a0" dependencies = [ - "serde 1.0.203", + "serde", ] [[package]] @@ -5405,50 +3516,7 @@ dependencies = [ "form_urlencoded", "itoa", "ryu", - "serde 1.0.203", -] - -[[package]] -name = "serde_with" -version = "3.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad483d2ab0149d5a5ebcd9972a3852711e0153d863bf5a5d0391d28883c4a20" -dependencies = [ - "base64 0.22.0", - "chrono", - "hex", - "indexmap 
1.9.3", - "indexmap 2.2.6", - "serde 1.0.203", - "serde_derive", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65569b702f41443e8bc8bbb1c5779bd0450bbe723b56198980e80ec45780bce2" -dependencies = [ - "darling 0.20.9", - "proc-macro2", - "quote", - "syn 2.0.48", -] - -[[package]] -name = "serde_yaml" -version = "0.9.34+deprecated" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" -dependencies = [ - "indexmap 2.2.6", - "itoa", - "ryu", - "serde 1.0.203", - "unsafe-libyaml", + "serde", ] [[package]] @@ -5473,9 +3541,9 @@ dependencies = [ [[package]] name = "sha1_smol" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" +checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d" [[package]] name = "sha2" @@ -5488,50 +3556,22 @@ dependencies = [ "digest", ] -[[package]] -name = "sha256" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18278f6a914fa3070aa316493f7d2ddfb9ac86ebc06fa3b83bffda487e9065b0" -dependencies = [ - "async-trait", - "bytes", - "hex", - "sha2", - "tokio", -] - [[package]] name = "sharded-slab" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ - "lazy_static 1.4.0", + "lazy_static", ] -[[package]] -name = "shell-words" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" - [[package]] name = "shellexpand" version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ccc8076840c4da029af4f87e4e8daeb0fca6b87bbb02e10cb60b791450e11e4" dependencies = [ - "dirs 4.0.0", -] - -[[package]] -name = "shellexpand" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da03fa3b94cc19e3ebfc88c4229c49d8f08cdbd1228870a45f0ffdf84988e14b" -dependencies = [ - "dirs 5.0.1", + "dirs", ] [[package]] @@ -5549,32 +3589,12 @@ dependencies = [ "libc", ] -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest", - "rand_core 0.6.4", -] - [[package]] name = "siphasher" version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" -[[package]] -name = "sized-chunks" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e" -dependencies = [ - "bitmaps", - "typenum", -] - [[package]] name = "slab" version = "0.4.9" @@ -5596,17 +3616,7 @@ version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" dependencies = [ - "serde 1.0.203", -] - -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", + "serde", ] [[package]] @@ -5621,9 +3631,9 @@ dependencies = [ [[package]] name = "spdx" -version = "0.10.3" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bde1398b09b9f93fc2fc9b9da86e362693e999d3a54a8ac47a99a5a73f638b" +checksum = "47317bbaf63785b53861e1ae2d11b80d6b624211d42cb20efcd210ee6f8a14bc" dependencies = [ "smallvec", ] @@ -5643,10 +3653,8 @@ version = "2.8.0-pre0" dependencies = [ "anyhow", "async-trait", - "ouroboros", - "serde 1.0.203", + "serde", "serde_json", - "spin-core", "spin-locked-app", "spin-serde", "thiserror", @@ -5657,7 +3665,7 @@ name = "spin-common" version = "2.8.0-pre0" dependencies = [ "anyhow", - "dirs 4.0.0", + "dirs", "sha2", "tempfile", "tokio", @@ -5671,9 +3679,9 @@ dependencies = [ "anyhow", "tracing", "wasm-encoder 0.200.0", - "wasm-metadata 0.200.0", + "wasm-metadata", "wasmparser 0.200.0", - "wit-component 0.200.0", + "wit-component", "wit-parser 0.200.0", ] @@ -5683,20 +3691,8 @@ version = "2.8.0-pre0" dependencies = [ "anyhow", "async-trait", - "bytes", - "cap-primitives", - "cap-std", - "http 1.1.0", - "io-extras", - "rustix 0.37.27", - "spin-telemetry", - "system-interface", - "tokio", "tracing", - "wasi-common", "wasmtime", - "wasmtime-wasi", - "wasmtime-wasi-http", ] [[package]] @@ -5707,19 +3703,102 @@ dependencies = [ "async-trait", "dotenvy", "once_cell", - "serde 1.0.203", + "serde", "spin-locked-app", "thiserror", ] [[package]] -name = "spin-key-value" +name = "spin-factor-key-value" version = "2.8.0-pre0" dependencies = [ "anyhow", - "lru 0.9.0", - "spin-app", + "serde", + "spin-factors", + "spin-key-value", + "spin-world", + "toml", +] + +[[package]] +name = "spin-factor-key-value-azure" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "serde", + "spin-factor-key-value", + "spin-key-value-azure", +] + +[[package]] +name = "spin-factor-key-value-redis" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "serde", + "spin-factor-key-value", + "spin-key-value-redis", +] + +[[package]] +name = "spin-factor-key-value-spin" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "serde", + "spin-factor-key-value", + "spin-key-value-sqlite", +] + +[[package]] +name = "spin-factor-llm" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "async-trait", + "serde", + "spin-factors", + "spin-llm-remote-http", + "spin-locked-app", + "spin-world", + "tokio", + "toml", + "tracing", + "url", +] + +[[package]] +name = "spin-factor-outbound-http" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "http 1.1.0", + "http-body-util", + "hyper 1.4.1", + "reqwest 0.11.27", + "rustls 0.23.12", + "spin-factor-outbound-networking", + "spin-factors", + "spin-telemetry", + "spin-world", + "terminal", + "tokio", + "tokio-rustls 0.26.0", + "tracing", + "wasmtime", + "wasmtime-wasi", + "wasmtime-wasi-http", +] + +[[package]] +name = "spin-factor-outbound-mqtt" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "rumqttc", "spin-core", + "spin-factor-outbound-networking", + "spin-factors", "spin-world", "table", "tokio", @@ -5727,144 +3806,242 @@ dependencies = [ ] [[package]] -name = "spin-key-value-azure" +name = "spin-factor-outbound-mysql" version = "2.8.0-pre0" dependencies = [ "anyhow", - "azure_data_cosmos", - "azure_identity", - "futures", - "serde 1.0.203", + "flate2", + "mysql_async", + "mysql_common", + "spin-app", "spin-core", - "spin-key-value", + "spin-expressions", + 
"spin-factor-outbound-networking", + "spin-factors", + "spin-outbound-networking", + "spin-world", + "table", "tokio", "tracing", "url", ] [[package]] -name = "spin-key-value-redis" +name = "spin-factor-outbound-networking" version = "2.8.0-pre0" dependencies = [ "anyhow", - "redis", + "futures-util", + "http 1.1.0", + "ipnet", + "rustls 0.23.12", + "rustls-pemfile 2.1.3", + "rustls-pki-types", + "serde", + "spin-factor-variables", + "spin-factor-wasi", + "spin-factors", + "spin-outbound-networking", + "spin-serde", + "tracing", + "webpki-roots 0.26.3", +] + +[[package]] +name = "spin-factor-outbound-pg" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "native-tls", + "postgres-native-tls", "spin-core", - "spin-key-value", + "spin-factor-outbound-networking", + "spin-factors", "spin-world", + "table", "tokio", + "tokio-postgres", "tracing", - "url", ] [[package]] -name = "spin-key-value-sqlite" +name = "spin-factor-outbound-redis" version = "2.8.0-pre0" dependencies = [ "anyhow", - "once_cell", - "rusqlite", + "redis", "spin-core", - "spin-key-value", + "spin-factor-outbound-networking", + "spin-factors", + "spin-world", + "table", + "tracing", +] + +[[package]] +name = "spin-factor-sqlite" +version = "2.8.0-pre0" +dependencies = [ + "async-trait", + "serde", + "spin-factors", + "spin-locked-app", + "spin-sqlite", + "spin-sqlite-inproc", + "spin-sqlite-libsql", + "spin-world", + "table", + "tokio", + "toml", +] + +[[package]] +name = "spin-factor-variables" +version = "2.8.0-pre0" +dependencies = [ + "azure_core", + "azure_identity", + "azure_security_keyvault", + "dotenvy", + "serde", + "spin-expressions", + "spin-factors", "spin-world", "tokio", + "toml", "tracing", + "vaultrs", +] + +[[package]] +name = "spin-factor-wasi" +version = "2.8.0-pre0" +dependencies = [ + "async-trait", + "bytes", + "cap-primitives", + "spin-common", + "spin-factors", + "tokio", + "wasmtime", + "wasmtime-wasi", +] + +[[package]] +name = "spin-factors" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "serde", + "spin-app", + "spin-factors-derive", + "thiserror", + "toml", + "tracing", + "wasmtime", +] + +[[package]] +name = "spin-factors-derive" +version = "2.8.0-pre0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.75", ] [[package]] -name = "spin-llm" +name = "spin-factors-executor" version = "2.8.0-pre0" dependencies = [ "anyhow", - "bytesize", - "llm", "spin-app", "spin-core", - "spin-world", + "spin-factors", ] [[package]] -name = "spin-llm-remote-http" +name = "spin-key-value" version = "2.8.0-pre0" dependencies = [ "anyhow", - "http 0.2.11", - "llm", - "reqwest 0.11.24", - "serde 1.0.203", - "serde_json", + "lru 0.9.0", + "spin-app", "spin-core", - "spin-llm", - "spin-telemetry", "spin-world", + "table", + "tokio", "tracing", ] [[package]] -name = "spin-loader" +name = "spin-key-value-azure" version = "2.8.0-pre0" dependencies = [ "anyhow", - "async-trait", - "bytes", - "dirs 4.0.0", - "dunce", + "azure_data_cosmos", + "azure_identity", "futures", - "glob", - "indexmap 1.9.3", - "itertools 0.10.5", - "lazy_static 1.4.0", - "mime_guess", - "outbound-http", - "path-absolutize", - "regex", - "reqwest 0.11.24", - "semver", - "serde 1.0.203", - "serde_json", - "sha2", - "shellexpand 3.1.0", - "spin-common", - "spin-locked-app", - "spin-manifest", - "spin-outbound-networking", - "tempfile", - "terminal", - "thiserror", + "serde", + "spin-core", + "spin-key-value", "tokio", - "tokio-util 0.6.10", - "toml 0.8.14", "tracing", - "walkdir", - "wasm-pkg-loader", + "url", ] [[package]] -name = 
"spin-locked-app" +name = "spin-key-value-redis" version = "2.8.0-pre0" dependencies = [ "anyhow", - "async-trait", - "ouroboros", - "serde 1.0.203", + "redis", + "spin-core", + "spin-key-value", + "spin-world", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "spin-key-value-sqlite" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "once_cell", + "rusqlite", + "spin-core", + "spin-key-value", + "spin-world", + "tokio", + "tracing", +] + +[[package]] +name = "spin-llm-remote-http" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "http 0.2.12", + "reqwest 0.11.27", + "serde", "serde_json", - "spin-serde", - "thiserror", + "spin-telemetry", + "spin-world", + "tracing", ] [[package]] -name = "spin-manifest" +name = "spin-locked-app" version = "2.8.0-pre0" dependencies = [ "anyhow", - "indexmap 1.9.3", - "semver", - "serde 1.0.203", + "async-trait", + "serde", + "serde_json", "spin-serde", - "terminal", "thiserror", - "toml 0.8.14", - "url", - "wasm-pkg-common", ] [[package]] @@ -5881,12 +4058,35 @@ dependencies = [ "urlencoding", ] +[[package]] +name = "spin-runtime-config" +version = "2.8.0-pre0" +dependencies = [ + "anyhow", + "spin-factor-key-value", + "spin-factor-key-value-azure", + "spin-factor-key-value-redis", + "spin-factor-key-value-spin", + "spin-factor-llm", + "spin-factor-outbound-http", + "spin-factor-outbound-mqtt", + "spin-factor-outbound-mysql", + "spin-factor-outbound-networking", + "spin-factor-outbound-pg", + "spin-factor-outbound-redis", + "spin-factor-sqlite", + "spin-factor-variables", + "spin-factor-wasi", + "spin-factors", + "toml", +] + [[package]] name = "spin-serde" version = "2.8.0-pre0" dependencies = [ "base64 0.21.7", - "serde 1.0.203", + "serde", ] [[package]] @@ -5938,7 +4138,7 @@ name = "spin-telemetry" version = "2.8.0-pre0" dependencies = [ "anyhow", - "http 0.2.11", + "http 0.2.12", "http 1.1.0", "opentelemetry", "opentelemetry-otlp", @@ -5957,106 +4157,44 @@ name = "spin-trigger" version = "2.8.0-pre0" dependencies = [ "anyhow", - "async-trait", - "clap 3.2.25", + "clap", "ctrlc", - "dirs 4.0.0", "futures", - "http 1.1.0", - "indexmap 1.9.3", - "ipnet", - "outbound-http", - "outbound-mqtt", - "outbound-mysql", - "outbound-pg", - "outbound-redis", - "rustls-pemfile 2.1.2", - "rustls-pki-types", "sanitize-filename", - "serde 1.0.203", + "serde", "serde_json", "spin-app", "spin-common", "spin-componentize", "spin-core", - "spin-expressions", - "spin-key-value", - "spin-key-value-azure", - "spin-key-value-redis", - "spin-key-value-sqlite", - "spin-llm", - "spin-llm-remote-http", - "spin-loader", - "spin-manifest", - "spin-outbound-networking", - "spin-serde", - "spin-sqlite", - "spin-sqlite-inproc", - "spin-sqlite-libsql", + "spin-factor-key-value", + "spin-factor-llm", + "spin-factor-outbound-http", + "spin-factor-outbound-mqtt", + "spin-factor-outbound-mysql", + "spin-factor-outbound-networking", + "spin-factor-outbound-pg", + "spin-factor-outbound-redis", + "spin-factor-sqlite", + "spin-factor-variables", + "spin-factor-wasi", + "spin-factors", + "spin-factors-executor", + "spin-runtime-config", "spin-telemetry", - "spin-variables", - "spin-world", "terminal", "tokio", - "toml 0.5.11", - "tracing", - "url", - "wasmtime", - "wasmtime-wasi", - "wasmtime-wasi-http", -] - -[[package]] -name = "spin-variables" -version = "2.8.0-pre0" -dependencies = [ - "anyhow", - "async-trait", - "azure_core", - "azure_identity", - "azure_security_keyvault", - "dotenvy", - "once_cell", - "serde 1.0.203", - "spin-app", - "spin-core", - 
"spin-expressions", - "spin-world", - "thiserror", - "tokio", "tracing", - "vaultrs", ] [[package]] name = "spin-world" version = "2.8.0-pre0" dependencies = [ + "async-trait", "wasmtime", ] -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "spm_precompiled" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5851699c4033c63636f7ea4cf7b7c1f1bf06d0cc03cfb42e711de5a5c46cf326" -dependencies = [ - "base64 0.13.1", - "nom 7.1.3", - "serde 1.0.203", - "unicode-segmentation", -] - [[package]] name = "sptr" version = "0.3.2" @@ -6086,13 +4224,13 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "stringprep" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" dependencies = [ - "finl_unicode", "unicode-bidi", "unicode-normalization", + "unicode-properties", ] [[package]] @@ -6101,12 +4239,6 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - [[package]] name = "subprocess" version = "0.2.9" @@ -6119,9 +4251,9 @@ dependencies = [ [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -6136,9 +4268,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.48" +version = "2.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9" dependencies = [ "proc-macro2", "quote", @@ -6151,6 +4283,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" + [[package]] name = "synstructure" version = "0.12.6" @@ -6194,8 +4332,8 @@ dependencies = [ "cap-fs-ext", "cap-std", "fd-lock", - "io-lifetimes 2.0.3", - "rustix 0.38.31", + "io-lifetimes", + "rustix", "windows-sys 0.52.0", "winx", ] @@ -6204,17 +4342,6 @@ dependencies = [ name = "table" version = "2.8.0-pre0" -[[package]] -name = "tar" -version = "0.4.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" -dependencies = [ - "filetime", - "libc", - "xattr", -] - [[package]] name = "target-lexicon" version = "0.12.13" @@ -6223,13 +4350,14 @@ checksum = "69758bda2e78f098e4ccb393021a0963bb3442eac05f135c30f61b7370bbafae" 
[[package]] name = "tempfile" -version = "3.10.0" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" +checksum = "b8fcd239983515c23a32fb82099f97d0b11b8c72f654ed659363a95c3dad7a53" dependencies = [ "cfg-if", - "fastrand 2.0.1", - "rustix 0.38.31", + "fastrand 2.1.0", + "once_cell", + "rustix", "windows-sys 0.52.0", ] @@ -6274,7 +4402,7 @@ checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -6300,7 +4428,7 @@ dependencies = [ "num-conv", "num_threads", "powerfmt", - "serde 1.0.203", + "serde", "time-core", "time-macros", ] @@ -6321,20 +4449,11 @@ dependencies = [ "time-core", ] -[[package]] -name = "tint" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7af24570664a3074673dbbf69a65bdae0ae0b72f2949b1adfbacb736ee4d6896" -dependencies = [ - "lazy_static 0.2.11", -] - [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -6345,40 +4464,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" -[[package]] -name = "tokenizers" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aea68938177975ab09da68552b720eac941779ff386baceaf77e0f5f9cea645f" -dependencies = [ - "aho-corasick 0.7.20", - "cached-path", - "derive_builder 0.12.0", - "dirs 4.0.0", - "esaxx-rs", - "getrandom 0.2.12", - "itertools 0.9.0", - "lazy_static 1.4.0", - "log", - "macro_rules_attribute", - "monostate", - "onig", - "paste", - "rand 0.8.5", - "rayon", - "rayon-cond", - "regex", - "regex-syntax 0.7.5", - "reqwest 0.11.24", - "serde 1.0.203", - "serde_json", - "spm_precompiled", - "thiserror", - "unicode-normalization-alignments", - "unicode-segmentation", - "unicode_categories", -] - [[package]] name = "tokio" version = "1.36.0" @@ -6393,7 +4478,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2", "tokio-macros", "windows-sys 0.48.0", ] @@ -6416,7 +4501,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -6431,9 +4516,9 @@ dependencies = [ [[package]] name = "tokio-postgres" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" +checksum = "03adcf0147e203b6032c0b2d30be1415ba03bc348901f3ff1cc0df6a733e60c3" dependencies = [ "async-trait", "byteorder", @@ -6449,9 +4534,9 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand 0.8.5", - "socket2 0.5.5", + "socket2", "tokio", - "tokio-util 0.7.10", + "tokio-util", "whoami", ] @@ -6461,7 +4546,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.10", + "rustls 0.21.12", "tokio", ] @@ -6477,25 +4562,24 @@ dependencies = [ ] 
[[package]] -name = "tokio-scoped" -version = "0.2.0" +name = "tokio-rustls" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4beb8ba13bc53ac53ce1d52b42f02e5d8060f0f42138862869beb769722b256" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ + "rustls 0.23.12", + "rustls-pki-types", "tokio", - "tokio-stream", ] [[package]] -name = "tokio-socks" -version = "0.5.1" +name = "tokio-scoped" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51165dfa029d2a65969413a6cc96f354b86b464498702f174a4efa13608fd8c0" +checksum = "e4beb8ba13bc53ac53ce1d52b42f02e5d8060f0f42138862869beb769722b256" dependencies = [ - "either", - "futures-util", - "thiserror", "tokio", + "tokio-stream", ] [[package]] @@ -6511,39 +4595,15 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.6.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "log", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "tokio-util" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", - "tracing", -] - -[[package]] -name = "toml" -version = "0.5.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" -dependencies = [ - "serde 1.0.203", ] [[package]] @@ -6552,11 +4612,10 @@ version = "0.8.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335" dependencies = [ - "indexmap 2.2.6", - "serde 1.0.203", + "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.14", + "toml_edit", ] [[package]] @@ -6565,18 +4624,7 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4badfd56924ae69bcc9039335b2e017639ce3f9b001c393c1b2d1ef846ce2cbf" dependencies = [ - "serde 1.0.203", -] - -[[package]] -name = "toml_edit" -version = "0.19.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" -dependencies = [ - "indexmap 2.2.6", - "toml_datetime", - "winnow 0.5.40", + "serde", ] [[package]] @@ -6586,10 +4634,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38" dependencies = [ "indexmap 2.2.6", - "serde 1.0.203", + "serde", "serde_spanned", "toml_datetime", - "winnow 0.6.13", + "winnow", ] [[package]] @@ -6604,9 +4652,9 @@ dependencies = [ "base64 0.21.7", "bytes", "h2 0.3.26", - "http 0.2.11", + "http 0.2.12", "http-body 0.4.6", - "hyper 0.14.28", + "hyper 0.14.30", "hyper-timeout", "percent-encoding", "pin-project", @@ -6633,7 +4681,7 @@ dependencies = [ "rand 0.8.5", "slab", "tokio", - "tokio-util 0.7.10", + "tokio-util", "tower-layer", "tower-service", "tracing", @@ -6641,15 +4689,15 @@ dependencies = [ [[package]] name = "tower-layer" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" @@ -6683,7 +4731,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -6719,7 +4767,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" dependencies = [ - "serde 1.0.203", + "serde", "tracing-core", ] @@ -6733,7 +4781,7 @@ dependencies = [ "nu-ansi-term", "once_cell", "regex", - "serde 1.0.203", + "serde", "serde_json", "sharded-slab", "smallvec", @@ -6748,11 +4796,9 @@ name = "trigger-timer" version = "0.1.0" dependencies = [ "anyhow", - "clap 3.2.25", + "clap", "futures", - "serde 1.0.203", - "spin-app", - "spin-core", + "serde", "spin-trigger", "tokio", "tokio-scoped", @@ -6791,17 +4837,6 @@ dependencies = [ "const_fn", ] -[[package]] -name = "uds_windows" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9" -dependencies = [ - "memoffset 0.9.0", - "tempfile", - "winapi", -] - [[package]] name = "uncased" version = "0.9.10" @@ -6811,15 +4846,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "unicase" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] - [[package]] name = "unicode-bidi" version = "0.3.15" @@ -6834,27 +4860,18 @@ checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] -name = "unicode-normalization-alignments" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f613e4fa046e69818dd287fdc4bc78175ff20331479dab6e1b0f98d57062de" -dependencies = [ - "smallvec", -] - -[[package]] -name = "unicode-segmentation" -version = "1.11.0" +name = "unicode-properties" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" +checksum = "52ea75f83c0137a9b98608359a5f1af8144876eb67bcb1ce837368e906a9f524" [[package]] name = "unicode-width" @@ -6868,18 +4885,6 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" -[[package]] -name = "unicode_categories" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" - -[[package]] -name = "unsafe-libyaml" -version = "0.2.11" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" - [[package]] name = "untrusted" version = "0.9.0" @@ -6888,14 +4893,14 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", "percent-encoding", - "serde 1.0.203", + "serde", ] [[package]] @@ -6904,12 +4909,6 @@ version = "2.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" -[[package]] -name = "utf8parse" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" - [[package]] name = "uuid" version = "1.7.0" @@ -6933,12 +4932,12 @@ checksum = "267f958930e08323a44c12e6c5461f3eaaa16d88785e9ec8550215b8aafc3d0b" dependencies = [ "async-trait", "bytes", - "derive_builder 0.11.2", - "http 0.2.11", - "reqwest 0.11.24", + "derive_builder", + "http 0.2.12", + "reqwest 0.11.27", "rustify", "rustify_derive", - "serde 1.0.203", + "serde", "serde_json", "thiserror", "tracing", @@ -6959,165 +4958,17 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "waker-fn" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" - -[[package]] -name = "walkdir" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "warg-api" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a22d3c9026f2f6a628cf386963844cdb7baea3b3419ba090c9096da114f977d" -dependencies = [ - "indexmap 2.2.6", - "itertools 0.12.1", - "serde 1.0.203", - "serde_with", - "thiserror", - "warg-crypto", - "warg-protocol", -] - -[[package]] -name = "warg-client" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b8b5a2b17e737e1847dbf4642e4ebe49f5df32a574520251ff080ef0a120423" -dependencies = [ - "anyhow", - "async-recursion", - "async-trait", - "bytes", - "clap 4.5.4", - "dialoguer", - "dirs 5.0.1", - "futures-util", - "indexmap 2.2.6", - "itertools 0.12.1", - "keyring", - "libc", - "normpath", - "once_cell", - "pathdiff", - "ptree", - "reqwest 0.12.4", - "secrecy", - "semver", - "serde 1.0.203", - "serde_json", - "sha256", - "tempfile", - "thiserror", - "tokio", - "tokio-util 0.7.10", - "tracing", - "url", - "walkdir", - "warg-api", - "warg-crypto", - "warg-protocol", - "warg-transparency", - "wasm-compose", - "wasm-encoder 0.41.2", - "wasmparser 0.121.2", - "wasmprinter 0.2.80", - "windows-sys 0.52.0", -] - -[[package]] -name = "warg-crypto" -version = "0.7.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "834bf58863aa4bc3821732afb0c77e08a5cbf05f63ee93116acae694eab04460" -dependencies = [ - "anyhow", - "base64 0.21.7", - "digest", - "hex", - "leb128", - "once_cell", - "p256", - "rand_core 0.6.4", - "secrecy", - "serde 1.0.203", - "sha2", - "signature", - "thiserror", -] - -[[package]] -name = "warg-protobuf" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf8a2dee6b14f5b0b0c461711a81cdef45d45ea94f8460cb6205cada7fec732a" -dependencies = [ - "anyhow", - "pbjson", - "pbjson-build", - "pbjson-types", - "prost", - "prost-build", - "prost-types", - "protox", - "regex", - "serde 1.0.203", - "warg-crypto", -] - -[[package]] -name = "warg-protocol" -version = "0.7.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4053a3276d3fee83645411b1b5f462f72402e70fbf645164274a3a0a2fd72538" -dependencies = [ - "anyhow", - "base64 0.21.7", - "hex", - "indexmap 2.2.6", - "pbjson-types", - "prost", - "prost-types", - "semver", - "serde 1.0.203", - "serde_with", - "thiserror", - "warg-crypto", - "warg-protobuf", - "warg-transparency", - "wasmparser 0.121.2", -] +checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" [[package]] -name = "warg-transparency" -version = "0.7.0" +name = "want" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "513ef81a5bb1ac5d7bd04f90d3c192dad8f590f4c02b3ef68d3ae4fbbb53c1d7" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" dependencies = [ - "anyhow", - "indexmap 2.2.6", - "prost", - "thiserror", - "warg-crypto", - "warg-protobuf", + "try-lock", ] [[package]] @@ -7132,33 +4983,6 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" -[[package]] -name = "wasi-common" -version = "22.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b86fd41e1e26ff6af9451c6a332a5ce5f5283ca51e87d875cdd9a05305598ee3" -dependencies = [ - "anyhow", - "bitflags 2.4.2", - "cap-fs-ext", - "cap-rand", - "cap-std", - "cap-time-ext", - "fs-set-times", - "io-extras", - "io-lifetimes 2.0.3", - "log", - "once_cell", - "rustix 0.38.31", - "system-interface", - "thiserror", - "tokio", - "tracing", - "wasmtime", - "wiggle", - "windows-sys 0.52.0", -] - [[package]] name = "wasite" version = "0.1.0" @@ -7167,34 +4991,35 @@ checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" [[package]] name = "wasm-bindgen" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +checksum = "a82edfc16a6c469f5f44dc7b571814045d60404b55a0ee849f9bcfa2e63dd9b5" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +checksum = "9de396da306523044d3302746f1208fa71d7532227f15e347e2d93e4145dd77b" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.42" +version = "0.4.43" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +checksum = "61e9300f63a621e96ed275155c108eb6f843b6a26d053f122ab69724559dc8ed" dependencies = [ "cfg-if", "js-sys", @@ -7204,9 +5029,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +checksum = "585c4c91a46b072c92e908d99cb1dcdf95c5218eeb6f3bf1efa991ee7a68cccf" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -7214,53 +5039,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.92" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +checksum = "afc340c74d9005395cf9dd098506f7f44e38f2b4a21c6aaacf9a105ea5e1e836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.92" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" - -[[package]] -name = "wasm-compose" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd324927af875ebedb1b820c00e3c585992d33c2c787c5021fe6d8982527359b" -dependencies = [ - "anyhow", - "heck 0.4.1", - "im-rc", - "indexmap 2.2.6", - "log", - "petgraph", - "serde 1.0.203", - "serde_derive", - "serde_yaml", - "smallvec", - "wasm-encoder 0.41.2", - "wasmparser 0.121.2", - "wat", -] - -[[package]] -name = "wasm-encoder" -version = "0.41.2" +version = "0.2.93" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "972f97a5d8318f908dded23594188a90bcd09365986b1163e66d70170e5287ae" -dependencies = [ - "leb128", - "wasmparser 0.121.2", -] +checksum = "c62a0a307cb4a311d3a07867860911ca130c3494e8c2719593806c08bc5d0484" [[package]] name = "wasm-encoder" @@ -7297,7 +5091,7 @@ checksum = "c31b8cc0c21f46d55b0aaa419cacce1eadcf28eaebd0e1488d6a6313ee71a586" dependencies = [ "anyhow", "indexmap 2.2.6", - "serde 1.0.203", + "serde", "serde_derive", "serde_json", "spdx", @@ -7305,71 +5099,6 @@ dependencies = [ "wasmparser 0.200.0", ] -[[package]] -name = "wasm-metadata" -version = "0.209.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d32029ce424f6d3c2b39b4419fb45a0e2d84fb0751e0c0a32b7ce8bd5d97f46" -dependencies = [ - "anyhow", - "indexmap 2.2.6", - "serde 1.0.203", - "serde_derive", - "serde_json", - "spdx", - "wasm-encoder 0.209.1", - "wasmparser 0.209.1", -] - -[[package]] -name = "wasm-pkg-common" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca7a687d110f68a65227a644c7040c7720220e8cb0bb8c803e2b5dcb7fd72468" -dependencies = [ - "anyhow", - "dirs 5.0.1", - "http 1.1.0", - "reqwest 0.12.4", - "semver", - "serde 1.0.203", - "serde_json", - "thiserror", - "toml 0.8.14", - "tracing", -] - -[[package]] -name = "wasm-pkg-loader" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11338b173351bc505bc752c00068a7d1da5106a9d351753f0d01267dcc4747b2" -dependencies = [ - "anyhow", - "async-trait", - "base64 0.22.0", - "bytes", - "dirs 5.0.1", - "docker_credential", - "futures-util", - "oci-distribution", - "oci-wasm", 
- "secrecy", - "serde 1.0.203", - "serde_json", - "sha2", - "thiserror", - "tokio", - "tokio-util 0.7.10", - "toml 0.8.14", - "tracing", - "tracing-subscriber", - "url", - "warg-client", - "warg-protocol", - "wasm-pkg-common", -] - [[package]] name = "wasm-streams" version = "0.4.0" @@ -7383,17 +5112,6 @@ dependencies = [ "web-sys", ] -[[package]] -name = "wasmparser" -version = "0.121.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dbe55c8f9d0dbd25d9447a5a889ff90c0cc3feaa7395310d3d826b2c703eaab" -dependencies = [ - "bitflags 2.4.2", - "indexmap 2.2.6", - "semver", -] - [[package]] name = "wasmparser" version = "0.200.0" @@ -7416,17 +5134,7 @@ dependencies = [ "hashbrown 0.14.3", "indexmap 2.2.6", "semver", - "serde 1.0.203", -] - -[[package]] -name = "wasmprinter" -version = "0.2.80" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60e73986a6b7fdfedb7c5bf9e7eb71135486507c8fbc4c0c42cffcb6532988b7" -dependencies = [ - "anyhow", - "wasmparser 0.121.2", + "serde", ] [[package]] @@ -7462,16 +5170,16 @@ dependencies = [ "log", "mach2", "memfd", - "memoffset 0.9.0", + "memoffset", "object 0.36.0", "once_cell", "paste", "postcard", "psm", "rayon", - "rustix 0.38.31", + "rustix", "semver", - "serde 1.0.203", + "serde", "serde_derive", "serde_json", "smallvec", @@ -7515,11 +5223,11 @@ dependencies = [ "directories-next", "log", "postcard", - "rustix 0.38.31", - "serde 1.0.203", + "rustix", + "serde", "serde_derive", "sha2", - "toml 0.8.14", + "toml", "windows-sys 0.52.0", "zstd 0.13.1", ] @@ -7533,7 +5241,7 @@ dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", "wasmtime-component-util", "wasmtime-wit-bindgen", "wit-parser 0.209.1", @@ -7584,12 +5292,12 @@ dependencies = [ "object 0.36.0", "postcard", "rustc-demangle", - "serde 1.0.203", + "serde", "serde_derive", "target-lexicon", "wasm-encoder 0.209.1", "wasmparser 0.209.1", - "wasmprinter 0.209.1", + "wasmprinter", "wasmtime-component-util", "wasmtime-types", ] @@ -7603,7 +5311,7 @@ dependencies = [ "anyhow", "cc", "cfg-if", - "rustix 0.38.31", + "rustix", "wasmtime-asm-macros", "wasmtime-versioned-export-macros", "windows-sys 0.52.0", @@ -7617,7 +5325,7 @@ checksum = "9bc54198c6720f098210a85efb3ba8c078d1de4d373cdb6778850a66ae088d11" dependencies = [ "object 0.36.0", "once_cell", - "rustix 0.38.31", + "rustix", "wasmtime-versioned-export-macros", ] @@ -7646,7 +5354,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "412463e9000e14cf6856be48628d2213c20c153e29ffc22b036980c892ea6964" dependencies = [ "cranelift-entity", - "serde 1.0.203", + "serde", "serde_derive", "smallvec", "wasmparser 0.209.1", @@ -7660,7 +5368,7 @@ checksum = "de5a9bc4f44ceeb168e9e8e3be4e0b4beb9095b468479663a9e24c667e36826f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] @@ -7681,9 +5389,9 @@ dependencies = [ "fs-set-times", "futures", "io-extras", - "io-lifetimes 2.0.3", + "io-lifetimes", "once_cell", - "rustix 0.38.31", + "rustix", "system-interface", "thiserror", "tokio", @@ -7705,16 +5413,16 @@ dependencies = [ "bytes", "futures", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.3.1", + "hyper 1.4.1", "rustls 0.22.4", "tokio", "tokio-rustls 0.25.0", "tracing", "wasmtime", "wasmtime-wasi", - "webpki-roots 0.26.1", + "webpki-roots 0.26.3", ] [[package]] @@ -7741,7 +5449,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"70dc077306b38288262e5ba01d4b21532a6987416cdc0aedf04bb06c22a68fdc" dependencies = [ "anyhow", - "heck 0.4.1", + "heck", "indexmap 2.2.6", "wit-parser 0.209.1", ] @@ -7779,9 +5487,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.69" +version = "0.3.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +checksum = "26fdeaafd9bd129f65e7c031593c24d62186301e0c72c8978fa1678be7d532c0" dependencies = [ "js-sys", "wasm-bindgen", @@ -7805,9 +5513,9 @@ checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] name = "webpki-roots" -version = "0.26.1" +version = "0.26.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3de34ae270483955a94f4b21bdaaeb83d508bb84a01435f393818edb0012009" +checksum = "bd7c23921eeb1713a4e851530e9b9756e4fb0e89978582942612524cf09f01cd" dependencies = [ "rustls-pki-types", ] @@ -7845,11 +5553,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "557567f2793508760cd855f7659b7a0b9dc4dbc451f53f1415d6943a15311ade" dependencies = [ "anyhow", - "heck 0.4.1", + "heck", "proc-macro2", "quote", - "shellexpand 2.1.2", - "syn 2.0.48", + "shellexpand", + "syn 2.0.75", "witx", ] @@ -7861,7 +5569,7 @@ checksum = "cc26129a8aea20b62c961d1b9ab4a3c3b56b10042ed85d004f8678af0f21ba6e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", "wiggle-generate", ] @@ -8054,15 +5762,6 @@ version = "0.52.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" -[[package]] -name = "winnow" -version = "0.5.40" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" -dependencies = [ - "memchr", -] - [[package]] name = "winnow" version = "0.6.13" @@ -8112,34 +5811,15 @@ dependencies = [ "bitflags 2.4.2", "indexmap 2.2.6", "log", - "serde 1.0.203", + "serde", "serde_derive", "serde_json", "wasm-encoder 0.200.0", - "wasm-metadata 0.200.0", + "wasm-metadata", "wasmparser 0.200.0", "wit-parser 0.200.0", ] -[[package]] -name = "wit-component" -version = "0.209.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25a2bb5b039f9cb03425e1d5a6e54b441ca4ca1b1d4fa6a0924db67a55168f99" -dependencies = [ - "anyhow", - "bitflags 2.4.2", - "indexmap 2.2.6", - "log", - "serde 1.0.203", - "serde_derive", - "serde_json", - "wasm-encoder 0.209.1", - "wasm-metadata 0.209.1", - "wasmparser 0.209.1", - "wit-parser 0.209.1", -] - [[package]] name = "wit-parser" version = "0.200.0" @@ -8151,7 +5831,7 @@ dependencies = [ "indexmap 2.2.6", "log", "semver", - "serde 1.0.203", + "serde", "serde_derive", "serde_json", "unicode-xid", @@ -8169,7 +5849,7 @@ dependencies = [ "indexmap 2.2.6", "log", "semver", - "serde 1.0.203", + "serde", "serde_derive", "serde_json", "unicode-xid", @@ -8188,114 +5868,13 @@ dependencies = [ "wast 35.0.2", ] -[[package]] -name = "xattr" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" -dependencies = [ - "libc", - "linux-raw-sys 0.4.13", - "rustix 0.38.31", -] - -[[package]] -name = "xdg-home" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca91dcf8f93db085f3a0a29358cd0b9d670915468f4290e8b85d118a34211ab8" -dependencies = [ - 
"libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "yaml-rust" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" -dependencies = [ - "linked-hash-map", -] - -[[package]] -name = "yansi" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" - -[[package]] -name = "zbus" -version = "3.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "675d170b632a6ad49804c8cf2105d7c31eddd3312555cffd4b740e08e97c25e6" -dependencies = [ - "async-broadcast", - "async-executor", - "async-fs", - "async-io 1.13.0", - "async-lock 2.8.0", - "async-process 1.8.1", - "async-recursion", - "async-task", - "async-trait", - "blocking", - "byteorder", - "derivative", - "enumflags2", - "event-listener 2.5.3", - "futures-core", - "futures-sink", - "futures-util", - "hex", - "nix 0.26.4", - "once_cell", - "ordered-stream", - "rand 0.8.5", - "serde 1.0.203", - "serde_repr", - "sha1 0.10.6", - "static_assertions", - "tracing", - "uds_windows", - "winapi", - "xdg-home", - "zbus_macros", - "zbus_names", - "zvariant", -] - -[[package]] -name = "zbus_macros" -version = "3.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7131497b0f887e8061b430c530240063d33bf9455fa34438f388a245da69e0a5" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "regex", - "syn 1.0.109", - "zvariant_utils", -] - -[[package]] -name = "zbus_names" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "437d738d3750bed6ca9b8d423ccc7a8eb284f6b1d6d4e225a0e4e6258d864c8d" -dependencies = [ - "serde 1.0.203", - "static_assertions", - "zvariant", -] - [[package]] name = "zerocopy" version = "0.7.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" dependencies = [ + "byteorder", "zerocopy-derive", ] @@ -8307,43 +5886,14 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.75", ] [[package]] name = "zeroize" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" - -[[package]] -name = "zip" -version = "0.6.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" -dependencies = [ - "aes", - "byteorder", - "bzip2", - "constant_time_eq", - "crc32fast", - "crossbeam-utils", - "flate2", - "hmac", - "pbkdf2", - "sha1 0.10.6", - "time", - "zstd 0.11.2+zstd.1.5.2", -] - -[[package]] -name = "zstd" -version = "0.11.2+zstd.1.5.2" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4" -dependencies = [ - "zstd-safe 5.0.2+zstd.1.5.2", -] +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" [[package]] name = "zstd" @@ -8363,16 +5913,6 @@ dependencies = [ "zstd-safe 7.1.0", ] -[[package]] -name = "zstd-safe" -version = "5.0.2+zstd.1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2a5585e04f9eea4b2a3d1eca508c4dee9592a89ef6f450c11719da0726f4db" -dependencies = [ - "libc", - 
"zstd-sys", -] - [[package]] name = "zstd-safe" version = "6.0.6" @@ -8401,41 +5941,3 @@ dependencies = [ "cc", "pkg-config", ] - -[[package]] -name = "zvariant" -version = "3.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eef2be88ba09b358d3b58aca6e41cd853631d44787f319a1383ca83424fb2db" -dependencies = [ - "byteorder", - "enumflags2", - "libc", - "serde 1.0.203", - "static_assertions", - "zvariant_derive", -] - -[[package]] -name = "zvariant_derive" -version = "3.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37c24dc0bed72f5f90d1f8bb5b07228cbf63b3c6e9f82d82559d4bae666e7ed9" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 1.0.109", - "zvariant_utils", -] - -[[package]] -name = "zvariant_utils" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7234f0d811589db492d16893e3f21e8e2fd282e6d01b0cddee310322062cc200" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] diff --git a/examples/spin-timer/Cargo.toml b/examples/spin-timer/Cargo.toml index 43472f0ec5..5d2d37d4ba 100644 --- a/examples/spin-timer/Cargo.toml +++ b/examples/spin-timer/Cargo.toml @@ -9,8 +9,6 @@ anyhow = "1.0.68" clap = { version = "3.1.15", features = ["derive", "env"] } futures = "0.3.25" serde = "1.0.188" -spin-app = { path = "../../crates/app" } -spin-core = { path = "../../crates/core" } spin-trigger = { path = "../../crates/trigger" } tokio = { version = "1.11", features = ["full"] } tokio-scoped = "0.2.0" diff --git a/examples/spin-timer/src/lib.rs b/examples/spin-timer/src/lib.rs index a9e9be54e3..58bb7e8e91 100644 --- a/examples/spin-timer/src/lib.rs +++ b/examples/spin-timer/src/lib.rs @@ -2,9 +2,7 @@ use std::collections::HashMap; use clap::Args; use serde::{Deserialize, Serialize}; -use spin_app::MetadataKey; -use spin_core::{async_trait, InstancePre}; -use spin_trigger::{TriggerAppEngine, TriggerExecutor}; +use spin_trigger::{App, Trigger, TriggerApp}; wasmtime::component::bindgen!({ path: ".", @@ -12,9 +10,6 @@ wasmtime::component::bindgen!({ async: true }); -pub(crate) type RuntimeData = (); -pub(crate) type _Store = spin_core::Store; - #[derive(Args)] pub struct CliArgs { /// If true, run each component once and exit @@ -24,7 +19,7 @@ pub struct CliArgs { // The trigger structure with all values processed and ready pub struct TimerTrigger { - engine: TriggerAppEngine, + test: bool, speedup: u64, component_timings: HashMap, } @@ -50,45 +45,36 @@ pub struct TimerTriggerConfig { interval_secs: u64, } -const TRIGGER_METADATA_KEY: MetadataKey = MetadataKey::new("triggers"); - -#[async_trait] -impl TriggerExecutor for TimerTrigger { - const TRIGGER_TYPE: &'static str = "timer"; - - type RuntimeData = RuntimeData; +impl Trigger for TimerTrigger { + const TYPE: &'static str = "timer"; - type TriggerConfig = TimerTriggerConfig; + type CliArgs = CliArgs; - type RunConfig = CliArgs; + type InstanceState = (); - type InstancePre = InstancePre; + fn new(cli_args: Self::CliArgs, app: &App) -> anyhow::Result { + let metadata = app + .get_trigger_metadata::(Self::TYPE)? + .unwrap_or_default(); + let speedup = metadata.speedup.unwrap_or(1); - async fn new(engine: spin_trigger::TriggerAppEngine) -> anyhow::Result { - let speedup = engine - .app() - .require_metadata(TRIGGER_METADATA_KEY)? - .timer - .unwrap_or_default() - .speedup - .unwrap_or(1); - - let component_timings = engine - .trigger_configs() + let component_timings = app + .trigger_configs::(Self::TYPE)? 
+ .into_iter() .map(|(_, config)| (config.component.clone(), config.interval_secs)) .collect(); Ok(Self { - engine, + test: cli_args.test, speedup, component_timings, }) } - async fn run(self, config: Self::RunConfig) -> anyhow::Result<()> { - if config.test { + async fn run(self, trigger_app: TriggerApp) -> anyhow::Result<()> { + if self.test { for component in self.component_timings.keys() { - self.handle_timer_event(component).await?; + self.handle_timer_event(&trigger_app, component).await?; } } else { // This trigger spawns threads, which Ctrl+C does not kill. So @@ -102,12 +88,16 @@ impl TriggerExecutor for TimerTrigger { let speedup = self.speedup; tokio_scoped::scope(|scope| { // For each component, run its own timer loop - for (c, d) in &self.component_timings { + for (component_id, interval_secs) in &self.component_timings { scope.spawn(async { - let duration = tokio::time::Duration::from_millis(*d * 1000 / speedup); + let duration = + tokio::time::Duration::from_millis(*interval_secs * 1000 / speedup); loop { tokio::time::sleep(duration).await; - self.handle_timer_event(c).await.unwrap(); + + self.handle_timer_event(&trigger_app, component_id) + .await + .unwrap(); } }); } @@ -118,11 +108,14 @@ impl TriggerExecutor for TimerTrigger { } impl TimerTrigger { - async fn handle_timer_event(&self, component_id: &str) -> anyhow::Result<()> { - // Load the guest... - let (instance, mut store) = self.engine.prepare_instance(component_id).await?; - let instance = SpinTimer::new(&mut store, &instance)?; - // ...and call the entry point - instance.call_handle_timer_request(&mut store).await + async fn handle_timer_event( + &self, + trigger_app: &TriggerApp, + component_id: &str, + ) -> anyhow::Result<()> { + let instance_builder = trigger_app.prepare(component_id)?; + let (instance, mut store) = instance_builder.instantiate(()).await?; + let timer = SpinTimer::new(&mut store, &instance)?; + timer.call_handle_timer_request(&mut store).await } } diff --git a/examples/spin-timer/src/main.rs b/examples/spin-timer/src/main.rs index 49ba3dfabe..d9c7203b37 100644 --- a/examples/spin-timer/src/main.rs +++ b/examples/spin-timer/src/main.rs @@ -1,9 +1,10 @@ use anyhow::Error; use clap::Parser; -use spin_trigger::cli::TriggerExecutorCommand; +use spin_trigger::cli::FactorsTriggerCommand; + use trigger_timer::TimerTrigger; -type Command = TriggerExecutorCommand; +type Command = FactorsTriggerCommand; #[tokio::main] async fn main() -> Result<(), Error> { diff --git a/examples/spin-wagi-http/spin.toml b/examples/spin-wagi-http/spin.toml index e6280308fd..37ecaee466 100644 --- a/examples/spin-wagi-http/spin.toml +++ b/examples/spin-wagi-http/spin.toml @@ -9,7 +9,7 @@ version = "1.0.0" [[trigger.http]] route = "/hello" component = "hello" -executor = { type = "wagi" } # _start (the default entrypoint) is automatically mapped to main() +executor = { type = "wagi" } [[trigger.http]] route = "/goodbye" diff --git a/examples/wagi-http-rust/spin.toml b/examples/wagi-http-rust/spin.toml index c1cc3f0657..c5d1a7821a 100644 --- a/examples/wagi-http-rust/spin.toml +++ b/examples/wagi-http-rust/spin.toml @@ -9,7 +9,7 @@ version = "1.0.0" [[trigger.http]] route = "/env" component = "env" -executor = { type = "wagi" } # _start (the default entrypoint) is automatically mapped to main() +executor = { type = "wagi" } [component.env] source = "target/wasm32-wasi/release/wagihelloworld.wasm" diff --git a/src/bin/spin.rs b/src/bin/spin.rs index 481f48d0c3..ce5c83671b 100644 --- a/src/bin/spin.rs +++ b/src/bin/spin.rs @@ 
-16,7 +16,7 @@ use spin_cli::commands::{ }; use spin_cli::{build_info::*, subprocess::ExitStatusError}; use spin_trigger::cli::help::HelpArgsOnlyTrigger; -use spin_trigger::cli::TriggerExecutorCommand; +use spin_trigger::cli::FactorsTriggerCommand; use spin_trigger_http::HttpTrigger; use spin_trigger_redis::RedisTrigger; @@ -73,7 +73,10 @@ async fn _main() -> anyhow::Result<()> { } } - SpinApp::from_arg_matches(&matches)?.run(cmd).await + SpinApp::from_arg_matches(&matches)? + .run(cmd) + .await + .inspect_err(|err| tracing::debug!(?err)) } fn print_error_chain(err: anyhow::Error) { @@ -136,10 +139,10 @@ enum SpinApp { #[derive(Subcommand)] enum TriggerCommands { - Http(TriggerExecutorCommand), - Redis(TriggerExecutorCommand), + Http(FactorsTriggerCommand), + Redis(FactorsTriggerCommand), #[clap(name = spin_cli::HELP_ARGS_ONLY_TRIGGER_TYPE, hide = true)] - HelpArgsOnly(TriggerExecutorCommand), + HelpArgsOnly(FactorsTriggerCommand), } impl SpinApp { diff --git a/supply-chain/config.toml b/supply-chain/config.toml index 142f4441ce..27eaaba2c5 100644 --- a/supply-chain/config.toml +++ b/supply-chain/config.toml @@ -1104,14 +1104,6 @@ criteria = "safe-to-deploy" version = "6.3.0" criteria = "safe-to-deploy" -[[exemptions.ouroboros]] -version = "0.15.6" -criteria = "safe-to-deploy" - -[[exemptions.ouroboros_macro]] -version = "0.15.6" -criteria = "safe-to-deploy" - [[exemptions.overload]] version = "0.1.1" criteria = "safe-to-deploy" diff --git a/tests/conformance-tests/src/main.rs b/tests/conformance-tests/src/main.rs index b6e5804c38..c297b2d07c 100644 --- a/tests/conformance-tests/src/main.rs +++ b/tests/conformance-tests/src/main.rs @@ -3,7 +3,7 @@ fn main() { .nth(1) .expect("expected first argument to be path to spin binary") .into(); - conformance_tests::run_tests("v0.1.0", move |test| { + conformance_tests::run_tests("canary", move |test| { conformance::run_test(test, &spin_binary) }) .unwrap(); diff --git a/tests/integration.rs b/tests/integration.rs index ee869aeef4..c09e241d3f 100644 --- a/tests/integration.rs +++ b/tests/integration.rs @@ -72,12 +72,12 @@ mod integration_tests { let spin = env.runtime_mut(); assert_spin_request( spin, - Request::new(Method::Get, "/test/hello"), + Request::new(Method::Get, "/hello"), Response::new_with_body(200, "I'm a teapot"), )?; assert_spin_request( spin, - Request::new(Method::Get, "/test/hello/wildcards/should/be/handled"), + Request::new(Method::Get, "/hello/wildcards/should/be/handled"), Response::new_with_body(200, "I'm a teapot"), )?; assert_spin_request( @@ -87,7 +87,7 @@ mod integration_tests { )?; assert_spin_request( spin, - Request::new(Method::Get, "/test/hello/test-placement"), + Request::new(Method::Get, "/hello/test-placement"), Response::new_with_body(200, "text for test"), ) }, @@ -183,7 +183,7 @@ mod integration_tests { let spin = env.runtime_mut(); assert_spin_request( spin, - Request::new(Method::Get, "/test/hello"), + Request::new(Method::Get, "/hello"), Response::new_with_body(200, "Hello, Fermyon!\n"), )?; @@ -341,8 +341,13 @@ mod integration_tests { app_type: SpinAppType::None, }, ServicesConfig::none(), - |_| Ok(()), + |env| { + // Since this test asserts exact stderr output, disable logging + env.set_env_var("RUST_LOG", "off"); + Ok(()) + }, )?; + let expected = r#"Error: Couldn't find trigger executor for local app "spin.toml" Caused by: @@ -368,13 +373,13 @@ Caused by: let spin = env.runtime_mut(); assert_spin_request( spin, - Request::new(Method::Get, "/test/outbound-allowed"), + Request::new(Method::Get, 
"/outbound-allowed"), Response::new_with_body(200, "Hello, Fermyon!\n"), )?; assert_spin_request( spin, - Request::new(Method::Get, "/test/outbound-not-allowed"), + Request::new(Method::Get, "/outbound-not-allowed"), Response::new_with_body( 500, "Error::UnexpectedError(\"ErrorCode::HttpRequestDenied\")", @@ -421,14 +426,10 @@ Caused by: Response::new_with_body(expected_status, expected_body), ) }; - ensure_success("/test/hello", 200, "I'm a teapot")?; - ensure_success( - "/test/hello/wildcards/should/be/handled", - 200, - "I'm a teapot", - )?; + ensure_success("/hello", 200, "I'm a teapot")?; + ensure_success("/hello/wildcards/should/be/handled", 200, "I'm a teapot")?; ensure_success("/thisshouldfail", 404, "")?; - ensure_success("/test/hello/test-placement", 200, "text for test")?; + ensure_success("/hello/test-placement", 200, "text for test")?; Ok(()) }, )?; @@ -627,6 +628,7 @@ Caused by: #[test] #[cfg(target_arch = "x86_64")] #[cfg(feature = "extern-dependencies-tests")] + #[ignore = "https://github.com/fermyon/spin/issues/2774"] fn http_grain_template_smoke_test() -> anyhow::Result<()> { http_smoke_test_template( "http-grain", @@ -655,6 +657,7 @@ Caused by: #[test] #[cfg(feature = "extern-dependencies-tests")] + #[ignore = "https://github.com/fermyon/spin/issues/2774"] fn http_swift_template_smoke_test() -> anyhow::Result<()> { http_smoke_test_template( "http-swift", @@ -1255,14 +1258,14 @@ route = "/..." let spin = env.runtime_mut(); assert_spin_request( spin, - Request::full(Method::Get, "/base/echo", &[], Some("Echo...")), + Request::full(Method::Get, "/echo", &[], Some("Echo...")), Response::new_with_body(200, "Echo..."), )?; assert_spin_request( spin, Request::full( Method::Get, - "/base/assert-headers?k=v", + "/assert-headers?k=v", &[("X-Custom-Foo", "bar")], Some(r#"{"x-custom-foo": "bar"}"#), ), @@ -1288,16 +1291,16 @@ route = "/..." let spin = env.runtime_mut(); assert_spin_request( spin, - Request::full(Method::Get, "/base/echo", &[], Some("Echo...")), + Request::full(Method::Get, "/echo", &[], Some("Echo...")), Response::new_with_body(200, "Echo..."), )?; assert_spin_request( spin, Request::full( Method::Get, - "/base/assert-args?x=y", + "/assert-args?x=y", &[], - Some(r#"["/base/assert-args", "x=y"]"#), + Some(r#"["/assert-args", "x=y"]"#), ), Response::new(200), )?; @@ -1305,7 +1308,7 @@ route = "/..." spin, Request::full( Method::Get, - "/base/assert-env", + "/assert-env", &[("X-Custom-Foo", "bar")], Some(r#"{"HTTP_X_CUSTOM_FOO": "bar"}"#), ), @@ -1464,7 +1467,7 @@ route = "/..." 
spin, Request::full( Method::Get, - "/test/outbound-allowed/hello", + "/outbound-allowed/hello", &[("Host", "google.com")], Some(""), ), diff --git a/tests/runtime-tests/src/lib.rs b/tests/runtime-tests/src/lib.rs index 560fdd851b..c4427e508f 100644 --- a/tests/runtime-tests/src/lib.rs +++ b/tests/runtime-tests/src/lib.rs @@ -86,7 +86,7 @@ impl RuntimeTest { pub fn run(&mut self) { self.run_test(|env| { let runtime = env.runtime_mut(); - let request = Request::new(Method::Get, "/"); + let request: Request = Request::full(Method::Get, "/", &[("Host", "localhost")], None); let response = runtime.make_http_request(request)?; if response.status() == 200 { return Ok(()); @@ -147,7 +147,7 @@ impl RuntimeTest { pub fn run(&mut self) { self.run_test(|env| { let runtime = env.runtime_mut(); - let response = runtime.make_http_request(Request::new(Method::Get, "/"))?; + let response = runtime.make_http_request(Request::full(Method::Get, "/", &[("Host", "localhost")],None))?; if response.status() == 200 { return Ok(()); } diff --git a/tests/runtime-tests/tests/llm/spin.toml b/tests/runtime-tests/tests/llm/spin.toml index 9a1e18e1f9..bee13ef4a6 100644 --- a/tests/runtime-tests/tests/llm/spin.toml +++ b/tests/runtime-tests/tests/llm/spin.toml @@ -2,7 +2,7 @@ spin_manifest_version = "1" authors = ["Ryan Levick "] description = "" name = "ai" -trigger = { type = "http", base = "/" } +trigger = { type = "http" } version = "0.1.0" [[component]] diff --git a/tests/runtime.rs b/tests/runtime.rs index b18bc3d517..ba3b6b41cd 100644 --- a/tests/runtime.rs +++ b/tests/runtime.rs @@ -28,7 +28,7 @@ mod runtime_tests { #[test] fn conformance_tests() { - conformance_tests::run_tests("v0.1.0", move |test| { + conformance_tests::run_tests("canary", move |test| { conformance::run_test(test, &spin_binary()) }) .unwrap(); diff --git a/tests/test-components/components/internal-http-middle/src/lib.rs b/tests/test-components/components/internal-http-middle/src/lib.rs index 32160ab55d..6095422f01 100644 --- a/tests/test-components/components/internal-http-middle/src/lib.rs +++ b/tests/test-components/components/internal-http-middle/src/lib.rs @@ -16,10 +16,6 @@ async fn handle_middle_impl(req: Request) -> Result { .header("spin-path-info") .and_then(|v| v.as_str()); let inbound_rel_path = ensure_some!(inbound_rel_path); - let inbound_base = req - .header("spin-base-path") - .and_then(|v| v.as_str()); - ensure_eq!("/", ensure_some!(inbound_base)); let out_req = spin_sdk::http::Request::builder() .uri("https://back.spin.internal/hello/from/middle") diff --git a/tests/test-components/components/outbound-http/src/lib.rs b/tests/test-components/components/outbound-http/src/lib.rs index 5799ac6674..d9155ab843 100644 --- a/tests/test-components/components/outbound-http/src/lib.rs +++ b/tests/test-components/components/outbound-http/src/lib.rs @@ -10,7 +10,7 @@ async fn send_outbound(_req: Request) -> Result { let mut res: http::Response = spin_sdk::http::send( http::Request::builder() .method("GET") - .uri("/test/hello") + .uri("/hello") .body(())?, ) .await?; diff --git a/tests/testcases/http-smoke-test/spin.toml b/tests/testcases/http-smoke-test/spin.toml index b12a0e28d6..c0340f9e2c 100644 --- a/tests/testcases/http-smoke-test/spin.toml +++ b/tests/testcases/http-smoke-test/spin.toml @@ -2,7 +2,7 @@ spin_version = "1" authors = ["Fermyon Engineering "] description = "A simple application that returns hello and goodbye." 
name = "head-rust-sdk-http" -trigger = { type = "http", base = "/test" } +trigger = { type = "http" } version = "1.0.0" [variables] diff --git a/tests/testcases/key-value/spin.toml b/tests/testcases/key-value/spin.toml index d241467176..3202eb21d3 100644 --- a/tests/testcases/key-value/spin.toml +++ b/tests/testcases/key-value/spin.toml @@ -2,7 +2,7 @@ spin_version = "1" authors = ["Fermyon Engineering "] description = "A simple application that exercises key/value storage." name = "key-value" -trigger = { type = "http", base = "/test" } +trigger = { type = "http" } version = "1.0.0" [[component]] diff --git a/tests/testcases/otel-smoke-test/spin.toml b/tests/testcases/otel-smoke-test/spin.toml index c5911038b5..a4eb09f671 100644 --- a/tests/testcases/otel-smoke-test/spin.toml +++ b/tests/testcases/otel-smoke-test/spin.toml @@ -2,7 +2,7 @@ spin_version = "1" authors = ["Fermyon Engineering "] description = "A simple application that returns hello and goodbye." name = "head-rust-sdk-http" -trigger = { type = "http", base = "/test" } +trigger = { type = "http" } version = "1.0.0" [[component]] diff --git a/tests/testcases/outbound-http-to-same-app/spin.toml b/tests/testcases/outbound-http-to-same-app/spin.toml index 90e517f550..8f241d7e63 100644 --- a/tests/testcases/outbound-http-to-same-app/spin.toml +++ b/tests/testcases/outbound-http-to-same-app/spin.toml @@ -2,7 +2,7 @@ spin_version = "1" authors = ["Fermyon Engineering "] description = "An application that demonstates a component making an outbound http request to another component in the same application." name = "local-outbound-http" -trigger = { type = "http", base = "/test" } +trigger = { type = "http" } version = "1.0.0" [[component]] diff --git a/tests/testcases/simple-test/spin.toml b/tests/testcases/simple-test/spin.toml index e069317e11..26fd2fa707 100644 --- a/tests/testcases/simple-test/spin.toml +++ b/tests/testcases/simple-test/spin.toml @@ -2,7 +2,7 @@ spin_version = "1" authors = ["Fermyon Engineering "] description = "A simple application that returns hello and goodbye." name = "spin-hello-world" -trigger = { type = "http", base = "/test" } +trigger = { type = "http" } version = "1.0.0" [variables] diff --git a/tests/testcases/spin-inbound-http/spin.toml b/tests/testcases/spin-inbound-http/spin.toml index 5c018b71e6..a97853d344 100644 --- a/tests/testcases/spin-inbound-http/spin.toml +++ b/tests/testcases/spin-inbound-http/spin.toml @@ -6,9 +6,6 @@ description = "Test using the spin inbound-http interface." name = "spin-inbound-http" version = "1.0.0" -[application.trigger.http] -base = "/base" - [[trigger.http]] route = "/..." [trigger.http.component] diff --git a/tests/testcases/wagi-http/spin.toml b/tests/testcases/wagi-http/spin.toml index 871d681b4a..5e3f028127 100644 --- a/tests/testcases/wagi-http/spin.toml +++ b/tests/testcases/wagi-http/spin.toml @@ -6,9 +6,6 @@ description = "Test using WAGI HTTP." name = "wagi-http" version = "1.0.0" -[application.trigger.http] -base = "/base" - [[trigger.http]] route = "/..." 
executor = { type = "wagi" } diff --git a/tests/testing-framework/Cargo.toml b/tests/testing-framework/Cargo.toml index d247b4123b..8448faaefe 100644 --- a/tests/testing-framework/Cargo.toml +++ b/tests/testing-framework/Cargo.toml @@ -14,10 +14,12 @@ regex = "1.10.2" reqwest = { workspace = true } temp-dir = "0.1.11" test-environment = { workspace = true } -spin-trigger-http = { path = "../../crates/trigger-http" } +spin-app = { path = "../../crates/app" } +spin-factors-executor = { path = "../../crates/factors-executor" } spin-http = { path = "../../crates/http" } -spin-trigger = { path = "../../crates/trigger" } spin-loader = { path = "../../crates/loader" } +spin-trigger = { path = "../../crates/trigger" } +spin-trigger-http = { path = "../../crates/trigger-http" } toml = "0.8.6" tokio = "1.23" wasmtime-wasi-http = { workspace = true } diff --git a/tests/testing-framework/src/runtimes/in_process_spin.rs b/tests/testing-framework/src/runtimes/in_process_spin.rs index 5574c72b55..35a8ce9afb 100644 --- a/tests/testing-framework/src/runtimes/in_process_spin.rs +++ b/tests/testing-framework/src/runtimes/in_process_spin.rs @@ -1,6 +1,10 @@ //! The Spin runtime running in the same process as the test +use std::{path::PathBuf, sync::Arc}; + use anyhow::Context as _; +use spin_trigger::cli::{TriggerAppBuilder, TriggerAppOptions}; +use spin_trigger_http::{HttpServer, HttpTrigger}; use test_environment::{ http::{Request, Response}, services::ServicesConfig, @@ -11,7 +15,7 @@ use test_environment::{ /// /// Use `runtimes::spin_cli::SpinCli` if you'd rather use Spin as a separate process pub struct InProcessSpin { - trigger: spin_trigger_http::HttpTrigger, + server: Arc, } impl InProcessSpin { @@ -32,31 +36,43 @@ impl InProcessSpin { } /// Create a new instance of Spin running in the same process as the tests - pub fn new(trigger: spin_trigger_http::HttpTrigger) -> Self { - Self { trigger } + pub fn new(server: Arc) -> Self { + Self { server } } /// Make an HTTP request to the Spin instance pub fn make_http_request(&self, req: Request<'_, &[u8]>) -> anyhow::Result { tokio::runtime::Runtime::new()?.block_on(async { let method: reqwest::Method = req.method.into(); - let req = http::request::Request::builder() + let mut builder = http::request::Request::builder() .method(method) - .uri(req.path) - // TODO(rylev): convert headers and body as well - .body(spin_http::body::empty()) - .unwrap(); + .uri(req.path); + + for (key, value) in req.headers { + builder = builder.header(*key, *value); + } + // TODO(rylev): convert body as well + let req = builder.body(spin_http::body::empty()).unwrap(); let response = self - .trigger + .server .handle( req, http::uri::Scheme::HTTP, - (std::net::Ipv4Addr::LOCALHOST, 3000).into(), (std::net::Ipv4Addr::LOCALHOST, 7000).into(), ) .await?; use http_body_util::BodyExt; let status = response.status().as_u16(); + let headers = response + .headers() + .iter() + .map(|(k, v)| { + ( + k.as_str().to_owned(), + String::from_utf8(v.as_bytes().to_owned()).unwrap(), + ) + }) + .collect(); let body = response.into_body(); let chunks = body .collect() @@ -64,7 +80,7 @@ impl InProcessSpin { .context("could not get runtime test HTTP response")? 
.to_bytes() .to_vec(); - Ok(Response::full(status, Default::default(), chunks)) + Ok(Response::full(status, headers, chunks)) }) } } @@ -79,33 +95,18 @@ impl Runtime for InProcessSpin { async fn initialize_trigger( env: &mut TestEnvironment, ) -> anyhow::Result { - use spin_trigger::{ - loader::TriggerLoader, HostComponentInitData, RuntimeConfig, TriggerExecutorBuilder, - }; - use spin_trigger_http::HttpTrigger; - - // Create the locked app and write it to a file let locked_app = spin_loader::from_file( env.path().join("spin.toml"), spin_loader::FilesMountStrategy::Direct, None, ) .await?; - let json = locked_app.to_json()?; - std::fs::write(env.path().join("locked.json"), json)?; - // Create a loader and trigger builder - let loader = TriggerLoader::new(env.path().join(".working_dir"), false); - let mut builder = TriggerExecutorBuilder::::new(loader); - builder.hooks(spin_trigger::network::Network::default()); + let app = spin_app::App::new("my-app", locked_app); + let trigger = HttpTrigger::new(&app, "127.0.0.1:80".parse().unwrap(), None)?; + let mut builder = TriggerAppBuilder::new(trigger, PathBuf::from(".")); + let trigger_app = builder.build(app, TriggerAppOptions::default()).await?; + let server = builder.trigger.into_server(trigger_app)?; - // Build the trigger - let trigger = builder - .build( - format!("file:{}", env.path().join("locked.json").display()), - RuntimeConfig::default(), - HostComponentInitData::default(), - ) - .await?; - Ok(InProcessSpin::new(trigger)) + Ok(InProcessSpin::new(server)) } diff --git a/tests/testing-framework/src/runtimes/spin_cli.rs b/tests/testing-framework/src/runtimes/spin_cli.rs index 8bb46ee3b3..44332433e2 100644 --- a/tests/testing-framework/src/runtimes/spin_cli.rs +++ b/tests/testing-framework/src/runtimes/spin_cli.rs @@ -59,6 +59,7 @@ impl SpinCli { let port = get_random_port()?; let mut spin_cmd = Command::new(spin_config.binary_path); let child = spin_cmd + .envs(env.env_vars()) .arg("up") .current_dir(env.path()) .args(["--listen", &format!("127.0.0.1:{port}")]) @@ -117,6 +118,7 @@ impl SpinCli { env: &mut TestEnvironment, ) -> anyhow::Result { let mut child = Command::new(spin_config.binary_path) + .envs(env.env_vars()) .arg("up") .current_dir(env.path()) .args(spin_config.spin_up_args) @@ -149,6 +151,7 @@ impl SpinCli { env: &mut TestEnvironment, ) -> anyhow::Result { let mut child = Command::new(spin_config.binary_path) + .envs(env.env_vars()) .arg("up") .current_dir(env.path()) .args(spin_config.spin_up_args)