Mirror of https://github.com/leptos-rs/leptos.git (synced 2025-12-27 15:44:42 -05:00)
Compare commits
89 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 1524386346 | |
| | 426b079709 | |
| | c6f176e2b0 | |
| | 75662d08e7 | |
| | 4448b77cde | |
| | 956af8e466 | |
| | 8c469b85d6 | |
| | 7f93dd224d | |
| | 777b5e1e54 | |
| | 433f7284e6 | |
| | 4a8a212d84 | |
| | 1d7bc021af | |
| | 74055a7e13 | |
| | c98082de74 | |
| | b8d44e20a9 | |
| | 00e83e0d70 | |
| | e89b1389ca | |
| | bd454d03e2 | |
| | d7f4457ea4 | |
| | 17d357bcec | |
| | 66d1bead9a | |
| | 69c918e813 | |
| | 2817a261ce | |
| | 972b1ff90b | |
| | 3a66a1f3d3 | |
| | 504f983996 | |
| | 0862385816 | |
| | 8319446d3f | |
| | 5fa31941bb | |
| | f4bb87ea1e | |
| | 016fbf8da1 | |
| | 21fd995468 | |
| | 683e7177dd | |
| | 33b278c014 | |
| | 5fc56346f4 | |
| | afb37aaf4b | |
| | f8fd79725a | |
| | 131251b361 | |
| | 91fb315fe0 | |
| | 6954b77b62 | |
| | 77176f8395 | |
| | 344b79a01b | |
| | 051059c761 | |
| | 3c540dd858 | |
| | 4125688a0a | |
| | bd3b962cfb | |
| | 5dd3c217c4 | |
| | ae00e5ae13 | |
| | 1ce671ba08 | |
| | ec9f26bd9f | |
| | 831eae31bc | |
| | ff6ae5de25 | |
| | c21712ba04 | |
| | 45771b6fd3 | |
| | f3557970a7 | |
| | c87ef331b0 | |
| | e767518142 | |
| | f94b681118 | |
| | 9c50e49253 | |
| | 57c7097ede | |
| | 1a06e0eee8 | |
| | ce9af4a685 | |
| | e0c79eb8d8 | |
| | 9fd972971e | |
| | 9473220639 | |
| | ae11812dc6 | |
| | 4c55c25445 | |
| | 649b5fbe9e | |
| | adb3e75efc | |
| | f303aa6d5c | |
| | 73ca3d7b04 | |
| | 235393bfbe | |
| | 17d8e2bd09 | |
| | f51c676e0d | |
| | cf0aa0e4d7 | |
| | df09d4a7f6 | |
| | 30b0a579ca | |
| | 50a4c3b0d9 | |
| | c76649d77b | |
| | 911be5007e | |
| | 5227221c96 | |
| | 3f48b77256 | |
| | 99117f496f | |
| | cf12ea3404 | |
| | d555c1e0ce | |
| | 40ea20057f | |
| | 5587ccd1eb | |
| | 50a9df9eea | |
| | c46b1c4e25 | |

10  .github/workflows/autofix.yml  vendored

@@ -21,7 +21,13 @@ jobs:
     steps:
       - uses: actions/checkout@v4
       - uses: actions-rust-lang/setup-rust-toolchain@v1
-        with: {toolchain: "nightly-2025-04-16", components: "rustfmt, clippy", target: "wasm32-unknown-unknown", rustflags: ""}
+        with:
+          {
+            toolchain: "nightly-2025-07-16",
+            components: "rustfmt, clippy",
+            target: "wasm32-unknown-unknown",
+            rustflags: "",
+          }
       - name: Install Glib
         run: |
           sudo apt-get update
@@ -34,7 +40,7 @@ jobs:
         run: cargo fmt --all
       - name: Clippy the workspace
         run: cargo all-features clippy --allow-dirty --fix --lib --no-deps
-      - uses: autofix-ci/action@v1.3.1
+      - uses: autofix-ci/action@v1.3.2
         if: ${{ always() }}
         with:
           fail-fast: false

6  .github/workflows/run-cargo-make-task.yml  vendored

@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        toolchain: [stable, nightly-2025-04-16]
+        toolchain: [stable, nightly-2025-07-16]
         erased_mode: [true, false]
     steps:
       - name: Free Disk Space
@@ -169,7 +169,9 @@ jobs:
           cd '${{ inputs.directory }}'
           cargo make --no-workspace --profile=github-actions ci
           # check the direct-minimal-versions on release
-          if [[ "${{ github.ref_name }}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+          COMMIT_MSG=$(git log -1 --pretty=format:'%s')
+          # Supports: v1.2.3, v1.2.3-alpha, v1.2.3-beta1, v1.2.3-rc.1, etc.
+          if [[ "$COMMIT_MSG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9]+(\.?[0-9]+)?)?$ ]]; then
            cargo make --no-workspace --profile=github-actions check-minimal-versions
           fi
           # Check if the counter_isomorphic can be built with leptos_debuginfo cfg flag in release mode

625  Cargo.lock  generated
File diff suppressed because it is too large

59  Cargo.toml

@@ -40,43 +40,43 @@ members = [
 exclude = ["benchmarks", "examples", "projects"]
 
 [workspace.package]
-version = "0.8.2"
+version = "0.8.4"
 edition = "2021"
-rust-version = "1.76"
+rust-version = "1.88"
 
 [workspace.dependencies]
 # members
 throw_error = { path = "./any_error/", version = "0.3.0" }
 any_spawner = { path = "./any_spawner/", version = "0.3.0" }
 const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
-either_of = { path = "./either_of/", version = "0.1.5" }
+either_of = { path = "./either_of/", version = "0.1.6" }
 hydration_context = { path = "./hydration_context", version = "0.3.0" }
-leptos = { path = "./leptos", version = "0.8.2" }
-leptos_config = { path = "./leptos_config", version = "0.8.2" }
-leptos_dom = { path = "./leptos_dom", version = "0.8.2" }
-leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.2" }
-leptos_integration_utils = { path = "./integrations/utils", version = "0.8.2" }
-leptos_macro = { path = "./leptos_macro", version = "0.8.2" }
-leptos_router = { path = "./router", version = "0.8.2" }
-leptos_router_macro = { path = "./router_macro", version = "0.8.2" }
-leptos_server = { path = "./leptos_server", version = "0.8.2" }
-leptos_meta = { path = "./meta", version = "0.8.2" }
+leptos = { path = "./leptos", version = "0.8.4" }
+leptos_config = { path = "./leptos_config", version = "0.8.4" }
+leptos_dom = { path = "./leptos_dom", version = "0.8.4" }
+leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.4" }
+leptos_integration_utils = { path = "./integrations/utils", version = "0.8.4" }
+leptos_macro = { path = "./leptos_macro", version = "0.8.4" }
+leptos_router = { path = "./router", version = "0.8.4" }
+leptos_router_macro = { path = "./router_macro", version = "0.8.4" }
+leptos_server = { path = "./leptos_server", version = "0.8.4" }
+leptos_meta = { path = "./meta", version = "0.8.4" }
 next_tuple = { path = "./next_tuple", version = "0.1.0" }
 oco_ref = { path = "./oco", version = "0.2.0" }
 or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
-reactive_graph = { path = "./reactive_graph", version = "0.2.0" }
-reactive_stores = { path = "./reactive_stores", version = "0.2.0" }
-reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.0" }
-server_fn = { path = "./server_fn", version = "0.8.2" }
-server_fn_macro = { path = "./server_fn_macro", version = "0.8.2" }
-server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.2" }
-tachys = { path = "./tachys", version = "0.2.0" }
+reactive_graph = { path = "./reactive_graph", version = "0.2.4" }
+reactive_stores = { path = "./reactive_stores", version = "0.2.4" }
+reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.4" }
+server_fn = { path = "./server_fn", version = "0.8.4" }
+server_fn_macro = { path = "./server_fn_macro", version = "0.8.4" }
+server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.4" }
+tachys = { path = "./tachys", version = "0.2.5" }
 
 # members deps
 itertools = { default-features = false, version = "0.14.0" }
 convert_case = { default-features = false, version = "0.8.0" }
 serde_json = { default-features = false, version = "1.0.140" }
-trybuild = { default-features = false, version = "1.0.105" }
+trybuild = { default-features = false, version = "1.0.106" }
 typed-builder = { default-features = false, version = "0.21.0" }
 thiserror = { default-features = false, version = "2.0.12" }
 wasm-bindgen = { default-features = false, version = "0.2.100" }
@@ -85,7 +85,6 @@ rstml = { default-features = false, version = "0.12.1" }
 rustc_version = { default-features = false, version = "0.4.1" }
 guardian = { default-features = false, version = "1.3.0" }
 rustc-hash = { default-features = false, version = "2.1.1" }
-once_cell = { default-features = false, version = "1.21.3" }
 actix-web = { default-features = false, version = "4.11.0" }
 tracing = { default-features = false, version = "0.1.41" }
 slotmap = { default-features = false, version = "1.0.7" }
@@ -99,7 +98,7 @@ proc-macro-error2 = { default-features = false, version = "2.0.1" }
 const_format = { default-features = false, version = "0.2.34" }
 gloo-net = { default-features = false, version = "0.6.0" }
 url = { default-features = false, version = "2.5.4" }
-tokio = { default-features = false, version = "1.45.1" }
+tokio = { default-features = false, version = "1.46.1" }
 base64 = { default-features = false, version = "0.22.1" }
 cfg-if = { default-features = false, version = "1.0.0" }
 wasm-bindgen-futures = { default-features = false, version = "0.4.50" }
@@ -109,7 +108,7 @@ serde = { default-features = false, version = "1.0.219" }
 parking_lot = { default-features = false, version = "0.12.4" }
 axum = { default-features = false, version = "0.8.4" }
 serde_qs = { default-features = false, version = "0.15.0" }
-syn = { default-features = false, version = "2.0.101" }
+syn = { default-features = false, version = "2.0.104" }
 xxhash-rust = { default-features = false, version = "0.8.15" }
 paste = { default-features = false, version = "1.0.15" }
 quote = { default-features = false, version = "1.0.40" }
@@ -117,10 +116,10 @@ web-sys = { default-features = false, version = "0.3.77" }
 js-sys = { default-features = false, version = "0.3.77" }
 rand = { default-features = false, version = "0.9.1" }
 serde-lite = { default-features = false, version = "0.5.0" }
-tokio-tungstenite = { default-features = false, version = "0.26.2" }
+tokio-tungstenite = { default-features = false, version = "0.27.0" }
 serial_test = { default-features = false, version = "3.2.0" }
 erased = { default-features = false, version = "0.1.2" }
-glib = { default-features = false, version = "0.20.10" }
+glib = { default-features = false, version = "0.20.12" }
 async-trait = { default-features = false, version = "0.1.88" }
 typed-builder-macro = { default-features = false, version = "0.21.0" }
 linear-map = { default-features = false, version = "1.2.0" }
@@ -128,9 +127,9 @@ anyhow = { default-features = false, version = "1.0.98" }
 walkdir = { default-features = false, version = "2.5.0" }
 actix-ws = { default-features = false, version = "0.3.0" }
 tower-http = { default-features = false, version = "0.6.4" }
-prettyplease = { default-features = false, version = "0.2.33" }
+prettyplease = { default-features = false, version = "0.2.35" }
 inventory = { default-features = false, version = "0.3.20" }
-config = { default-features = false, version = "0.15.11" }
+config = { default-features = false, version = "0.15.13" }
 camino = { default-features = false, version = "1.1.9" }
 ciborium = { default-features = false, version = "0.2.2" }
 multer = { default-features = false, version = "3.1.0" }
@@ -150,12 +149,12 @@ futures-lite = { default-features = false, version = "2.6.0" }
 log = { default-features = false, version = "0.4.27" }
 percent-encoding = { default-features = false, version = "2.3.1" }
 async-executor = { default-features = false, version = "1.13.2" }
-const-str = { default-features = false, version = "0.6.2" }
+const-str = { default-features = false, version = "0.6.3" }
 http-body-util = { default-features = false, version = "0.1.3" }
 hyper = { default-features = false, version = "1.6.0" }
 postcard = { default-features = false, version = "1.1.1" }
 rmp-serde = { default-features = false, version = "1.3.0" }
-reqwest = { default-features = false, version = "0.12.18" }
+reqwest = { default-features = false, version = "0.12.22" }
 tower-layer = { default-features = false, version = "0.3.3" }
 attribute-derive = { default-features = false, version = "0.10.3" }
 insta = { default-features = false, version = "1.43.1" }

26  README.md

@@ -90,35 +90,13 @@ Here are some resources for learning more about Leptos:
 - [API Documentation](https://docs.rs/leptos/latest/leptos/)
 - [Common Bugs](https://github.com/leptos-rs/leptos/tree/main/docs/COMMON_BUGS.md) (and how to fix them!)
 
-## `nightly` Note
-
-Most of the examples assume you’re using `nightly` version of Rust and the `nightly` feature of Leptos. To use `nightly` Rust, you can either set your toolchain globally or on per-project basis.
-
-To set `nightly` as a default toolchain for all projects (and add the ability to compile Rust to WebAssembly, if you haven’t already):
-
-```
-rustup toolchain install nightly
-rustup default nightly
-rustup target add wasm32-unknown-unknown
-```
-
-If you'd like to use `nightly` only in your Leptos project however, add [`rust-toolchain.toml`](https://rust-lang.github.io/rustup/overrides.html#the-toolchain-file) file with the following content:
-
-```toml
-[toolchain]
-channel = "nightly"
-targets = ["wasm32-unknown-unknown"]
-```
-
-The `nightly` feature enables the function call syntax for accessing and setting signals, as opposed to `.get()` and `.set()`. This leads to a consistent mental model in which accessing a reactive value of any kind (a signal, memo, or derived signal) is always represented as a function call. This is only possible with nightly Rust and the `nightly` feature.
-
 ## `cargo-leptos`
 
 [`cargo-leptos`](https://github.com/leptos-rs/cargo-leptos) is a build tool that's designed to make it easy to build apps that run on both the client and the server, with seamless integration. The best way to get started with a real Leptos project right now is to use `cargo-leptos` and our starter templates for [Actix](https://github.com/leptos-rs/start) or [Axum](https://github.com/leptos-rs/start-axum).
 
 ```bash
 cargo install cargo-leptos
 cargo leptos new --git https://github.com/leptos-rs/start
 cargo leptos new --git https://github.com/leptos-rs/start-axum
 cd [your project name]
 cargo leptos watch
 ```
@@ -147,7 +125,7 @@ Yes, I’m sure there are. You can see from the state of our issue tracker over
 
 This may be the big one: “production ready” implies a certain orientation to a library: that you can basically use it, without any special knowledge of its internals or ability to contribute. Everyone has this at some level in their stack: for example I (@gbj) don’t have the capacity or knowledge to contribute to something like `wasm-bindgen` at this point: I simply rely on it to work.
 
-There are several people in the community using Leptos right now for internal apps at work, who have also become significant contributors. I think this is the right level of production use for now. There may be missing features that you need, and you may end up building them! But for internal apps, if you’re willing to build and contribute missing pieces along the way, the framework is definitely usable right now.
+There are several people in the community using Leptos right now for many websites at work, who have also become significant contributors. There may be missing features that you need, and you may end up building them! But, if you're willing to contribute a few missing pieces along the way, the framework is most definitely usable for production applications, especially given the ecosystem of libraries that have sprung up around it.
 
 ### Can I use this for native GUI?

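The note removed above describes the `nightly` feature's function-call syntax for signals. As an illustration (not part of this diff), here is a minimal sketch assuming the Leptos 0.8 `prelude` API:

```rust
use leptos::prelude::*;

/// A minimal counter, shown only to illustrate the two signal syntaxes.
#[component]
pub fn Counter() -> impl IntoView {
    let (count, set_count) = signal(0);

    // Stable Rust: explicit `.get()` / `.set()` on the two signal halves.
    let on_click = move |_| set_count.set(count.get() + 1);

    // With nightly Rust and the `nightly` feature, the same read and write
    // can be written as plain function calls: `set_count(count() + 1)`.
    view! {
        <button on:click=on_click>"Count: " {move || count.get()}</button>
    }
}
```
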
@@ -2,8 +2,6 @@
 name = "benchmarks"
 version = "0.1.0"
 edition = "2021"
-# std::sync::LazyLock is stabilized in Rust version 1.80.0
-rust-version = "1.80.0"
 
 [dependencies]
 l0410 = { package = "leptos", version = "0.4.10", features = [

@@ -19,7 +19,7 @@ leptos_meta = { path = "../../meta" }
 leptos_axum = { path = "../../integrations/axum", optional = true }
 leptos_router = { path = "../../router" }
 serde = { version = "1.0", features = ["derive"] }
-thiserror = "1.0"
+thiserror = "2.0.12"
 tokio = { version = "1.39", features = [
     "rt-multi-thread",
     "macros",

@@ -2,8 +2,6 @@
 name = "counter_isomorphic"
 version = "0.1.0"
 edition = "2021"
-# std::sync::LazyLock is stabilized in Rust version 1.80.0
-rust-version = "1.80.0"
 
 [lib]
 crate-type = ["cdylib", "rlib"]
@@ -23,7 +21,6 @@ leptos = { path = "../../leptos" }
 leptos_actix = { path = "../../integrations/actix", optional = true }
 leptos_router = { path = "../../router" }
 log = "0.4.22"
-once_cell = "1.19"
 gloo-net = { version = "0.6.0" }
 wasm-bindgen = "0.2.93"
 serde = { version = "1.0", features = ["derive"] }

@@ -18,7 +18,7 @@ tower = { version = "0.4.13", optional = true }
 tower-http = { version = "0.5.2", features = ["fs"], optional = true }
 tokio = { version = "1.39", features = ["full"], optional = true }
 http = { version = "1.1" }
-thiserror = "1.0"
+thiserror = "2.0.12"
 wasm-bindgen = "0.2.93"
 
 [features]

@@ -15,7 +15,7 @@ serde = { version = "1.0", features = ["derive"] }
 log = "0.4.22"
 console_log = "1.0"
 console_error_panic_hook = "0.1.7"
-thiserror = "1.0"
+thiserror = "2.0.12"
 tracing = "0.1.40"
 tracing-subscriber = "0.3.18"
 tracing-subscriber-wasm = "0.1.0"

90  examples/regression/Cargo.toml  Normal file

@@ -0,0 +1,90 @@
[package]
name = "regression"
version = "0.1.0"
edition = "2021"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
axum = { version = "0.8.1", optional = true }
console_error_panic_hook = "0.1.7"
console_log = "1.0"
leptos = { path = "../../leptos", features = ["tracing"] }
leptos_meta = { path = "../../meta" }
leptos_axum = { path = "../../integrations/axum", optional = true }
leptos_router = { path = "../../router" }
serde = { version = "1.0", features = ["derive"] }
thiserror = "2.0.12"
tokio = { version = "1.39", features = [ "rt-multi-thread", "macros", "time" ], optional = true }
wasm-bindgen = "0.2.92"

[features]
hydrate = [
  "leptos/hydrate",
]
ssr = [
  "dep:axum",
  "dep:tokio",
  "leptos/ssr",
  "leptos_meta/ssr",
  "dep:leptos_axum",
  "leptos_router/ssr",
]

[profile.release]
panic = "abort"

[profile.wasm-release]
inherits = "release"
opt-level = 'z'
lto = true
codegen-units = 1
panic = "abort"

[package.metadata.cargo-all-features]
denylist = ["axum", "tower", "tower-http", "tokio", "sqlx", "leptos_axum"]
skip_feature_sets = [["ssr", "hydrate"]]

[package.metadata.leptos]
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
output-name = "regression"
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
site-root = "target/site"
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
# Defaults to pkg
site-pkg-dir = "pkg"
# The IP and port (ex: 127.0.0.1:3000) where the server serves the content. Use it in your server setup.
site-addr = "127.0.0.1:3000"
# The port to use for automatic reload monitoring
reload-port = 3001
# [Optional] Command to use when running end2end tests. It will run in the end2end dir.
# [Windows] for non-WSL use "npx.cmd playwright test"
# This binary name can be checked in Powershell with Get-Command npx
end2end-cmd = "cargo make test-ui"
end2end-dir = "e2e"
# The browserlist query used for optimizing the CSS.
browserquery = "defaults"
# Set by cargo-leptos watch when building with that tool. Controls whether autoreload JS will be included in the head
watch = false
# The environment Leptos will run in, usually either "DEV" or "PROD"
env = "DEV"
# The features to use when compiling the bin target
#
# Optional. Can be over-ridden with the command line parameter --bin-features
bin-features = ["ssr"]

# If the --no-default-features flag should be used when compiling the bin target
#
# Optional. Defaults to false.
bin-default-features = false

# The features to use when compiling the lib target
#
# Optional. Can be over-ridden with the command line parameter --lib-features
lib-features = ["hydrate"]

# If the --no-default-features flag should be used when compiling the lib target
#
# Optional. Defaults to false.
lib-default-features = false

21  examples/regression/LICENSE  Normal file

@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 Leptos

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

8  examples/regression/Makefile.toml  Normal file

@@ -0,0 +1,8 @@
extend = [
  { path = "../cargo-make/main.toml" },
  { path = "../cargo-make/cargo-leptos-webdriver-test.toml" },
]

[env]

CLIENT_PROCESS_NAME = "regression"

8  examples/regression/README.md  Normal file

@@ -0,0 +1,8 @@
# Regression Tests

This example functions as a catch-all for all current and future regression
test cases that typically happen at integration.

## Quick Start

Run `cargo leptos watch` to run this example.

BIN  examples/regression/assets/favicon.ico  Normal file
Binary file not shown. (After: Size 15 KiB)

18  examples/regression/e2e/Cargo.toml  Normal file

@@ -0,0 +1,18 @@
[package]
name = "regression_e2e"
version = "0.1.0"
edition = "2021"

[dev-dependencies]
anyhow = "1.0"
async-trait = "0.1.81"
cucumber = "0.21.1"
fantoccini = "0.21.1"
pretty_assertions = "1.4"
serde_json = "1.0"
tokio = { version = "1.39", features = ["macros", "rt-multi-thread", "time"] }
url = "2.5"

[[test]]
name = "app_suite"
harness = false # Allow Cucumber to print output instead of libtest

20  examples/regression/e2e/Makefile.toml  Normal file

@@ -0,0 +1,20 @@
extend = { path = "../../cargo-make/main.toml" }

[tasks.test]
env = { RUN_AUTOMATICALLY = false }
condition = { env_true = ["RUN_AUTOMATICALLY"] }

[tasks.ci]

[tasks.test-ui]
command = "cargo"
args = [
  "test",
  "--test",
  "app_suite",
  "--",
  "--retry",
  "2",
  "--fail-fast",
  "${@}",
]

34  examples/regression/e2e/README.md  Normal file

@@ -0,0 +1,34 @@
# E2E Testing

This example demonstrates e2e testing with Rust using executable requirements.

## Testing Stack

| | Role | Description |
|---|---|---|
| [Cucumber](https://github.com/cucumber-rs/cucumber/tree/main) | Test Runner | Run [Gherkin](https://cucumber.io/docs/gherkin/reference/) specifications as Rust tests |
| [Fantoccini](https://github.com/jonhoo/fantoccini/tree/main) | Browser Client | Interact with web pages through WebDriver |
| [Cargo Leptos](https://github.com/leptos-rs/cargo-leptos) | Build Tool | Compile the example and start the server and end-2-end tests |
| [chromedriver](https://chromedriver.chromium.org/downloads) | WebDriver | Provide WebDriver for Chrome |

## Testing Organization

Testing is organized around what a user can do and see/not see. Test scenarios are grouped by the **user action** and the **object** of that action. This makes it easier to locate and reason about requirements.

Here is a brief overview of how things fit together.

```bash
features
└── {action}_{object}.feature  # Specify test scenarios
tests
├── fixtures
│   ├── action.rs  # Perform a user action (click, type, etc.)
│   ├── check.rs   # Assert what a user can see/not see
│   ├── find.rs    # Query page elements
│   ├── mod.rs
│   └── world
│       ├── action_steps.rs  # Map Gherkin steps to user actions
│       ├── check_steps.rs   # Map Gherkin steps to user expectations
│       └── mod.rs
└── app_suite.rs  # Test main
```

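As an aside (not part of the diff), the mapping this README describes, a Gherkin step bound to a Rust step function through Cucumber, can be sketched minimally. The `DemoWorld` type, step text, and counter below are illustrative only; the real fixtures, which drive a Fantoccini WebDriver client instead, appear later in this compare:

```rust
use cucumber::{then, when, World};

// A minimal, self-contained World just to show the Gherkin-to-Rust mapping.
#[derive(Debug, Default, World)]
pub struct DemoWorld {
    clicks: usize,
}

// Feature file line:  When I select the link test1
// The regex capture group becomes the `text` argument below.
#[when(regex = r"^I select the link (.*)$")]
async fn i_select_the_link(world: &mut DemoWorld, text: String) {
    // The real fixture clicks a link with this text through WebDriver.
    println!("clicking {text}");
    world.clicks += 1;
}

#[then(regex = r"^I see the result is the string (.*)$")]
async fn i_see_the_result(world: &mut DemoWorld, _expected: String) {
    assert!(world.clicks > 0);
}

#[tokio::main]
async fn main() {
    // Runs every scenario found under ./features against the steps above.
    DemoWorld::run("./features").await;
}
```
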
20  examples/regression/e2e/features/issue_4088.feature  Normal file

@@ -0,0 +1,20 @@
@check_issue_4088
Feature: Check that issue 4088 does not reappear

  Scenario: I can see the navbar
    Given I see the app
    And I can access regression test 4088
    Then I see the navbar

  Scenario: The user info is shared via context
    Given I see the app
    And I can access regression test 4088
    When I select the link Class 1
    Then I see the result is the string Assignments for team of user with id 42

  Scenario: The user info is shared via context
    Given I see the app
    And I can access regression test 4088
    When I select the link Class 1
    When I refresh the browser
    Then I see the result is the string Assignments for team of user with id 42

8  examples/regression/e2e/features/pr_4015.feature  Normal file

@@ -0,0 +1,8 @@
@check_pr_4015
Feature: Check that PR 4015 does not regress

  Scenario: The correct text appears
    Given I see the app
    And I can access regression test 4015
    Then I see the result is the string Some(42)

48  examples/regression/e2e/features/pr_4091.feature  Normal file

@@ -0,0 +1,48 @@
@check_pr_4091
Feature: Regression from pull request 4091

  Scenario: Signal for testing should work
    Given I see the app
    And I can access regression test 4091
    When I select the link test1
    Then I see the result is the string Test1

  Scenario: The result returns to empty due to on_cleanup
    Given I see the app
    And I can access regression test 4091
    When I select the following links
      | test1 |
      | 4091 Home |
    Then I see the result is empty

  Scenario: The result does not accumulate due to on_cleanup
    Given I see the app
    And I can access regression test 4091
    When I select the following links
      | test1 |
      | 4091 Home |
      | test1 |
      | 4091 Home |
    Then I see the result is empty

  Scenario: I can see the navbar
    Given I see the app
    And I can access regression test 4091
    Then I see the navbar

  Scenario: If I navigate to home and back, I can still see the navbar
    Given I see the app
    And I can access regression test 4091
    When I select the following links
      | Home |
      | 4091 |
    Then I see the navbar

  Scenario: The signal is not disposed too early
    Given I see the app
    And I can access regression test 4091
    When I select the following links
      | test1 |
      | Home |
      | 4091 |
    Then I see the navbar

30  examples/regression/e2e/tests/app_suite.rs  Normal file

@@ -0,0 +1,30 @@
mod fixtures;

use anyhow::Result;
use cucumber::World;
use fixtures::world::AppWorld;
use std::{ffi::OsStr, fs::read_dir};

#[tokio::main]
async fn main() -> Result<()> {
    // Normally the below is done, but it's now gotten to the point of
    // having a sufficient number of tests where the resource contention
    // of the concurrently running browsers will cause failures on CI.
    // AppWorld::cucumber()
    //     .fail_on_skipped()
    //     .run_and_exit("./features")
    //     .await;

    // Mitigate the issue by manually stepping through each feature,
    // rather than letting cucumber glob them and dispatch all at once.
    for entry in read_dir("./features")? {
        let path = entry?.path();
        if path.extension() == Some(OsStr::new("feature")) {
            AppWorld::cucumber()
                .fail_on_skipped()
                .run_and_exit(path)
                .await;
        }
    }
    Ok(())
}

17  examples/regression/e2e/tests/fixtures/action.rs  vendored  Normal file

@@ -0,0 +1,17 @@
use super::{find, world::HOST};
use anyhow::Result;
use fantoccini::Client;
use std::result::Result::Ok;

pub async fn goto_path(client: &Client, path: &str) -> Result<()> {
    let url = format!("{}{}", HOST, path);
    client.goto(&url).await?;

    Ok(())
}

pub async fn click_link(client: &Client, text: &str) -> Result<()> {
    let link = find::link_with_text(&client, &text).await?;
    link.click().await?;
    Ok(())
}

20  examples/regression/e2e/tests/fixtures/check.rs  vendored  Normal file

@@ -0,0 +1,20 @@
use crate::fixtures::find;
use anyhow::{Ok, Result};
use fantoccini::Client;
use pretty_assertions::assert_eq;

pub async fn result_text_is(
    client: &Client,
    expected_text: &str,
) -> Result<()> {
    let actual = find::text_at_id(client, "result").await?;
    assert_eq!(&actual, expected_text);
    Ok(())
}

pub async fn element_exists(client: &Client, id: &str) -> Result<()> {
    find::element_by_id(client, id)
        .await
        .expect(&format!("could not find element with id `{id}`"));
    Ok(())
}

23  examples/regression/e2e/tests/fixtures/find.rs  vendored  Normal file

@@ -0,0 +1,23 @@
use anyhow::{Ok, Result};
use fantoccini::{elements::Element, Client, Locator};

pub async fn text_at_id(client: &Client, id: &str) -> Result<String> {
    let element = element_by_id(client, id)
        .await
        .expect(format!("no such element with id `{}`", id).as_str());
    let text = element.text().await?;
    Ok(text)
}

pub async fn link_with_text(client: &Client, text: &str) -> Result<Element> {
    let link = client
        .wait()
        .for_element(Locator::LinkText(text))
        .await
        .expect(format!("Link not found by `{}`", text).as_str());
    Ok(link)
}

pub async fn element_by_id(client: &Client, id: &str) -> Result<Element> {
    Ok(client.wait().for_element(Locator::Id(id)).await?)
}

4  examples/regression/e2e/tests/fixtures/mod.rs  vendored  Normal file

@@ -0,0 +1,4 @@
pub mod action;
pub mod check;
pub mod find;
pub mod world;

47  examples/regression/e2e/tests/fixtures/world/action_steps.rs  vendored  Normal file

@@ -0,0 +1,47 @@
use crate::fixtures::{action, world::AppWorld};
use anyhow::{Ok, Result};
use cucumber::{gherkin::Step, given, when};

#[given("I see the app")]
#[when("I open the app")]
async fn i_open_the_app(world: &mut AppWorld) -> Result<()> {
    let client = &world.client;
    action::goto_path(client, "").await?;

    Ok(())
}

#[given(regex = "^I can access regression test (.*)$")]
#[when(regex = "^I select the link (.*)$")]
async fn i_select_the_link(world: &mut AppWorld, text: String) -> Result<()> {
    let client = &world.client;
    action::click_link(client, &text).await?;

    Ok(())
}

#[given(expr = "I select the following links")]
#[when(expr = "I select the following links")]
async fn i_select_the_following_links(
    world: &mut AppWorld,
    step: &Step,
) -> Result<()> {
    let client = &world.client;

    if let Some(table) = step.table.as_ref() {
        for row in table.rows.iter() {
            action::click_link(client, &row[0]).await?;
        }
    }

    Ok(())
}

#[given(regex = "^I (refresh|reload) the (browser|page)$")]
#[when(regex = "^I (refresh|reload) the (browser|page)$")]
async fn i_refresh_the_browser(world: &mut AppWorld) -> Result<()> {
    let client = &world.client;
    client.refresh().await?;

    Ok(())
}

27  examples/regression/e2e/tests/fixtures/world/check_steps.rs  vendored  Normal file

@@ -0,0 +1,27 @@
use crate::fixtures::{check, world::AppWorld};
use anyhow::{Ok, Result};
use cucumber::then;

#[then(regex = r"^I see the result is empty$")]
async fn i_see_the_result_is_empty(world: &mut AppWorld) -> Result<()> {
    let client = &world.client;
    check::result_text_is(client, "").await?;
    Ok(())
}

#[then(regex = r"^I see the result is the string (.*)$")]
async fn i_see_the_result_is_the_string(
    world: &mut AppWorld,
    text: String,
) -> Result<()> {
    let client = &world.client;
    check::result_text_is(client, &text).await?;
    Ok(())
}

#[then(regex = r"^I see the navbar$")]
async fn i_see_the_navbar(world: &mut AppWorld) -> Result<()> {
    let client = &world.client;
    check::element_exists(client, "nav").await?;
    Ok(())
}

39  examples/regression/e2e/tests/fixtures/world/mod.rs  vendored  Normal file

@@ -0,0 +1,39 @@
pub mod action_steps;
pub mod check_steps;

use anyhow::Result;
use cucumber::World;
use fantoccini::{
    error::NewSessionError, wd::Capabilities, Client, ClientBuilder,
};

pub const HOST: &str = "http://127.0.0.1:3000";

#[derive(Debug, World)]
#[world(init = Self::new)]
pub struct AppWorld {
    pub client: Client,
}

impl AppWorld {
    async fn new() -> Result<Self, anyhow::Error> {
        let webdriver_client = build_client().await?;

        Ok(Self {
            client: webdriver_client,
        })
    }
}

async fn build_client() -> Result<Client, NewSessionError> {
    let mut cap = Capabilities::new();
    let arg = serde_json::from_str("{\"args\": [\"-headless\"]}").unwrap();
    cap.insert("goog:chromeOptions".to_string(), arg);

    let client = ClientBuilder::native()
        .capabilities(cap)
        .connect("http://localhost:4444")
        .await?;

    Ok(client)
}

65  examples/regression/src/app.rs  Normal file

@@ -0,0 +1,65 @@
use crate::{issue_4088::Routes4088, pr_4015::Routes4015, pr_4091::Routes4091};
use leptos::prelude::*;
use leptos_meta::{MetaTags, *};
use leptos_router::{
    components::{Route, Router, Routes},
    path,
};

pub fn shell(options: LeptosOptions) -> impl IntoView {
    view! {
        <!DOCTYPE html>
        <html lang="en">
            <head>
                <meta charset="utf-8"/>
                <meta name="viewport" content="width=device-width, initial-scale=1"/>
                <AutoReload options=options.clone()/>
                <HydrationScripts options/>
                <MetaTags/>
            </head>
            <body>
                <App/>
            </body>
        </html>
    }
}

#[component]
pub fn App() -> impl IntoView {
    provide_meta_context();
    let fallback = || view! { "Page not found." }.into_view();
    view! {
        <Stylesheet id="leptos" href="/pkg/regression.css"/>
        <Router>
            <main>
                <Routes fallback>
                    <Route path=path!("") view=HomePage/>
                    <Routes4091/>
                    <Routes4015/>
                    <Routes4088/>
                </Routes>
            </main>
        </Router>
    }
}

#[server]
async fn server_call() -> Result<(), ServerFnError> {
    tokio::time::sleep(std::time::Duration::from_millis(1)).await;
    Ok(())
}

#[component]
fn HomePage() -> impl IntoView {
    view! {
        <Title text="Regression Tests"/>
        <h1>"Listing of regression tests"</h1>
        <nav>
            <ul>
                <li><a href="/4091/">"4091"</a></li>
                <li><a href="/4015/">"4015"</a></li>
                <li><a href="/4088/">"4088"</a></li>
            </ul>
        </nav>
    }
}

119  examples/regression/src/issue_4088.rs  Normal file

@@ -0,0 +1,119 @@
use leptos::{either::Either, prelude::*};
#[allow(unused_imports)]
use leptos_router::{
    components::{Outlet, ParentRoute, Redirect, Route},
    path, MatchNestedRoutes, NavigateOptions,
};
use serde::{Deserialize, Serialize};

#[component]
pub fn Routes4088() -> impl MatchNestedRoutes + Clone {
    view! {
        <ParentRoute path=path!("4088") view=|| view!{ <LoggedIn/> }>
            <ParentRoute path=path!("") view=||view!{<AssignmentsSelector/>}>
                <Route path=path!("/:team_id") view=||view!{<AssignmentsForTeam/>} />
                <Route path=path!("") view=||view!{ <p>No class selected</p> }/>
            </ParentRoute>
        </ParentRoute>
    }
    .into_inner()
}

#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct UserInfo {
    pub id: usize,
}

#[server]
pub async fn get_user_info() -> Result<Option<UserInfo>, ServerFnError> {
    Ok(Some(UserInfo { id: 42 }))
}

#[component]
pub fn LoggedIn() -> impl IntoView {
    let user_info_resource =
        Resource::new(|| (), move |_| async { get_user_info().await });

    view! {
        <Transition fallback=move || view!{
            "loading"
        }
        >
            {move || {
                user_info_resource.get()
                    .map(|a|
                        match a {
                            Ok(Some(a)) => Either::Left(view! {
                                <LoggedInContent user_info={a} />
                            }),
                            _ => Either::Right(view!{
                                <Redirect path="/not_logged_in"/>
                            })
                    })
            }}
        </Transition>
    }
}

#[component]
/// Component which provides UserInfo and renders its child.
/// Can also contain some code to check for specific situations (e.g. privacy policies accepted or not? redirect if needed...)
pub fn LoggedInContent(user_info: UserInfo) -> impl IntoView {
    provide_context(user_info.clone());

    if user_info.id == 42 {
        Either::Left(Outlet())
    } else {
        Either::Right(
            view! { <Redirect path="/somewhere" options={NavigateOptions::default()}/> },
        )
    }
}

#[component]
/// This component also uses Outlet (so nested Outlet)
fn AssignmentsSelector() -> impl IntoView {
    let user_info = use_context::<UserInfo>().expect("user info not provided");

    view! {
        <p>"Assignments for user with ID: "{user_info.id}</p>
        <ul id="nav">
            <li><a href="/4088/1">"Class 1"</a></li>
            <li><a href="/4088/2">"Class 2"</a></li>
            <li><a href="/4088/3">"Class 3"</a></li>
        </ul>

        <Outlet />
    }
}

#[component]
fn AssignmentsForTeam() -> impl IntoView {
    // THIS FAILS -> Because of the nested outlet in LoggedInContent > AssignmentsSelector?
    // It did not fail when LoggedIn did not use a resource and transition (but a hardcoded UserInfo in the component)
    let user_info = use_context::<UserInfo>().expect("user info not provided");

    let items = vec!["Assignment 1", "Assignment 2", "Assignment 3"];
    view! {
        <p id="result">"Assignments for team of user with id " {user_info.id}</p>
        <ul>
            {
                items.into_iter().map(|item| {
                    view! {
                        <Assignment name=item.to_string() />
                    }
                }).collect_view()
            }
        </ul>
    }
}

#[component]
fn Assignment(name: String) -> impl IntoView {
    let user_info = use_context::<UserInfo>().expect("user info not provided");

    view! {
        <li>{name}" "{user_info.id}</li>
    }
}

12  examples/regression/src/lib.rs  Normal file

@@ -0,0 +1,12 @@
pub mod app;
mod issue_4088;
mod pr_4015;
mod pr_4091;

#[cfg(feature = "hydrate")]
#[wasm_bindgen::prelude::wasm_bindgen]
pub fn hydrate() {
    use app::*;
    console_error_panic_hook::set_once();
    leptos::mount::hydrate_body(App);
}

37  examples/regression/src/main.rs  Normal file

@@ -0,0 +1,37 @@
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
    use axum::Router;
    use leptos::prelude::*;
    use leptos_axum::{generate_route_list, LeptosRoutes};
    use regression::app::{shell, App};

    let conf = get_configuration(None).unwrap();
    let addr = conf.leptos_options.site_addr;
    let leptos_options = conf.leptos_options;
    // Generate the list of routes in your Leptos App
    let routes = generate_route_list(App);

    let app = Router::new()
        .leptos_routes(&leptos_options, routes, {
            let leptos_options = leptos_options.clone();
            move || shell(leptos_options.clone())
        })
        .fallback(leptos_axum::file_and_error_handler(shell))
        .with_state(leptos_options);

    // run our app with hyper
    // `axum::Server` is a re-export of `hyper::Server`
    println!("listening on http://{}", &addr);
    let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
    axum::serve(listener, app.into_make_service())
        .await
        .unwrap();
}

#[cfg(not(feature = "ssr"))]
pub fn main() {
    // no client-side main function
    // unless we want this to work with e.g., Trunk for pure client-side testing
    // see lib.rs for hydration function instead
}

29  examples/regression/src/pr_4015.rs  Normal file

@@ -0,0 +1,29 @@
use leptos::{context::Provider, prelude::*};
use leptos_router::{
    components::{ParentRoute, Route},
    nested_router::Outlet,
    path,
};

#[component]
pub fn Routes4015() -> impl leptos_router::MatchNestedRoutes + Clone {
    view! {
        <ParentRoute path=path!("4015") view=|| view! {
            <Provider value=42i32>
                <Outlet/>
            </Provider>
        }>
            <Route path=path!("") view=Child/>
        </ParentRoute>
    }
    .into_inner()
}

#[component]
fn Child() -> impl IntoView {
    let value = use_context::<i32>();

    view! {
        <p id="result">{format!("{value:?}")}</p>
    }
}

68  examples/regression/src/pr_4091.rs  Normal file

@@ -0,0 +1,68 @@
use leptos::{context::Provider, prelude::*};
use leptos_router::{
    components::{ParentRoute, Route, A},
    nested_router::Outlet,
    path,
};

// FIXME This should be a set rather than a naive vec for push and pop, as
// it may be possible for an unexpected token to be popped/pushed on multi-level
// navigation. For basic naive tests it should be Fine(TM).
#[derive(Clone)]
struct Expectations(Vec<&'static str>);

#[component]
pub fn Routes4091() -> impl leptos_router::MatchNestedRoutes + Clone {
    view! {
        <ParentRoute path=path!("4091") view=Container>
            <Route path=path!("") view=Root/>
            <Route path=path!("test1") view=Test1/>
        </ParentRoute>
    }
    .into_inner()
}

#[component]
fn Container() -> impl IntoView {
    let rw_signal = RwSignal::new(Expectations(Vec::new()));
    provide_context(rw_signal);

    view! {
        <nav id="nav">
            <ul>
                <li><A href="/">"Home"</A></li>
                <li><A href="./">"4091 Home"</A></li>
                <li><A href="test1">"test1"</A></li>
            </ul>
        </nav>
        <div id="result">{move || {
            rw_signal.with(|ex| ex.0.iter().fold(String::new(), |a, b| a + b + " "))
        }}</div>
        <Provider value=rw_signal>
            <Outlet/>
        </Provider>
    }
}

#[component]
fn Root() -> impl IntoView {
    view! {
        <div>"This is Root"</div>
    }
}

#[component]
fn Test1() -> impl IntoView {
    let signal = expect_context::<RwSignal<Expectations>>();

    on_cleanup(move || {
        signal.update(|ex| {
            ex.0.pop();
        });
    });

    view! {
        {move || signal.update(|ex| ex.0.push("Test1"))}
        <div>"This is Test1"</div>
    }
}

3  examples/regression/style/main.scss  Normal file

@@ -0,0 +1,3 @@
body {
    font-family: sans-serif;
}

@@ -29,7 +29,7 @@ tower-http = { version = "0.6.2", features = [
     "trace",
 ], optional = true }
 tokio = { version = "1.39", features = ["full"], optional = true }
-thiserror = "2.0.11"
+thiserror = "2.0.12"
 wasm-bindgen = "0.2.93"
 serde_toml = "0.0.1"
 toml = "0.8.19"
@@ -38,7 +38,6 @@ strum = { version = "0.27.1", features = ["strum_macros", "derive"] }
 notify = { version = "8.0", optional = true }
 pin-project-lite = "0.2.14"
 dashmap = { version = "6.0", optional = true }
-once_cell = { version = "1.19", optional = true }
 async-broadcast = { version = "0.7.1", optional = true }
 bytecheck = "0.8.0"
 rkyv = { version = "0.8.8" }
@@ -54,7 +53,6 @@ ssr = [
     "dep:leptos_axum",
     "dep:notify",
     "dep:dashmap",
-    "dep:once_cell",
     "dep:async-broadcast",
 ]

@@ -424,7 +424,7 @@ pub fn FileUploadWithProgress() -> impl IntoView {
     use async_broadcast::{broadcast, Receiver, Sender};
     use dashmap::DashMap;
     use futures::Stream;
-    use once_cell::sync::Lazy;
+    use std::sync::LazyLock;
 
     struct File {
         total: usize,
@@ -432,7 +432,8 @@ pub fn FileUploadWithProgress() -> impl IntoView {
         rx: Receiver<usize>,
     }
 
-    static FILES: Lazy<DashMap<String, File>> = Lazy::new(DashMap::new);
+    static FILES: LazyLock<DashMap<String, File>> =
+        LazyLock::new(DashMap::new);
 
     pub async fn add_chunk(filename: &str, len: usize) {
         println!("[{filename}]\tadding {len}");

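Several hunks in this compare make the same swap from `once_cell::sync::Lazy` to the standard library's `LazyLock`, which has been stable since Rust 1.80 and is covered by the workspace's `rust-version = "1.88"` bump. A minimal, self-contained sketch of the pattern, with illustrative names not taken from the diff:

```rust
use std::{collections::HashMap, sync::LazyLock};

// Before (once_cell): static CACHE: Lazy<HashMap<&str, usize>> = Lazy::new(HashMap::new);
// After: the std equivalent needs no extra dependency.
static CACHE: LazyLock<HashMap<&'static str, usize>> =
    LazyLock::new(HashMap::new);

fn main() {
    // The value is initialized on first dereference, just like once_cell's Lazy.
    println!("cache starts with {} entries", CACHE.len());
}
```
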
@@ -2,8 +2,6 @@
 name = "ssr_modes"
 version = "0.1.0"
 edition = "2021"
-# std::sync::LazyLock is stabilized in Rust version 1.80.0
-rust-version = "1.80.0"
 
 [lib]
 crate-type = ["cdylib", "rlib"]
@@ -19,7 +17,7 @@ leptos_actix = { path = "../../integrations/actix", optional = true }
 leptos_router = { path = "../../router" }
 log = "0.4.22"
 serde = { version = "1.0", features = ["derive"] }
-thiserror = "1.0"
+thiserror = "2.0.12"
 tokio = { version = "1.39", features = ["time"] }
 wasm-bindgen = "0.2.93"

@@ -2,8 +2,6 @@
 name = "ssr_modes_axum"
 version = "0.1.0"
 edition = "2021"
-# std::sync::LazyLock is stabilized in Rust version 1.80.0
-rust-version = "1.80.0"
 
 [lib]
 crate-type = ["cdylib", "rlib"]
@@ -19,7 +17,7 @@ leptos_axum = { path = "../../integrations/axum", optional = true }
 leptos_router = { path = "../../router" }
 log = "0.4.22"
 serde = { version = "1.0", features = ["derive"] }
-thiserror = "1.0"
+thiserror = "2.0.12"
 axum = { version = "0.8.1", optional = true }
 tower = { version = "0.4.13", optional = true }
 tower-http = { version = "0.5.2", features = ["fs"], optional = true }

@@ -17,7 +17,7 @@ leptos_axum = { path = "../../integrations/axum", optional = true }
 leptos_router = { path = "../../router" }
 log = "0.4.22"
 serde = { version = "1.0", features = ["derive"] }
-thiserror = "1.0"
+thiserror = "2.0.12"
 axum = { version = "0.8.1", optional = true }
 tower = { version = "0.4.13", optional = true }
 tower-http = { version = "0.5.2", features = ["fs"], optional = true }

@@ -159,7 +159,7 @@ fn TodoRow(
 
     view! {
         <li style:text-decoration=move || {
-            status.done().then_some("line-through").unwrap_or_default()
+            if status.done() { "line-through" } else { Default::default() }
         }>
 
             <p

@@ -20,7 +20,7 @@ tokio = { version = "1.39", features = [
 tower = { version = "0.4.13", optional = true }
 tower-http = { version = "0.5.2", features = ["fs"], optional = true }
 wasm-bindgen = "0.2.93"
-thiserror = "1.0"
+thiserror = "2.0.12"
 tracing = { version = "0.1.40", optional = true }
 http = "1.1"

@@ -20,7 +20,7 @@ leptos_actix = { path = "../../integrations/actix", optional = true }
 log = "0.4.22"
 simple_logger = "5.0"
 gloo = { git = "https://github.com/rustwasm/gloo" }
-sqlx = { version = "0.8.0", features = [
+sqlx = { version = "0.8.6", features = [
     "runtime-tokio-rustls",
     "sqlite",
 ], optional = true }
@@ -44,12 +44,12 @@ denylist = ["actix-files", "actix-web", "leptos_actix", "sqlx"]
 skip_feature_sets = [["csr", "ssr"], ["csr", "hydrate"], ["ssr", "hydrate"], []]
 
 [package.metadata.leptos]
-# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
+# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
 output-name = "todo_app_sqlite"
 # The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
 site-root = "target/site"
 # The site-root relative folder where all compiled output (JS, WASM and CSS) is written
-# Defaults to pkg
+# Defaults to pkg
 site-pkg-dir = "pkg"
 # [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
 style-file = "./style.css"

@@ -20,11 +20,11 @@ axum = { version = "0.8.1", optional = true }
 tower = { version = "0.4.13", optional = true }
 tower-http = { version = "0.5.2", features = ["fs"], optional = true }
 tokio = { version = "1.39", features = ["full"], optional = true }
-sqlx = { version = "0.8.0", features = [
+sqlx = { version = "0.8.6", features = [
     "runtime-tokio-rustls",
     "sqlite",
 ], optional = true }
-thiserror = "1.0"
+thiserror = "2.0.12"
 wasm-bindgen = "0.2.93"
 
 [features]

@@ -20,11 +20,11 @@ tower = { version = "0.5.1", features = ["util"], optional = true }
 tower-http = { version = "0.6.1", features = ["fs"], optional = true }
 tokio = { version = "1.39", features = ["full"], optional = true }
 http = { version = "1.1" }
-sqlx = { version = "0.8.0", features = [
+sqlx = { version = "0.8.6", features = [
     "runtime-tokio-rustls",
     "sqlite",
 ], optional = true }
-thiserror = "2.0"
+thiserror = "2.0.12"
 wasm-bindgen = "0.2.93"
 
 [features]

@@ -17,7 +17,7 @@ simple_logger = "5.0"
 serde = { version = "1.0", features = ["derive"] }
 axum = { version = "0.8.1", optional = true }
 tokio = { version = "1.39", features = ["full"], optional = true }
-thiserror = "2.0"
+thiserror = "2.0.12"
 wasm-bindgen = "0.2.100"
 
 [features]

@@ -16,7 +16,6 @@ futures = { workspace = true, default-features = true }
 serde = { features = ["derive"] , workspace = true, default-features = true }
 wasm-bindgen = { workspace = true, optional = true , default-features = true }
 js-sys = { optional = true , workspace = true, default-features = true }
-once_cell = { workspace = true, default-features = true }
 pin-project-lite = { workspace = true, default-features = true }
 
 [features]

@@ -7,10 +7,12 @@ use super::{SerializedDataId, SharedContext};
|
||||
use crate::{PinnedFuture, PinnedStream};
|
||||
use core::fmt::Debug;
|
||||
use js_sys::Array;
|
||||
use once_cell::sync::Lazy;
|
||||
use std::{
|
||||
fmt::Display,
|
||||
sync::atomic::{AtomicBool, AtomicUsize, Ordering},
|
||||
sync::{
|
||||
atomic::{AtomicBool, AtomicUsize, Ordering},
|
||||
LazyLock,
|
||||
},
|
||||
};
|
||||
use throw_error::{Error, ErrorId};
|
||||
use wasm_bindgen::{prelude::wasm_bindgen, JsCast};
|
||||
@@ -79,8 +81,8 @@ pub struct HydrateSharedContext {
id: AtomicUsize,
is_hydrating: AtomicBool,
during_hydration: AtomicBool,
errors: Lazy<Vec<(SerializedDataId, ErrorId, Error)>>,
incomplete: Lazy<Vec<SerializedDataId>>,
errors: LazyLock<Vec<(SerializedDataId, ErrorId, Error)>>,
incomplete: LazyLock<Vec<SerializedDataId>>,
}

impl HydrateSharedContext {
@@ -90,8 +92,8 @@ impl HydrateSharedContext {
id: AtomicUsize::new(0),
is_hydrating: AtomicBool::new(true),
during_hydration: AtomicBool::new(true),
errors: Lazy::new(serialized_errors),
incomplete: Lazy::new(incomplete_chunks),
errors: LazyLock::new(serialized_errors),
incomplete: LazyLock::new(incomplete_chunks),
}
}

@@ -104,8 +106,8 @@ impl HydrateSharedContext {
id: AtomicUsize::new(0),
is_hydrating: AtomicBool::new(false),
during_hydration: AtomicBool::new(true),
errors: Lazy::new(serialized_errors),
incomplete: Lazy::new(incomplete_chunks),
errors: LazyLock::new(serialized_errors),
incomplete: LazyLock::new(incomplete_chunks),
}
}
}

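These hunks swap `once_cell::sync::Lazy` for the standard library's `LazyLock`. A minimal standalone sketch of the same migration, assuming Rust 1.80+ (where `std::sync::LazyLock` is stable):

```rust
// Before: once_cell::sync::Lazy; after: std::sync::LazyLock.
// LazyLock is a drop-in replacement for Lazy in static initializers.
use std::sync::LazyLock;

static ERRORS: LazyLock<Vec<String>> = LazyLock::new(Vec::new);

fn main() {
    // The value is initialized on first dereference, exactly like Lazy.
    println!("{} errors recorded", ERRORS.len());
}
```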
@@ -11,7 +11,7 @@ edition.workspace = true
[dependencies]
actix-http = { workspace = true, default-features = true }
actix-files = { workspace = true, default-features = true }
actix-web = { workspace = true, default-features = true }
actix-web = { workspace = true, default-features = false }
futures = { workspace = true, default-features = true }
any_spawner = { workspace = true, features = ["tokio"] }
hydration_context = { workspace = true }
@@ -20,7 +20,7 @@ leptos_integration_utils = { workspace = true }
leptos_macro = { workspace = true, features = ["actix"] }
leptos_meta = { workspace = true, features = ["nonce"] }
leptos_router = { workspace = true, features = ["ssr"] }
server_fn = { workspace = true, features = ["actix"] }
server_fn = { workspace = true, features = ["actix-no-default"] }
tachys = { workspace = true }
serde_json = { workspace = true , default-features = true }
parking_lot = { workspace = true, default-features = true }
@@ -28,12 +28,13 @@ tracing = { optional = true , workspace = true, default-features = true }
tokio = { features = ["rt", "fs"] , workspace = true, default-features = true }
send_wrapper = { workspace = true, default-features = true }
dashmap = { workspace = true, default-features = true }
once_cell = { workspace = true, default-features = true }

[package.metadata.docs.rs]
rustdoc-args = ["--generate-link-to-definition"]

[features]
default = ["actix-default"]
actix-default = ["actix-web/default"]
islands-router = ["tachys/islands"]
tracing = ["dep:tracing"]

@@ -38,7 +38,6 @@ use leptos_router::{
static_routes::{RegenerationFn, ResolvedStaticPath},
ExpandOptionals, Method, PathSegment, RouteList, RouteListing, SsrMode,
};
use once_cell::sync::Lazy;
use parking_lot::RwLock;
use send_wrapper::SendWrapper;
use server_fn::{
@@ -51,7 +50,7 @@ use std::{
future::Future,
ops::{Deref, DerefMut},
path::Path,
sync::Arc,
sync::{Arc, LazyLock},
};

/// This struct lets you define headers and override the status of the Response from an Element or a Server Function
@@ -283,6 +282,7 @@ pub fn redirect(path: &str) {
/// // call ServerFn::register() for each of the server functions you've defined
/// }
///
/// # #[cfg(feature = "default")]
/// #[actix_web::main]
/// async fn main() -> std::io::Result<()> {
/// // make sure you actually register your server functions
@@ -298,6 +298,8 @@ pub fn redirect(path: &str) {
/// .run()
/// .await
/// }
/// # #[cfg(not(feature = "default"))]
/// # fn main() {}
/// ```
///
/// ## Provided Context Types
@@ -443,6 +445,7 @@ pub fn handle_server_fns_with_context(
/// view! { <main>"Hello, world!"</main> }
/// }
///
/// # #[cfg(feature = "default")]
/// #[actix_web::main]
/// async fn main() -> std::io::Result<()> {
/// let conf = get_configuration(Some("Cargo.toml")).unwrap();
@@ -462,6 +465,8 @@ pub fn handle_server_fns_with_context(
/// .run()
/// .await
/// }
/// # #[cfg(not(feature = "default"))]
/// # fn main() {}
/// ```
///
/// ## Provided Context Types
@@ -500,6 +505,7 @@ where
/// view! { <main>"Hello, world!"</main> }
/// }
///
/// # #[cfg(feature = "default")]
/// #[actix_web::main]
/// async fn main() -> std::io::Result<()> {
/// let conf = get_configuration(Some("Cargo.toml")).unwrap();
@@ -522,6 +528,9 @@ where
/// .run()
/// .await
/// }
///
/// # #[cfg(not(feature = "default"))]
/// # fn main() {}
/// ```
///
/// ## Provided Context Types
@@ -558,6 +567,7 @@ where
/// view! { <main>"Hello, world!"</main> }
/// }
///
/// # #[cfg(feature = "default")]
/// #[actix_web::main]
/// async fn main() -> std::io::Result<()> {
/// let conf = get_configuration(Some("Cargo.toml")).unwrap();
@@ -577,6 +587,8 @@ where
/// .run()
/// .await
/// }
/// # #[cfg(not(feature = "default"))]
/// # fn main() {}
/// ```
///
/// ## Provided Context Types
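The doc-example hunks above gate each example's `main` on the integration's `default` feature, with a hidden stub so the doc test still compiles when `actix-web`'s default features are disabled. A minimal sketch of that doc-test pattern (illustrative item, not the actual leptos_actix docs):

```rust
/// ```
/// # #[cfg(feature = "default")]
/// fn main() {
///     // the real example body, compiled only when `default` is enabled
/// }
/// # #[cfg(not(feature = "default"))]
/// # fn main() {} // hidden stub keeps the doc test compiling otherwise
/// ```
pub fn documented_item() {}
```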
@@ -1210,8 +1222,8 @@ impl StaticRouteGenerator {
}
}

static STATIC_HEADERS: Lazy<DashMap<String, ResponseOptions>> =
Lazy::new(DashMap::new);
static STATIC_HEADERS: LazyLock<DashMap<String, ResponseOptions>> =
LazyLock::new(DashMap::new);

fn was_404(owner: &Owner) -> bool {
let resp = owner.with(|| expect_context::<ResponseOptions>());

@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
license = "MIT"
repository = "https://github.com/leptos-rs/leptos"
description = "Axum integrations for the Leptos web framework."
version = "0.8.2"
version = { workspace = true }
rust-version.workspace = true
edition.workspace = true

@@ -13,7 +13,7 @@ any_spawner = { workspace = true, features = ["tokio"] }
hydration_context = { workspace = true }
axum = { default-features = false, features = [
"matched-path",
] , workspace = true }
], workspace = true }
dashmap = { workspace = true, default-features = true }
futures = { workspace = true, default-features = true }
leptos = { workspace = true, features = ["nonce", "ssr"] }
@@ -23,16 +23,18 @@ leptos_meta = { workspace = true, features = ["ssr", "nonce"] }
leptos_router = { workspace = true, features = ["ssr"] }
leptos_integration_utils = { workspace = true }
tachys = { workspace = true }
once_cell = { workspace = true, default-features = true }
parking_lot = { workspace = true, default-features = true }
tokio = { default-features = false , workspace = true }
tower = { features = ["util"] , workspace = true, default-features = true }
tokio = { default-features = false, workspace = true }
tower = { features = ["util"], workspace = true, default-features = true }
tower-http = { workspace = true, default-features = true }
tracing = { optional = true , workspace = true, default-features = true }
tracing = { optional = true, workspace = true, default-features = true }

[dev-dependencies]
axum = { workspace = true, default-features = true }
tokio = { features = ["net", "rt-multi-thread"] , workspace = true, default-features = true }
tokio = { features = [
"net",
"rt-multi-thread",
], workspace = true, default-features = true }

[features]
wasm = []

@@ -69,12 +69,12 @@ use leptos_router::{
static_routes::RegenerationFn, ExpandOptionals, PathSegment, RouteList,
RouteListing, SsrMode,
};
#[cfg(feature = "default")]
use once_cell::sync::Lazy;
use parking_lot::RwLock;
use server_fn::{error::ServerFnErrorErr, redirect::REDIRECT_HEADER};
#[cfg(feature = "default")]
use std::path::Path;
#[cfg(feature = "default")]
use std::sync::LazyLock;
use std::{collections::HashSet, fmt::Debug, io, pin::Pin, sync::Arc};
#[cfg(feature = "default")]
use tower::util::ServiceExt;
@@ -1522,8 +1522,8 @@ impl StaticRouteGenerator {
}

#[cfg(feature = "default")]
static STATIC_HEADERS: Lazy<DashMap<String, ResponseOptions>> =
Lazy::new(DashMap::new);
static STATIC_HEADERS: LazyLock<DashMap<String, ResponseOptions>> =
LazyLock::new(DashMap::new);

#[cfg(feature = "default")]
fn was_404(owner: &Owner) -> bool {

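Several imports and the `STATIC_HEADERS` static above are now compiled only when the crate's `default` feature is active. A rough sketch of that feature-gating pattern (illustrative names, not the real leptos_axum items):

```rust
// Items that need the optional machinery exist only when the feature is on;
// callers get a cheap fallback otherwise.
#[cfg(feature = "default")]
use std::sync::LazyLock;

#[cfg(feature = "default")]
static CACHE: LazyLock<Vec<String>> = LazyLock::new(Vec::new);

#[cfg(feature = "default")]
fn cached_entries() -> usize {
    CACHE.len()
}

#[cfg(not(feature = "default"))]
fn cached_entries() -> usize {
    0 // feature disabled: no cache is compiled in
}

fn main() {
    println!("{} cached entries", cached_entries());
}
```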
@@ -90,6 +90,7 @@ impl<T: RenderHtml> RenderHtml for View<T> {
|
||||
type Owned = View<T::Owned>;
|
||||
|
||||
const MIN_LENGTH: usize = <T as RenderHtml>::MIN_LENGTH;
|
||||
const EXISTS: bool = <T as RenderHtml>::EXISTS;
|
||||
|
||||
async fn resolve(self) -> Self::AsyncOutput {
|
||||
self.inner.resolve().await
|
||||
@@ -107,9 +108,14 @@ impl<T: RenderHtml> RenderHtml for View<T> {
|
||||
mark_branches: bool,
|
||||
extra_attrs: Vec<AnyAttribute>,
|
||||
) {
|
||||
#[cfg(all(debug_assertions, feature = "nightly", rustc_nightly))]
|
||||
let vm = self.view_marker.to_owned();
|
||||
#[cfg(all(debug_assertions, feature = "nightly", rustc_nightly))]
|
||||
#[cfg(debug_assertions)]
|
||||
let vm = if option_env!("LEPTOS_WATCH").is_some() {
|
||||
self.view_marker.to_owned()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
if let Some(vm) = vm.as_ref() {
|
||||
buf.push_str(&format!("<!--hot-reload|{vm}|open-->"));
|
||||
}
|
||||
@@ -122,7 +128,7 @@ impl<T: RenderHtml> RenderHtml for View<T> {
|
||||
extra_attrs,
|
||||
);
|
||||
|
||||
#[cfg(all(debug_assertions, feature = "nightly", rustc_nightly))]
|
||||
#[cfg(debug_assertions)]
|
||||
if let Some(vm) = vm.as_ref() {
|
||||
buf.push_str(&format!("<!--hot-reload|{vm}|close-->"));
|
||||
}
|
||||
@@ -138,9 +144,14 @@ impl<T: RenderHtml> RenderHtml for View<T> {
|
||||
) where
|
||||
Self: Sized,
|
||||
{
|
||||
#[cfg(all(debug_assertions, feature = "nightly", rustc_nightly))]
|
||||
let vm = self.view_marker.to_owned();
|
||||
#[cfg(all(debug_assertions, feature = "nightly", rustc_nightly))]
|
||||
#[cfg(debug_assertions)]
|
||||
let vm = if option_env!("LEPTOS_WATCH").is_some() {
|
||||
self.view_marker.to_owned()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
if let Some(vm) = vm.as_ref() {
|
||||
buf.push_sync(&format!("<!--hot-reload|{vm}|open-->"));
|
||||
}
|
||||
@@ -153,7 +164,7 @@ impl<T: RenderHtml> RenderHtml for View<T> {
|
||||
extra_attrs,
|
||||
);
|
||||
|
||||
#[cfg(all(debug_assertions, feature = "nightly", rustc_nightly))]
|
||||
#[cfg(debug_assertions)]
|
||||
if let Some(vm) = vm.as_ref() {
|
||||
buf.push_sync(&format!("<!--hot-reload|{vm}|close-->"));
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use std::sync::Arc;
use tachys::prelude::IntoAttributeValue;

/// Describes a value that is either a static or a reactive string, i.e.,
/// a [`String`], a [`&str`], or a reactive `Fn() -> String`.
/// a [`String`], a [`&str`], a `Signal` or a reactive `Fn() -> String`.
#[derive(Clone)]
pub struct TextProp(Arc<dyn Fn() -> Oco<'static, str> + Send + Sync>);

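The doc change above reflects new `From` impls (added in the hunks that follow) that let reactive types convert into `TextProp`. A rough usage sketch, assuming the leptos prelude and the `leptos::text_prop::TextProp` path; exact paths may differ:

```rust
use leptos::prelude::*;
use leptos::text_prop::TextProp; // assumed module path

// A prop typed as `impl Into<TextProp>` can now accept signals directly,
// not just strings and closures.
fn label(text: impl Into<TextProp>) -> TextProp {
    text.into()
}

fn demo() {
    let from_str = label("static");
    let from_string = label(String::from("owned"));
    let from_closure = label(|| "computed".to_string());
    let (count, _set_count) = signal(0);
    let from_signal = label(Signal::derive(move || count.get().to_string()));
    // `.get()` resolves each TextProp to its current string value.
    leptos::logging::log!(
        "{} {} {} {}",
        from_str.get(),
        from_string.get(),
        from_closure.get(),
        from_signal.get()
    );
}
```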
@@ -82,3 +82,93 @@ impl IntoAttributeValue for TextProp {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! textprop_reactive {
|
||||
($name:ident, <$($gen:ident),*>, $v:ty, $( $where_clause:tt )*) =>
|
||||
{
|
||||
#[allow(deprecated)]
|
||||
impl<$($gen),*> From<$name<$($gen),*>> for TextProp
|
||||
where
|
||||
$v: Into<Oco<'static, str>> + Clone + Send + Sync + 'static,
|
||||
$($where_clause)*
|
||||
{
|
||||
#[inline(always)]
|
||||
fn from(s: $name<$($gen),*>) -> Self {
|
||||
TextProp(Arc::new(move || s.get().into()))
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
mod stable {
|
||||
use super::TextProp;
|
||||
use oco_ref::Oco;
|
||||
#[allow(deprecated)]
|
||||
use reactive_graph::wrappers::read::MaybeSignal;
|
||||
use reactive_graph::{
|
||||
computed::{ArcMemo, Memo},
|
||||
owner::Storage,
|
||||
signal::{ArcReadSignal, ArcRwSignal, ReadSignal, RwSignal},
|
||||
traits::Get,
|
||||
wrappers::read::{ArcSignal, Signal},
|
||||
};
|
||||
use std::sync::Arc;
|
||||
|
||||
textprop_reactive!(
|
||||
RwSignal,
|
||||
<V, S>,
|
||||
V,
|
||||
RwSignal<V, S>: Get<Value = V>,
|
||||
S: Storage<V> + Storage<Option<V>>,
|
||||
S: Send + Sync + 'static,
|
||||
);
|
||||
textprop_reactive!(
|
||||
ReadSignal,
|
||||
<V, S>,
|
||||
V,
|
||||
ReadSignal<V, S>: Get<Value = V>,
|
||||
S: Storage<V> + Storage<Option<V>>,
|
||||
S: Send + Sync + 'static,
|
||||
);
|
||||
textprop_reactive!(
|
||||
Memo,
|
||||
<V, S>,
|
||||
V,
|
||||
Memo<V, S>: Get<Value = V>,
|
||||
S: Storage<V> + Storage<Option<V>>,
|
||||
S: Send + Sync + 'static,
|
||||
);
|
||||
textprop_reactive!(
|
||||
Signal,
|
||||
<V, S>,
|
||||
V,
|
||||
Signal<V, S>: Get<Value = V>,
|
||||
S: Storage<V> + Storage<Option<V>>,
|
||||
S: Send + Sync + 'static,
|
||||
);
|
||||
textprop_reactive!(
|
||||
MaybeSignal,
|
||||
<V, S>,
|
||||
V,
|
||||
MaybeSignal<V, S>: Get<Value = V>,
|
||||
S: Storage<V> + Storage<Option<V>>,
|
||||
S: Send + Sync + 'static,
|
||||
);
|
||||
textprop_reactive!(ArcRwSignal, <V>, V, ArcRwSignal<V>: Get<Value = V>);
|
||||
textprop_reactive!(ArcReadSignal, <V>, V, ArcReadSignal<V>: Get<Value = V>);
|
||||
textprop_reactive!(ArcMemo, <V>, V, ArcMemo<V>: Get<Value = V>);
|
||||
textprop_reactive!(ArcSignal, <V>, V, ArcSignal<V>: Get<Value = V>);
|
||||
}
|
||||
|
||||
/// Extension trait for `Option<TextProp>`
|
||||
pub trait OptionTextPropExt {
|
||||
/// Accesses the current value of the `Option<TextProp>` as an `Option<Oco<'static, str>>`.
|
||||
fn get(&self) -> Option<Oco<'static, str>>;
|
||||
}
|
||||
|
||||
impl OptionTextPropExt for Option<TextProp> {
|
||||
fn get(&self) -> Option<Oco<'static, str>> {
|
||||
self.as_ref().map(|text_prop| text_prop.get())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,6 +23,19 @@ macro_rules! error {
($($t:tt)*) => ($crate::logging::console_error(&format_args!($($t)*).to_string()))
}

/// Uses `println!()`-style formatting to log something to the console (in the browser)
/// or via `println!()` (if not in the browser), but only if it's a debug build.
#[macro_export]
macro_rules! debug_log {
($($x:tt)*) => {
{
if cfg!(debug_assertions) {
$crate::log!($($x)*)
}
}
}
}

/// Uses `println!()`-style formatting to log warnings to the console (in the browser)
/// or via `eprintln!()` (if not in the browser), but only if it's a debug build.
#[macro_export]
@@ -36,6 +49,19 @@ macro_rules! debug_warn {
}
}

/// Uses `println!()`-style formatting to log errors to the console (in the browser)
/// or via `eprintln!()` (if not in the browser), but only if it's a debug build.
#[macro_export]
macro_rules! debug_error {
($($x:tt)*) => {
{
if cfg!(debug_assertions) {
$crate::error!($($x)*)
}
}
}
}

const fn log_to_stdout() -> bool {
cfg!(not(all(
target_arch = "wasm32",
@@ -55,7 +81,7 @@ pub fn console_log(s: &str) {
}

/// Log a warning to the console (in the browser)
/// or via `println!()` (if not in the browser).
/// or via `eprintln!()` (if not in the browser).
pub fn console_warn(s: &str) {
if log_to_stdout() {
eprintln!("{s}");
@@ -65,7 +91,7 @@ pub fn console_warn(s: &str) {
}

/// Log an error to the console (in the browser)
/// or via `println!()` (if not in the browser).
/// or via `eprintln!()` (if not in the browser).
#[inline(always)]
pub fn console_error(s: &str) {
if log_to_stdout() {
@@ -75,21 +101,29 @@ pub fn console_error(s: &str) {
}
}

/// Log an error to the console (in the browser)
/// Log a string to the console (in the browser)
/// or via `println!()` (if not in the browser), but only in a debug build.
#[inline(always)]
pub fn console_debug_warn(s: &str) {
#[cfg(debug_assertions)]
{
if log_to_stdout() {
eprintln!("{s}");
} else {
web_sys::console::warn_1(&JsValue::from_str(s));
}
}

#[cfg(not(debug_assertions))]
{
let _ = s;
pub fn console_debug_log(s: &str) {
if cfg!(debug_assertions) {
console_log(s)
}
}

/// Log a warning to the console (in the browser)
/// or via `eprintln!()` (if not in the browser), but only in a debug build.
#[inline(always)]
pub fn console_debug_warn(s: &str) {
if cfg!(debug_assertions) {
console_warn(s)
}
}

/// Log an error to the console (in the browser)
/// or via `eprintln!()` (if not in the browser), but only in a debug build.
#[inline(always)]
pub fn console_debug_error(s: &str) {
if cfg!(debug_assertions) {
console_error(s)
}
}

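The new `debug_log!`/`debug_warn!`/`debug_error!` macros and `console_debug_*` helpers above all rely on `cfg!(debug_assertions)` being a compile-time constant, so the logging branch disappears from release builds. A standalone sketch of that pattern, using only std:

```rust
// Minimal model of the debug-only logging macros: the call site always
// type-checks, but the branch is compiled out when debug_assertions is off.
macro_rules! debug_log {
    ($($x:tt)*) => {{
        if cfg!(debug_assertions) {
            println!($($x)*);
        }
    }};
}

fn main() {
    debug_log!("only printed in debug builds: {}", 1 + 1);
}
```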
@@ -251,93 +251,67 @@ impl LNode {
|
||||
action: PatchAction::ClearChildren,
|
||||
}]
|
||||
} else {
|
||||
let mut a = 0;
|
||||
let mut b = std::cmp::max(old.len(), new.len()) - 1; // min is 0, have checked both have items
|
||||
let width = old.len() + 1;
|
||||
let height = new.len() + 1;
|
||||
let mut mat = vec![0; width * height];
|
||||
#[allow(clippy::needless_range_loop)]
|
||||
for i in 1..width {
|
||||
mat[i] = i;
|
||||
}
|
||||
for i in 1..height {
|
||||
mat[i * width] = i;
|
||||
}
|
||||
for j in 1..height {
|
||||
for i in 1..width {
|
||||
if old[i - 1] == new[j - 1] {
|
||||
mat[j * width + i] = mat[(j - 1) * width + (i - 1)];
|
||||
} else {
|
||||
mat[j * width + i] = (mat[(j - 1) * width + i] + 1)
|
||||
.min(mat[j * width + (i - 1)] + 1)
|
||||
.min(mat[(j - 1) * width + (i - 1)] + 1)
|
||||
}
|
||||
}
|
||||
}
|
||||
let (mut i, mut j) = (old.len(), new.len());
|
||||
let mut patches = vec![];
|
||||
// common prefix
|
||||
while a < b {
|
||||
let old = old.get(a);
|
||||
let new = new.get(a);
|
||||
|
||||
match (old, new) {
|
||||
(None, Some(new)) => patches.push(Patch {
|
||||
path: path.to_owned(),
|
||||
action: PatchAction::InsertChild {
|
||||
before: a,
|
||||
child: new.to_replacement_node(old_children),
|
||||
},
|
||||
}),
|
||||
(Some(_), None) => patches.push(Patch {
|
||||
path: path.to_owned(),
|
||||
action: PatchAction::RemoveChild { at: a },
|
||||
}),
|
||||
(Some(old), Some(new)) if old != new => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
a += 1;
|
||||
}
|
||||
|
||||
// common suffix
|
||||
while b >= a {
|
||||
let old = old.get(b);
|
||||
let new = new.get(b);
|
||||
|
||||
match (old, new) {
|
||||
(None, Some(new)) => patches.push(Patch {
|
||||
path: path.to_owned(),
|
||||
action: PatchAction::InsertChildAfter {
|
||||
after: b - 1,
|
||||
child: new.to_replacement_node(old_children),
|
||||
},
|
||||
}),
|
||||
(Some(_), None) => patches.push(Patch {
|
||||
path: path.to_owned(),
|
||||
action: PatchAction::RemoveChild { at: b },
|
||||
}),
|
||||
(Some(old), Some(new)) if old != new => {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if b == 0 {
|
||||
break;
|
||||
}
|
||||
b -= 1;
|
||||
}
|
||||
|
||||
// diffing in middle
|
||||
if b >= a {
|
||||
let old_slice_end =
|
||||
if b >= old.len() { old.len() - 1 } else { b };
|
||||
let new_slice_end =
|
||||
if b >= new.len() { new.len() - 1 } else { b };
|
||||
let old = &old[a..=old_slice_end];
|
||||
let new = &new[a..=new_slice_end];
|
||||
|
||||
for (new_idx, new_node) in new.iter().enumerate() {
|
||||
match old.get(new_idx) {
|
||||
Some(old_node) => {
|
||||
let mut new_path = path.to_vec();
|
||||
new_path.push(new_idx + a);
|
||||
let diffs = old_node.diff_at(
|
||||
new_node,
|
||||
&new_path,
|
||||
old_children,
|
||||
);
|
||||
patches.extend(&mut diffs.into_iter());
|
||||
}
|
||||
None => patches.push(Patch {
|
||||
while i > 0 || j > 0 {
|
||||
if i > 0 && j > 0 && old[i - 1] == new[j - 1] {
|
||||
i -= 1;
|
||||
j -= 1;
|
||||
} else {
|
||||
let current = mat[j * width + i];
|
||||
if i > 0
|
||||
&& j > 0
|
||||
&& mat[(j - 1) * width + i - 1] + 1 == current
|
||||
{
|
||||
let mut new_path = path.to_owned();
|
||||
new_path.push(i - 1);
|
||||
let diffs = old[i - 1].diff_at(
|
||||
&new[j - 1],
|
||||
&new_path,
|
||||
old_children,
|
||||
);
|
||||
patches.extend(&mut diffs.into_iter());
|
||||
i -= 1;
|
||||
j -= 1;
|
||||
} else if i > 0 && mat[j * width + i - 1] + 1 == current {
|
||||
patches.push(Patch {
|
||||
path: path.to_owned(),
|
||||
action: PatchAction::RemoveChild { at: i - 1 },
|
||||
});
|
||||
i -= 1;
|
||||
} else if j > 0 && mat[(j - 1) * width + i] + 1 == current {
|
||||
patches.push(Patch {
|
||||
path: path.to_owned(),
|
||||
action: PatchAction::InsertChild {
|
||||
before: new_idx,
|
||||
child: new_node
|
||||
before: i,
|
||||
child: new[j - 1]
|
||||
.to_replacement_node(old_children),
|
||||
},
|
||||
}),
|
||||
});
|
||||
j -= 1;
|
||||
} else {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -514,23 +488,17 @@ mod tests {
|
||||
let delta = a.diff(&b);
|
||||
assert_eq!(
|
||||
delta,
|
||||
vec![
|
||||
Patch {
|
||||
path: vec![],
|
||||
action: PatchAction::InsertChildAfter {
|
||||
after: 0,
|
||||
child: ReplacementNode::Element {
|
||||
name: "button".into(),
|
||||
attrs: vec![],
|
||||
children: vec![ReplacementNode::Html("bar".into())]
|
||||
}
|
||||
vec![Patch {
|
||||
path: vec![],
|
||||
action: PatchAction::InsertChild {
|
||||
before: 0,
|
||||
child: ReplacementNode::Element {
|
||||
name: "button".into(),
|
||||
attrs: vec![],
|
||||
children: vec![ReplacementNode::Html("foo".into())]
|
||||
}
|
||||
},
|
||||
Patch {
|
||||
path: vec![0, 0],
|
||||
action: PatchAction::SetText("foo".into())
|
||||
}
|
||||
]
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -121,6 +121,10 @@ impl ViewMacros {
|
||||
}
|
||||
diffs
|
||||
} else {
|
||||
// TODO: instead of simply returning no patches, when number of views differs,
|
||||
// we can compare views content to determine which views were shifted
|
||||
// or come up with another idea that will allow to send patches when views were shifted/removed/added
|
||||
lock.insert(path.clone(), new_views);
|
||||
return Ok(None);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -66,6 +66,9 @@ impl LNode {
|
||||
LNode::parse_node(child, views)?;
|
||||
}
|
||||
}
|
||||
Node::RawText(text) => {
|
||||
views.push(LNode::Text(text.to_string_best()));
|
||||
}
|
||||
Node::Text(text) => {
|
||||
views.push(LNode::Text(text.value_string()));
|
||||
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
console.log("[HOT RELOADING] Connected to server.\n\nNote: `cargo-leptos watch --hot-reload` only works with the `nightly` feature enabled on Leptos.");
|
||||
function patch(json) {
|
||||
try {
|
||||
const views = JSON.parse(json);
|
||||
for (const [id, patches] of views) {
|
||||
console.log("[HOT RELOAD]", id, patches);
|
||||
const walker = document.createTreeWalker(document.body, NodeFilter.SHOW_COMMENT),
|
||||
const walker = document.createTreeWalker(
|
||||
document.body,
|
||||
NodeFilter.SHOW_COMMENT,
|
||||
),
|
||||
open = `hot-reload|${id}|open`,
|
||||
close = `hot-reload|${id}|close`;
|
||||
let start, end;
|
||||
@@ -21,150 +23,200 @@ function patch(json) {
|
||||
}
|
||||
|
||||
for (const [start, end] of instances) {
|
||||
// build tree of current actual children
|
||||
const actualChildren = childrenFromRange(start.parentElement, start, end);
|
||||
const actions = [];
|
||||
|
||||
// build up the set of actions
|
||||
for (const patch of patches) {
|
||||
const actualChildren = childrenFromRange(
|
||||
start.parentElement,
|
||||
start,
|
||||
end,
|
||||
);
|
||||
const child = childAtPath(
|
||||
actualChildren.length > 1 ? { children: actualChildren } : actualChildren[0],
|
||||
patch.path
|
||||
actualChildren.length > 1
|
||||
? { children: actualChildren }
|
||||
: actualChildren[0],
|
||||
patch.path,
|
||||
);
|
||||
const action = patch.action;
|
||||
if (action == "ClearChildren") {
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > ClearChildren", child.node);
|
||||
console.log("[HOT RELOAD] > ClearChildren", child.node);
|
||||
if (child.node) {
|
||||
child.node.textContent = "";
|
||||
});
|
||||
} else if (action.ReplaceWith) {
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > ReplaceWith", child, action.ReplaceWith);
|
||||
const replacement = fromReplacementNode(action.ReplaceWith, actualChildren);
|
||||
if (child.node) {
|
||||
child.node.replaceWith(replacement);
|
||||
} else {
|
||||
const range = new Range();
|
||||
range.setStartAfter(child.start);
|
||||
range.setEndAfter(child.end);
|
||||
range.deleteContents();
|
||||
child.start.replaceWith(replacement);
|
||||
} else {
|
||||
for (const existingChild of child.children) {
|
||||
let parent = existingChild.node.parentElement;
|
||||
parent.removeChild(existingChild.node);
|
||||
}
|
||||
});
|
||||
}
|
||||
} else if (action.ReplaceWith) {
|
||||
console.log(
|
||||
"[HOT RELOAD] > ReplaceWith",
|
||||
child,
|
||||
action.ReplaceWith,
|
||||
);
|
||||
const replacement = fromReplacementNode(
|
||||
action.ReplaceWith,
|
||||
actualChildren,
|
||||
);
|
||||
if (child.node) {
|
||||
child.node.replaceWith(replacement);
|
||||
} else {
|
||||
if (child.children) {
|
||||
child.children[0].node.parentElement.insertBefore(
|
||||
replacement,
|
||||
child.children[0].node,
|
||||
);
|
||||
for (const existingChild of child.children) {
|
||||
existingChild.node.parentElement.removeChild(
|
||||
existingChild.node,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (action.ChangeTagName) {
|
||||
const oldNode = child.node;
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > ChangeTagName", child.node, action.ChangeTagName);
|
||||
const newElement = document.createElement(action.ChangeTagName);
|
||||
for (const attr of oldNode.attributes) {
|
||||
newElement.setAttribute(attr.name, attr.value);
|
||||
}
|
||||
for (const childNode of child.node.childNodes) {
|
||||
newElement.appendChild(childNode);
|
||||
}
|
||||
console.log(
|
||||
"[HOT RELOAD] > ChangeTagName",
|
||||
child.node,
|
||||
action.ChangeTagName,
|
||||
);
|
||||
const newElement = document.createElement(action.ChangeTagName);
|
||||
for (const attr of oldNode.attributes) {
|
||||
newElement.setAttribute(attr.name, attr.value);
|
||||
}
|
||||
for (const childNode of child.node.childNodes) {
|
||||
newElement.appendChild(childNode);
|
||||
}
|
||||
|
||||
child.node.replaceWith(newElement);
|
||||
});
|
||||
child.node.replaceWith(newElement);
|
||||
} else if (action.RemoveAttribute) {
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > RemoveAttribute", child.node, action.RemoveAttribute);
|
||||
child.node.removeAttribute(action.RemoveAttribute);
|
||||
});
|
||||
console.log(
|
||||
"[HOT RELOAD] > RemoveAttribute",
|
||||
child.node,
|
||||
action.RemoveAttribute,
|
||||
);
|
||||
child.node.removeAttribute(action.RemoveAttribute);
|
||||
} else if (action.SetAttribute) {
|
||||
const [name, value] = action.SetAttribute;
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > SetAttribute", child.node, action.SetAttribute);
|
||||
child.node.setAttribute(name, value);
|
||||
});
|
||||
console.log(
|
||||
"[HOT RELOAD] > SetAttribute",
|
||||
child.node,
|
||||
action.SetAttribute,
|
||||
);
|
||||
child.node.setAttribute(name, value);
|
||||
} else if (action.SetText) {
|
||||
const node = child.node;
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > SetText", child.node, action.SetText);
|
||||
node.textContent = action.SetText;
|
||||
});
|
||||
console.log("[HOT RELOAD] > SetText", child.node, action.SetText);
|
||||
node.textContent = action.SetText;
|
||||
} else if (action.AppendChildren) {
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > AppendChildren", child.node, action.AppendChildren);
|
||||
const newChildren = fromReplacementNode(action.AppendChildren, actualChildren);
|
||||
child.node.append(newChildren);
|
||||
});
|
||||
console.log(
|
||||
"[HOT RELOAD] > AppendChildren",
|
||||
child.node,
|
||||
action.AppendChildren,
|
||||
);
|
||||
const newChildren = action.AppendChildren.map((x) =>
|
||||
fromReplacementNode(x, actualChildren),
|
||||
);
|
||||
child.node.append(...newChildren);
|
||||
} else if (action.RemoveChild) {
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > RemoveChild", child.node, child.children, action.RemoveChild);
|
||||
const toRemove = child.children[action.RemoveChild.at];
|
||||
let toRemoveNode = toRemove.node;
|
||||
if (!toRemoveNode) {
|
||||
const range = new Range();
|
||||
range.setStartBefore(toRemove.start);
|
||||
range.setEndAfter(toRemove.end);
|
||||
toRemoveNode = range.deleteContents();
|
||||
} else {
|
||||
toRemoveNode.parentNode.removeChild(toRemoveNode);
|
||||
}
|
||||
});
|
||||
console.log(
|
||||
"[HOT RELOAD] > RemoveChild",
|
||||
child.node,
|
||||
child.children,
|
||||
action.RemoveChild,
|
||||
);
|
||||
const toRemove = child.children[action.RemoveChild.at];
|
||||
let toRemoveNode = toRemove.node;
|
||||
if (!toRemoveNode) {
|
||||
const range = new Range();
|
||||
range.setStartBefore(toRemove.start);
|
||||
range.setEndAfter(toRemove.end);
|
||||
toRemoveNode = range.deleteContents();
|
||||
} else {
|
||||
toRemoveNode.parentNode.removeChild(toRemoveNode);
|
||||
}
|
||||
} else if (action.InsertChild) {
|
||||
const newChild = fromReplacementNode(action.InsertChild.child, actualChildren);
|
||||
const newChild = fromReplacementNode(
|
||||
action.InsertChild.child,
|
||||
actualChildren,
|
||||
);
|
||||
let children = [];
|
||||
if (child.children) {
|
||||
children = child.children;
|
||||
} else if (child.start && child.end) {
|
||||
children = childrenFromRange(child.node || child.start.parentElement, start, end);
|
||||
children = childrenFromRange(
|
||||
child.node || child.start.parentElement,
|
||||
start,
|
||||
end,
|
||||
);
|
||||
} else {
|
||||
console.warn("InsertChildAfter could not build children.");
|
||||
}
|
||||
const before = children[action.InsertChild.before];
|
||||
actions.push(() => {
|
||||
console.log("[HOT RELOAD] > InsertChild", child, child.node, action.InsertChild, " before ", before);
|
||||
if (!before && child.node) {
|
||||
child.node.appendChild(newChild);
|
||||
} else {
|
||||
let node = child.node || child.end.parentElement;
|
||||
const reference = before ? before.node || before.start : child.end;
|
||||
node.insertBefore(newChild, reference);
|
||||
}
|
||||
});
|
||||
const beforeNode = children[action.InsertChild.before];
|
||||
console.log(
|
||||
"[HOT RELOAD] > InsertChild",
|
||||
child,
|
||||
child.node,
|
||||
action.InsertChild,
|
||||
" before ",
|
||||
beforeNode,
|
||||
);
|
||||
if (beforeNode) {
|
||||
let node = beforeNode.node || beforeNode.start.previousSibling;
|
||||
node.parentElement.insertBefore(newChild, node);
|
||||
} else if (child.node) {
|
||||
child.node.appendChild(newChild);
|
||||
} else if (children) {
|
||||
let lastNode = children[children.length - 1];
|
||||
let afterNode = lastNode.node || lastNode.end.nextSibling;
|
||||
afterNode.after(newChild);
|
||||
}
|
||||
} else if (action.InsertChildAfter) {
|
||||
const newChild = fromReplacementNode(action.InsertChildAfter.child, actualChildren);
|
||||
const newChild = fromReplacementNode(
|
||||
action.InsertChildAfter.child,
|
||||
actualChildren,
|
||||
);
|
||||
let children = [];
|
||||
if (child.children) {
|
||||
children = child.children;
|
||||
} else if (child.start && child.end) {
|
||||
children = childrenFromRange(child.node || child.start.parentElement, start, end);
|
||||
children = childrenFromRange(
|
||||
child.node || child.start.parentElement,
|
||||
start,
|
||||
end,
|
||||
);
|
||||
} else {
|
||||
console.warn("InsertChildAfter could not build children.");
|
||||
}
|
||||
const after = children[action.InsertChildAfter.after];
|
||||
actions.push(() => {
|
||||
console.log(
|
||||
"[HOT RELOAD] > InsertChildAfter",
|
||||
child,
|
||||
child.node,
|
||||
action.InsertChildAfter,
|
||||
" after ",
|
||||
after
|
||||
);
|
||||
if (child.node && (!after || !(after.node || after.start).nextSibling)) {
|
||||
child.node.appendChild(newChild);
|
||||
console.log(
|
||||
"[HOT RELOAD] > InsertChildAfter",
|
||||
child,
|
||||
child.node,
|
||||
action.InsertChildAfter,
|
||||
" after ",
|
||||
after,
|
||||
);
|
||||
if (
|
||||
child.node &&
|
||||
(!after || !(after.node || after.start).nextSibling)
|
||||
) {
|
||||
child.node.appendChild(newChild);
|
||||
} else {
|
||||
const node = child.node || child.end;
|
||||
const parent =
|
||||
node.nodeType === Node.COMMENT_NODE ? node.parentNode : node;
|
||||
if (!after) {
|
||||
parent.appendChild(newChild);
|
||||
} else {
|
||||
const node = child.node || child.end;
|
||||
const parent = node.nodeType === Node.COMMENT_NODE ? node.parentNode : node;
|
||||
if (!after) {
|
||||
parent.appendChild(newChild);
|
||||
} else {
|
||||
parent.insertBefore(newChild, (after.node || after.start).nextSibling);
|
||||
}
|
||||
parent.insertBefore(
|
||||
newChild,
|
||||
(after.node || after.start).nextSibling,
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
console.warn("[HOT RELOADING] Unmatched action", action);
|
||||
}
|
||||
}
|
||||
|
||||
// actually run the actions
|
||||
// the reason we delay them is so that children aren't moved before other children are found, etc.
|
||||
for (const action of actions) {
|
||||
action();
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
@@ -191,8 +243,10 @@ function patch(json) {
|
||||
return element;
|
||||
} else {
|
||||
const child = childAtPath(
|
||||
actualChildren.length > 1 ? { children: actualChildren } : actualChildren[0],
|
||||
node.Path
|
||||
actualChildren.length > 1
|
||||
? { children: actualChildren }
|
||||
: actualChildren[0],
|
||||
node.Path,
|
||||
);
|
||||
if (child) {
|
||||
let childNode = child.node;
|
||||
@@ -215,7 +269,10 @@ function patch(json) {
|
||||
}
|
||||
return childNode;
|
||||
} else {
|
||||
console.warn("[HOT RELOADING] Could not find replacement node at ", node.Path);
|
||||
console.warn(
|
||||
"[HOT RELOADING] Could not find replacement node at ",
|
||||
node.Path,
|
||||
);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
@@ -227,13 +284,16 @@ function patch(json) {
|
||||
NodeFilter.SHOW_ELEMENT | NodeFilter.SHOW_TEXT | NodeFilter.SHOW_COMMENT,
|
||||
{
|
||||
acceptNode(node) {
|
||||
if (node.parentNode == element && (!range || range.isPointInRange(node, 0))) {
|
||||
if (
|
||||
node.parentNode == element &&
|
||||
(!range || range.isPointInRange(node, 0))
|
||||
) {
|
||||
return NodeFilter.FILTER_ACCEPT;
|
||||
} else {
|
||||
return NodeFilter.FILTER_REJECT;
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
);
|
||||
const actualChildren = [],
|
||||
elementCount = {};
|
||||
@@ -259,18 +319,22 @@ function patch(json) {
|
||||
node: walker.currentNode,
|
||||
});
|
||||
} else if (walker.currentNode.nodeType == Node.COMMENT_NODE) {
|
||||
if (walker.currentNode.textContent.trim().startsWith("hot-reload")) {
|
||||
if (walker.currentNode.textContent.trim().endsWith("-children|open")) {
|
||||
if (walker.currentNode.textContent.trim().startsWith("hot-reload|")) {
|
||||
if (walker.currentNode.textContent.trim().endsWith("|open")) {
|
||||
const startingName = walker.currentNode.textContent.trim();
|
||||
const componentName = startingName.replace("-children|open").replace("hot-reload|");
|
||||
const endingName = `hot-reload|${componentName}-children|close`;
|
||||
const componentName = startingName
|
||||
.replace("|open", "")
|
||||
.replace("hot-reload|", "");
|
||||
const endingName = `hot-reload|${componentName}|close`;
|
||||
let start = walker.currentNode;
|
||||
let depth = 1;
|
||||
|
||||
while (walker.nextNode()) {
|
||||
if (walker.currentNode.textContent.trim() == endingName) {
|
||||
depth--;
|
||||
} else if (walker.currentNode.textContent.trim() == startingName) {
|
||||
} else if (
|
||||
walker.currentNode.textContent.trim() == startingName
|
||||
) {
|
||||
depth++;
|
||||
}
|
||||
|
||||
@@ -283,7 +347,11 @@ function patch(json) {
|
||||
type: "fragment",
|
||||
start: start.nextSibling,
|
||||
end: end.previousSibling,
|
||||
children: childrenFromRange(start.parentElement, start.nextSibling, end.previousSibling),
|
||||
children: childrenFromRange(
|
||||
start.parentElement,
|
||||
start.nextSibling,
|
||||
end.previousSibling,
|
||||
),
|
||||
});
|
||||
}
|
||||
} else if (walker.currentNode.textContent.trim() == "<() />") {
|
||||
@@ -358,7 +426,10 @@ function patch(json) {
|
||||
});
|
||||
}
|
||||
} else {
|
||||
console.warn("[HOT RELOADING] Building children, encountered", walker.currentNode);
|
||||
console.warn(
|
||||
"[HOT RELOADING] Building children, encountered",
|
||||
walker.currentNode,
|
||||
);
|
||||
}
|
||||
}
|
||||
return actualChildren;
|
||||
@@ -374,7 +445,11 @@ function patch(json) {
|
||||
} else if (path == [0]) {
|
||||
return element;
|
||||
} else if (element.start && element.end) {
|
||||
const actualChildren = childrenFromRange(element.node || element.start.parentElement, element.start, element.end);
|
||||
const actualChildren = childrenFromRange(
|
||||
element.node || element.start.parentElement,
|
||||
element.start,
|
||||
element.end,
|
||||
);
|
||||
return childAtPath({ children: actualChildren }, path);
|
||||
} else {
|
||||
console.warn("[HOT RELOADING] Child at ", path, "not found in ", element);
|
||||
|
||||
@@ -358,16 +358,14 @@ fn view_macro_impl(tokens: TokenStream, template: bool) -> TokenStream {
|
||||
}
|
||||
|
||||
fn normalized_call_site(site: proc_macro::Span) -> Option<String> {
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(all(debug_assertions, feature = "nightly", rustc_nightly))] {
|
||||
Some(leptos_hot_reload::span_to_stable_id(
|
||||
site.file(),
|
||||
site.start().line()
|
||||
))
|
||||
} else {
|
||||
_ = site;
|
||||
None
|
||||
}
|
||||
if cfg!(debug_assertions) {
|
||||
Some(leptos_hot_reload::span_to_stable_id(
|
||||
site.file(),
|
||||
site.start().line(),
|
||||
))
|
||||
} else {
|
||||
_ = site;
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -44,6 +44,8 @@ pub fn render_view(
|
||||
view_marker: Option<String>,
|
||||
disable_inert_html: bool,
|
||||
) -> Option<TokenStream> {
|
||||
let disable_inert_html = disable_inert_html || global_class.is_some();
|
||||
|
||||
let (base, should_add_view) = match nodes.len() {
|
||||
0 => {
|
||||
let span = Span::call_site();
|
||||
@@ -112,9 +114,9 @@ fn is_inert_element(orig_node: &Node<impl CustomNode>) -> bool {
|
||||
return false;
|
||||
}
|
||||
|
||||
// also doesn't work if the top-level element is an SVG/MathML element
|
||||
// also doesn't work if the top-level element is a MathML element
|
||||
let el_name = el.name().to_string();
|
||||
if is_svg_element(&el_name) || is_math_ml_element(&el_name) {
|
||||
if is_math_ml_element(&el_name) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -300,7 +302,7 @@ fn inert_element_to_tokens(
|
||||
node: &Node<impl CustomNode>,
|
||||
escape_text: bool,
|
||||
global_class: Option<&TokenTree>,
|
||||
) -> Option<TokenStream> {
|
||||
) -> TokenStream {
|
||||
let mut html = InertElementBuilder::new(global_class);
|
||||
let mut nodes = VecDeque::from([Item::Node(node, escape_text)]);
|
||||
|
||||
@@ -396,9 +398,117 @@ fn inert_element_to_tokens(
|
||||
|
||||
html.finish();
|
||||
|
||||
Some(quote! {
|
||||
quote! {
|
||||
::leptos::tachys::html::InertElement::new(#html)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// # Note
|
||||
/// Should not be used on top level `<svg>` elements.
|
||||
/// Use [`inert_element_to_tokens`] instead.
|
||||
fn inert_svg_element_to_tokens(
|
||||
node: &Node<impl CustomNode>,
|
||||
escape_text: bool,
|
||||
global_class: Option<&TokenTree>,
|
||||
) -> TokenStream {
|
||||
let mut html = InertElementBuilder::new(global_class);
|
||||
let mut nodes = VecDeque::from([Item::Node(node, escape_text)]);
|
||||
|
||||
while let Some(current) = nodes.pop_front() {
|
||||
match current {
|
||||
Item::ClosingTag(tag) => {
|
||||
// closing tag
|
||||
html.push_str("</");
|
||||
html.push_str(&tag);
|
||||
html.push('>');
|
||||
}
|
||||
Item::Node(current, escape) => {
|
||||
match current {
|
||||
Node::RawText(raw) => {
|
||||
let text = raw.to_string_best();
|
||||
let text = if escape {
|
||||
html_escape::encode_text(&text)
|
||||
} else {
|
||||
text.into()
|
||||
};
|
||||
html.push_str(&text);
|
||||
}
|
||||
Node::Text(text) => {
|
||||
let text = text.value_string();
|
||||
let text = if escape {
|
||||
html_escape::encode_text(&text)
|
||||
} else {
|
||||
text.into()
|
||||
};
|
||||
html.push_str(&text);
|
||||
}
|
||||
Node::Element(node) => {
|
||||
let self_closing = is_self_closing(node);
|
||||
let el_name = node.name().to_string();
|
||||
let escape = el_name != "script"
|
||||
&& el_name != "style"
|
||||
&& el_name != "textarea";
|
||||
|
||||
// opening tag
|
||||
html.push('<');
|
||||
html.push_str(&el_name);
|
||||
|
||||
for attr in node.attributes() {
|
||||
if let NodeAttribute::Attribute(attr) = attr {
|
||||
let attr_name = attr.key.to_string();
|
||||
// trim r# from raw identifiers like r#as
|
||||
let attr_name =
|
||||
attr_name.trim_start_matches("r#");
|
||||
if attr_name != "class" {
|
||||
html.push(' ');
|
||||
html.push_str(attr_name);
|
||||
}
|
||||
|
||||
if let Some(value) =
|
||||
attr.possible_value.to_value()
|
||||
{
|
||||
if let KVAttributeValue::Expr(Expr::Lit(
|
||||
lit,
|
||||
)) = &value.value
|
||||
{
|
||||
if let Lit::Str(txt) = &lit.lit {
|
||||
let value = txt.value();
|
||||
let value = html_escape::encode_double_quoted_attribute(&value);
|
||||
if attr_name == "class" {
|
||||
html.push_class(&value);
|
||||
} else {
|
||||
html.push_str("=\"");
|
||||
html.push_str(&value);
|
||||
html.push('"');
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
html.push('>');
|
||||
|
||||
// render all children
|
||||
if !self_closing {
|
||||
nodes.push_front(Item::ClosingTag(el_name));
|
||||
let children = node.children.iter().rev();
|
||||
for child in children {
|
||||
nodes.push_front(Item::Node(child, escape));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
html.finish();
|
||||
|
||||
quote! {
|
||||
::leptos::tachys::svg::InertElement::new(#html)
|
||||
}
|
||||
}
|
||||
|
||||
fn element_children_to_tokens(
|
||||
@@ -597,7 +707,17 @@ fn node_to_tokens(
|
||||
let escape = el_name != "script"
|
||||
&& el_name != "style"
|
||||
&& el_name != "textarea";
|
||||
inert_element_to_tokens(node, escape, global_class)
|
||||
|
||||
let el_name = el_node.name().to_string();
|
||||
if is_svg_element(&el_name) && el_name != "svg" {
|
||||
Some(inert_svg_element_to_tokens(
|
||||
node,
|
||||
escape,
|
||||
global_class,
|
||||
))
|
||||
} else {
|
||||
Some(inert_element_to_tokens(node, escape, global_class))
|
||||
}
|
||||
} else {
|
||||
element_to_tokens(
|
||||
el_node,
|
||||
|
||||
@@ -20,7 +20,7 @@ use reactive_graph::{
|
||||
};
|
||||
use std::{
|
||||
future::{pending, Future, IntoFuture},
|
||||
ops::DerefMut,
|
||||
ops::{Deref, DerefMut},
|
||||
panic::Location,
|
||||
};
|
||||
|
||||
@@ -43,6 +43,14 @@ impl<T> Clone for ArcLocalResource<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deref for ArcLocalResource<T> {
|
||||
type Target = ArcAsyncDerived<T>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> ArcLocalResource<T> {
|
||||
/// Creates the resource.
|
||||
///
|
||||
@@ -269,6 +277,14 @@ pub struct LocalResource<T> {
|
||||
defined_at: &'static Location<'static>,
|
||||
}
|
||||
|
||||
impl<T> Deref for LocalResource<T> {
|
||||
type Target = AsyncDerived<T>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Clone for LocalResource<T> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_meta"
|
||||
version = "0.8.2"
|
||||
version = "0.8.4"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -10,12 +10,11 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
leptos = { workspace = true }
|
||||
once_cell = { workspace = true, default-features = true }
|
||||
or_poisoned = { workspace = true }
|
||||
indexmap = { workspace = true, default-features = true }
|
||||
send_wrapper = { workspace = true, default-features = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true , default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, default-features = true }
|
||||
futures = { workspace = true, default-features = true }
|
||||
|
||||
[dependencies.web-sys]
|
||||
|
||||
@@ -63,13 +63,12 @@ use leptos::{
|
||||
},
|
||||
IntoView,
|
||||
};
|
||||
use once_cell::sync::Lazy;
|
||||
use send_wrapper::SendWrapper;
|
||||
use std::{
|
||||
fmt::Debug,
|
||||
sync::{
|
||||
mpsc::{channel, Receiver, Sender},
|
||||
Arc,
|
||||
Arc, LazyLock,
|
||||
},
|
||||
};
|
||||
use wasm_bindgen::JsCast;
|
||||
@@ -101,7 +100,7 @@ pub struct MetaContext {
|
||||
/// Metadata associated with the `<title>` element.
|
||||
pub(crate) title: TitleContext,
|
||||
/// The hydration cursor for the location in the `<head>` for arbitrary tags will be rendered.
|
||||
pub(crate) cursor: Arc<Lazy<SendWrapper<Cursor>>>,
|
||||
pub(crate) cursor: Arc<LazyLock<SendWrapper<Cursor>>>,
|
||||
}
|
||||
|
||||
impl MetaContext {
|
||||
@@ -143,7 +142,7 @@ impl Default for MetaContext {
|
||||
))
|
||||
};
|
||||
|
||||
let cursor = Arc::new(Lazy::new(build_cursor));
|
||||
let cursor = Arc::new(LazyLock::new(build_cursor));
|
||||
Self {
|
||||
title: Default::default(),
|
||||
cursor,
|
||||
@@ -414,6 +413,7 @@ where
|
||||
type Owned = RegisteredMetaTag<E, At::CloneableOwned, Ch::Owned>;
|
||||
|
||||
const MIN_LENGTH: usize = 0;
|
||||
const EXISTS: bool = false;
|
||||
|
||||
fn dry_resolve(&mut self) {
|
||||
self.el.dry_resolve()
|
||||
|
||||
@@ -322,6 +322,7 @@ impl RenderHtml for TitleView {
|
||||
type Owned = Self;
|
||||
|
||||
const MIN_LENGTH: usize = 0;
|
||||
const EXISTS: bool = false;
|
||||
|
||||
fn dry_resolve(&mut self) {}
|
||||
|
||||
|
||||
@@ -21,7 +21,6 @@ fake = "2.9"
|
||||
tokio-tungstenite = "0.23.1"
|
||||
futures-util = "0.3.30"
|
||||
uuid = { version = "1.10", features = ["serde"] }
|
||||
once_cell = "1.19"
|
||||
futures = "0.3.30"
|
||||
|
||||
[[test]]
|
||||
@@ -33,6 +32,5 @@ harness = false # Allow Cucumber to print output instead of libtest
|
||||
ssr = []
|
||||
|
||||
[dependencies]
|
||||
once_cell = "1.19.0"
|
||||
regex = "1.10.6"
|
||||
serde.workspace = true
|
||||
|
||||
@@ -18,14 +18,14 @@ use chromiumoxide::{
|
||||
use cucumber::World;
|
||||
use futures::channel::mpsc::Sender;
|
||||
use futures_util::stream::StreamExt;
|
||||
use once_cell::sync::Lazy;
|
||||
use std::sync::LazyLock;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{collections::HashMap, sync::Arc, time::Duration};
|
||||
use tokio::sync::RwLock;
|
||||
use tokio_tungstenite::connect_async;
|
||||
use uuid::Uuid;
|
||||
static EMAIL_ID_MAP: Lazy<RwLock<HashMap<String, String>>> =
|
||||
Lazy::new(|| RwLock::new(HashMap::new()));
|
||||
static EMAIL_ID_MAP: LazyLock<RwLock<HashMap<String, String>>> =
|
||||
LazyLock::new(|| RwLock::new(HashMap::new()));
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct RequestPair {
|
||||
@@ -93,7 +93,7 @@ impl RequestPair {
|
||||
async fn main() -> Result<()> {
|
||||
// create a thread and store a
|
||||
// tokio-tungstenite client that connectsto http://127.0.0.1:1080/ws
|
||||
// and then stores the recieved messages in a once_cell::Lazy<RwLock<Vec<MailCrabMsg>>>
|
||||
// and then stores the recieved messages in a std::sync::LazyLock<RwLock<Vec<MailCrabMsg>>>
|
||||
// or a custom struct that matches the body or has specific impls for verify codes, links etc.
|
||||
let _ = tokio::spawn(async move {
|
||||
let (mut socket, _) = connect_async(
|
||||
@@ -152,7 +152,7 @@ async fn main() -> Result<()> {
|
||||
|
||||
tokio::task::spawn(async move {
|
||||
while let Some(event) = log_events.next().await {
|
||||
if let Some(EventEntryAdded { entry }) =
|
||||
if let Some(EventEntryAdded { entry }) =
|
||||
Arc::<EventEntryAdded>::into_inner(event) {
|
||||
console_logs.write().await.push(format!(" {entry:#?} "));
|
||||
} else {
|
||||
@@ -171,7 +171,7 @@ async fn main() -> Result<()> {
|
||||
} else {
|
||||
tracing::error!("tried to into inner but none")
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
@@ -208,7 +208,7 @@ async fn main() -> Result<()> {
|
||||
thing.cookies_before_request = cookies;
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
CookieEnum::AfterResp(req_id) => {
|
||||
let cookies = page
|
||||
@@ -293,8 +293,8 @@ async fn main() -> Result<()> {
|
||||
} else {
|
||||
tracing::error!(" uhh err here")
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
});
|
||||
// We don't need to join on our join handles, they will run detached and clean up whenever.
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_graph"
|
||||
version = "0.2.2"
|
||||
version = "0.2.4"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -16,19 +16,26 @@ futures = { workspace = true, default-features = true }
|
||||
hydration_context = { workspace = true, optional = true }
|
||||
pin-project-lite = { workspace = true, default-features = true }
|
||||
rustc-hash = { workspace = true, default-features = true }
|
||||
serde = { features = ["derive"], optional = true , workspace = true, default-features = true }
|
||||
serde = { features = [
|
||||
"derive",
|
||||
], optional = true, workspace = true, default-features = true }
|
||||
slotmap = { workspace = true, default-features = true }
|
||||
thiserror = { workspace = true , default-features = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
thiserror = { workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
guardian = { workspace = true, default-features = true }
|
||||
async-lock = { workspace = true, default-features = true }
|
||||
send_wrapper = { features = ["futures"] , workspace = true, default-features = true }
|
||||
send_wrapper = { features = [
|
||||
"futures",
|
||||
], workspace = true, default-features = true }
|
||||
|
||||
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
|
||||
web-sys = { version = "0.3.77", features = ["console"] }
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { features = ["rt-multi-thread", "macros"] , workspace = true, default-features = true }
|
||||
tokio = { features = [
|
||||
"rt-multi-thread",
|
||||
"macros",
|
||||
], workspace = true, default-features = true }
|
||||
tokio-test = { workspace = true, default-features = true }
|
||||
any_spawner = { workspace = true, features = ["futures-executor", "tokio"] }
|
||||
|
||||
|
||||
@@ -167,7 +167,6 @@ impl Owner {
|
||||
.map(|parent| parent.read().or_poisoned().arena.clone())
|
||||
.unwrap_or_default(),
|
||||
paused: false,
|
||||
joined_owners: Vec::new(),
|
||||
})),
|
||||
#[cfg(feature = "hydration")]
|
||||
shared_context,
|
||||
@@ -202,7 +201,6 @@ impl Owner {
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
arena: Default::default(),
|
||||
paused: false,
|
||||
joined_owners: Vec::new(),
|
||||
})),
|
||||
#[cfg(feature = "hydration")]
|
||||
shared_context,
|
||||
@@ -228,7 +226,6 @@ impl Owner {
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
arena,
|
||||
paused,
|
||||
joined_owners: Vec::new(),
|
||||
})),
|
||||
#[cfg(feature = "hydration")]
|
||||
shared_context: self.shared_context.clone(),
|
||||
@@ -464,7 +461,6 @@ pub(crate) struct OwnerInner {
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
arena: Arc<RwLock<ArenaMap>>,
|
||||
paused: bool,
|
||||
joined_owners: Vec<WeakOwner>,
|
||||
}
|
||||
|
||||
impl Debug for OwnerInner {
|
||||
|
||||
@@ -6,15 +6,6 @@ use std::{
|
||||
};
|
||||
|
||||
impl Owner {
|
||||
#[doc(hidden)]
|
||||
pub fn join_contexts(&self, other: &Owner) {
|
||||
self.inner
|
||||
.write()
|
||||
.or_poisoned()
|
||||
.joined_owners
|
||||
.push(other.downgrade());
|
||||
}
|
||||
|
||||
fn provide_context<T: Send + Sync + 'static>(&self, value: T) {
|
||||
self.inner
|
||||
.write()
|
||||
@@ -34,27 +25,18 @@ impl Owner {
|
||||
if let Some(context) = contexts.remove(&ty) {
|
||||
context.downcast::<T>().ok().map(|n| *n)
|
||||
} else {
|
||||
let parent = inner.parent.as_ref().and_then(|p| p.upgrade());
|
||||
let joined = inner
|
||||
.joined_owners
|
||||
.iter()
|
||||
.flat_map(|owner| owner.upgrade().map(|owner| owner.inner));
|
||||
for parent in parent.into_iter().chain(joined) {
|
||||
let mut parent = Some(parent);
|
||||
while let Some(ref this_parent) = parent.clone() {
|
||||
let mut this_parent = this_parent.write().or_poisoned();
|
||||
let contexts = &mut this_parent.contexts;
|
||||
let value = contexts.remove(&ty);
|
||||
let downcast =
|
||||
value.and_then(|context| context.downcast::<T>().ok());
|
||||
if let Some(value) = downcast {
|
||||
return Some(*value);
|
||||
} else {
|
||||
parent = this_parent
|
||||
.parent
|
||||
.as_ref()
|
||||
.and_then(|p| p.upgrade());
|
||||
}
|
||||
let mut parent = inner.parent.as_ref().and_then(|p| p.upgrade());
|
||||
while let Some(ref this_parent) = parent.clone() {
|
||||
let mut this_parent = this_parent.write().or_poisoned();
|
||||
let contexts = &mut this_parent.contexts;
|
||||
let value = contexts.remove(&ty);
|
||||
let downcast =
|
||||
value.and_then(|context| context.downcast::<T>().ok());
|
||||
if let Some(value) = downcast {
|
||||
return Some(*value);
|
||||
} else {
|
||||
parent =
|
||||
this_parent.parent.as_ref().and_then(|p| p.upgrade());
|
||||
}
|
||||
}
|
||||
None
|
||||
@@ -71,29 +53,21 @@ impl Owner {
|
||||
let reference = if let Some(context) = contexts.get(&ty) {
|
||||
context.downcast_ref::<T>()
|
||||
} else {
|
||||
let parent = inner.parent.as_ref().and_then(|p| p.upgrade());
|
||||
let joined = inner
|
||||
.joined_owners
|
||||
.iter()
|
||||
.flat_map(|owner| owner.upgrade().map(|owner| owner.inner));
|
||||
for parent in parent.into_iter().chain(joined) {
|
||||
let mut parent = Some(parent);
|
||||
while let Some(ref this_parent) = parent.clone() {
|
||||
let this_parent = this_parent.read().or_poisoned();
|
||||
let contexts = &this_parent.contexts;
|
||||
let value = contexts.get(&ty);
|
||||
let downcast =
|
||||
value.and_then(|context| context.downcast_ref::<T>());
|
||||
if let Some(value) = downcast {
|
||||
return Some(cb(value));
|
||||
} else {
|
||||
parent = this_parent
|
||||
.parent
|
||||
.as_ref()
|
||||
.and_then(|p| p.upgrade());
|
||||
}
|
||||
let mut parent = inner.parent.as_ref().and_then(|p| p.upgrade());
|
||||
while let Some(ref this_parent) = parent.clone() {
|
||||
let this_parent = this_parent.read().or_poisoned();
|
||||
let contexts = &this_parent.contexts;
|
||||
let value = contexts.get(&ty);
|
||||
let downcast =
|
||||
value.and_then(|context| context.downcast_ref::<T>());
|
||||
if let Some(value) = downcast {
|
||||
return Some(cb(value));
|
||||
} else {
|
||||
parent =
|
||||
this_parent.parent.as_ref().and_then(|p| p.upgrade());
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
};
|
||||
reference.map(cb)
|
||||
@@ -109,27 +83,18 @@ impl Owner {
|
||||
let reference = if let Some(context) = contexts.get_mut(&ty) {
|
||||
context.downcast_mut::<T>()
|
||||
} else {
|
||||
let parent = inner.parent.as_ref().and_then(|p| p.upgrade());
|
||||
let joined = inner
|
||||
.joined_owners
|
||||
.iter()
|
||||
.flat_map(|owner| owner.upgrade().map(|owner| owner.inner));
|
||||
for parent in parent.into_iter().chain(joined) {
|
||||
let mut parent = Some(parent);
|
||||
while let Some(ref this_parent) = parent.clone() {
|
||||
let mut this_parent = this_parent.write().or_poisoned();
|
||||
let contexts = &mut this_parent.contexts;
|
||||
let value = contexts.get_mut(&ty);
|
||||
let downcast =
|
||||
value.and_then(|context| context.downcast_mut::<T>());
|
||||
if let Some(value) = downcast {
|
||||
return Some(cb(value));
|
||||
} else {
|
||||
parent = this_parent
|
||||
.parent
|
||||
.as_ref()
|
||||
.and_then(|p| p.upgrade());
|
||||
}
|
||||
let mut parent = inner.parent.as_ref().and_then(|p| p.upgrade());
|
||||
while let Some(ref this_parent) = parent.clone() {
|
||||
let mut this_parent = this_parent.write().or_poisoned();
|
||||
let contexts = &mut this_parent.contexts;
|
||||
let value = contexts.get_mut(&ty);
|
||||
let downcast =
|
||||
value.and_then(|context| context.downcast_mut::<T>());
|
||||
if let Some(value) = downcast {
|
||||
return Some(cb(value));
|
||||
} else {
|
||||
parent =
|
||||
this_parent.parent.as_ref().and_then(|p| p.upgrade());
|
||||
}
|
||||
}
|
||||
None
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores"
|
||||
version = "0.2.2"
|
||||
version = "0.2.4"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -11,7 +11,7 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
guardian = { workspace = true, default-features = true }
|
||||
itertools = { workspace = true , default-features = true }
|
||||
itertools = { workspace = true, default-features = true }
|
||||
or_poisoned = { workspace = true }
|
||||
paste = { workspace = true, default-features = true }
|
||||
reactive_graph = { workspace = true }
|
||||
@@ -21,7 +21,10 @@ dashmap = { workspace = true, default-features = true }
|
||||
send_wrapper = { workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { features = ["rt-multi-thread", "macros"] , workspace = true, default-features = true }
|
||||
tokio = { features = [
|
||||
"rt-multi-thread",
|
||||
"macros",
|
||||
], workspace = true, default-features = true }
|
||||
tokio-test = { workspace = true, default-features = true }
|
||||
any_spawner = { workspace = true, features = ["futures-executor", "tokio"] }
|
||||
reactive_graph = { workspace = true, features = ["effects"] }
|
||||
|
||||
@@ -1105,11 +1105,6 @@ mod tests {
|
||||
assert_eq!(combined_count.load(Ordering::Relaxed), 3);
|
||||
}
|
||||
|
||||
#[derive(Debug, Store)]
|
||||
pub struct StructWithOption {
|
||||
opt_field: Option<Todo>,
|
||||
}
|
||||
|
||||
// regression test for https://github.com/leptos-rs/leptos/issues/3523
|
||||
#[tokio::test]
|
||||
async fn notifying_all_descendants() {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores_macro"
|
||||
version = "0.2.2"
|
||||
version = "0.2.4"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -13,8 +13,8 @@ edition.workspace = true
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
convert_case = { workspace = true , default-features = true }
|
||||
convert_case = { workspace = true, default-features = true }
|
||||
proc-macro-error2 = { workspace = true, default-features = true }
|
||||
proc-macro2 = { workspace = true, default-features = true }
|
||||
quote = { workspace = true, default-features = true }
|
||||
syn = { features = ["full"] , workspace = true, default-features = true }
|
||||
syn = { features = ["full"], workspace = true, default-features = true }
|
||||
|
||||
@@ -111,10 +111,8 @@ impl ToTokens for Model {
|
||||
} = &self;
|
||||
let any_store_field = Ident::new("AnyStoreField", Span::call_site());
|
||||
let trait_name = Ident::new(&format!("{name}StoreFields"), name.span());
|
||||
let generics_with_orig = {
|
||||
let params = &generics.params;
|
||||
quote! { <#any_store_field, #params> }
|
||||
};
|
||||
let params = &generics.params;
|
||||
let generics_with_orig = quote! { <#any_store_field, #params> };
|
||||
let where_with_orig = {
|
||||
generics
|
||||
.where_clause
|
||||
@@ -140,13 +138,13 @@ impl ToTokens for Model {
|
||||
|
||||
// read access
|
||||
tokens.extend(quote! {
|
||||
#vis trait #trait_name <AnyStoreField>
|
||||
#vis trait #trait_name <AnyStoreField, #params>
|
||||
#where_with_orig
|
||||
{
|
||||
#(#trait_fields)*
|
||||
}
|
||||
|
||||
impl #generics_with_orig #trait_name <AnyStoreField> for AnyStoreField
|
||||
impl #generics_with_orig #trait_name <AnyStoreField, #params> for AnyStoreField
|
||||
#where_with_orig
|
||||
{
|
||||
#(#read_fields)*
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router"
|
||||
version = "0.8.2"
|
||||
version = "0.8.4"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -20,12 +20,11 @@ tachys = { workspace = true, features = ["reactive_graph"] }
|
||||
futures = { workspace = true, default-features = true }
|
||||
url = { workspace = true, default-features = true }
|
||||
js-sys = { workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true , default-features = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
once_cell = { workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
send_wrapper = { workspace = true, default-features = true }
|
||||
thiserror = { workspace = true , default-features = true }
|
||||
percent-encoding = { optional = true , workspace = true, default-features = true }
|
||||
thiserror = { workspace = true, default-features = true }
|
||||
percent-encoding = { optional = true, workspace = true, default-features = true }
|
||||
gloo-net = { workspace = true, default-features = true }
|
||||
|
||||
[dependencies.web-sys]
|
||||
|
||||
@@ -144,16 +144,12 @@ impl RouterContext {
|
||||
resolve_path("", path, None)
|
||||
};
|
||||
|
||||
let mut url = match resolved_to.map(|to| BrowserUrl::parse(&to)) {
|
||||
Some(Ok(url)) => url,
|
||||
Some(Err(e)) => {
|
||||
let mut url = match BrowserUrl::parse(&resolved_to) {
|
||||
Ok(url) => url,
|
||||
Err(e) => {
|
||||
leptos::logging::error!("Error parsing URL: {e:?}");
|
||||
return;
|
||||
}
|
||||
None => {
|
||||
leptos::logging::error!("Error resolving relative URL.");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let query_mutations =
|
||||
mem::take(&mut *self.query_mutations.write_value());
|
||||
@@ -203,7 +199,7 @@ impl RouterContext {
|
||||
&'a self,
|
||||
path: &'a str,
|
||||
from: Option<&'a str>,
|
||||
) -> Option<Cow<'a, str>> {
|
||||
) -> Cow<'a, str> {
|
||||
let base = self.base.as_deref().unwrap_or_default();
|
||||
resolve_path(base, path, from)
|
||||
}
|
||||
@@ -580,18 +576,11 @@ pub fn Redirect<P>(
|
||||
|
||||
// redirect on the server
|
||||
if let Some(redirect_fn) = use_context::<ServerRedirectFunction>() {
|
||||
match resolve_path("", &path, Some(&use_matched().get_untracked())) {
|
||||
Some(path) => (redirect_fn.f)(&path),
|
||||
None => {
|
||||
if cfg!(feature = "ssr") {
|
||||
#[cfg(feature = "tracing")]
|
||||
tracing::warn!("Error resolving relative URL.");
|
||||
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
eprintln!("Error resolving relative URL.");
|
||||
}
|
||||
}
|
||||
}
|
||||
(redirect_fn.f)(&resolve_path(
|
||||
"",
|
||||
&path,
|
||||
Some(&use_matched().get_untracked()),
|
||||
));
|
||||
}
|
||||
// redirect on the client
|
||||
else {
|
||||
|
||||
@@ -87,7 +87,7 @@ where
|
||||
fn inner(
|
||||
has_router: bool,
|
||||
method: Option<&'static str>,
|
||||
action: ArcMemo<Option<String>>,
|
||||
action: ArcMemo<String>,
|
||||
enctype: Option<String>,
|
||||
version: Option<RwSignal<usize>>,
|
||||
error: Option<RwSignal<Option<Box<dyn Error + Send + Sync>>>>,
|
||||
@@ -311,7 +311,7 @@ where
|
||||
let action = if has_router {
|
||||
use_resolved_path(move || action.to_href()())
|
||||
} else {
|
||||
ArcMemo::new(move |_| Some(action.to_href()()))
|
||||
ArcMemo::new(move |_| action.to_href()())
|
||||
};
|
||||
inner(
|
||||
has_router,
|
||||
|
||||
@@ -240,7 +240,7 @@ pub(crate) struct Matched(pub ArcMemo<String>);
|
||||
#[track_caller]
|
||||
pub(crate) fn use_resolved_path(
|
||||
path: impl Fn() -> String + Send + Sync + 'static,
|
||||
) -> ArcMemo<Option<String>> {
|
||||
) -> ArcMemo<String> {
|
||||
let router = use_context::<RouterContext>()
|
||||
.expect("called use_resolved_path outside a <Router>");
|
||||
// TODO make this work with flat routes too?
|
||||
@@ -248,14 +248,14 @@ pub(crate) fn use_resolved_path(
|
||||
ArcMemo::new(move |_| {
|
||||
let path = path();
|
||||
if path.starts_with('/') {
|
||||
Some(path)
|
||||
path
|
||||
} else {
|
||||
router
|
||||
.resolve_path(
|
||||
&path,
|
||||
matched.as_ref().map(|n| n.get()).as_deref(),
|
||||
)
|
||||
.map(|n| n.to_string())
|
||||
.to_string()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -66,6 +66,25 @@ where
|
||||
/// This is helpful for accessibility and for styling. For example, maybe you want to set the link a
|
||||
/// different color if it’s a link to the page you’re currently on.
|
||||
///
|
||||
/// ### Additional Attributes
|
||||
///
|
||||
/// You can add additional HTML attributes to the `<a>` element created by this component using the attribute
|
||||
/// spreading syntax for components. For example, to add a class, you can use `attr:class="my-link"`.
|
||||
/// Alternatively, you can add any number of HTML attributes (including `class`) after a `{..}` marker.
|
||||
///
|
||||
/// ```rust
|
||||
/// # use leptos::prelude::*; use leptos_router::components::A;
|
||||
/// # fn spread_example() -> impl IntoView {
|
||||
/// view! {
|
||||
/// <A href="/about" attr:class="my-link" {..} id="foo">"Some link"</A>
|
||||
/// <A href="/about" {..} class="my-link" id="bar">"Another link"</A>
|
||||
/// <A href="/about" {..} class:my-link=true id="baz">"One more"</A>
|
||||
/// }
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// For more information on this attribute spreading syntax, [see here](https://book.leptos.dev/view/03_components.html#spreading-attributes-onto-components).
|
||||
///
|
||||
/// ### DOM Properties
|
||||
///
|
||||
/// `<a>` elements can take several additional DOM properties with special meanings.
|
||||
@@ -102,7 +121,7 @@ where
|
||||
H: ToHref + Send + Sync + 'static,
|
||||
{
|
||||
fn inner(
|
||||
href: ArcMemo<Option<String>>,
|
||||
href: ArcMemo<String>,
|
||||
target: Option<Oco<'static, str>>,
|
||||
exact: bool,
|
||||
children: Children,
|
||||
@@ -114,23 +133,21 @@ where
|
||||
let is_active = {
|
||||
let href = href.clone();
|
||||
move || {
|
||||
href.read().as_deref().is_some_and(|to| {
|
||||
let path = to.split(['?', '#']).next().unwrap_or_default();
|
||||
current_url.with(|loc| {
|
||||
let loc = loc.path();
|
||||
if exact {
|
||||
loc == path
|
||||
} else {
|
||||
is_active_for(path, loc, strict_trailing_slash)
|
||||
}
|
||||
})
|
||||
let path = normalize_path(&href.read());
|
||||
current_url.with(|loc| {
|
||||
let loc = loc.path();
|
||||
if exact {
|
||||
loc == path
|
||||
} else {
|
||||
is_active_for(&path, loc, strict_trailing_slash)
|
||||
}
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
view! {
|
||||
<a
|
||||
href=move || href.get().unwrap_or_default()
|
||||
href=move || href.get()
|
||||
target=target
|
||||
aria-current=move || if is_active() { Some("page") } else { None }
|
||||
data-noscroll=!scroll
|
||||
@@ -172,9 +189,62 @@ fn is_active_for(
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve `".."` segments in the path. Assume path is either empty or starts with a `'/'``.
|
||||
fn normalize_path(path: &str) -> String {
|
||||
// Return early in the only case where a leading slash
// is allowed to be missing.
|
||||
if path.is_empty() {
|
||||
return String::new();
|
||||
}
|
||||
let mut del = 0;
|
||||
let mut it = path
|
||||
.split(['?', '#'])
|
||||
.next()
|
||||
.unwrap_or_default()
|
||||
.split(['/'])
|
||||
.rev()
|
||||
.peekable();
|
||||
|
||||
let init = if it.peek() == Some(&"..") {
|
||||
String::from("/")
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let mut path = it
|
||||
.filter(|v| {
|
||||
if *v == ".." {
|
||||
del += 1;
|
||||
false
|
||||
} else if *v == "." {
|
||||
false
|
||||
} else if del > 0 {
|
||||
del -= 1;
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
})
|
||||
// We cannot reverse before the fold again, because the filter
// would then run forwards again.
|
||||
.fold(init, |mut p, v| {
|
||||
p.reserve(v.len() + 1);
|
||||
p.insert(0, '/');
|
||||
p.insert_str(0, v);
|
||||
p
|
||||
});
|
||||
path.truncate(path.len().saturating_sub(1));
|
||||
|
||||
// The path starts with '/', giving it an extra empty segment after the split,
// which should not be removed.
|
||||
if !path.starts_with('/') {
|
||||
path.insert(0, '/');
|
||||
}
|
||||
path
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::is_active_for;
|
||||
use super::{is_active_for, normalize_path};
|
||||
|
||||
#[test]
|
||||
fn is_active_for_matched() {
|
||||
@@ -393,4 +463,37 @@ mod tests {
|
||||
// assert!(!is_same_level("/some/", "/some/level/"))
|
||||
// assert!(!is_same_level("/some/", "/some/level/deeper"))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn normalize_path_test() {
|
||||
// Make sure it doesn't touch already normalized urls.
|
||||
assert!(normalize_path("") == "".to_string());
|
||||
assert!(normalize_path("/") == "/".to_string());
|
||||
assert!(normalize_path("/some") == "/some".to_string());
|
||||
assert!(normalize_path("/some/") == "/some/".to_string());
|
||||
|
||||
// Correctly removes ".." segments.
|
||||
assert!(normalize_path("/some/../another") == "/another".to_string());
|
||||
assert!(
|
||||
normalize_path("/one/two/../three/../../four")
|
||||
== "/four".to_string()
|
||||
);
|
||||
|
||||
// Correctly sets trailing slash if last segment is "..".
|
||||
assert!(normalize_path("/one/two/..") == "/one/".to_string());
|
||||
assert!(normalize_path("/one/two/../") == "/one/".to_string());
|
||||
|
||||
// Level outside of the url.
|
||||
assert!(normalize_path("/..") == "/".to_string());
|
||||
assert!(normalize_path("/../") == "/".to_string());
|
||||
|
||||
// Going into negative levels and coming back into the positives.
|
||||
assert!(
|
||||
normalize_path("/one/../../two/three") == "/two/three".to_string()
|
||||
);
|
||||
assert!(
|
||||
normalize_path("/one/../../two/three/")
|
||||
== "/two/three/".to_string()
|
||||
);
|
||||
}
|
||||
}
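Putting this file's pieces together: the active-link check in `<A>` now normalizes the resolved href before comparing it with the current location. A sketch of that flow (illustrative only; `link_is_active` is a hypothetical helper, `normalize_path` and `is_active_for` are the functions defined above, and the final `false` stands in for the `strict_trailing_slash` option):

```rust
// Hypothetical helper showing how <A> decides whether to set aria-current="page".
fn link_is_active(resolved_href: &str, current_path: &str, exact: bool) -> bool {
    // Strip any query string or hash and collapse "." / ".." segments.
    let path = normalize_path(resolved_href); // e.g. "/posts/../about" -> "/about"
    if exact {
        current_path == path
    } else {
        is_active_for(&path, current_path, false)
    }
}
```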
|
||||
|
||||
@@ -67,10 +67,32 @@ impl Url {
|
||||
}
|
||||
|
||||
pub fn hash(&self) -> &str {
|
||||
#[cfg(all(feature = "ssr", any(debug_assertions, leptos_debuginfo)))]
|
||||
{
|
||||
#[cfg(feature = "tracing")]
|
||||
tracing::warn!(
|
||||
"Reading hash on the server can lead to hydration errors."
|
||||
);
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
eprintln!(
|
||||
"Reading hash on the server can lead to hydration errors."
|
||||
);
|
||||
}
|
||||
&self.hash
|
||||
}
|
||||
|
||||
pub fn hash_mut(&mut self) -> &mut String {
|
||||
#[cfg(all(feature = "ssr", any(debug_assertions, leptos_debuginfo)))]
|
||||
{
|
||||
#[cfg(feature = "tracing")]
|
||||
tracing::warn!(
|
||||
"Reading hash on the server can lead to hydration errors."
|
||||
);
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
eprintln!(
|
||||
"Reading hash on the server can lead to hydration errors."
|
||||
);
|
||||
}
|
||||
&mut self.hash
|
||||
}
|
||||
|
||||
@@ -173,7 +195,7 @@ impl Location {
|
||||
let state = state.into();
|
||||
let pathname = Memo::new(move |_| url.with(|url| url.path.clone()));
|
||||
let search = Memo::new(move |_| url.with(|url| url.search.clone()));
|
||||
let hash = Memo::new(move |_| url.with(|url| url.hash.clone()));
|
||||
let hash = Memo::new(move |_| url.with(|url| url.hash().to_string()));
|
||||
let query =
|
||||
Memo::new(move |_| url.with(|url| url.search_params.clone()));
|
||||
Location {
|
||||
|
||||
@@ -4,9 +4,9 @@ pub fn resolve_path<'a>(
|
||||
base: &'a str,
|
||||
path: &'a str,
|
||||
from: Option<&'a str>,
|
||||
) -> Option<Cow<'a, str>> {
|
||||
) -> Cow<'a, str> {
|
||||
if has_scheme(path) {
|
||||
Some(path.into())
|
||||
path.into()
|
||||
} else {
|
||||
let base_path = normalize(base, false);
|
||||
let from_path = from.map(|from| normalize(from, false));
|
||||
@@ -25,7 +25,7 @@ pub fn resolve_path<'a>(
|
||||
let result_empty = result.is_empty();
|
||||
let prefix = if result_empty { "/".into() } else { result };
|
||||
|
||||
Some(prefix + normalize(path, result_empty))
|
||||
prefix + normalize(path, result_empty)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -65,6 +65,9 @@ where
|
||||
// TODO loading fallback
|
||||
#[allow(clippy::type_complexity)]
|
||||
view: Rc<RefCell<EitherOf3State<(), Fal, AnyView>>>,
|
||||
// held to keep the Owner alive until the router is dropped
|
||||
#[allow(unused)]
|
||||
outer_owner: Owner,
|
||||
}
|
||||
|
||||
impl<Loc, Defs, FalFn, Fal> Render for NestedRoutesView<Loc, Defs, FalFn>
|
||||
@@ -106,10 +109,10 @@ where
|
||||
base,
|
||||
&mut loaders,
|
||||
&mut outlets,
|
||||
&outer_owner,
|
||||
);
|
||||
drop(url);
|
||||
outer_owner.with(|| EitherOf3::C(Outlet().into_any()))
|
||||
|
||||
EitherOf3::C(top_level_outlet(&outlets, &outer_owner))
|
||||
}
|
||||
};
|
||||
|
||||
@@ -130,6 +133,7 @@ where
|
||||
current_url,
|
||||
outlets,
|
||||
view,
|
||||
outer_owner,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -175,7 +179,6 @@ where
|
||||
&mut preloaders,
|
||||
&mut full_loaders,
|
||||
&mut state.outlets,
|
||||
&self.outer_owner,
|
||||
self.set_is_routing.is_some(),
|
||||
0,
|
||||
);
|
||||
@@ -212,17 +215,14 @@ where
|
||||
|
||||
// if it was on the fallback, show the view instead
|
||||
if matches!(state.view.borrow().state, EitherOf3::B(_)) {
|
||||
self.outer_owner.with(|| {
|
||||
EitherOf3::<(), Fal, AnyView>::C(Outlet().into_any())
|
||||
.rebuild(&mut *state.view.borrow_mut());
|
||||
})
|
||||
EitherOf3::<(), Fal, AnyView>::C(top_level_outlet(
|
||||
&state.outlets,
|
||||
&self.outer_owner,
|
||||
))
|
||||
.rebuild(&mut *state.view.borrow_mut());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(outlet) = state.outlets.first() {
|
||||
self.outer_owner.with(|| outlet.provide_contexts());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -338,7 +338,6 @@ where
|
||||
base,
|
||||
&mut loaders,
|
||||
&mut outlets,
|
||||
&outer_owner,
|
||||
);
|
||||
|
||||
// outlets will not send their views if the loaders are never polled
|
||||
@@ -348,7 +347,7 @@ where
|
||||
.now_or_never()
|
||||
.expect("async routes not supported in SSR");
|
||||
|
||||
outer_owner.with(|| Either::Right(Outlet().into_any()))
|
||||
Either::Right(top_level_outlet(&outlets, &outer_owner))
|
||||
}
|
||||
};
|
||||
view.to_html_with_buf(
|
||||
@@ -392,7 +391,6 @@ where
|
||||
base,
|
||||
&mut loaders,
|
||||
&mut outlets,
|
||||
&outer_owner,
|
||||
);
|
||||
|
||||
// outlets will not send their views if the loaders are never polled
|
||||
@@ -402,7 +400,7 @@ where
|
||||
.now_or_never()
|
||||
.expect("async routes not supported in SSR");
|
||||
|
||||
outer_owner.with(|| Either::Right(Outlet().into_any()))
|
||||
Either::Right(top_level_outlet(&outlets, &outer_owner))
|
||||
}
|
||||
};
|
||||
view.to_html_async_with_buf::<OUT_OF_ORDER>(
|
||||
@@ -446,7 +444,6 @@ where
|
||||
base,
|
||||
&mut loaders,
|
||||
&mut outlets,
|
||||
&outer_owner,
|
||||
);
|
||||
drop(url);
|
||||
|
||||
@@ -454,7 +451,7 @@ where
|
||||
join_all(mem::take(&mut loaders))
|
||||
.now_or_never()
|
||||
.expect("async routes not supported in SSR");
|
||||
outer_owner.with(|| EitherOf3::C(Outlet().into_any()))
|
||||
EitherOf3::C(top_level_outlet(&outlets, &outer_owner))
|
||||
}
|
||||
}
|
||||
.hydrate::<FROM_SERVER>(cursor, position),
|
||||
@@ -465,6 +462,7 @@ where
|
||||
current_url,
|
||||
outlets,
|
||||
view,
|
||||
outer_owner,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -480,12 +478,16 @@ pub(crate) struct RouteContext {
|
||||
trigger: ArcTrigger,
|
||||
url: ArcRwSignal<Url>,
|
||||
params: ArcRwSignal<ParamsMap>,
|
||||
owner: Owner,
|
||||
pub matched: ArcRwSignal<String>,
|
||||
base: Option<Oco<'static, str>>,
|
||||
view_fn: Arc<Mutex<OutletViewFn>>,
|
||||
owner: Arc<Mutex<Option<Owner>>>,
|
||||
child: ChildRoute,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct ChildRoute(Arc<Mutex<Option<RouteContext>>>);
|
||||
|
||||
impl Debug for RouteContext {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("RouteContext")
|
||||
@@ -493,19 +495,12 @@ impl Debug for RouteContext {
|
||||
.field("trigger", &self.trigger)
|
||||
.field("url", &self.url)
|
||||
.field("params", &self.params)
|
||||
.field("owner", &self.owner.debug_id())
|
||||
.field("matched", &self.matched)
|
||||
.field("base", &self.base)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl RouteContext {
|
||||
fn provide_contexts(&self) {
|
||||
provide_context(self.clone());
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for RouteContext {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
@@ -513,10 +508,11 @@ impl Clone for RouteContext {
|
||||
id: self.id,
|
||||
trigger: self.trigger.clone(),
|
||||
params: self.params.clone(),
|
||||
owner: self.owner.clone(),
|
||||
matched: self.matched.clone(),
|
||||
base: self.base.clone(),
|
||||
view_fn: Arc::clone(&self.view_fn),
|
||||
owner: Arc::clone(&self.owner),
|
||||
child: self.child.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -528,7 +524,6 @@ trait AddNestedRoute {
|
||||
base: Option<Oco<'static, str>>,
|
||||
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
|
||||
outlets: &mut Vec<RouteContext>,
|
||||
parent: &Owner,
|
||||
);
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
@@ -538,9 +533,8 @@ trait AddNestedRoute {
|
||||
base: Option<Oco<'static, str>>,
|
||||
items: &mut usize,
|
||||
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
|
||||
full_loaders: &mut Vec<oneshot::Receiver<()>>,
|
||||
full_loaders: &mut Vec<oneshot::Receiver<Option<Owner>>>,
|
||||
outlets: &mut Vec<RouteContext>,
|
||||
parent: &Owner,
|
||||
set_is_routing: bool,
|
||||
level: u8,
|
||||
) -> u8;
|
||||
@@ -556,15 +550,9 @@ where
|
||||
base: Option<Oco<'static, str>>,
|
||||
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
|
||||
outlets: &mut Vec<RouteContext>,
|
||||
parent: &Owner,
|
||||
) {
|
||||
let orig_url = url;
|
||||
|
||||
// each Outlet gets its own owner, so it can inherit context from its parent route,
|
||||
// a new owner will be constructed if a different route replaces this one in the outlet,
|
||||
// so that any signals it creates or context it provides will be cleaned up
|
||||
let owner = parent.child();
|
||||
|
||||
// the params signal can be updated to allow the same outlet to update to changes in the
|
||||
// params, even if there's not a route match change
|
||||
let params = ArcRwSignal::new(self.to_params().into_iter().collect());
|
||||
@@ -622,71 +610,82 @@ where
|
||||
url,
|
||||
trigger: trigger.clone(),
|
||||
params,
|
||||
owner: owner.clone(),
|
||||
matched,
|
||||
view_fn: Arc::new(Mutex::new(Box::new(|_owner| {
|
||||
Suspend::new(Box::pin(async { ().into_any() }))
|
||||
}))),
|
||||
base: base.clone(),
|
||||
child: ChildRoute(Arc::new(Mutex::new(None))),
|
||||
owner: Arc::new(Mutex::new(None)),
|
||||
};
|
||||
if !outlets.is_empty() {
|
||||
let prev_index = outlets.len().saturating_sub(1);
|
||||
*outlets[prev_index].child.0.lock().or_poisoned() =
|
||||
Some(outlet.clone());
|
||||
}
|
||||
outlets.push(outlet.clone());
|
||||
|
||||
// send the initial view through the channel, and recurse through the children
|
||||
let (view, child) = self.into_view_and_child();
|
||||
|
||||
loaders.push(Box::pin(owner.with(|| {
|
||||
ScopedFuture::new({
|
||||
let owner = outlet.owner.clone();
|
||||
let url = outlet.url.clone();
|
||||
let matched = Matched(matched_including_parents);
|
||||
let view_fn = Arc::clone(&outlet.view_fn);
|
||||
async move {
|
||||
provide_context(params_including_parents);
|
||||
provide_context(url);
|
||||
provide_context(matched.clone());
|
||||
view.preload().await;
|
||||
*view_fn.lock().or_poisoned() =
|
||||
Box::new(move |owner_where_used| {
|
||||
owner.join_contexts(&owner_where_used);
|
||||
let view = view.clone();
|
||||
owner.with({
|
||||
let matched = matched.clone();
|
||||
move || {
|
||||
Suspend::new(Box::pin(async move {
|
||||
let view = SendWrapper::new(
|
||||
ScopedFuture::new(view.choose()),
|
||||
);
|
||||
let view = view.await;
|
||||
let view = MatchedRoute(
|
||||
matched.0.get_untracked(),
|
||||
view,
|
||||
);
|
||||
OwnedView::new(view).into_any()
|
||||
})
|
||||
as Pin<
|
||||
Box<
|
||||
dyn Future<Output = AnyView>
|
||||
+ Send,
|
||||
>,
|
||||
>)
|
||||
}
|
||||
})
|
||||
});
|
||||
trigger
|
||||
}
|
||||
})
|
||||
})));
|
||||
loaders.push(Box::pin(ScopedFuture::new({
|
||||
let url = outlet.url.clone();
|
||||
let matched = Matched(matched_including_parents);
|
||||
let view_fn = Arc::clone(&outlet.view_fn);
|
||||
let route_owner = Arc::clone(&outlet.owner);
|
||||
let outlet = outlet.clone();
|
||||
let params = params_including_parents.clone();
|
||||
let url = url.clone();
|
||||
let matched = matched.clone();
|
||||
async move {
|
||||
view.preload().await;
|
||||
let child = outlet.child.clone();
|
||||
*view_fn.lock().or_poisoned() =
|
||||
Box::new(move |owner_where_used| {
|
||||
*route_owner.lock().or_poisoned() =
|
||||
Some(owner_where_used.clone());
|
||||
let view = view.clone();
|
||||
let child = child.clone();
|
||||
let params = params.clone();
|
||||
let url = url.clone();
|
||||
let matched = matched.clone();
|
||||
owner_where_used.with({
|
||||
let matched = matched.clone();
|
||||
move || {
|
||||
let child = child.clone();
|
||||
Suspend::new(Box::pin(async move {
|
||||
provide_context(child.clone());
|
||||
provide_context(params.clone());
|
||||
provide_context(url.clone());
|
||||
provide_context(matched.clone());
|
||||
let view = SendWrapper::new(
|
||||
ScopedFuture::new(view.choose()),
|
||||
);
|
||||
let view = view.await;
|
||||
let view = MatchedRoute(
|
||||
matched.0.get_untracked(),
|
||||
view,
|
||||
);
|
||||
|
||||
// and share the outlet with the parent via context
|
||||
// we share it with the *parent* because the <Outlet/> is rendered in or below the parent
|
||||
// wherever it appears, <Outlet/> will look for the closest RouteContext
|
||||
parent.with(|| outlet.provide_contexts());
|
||||
OwnedView::new(view).into_any()
|
||||
})
|
||||
as Pin<
|
||||
Box<
|
||||
dyn Future<Output = AnyView> + Send,
|
||||
>,
|
||||
>)
|
||||
}
|
||||
})
|
||||
});
|
||||
trigger
|
||||
}
|
||||
})));
|
||||
|
||||
// recursively continue building the tree
|
||||
// this is important because to build the view, we need access to the outlet
|
||||
// and the outlet will be returned from building this child
|
||||
if let Some(child) = child {
|
||||
child.build_nested_route(orig_url, base, loaders, outlets, &owner);
|
||||
child.build_nested_route(orig_url, base, loaders, outlets);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -697,9 +696,8 @@ where
|
||||
base: Option<Oco<'static, str>>,
|
||||
items: &mut usize,
|
||||
preloaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
|
||||
full_loaders: &mut Vec<oneshot::Receiver<()>>,
|
||||
full_loaders: &mut Vec<oneshot::Receiver<Option<Owner>>>,
|
||||
outlets: &mut Vec<RouteContext>,
|
||||
parent: &Owner,
|
||||
set_is_routing: bool,
|
||||
level: u8,
|
||||
) -> u8 {
|
||||
@@ -708,11 +706,17 @@ where
|
||||
.take(*items)
|
||||
.map(|route| (route.params.clone(), route.matched.clone()))
|
||||
.unzip();
|
||||
|
||||
if outlets.get(*items).is_some() && *items > 0 {
|
||||
*outlets[*items - 1].child.0.lock().or_poisoned() =
|
||||
Some(outlets[*items].clone());
|
||||
}
|
||||
|
||||
let current = outlets.get_mut(*items);
|
||||
match current {
|
||||
// if there's nothing currently in the routes at this point, build from here
|
||||
None => {
|
||||
self.build_nested_route(url, base, preloaders, outlets, parent);
|
||||
self.build_nested_route(url, base, preloaders, outlets);
|
||||
level
|
||||
}
|
||||
Some(current) => {
|
||||
@@ -779,70 +783,74 @@ where
|
||||
|
||||
// assign a new owner, so that contexts and signals owned by the previous route
|
||||
// in this outlet can be dropped
|
||||
let mut old_owner =
|
||||
Some(mem::replace(&mut current.owner, parent.child()));
|
||||
let owner = current.owner.clone();
|
||||
let (full_tx, full_rx) = oneshot::channel();
|
||||
let full_tx = Mutex::new(Some(full_tx));
|
||||
full_loaders.push(full_rx);
|
||||
let outlet = current.clone();
|
||||
|
||||
// send the new view, with the new owner, through the channel to the Outlet,
|
||||
// and notify the trigger so that the reactive view inside the Outlet tracking
|
||||
// the trigger runs again
|
||||
preloaders.push(Box::pin(owner.with(|| {
|
||||
ScopedFuture::new({
|
||||
let owner = owner.clone();
|
||||
let trigger = current.trigger.clone();
|
||||
let url = current.url.clone();
|
||||
let matched = Matched(matched_including_parents);
|
||||
let view_fn = Arc::clone(¤t.view_fn);
|
||||
async move {
|
||||
provide_context(params_including_parents);
|
||||
provide_context(url);
|
||||
provide_context(matched);
|
||||
view.preload().await;
|
||||
*view_fn.lock().or_poisoned() =
|
||||
Box::new(move |owner_where_used| {
|
||||
owner.join_contexts(&owner_where_used);
|
||||
let owner = owner.clone();
|
||||
let view = view.clone();
|
||||
let full_tx =
|
||||
full_tx.lock().or_poisoned().take();
|
||||
let old_owner = old_owner.take();
|
||||
Suspend::new(Box::pin(async move {
|
||||
let view = SendWrapper::new(
|
||||
owner.with(|| {
|
||||
ScopedFuture::new(
|
||||
async move {
|
||||
if set_is_routing {
|
||||
AsyncTransition::run(|| view.choose()).await
|
||||
} else {
|
||||
view.choose().await
|
||||
}
|
||||
}
|
||||
)
|
||||
}),
|
||||
);
|
||||
let view = view.await;
|
||||
if let Some(old_owner) = old_owner {
|
||||
old_owner.cleanup();
|
||||
}
|
||||
preloaders.push(Box::pin(ScopedFuture::new({
|
||||
let trigger = current.trigger.clone();
|
||||
let url = current.url.clone();
|
||||
let matched = Matched(matched_including_parents);
|
||||
let view_fn = Arc::clone(¤t.view_fn);
|
||||
let route_owner = Arc::clone(¤t.owner);
|
||||
let child = outlet.child.clone();
|
||||
async move {
|
||||
view.preload().await;
|
||||
let child = child.clone();
|
||||
*view_fn.lock().or_poisoned() =
|
||||
Box::new(move |owner_where_used| {
|
||||
let prev_owner = route_owner
|
||||
.lock()
|
||||
.or_poisoned()
|
||||
.replace(owner_where_used.clone());
|
||||
let view = view.clone();
|
||||
let full_tx =
|
||||
full_tx.lock().or_poisoned().take();
|
||||
let child = child.clone();
|
||||
let params =
|
||||
params_including_parents.clone();
|
||||
let url = url.clone();
|
||||
let matched = matched.clone();
|
||||
Suspend::new(Box::pin(async move {
|
||||
let view = SendWrapper::new(
|
||||
owner_where_used.with(|| {
|
||||
provide_context(child.clone());
|
||||
provide_context(params);
|
||||
provide_context(url);
|
||||
provide_context(matched);
|
||||
ScopedFuture::new(async move {
|
||||
if set_is_routing {
|
||||
AsyncTransition::run(
|
||||
|| view.choose(),
|
||||
)
|
||||
.await
|
||||
} else {
|
||||
view.choose().await
|
||||
}
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
if let Some(tx) = full_tx {
|
||||
_ = tx.send(());
|
||||
}
|
||||
owner.with(|| {
|
||||
OwnedView::new(view).into_any()
|
||||
})
|
||||
}))
|
||||
});
|
||||
let view = view.await;
|
||||
|
||||
drop(old_params);
|
||||
drop(old_url);
|
||||
drop(old_matched);
|
||||
trigger
|
||||
}
|
||||
})
|
||||
if let Some(tx) = full_tx {
|
||||
_ = tx.send(prev_owner);
|
||||
}
|
||||
owner_where_used.with(|| {
|
||||
OwnedView::new(view).into_any()
|
||||
})
|
||||
}))
|
||||
});
|
||||
|
||||
drop(old_params);
|
||||
drop(old_url);
|
||||
drop(old_matched);
|
||||
trigger
|
||||
}
|
||||
})));
|
||||
|
||||
// remove all the items lower in the tree
|
||||
@@ -851,9 +859,10 @@ where
|
||||
|
||||
// if this child has matches, then rebuild the lower section of the tree
|
||||
if let Some(child) = child {
|
||||
child.build_nested_route(
|
||||
url, base, preloaders, outlets, &owner,
|
||||
);
|
||||
child
|
||||
.build_nested_route(url, base, preloaders, outlets);
|
||||
} else {
|
||||
*outlets[*items].child.0.lock().or_poisoned() = None;
|
||||
}
|
||||
|
||||
return level;
|
||||
@@ -865,7 +874,6 @@ where
|
||||
current.params.set(new_params);
|
||||
current.url.set(url.to_owned());
|
||||
if let Some(child) = child {
|
||||
let owner = current.owner.clone();
|
||||
*items += 1;
|
||||
child.rebuild_nested_route(
|
||||
url,
|
||||
@@ -874,11 +882,11 @@ where
|
||||
preloaders,
|
||||
full_loaders,
|
||||
outlets,
|
||||
&owner,
|
||||
set_is_routing,
|
||||
level + 1,
|
||||
)
|
||||
} else {
|
||||
*current.child.0.lock().or_poisoned() = None;
|
||||
level
|
||||
}
|
||||
}
|
||||
@@ -911,20 +919,38 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
fn top_level_outlet(outlets: &[RouteContext], outer_owner: &Owner) -> AnyView {
|
||||
let outlet = outlets.first().unwrap();
|
||||
let child = outlet.child.clone();
|
||||
let view_fn = outlet.view_fn.clone();
|
||||
let trigger = outlet.trigger.clone();
|
||||
outer_owner.clone().with(|| {
|
||||
provide_context(child.clone());
|
||||
let outer_owner = outer_owner.clone();
|
||||
(move || {
|
||||
trigger.track();
|
||||
let mut view_fn = view_fn.lock().or_poisoned();
|
||||
view_fn(outer_owner.child())
|
||||
})
|
||||
.into_any()
|
||||
})
|
||||
}
|
||||
|
||||
/// Displays the child route nested in a parent route, allowing you to control exactly where
|
||||
/// that child route is displayed. Renders nothing if there is no nested child.
|
||||
#[component]
|
||||
pub fn Outlet() -> impl RenderHtml
|
||||
where
|
||||
{
|
||||
move || {
|
||||
let ctx = use_context::<RouteContext>()
|
||||
.expect("<Outlet/> used without RouteContext being provided.");
|
||||
let RouteContext {
|
||||
trigger, view_fn, ..
|
||||
} = ctx;
|
||||
trigger.track();
|
||||
let mut view_fn = view_fn.lock().or_poisoned();
|
||||
view_fn(Owner::current().unwrap())
|
||||
}
|
||||
let ChildRoute(child) = use_context()
|
||||
.expect("<Outlet/> used without RouteContext being provided.");
|
||||
let child = child.lock().or_poisoned().clone();
|
||||
let outer_owner = Owner::current().unwrap();
|
||||
child.map(|child| {
|
||||
move || {
|
||||
child.trigger.track();
|
||||
let mut view_fn = child.view_fn.lock().or_poisoned();
|
||||
view_fn(outer_owner.child())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router_macro"
|
||||
version = "0.8.2"
|
||||
version = "0.8.4"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -13,10 +13,10 @@ edition.workspace = true
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
proc-macro-error2 = { default-features = false , workspace = true }
|
||||
proc-macro-error2 = { default-features = false, workspace = true }
|
||||
proc-macro2 = { workspace = true, default-features = true }
|
||||
quote = { workspace = true, default-features = true }
|
||||
syn = { features = ["full"] , workspace = true, default-features = true }
|
||||
syn = { features = ["full"], workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
leptos_router = { path = "../router" }
|
||||
|
||||
@@ -29,11 +29,10 @@ thiserror = { workspace = true, default-features = true }
|
||||
# registration system
|
||||
inventory = { optional = true, workspace = true, default-features = true }
|
||||
dashmap = { workspace = true, default-features = true }
|
||||
once_cell = { workspace = true, default-features = true }
|
||||
|
||||
## servers
|
||||
# actix
|
||||
actix-web = { optional = true, workspace = true, default-features = true }
|
||||
actix-web = { optional = true, workspace = true, default-features = false }
|
||||
actix-ws = { optional = true, workspace = true, default-features = true }
|
||||
|
||||
# axum
|
||||
@@ -109,7 +108,8 @@ axum-no-default = [
|
||||
"dep:tower-layer",
|
||||
]
|
||||
form-redirects = []
|
||||
actix = ["ssr", "dep:actix-web", "dep:actix-ws", "dep:send_wrapper"]
|
||||
actix-no-default = ["ssr", "dep:actix-web", "dep:actix-ws", "dep:send_wrapper"]
|
||||
actix = ["actix-web/default", "actix-no-default"]
|
||||
axum = ["axum/default", "axum-no-default", "axum/ws", "dep:tokio"]
|
||||
browser = [
|
||||
"dep:gloo-net",
|
||||
|
||||
@@ -120,7 +120,7 @@ pub mod request;
|
||||
/// Types and traits for HTTP responses.
|
||||
pub mod response;
|
||||
|
||||
#[cfg(feature = "actix")]
|
||||
#[cfg(feature = "actix-no-default")]
|
||||
#[doc(hidden)]
|
||||
pub use ::actix_web as actix_export;
|
||||
#[cfg(feature = "axum-no-default")]
|
||||
@@ -151,7 +151,6 @@ use error::{FromServerFnError, ServerFnErrorErr};
|
||||
use futures::{pin_mut, SinkExt, Stream, StreamExt};
|
||||
use http::Method;
|
||||
use middleware::{BoxedService, Layer, Service};
|
||||
use once_cell::sync::Lazy;
|
||||
use redirect::call_redirect_hook;
|
||||
use request::Req;
|
||||
use response::{ClientRes, Res, TryRes};
|
||||
@@ -169,7 +168,7 @@ use std::{
|
||||
marker::PhantomData,
|
||||
ops::{Deref, DerefMut},
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
sync::{Arc, LazyLock},
|
||||
};
|
||||
#[doc(hidden)]
|
||||
pub use xxhash_rust;
|
||||
@@ -862,7 +861,7 @@ pub use inventory;
|
||||
#[macro_export]
|
||||
macro_rules! initialize_server_fn_map {
|
||||
($req:ty, $res:ty) => {
|
||||
once_cell::sync::Lazy::new(|| {
|
||||
std::sync::LazyLock::new(|| {
|
||||
$crate::inventory::iter::<ServerFnTraitObj<$req, $res>>
|
||||
.into_iter()
|
||||
.map(|obj| {
|
||||
@@ -981,7 +980,7 @@ impl<Req, Res> Clone for ServerFnTraitObj<Req, Res> {
|
||||
|
||||
#[allow(unused)] // used by server integrations
|
||||
type LazyServerFnMap<Req, Res> =
|
||||
Lazy<DashMap<(String, Method), ServerFnTraitObj<Req, Res>>>;
|
||||
LazyLock<DashMap<(String, Method), ServerFnTraitObj<Req, Res>>>;
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
impl<Req: 'static, Res: 'static> inventory::Collect
|
||||
@@ -1119,7 +1118,7 @@ pub mod axum {
|
||||
}
|
||||
|
||||
/// Actix integration.
|
||||
#[cfg(feature = "actix")]
|
||||
#[cfg(feature = "actix-no-default")]
|
||||
pub mod actix {
|
||||
use crate::{
|
||||
error::FromServerFnError, middleware::BoxedService,
|
||||
|
||||
@@ -123,7 +123,7 @@ mod axum {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "actix")]
|
||||
#[cfg(feature = "actix-no-default")]
|
||||
mod actix {
|
||||
use crate::{
|
||||
error::ServerFnErrorErr,
|
||||
|
||||
@@ -75,6 +75,13 @@ impl DerefMut for BrowserRequest {
|
||||
#[derive(Debug)]
|
||||
pub struct BrowserFormData(pub(crate) SendWrapper<FormData>);
|
||||
|
||||
impl BrowserFormData {
|
||||
/// Returns the raw `web_sys::FormData` struct.
|
||||
pub fn take(self) -> FormData {
|
||||
self.0.take()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FormData> for BrowserFormData {
|
||||
fn from(value: FormData) -> Self {
|
||||
Self(SendWrapper::new(value))
|
||||
|
||||
@@ -4,7 +4,7 @@ use http::Method;
|
||||
use std::{borrow::Cow, future::Future};
|
||||
|
||||
/// Request types for Actix.
|
||||
#[cfg(feature = "actix")]
|
||||
#[cfg(feature = "actix-no-default")]
|
||||
pub mod actix;
|
||||
/// Request types for Axum.
|
||||
#[cfg(feature = "axum-no-default")]
|
||||
|
||||
@@ -5,14 +5,14 @@ use crate::{
|
||||
};
|
||||
use bytes::Bytes;
|
||||
use futures::{Stream, StreamExt};
|
||||
use once_cell::sync::Lazy;
|
||||
use reqwest::{
|
||||
header::{ACCEPT, CONTENT_TYPE},
|
||||
Body,
|
||||
};
|
||||
pub use reqwest::{multipart::Form, Client, Method, Request, Url};
|
||||
use std::sync::LazyLock;
|
||||
|
||||
pub(crate) static CLIENT: Lazy<Client> = Lazy::new(Client::new);
|
||||
pub(crate) static CLIENT: LazyLock<Client> = LazyLock::new(Client::new);
|
||||
|
||||
impl<E> ClientReq<E> for Request
|
||||
where
|
||||
|
||||
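This file is part of a wider move in these commits from `once_cell` to the standard library's lazy types. A minimal before/after sketch of that migration, assuming Rust 1.80+ (where `std::sync::LazyLock` is stable); `expensive_init` is an illustrative stand-in for whatever the static actually builds:

```rust
use std::sync::LazyLock;

// Before: once_cell::sync::Lazy<String>, initialized via Lazy::new(expensive_init).
// After: same semantics with std only: initialized on first access, thread-safe.
static CONFIG: LazyLock<String> = LazyLock::new(expensive_init);

fn expensive_init() -> String {
    // Stand-in for the real initialization (e.g. building an HTTP client).
    String::from("initialized once")
}

fn main() {
    // The first dereference runs `expensive_init`; later ones reuse the cached value.
    println!("{}", *CONFIG);
}
```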
@@ -1,5 +1,5 @@
|
||||
/// Response types for Actix.
|
||||
#[cfg(feature = "actix")]
|
||||
#[cfg(feature = "actix-no-default")]
|
||||
pub mod actix;
|
||||
/// Response types for the browser.
|
||||
#[cfg(feature = "browser")]
|
||||
|
||||
@@ -1,3 +1,14 @@
|
||||
error[E0277]: () is not a `Result` or aliased `Result`. Server functions must return a `Result` or aliased `Result`.
|
||||
--> tests/invalid/empty_return.rs:3:1
|
||||
|
|
||||
3 | #[server]
|
||||
| ^^^^^^^^^ Must return a `Result` or aliased `Result`.
|
||||
|
|
||||
= help: the trait `ServerFnMustReturnResult` is not implemented for `()`
|
||||
= note: If you are trying to return an alias of `Result`, you must also implement `FromServerFnError` for the error type.
|
||||
= help: the trait `ServerFnMustReturnResult` is implemented for `Result<T, E>`
|
||||
= note: this error originates in the attribute macro `server` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error[E0277]: () is not a `Result` or aliased `Result`. Server functions must return a `Result` or aliased `Result`.
|
||||
--> tests/invalid/empty_return.rs:3:1
|
||||
|
|
||||
@@ -16,22 +27,11 @@ error[E0271]: expected `impl Future<Output = ()>` to be a future that resolves t
|
||||
|
|
||||
= note: expected enum `Result<_, _>`
|
||||
found unit type `()`
|
||||
note: required by a bound in `ServerFn::{anon_assoc#0}`
|
||||
note: required by a bound in `ServerFn::run_body::{anon_assoc#0}`
|
||||
--> src/lib.rs
|
||||
|
|
||||
| ) -> impl Future<Output = Result<Self::Output, Self::Error>> + Send;
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `ServerFn::{anon_assoc#0}`
|
||||
|
||||
error[E0277]: () is not a `Result` or aliased `Result`. Server functions must return a `Result` or aliased `Result`.
|
||||
--> tests/invalid/empty_return.rs:3:1
|
||||
|
|
||||
3 | #[server]
|
||||
| ^^^^^^^^^ Must return a `Result` or aliased `Result`.
|
||||
|
|
||||
= help: the trait `ServerFnMustReturnResult` is not implemented for `()`
|
||||
= note: If you are trying to return an alias of `Result`, you must also implement `FromServerFnError` for the error type.
|
||||
= help: the trait `ServerFnMustReturnResult` is implemented for `Result<T, E>`
|
||||
= note: this error originates in the attribute macro `server` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `ServerFn::run_body::{anon_assoc#0}`
|
||||
|
||||
error[E0277]: () is not a `Result` or aliased `Result`. Server functions must return a `Result` or aliased `Result`.
|
||||
--> tests/invalid/empty_return.rs:3:1
|
||||
|
||||
@@ -1,3 +1,14 @@
|
||||
error[E0277]: CustomError is not a `Result` or aliased `Result`. Server functions must return a `Result` or aliased `Result`.
|
||||
--> tests/invalid/not_result.rs:25:1
|
||||
|
|
||||
25 | #[server]
|
||||
| ^^^^^^^^^ Must return a `Result` or aliased `Result`.
|
||||
|
|
||||
= help: the trait `ServerFnMustReturnResult` is not implemented for `CustomError`
|
||||
= note: If you are trying to return an alias of `Result`, you must also implement `FromServerFnError` for the error type.
|
||||
= help: the trait `ServerFnMustReturnResult` is implemented for `Result<T, E>`
|
||||
= note: this error originates in the attribute macro `server` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error[E0277]: CustomError is not a `Result` or aliased `Result`. Server functions must return a `Result` or aliased `Result`.
|
||||
--> tests/invalid/not_result.rs:25:1
|
||||
|
|
||||
@@ -16,22 +27,11 @@ error[E0271]: expected `impl Future<Output = CustomError>` to be a future that r
|
||||
|
|
||||
= note: expected enum `Result<_, _>`
|
||||
found enum `CustomError`
|
||||
note: required by a bound in `ServerFn::{anon_assoc#0}`
|
||||
note: required by a bound in `ServerFn::run_body::{anon_assoc#0}`
|
||||
--> src/lib.rs
|
||||
|
|
||||
| ) -> impl Future<Output = Result<Self::Output, Self::Error>> + Send;
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `ServerFn::{anon_assoc#0}`
|
||||
|
||||
error[E0277]: CustomError is not a `Result` or aliased `Result`. Server functions must return a `Result` or aliased `Result`.
|
||||
--> tests/invalid/not_result.rs:25:1
|
||||
|
|
||||
25 | #[server]
|
||||
| ^^^^^^^^^ Must return a `Result` or aliased `Result`.
|
||||
|
|
||||
= help: the trait `ServerFnMustReturnResult` is not implemented for `CustomError`
|
||||
= note: If you are trying to return an alias of `Result`, you must also implement `FromServerFnError` for the error type.
|
||||
= help: the trait `ServerFnMustReturnResult` is implemented for `Result<T, E>`
|
||||
= note: this error originates in the attribute macro `server` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `ServerFn::run_body::{anon_assoc#0}`
|
||||
|
||||
error[E0277]: CustomError is not a `Result` or aliased `Result`. Server functions must return a `Result` or aliased `Result`.
|
||||
--> tests/invalid/not_result.rs:25:1
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "tachys"
|
||||
version = "0.2.3"
|
||||
version = "0.2.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -21,7 +21,6 @@ reactive_stores = { workspace = true, optional = true }
|
||||
slotmap = { optional = true, workspace = true, default-features = true }
|
||||
oco_ref = { workspace = true, optional = true }
|
||||
async-trait = { workspace = true, default-features = true }
|
||||
once_cell = { workspace = true, default-features = true }
|
||||
paste = { workspace = true, default-features = true }
|
||||
erased = { workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, default-features = true }
|
||||
|
||||
@@ -709,7 +709,7 @@ where
|
||||
|
||||
buf.push('<');
|
||||
buf.push_str(E::TAG);
|
||||
<At as ToTemplate>::to_template(
|
||||
<At as ToTemplate>::to_template_attribute(
|
||||
buf,
|
||||
&mut class,
|
||||
&mut style,
|
||||
|
||||
@@ -125,14 +125,14 @@ impl Render for InertElement {
|
||||
type State = InertElementState;
|
||||
|
||||
fn build(self) -> Self::State {
|
||||
let el = Rndr::create_element_from_html(&self.html);
|
||||
let el = Rndr::create_element_from_html(self.html.clone());
|
||||
InertElementState(self.html, el)
|
||||
}
|
||||
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
let InertElementState(prev, el) = state;
|
||||
if &self.html != prev {
|
||||
let mut new_el = Rndr::create_element_from_html(&self.html);
|
||||
let mut new_el = Rndr::create_element_from_html(self.html.clone());
|
||||
el.insert_before_this(&mut new_el);
|
||||
el.unmount();
|
||||
*el = new_el;
|
||||
|
||||
@@ -9,9 +9,12 @@ use crate::{
|
||||
view::{Mountable, ToTemplate},
|
||||
};
|
||||
use linear_map::LinearMap;
|
||||
use once_cell::unsync::Lazy;
|
||||
use rustc_hash::FxHashSet;
|
||||
use std::{any::TypeId, borrow::Cow, cell::RefCell};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
borrow::Cow,
|
||||
cell::{LazyCell, RefCell},
|
||||
};
|
||||
use wasm_bindgen::{intern, prelude::Closure, JsCast, JsValue};
|
||||
use web_sys::{AddEventListenerOptions, Comment, HtmlTemplateElement};
|
||||
|
||||
@@ -21,6 +24,7 @@ pub struct Dom;
|
||||
|
||||
thread_local! {
|
||||
pub(crate) static GLOBAL_EVENTS: RefCell<FxHashSet<Cow<'static, str>>> = Default::default();
|
||||
pub static TEMPLATE_CACHE: RefCell<Vec<(Cow<'static, str>, web_sys::Element)>> = Default::default();
|
||||
}
|
||||
|
||||
pub type Node = web_sys::Node;
|
||||
@@ -57,7 +61,7 @@ impl Dom {
|
||||
|
||||
pub fn create_placeholder() -> Placeholder {
|
||||
thread_local! {
|
||||
static COMMENT: Lazy<Comment> = Lazy::new(|| {
|
||||
static COMMENT: LazyCell<Comment> = LazyCell::new(|| {
|
||||
document().create_comment("")
|
||||
});
|
||||
}
|
||||
@@ -281,9 +285,10 @@ impl Dom {
|
||||
let cb = send_wrapper::SendWrapper::new(cb);
|
||||
move |el: &Element| {
|
||||
or_debug!(
|
||||
el.remove_event_listener_with_callback(
|
||||
el.remove_event_listener_with_callback_and_bool(
|
||||
intern(&name),
|
||||
cb.as_ref().unchecked_ref()
|
||||
cb.as_ref().unchecked_ref(),
|
||||
true
|
||||
),
|
||||
el,
|
||||
"removeEventListener"
|
||||
@@ -451,8 +456,8 @@ impl Dom {
|
||||
V: ToTemplate + 'static,
|
||||
{
|
||||
thread_local! {
|
||||
static TEMPLATE_ELEMENT: Lazy<HtmlTemplateElement> =
|
||||
Lazy::new(|| document().create_element("template").unwrap().unchecked_into());
|
||||
static TEMPLATE_ELEMENT: LazyCell<HtmlTemplateElement> =
|
||||
LazyCell::new(|| document().create_element(Dom::intern("template")).unwrap().unchecked_into());
|
||||
static TEMPLATES: RefCell<LinearMap<TypeId, HtmlTemplateElement>> = Default::default();
|
||||
}
|
||||
|
||||
@@ -487,13 +492,66 @@ impl Dom {
|
||||
.unchecked_into()
|
||||
}
|
||||
|
||||
pub fn create_element_from_html(html: &str) -> Element {
|
||||
// TODO can be optimized to cache HTML strings or cache <template>?
|
||||
let tpl = document().create_element("template").unwrap();
|
||||
tpl.set_inner_html(html);
|
||||
let tpl = Self::clone_template(tpl.unchecked_ref());
|
||||
pub fn create_element_from_html(html: Cow<'static, str>) -> Element {
|
||||
let tpl = TEMPLATE_CACHE.with(|cache| {
|
||||
let mut cache = cache.borrow_mut();
|
||||
if let Some(tpl_content) = cache.iter().find_map(|(key, tpl)| {
|
||||
(html == *key)
|
||||
.then_some(Self::clone_template(tpl.unchecked_ref()))
|
||||
}) {
|
||||
tpl_content
|
||||
} else {
|
||||
let tpl = document()
|
||||
.create_element(Self::intern("template"))
|
||||
.unwrap();
|
||||
tpl.set_inner_html(&html);
|
||||
let tpl_content = Self::clone_template(tpl.unchecked_ref());
|
||||
cache.push((html, tpl));
|
||||
tpl_content
|
||||
}
|
||||
});
|
||||
tpl.first_element_child().unwrap_or(tpl)
|
||||
}
|
||||
|
||||
pub fn create_svg_element_from_html(html: Cow<'static, str>) -> Element {
|
||||
let tpl = TEMPLATE_CACHE.with(|cache| {
|
||||
let mut cache = cache.borrow_mut();
|
||||
if let Some(tpl_content) = cache.iter().find_map(|(key, tpl)| {
|
||||
(html == *key)
|
||||
.then_some(Self::clone_template(tpl.unchecked_ref()))
|
||||
}) {
|
||||
tpl_content
|
||||
} else {
|
||||
let tpl = document()
|
||||
.create_element(Self::intern("template"))
|
||||
.unwrap();
|
||||
let svg = document()
|
||||
.create_element_ns(
|
||||
Some(Self::intern("http://www.w3.org/2000/svg")),
|
||||
Self::intern("svg"),
|
||||
)
|
||||
.unwrap();
|
||||
let g = document()
|
||||
.create_element_ns(
|
||||
Some(Self::intern("http://www.w3.org/2000/svg")),
|
||||
Self::intern("g"),
|
||||
)
|
||||
.unwrap();
|
||||
g.set_inner_html(&html);
|
||||
svg.append_child(&g).unwrap();
|
||||
tpl.unchecked_ref::<TemplateElement>()
|
||||
.content()
|
||||
.append_child(&svg)
|
||||
.unwrap();
|
||||
let tpl_content = Self::clone_template(tpl.unchecked_ref());
|
||||
cache.push((html, tpl));
|
||||
tpl_content
|
||||
}
|
||||
});
|
||||
|
||||
let svg = tpl.first_element_child().unwrap();
|
||||
svg.first_element_child().unwrap_or(svg)
|
||||
}
|
||||
}
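The cache added above is a thread-local `Vec` that is scanned linearly on each lookup, which stays cheap as long as an app only produces a handful of distinct inert HTML strings (this rationale is an inference, not stated in the change). The cache-or-build pattern in isolation, with illustrative names only:

```rust
use std::{borrow::Cow, cell::RefCell};

thread_local! {
    // Small, append-only cache; a linear scan is fine for a few entries.
    static CACHE: RefCell<Vec<(Cow<'static, str>, String)>> = RefCell::new(Vec::new());
}

// Returns the cached value for `key`, building and storing it on first use.
fn cached_or_build(key: Cow<'static, str>, build: impl FnOnce(&str) -> String) -> String {
    CACHE.with(|cache| {
        let mut cache = cache.borrow_mut();
        if let Some((_, cached)) = cache.iter().find(|(k, _)| *k == key) {
            return cached.clone();
        }
        let built = build(&key);
        cache.push((key, built.clone()));
        built
    })
}
```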
|
||||
|
||||
impl Mountable for Node {
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
use crate::{
|
||||
html::{
|
||||
attribute::Attribute,
|
||||
attribute::{any_attribute::AnyAttribute, Attribute},
|
||||
element::{ElementType, ElementWithChildren, HtmlElement},
|
||||
},
|
||||
view::Render,
|
||||
hydration::Cursor,
|
||||
prelude::{AddAnyAttr, Mountable},
|
||||
renderer::{
|
||||
dom::{Element, Node},
|
||||
CastFrom, Rndr,
|
||||
},
|
||||
view::{Position, PositionState, Render, RenderHtml},
|
||||
};
|
||||
use std::fmt::Debug;
|
||||
use std::{borrow::Cow, fmt::Debug};
|
||||
|
||||
macro_rules! svg_elements {
|
||||
($($tag:ident [$($attr:ty),*]),* $(,)?) => {
|
||||
@@ -185,3 +191,124 @@ impl ElementType for Use {
|
||||
}
|
||||
|
||||
impl ElementWithChildren for Use {}
|
||||
|
||||
/// An element that contains no interactivity, and whose contents can be known at compile time.
|
||||
pub struct InertElement {
|
||||
html: Cow<'static, str>,
|
||||
}
|
||||
|
||||
impl InertElement {
|
||||
/// Creates a new inert SVG element.
|
||||
pub fn new(html: impl Into<Cow<'static, str>>) -> Self {
|
||||
Self { html: html.into() }
|
||||
}
|
||||
}
|
||||
|
||||
/// Retained view state for [`InertElement`].
|
||||
pub struct InertElementState(Cow<'static, str>, Element);
|
||||
|
||||
impl Mountable for InertElementState {
|
||||
fn unmount(&mut self) {
|
||||
self.1.unmount();
|
||||
}
|
||||
|
||||
fn mount(&mut self, parent: &Element, marker: Option<&Node>) {
|
||||
self.1.mount(parent, marker)
|
||||
}
|
||||
|
||||
fn insert_before_this(&self, child: &mut dyn Mountable) -> bool {
|
||||
self.1.insert_before_this(child)
|
||||
}
|
||||
|
||||
fn elements(&self) -> Vec<crate::renderer::types::Element> {
|
||||
vec![self.1.clone()]
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for InertElement {
|
||||
type State = InertElementState;
|
||||
|
||||
fn build(self) -> Self::State {
|
||||
let el = Rndr::create_svg_element_from_html(self.html.clone());
|
||||
InertElementState(self.html, el)
|
||||
}
|
||||
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
let InertElementState(prev, el) = state;
|
||||
if &self.html != prev {
|
||||
let mut new_el =
|
||||
Rndr::create_svg_element_from_html(self.html.clone());
|
||||
el.insert_before_this(&mut new_el);
|
||||
el.unmount();
|
||||
*el = new_el;
|
||||
*prev = self.html;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAnyAttr for InertElement {
|
||||
type Output<SomeNewAttr: Attribute> = Self;
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr>
|
||||
where
|
||||
Self::Output<NewAttr>: RenderHtml,
|
||||
{
|
||||
panic!(
|
||||
"InertElement does not support adding attributes. It should only \
|
||||
be used as a child, and not returned at the top level."
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl RenderHtml for InertElement {
|
||||
type AsyncOutput = Self;
|
||||
type Owned = Self;
|
||||
|
||||
const MIN_LENGTH: usize = 0;
|
||||
|
||||
fn html_len(&self) -> usize {
|
||||
self.html.len()
|
||||
}
|
||||
|
||||
fn dry_resolve(&mut self) {}
|
||||
|
||||
async fn resolve(self) -> Self {
|
||||
self
|
||||
}
|
||||
|
||||
fn to_html_with_buf(
|
||||
self,
|
||||
buf: &mut String,
|
||||
position: &mut Position,
|
||||
_escape: bool,
|
||||
_mark_branches: bool,
|
||||
_extra_attrs: Vec<AnyAttribute>,
|
||||
) {
|
||||
buf.push_str(&self.html);
|
||||
*position = Position::NextChild;
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(
|
||||
self,
|
||||
cursor: &Cursor,
|
||||
position: &PositionState,
|
||||
) -> Self::State {
|
||||
let curr_position = position.get();
|
||||
if curr_position == Position::FirstChild {
|
||||
cursor.child();
|
||||
} else if curr_position != Position::Current {
|
||||
cursor.sibling();
|
||||
}
|
||||
let el = crate::renderer::types::Element::cast_from(cursor.current())
|
||||
.unwrap();
|
||||
position.set(Position::NextChild);
|
||||
InertElementState(self.html, el)
|
||||
}
|
||||
|
||||
fn into_owned(self) -> Self::Owned {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ use crate::{
|
||||
Attribute,
|
||||
},
|
||||
hydration::Cursor,
|
||||
renderer::Rndr,
|
||||
ssr::StreamBuilder,
|
||||
};
|
||||
use futures::future::{join, join_all};
|
||||
@@ -90,6 +91,7 @@ pub struct AnyViewState {
|
||||
),
|
||||
insert_before_this: fn(&ErasedLocal, child: &mut dyn Mountable) -> bool,
|
||||
elements: fn(&ErasedLocal) -> Vec<crate::renderer::types::Element>,
|
||||
placeholder: Option<crate::renderer::types::Placeholder>,
|
||||
}
|
||||
|
||||
impl Debug for AnyViewState {
|
||||
@@ -214,6 +216,9 @@ where
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if !T::EXISTS {
|
||||
buf.push_str("<!--<() />-->");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
@@ -232,6 +237,9 @@ where
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if !T::EXISTS {
|
||||
buf.push_sync("<!--<() />-->");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
@@ -250,10 +258,14 @@ where
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if !T::EXISTS {
|
||||
buf.push_sync("<!--<() />-->");
|
||||
}
|
||||
}
|
||||
|
||||
fn build<T: RenderHtml + 'static>(value: Erased) -> AnyViewState {
|
||||
let state = ErasedLocal::new(value.into_inner::<T>().build());
|
||||
let placeholder = (!T::EXISTS).then(Rndr::create_placeholder);
|
||||
AnyViewState {
|
||||
type_id: TypeId::of::<T>(),
|
||||
state,
|
||||
@@ -261,6 +273,7 @@ where
|
||||
unmount: unmount_any::<T>,
|
||||
insert_before_this: insert_before_this::<T>,
|
||||
elements: elements::<T>,
|
||||
placeholder,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -273,6 +286,8 @@ where
|
||||
let state = ErasedLocal::new(
|
||||
value.into_inner::<T>().hydrate::<true>(cursor, position),
|
||||
);
|
||||
let placeholder =
|
||||
(!T::EXISTS).then(|| cursor.next_placeholder(position));
|
||||
AnyViewState {
|
||||
type_id: TypeId::of::<T>(),
|
||||
state,
|
||||
@@ -280,6 +295,7 @@ where
|
||||
unmount: unmount_any::<T>,
|
||||
insert_before_this: insert_before_this::<T>,
|
||||
elements: elements::<T>,
|
||||
placeholder,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -327,7 +343,12 @@ impl Render for AnyView {
|
||||
(self.rebuild)(self.value, state)
|
||||
} else {
|
||||
let mut new = self.build();
|
||||
state.insert_before_this(&mut new);
|
||||
if let Some(placeholder) = &mut state.placeholder {
|
||||
placeholder.insert_before_this(&mut new);
|
||||
placeholder.unmount();
|
||||
} else {
|
||||
state.insert_before_this(&mut new);
|
||||
}
|
||||
state.unmount();
|
||||
*state = new;
|
||||
}
|
||||
@@ -554,7 +575,10 @@ impl RenderHtml for AnyView {
|
||||
|
||||
impl Mountable for AnyViewState {
|
||||
fn unmount(&mut self) {
|
||||
(self.unmount)(&mut self.state)
|
||||
(self.unmount)(&mut self.state);
|
||||
if let Some(placeholder) = &mut self.placeholder {
|
||||
placeholder.unmount();
|
||||
}
|
||||
}
|
||||
|
||||
fn mount(
|
||||
@@ -562,11 +586,23 @@ impl Mountable for AnyViewState {
|
||||
parent: &crate::renderer::types::Element,
|
||||
marker: Option<&crate::renderer::types::Node>,
|
||||
) {
|
||||
(self.mount)(&mut self.state, parent, marker)
|
||||
(self.mount)(&mut self.state, parent, marker);
|
||||
if let Some(placeholder) = &mut self.placeholder {
|
||||
placeholder.mount(parent, marker);
|
||||
}
|
||||
}
|
||||
|
||||
fn insert_before_this(&self, child: &mut dyn Mountable) -> bool {
|
||||
(self.insert_before_this)(&self.state, child)
|
||||
let before_view = (self.insert_before_this)(&self.state, child);
|
||||
if before_view {
|
||||
return true;
|
||||
}
|
||||
|
||||
if let Some(placeholder) = &self.placeholder {
|
||||
placeholder.insert_before_this(child)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn elements(&self) -> Vec<crate::renderer::types::Element> {
|
||||
|
||||
@@ -53,6 +53,12 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoFragment for AnyView {
|
||||
fn into_fragment(self) -> Fragment {
|
||||
Fragment::new(vec![self])
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> IntoFragment for Vec<T>
|
||||
where
|
||||
T: IntoAny,
|
Some files were not shown because too many files have changed in this diff.