Mirror of https://github.com/leptos-rs/leptos.git (synced 2025-12-27 16:54:41 -05:00)

Compare commits: 4324 ... 4473-regre (67 commits)
Commits in this range (SHA1):

549f6c16ff, 9c1efcf267, 7864a12967, 9733cdcfe1, 1aaa716dfc, 779b2f2a9f, 72e0abc75c, a7a8970150,
2e09f3d102, e6fe7fef07, 629f4f9d0f, ff5b612e12, 61571ed24b, 4f3a26ce88, 83a848b5ec, eec9edf517,
861dcf354c, af3d6cba22, a0d657f9b1, cddb24ebd3, e8afd11995, 4d01d95175, 9bf5b22633, da4a7d5285,
2af6c6353c, 7f4b5eb4d1, fbf46ca58c, 0edbd9b3b5, 43359694b6, 9dd5501b1a, 6843f654ff, cb7c648400,
d3148ac9c9, 6d7e203efe, b5c69937b4, 0b45ff5116, 13dc6f474d, 21218fc802, 65b5be2748, 7c30bb92f7,
edf369f035, eb02304ee1, 578b672f14, b20902aaa1, d3ad0c67b6, 62d8ec9cc5, 0d2523190d, 338da18ed2,
616aae4c3c, c6e59eeb43, c025ae59ac, df46feee5d, bbf5bf9170, 7a3556bf34, d13936cab5, b303a35d76,
a453b7d1bd, 3b9ccdf57e, 95db8c939e, 2bfa9952af, 4e445f43d6, 5f544f67ae, 68477d2b76, 5bd9469b93,
4bca70dc2f, b92a14228c, 68967fdad3
.github/workflows/autofix.yml (2 lines changed)

@@ -18,7 +18,7 @@ jobs:
  autofix:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v5
      - uses: actions/checkout@v6
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with: {toolchain: "nightly-2025-07-16", components: "rustfmt, clippy", target: "wasm32-unknown-unknown", rustflags: ""}
      - name: Install Glib

.github/workflows/ci.yml (2 lines changed)

@@ -63,6 +63,6 @@ jobs:
          sudo apt-get update
          sudo apt-get install -y libglib2.0-dev
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v6
      - name: Semver Checks
        uses: obi1kenobi/cargo-semver-checks-action@v2

.github/workflows/get-example-changed.yml (2 lines changed)

@@ -19,7 +19,7 @@ jobs:
      matrix: ${{ steps.set-example-changed.outputs.matrix }}
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Get example files that changed

.github/workflows/get-examples-matrix.yml (2 lines changed)

@@ -17,7 +17,7 @@ jobs:
      EXCLUDED_EXAMPLES: cargo-make
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v6
      - name: Install jq
        run: sudo apt-get install jq
      - name: Set Matrix

.github/workflows/get-leptos-changed.yml (2 lines changed)

@@ -13,7 +13,7 @@ jobs:
      leptos_changed: ${{ steps.set-source-changed.outputs.leptos_changed }}
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Get source files that changed

.github/workflows/get-leptos-matrix.yml (2 lines changed)

@@ -13,7 +13,7 @@ jobs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - name: Checkout
        uses: actions/checkout@v5
        uses: actions/checkout@v6
      - name: Install jq
        run: sudo apt-get install jq
      - name: Set Matrix

.github/workflows/publish-book.yml (2 lines changed)

@@ -12,7 +12,7 @@ jobs:
      contents: write # To push a branch
      pull-requests: write # To create a PR from that branch
    steps:
      - uses: actions/checkout@v5
      - uses: actions/checkout@v6
        with:
          fetch-depth: 0
      - name: Install mdbook

.github/workflows/run-cargo-make-task.yml (4 lines changed)

@@ -53,7 +53,7 @@ jobs:
        run: |
          sudo apt-get update
          sudo apt-get install -y libglib2.0-dev
      - uses: actions/checkout@v5
      - uses: actions/checkout@v6
      - name: Setup Rust
        uses: dtolnay/rust-toolchain@master
        with:

@@ -88,7 +88,7 @@ jobs:
        run: trunk --version
      - name: Install Node.js
        if: contains(inputs.directory, 'examples')
        uses: actions/setup-node@v5
        uses: actions/setup-node@v6
        with:
          node-version: 20
      - uses: pnpm/action-setup@v4
Cargo.lock (420 lines changed, generated)

File diff suppressed because it is too large.
Cargo.toml (69 lines changed)

@@ -2,7 +2,6 @@
resolver = "2"
members = [
# utilities
"oco",
"any_spawner",
"const_str_slice_concat",
"either_of",

@@ -45,33 +44,31 @@ rust-version = "1.88"

[workspace.dependencies]
# members
throw_error = { path = "./any_error/", version = "0.3.0" }
throw_error = { path = "./any_error/", version = "0.3.1" }
any_spawner = { path = "./any_spawner/", version = "0.3.0" }
const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
either_of = { path = "./either_of/", version = "0.1.6" }
hydration_context = { path = "./hydration_context", version = "0.3.0" }
leptos = { path = "./leptos", version = "0.8.9" }
leptos_config = { path = "./leptos_config", version = "0.8.7" }
leptos_dom = { path = "./leptos_dom", version = "0.8.6" }
leptos = { path = "./leptos", version = "0.8.14" }
leptos_config = { path = "./leptos_config", version = "0.8.8" }
leptos_dom = { path = "./leptos_dom", version = "0.8.7" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.5" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.8.5" }
leptos_macro = { path = "./leptos_macro", version = "0.8.8" }
leptos_router = { path = "./router", version = "0.8.7" }
leptos_router_macro = { path = "./router_macro", version = "0.8.5" }
leptos_server = { path = "./leptos_server", version = "0.8.5" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.8.7" }
leptos_macro = { path = "./leptos_macro", version = "0.8.12" }
leptos_router = { path = "./router", version = "0.8.10" }
leptos_router_macro = { path = "./router_macro", version = "0.8.6" }
leptos_server = { path = "./leptos_server", version = "0.8.6" }
leptos_meta = { path = "./meta", version = "0.8.5" }
next_tuple = { path = "./next_tuple", version = "0.1.0" }
oco_ref = { path = "./oco", version = "0.2.1" }
or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
reactive_graph = { path = "./reactive_graph", version = "0.2.7" }
reactive_stores = { path = "./reactive_stores", version = "0.2.5" }
reactive_graph = { path = "./reactive_graph", version = "0.2.11" }
reactive_stores = { path = "./reactive_stores", version = "0.3.0" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.6" }
server_fn = { path = "./server_fn", version = "0.8.7" }
server_fn_macro = { path = "./server_fn_macro", version = "0.8.7" }
server_fn = { path = "./server_fn", version = "0.8.8" }
server_fn_macro = { path = "./server_fn_macro", version = "0.8.8" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.5" }
tachys = { path = "./tachys", version = "0.2.8" }
wasm_split_helpers = { path = "./wasm_split", version = "0.1.2" }
wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.3" }
tachys = { path = "./tachys", version = "0.2.11" }

# members deps
async-once-cell = { default-features = false, version = "0.5.3" }

@@ -79,8 +76,9 @@ itertools = { default-features = false, version = "0.14.0" }
convert_case = { default-features = false, version = "0.8.0" }
serde_json = { default-features = false, version = "1.0.143" }
trybuild = { default-features = false, version = "1.0.110" }
typed-builder = { default-features = false, version = "0.21.2" }
thiserror = { default-features = false, version = "2.0.16" }
typed-builder = { default-features = false, version = "0.22.0" }
typed-builder-macro = { default-features = false, version = "0.22.0" }
thiserror = { default-features = false, version = "2.0.17" }
wasm-bindgen = { default-features = false, version = "0.2.100" }
indexmap = { default-features = false, version = "2.11.0" }
rstml = { default-features = false, version = "0.12.1" }

@@ -97,7 +95,7 @@ send_wrapper = { default-features = false, version = "0.6.0" }
tokio-test = { default-features = false, version = "0.4.4" }
html-escape = { default-features = false, version = "0.2.13" }
proc-macro-error2 = { default-features = false, version = "2.0.1" }
const_format = { default-features = false, version = "0.2.34" }
const_format = { default-features = false, version = "0.2.35" }
gloo-net = { default-features = false, version = "0.6.0" }
url = { default-features = false, version = "2.5.4" }
tokio = { default-features = false, version = "1.47.1" }

@@ -107,46 +105,46 @@ wasm-bindgen-futures = { default-features = false, version = "0.4.50" }
tower = { default-features = false, version = "0.5.2" }
proc-macro2 = { default-features = false, version = "1.0.101" }
serde = { default-features = false, version = "1.0.219" }
parking_lot = { default-features = false, version = "0.12.4" }
axum = { default-features = false, version = "0.8.4" }
parking_lot = { default-features = false, version = "0.12.5" }
axum = { default-features = false, version = "0.8.6" }
serde_qs = { default-features = false, version = "0.15.0" }
syn = { default-features = false, version = "2.0.106" }
xxhash-rust = { default-features = false, version = "0.8.15" }
paste = { default-features = false, version = "1.0.15" }
quote = { default-features = false, version = "1.0.40" }
quote = { default-features = false, version = "1.0.41" }
web-sys = { default-features = false, version = "0.3.77" }
js-sys = { default-features = false, version = "0.3.77" }
rand = { default-features = false, version = "0.9.1" }
serde-lite = { default-features = false, version = "0.5.0" }
tokio-tungstenite = { default-features = false, version = "0.27.0" }
tokio-tungstenite = { default-features = false, version = "0.28.0" }
serial_test = { default-features = false, version = "3.2.0" }
erased = { default-features = false, version = "0.1.2" }
glib = { default-features = false, version = "0.20.12" }
async-trait = { default-features = false, version = "0.1.89" }
typed-builder-macro = { default-features = false, version = "0.21.0" }
linear-map = { default-features = false, version = "1.2.0" }
anyhow = { default-features = false, version = "1.0.99" }
anyhow = { default-features = false, version = "1.0.100" }
walkdir = { default-features = false, version = "2.5.0" }
actix-ws = { default-features = false, version = "0.3.0" }
tower-http = { default-features = false, version = "0.6.4" }
prettyplease = { default-features = false, version = "0.2.37" }
inventory = { default-features = false, version = "0.3.21" }
config = { default-features = false, version = "0.15.14" }
camino = { default-features = false, version = "1.1.11" }
camino = { default-features = false, version = "1.2.1" }
ciborium = { default-features = false, version = "0.2.2" }
bitcode = { default-features = false, version = "0.6.6" }
multer = { default-features = false, version = "3.1.0" }
leptos-spin-macro = { default-features = false, version = "0.2.0" }
sledgehammer_utils = { default-features = false, version = "0.3.1" }
sledgehammer_bindgen = { default-features = false, version = "0.6.0" }
wasm-streams = { default-features = false, version = "0.4.2" }
rkyv = { default-features = false, version = "0.8.11" }
rkyv = { default-features = false, version = "0.8.12" }
temp-env = { default-features = false, version = "0.3.6" }
uuid = { default-features = false, version = "1.18.0" }
bytes = { default-features = false, version = "1.10.1" }
http = { default-features = false, version = "1.3.1" }
regex = { default-features = false, version = "1.11.2" }
regex = { default-features = false, version = "1.11.3" }
drain_filter_polyfill = { default-features = false, version = "0.1.3" }
tempfile = { default-features = false, version = "3.21.0" }
tempfile = { default-features = false, version = "3.23.0" }
futures-lite = { default-features = false, version = "2.6.1" }
log = { default-features = false, version = "0.4.27" }
percent-encoding = { default-features = false, version = "2.3.2" }

@@ -158,10 +156,10 @@ postcard = { default-features = false, version = "1.1.3" }
rmp-serde = { default-features = false, version = "1.3.0" }
reqwest = { default-features = false, version = "0.12.23" }
tower-layer = { default-features = false, version = "0.3.3" }
attribute-derive = { default-features = false, version = "0.10.3" }
attribute-derive = { default-features = false, version = "0.10.5" }
insta = { default-features = false, version = "1.43.1" }
codee = { default-features = false, version = "0.3.0" }
actix-http = { default-features = false, version = "3.11.1" }
actix-http = { default-features = false, version = "3.11.2" }
wasm-bindgen-test = { default-features = false, version = "0.3.50" }
rustversion = { default-features = false, version = "1.0.22" }
getrandom = { default-features = false, version = "0.3.3" }

@@ -170,9 +168,10 @@ async-lock = { default-features = false, version = "3.4.1" }
base16 = { default-features = false, version = "0.2.1" }
digest = { default-features = false, version = "0.10.7" }
sha2 = { default-features = false, version = "0.10.8" }
subsecond = { default-features = false, git = "https://github.com/dioxuslabs/dioxus" }
dioxus-cli-config = { default-features = false, git = "https://github.com/dioxuslabs/dioxus" }
dioxus-devtools = { default-features = false, git = "https://github.com/dioxuslabs/dioxus" }
subsecond = { default-features = false, version = "0.7.0-rc.0" }
dioxus-cli-config = { default-features = false, version = "0.7.0-rc.0" }
dioxus-devtools = { default-features = false, version = "0.7.0-rc.0" }
wasm_split_helpers = { default-features = false, version = "0.2.0" }

[profile.release]
codegen-units = 1
@@ -1,6 +1,6 @@
[package]
name = "throw_error"
version = "0.3.0"
version = "0.3.1"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

@@ -11,3 +11,6 @@ edition.workspace = true

[dependencies]
pin-project-lite = { workspace = true, default-features = true }

[dev-dependencies]
anyhow.workspace = true

@@ -45,10 +45,10 @@ impl fmt::Display for Error {

impl<T> From<T> for Error
where
    T: error::Error + Send + Sync + 'static,
    T: Into<Box<dyn error::Error + Send + Sync + 'static>>,
{
    fn from(value: T) -> Self {
        Error(Arc::new(value))
        Error(Arc::from(value.into()))
    }
}

@@ -158,3 +158,32 @@ where
        this.inner.poll(cx)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::error::Error as StdError;

    #[derive(Debug)]
    struct MyError;

    impl Display for MyError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "MyError")
        }
    }

    impl StdError for MyError {}

    #[test]
    fn test_from() {
        let e = MyError;
        let _le = Error::from(e);

        let e = "some error".to_string();
        let _le = Error::from(e);

        let e = anyhow::anyhow!("anyhow error");
        let _le = Error::from(e);
    }
}
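The relaxed `From` bound above is what lets values that do not implement `std::error::Error` (such as `String` or `anyhow::Error`) convert directly into the error type, as the new test exercises. A minimal standalone sketch of the same pattern (not the crate's actual code; the type name is invented for illustration):

```rust
use std::{error::Error as StdError, sync::Arc};

// Simplified stand-in for the error wrapper changed above.
struct AnyError(#[allow(dead_code)] Arc<dyn StdError + Send + Sync>);

impl<T> From<T> for AnyError
where
    // Anything convertible into a boxed error is accepted,
    // not only concrete `std::error::Error` implementors.
    T: Into<Box<dyn StdError + Send + Sync + 'static>>,
{
    fn from(value: T) -> Self {
        // Box<dyn Error + Send + Sync> -> Arc<dyn Error + Send + Sync> via Arc::from
        AnyError(Arc::from(value.into()))
    }
}

fn main() {
    // `String` does not implement `std::error::Error`, but it does implement
    // `Into<Box<dyn Error + Send + Sync>>`, so this conversion now compiles.
    let _from_string = AnyError::from("something went wrong".to_string());
    // Concrete error types still work as before.
    let _from_io = AnyError::from(std::io::Error::other("io failure"));
}
```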
@@ -27,7 +27,7 @@ tokio = { version = "1.39", features = [
|
||||
], optional = true }
|
||||
tower = { version = "0.4.13", optional = true }
|
||||
tower-http = { version = "0.5.2", features = ["fs"], optional = true }
|
||||
wasm-bindgen = "0.2.92"
|
||||
wasm-bindgen = "0.2.105"
|
||||
web-sys = { version = "0.3.69", features = [
|
||||
"AddEventListenerOptions",
|
||||
"Document",
|
||||
|
||||
@@ -510,11 +510,9 @@ if (window.hljs) {
|
||||
});
|
||||
view! {
|
||||
<pre><code class="language-rust">{code.await}</code></pre>
|
||||
{
|
||||
move || script.get().map(|script| {
|
||||
view! { <Script>{script}</Script> }
|
||||
})
|
||||
}
|
||||
<ShowLet some=script let:script>
|
||||
<Script>{script}</Script>
|
||||
</ShowLet>
|
||||
}
|
||||
})
|
||||
};
|
||||
@@ -567,11 +565,9 @@ if (window.hljs) {
|
||||
});
|
||||
view! {
|
||||
<pre><code class="language-rust">{code.await}</code></pre>
|
||||
{
|
||||
move || script.get().map(|script| {
|
||||
view! { <Script>{script}</Script> }
|
||||
})
|
||||
}
|
||||
<ShowLet some=script let:script>
|
||||
<Script>{script}</Script>
|
||||
</ShowLet>
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
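Many of the example diffs in this range replace the `option.map(|x| view! { … })` pattern with the new `ShowLet` component; its `some` prop and `let:` binding appear throughout the hunks above and below. A hedged sketch of the pattern as used in these examples (only the `<ShowLet some=… let:…>` usage is taken from the diffs; the import path and component names here are assumptions):

```rust
use leptos::prelude::*; // assumes ShowLet is re-exported from the prelude in this branch

#[component]
fn Byline(author: Option<String>) -> impl IntoView {
    view! {
        // Renders its children only when `author` is `Some`,
        // binding the inner value as `author` via `let:`.
        <ShowLet some=author let:author>
            <span>"by " {author.clone()}</span>
        </ShowLet>
    }
}
```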
@@ -25,7 +25,7 @@ log = "0.4.22"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
gloo-net = { version = "0.6.0", features = ["http"] }
|
||||
reqwest = { version = "0.12.5", features = ["json"] }
|
||||
wasm-bindgen = "0.2.93"
|
||||
wasm-bindgen = "0.2.105"
|
||||
web-sys = { version = "0.3.70", features = ["AbortController", "AbortSignal"] }
|
||||
send_wrapper = "0.6.0"
|
||||
|
||||
@@ -46,12 +46,12 @@ denylist = ["actix-files", "actix-web", "leptos_actix"]
|
||||
skip_feature_sets = [["csr", "ssr"], ["csr", "hydrate"], ["ssr", "hydrate"], []]
|
||||
|
||||
[package.metadata.leptos]
|
||||
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
|
||||
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
|
||||
output-name = "hackernews"
|
||||
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
|
||||
site-root = "target/site"
|
||||
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
|
||||
# Defaults to pkg
|
||||
# Defaults to pkg
|
||||
site-pkg-dir = "pkg"
|
||||
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
|
||||
style-file = "./style.css"
|
||||
|
||||
@@ -145,14 +145,11 @@ fn Story(story: api::Story) -> impl IntoView {
|
||||
Either::Left(
|
||||
view! {
|
||||
<span>
|
||||
{"by "}
|
||||
{story
|
||||
.user
|
||||
.map(|user| {
|
||||
view! {
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
}
|
||||
})} {format!(" {} | ", story.time_ago)}
|
||||
"by "
|
||||
<ShowLet some=story.user let:user>
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
</ShowLet>
|
||||
{format!(" {} | ", story.time_ago)}
|
||||
<A href=format!(
|
||||
"/stories/{}",
|
||||
story.id,
|
||||
|
||||
@@ -30,17 +30,13 @@ pub fn Story() -> impl IntoView {
|
||||
<h1>{story.title}</h1>
|
||||
</a>
|
||||
<span class="host">"(" {story.domain} ")"</span>
|
||||
{story
|
||||
.user
|
||||
.map(|user| {
|
||||
view! {
|
||||
<p class="meta">
|
||||
{story.points} " points | by "
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
{format!(" {}", story.time_ago)}
|
||||
</p>
|
||||
}
|
||||
})}
|
||||
<ShowLet some=story.user let:user>
|
||||
<p class="meta">
|
||||
{story.points} " points | by "
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
{format!(" {}", story.time_ago)}
|
||||
</p>
|
||||
</ShowLet>
|
||||
</div>
|
||||
<div class="item-view-comments">
|
||||
<p class="item-view-comments-header">
|
||||
|
||||
@@ -26,7 +26,7 @@ tower-http = { version = "0.5.2", features = ["fs"], optional = true }
|
||||
tokio = { version = "1.39", features = ["full"], optional = true }
|
||||
http = { version = "1.1", optional = true }
|
||||
web-sys = { version = "0.3.70", features = ["AbortController", "AbortSignal"] }
|
||||
wasm-bindgen = "0.2.93"
|
||||
wasm-bindgen = "0.2.105"
|
||||
send_wrapper = { version = "0.6.0", features = ["futures"] }
|
||||
|
||||
[features]
|
||||
|
||||
@@ -133,7 +133,9 @@ fn Story(story: api::Story) -> impl IntoView {
|
||||
Either::Left(view! {
|
||||
<span>
|
||||
{"by "}
|
||||
{story.user.map(|user| view ! { <A href=format!("/users/{user}")>{user.clone()}</A>})}
|
||||
<ShowLet some=story.user let:user>
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
</ShowLet>
|
||||
{format!(" {} | ", story.time_ago)}
|
||||
<A href=format!("/stories/{}", story.id)>
|
||||
{if story.comments_count.unwrap_or_default() > 0 {
|
||||
|
||||
@@ -40,18 +40,20 @@ impl LazyRoute for StoryRoute {
|
||||
<Meta name="description" content=story.title.clone()/>
|
||||
<div class="item-view">
|
||||
<div class="item-view-header">
|
||||
<a href=story.url target="_blank">
|
||||
<h1>{story.title}</h1>
|
||||
</a>
|
||||
<span class="host">
|
||||
"("{story.domain}")"
|
||||
</span>
|
||||
{story.user.map(|user| view! { <p class="meta">
|
||||
{story.points}
|
||||
" points | by "
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
{format!(" {}", story.time_ago)}
|
||||
</p>})}
|
||||
<a href=story.url target="_blank">
|
||||
<h1>{story.title}</h1>
|
||||
</a>
|
||||
<span class="host">
|
||||
"("{story.domain}")"
|
||||
</span>
|
||||
<ShowLet some=story.user let:user>
|
||||
<p class="meta">
|
||||
{story.points}
|
||||
" points | by "
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
{format!(" {}", story.time_ago)}
|
||||
</p>
|
||||
</ShowLet>
|
||||
</div>
|
||||
<div class="item-view-comments">
|
||||
<p class="item-view-comments-header">
|
||||
|
||||
@@ -143,8 +143,10 @@ fn Story(story: api::Story) -> impl IntoView {
|
||||
{if story.story_type != "job" {
|
||||
Either::Left(view! {
|
||||
<span>
|
||||
{"by "}
|
||||
{story.user.map(|user| view ! { <A href=format!("/users/{user}")>{user.clone()}</A>})}
|
||||
"by "
|
||||
<ShowLet some=story.user let:user>
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
</ShowLet>
|
||||
{format!(" {} | ", story.time_ago)}
|
||||
<A href=format!("/stories/{}", story.id)>
|
||||
{if story.comments_count.unwrap_or_default() > 0 {
|
||||
|
||||
@@ -32,18 +32,20 @@ pub fn Story() -> impl IntoView {
|
||||
<Meta name="description" content=story.title.clone()/>
|
||||
<div class="item-view">
|
||||
<div class="item-view-header">
|
||||
<a href=story.url target="_blank">
|
||||
<h1>{story.title}</h1>
|
||||
</a>
|
||||
<span class="host">
|
||||
"("{story.domain}")"
|
||||
</span>
|
||||
{story.user.map(|user| view! { <p class="meta">
|
||||
{story.points}
|
||||
" points | by "
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
{format!(" {}", story.time_ago)}
|
||||
</p>})}
|
||||
<a href=story.url target="_blank">
|
||||
<h1>{story.title}</h1>
|
||||
</a>
|
||||
<span class="host">
|
||||
"("{story.domain}")"
|
||||
</span>
|
||||
<ShowLet some=story.user let:user>
|
||||
<p class="meta">
|
||||
{story.points}
|
||||
" points | by "
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
{format!(" {}", story.time_ago)}
|
||||
</p>
|
||||
</ShowLet>
|
||||
</div>
|
||||
<div class="item-view-comments">
|
||||
<p class="item-view-comments-header">
|
||||
|
||||
@@ -139,14 +139,11 @@ fn Story(story: api::Story) -> impl IntoView {
|
||||
Either::Left(
|
||||
view! {
|
||||
<span>
|
||||
{"by "}
|
||||
{story
|
||||
.user
|
||||
.map(|user| {
|
||||
view! {
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
}
|
||||
})} {format!(" {} | ", story.time_ago)}
|
||||
"by "
|
||||
<ShowLet some=story.user let:user>
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
</ShowLet>
|
||||
{format!(" {} | ", story.time_ago)}
|
||||
<A href=format!(
|
||||
"/stories/{}",
|
||||
story.id,
|
||||
|
||||
@@ -35,17 +35,13 @@ pub fn Story() -> impl IntoView {
|
||||
<h1>{story.title}</h1>
|
||||
</a>
|
||||
<span class="host">"("{story.domain}")"</span>
|
||||
{story
|
||||
.user
|
||||
.map(|user| {
|
||||
view! {
|
||||
<p class="meta">
|
||||
{story.points} " points | by "
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
{format!(" {}", story.time_ago)}
|
||||
</p>
|
||||
}
|
||||
})}
|
||||
<ShowLet some=story.user let:user>
|
||||
<p class="meta">
|
||||
{story.points} " points | by "
|
||||
<A href=format!("/users/{user}")>{user.clone()}</A>
|
||||
{format!(" {}", story.time_ago)}
|
||||
</p>
|
||||
</ShowLet>
|
||||
</div>
|
||||
<div class="item-view-comments">
|
||||
<p class="item-view-comments-header">
|
||||
|
||||
@@ -9,6 +9,3 @@ routing when you use islands.
This uses *only* server rendering, with no actual islands, but still maintains client-side state across page navigations.
It does this by building on the fact that we now have a statically-typed view tree to do pretty smart updates with
new HTML from the client, with extremely minimal diffing.

The demo itself works, but the feature that supports it is incomplete. A couple people have accidentally
used it and broken their applications in ways they don't understand, so I've renamed the feature to `dont-use-islands-router`.

@@ -5,4 +5,4 @@ test cases that typically happens at integration.

## Quick Start

Run `cargo leptos watch` to run this example.
Run `cargo leptos watch --split` to run this example.
@@ -440,7 +440,14 @@ pub fn FileUploadWithProgress() -> impl IntoView {
|
||||
let mut entry =
|
||||
FILES.entry(filename.to_string()).or_insert_with(|| {
|
||||
println!("[{filename}]\tinserting channel");
|
||||
let (tx, rx) = broadcast(128);
|
||||
// NOTE: this channel capacity is set arbitrarily for this demo code.
|
||||
// it allows for up to exactly 1048 chunks to be sent, which sets an upper cap
|
||||
// on upload size (the precise details vary by client)
|
||||
// in a real system, you will want to create some more reasonable ways of
|
||||
// sending and sharing notifications
|
||||
//
|
||||
// see https://github.com/leptos-rs/leptos/issues/4397 for related discussion
|
||||
let (tx, rx) = broadcast(1048);
|
||||
File { total: 0, tx, rx }
|
||||
});
|
||||
entry.total += len;
|
||||
@@ -557,17 +564,12 @@ pub fn FileUploadWithProgress() -> impl IntoView {
|
||||
<input type="submit" />
|
||||
</form>
|
||||
{move || filename.get().map(|filename| view! { <p>Uploading {filename}</p> })}
|
||||
{move || {
|
||||
max.get()
|
||||
.map(|max| {
|
||||
view! {
|
||||
<progress
|
||||
max=max
|
||||
value=move || current.get().unwrap_or_default()
|
||||
></progress>
|
||||
}
|
||||
})
|
||||
}}
|
||||
<ShowLet some=max let:max>
|
||||
<progress
|
||||
max=max
|
||||
value=move || current.get().unwrap_or_default()
|
||||
></progress>
|
||||
</ShowLet>
|
||||
}
|
||||
}
|
||||
#[component]
|
||||
|
||||
@@ -1,7 +1,10 @@
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
    use axum::Router;
    use axum::{
        http::{HeaderName, HeaderValue},
        Router,
    };
    use leptos::{logging::log, prelude::*};
    use leptos_axum::{generate_route_list, LeptosRoutes};
    use ssr_modes_axum::app::*;

@@ -17,7 +20,24 @@ async fn main() {
            let leptos_options = leptos_options.clone();
            move || shell(leptos_options.clone())
        })
        .fallback(leptos_axum::file_and_error_handler(shell))
        .fallback(leptos_axum::file_and_error_handler_with_context(
            move || {
                // if you want to add custom headers to the static file handler response,
                // you can do that by providing `ResponseOptions` via context
                let opts = use_context::<leptos_axum::ResponseOptions>()
                    .unwrap_or_default();
                opts.insert_header(
                    HeaderName::from_static("cross-origin-opener-policy"),
                    HeaderValue::from_static("same-origin"),
                );
                opts.insert_header(
                    HeaderName::from_static("cross-origin-embedder-policy"),
                    HeaderValue::from_static("require-corp"),
                );
                provide_context(opts);
            },
            shell,
        ))
        .with_state(leptos_options);

    // run our app with hyper
@@ -663,26 +663,24 @@ impl From<Vec<FieldNavItem>> for FieldNavCtx {
|
||||
#[component]
|
||||
pub fn FieldNavPortlet() -> impl IntoView {
|
||||
let ctx = expect_context::<ReadSignal<Option<FieldNavCtx>>>();
|
||||
move || {
|
||||
let ctx = ctx.get();
|
||||
ctx.map(|ctx| {
|
||||
view! {
|
||||
<div id="FieldNavPortlet">
|
||||
<span>"FieldNavPortlet:"</span>
|
||||
<nav>
|
||||
{ctx
|
||||
.0
|
||||
.map(|ctx| {
|
||||
ctx.into_iter()
|
||||
.map(|FieldNavItem { href, text }| {
|
||||
view! { <A href=href>{text}</A> }
|
||||
})
|
||||
.collect_view()
|
||||
})}
|
||||
</nav>
|
||||
</div>
|
||||
}
|
||||
})
|
||||
|
||||
view! {
|
||||
<ShowLet some=ctx let:ctx>
|
||||
<div id="FieldNavPortlet">
|
||||
<span>"FieldNavPortlet:"</span>
|
||||
<nav>
|
||||
{ctx
|
||||
.0
|
||||
.map(|ctx| {
|
||||
ctx.into_iter()
|
||||
.map(|FieldNavItem { href, text }| {
|
||||
view! { <A href=href>{text}</A> }
|
||||
})
|
||||
.collect_view()
|
||||
})}
|
||||
</nav>
|
||||
</div>
|
||||
</ShowLet>
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
examples/tailwind_csr/Trunk.toml (new file, 3 lines added)

@@ -0,0 +1,3 @@
[tools]
tailwindcss = "4.1.13"
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Actix integrations for the Leptos web framework."
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Axum integrations for the Leptos web framework."
|
||||
version = "0.8.6"
|
||||
version = "0.8.7"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
|
||||
@@ -2050,7 +2050,20 @@ where
|
||||
let res = res.await.unwrap();
|
||||
|
||||
if res.status() == StatusCode::OK {
|
||||
res.into_response()
|
||||
let owner = Owner::new();
|
||||
owner.with(|| {
|
||||
additional_context();
|
||||
let res = res.into_response();
|
||||
if let Some(response_options) =
|
||||
use_context::<ResponseOptions>()
|
||||
{
|
||||
let mut res = AxumResponse(res);
|
||||
res.extend_response(&response_options);
|
||||
res.0
|
||||
} else {
|
||||
res
|
||||
}
|
||||
})
|
||||
} else {
|
||||
let mut res = handle_response_inner(
|
||||
move || {
|
||||
|
||||
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Utilities to help build server integrations for the Leptos web framework."
|
||||
version = "0.8.5"
|
||||
version = "0.8.7"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
|
||||
@@ -68,7 +68,8 @@ pub trait ExtendResponse: Sized {
|
||||
let nonce =
|
||||
use_nonce().map(|n| n.to_string()).unwrap_or_default();
|
||||
if let Some(manifest) = use_context::<WasmSplitManifest>() {
|
||||
let (pkg_path, manifest) = &*manifest.0.read_value();
|
||||
let (pkg_path, manifest, wasm_split_file) =
|
||||
&*manifest.0.read_value();
|
||||
let prefetches = prefetches.0.read_value();
|
||||
|
||||
let all_prefetches = prefetches.iter().flat_map(|key| {
|
||||
@@ -90,7 +91,7 @@ pub trait ExtendResponse: Sized {
|
||||
.to_html();
|
||||
}
|
||||
_ = view! {
|
||||
<Link rel="modulepreload" href=format!("{pkg_path}/__wasm_split.js") crossorigin=nonce/>
|
||||
<Link rel="modulepreload" href=format!("{pkg_path}/{wasm_split_file}") crossorigin=nonce/>
|
||||
}
|
||||
.to_html();
|
||||
}
|
||||
@@ -120,7 +121,7 @@ pub trait ExtendResponse: Sized {
|
||||
// drop the owner, cleaning up the reactive runtime,
|
||||
// once the stream is over
|
||||
.chain(once(async move {
|
||||
owner.unset();
|
||||
owner.unset_with_forced_cleanup();
|
||||
Default::default()
|
||||
})),
|
||||
));
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
[package]
|
||||
name = "leptos"
|
||||
version = "0.8.9"
|
||||
version = "0.8.14"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
homepage = "https://leptos.dev/"
|
||||
description = "Leptos is a full-stack, isomorphic Rust web framework leveraging fine-grained reactivity to build declarative user interfaces."
|
||||
readme = "../README.md"
|
||||
rust-version.workspace = true
|
||||
@@ -57,7 +58,7 @@ serde_qs = { workspace = true, default-features = true }
|
||||
slotmap = { workspace = true, default-features = true }
|
||||
futures = { workspace = true, default-features = true }
|
||||
send_wrapper = { workspace = true, default-features = true }
|
||||
wasm_split_helpers.workspace = true
|
||||
wasm_split_helpers = { workspace = true, default-features = true }
|
||||
subsecond = { workspace = true, default-features = true, optional = true }
|
||||
dioxus-cli-config = { workspace = true, default-features = true, optional = true }
|
||||
dioxus-devtools = { workspace = true, default-features = true, optional = true }
|
||||
@@ -88,6 +89,12 @@ ssr = [
|
||||
]
|
||||
nightly = ["leptos_macro/nightly", "reactive_graph/nightly", "tachys/nightly"]
|
||||
rkyv = ["server_fn/rkyv", "leptos_server/rkyv"]
|
||||
bitcode = ["server_fn/bitcode"]
|
||||
serde-lite = ["server_fn/serde-lite", "leptos_server/serde-lite"]
|
||||
cbor = ["server_fn/cbor"]
|
||||
msgpack = ["server_fn/msgpack"]
|
||||
postcard = ["server_fn/postcard"]
|
||||
multipart = ["server_fn/multipart"]
|
||||
tracing = [
|
||||
"dep:tracing",
|
||||
"reactive_graph/tracing",
|
||||
@@ -147,13 +154,18 @@ denylist = [
|
||||
"trace-component-props",
|
||||
"spin",
|
||||
"islands",
|
||||
"bitcode",
|
||||
"serde-lite",
|
||||
"cbor",
|
||||
"msgpack",
|
||||
"postcard",
|
||||
"multipart",
|
||||
]
|
||||
skip_feature_sets = [
|
||||
["csr", "ssr"],
|
||||
["csr", "hydrate"],
|
||||
["ssr", "hydrate"],
|
||||
["serde", "serde-lite"],
|
||||
["serde-lite", "miniserde"],
|
||||
["serde", "miniserde"],
|
||||
["serde", "rkyv"],
|
||||
["miniserde", "rkyv"],
|
||||
|
||||
@@ -65,16 +65,56 @@ pub fn HydrationScripts(
|
||||
if let Some(splits) = SPLIT_MANIFEST.get_or_init(|| {
|
||||
let root = root.clone().unwrap_or_default();
|
||||
|
||||
let (wasm_split_js, wasm_split_manifest) = if options.hash_files {
|
||||
let hash_path = std::env::current_exe()
|
||||
.map(|path| {
|
||||
path.parent().map(|p| p.to_path_buf()).unwrap_or_default()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
.join(options.hash_file.as_ref());
|
||||
let hashes = std::fs::read_to_string(&hash_path)
|
||||
.expect("failed to read hash file");
|
||||
|
||||
let mut split =
|
||||
"__wasm_split.______________________.js".to_string();
|
||||
let mut manifest = "__wasm_split_manifest.json".to_string();
|
||||
for line in hashes.lines() {
|
||||
let line = line.trim();
|
||||
if !line.is_empty() {
|
||||
if let Some((file, hash)) = line.split_once(':') {
|
||||
if file == "manifest" {
|
||||
manifest.clear();
|
||||
manifest.push_str("__wasm_split_manifest.");
|
||||
manifest.push_str(hash.trim());
|
||||
manifest.push_str(".json");
|
||||
}
|
||||
if file == "split" {
|
||||
split.clear();
|
||||
split.push_str("__wasm_split.");
|
||||
split.push_str(hash.trim());
|
||||
split.push_str(".js");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
(split, manifest)
|
||||
} else {
|
||||
(
|
||||
"__wasm_split.______________________.js".to_string(),
|
||||
"__wasm_split_manifest.json".to_string(),
|
||||
)
|
||||
};
|
||||
|
||||
let site_dir = &options.site_root;
|
||||
let pkg_dir = &options.site_pkg_dir;
|
||||
let path = PathBuf::from(site_dir.to_string());
|
||||
let path = path
|
||||
.join(pkg_dir.to_string())
|
||||
.join("__wasm_split_manifest.json");
|
||||
let path = path.join(pkg_dir.to_string()).join(wasm_split_manifest);
|
||||
let file = std::fs::read_to_string(path).ok()?;
|
||||
|
||||
let manifest = WasmSplitManifest(ArcStoredValue::new((
|
||||
format!("{root}/{pkg_dir}"),
|
||||
serde_json::from_str(&file).expect("could not read manifest file"),
|
||||
wasm_split_js,
|
||||
)));
|
||||
|
||||
Some(manifest)
|
||||
|
||||
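For reference, the hash-file parser in the hunk above recognizes only two keys, `split` and `manifest`, each separated from its hash by a colon; empty lines and lines it does not recognize are skipped. A hypothetical file that this loop would accept (hash values invented for illustration) might look like:

```text
split:2f6f8a1c9b3d
manifest:7e1d2c3b4a59
```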
@@ -203,7 +203,7 @@ pub mod prelude {
|
||||
pub mod form;
|
||||
|
||||
/// A standard way to wrap functions and closures to pass them to components.
|
||||
pub mod callback;
|
||||
pub use reactive_graph::callback;
|
||||
|
||||
/// Types that can be passed as the `children` prop of a component.
|
||||
pub mod children;
|
||||
@@ -311,7 +311,9 @@ pub mod subsecond;
|
||||
|
||||
/// Utilities for simple isomorphic logging to the console or terminal.
|
||||
pub mod logging {
|
||||
pub use leptos_dom::{debug_warn, error, log, warn};
|
||||
pub use leptos_dom::{
|
||||
debug_error, debug_log, debug_warn, error, log, warn,
|
||||
};
|
||||
}
|
||||
|
||||
/// Utilities for working with asynchronous tasks.
|
||||
@@ -363,7 +365,8 @@ pub use serde_json;
|
||||
pub use tracing;
|
||||
#[doc(hidden)]
|
||||
pub use wasm_bindgen;
|
||||
pub use wasm_split_helpers;
|
||||
#[doc(hidden)]
|
||||
pub use wasm_split_helpers as wasm_split;
|
||||
#[doc(hidden)]
|
||||
pub use web_sys;
|
||||
|
||||
@@ -395,7 +398,8 @@ pub fn prefetch_lazy_fn_on_server(id: &'static str) {
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct WasmSplitManifest(
|
||||
pub reactive_graph::owner::ArcStoredValue<(
|
||||
String,
|
||||
std::collections::HashMap<String, Vec<String>>,
|
||||
String, // the pkg root
|
||||
std::collections::HashMap<String, Vec<String>>, // preloads
|
||||
String, // the name of the __wasm_split.js file
|
||||
)>,
|
||||
);
|
||||
|
||||
@@ -160,3 +160,16 @@ where
|
||||
OptionGetter(Arc::new(move || cloned.get()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Marker type for creating an `OptionGetter` from a static value.
|
||||
/// Used so that the compiler doesn't complain about double implementations of the trait `IntoOptionGetter`.
|
||||
pub struct StaticMarker;
|
||||
|
||||
impl<T> IntoOptionGetter<T, StaticMarker> for Option<T>
|
||||
where
|
||||
T: Clone + Send + Sync + 'static,
|
||||
{
|
||||
fn into_option_getter(self) -> OptionGetter<T> {
|
||||
OptionGetter(Arc::new(move || self.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ use crate::{
|
||||
use futures::{channel::oneshot, select, FutureExt};
|
||||
use hydration_context::SerializedDataId;
|
||||
use leptos_macro::component;
|
||||
use or_poisoned::OrPoisoned;
|
||||
use reactive_graph::{
|
||||
computed::{
|
||||
suspense::{LocalResourceNotifier, SuspenseContext},
|
||||
@@ -14,10 +15,10 @@ use reactive_graph::{
|
||||
effect::RenderEffect,
|
||||
owner::{provide_context, use_context, Owner},
|
||||
signal::ArcRwSignal,
|
||||
traits::{Dispose, Get, Read, Track, With, WriteValue},
|
||||
traits::{Dispose, Get, Read, ReadUntracked, Track, With, WriteValue},
|
||||
};
|
||||
use slotmap::{DefaultKey, SlotMap};
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use tachys::{
|
||||
either::Either,
|
||||
html::attribute::{any_attribute::AnyAttribute, Attribute},
|
||||
@@ -320,23 +321,66 @@ where
|
||||
|
||||
// walk over the tree of children once to make sure that all resource loads are registered
|
||||
self.children.dry_resolve();
|
||||
let children = Arc::new(Mutex::new(Some(self.children)));
|
||||
|
||||
// check the set of tasks to see if it is empty, now or later
|
||||
let eff = reactive_graph::effect::Effect::new_isomorphic({
|
||||
move |_| {
|
||||
tasks.track();
|
||||
if let Some(tasks) = tasks.try_read() {
|
||||
if tasks.is_empty() {
|
||||
if let Some(tx) = tasks_tx.take() {
|
||||
// If the receiver has dropped, it means the ScopedFuture has already
|
||||
// dropped, so it doesn't matter if we manage to send this.
|
||||
_ = tx.send(());
|
||||
}
|
||||
if let Some(tx) = notify_error_boundary.take() {
|
||||
_ = tx.send(());
|
||||
let children = Arc::clone(&children);
|
||||
move |double_checking: Option<bool>| {
|
||||
// on the first run, always track the tasks
|
||||
if double_checking.is_none() {
|
||||
tasks.track();
|
||||
}
|
||||
|
||||
if let Some(curr_tasks) = tasks.try_read_untracked() {
|
||||
if curr_tasks.is_empty() {
|
||||
if double_checking == Some(true) {
|
||||
// we have finished loading, and checking the children again told us there are
|
||||
// no more pending tasks. so we can render both the children and the error boundary
|
||||
|
||||
if let Some(tx) = tasks_tx.take() {
|
||||
// If the receiver has dropped, it means the ScopedFuture has already
|
||||
// dropped, so it doesn't matter if we manage to send this.
|
||||
_ = tx.send(());
|
||||
}
|
||||
if let Some(tx) = notify_error_boundary.take() {
|
||||
_ = tx.send(());
|
||||
}
|
||||
} else {
|
||||
// release the read guard on tasks, as we'll be updating it again
|
||||
drop(curr_tasks);
|
||||
// check the children for additional pending tasks
|
||||
// this will catch additional resource reads nested inside a conditional depending on initial resource reads
|
||||
if let Some(children) =
|
||||
children.lock().or_poisoned().as_mut()
|
||||
{
|
||||
children.dry_resolve();
|
||||
}
|
||||
|
||||
if tasks
|
||||
.try_read()
|
||||
.map(|n| n.is_empty())
|
||||
.unwrap_or(false)
|
||||
{
|
||||
// there are no additional pending tasks, and we can simply return
|
||||
if let Some(tx) = tasks_tx.take() {
|
||||
// If the receiver has dropped, it means the ScopedFuture has already
|
||||
// dropped, so it doesn't matter if we manage to send this.
|
||||
_ = tx.send(());
|
||||
}
|
||||
if let Some(tx) = notify_error_boundary.take() {
|
||||
_ = tx.send(());
|
||||
}
|
||||
}
|
||||
|
||||
// tell ourselves that we're just double-checking
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
tasks.track();
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
});
|
||||
|
||||
@@ -362,12 +406,17 @@ where
|
||||
None
|
||||
}
|
||||
_ = tasks_rx => {
|
||||
let children = {
|
||||
let mut children_lock = children.lock().or_poisoned();
|
||||
children_lock.take().expect("children should not be removed until we render here")
|
||||
};
|
||||
|
||||
// if we ran this earlier, reactive reads would always be registered as None
|
||||
// this is fine in the case where we want to use Suspend and .await on some future
|
||||
// but in situations like a <For each=|| some_resource.snapshot()/> we actually
|
||||
// want to be able to 1) synchronously read a resource's value, but still 2) wait
|
||||
// for it to load before we render everything
|
||||
let mut children = Box::pin(self.children.resolve().fuse());
|
||||
let mut children = Box::pin(children.resolve().fuse());
|
||||
|
||||
// we continue racing the children against the "do we have any local
|
||||
// resources?" Future
|
||||
|
||||
@@ -103,6 +103,76 @@ fn test_classes() {
|
||||
assert_eq!(rendered.to_html(), "<div class=\"my big red car\"></div>");
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
#[test]
|
||||
fn test_class_with_class_directive_merge() {
|
||||
use leptos::prelude::*;
|
||||
|
||||
// class= followed by class: should merge
|
||||
let rendered: View<HtmlElement<_, _, _>> = view! {
|
||||
<div class="foo" class:bar=true></div>
|
||||
};
|
||||
|
||||
assert_eq!(rendered.to_html(), "<div class=\"foo bar\"></div>");
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
#[test]
|
||||
fn test_solo_class_directive() {
|
||||
use leptos::prelude::*;
|
||||
|
||||
// Solo class: directive should work without class attribute
|
||||
let rendered: View<HtmlElement<_, _, _>> = view! {
|
||||
<div class:foo=true></div>
|
||||
};
|
||||
|
||||
assert_eq!(rendered.to_html(), "<div class=\"foo\"></div>");
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
#[test]
|
||||
fn test_class_directive_with_static_class() {
|
||||
use leptos::prelude::*;
|
||||
|
||||
// class:foo comes after class= due to macro sorting
|
||||
// The class= clears buffer, then class:foo appends
|
||||
let rendered: View<HtmlElement<_, _, _>> = view! {
|
||||
<div class:foo=true class="bar"></div>
|
||||
};
|
||||
|
||||
// After macro sorting: class="bar" class:foo=true
|
||||
// Expected: "bar foo"
|
||||
assert_eq!(rendered.to_html(), "<div class=\"bar foo\"></div>");
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
#[test]
|
||||
fn test_global_class_applied() {
|
||||
use leptos::prelude::*;
|
||||
|
||||
// Test that a global class is properly applied
|
||||
let rendered: View<HtmlElement<_, _, _>> = view! { class="global",
|
||||
<div></div>
|
||||
};
|
||||
|
||||
assert_eq!(rendered.to_html(), "<div class=\"global\"></div>");
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
#[test]
|
||||
fn test_multiple_class_attributes_overwrite() {
|
||||
use leptos::prelude::*;
|
||||
|
||||
// When multiple class attributes are applied, the last one should win (browser behavior)
|
||||
// This simulates what happens when attributes are combined programmatically
|
||||
let el = leptos::html::div().class("first").class("second");
|
||||
|
||||
let html = el.to_html();
|
||||
|
||||
// The second class attribute should overwrite the first
|
||||
assert_eq!(html, "<div class=\"second\"></div>");
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
#[test]
|
||||
fn ssr_with_styles() {
|
||||
|
||||
@@ -5,7 +5,7 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Configuration for the Leptos web framework."
|
||||
readme = "../README.md"
|
||||
version = "0.8.7"
|
||||
version = "0.8.8"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
|
||||
@@ -221,18 +221,15 @@ fn env_w_default(
|
||||
/// An enum that can be used to define the environment Leptos is running in.
|
||||
/// Setting this to the `PROD` variant will not include the WebSocket code for `cargo-leptos` watch mode.
|
||||
/// Defaults to `DEV`.
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
|
||||
#[derive(
|
||||
Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, Default,
|
||||
)]
|
||||
pub enum Env {
|
||||
PROD,
|
||||
#[default]
|
||||
DEV,
|
||||
}
|
||||
|
||||
impl Default for Env {
|
||||
fn default() -> Self {
|
||||
Self::DEV
|
||||
}
|
||||
}
|
||||
|
||||
fn env_from_str(input: &str) -> Result<Env, LeptosConfigError> {
|
||||
let sanitized = input.to_lowercase();
|
||||
match sanitized.as_ref() {
|
||||
@@ -279,18 +276,15 @@ impl TryFrom<String> for Env {
|
||||
|
||||
/// An enum that can be used to define the websocket protocol Leptos uses for hotreloading
|
||||
/// Defaults to `ws`.
|
||||
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
|
||||
#[derive(
|
||||
Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, Default,
|
||||
)]
|
||||
pub enum ReloadWSProtocol {
|
||||
#[default]
|
||||
WS,
|
||||
WSS,
|
||||
}
|
||||
|
||||
impl Default for ReloadWSProtocol {
|
||||
fn default() -> Self {
|
||||
Self::WS
|
||||
}
|
||||
}
|
||||
|
||||
fn ws_from_str(input: &str) -> Result<ReloadWSProtocol, LeptosConfigError> {
|
||||
let sanitized = input.to_lowercase();
|
||||
match sanitized.as_ref() {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_dom"
|
||||
version = "0.8.6"
|
||||
version = "0.8.7"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_macro"
|
||||
version = "0.8.8"
|
||||
version = "0.8.12"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
|
||||
@@ -1016,25 +1016,27 @@ struct PropOpt {
|
||||
name: Option<String>,
|
||||
}
|
||||
|
||||
struct TypedBuilderOpts {
|
||||
struct TypedBuilderOpts<'a> {
|
||||
default: bool,
|
||||
default_with_value: Option<syn::Expr>,
|
||||
strip_option: bool,
|
||||
into: bool,
|
||||
ty: &'a Type,
|
||||
}
|
||||
|
||||
impl TypedBuilderOpts {
|
||||
fn from_opts(opts: &PropOpt, is_ty_option: bool) -> Self {
|
||||
impl<'a> TypedBuilderOpts<'a> {
|
||||
fn from_opts(opts: &PropOpt, ty: &'a Type) -> Self {
|
||||
Self {
|
||||
default: opts.optional || opts.optional_no_strip || opts.attrs,
|
||||
default_with_value: opts.default.clone(),
|
||||
strip_option: opts.strip_option || opts.optional && is_ty_option,
|
||||
strip_option: opts.strip_option || opts.optional && is_option(ty),
|
||||
into: opts.into,
|
||||
ty,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TypedBuilderOpts {
|
||||
impl TypedBuilderOpts<'_> {
|
||||
fn to_serde_tokens(&self) -> TokenStream {
|
||||
let default = if let Some(v) = &self.default_with_value {
|
||||
let v = v.to_token_stream().to_string();
|
||||
@@ -1053,7 +1055,7 @@ impl TypedBuilderOpts {
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TypedBuilderOpts {
|
||||
impl ToTokens for TypedBuilderOpts<'_> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let default = if let Some(v) = &self.default_with_value {
|
||||
let v = v.to_token_stream().to_string();
|
||||
@@ -1064,14 +1066,29 @@ impl ToTokens for TypedBuilderOpts {
|
||||
quote! {}
|
||||
};
|
||||
|
||||
let strip_option = if self.strip_option {
|
||||
// If self.strip_option && self.into, then the strip_option will be represented as part of the transform closure.
|
||||
let strip_option = if self.strip_option && !self.into {
|
||||
quote! { strip_option, }
|
||||
} else {
|
||||
quote! {}
|
||||
};
|
||||
|
||||
let into = if self.into {
|
||||
quote! { into, }
|
||||
if !self.strip_option {
|
||||
let ty = &self.ty;
|
||||
quote! {
|
||||
fn transform<__IntoReactiveValueMarker>(value: impl ::leptos::prelude::IntoReactiveValue<#ty, __IntoReactiveValueMarker>) -> #ty {
|
||||
value.into_reactive_value()
|
||||
},
|
||||
}
|
||||
} else {
|
||||
let ty = unwrap_option(self.ty);
|
||||
quote! {
|
||||
fn transform<__IntoReactiveValueMarker>(value: impl ::leptos::prelude::IntoReactiveValue<#ty, __IntoReactiveValueMarker>) -> Option<#ty> {
|
||||
Some(value.into_reactive_value())
|
||||
},
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote! {}
|
||||
};
|
||||
@@ -1107,8 +1124,7 @@ fn prop_builder_fields(
|
||||
ty,
|
||||
} = prop;
|
||||
|
||||
let builder_attrs =
|
||||
TypedBuilderOpts::from_opts(prop_opts, is_option(ty));
|
||||
let builder_attrs = TypedBuilderOpts::from_opts(prop_opts, ty);
|
||||
|
||||
let builder_docs = prop_to_doc(prop, PropDocStyle::Inline);
|
||||
|
||||
@@ -1153,8 +1169,7 @@ fn prop_serializer_fields(vis: &Visibility, props: &[Prop]) -> TokenStream {
|
||||
ty,
|
||||
} = prop;
|
||||
|
||||
let builder_attrs =
|
||||
TypedBuilderOpts::from_opts(prop_opts, is_option(ty));
|
||||
let builder_attrs = TypedBuilderOpts::from_opts(prop_opts, ty);
|
||||
let serde_attrs = builder_attrs.to_serde_tokens();
|
||||
|
||||
let PatIdent { ident, by_ref, .. } = &name;
|
||||
|
||||
@@ -2,12 +2,12 @@ use convert_case::{Case, Casing};
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::Ident;
|
||||
use proc_macro_error2::abort;
|
||||
use quote::quote;
|
||||
use quote::{format_ident, quote};
|
||||
use std::{
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
mem,
|
||||
};
|
||||
use syn::{parse_macro_input, ItemFn};
|
||||
use syn::{parse_macro_input, parse_quote, ItemFn, ReturnType, Stmt};
|
||||
|
||||
pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
let name = if !args.is_empty() {
|
||||
@@ -16,7 +16,7 @@ pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
None
|
||||
};
|
||||
|
||||
let mut fun = syn::parse::<ItemFn>(s).unwrap_or_else(|e| {
|
||||
let fun = syn::parse::<ItemFn>(s).unwrap_or_else(|e| {
|
||||
abort!(e.span(), "`lazy` can only be used on a function")
|
||||
});
|
||||
|
||||
@@ -47,29 +47,50 @@ pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
|
||||
let is_wasm = cfg!(feature = "csr") || cfg!(feature = "hydrate");
|
||||
if is_wasm {
|
||||
let mut fun = fun;
|
||||
let mut return_wrapper = None;
|
||||
if was_async {
|
||||
fun.sig.asyncness = None;
|
||||
let ty = match &fun.sig.output {
|
||||
ReturnType::Default => quote! { () },
|
||||
ReturnType::Type(_, ty) => quote! { #ty },
|
||||
};
|
||||
let sync_output: ReturnType = parse_quote! {
|
||||
-> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = #ty> + ::std::marker::Send + ::std::marker::Sync>>
|
||||
};
|
||||
let async_output = mem::replace(&mut fun.sig.output, sync_output);
|
||||
let stmts = mem::take(&mut fun.block.stmts);
|
||||
fun.block.stmts.push(Stmt::Expr(parse_quote! {
|
||||
::std::boxed::Box::pin(::leptos::__reexports::send_wrapper::SendWrapper::new(async move {
|
||||
#( #stmts )*
|
||||
}))
|
||||
}, None));
|
||||
return_wrapper = Some(quote! {
|
||||
return_wrapper(let future = _; { future.await } #async_output),
|
||||
});
|
||||
}
|
||||
let preload_name = format_ident!("__preload_{}", fun.sig.ident);
|
||||
|
||||
quote! {
|
||||
#[::leptos::wasm_split_helpers::wasm_split(
|
||||
#[::leptos::wasm_split::wasm_split(
|
||||
#unique_name,
|
||||
::leptos::__reexports::send_wrapper
|
||||
wasm_split_path = ::leptos::wasm_split,
|
||||
preload(#[doc(hidden)] #[allow(non_snake_case)] #preload_name),
|
||||
#return_wrapper
|
||||
)]
|
||||
#fun
|
||||
}
|
||||
} else {
|
||||
let mut fun = fun;
|
||||
if !was_async {
|
||||
fun.sig.asyncness = Some(Default::default());
|
||||
}
|
||||
|
||||
let statements = &mut fun.block.stmts;
|
||||
let old_statements = mem::take(statements);
|
||||
statements.push(
|
||||
syn::parse(
|
||||
quote! {
|
||||
::leptos::prefetch_lazy_fn_on_server(#unique_name_str);
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
statements.push(parse_quote! {
|
||||
::leptos::prefetch_lazy_fn_on_server(#unique_name_str);
|
||||
});
|
||||
statements.extend(old_statements);
|
||||
quote! { #fun }
|
||||
}
|
||||
|
||||
@@ -159,25 +159,27 @@ struct PropOpt {
|
||||
pub attrs: bool,
|
||||
}
|
||||
|
||||
struct TypedBuilderOpts {
|
||||
struct TypedBuilderOpts<'a> {
|
||||
default: bool,
|
||||
default_with_value: Option<syn::Expr>,
|
||||
strip_option: bool,
|
||||
into: bool,
|
||||
ty: &'a Type,
|
||||
}
|
||||
|
||||
impl TypedBuilderOpts {
|
||||
pub fn from_opts(opts: &PropOpt, is_ty_option: bool) -> Self {
|
||||
impl<'a> TypedBuilderOpts<'a> {
|
||||
pub fn from_opts(opts: &PropOpt, ty: &'a Type) -> Self {
|
||||
Self {
|
||||
default: opts.optional || opts.optional_no_strip || opts.attrs,
|
||||
default_with_value: opts.default.clone(),
|
||||
strip_option: opts.strip_option || opts.optional && is_ty_option,
|
||||
strip_option: opts.strip_option || opts.optional && is_option(ty),
|
||||
into: opts.into,
|
||||
ty,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for TypedBuilderOpts {
|
||||
impl ToTokens for TypedBuilderOpts<'_> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let default = if let Some(v) = &self.default_with_value {
|
||||
let v = v.to_token_stream().to_string();
|
||||
@@ -188,14 +190,29 @@ impl ToTokens for TypedBuilderOpts {
|
||||
quote! {}
|
||||
};
|
||||
|
||||
let strip_option = if self.strip_option {
|
||||
// If self.strip_option && self.into, then the strip_option will be represented as part of the transform closure.
|
||||
let strip_option = if self.strip_option && !self.into {
|
||||
quote! { strip_option, }
|
||||
} else {
|
||||
quote! {}
|
||||
};
|
||||
|
||||
let into = if self.into {
|
||||
quote! { into, }
|
||||
if !self.strip_option {
|
||||
let ty = &self.ty;
|
||||
quote! {
|
||||
fn transform<__IntoReactiveValueMarker>(value: impl ::leptos::prelude::IntoReactiveValue<#ty, __IntoReactiveValueMarker>) -> #ty {
|
||||
value.into_reactive_value()
|
||||
},
|
||||
}
|
||||
} else {
|
||||
let ty = unwrap_option(self.ty);
|
||||
quote! {
|
||||
fn transform<__IntoReactiveValueMarker>(value: impl ::leptos::prelude::IntoReactiveValue<#ty, __IntoReactiveValueMarker>) -> Option<#ty> {
|
||||
Some(value.into_reactive_value())
|
||||
},
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote! {}
|
||||
};
|
||||
@@ -227,8 +244,7 @@ fn prop_builder_fields(vis: &Visibility, props: &[Prop]) -> TokenStream {
|
||||
ty,
|
||||
} = prop;
|
||||
|
||||
let builder_attrs =
|
||||
TypedBuilderOpts::from_opts(prop_opts, is_option(ty));
|
||||
let builder_attrs = TypedBuilderOpts::from_opts(prop_opts, ty);
|
||||
|
||||
let builder_docs = prop_to_doc(prop, PropDocStyle::Inline);
|
||||
|
||||
|
||||
@@ -90,7 +90,7 @@ pub(crate) fn component_to_tokens(
|
||||
|
||||
if optional {
|
||||
optional_props.push(quote! {
|
||||
props.#name = { #value }.map(Into::into);
|
||||
props.#name = { #value }.map(::leptos::prelude::IntoReactiveValue::into_reactive_value);
|
||||
})
|
||||
} else {
|
||||
required_props.push(quote! {
|
||||
|
||||
@@ -25,9 +25,8 @@ use std::{
|
||||
use syn::{
|
||||
punctuated::Pair::{End, Punctuated},
|
||||
spanned::Spanned,
|
||||
Expr,
|
||||
Expr::Tuple,
|
||||
ExprArray, ExprLit, ExprRange, Lit, LitStr, RangeLimits, Stmt,
|
||||
Expr::{self, Tuple},
|
||||
ExprArray, ExprLit, ExprPath, ExprRange, Lit, LitStr, RangeLimits, Stmt,
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
@@ -1679,7 +1678,7 @@ fn attribute_value(
|
||||
}
|
||||
|
||||
// Keep list alphabetized for binary search
|
||||
const TYPED_EVENTS: [&str; 126] = [
|
||||
const TYPED_EVENTS: [&str; 127] = [
|
||||
"DOMContentLoaded",
|
||||
"abort",
|
||||
"afterprint",
|
||||
@@ -1775,6 +1774,7 @@ const TYPED_EVENTS: [&str; 126] = [
|
||||
"reset",
|
||||
"resize",
|
||||
"scroll",
|
||||
"scrollend",
|
||||
"securitypolicyviolation",
|
||||
"seeked",
|
||||
"seeking",
|
||||
@@ -1871,6 +1871,28 @@ pub(crate) fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn full_path_from_tag_name(tag_name: &NodeName) -> Option<ExprPath> {
|
||||
match tag_name {
|
||||
NodeName::Path(path) => Some(path.clone()),
|
||||
NodeName::Block(_) => {
|
||||
let span = tag_name.span();
|
||||
proc_macro_error2::emit_error!(
|
||||
span,
|
||||
"blocks not allowed in tag-name position"
|
||||
);
|
||||
None
|
||||
}
|
||||
_ => {
|
||||
let span = tag_name.span();
|
||||
proc_macro_error2::emit_error!(
|
||||
span,
|
||||
"punctuated names not allowed in slots"
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn directive_call_from_attribute_node(
|
||||
attr: &KeyedAttribute,
|
||||
directive_name: &str,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use super::{
|
||||
component_builder::maybe_optimised_component_children,
|
||||
convert_to_snake_case, ident_from_tag_name,
|
||||
convert_to_snake_case, full_path_from_tag_name,
|
||||
};
|
||||
use crate::view::{fragment_to_tokens, utils::filter_prefixed_attrs, TagType};
|
||||
use proc_macro2::{Ident, TokenStream, TokenTree};
|
||||
@@ -24,7 +24,7 @@ pub(crate) fn slot_to_tokens(
|
||||
node.name().to_string()
|
||||
});
|
||||
|
||||
let component_name = ident_from_tag_name(node.name());
|
||||
let component_path = full_path_from_tag_name(node.name());
|
||||
|
||||
let Some(parent_slots) = parent_slots else {
|
||||
proc_macro_error2::emit_error!(
|
||||
@@ -190,7 +190,7 @@ pub(crate) fn slot_to_tokens(
|
||||
|
||||
let slot = quote_spanned! {node.span()=>
|
||||
{
|
||||
let slot = #component_name::builder()
|
||||
let slot = #component_path::builder()
|
||||
#(#props)*
|
||||
#(#slots)*
|
||||
#children
|
||||
|
||||
@@ -120,6 +120,124 @@ fn returns_static_lifetime() {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
#[component]
|
||||
pub fn IntoReactiveValueTestComponentSignal(
|
||||
#[prop(into)] arg1: Signal<String>,
|
||||
#[prop(into)] arg2: Signal<String>,
|
||||
#[prop(into)] arg3: Signal<String>,
|
||||
#[prop(into)] arg4: Signal<usize>,
|
||||
#[prop(into)] arg5: Signal<usize>,
|
||||
#[prop(into)] arg6: Signal<usize>,
|
||||
#[prop(into)] arg7: Signal<Option<usize>>,
|
||||
#[prop(into)] arg8: ArcSignal<String>,
|
||||
#[prop(into)] arg9: ArcSignal<String>,
|
||||
#[prop(into)] arg10: ArcSignal<String>,
|
||||
#[prop(into)] arg11: ArcSignal<usize>,
|
||||
#[prop(into)] arg12: ArcSignal<usize>,
|
||||
#[prop(into)] arg13: ArcSignal<usize>,
|
||||
#[prop(into)] arg14: ArcSignal<Option<usize>>,
|
||||
// Optionals:
|
||||
#[prop(into, optional)] arg15: Option<Signal<usize>>,
|
||||
#[prop(into, optional)] arg16_purposely_omitted: Option<Signal<usize>>,
|
||||
#[prop(into, optional)] arg17: Option<Signal<usize>>,
|
||||
#[prop(into, strip_option)] arg18: Option<Signal<usize>>,
|
||||
) -> impl IntoView {
|
||||
move || {
|
||||
view! {
|
||||
<div>
|
||||
<p>{arg1.get()}</p>
|
||||
<p>{arg2.get()}</p>
|
||||
<p>{arg3.get()}</p>
|
||||
<p>{arg4.get()}</p>
|
||||
<p>{arg5.get()}</p>
|
||||
<p>{arg6.get()}</p>
|
||||
<p>{arg7.get()}</p>
|
||||
<p>{arg8.get()}</p>
|
||||
<p>{arg9.get()}</p>
|
||||
<p>{arg10.get()}</p>
|
||||
<p>{arg11.get()}</p>
|
||||
<p>{arg12.get()}</p>
|
||||
<p>{arg13.get()}</p>
|
||||
<p>{arg14.get()}</p>
|
||||
<p>{arg15.get()}</p>
|
||||
<p>{arg16_purposely_omitted.get()}</p>
|
||||
<p>{arg17.get()}</p>
|
||||
<p>{arg18.get()}</p>
|
||||
</div>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[component]
|
||||
pub fn IntoReactiveValueTestComponentCallback(
|
||||
#[prop(into)] arg1: Callback<(), String>,
|
||||
#[prop(into)] arg2: Callback<usize, String>,
|
||||
#[prop(into)] arg3: Callback<(usize,), String>,
|
||||
#[prop(into)] arg4: Callback<(usize, String), String>,
|
||||
#[prop(into)] arg5: UnsyncCallback<(), String>,
|
||||
#[prop(into)] arg6: UnsyncCallback<usize, String>,
|
||||
#[prop(into)] arg7: UnsyncCallback<(usize,), String>,
|
||||
#[prop(into)] arg8: UnsyncCallback<(usize, String), String>,
|
||||
) -> impl IntoView {
|
||||
move || {
|
||||
view! {
|
||||
<div>
|
||||
<p>{arg1.run(())}</p>
|
||||
<p>{arg2.run(1)}</p>
|
||||
<p>{arg3.run((2,))}</p>
|
||||
<p>{arg4.run((3, "three".into()))}</p>
|
||||
<p>{arg5.run(())}</p>
|
||||
<p>{arg6.run(1)}</p>
|
||||
<p>{arg7.run((2,))}</p>
|
||||
<p>{arg8.run((3, "three".into()))}</p>
|
||||
</div>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
#[test]
|
||||
fn test_into_reactive_value_signal() {
|
||||
let _ = view! {
|
||||
<IntoReactiveValueTestComponentSignal
|
||||
arg1=move || "I was a reactive closure!"
|
||||
arg2="I was a basic str!"
|
||||
arg3=Signal::stored("I was already a signal!")
|
||||
arg4=move || 2
|
||||
arg5=3
|
||||
arg6=Signal::stored(4)
|
||||
arg7=|| 2
|
||||
arg8=move || "I was a reactive closure!"
|
||||
arg9="I was a basic str!"
|
||||
arg10=ArcSignal::stored("I was already a signal!".to_string())
|
||||
arg11=move || 2
|
||||
arg12=3
|
||||
arg13=ArcSignal::stored(4)
|
||||
arg14=|| 2
|
||||
arg15=|| 2
|
||||
nostrip:arg17=Some(|| 2)
|
||||
arg18=|| 2
|
||||
/>
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_into_reactive_value_callback() {
|
||||
let _ = view! {
|
||||
<IntoReactiveValueTestComponentCallback
|
||||
arg1=|| "I was a callback static str!"
|
||||
arg2=|_n| "I was a callback static str!"
|
||||
arg3=|(_n,)| "I was a callback static str!"
|
||||
arg4=|(_n, _s)| "I was a callback static str!"
|
||||
arg5=|| "I was a callback static str!"
|
||||
arg6=|_n| "I was a callback static str!"
|
||||
arg7=|(_n,)| "I was a callback static str!"
|
||||
arg8=|(_n, _s)| "I was a callback static str!"
|
||||
/>
|
||||
};
|
||||
}
|
||||
|
||||
// an attempt to catch regressions in macro hygiene
|
||||
mod macro_hygiene {
|
||||
// Ensures no relative module path to leptos is used inside macros.
|
||||
@@ -152,12 +270,7 @@ mod macro_hygiene {
|
||||
|
||||
#[component]
|
||||
fn Component() -> impl IntoView {
|
||||
view! {
|
||||
<div>
|
||||
{().into_any()}
|
||||
{()}
|
||||
</div>
|
||||
}
|
||||
view! { <div>{().into_any()} {()}</div> }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_server"
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
|
||||
@@ -188,6 +188,39 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
thread_local! {
|
||||
static RESOURCE_SOURCE_SIGNAL_ACTIVE: AtomicBool = const { AtomicBool::new(false) };
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
/// Returns whether this thread is currently running a resource source signal.
|
||||
pub fn in_resource_source_signal() -> bool {
|
||||
RESOURCE_SOURCE_SIGNAL_ACTIVE
|
||||
.with(|scope| scope.load(std::sync::atomic::Ordering::Relaxed))
|
||||
}
|
||||
|
||||
/// Set a static to true whilst running the given function.
|
||||
/// [`in_resource_source_signal`] will return true whilst the function is running.
|
||||
fn run_in_resource_source_signal<T>(fun: impl FnOnce() -> T) -> T {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
// For the theoretical nested case, set back to initial value rather than false:
|
||||
let initial = RESOURCE_SOURCE_SIGNAL_ACTIVE.with(|scope| {
|
||||
scope.swap(true, std::sync::atomic::Ordering::Relaxed)
|
||||
});
|
||||
let result = fun();
|
||||
RESOURCE_SOURCE_SIGNAL_ACTIVE.with(|scope| {
|
||||
scope.store(initial, std::sync::atomic::Ordering::Relaxed)
|
||||
});
|
||||
result
|
||||
}
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
fun()
|
||||
}
|
||||
}
|
||||
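For context, a sketch of the situation this guard addresses: one resource reading another inside its source closure previously triggered the resource-read warning further down, and now does not, because the source runs inside `run_in_resource_source_signal`. The setup is illustrative (it assumes the usual `leptos::prelude::Resource` API and would run inside a Leptos app where an owner is set up), not code from this diff:

use leptos::prelude::*;

// Meant to be called from within a component body or another reactive context.
fn chained_resources() {
    let first = Resource::new(|| (), |_| async { 1 });

    // The source closure of `_second` now runs inside
    // `run_in_resource_source_signal`, so reading `first` here is no longer
    // flagged as a resource read outside an effect or Suspense context.
    let _second = Resource::new(
        move || first.get(),
        |maybe_first| async move { maybe_first.unwrap_or(0) + 1 },
    );
}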
|
||||
impl<T, Ser> ReadUntracked for ArcResource<T, Ser>
|
||||
where
|
||||
T: 'static,
|
||||
@@ -202,7 +235,9 @@ where
|
||||
computed::suspense::SuspenseContext, effect::in_effect_scope,
|
||||
owner::use_context,
|
||||
};
|
||||
if !in_effect_scope() && use_context::<SuspenseContext>().is_none()
|
||||
if !in_effect_scope()
|
||||
&& !in_resource_source_signal()
|
||||
&& use_context::<SuspenseContext>().is_none()
|
||||
{
|
||||
let location = std::panic::Location::caller();
|
||||
reactive_graph::log_warning(format_args!(
|
||||
@@ -271,7 +306,7 @@ where
|
||||
let refetch = ArcRwSignal::new(0);
|
||||
let source = ArcMemo::new({
|
||||
let refetch = refetch.clone();
|
||||
move |_| (refetch.get(), source())
|
||||
move |_| (refetch.get(), run_in_resource_source_signal(&source))
|
||||
});
|
||||
let fun = {
|
||||
let source = source.clone();
|
||||
@@ -909,7 +944,9 @@ where
|
||||
computed::suspense::SuspenseContext, effect::in_effect_scope,
|
||||
owner::use_context,
|
||||
};
|
||||
if !in_effect_scope() && use_context::<SuspenseContext>().is_none()
|
||||
if !in_effect_scope()
|
||||
&& !in_resource_source_signal()
|
||||
&& use_context::<SuspenseContext>().is_none()
|
||||
{
|
||||
let location = std::panic::Location::caller();
|
||||
reactive_graph::log_warning(format_args!(
|
||||
|
||||
@@ -15,19 +15,18 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@playwright/test": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
|
||||
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
|
||||
"integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"playwright": "1.44.1"
|
||||
"playwright": "1.56.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
@@ -46,7 +45,6 @@
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
@@ -56,35 +54,33 @@
|
||||
}
|
||||
},
|
||||
"node_modules/playwright": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
|
||||
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
|
||||
"integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"playwright-core": "1.44.1"
|
||||
"playwright-core": "1.56.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
"node": ">=18"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/playwright-core": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
|
||||
"integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"playwright-core": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
@@ -111,12 +107,12 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@playwright/test": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
|
||||
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
|
||||
"integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"playwright": "1.44.1"
|
||||
"playwright": "1.56.1"
|
||||
}
|
||||
},
|
||||
"@types/node": {
|
||||
@@ -136,19 +132,19 @@
|
||||
"optional": true
|
||||
},
|
||||
"playwright": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
|
||||
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
|
||||
"integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"fsevents": "2.3.2",
|
||||
"playwright-core": "1.44.1"
|
||||
"playwright-core": "1.56.1"
|
||||
}
|
||||
},
|
||||
"playwright-core": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
|
||||
"integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
|
||||
"dev": true
|
||||
},
|
||||
"typescript": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_graph"
|
||||
version = "0.2.7"
|
||||
version = "0.2.11"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -29,6 +29,7 @@ send_wrapper = { features = [
|
||||
], workspace = true, default-features = true }
|
||||
subsecond = { workspace = true, default-features = true, optional = true }
|
||||
indexmap = { workspace = true, default-features = true }
|
||||
paste = { workspace = true, default-features = true }
|
||||
|
||||
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
|
||||
web-sys = { version = "0.3.77", features = ["console"] }
|
||||
@@ -40,6 +41,7 @@ tokio = { features = [
|
||||
], workspace = true, default-features = true }
|
||||
tokio-test = { workspace = true, default-features = true }
|
||||
any_spawner = { workspace = true, features = ["futures-executor", "tokio"] }
|
||||
typed-builder.workspace = true
|
||||
|
||||
[build-dependencies]
|
||||
rustc_version = { workspace = true, default-features = true }
|
||||
|
||||
@@ -2,48 +2,19 @@
|
||||
//! for component properties, because they can be used to define optional callback functions,
|
||||
//! which generic props don’t support.
|
||||
//!
|
||||
//! # Usage
|
||||
//! Callbacks can be created manually from any function or closure, but the easiest way
|
||||
//! to create them is to use `#[prop(into)]` when defining a component.
|
||||
//! ```
|
||||
//! use leptos::prelude::*;
|
||||
//!
|
||||
//! #[component]
|
||||
//! fn MyComponent(
|
||||
//! #[prop(into)] render_number: Callback<(i32,), String>,
|
||||
//! ) -> impl IntoView {
|
||||
//! view! {
|
||||
//! <div>
|
||||
//! {render_number.run((1,))}
|
||||
//! // callbacks can be called multiple times
|
||||
//! {render_number.run((42,))}
|
||||
//! </div>
|
||||
//! }
|
||||
//! }
|
||||
//! // you can pass a closure directly as `render_number`
|
||||
//! fn test() -> impl IntoView {
|
||||
//! view! {
|
||||
//! <MyComponent render_number=|x: i32| x.to_string()/>
|
||||
//! }
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! *Notes*:
|
||||
//! - The `render_number` prop can receive any type that implements `Fn(i32) -> String`.
|
||||
//! - Callbacks are most useful when you want optional generic props.
|
||||
//! - All callbacks implement the [`Callable`](leptos::callback::Callable) trait, and can be invoked with `my_callback.run(input)`.
|
||||
//! - The callback types implement [`Copy`], so they can easily be moved into and out of other closures, just like signals.
|
||||
//! The callback types implement [`Copy`], so they can easily be moved into and out of other closures, just like signals.
|
||||
//!
|
||||
//! # Types
|
||||
//! This module implements two callback types:
|
||||
//! - [`Callback`](leptos::callback::Callback)
|
||||
//! - [`UnsyncCallback`](leptos::callback::UnsyncCallback)
|
||||
//! - [`Callback`](reactive_graph::callback::Callback)
|
||||
//! - [`UnsyncCallback`](reactive_graph::callback::UnsyncCallback)
|
||||
//!
|
||||
//! Use `UnsyncCallback` if the function is not `Send` and `Sync`.
|
||||
|
||||
use reactive_graph::{
|
||||
use crate::{
|
||||
owner::{LocalStorage, StoredValue},
|
||||
traits::{Dispose, WithValue},
|
||||
IntoReactiveValue,
|
||||
};
|
||||
use std::{fmt, rc::Rc, sync::Arc};
|
||||
|
||||
@@ -60,7 +31,16 @@ pub trait Callable<In: 'static, Out: 'static = ()> {
|
||||
fn run(&self, input: In) -> Out;
|
||||
}
|
||||
|
||||
/// A callback type that is not required to be `Send + Sync`.
|
||||
/// A callback type that is not required to be [`Send`] or [`Sync`].
|
||||
///
|
||||
/// # Example
|
||||
/// ```
|
||||
/// # use reactive_graph::prelude::*; use reactive_graph::callback::*; let owner = reactive_graph::owner::Owner::new(); owner.set();
|
||||
/// let _: UnsyncCallback<()> = UnsyncCallback::new(|_| {});
|
||||
/// let _: UnsyncCallback<(i32, i32)> = (|_x: i32, _y: i32| {}).into();
|
||||
/// let cb: UnsyncCallback<i32, String> = UnsyncCallback::new(|x: i32| x.to_string());
|
||||
/// assert_eq!(cb.run(42), "42".to_string());
|
||||
/// ```
|
||||
pub struct UnsyncCallback<In: 'static, Out: 'static = ()>(
|
||||
StoredValue<Rc<dyn Fn(In) -> Out>, LocalStorage>,
|
||||
);
|
||||
@@ -148,28 +128,15 @@ impl_unsync_callable_from_fn!(
|
||||
P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, P11, P12
|
||||
);
|
||||
|
||||
/// Callbacks define a standard way to store functions and closures.
|
||||
/// A callback type that is [`Send`] + [`Sync`].
|
||||
///
|
||||
/// # Example
|
||||
/// ```
|
||||
/// # use leptos::prelude::*;
|
||||
/// # use leptos::callback::{Callable, Callback};
|
||||
/// #[component]
|
||||
/// fn MyComponent(
|
||||
/// #[prop(into)] render_number: Callback<(i32,), String>,
|
||||
/// ) -> impl IntoView {
|
||||
/// view! {
|
||||
/// <div>
|
||||
/// {render_number.run((42,))}
|
||||
/// </div>
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// fn test() -> impl IntoView {
|
||||
/// view! {
|
||||
/// <MyComponent render_number=move |x: i32| x.to_string()/>
|
||||
/// }
|
||||
/// }
|
||||
/// # use reactive_graph::prelude::*; use reactive_graph::callback::*; let owner = reactive_graph::owner::Owner::new(); owner.set();
|
||||
/// let _: Callback<()> = Callback::new(|_| {});
|
||||
/// let _: Callback<(i32, i32)> = (|_x: i32, _y: i32| {}).into();
|
||||
/// let cb: Callback<i32, String> = Callback::new(|x: i32| x.to_string());
|
||||
/// assert_eq!(cb.run(42), "42".to_string());
|
||||
/// ```
|
||||
pub struct Callback<In, Out = ()>(
|
||||
StoredValue<Arc<dyn Fn(In) -> Out + Send + Sync>>,
|
||||
@@ -241,6 +208,7 @@ impl_callable_from_fn!(P1, P2, P3, P4, P5, P6, P7, P8, P9, P10, P11, P12);
|
||||
|
||||
impl<In: 'static, Out: 'static> Callback<In, Out> {
|
||||
/// Creates a new callback from the given function.
|
||||
#[track_caller]
|
||||
pub fn new<F>(fun: F) -> Self
|
||||
where
|
||||
F: Fn(In) -> Out + Send + Sync + 'static,
|
||||
@@ -262,22 +230,94 @@ impl<In: 'static, Out: 'static> Callback<In, Out> {
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub struct __IntoReactiveValueMarkerCallbackSingleParam;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub struct __IntoReactiveValueMarkerCallbackStrOutputToString;
|
||||
|
||||
impl<I, O, F>
|
||||
IntoReactiveValue<
|
||||
Callback<I, O>,
|
||||
__IntoReactiveValueMarkerCallbackSingleParam,
|
||||
> for F
|
||||
where
|
||||
F: Fn(I) -> O + Send + Sync + 'static,
|
||||
{
|
||||
#[track_caller]
|
||||
fn into_reactive_value(self) -> Callback<I, O> {
|
||||
Callback::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<I, O, F>
|
||||
IntoReactiveValue<
|
||||
UnsyncCallback<I, O>,
|
||||
__IntoReactiveValueMarkerCallbackSingleParam,
|
||||
> for F
|
||||
where
|
||||
F: Fn(I) -> O + 'static,
|
||||
{
|
||||
#[track_caller]
|
||||
fn into_reactive_value(self) -> UnsyncCallback<I, O> {
|
||||
UnsyncCallback::new(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<I, F>
|
||||
IntoReactiveValue<
|
||||
Callback<I, String>,
|
||||
__IntoReactiveValueMarkerCallbackStrOutputToString,
|
||||
> for F
|
||||
where
|
||||
F: Fn(I) -> &'static str + Send + Sync + 'static,
|
||||
{
|
||||
#[track_caller]
|
||||
fn into_reactive_value(self) -> Callback<I, String> {
|
||||
Callback::new(move |i| self(i).to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl<I, F>
|
||||
IntoReactiveValue<
|
||||
UnsyncCallback<I, String>,
|
||||
__IntoReactiveValueMarkerCallbackStrOutputToString,
|
||||
> for F
|
||||
where
|
||||
F: Fn(I) -> &'static str + 'static,
|
||||
{
|
||||
#[track_caller]
|
||||
fn into_reactive_value(self) -> UnsyncCallback<I, String> {
|
||||
UnsyncCallback::new(move |i| self(i).to_string())
|
||||
}
|
||||
}
|
||||
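A hedged usage sketch of what the two markers above enable: single-argument callbacks no longer need the `(arg,)` tuple form, and a `&'static str` return is promoted to `String`. Paths assume the callback module now lives in `reactive_graph`, as this diff moves it, and that an `Owner` is required for the arena-backed storage:

use reactive_graph::{
    callback::{Callable, Callback},
    owner::Owner,
    prelude::IntoReactiveValue,
};

fn main() {
    let owner = Owner::new();
    owner.set();

    // Single-argument callback without the `(arg,)` tuple wrapper.
    let double: Callback<i32, i32> = (|x: i32| x * 2).into_reactive_value();
    // A `&'static str` return is converted to `String` by the marker impl above.
    let label: Callback<i32, String> =
        (|x: i32| if x > 0 { "pos" } else { "non-pos" }).into_reactive_value();

    assert_eq!(double.run(21), 42);
    assert_eq!(label.run(-1), "non-pos");
}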
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::Callable;
|
||||
use crate::callback::{Callback, UnsyncCallback};
|
||||
use reactive_graph::traits::Dispose;
|
||||
use crate::{
|
||||
callback::{Callback, UnsyncCallback},
|
||||
owner::Owner,
|
||||
traits::Dispose,
|
||||
IntoReactiveValue,
|
||||
};
|
||||
|
||||
struct NoClone {}
|
||||
|
||||
#[test]
|
||||
fn clone_callback() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let callback = Callback::new(move |_no_clone: NoClone| NoClone {});
|
||||
let _cloned = callback;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn clone_unsync_callback() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let callback =
|
||||
UnsyncCallback::new(move |_no_clone: NoClone| NoClone {});
|
||||
let _cloned = callback;
|
||||
@@ -285,20 +325,39 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn runback_from() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let _callback: Callback<(), String> = (|| "test").into();
|
||||
let _callback: Callback<(i32, String), String> =
|
||||
(|num, s| format!("{num} {s}")).into();
|
||||
// With IntoReactiveValue, single params work without wrapping the argument in a (foo,) tuple:
|
||||
let _callback: Callback<usize, &'static str> =
|
||||
(|_usize| "test").into_reactive_value();
|
||||
let _callback: Callback<usize, String> =
|
||||
(|_usize| "test").into_reactive_value();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sync_callback_from() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let _callback: UnsyncCallback<(), String> = (|| "test").into();
|
||||
let _callback: UnsyncCallback<(i32, String), String> =
|
||||
(|num, s| format!("{num} {s}")).into();
|
||||
// With IntoReactiveValue, single params work without wrapping the argument in a (foo,) tuple:
|
||||
let _callback: UnsyncCallback<usize, &'static str> =
|
||||
(|_usize| "test").into_reactive_value();
|
||||
let _callback: UnsyncCallback<usize, String> =
|
||||
(|_usize| "test").into_reactive_value();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sync_callback_try_run() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let callback = Callback::new(move |arg| arg);
|
||||
assert_eq!(callback.try_run((0,)), Some((0,)));
|
||||
callback.dispose();
|
||||
@@ -307,6 +366,9 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn unsync_callback_try_run() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let callback = UnsyncCallback::new(move |arg| arg);
|
||||
assert_eq!(callback.try_run((0,)), Some((0,)));
|
||||
callback.dispose();
|
||||
@@ -315,6 +377,9 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn callback_matches_same() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let callback1 = Callback::new(|x: i32| x * 2);
|
||||
let callback2 = callback1;
|
||||
assert!(callback1.matches(&callback2));
|
||||
@@ -322,6 +387,9 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn callback_matches_different() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let callback1 = Callback::new(|x: i32| x * 2);
|
||||
let callback2 = Callback::new(|x: i32| x + 1);
|
||||
assert!(!callback1.matches(&callback2));
|
||||
@@ -329,6 +397,9 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn unsync_callback_matches_same() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let callback1 = UnsyncCallback::new(|x: i32| x * 2);
|
||||
let callback2 = callback1;
|
||||
assert!(callback1.matches(&callback2));
|
||||
@@ -336,6 +407,9 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn unsync_callback_matches_different() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let callback1 = UnsyncCallback::new(|x: i32| x * 2);
|
||||
let callback2 = UnsyncCallback::new(|x: i32| x + 1);
|
||||
assert!(!callback1.matches(&callback2));
|
||||
@@ -110,10 +110,12 @@ fn effect_base() -> (Receiver, Owner, Arc<RwLock<EffectInner>>) {
|
||||
(rx, owner, inner)
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
thread_local! {
|
||||
static EFFECT_SCOPE_ACTIVE: AtomicBool = const { AtomicBool::new(false) };
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
/// Returns whether this thread is currently running an effect.
|
||||
pub fn in_effect_scope() -> bool {
|
||||
EFFECT_SCOPE_ACTIVE
|
||||
@@ -123,14 +125,22 @@ pub fn in_effect_scope() -> bool {
|
||||
/// Set a static to true whilst running the given function.
|
||||
/// [`in_effect_scope`] will return true whilst the function is running.
|
||||
fn run_in_effect_scope<T>(fun: impl FnOnce() -> T) -> T {
|
||||
// For the theoretical nested case, set back to initial value rather than false:
|
||||
let initial = EFFECT_SCOPE_ACTIVE
|
||||
.with(|scope| scope.swap(true, std::sync::atomic::Ordering::Relaxed));
|
||||
let result = fun();
|
||||
EFFECT_SCOPE_ACTIVE.with(|scope| {
|
||||
scope.store(initial, std::sync::atomic::Ordering::Relaxed)
|
||||
});
|
||||
result
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
// For the theoretical nested case, set back to initial value rather than false:
|
||||
let initial = EFFECT_SCOPE_ACTIVE.with(|scope| {
|
||||
scope.swap(true, std::sync::atomic::Ordering::Relaxed)
|
||||
});
|
||||
let result = fun();
|
||||
EFFECT_SCOPE_ACTIVE.with(|scope| {
|
||||
scope.store(initial, std::sync::atomic::Ordering::Relaxed)
|
||||
});
|
||||
result
|
||||
}
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
fun()
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> Effect<S>
|
||||
|
||||
@@ -65,6 +65,7 @@ impl Dispose for ImmediateEffect {
|
||||
|
||||
impl ImmediateEffect {
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this requires a `Fn` function because it might recurse.
|
||||
/// Use [Self::new_mut] to pass a `FnMut` function; it will panic on recursion.
|
||||
@@ -82,6 +83,7 @@ impl ImmediateEffect {
|
||||
Self { inner: Some(inner) }
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics on recursion or if triggered in parallel. Also see [Self::new]
|
||||
@@ -93,8 +95,10 @@ impl ImmediateEffect {
|
||||
Self::new(move || fun.try_lock().expect(MSG)())
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this requires a `Fn` function because it might recurse.
|
||||
/// Use [Self::new_mut_scoped] to pass a `FnMut` function; it will panic on recursion.
|
||||
/// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
|
||||
#[track_caller]
|
||||
pub fn new_scoped(fun: impl Fn() + Send + Sync + 'static) {
|
||||
@@ -102,6 +106,19 @@ impl ImmediateEffect {
|
||||
|
||||
on_cleanup(move || effect.dispose());
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics on recursion or if triggered in parallel. Also see [Self::new_scoped]
|
||||
#[track_caller]
|
||||
pub fn new_mut_scoped(fun: impl FnMut() + Send + Sync + 'static) {
|
||||
let effect = Self::new_mut(fun);
|
||||
|
||||
on_cleanup(move || effect.dispose());
|
||||
}
|
||||
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
///
|
||||
@@ -130,6 +147,41 @@ impl DefinedAt for ImmediateEffect {
|
||||
}
|
||||
}
|
||||
|
||||
/// Defers any [ImmediateEffect]s from running until the end of the function.
|
||||
///
|
||||
/// NOTE: this affects only [ImmediateEffect]s, not other effects.
|
||||
///
|
||||
/// NOTE: this is rarely needed, but it is useful for example when multiple signals
|
||||
/// need to be updated atomically (e.g., a double-bound signal tree).
|
||||
pub fn batch<T>(f: impl FnOnce() -> T) -> T {
|
||||
struct ExecuteOnDrop;
|
||||
impl Drop for ExecuteOnDrop {
|
||||
fn drop(&mut self) {
|
||||
let effects = {
|
||||
let mut batch = inner::BATCH.write().or_poisoned();
|
||||
batch.take().unwrap().into_inner().expect("lock poisoned")
|
||||
};
|
||||
// TODO: Should we skip the effects if it's panicking?
|
||||
for effect in effects {
|
||||
effect.update_if_necessary();
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut execute_on_drop = None;
|
||||
{
|
||||
let mut batch = inner::BATCH.write().or_poisoned();
|
||||
if batch.is_none() {
|
||||
execute_on_drop = Some(ExecuteOnDrop);
|
||||
} else {
|
||||
// Nested batching has no effect.
|
||||
}
|
||||
*batch = Some(batch.take().unwrap_or_default());
|
||||
}
|
||||
let ret = f();
|
||||
drop(execute_on_drop);
|
||||
ret
|
||||
}
|
||||
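A usage sketch of `batch` with an `ImmediateEffect`, assuming the crate's `effects` feature is enabled (as in the `test_batch` test later in this diff) and that both items are exported from `reactive_graph::effect`:

use reactive_graph::{
    effect::{batch, ImmediateEffect},
    owner::Owner,
    signal::RwSignal,
    traits::{Get, Set},
};
use std::sync::{
    atomic::{AtomicUsize, Ordering},
    Arc,
};

fn main() {
    let owner = Owner::new();
    owner.set();

    let a = RwSignal::new(0);
    let b = RwSignal::new(0);
    let runs = Arc::new(AtomicUsize::new(0));

    // Runs once immediately, then again whenever a tracked signal changes.
    ImmediateEffect::new_scoped({
        let runs = Arc::clone(&runs);
        move || {
            // Track both signals and count how often the effect runs.
            let _ = (a.get(), b.get());
            runs.fetch_add(1, Ordering::Relaxed);
        }
    });

    // Two writes, but the effect re-runs only once, after `batch` returns.
    batch(|| {
        a.set(1);
        b.set(1);
    });

    // 1 initial run + 1 batched re-run.
    assert_eq!(runs.load(Ordering::Relaxed), 2);
}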
|
||||
mod inner {
|
||||
use crate::{
|
||||
graph::{
|
||||
@@ -140,6 +192,7 @@ mod inner {
|
||||
owner::Owner,
|
||||
traits::DefinedAt,
|
||||
};
|
||||
use indexmap::IndexSet;
|
||||
use or_poisoned::OrPoisoned;
|
||||
use std::{
|
||||
panic::Location,
|
||||
@@ -147,6 +200,11 @@ mod inner {
|
||||
thread::{self, ThreadId},
|
||||
};
|
||||
|
||||
/// Only the [super::batch] function ever writes to the outer RwLock,
|
||||
/// while the effects write to the inner one.
|
||||
pub(super) static BATCH: RwLock<Option<RwLock<IndexSet<AnySubscriber>>>> =
|
||||
RwLock::new(None);
|
||||
|
||||
/// Handles subscription logic for effects.
|
||||
///
|
||||
/// To handle parallelism and recursion we assign ordered (1..) ids to each run.
|
||||
@@ -202,6 +260,8 @@ mod inner {
|
||||
fun: impl Fn() + Send + Sync + 'static,
|
||||
) -> Arc<RwLock<EffectInner>> {
|
||||
let owner = Owner::new();
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
let defined_at = Location::caller();
|
||||
|
||||
Arc::new_cyclic(|weak| {
|
||||
let any_subscriber = AnySubscriber(
|
||||
@@ -211,7 +271,7 @@ mod inner {
|
||||
|
||||
RwLock::new(EffectInner {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
defined_at,
|
||||
owner,
|
||||
state: ReactiveNodeState::Dirty,
|
||||
run_count_start: 0,
|
||||
@@ -260,6 +320,17 @@ mod inner {
|
||||
ReactiveNodeState::Dirty => true,
|
||||
};
|
||||
|
||||
{
|
||||
if let Some(batch) = &*BATCH.read().or_poisoned() {
|
||||
let mut batch = batch.write().or_poisoned();
|
||||
let subscriber =
|
||||
self.read().or_poisoned().any_subscriber.clone();
|
||||
|
||||
batch.insert(subscriber);
|
||||
return needs_update;
|
||||
}
|
||||
}
|
||||
|
||||
if needs_update {
|
||||
let mut guard = self.write().or_poisoned();
|
||||
|
||||
|
||||
reactive_graph/src/into_reactive_value.rs (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
#[doc(hidden)]
|
||||
pub struct __IntoReactiveValueMarkerBaseCase;
|
||||
|
||||
/// A helper trait that works like `Into<T>` but uses a marker generic
|
||||
/// to allow more `From` implementations than would be allowed with just `Into<T>`.
|
||||
pub trait IntoReactiveValue<T, M> {
|
||||
/// Converts `self` into a `T`.
|
||||
fn into_reactive_value(self) -> T;
|
||||
}
|
||||
|
||||
// The base case, which allows anything which implements .into() to work:
|
||||
impl<T, I> IntoReactiveValue<T, __IntoReactiveValueMarkerBaseCase> for I
|
||||
where
|
||||
I: Into<T>,
|
||||
{
|
||||
fn into_reactive_value(self) -> T {
|
||||
self.into()
|
||||
}
|
||||
}
|
||||
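For orientation, a minimal, self-contained sketch of why the marker generic matters: two blanket impls that would overlap if both targeted plain `Into<T>` can coexist when each is keyed by its own zero-sized marker type. The names below (`IntoValue`, `ViaInto`, `ViaClosure`) are illustrative only, not part of the leptos API:

struct ViaInto;
struct ViaClosure;

trait IntoValue<T, Marker> {
    fn into_value(self) -> T;
}

// Base case: anything that is already `Into<T>`.
impl<T, I: Into<T>> IntoValue<T, ViaInto> for I {
    fn into_value(self) -> T {
        self.into()
    }
}

// Extra case: a closure producing `T`. Without the distinct marker type,
// this blanket impl would conflict with the one above.
impl<T, F: Fn() -> T> IntoValue<T, ViaClosure> for F {
    fn into_value(self) -> T {
        self()
    }
}

fn main() {
    // The compiler infers the marker from whichever impl applies.
    let a: i64 = 5u8.into_value(); // via the `Into` base case
    let b: i64 = (|| 7).into_value(); // via the closure case
    assert_eq!((a, b), (5, 7));
}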
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::{
|
||||
into_reactive_value::IntoReactiveValue,
|
||||
owner::{LocalStorage, Owner},
|
||||
traits::GetUntracked,
|
||||
wrappers::read::Signal,
|
||||
};
|
||||
use typed_builder::TypedBuilder;
|
||||
|
||||
#[test]
|
||||
fn test_into_signal_compiles() {
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
let _: Signal<usize> = (|| 2).into_reactive_value();
|
||||
let _: Signal<usize, LocalStorage> = 2.into_reactive_value();
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
let _: Signal<usize, LocalStorage> = (|| 2).into_reactive_value();
|
||||
let _: Signal<String> = "str".into_reactive_value();
|
||||
let _: Signal<String, LocalStorage> = "str".into_reactive_value();
|
||||
|
||||
#[derive(TypedBuilder)]
|
||||
struct Foo {
|
||||
#[builder(setter(
|
||||
fn transform<M>(value: impl IntoReactiveValue<Signal<usize>, M>) {
|
||||
value.into_reactive_value()
|
||||
}
|
||||
))]
|
||||
sig: Signal<usize>,
|
||||
}
|
||||
|
||||
assert_eq!(Foo::builder().sig(2).build().sig.get_untracked(), 2);
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
assert_eq!(Foo::builder().sig(|| 2).build().sig.get_untracked(), 2);
|
||||
assert_eq!(
|
||||
Foo::builder()
|
||||
.sig(Signal::stored(2))
|
||||
.build()
|
||||
.sig
|
||||
.get_untracked(),
|
||||
2
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -90,6 +90,12 @@ pub mod traits;
|
||||
pub mod transition;
|
||||
pub mod wrappers;
|
||||
|
||||
mod into_reactive_value;
|
||||
pub use into_reactive_value::*;
|
||||
|
||||
/// A standard way to wrap functions and closures to pass them to components.
|
||||
pub mod callback;
|
||||
|
||||
use computed::ScopedFuture;
|
||||
|
||||
#[cfg(all(feature = "nightly", rustc_nightly))]
|
||||
@@ -97,7 +103,9 @@ mod nightly;
|
||||
|
||||
/// Reexports frequently-used traits.
|
||||
pub mod prelude {
|
||||
pub use crate::{owner::FromLocal, traits::*};
|
||||
pub use crate::{
|
||||
into_reactive_value::IntoReactiveValue, owner::FromLocal, traits::*,
|
||||
};
|
||||
}
|
||||
|
||||
// TODO remove this, it's just useful while developing
|
||||
|
||||
@@ -209,6 +209,25 @@ impl Owner {
|
||||
this
|
||||
}
|
||||
|
||||
/// Returns the parent of this `Owner`, if any.
|
||||
///
|
||||
/// None when:
|
||||
/// - This is a root owner
|
||||
/// - The parent has been dropped
|
||||
pub fn parent(&self) -> Option<Owner> {
|
||||
self.inner
|
||||
.read()
|
||||
.or_poisoned()
|
||||
.parent
|
||||
.as_ref()
|
||||
.and_then(|p| p.upgrade())
|
||||
.map(|inner| Owner {
|
||||
inner,
|
||||
#[cfg(feature = "hydration")]
|
||||
shared_context: self.shared_context.clone(),
|
||||
})
|
||||
}
|
||||
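A small sketch of the new accessor; it assumes `Owner` is comparable (`PartialEq`), which the `unset` comparison elsewhere in this file also relies on:

use reactive_graph::owner::Owner;

fn main() {
    // A fresh owner created with no current owner set has no parent.
    let root = Owner::new();
    assert!(root.parent().is_none());

    // A child created from it reports the root as its parent.
    let child = root.child();
    assert!(child.parent().as_ref() == Some(&root));
}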
|
||||
/// Creates a new `Owner` that is the child of the current `Owner`, if any.
|
||||
pub fn child(&self) -> Self {
|
||||
let parent = Some(Arc::downgrade(&self.inner));
|
||||
@@ -321,6 +340,8 @@ impl Owner {
|
||||
}
|
||||
|
||||
/// Removes this from its state as the thread-local owner and drops it.
|
||||
/// If there are other holders of this owner, it may not clean up. If cleanup must always run,
|
||||
/// see [`Owner::unset_with_forced_cleanup`].
|
||||
pub fn unset(self) {
|
||||
OWNER.with_borrow_mut(|owner| {
|
||||
if owner.as_ref().and_then(|n| n.upgrade()) == Some(self) {
|
||||
@@ -329,6 +350,23 @@ impl Owner {
|
||||
})
|
||||
}
|
||||
|
||||
/// Removes this from its state as the thread-local owner and drops it.
|
||||
/// Unlike [`Owner::unset`], this will always run cleanup on this owner,
|
||||
/// even if there are other holders of this owner.
|
||||
pub fn unset_with_forced_cleanup(self) {
|
||||
OWNER.with_borrow_mut(|owner| {
|
||||
if owner
|
||||
.as_ref()
|
||||
.and_then(|n| n.upgrade())
|
||||
.map(|o| o == self)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
mem::take(owner);
|
||||
}
|
||||
});
|
||||
self.cleanup();
|
||||
}
|
||||
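A sketch of the difference in practice, assuming `on_cleanup` is exported from `reactive_graph::owner` (the handle names are illustrative):

use reactive_graph::owner::{on_cleanup, Owner};
use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

fn main() {
    let cleaned = Arc::new(AtomicBool::new(false));

    let owner = Owner::new();
    owner.set();
    on_cleanup({
        let cleaned = Arc::clone(&cleaned);
        move || cleaned.store(true, Ordering::Relaxed)
    });

    // Another handle to the same owner is still alive, as some other part
    // of an application might hold one.
    let other_handle = owner.clone();

    // The forced variant runs registered cleanups regardless of that,
    // where plain `unset` might not have.
    owner.unset_with_forced_cleanup();
    assert!(cleaned.load(Ordering::Relaxed));

    drop(other_handle);
}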
|
||||
/// Returns the current [`SharedContext`], if any.
|
||||
#[cfg(feature = "hydration")]
|
||||
pub fn current_shared_context(
|
||||
|
||||
@@ -324,6 +324,22 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> From<&'static str> for ArcSignal<String, S>
|
||||
where
|
||||
S: Storage<&'static str> + Storage<String>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: &'static str) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::Stored(ArcStoredValue::new(
|
||||
value.to_string(),
|
||||
)),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
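A brief stand-alone usage sketch of the new impl (the `Owner` setup mirrors this crate's doc examples; `GetUntracked` is assumed to be implemented for `ArcSignal`, as elsewhere in these tests):

use reactive_graph::{owner::Owner, traits::GetUntracked, wrappers::read::ArcSignal};

fn main() {
    let owner = Owner::new();
    owner.set();

    // A string literal now converts directly into an `ArcSignal<String>`,
    // stored eagerly as an owned `String`.
    let greeting: ArcSignal<String> = "hello".into();
    assert_eq!(greeting.get_untracked(), "hello");
}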
|
||||
impl<T, S> DefinedAt for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
@@ -1049,6 +1065,13 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Signal<&'static str, LocalStorage>> for Signal<String, LocalStorage> {
|
||||
#[track_caller]
|
||||
fn from(value: Signal<&'static str, LocalStorage>) -> Self {
|
||||
Signal::derive_local(move || value.read().to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Signal<&'static str>> for Signal<String, LocalStorage> {
|
||||
#[track_caller]
|
||||
fn from(value: Signal<&'static str>) -> Self {
|
||||
@@ -1077,6 +1100,15 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Signal<Option<&'static str>, LocalStorage>>
|
||||
for Signal<Option<String>, LocalStorage>
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: Signal<Option<&'static str>, LocalStorage>) -> Self {
|
||||
Signal::derive_local(move || value.read().map(str::to_string))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Signal<Option<&'static str>>>
|
||||
for Signal<Option<String>, LocalStorage>
|
||||
{
|
||||
@@ -1086,6 +1118,192 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
#[doc(hidden)]
|
||||
pub struct __IntoReactiveValueMarkerSignalFromReactiveClosure;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
#[doc(hidden)]
|
||||
pub struct __IntoReactiveValueMarkerSignalStrOutputToString;
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
#[doc(hidden)]
|
||||
pub struct __IntoReactiveValueMarkerOptionalSignalFromReactiveClosureAlways;
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<T, F>
|
||||
crate::IntoReactiveValue<
|
||||
Signal<T, SyncStorage>,
|
||||
__IntoReactiveValueMarkerSignalFromReactiveClosure,
|
||||
> for F
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
F: Fn() -> T + Send + Sync + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> Signal<T, SyncStorage> {
|
||||
Signal::derive(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<T, F>
|
||||
crate::IntoReactiveValue<
|
||||
ArcSignal<T, SyncStorage>,
|
||||
__IntoReactiveValueMarkerSignalFromReactiveClosure,
|
||||
> for F
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
F: Fn() -> T + Send + Sync + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> ArcSignal<T, SyncStorage> {
|
||||
ArcSignal::derive(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<T, F>
|
||||
crate::IntoReactiveValue<
|
||||
Signal<T, LocalStorage>,
|
||||
__IntoReactiveValueMarkerSignalFromReactiveClosure,
|
||||
> for F
|
||||
where
|
||||
T: 'static,
|
||||
F: Fn() -> T + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> Signal<T, LocalStorage> {
|
||||
Signal::derive_local(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<T, F>
|
||||
crate::IntoReactiveValue<
|
||||
ArcSignal<T, LocalStorage>,
|
||||
__IntoReactiveValueMarkerSignalFromReactiveClosure,
|
||||
> for F
|
||||
where
|
||||
T: 'static,
|
||||
F: Fn() -> T + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> ArcSignal<T, LocalStorage> {
|
||||
ArcSignal::derive_local(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<F>
|
||||
crate::IntoReactiveValue<
|
||||
Signal<String, SyncStorage>,
|
||||
__IntoReactiveValueMarkerSignalStrOutputToString,
|
||||
> for F
|
||||
where
|
||||
F: Fn() -> &'static str + Send + Sync + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> Signal<String, SyncStorage> {
|
||||
Signal::derive(move || self().to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<F>
|
||||
crate::IntoReactiveValue<
|
||||
ArcSignal<String, SyncStorage>,
|
||||
__IntoReactiveValueMarkerSignalStrOutputToString,
|
||||
> for F
|
||||
where
|
||||
F: Fn() -> &'static str + Send + Sync + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> ArcSignal<String, SyncStorage> {
|
||||
ArcSignal::derive(move || self().to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<F>
|
||||
crate::IntoReactiveValue<
|
||||
Signal<String, LocalStorage>,
|
||||
__IntoReactiveValueMarkerSignalStrOutputToString,
|
||||
> for F
|
||||
where
|
||||
F: Fn() -> &'static str + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> Signal<String, LocalStorage> {
|
||||
Signal::derive_local(move || self().to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<F>
|
||||
crate::IntoReactiveValue<
|
||||
ArcSignal<String, LocalStorage>,
|
||||
__IntoReactiveValueMarkerSignalStrOutputToString,
|
||||
> for F
|
||||
where
|
||||
F: Fn() -> &'static str + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> ArcSignal<String, LocalStorage> {
|
||||
ArcSignal::derive_local(move || self().to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<T, F>
|
||||
crate::IntoReactiveValue<
|
||||
Signal<Option<T>, SyncStorage>,
|
||||
__IntoReactiveValueMarkerOptionalSignalFromReactiveClosureAlways,
|
||||
> for F
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
F: Fn() -> T + Send + Sync + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> Signal<Option<T>, SyncStorage> {
|
||||
Signal::derive(move || Some(self()))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<T, F>
|
||||
crate::IntoReactiveValue<
|
||||
ArcSignal<Option<T>, SyncStorage>,
|
||||
__IntoReactiveValueMarkerOptionalSignalFromReactiveClosureAlways,
|
||||
> for F
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
F: Fn() -> T + Send + Sync + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> ArcSignal<Option<T>, SyncStorage> {
|
||||
ArcSignal::derive(move || Some(self()))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<T, F>
|
||||
crate::IntoReactiveValue<
|
||||
Signal<Option<T>, LocalStorage>,
|
||||
__IntoReactiveValueMarkerOptionalSignalFromReactiveClosureAlways,
|
||||
> for F
|
||||
where
|
||||
T: 'static,
|
||||
F: Fn() -> T + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> Signal<Option<T>, LocalStorage> {
|
||||
Signal::derive_local(move || Some(self()))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "nightly"))]
|
||||
impl<T, F>
|
||||
crate::IntoReactiveValue<
|
||||
ArcSignal<Option<T>, LocalStorage>,
|
||||
__IntoReactiveValueMarkerOptionalSignalFromReactiveClosureAlways,
|
||||
> for F
|
||||
where
|
||||
T: 'static,
|
||||
F: Fn() -> T + 'static,
|
||||
{
|
||||
fn into_reactive_value(self) -> ArcSignal<Option<T>, LocalStorage> {
|
||||
ArcSignal::derive_local(move || Some(self()))
|
||||
}
|
||||
}
|
||||
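Taken together, these impls are what let closures be passed where a `Signal` is expected on the default (non-nightly) feature path. A hedged usage sketch mirroring the conversions above (it would not apply when the `nightly` feature is enabled, since the impls are gated on `cfg(not(feature = "nightly"))`):

use reactive_graph::{owner::Owner, prelude::*, wrappers::read::Signal};

fn main() {
    let owner = Owner::new();
    owner.set();

    // Closure -> Signal<T> (a derived signal).
    let n: Signal<usize> = (|| 42).into_reactive_value();
    // Closure returning &'static str -> Signal<String>.
    let s: Signal<String> = (|| "hello").into_reactive_value();
    // Closure returning T -> Signal<Option<T>>, wrapped in Some(..) on read.
    let maybe: Signal<Option<usize>> = (|| 7).into_reactive_value();

    assert_eq!(n.get_untracked(), 42);
    assert_eq!(s.get_untracked(), "hello");
    assert_eq!(maybe.get_untracked(), Some(7));
}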
|
||||
#[allow(deprecated)]
|
||||
impl<T> From<MaybeSignal<T>> for Signal<T>
|
||||
where
|
||||
@@ -1542,7 +1760,6 @@ pub mod read {
|
||||
|
||||
impl<T, S> ReadUntracked for MaybeProp<T, S>
|
||||
where
|
||||
T: Clone,
|
||||
S: Storage<Option<T>> + Storage<SignalTypes<Option<T>, S>>,
|
||||
{
|
||||
type Value = ReadGuard<Option<T>, SignalReadGuard<Option<T>, S>>;
|
||||
|
||||
@@ -225,3 +225,38 @@ fn threaded_chaos_effect() {
|
||||
let values: Vec<_> = signals.iter().map(|s| s.get_untracked()).collect();
|
||||
println!("FINAL: {values:?}");
|
||||
}
|
||||
|
||||
#[cfg(feature = "effects")]
|
||||
#[test]
|
||||
fn test_batch() {
|
||||
use imports::*;
|
||||
use reactive_graph::{effect::batch, owner::StoredValue};
|
||||
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let a = RwSignal::new(0);
|
||||
let b = RwSignal::new(0);
|
||||
|
||||
let values = StoredValue::new(Vec::new());
|
||||
|
||||
ImmediateEffect::new_scoped(move || {
|
||||
println!("{} = {}", a.get(), b.get());
|
||||
values.write_value().push((a.get(), b.get()));
|
||||
});
|
||||
|
||||
a.set(1);
|
||||
b.set(1);
|
||||
|
||||
batch(move || {
|
||||
a.set(2);
|
||||
b.set(2);
|
||||
|
||||
batch(move || {
|
||||
a.set(3);
|
||||
b.set(3);
|
||||
});
|
||||
});
|
||||
|
||||
assert_eq!(values.get_value(), vec![(0, 0), (1, 0), (1, 1), (3, 3)]);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores"
|
||||
version = "0.2.5"
|
||||
version = "0.3.0"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -30,6 +30,8 @@ where
|
||||
defined_at: &'static Location<'static>,
|
||||
path: Arc<dyn Fn() -> StorePath + Send + Sync>,
|
||||
get_trigger: Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
|
||||
get_trigger_unkeyed:
|
||||
Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
|
||||
read: Arc<dyn Fn() -> Option<StoreFieldReader<T>> + Send + Sync>,
|
||||
pub(crate) write:
|
||||
Arc<dyn Fn() -> Option<StoreFieldWriter<T>> + Send + Sync>,
|
||||
@@ -103,6 +105,10 @@ impl<T> StoreField for ArcField<T> {
|
||||
(self.get_trigger)(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
(self.get_trigger_unkeyed)(path)
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
(self.path)()
|
||||
}
|
||||
@@ -132,6 +138,9 @@ where
|
||||
defined_at: Location::caller(),
|
||||
path: Arc::new(move || value.path().into_iter().collect()),
|
||||
get_trigger: Arc::new(move |path| value.get_trigger(path)),
|
||||
get_trigger_unkeyed: Arc::new(move |path| {
|
||||
value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new(move || value.reader().map(StoreFieldReader::new)),
|
||||
write: Arc::new(move || value.writer().map(StoreFieldWriter::new)),
|
||||
keys: Arc::new(move || value.keys()),
|
||||
@@ -158,6 +167,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -202,6 +215,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -245,6 +262,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -289,6 +310,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -337,6 +362,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -368,6 +397,7 @@ impl<T> Clone for ArcField<T> {
|
||||
defined_at: self.defined_at,
|
||||
path: self.path.clone(),
|
||||
get_trigger: Arc::clone(&self.get_trigger),
|
||||
get_trigger_unkeyed: Arc::clone(&self.get_trigger_unkeyed),
|
||||
read: Arc::clone(&self.read),
|
||||
write: Arc::clone(&self.write),
|
||||
keys: Arc::clone(&self.keys),
|
||||
|
||||
@@ -68,6 +68,11 @@ where
|
||||
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner.path()
|
||||
}
|
||||
|
||||
@@ -59,6 +59,13 @@ where
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|inner| inner.get_trigger_unkeyed(path))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
|
||||
@@ -84,6 +84,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
let index = self.index;
|
||||
@@ -109,6 +113,23 @@ where
|
||||
fn keys(&self) -> Option<KeyMap> {
|
||||
self.inner.keys()
|
||||
}
|
||||
|
||||
fn track_field(&self) {
|
||||
let mut full_path = self.path().into_iter().collect::<StorePath>();
|
||||
let trigger = self.get_trigger(self.path().into_iter().collect());
|
||||
trigger.this.track();
|
||||
trigger.children.track();
|
||||
|
||||
// tracks `this` for all ancestors: i.e., it will track any change that is made
|
||||
// directly to one of its ancestors, but not a change made to a *child* of an ancestor
|
||||
// (which would end up with every subfield tracking its own siblings, because they are
|
||||
// children of its parent)
|
||||
while !full_path.is_empty() {
|
||||
full_path.pop();
|
||||
let inner = self.get_trigger(full_path.clone());
|
||||
inner.this.track();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Inner, Prev> DefinedAt for AtIndex<Inner, Prev>
|
||||
|
||||
@@ -110,6 +110,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
Some(Mapped::new_with_guard(inner, self.read))
|
||||
@@ -432,7 +436,7 @@ where
|
||||
let this = keys
|
||||
.with_field_keys(
|
||||
inner.clone(),
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -444,6 +448,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
|
||||
@@ -452,7 +460,7 @@ where
|
||||
let index = keys
|
||||
.with_field_keys(
|
||||
inner_path,
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -476,7 +484,7 @@ where
|
||||
let index = keys
|
||||
.with_field_keys(
|
||||
inner_path.clone(),
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -624,9 +632,7 @@ where
|
||||
let latest = self.latest_keys();
|
||||
keys.with_field_keys(
|
||||
inner_path,
|
||||
|keys| {
|
||||
keys.update(latest);
|
||||
},
|
||||
|keys| ((), keys.update(latest)),
|
||||
|| self.latest_keys(),
|
||||
);
|
||||
}
|
||||
@@ -707,3 +713,144 @@ where
|
||||
.map(|key| AtKeyed::new(self.inner.clone(), key))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::{self as reactive_stores, tests::tick, AtKeyed, Store};
|
||||
use reactive_graph::{
|
||||
effect::Effect,
|
||||
traits::{GetUntracked, ReadUntracked, Set, Track, Write},
|
||||
};
|
||||
use std::sync::{
|
||||
atomic::{AtomicUsize, Ordering},
|
||||
Arc,
|
||||
};
|
||||
|
||||
#[derive(Debug, Store, Default)]
|
||||
struct Todos {
|
||||
#[store(key: usize = |todo| todo.id)]
|
||||
todos: Vec<Todo>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Store, Default, Clone, PartialEq, Eq)]
|
||||
struct Todo {
|
||||
id: usize,
|
||||
label: String,
|
||||
}
|
||||
|
||||
impl Todo {
|
||||
pub fn new(id: usize, label: impl ToString) -> Self {
|
||||
Self {
|
||||
id,
|
||||
label: label.to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn data() -> Todos {
|
||||
Todos {
|
||||
todos: vec![
|
||||
Todo {
|
||||
id: 10,
|
||||
label: "A".to_string(),
|
||||
},
|
||||
Todo {
|
||||
id: 11,
|
||||
label: "B".to_string(),
|
||||
},
|
||||
Todo {
|
||||
id: 12,
|
||||
label: "C".to_string(),
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
#[tokio::test]
|
||||
async fn keyed_fields_can_be_moved() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
let store = Store::new(data());
|
||||
assert_eq!(store.read_untracked().todos.len(), 3);
|
||||
|
||||
// create an effect to read from each keyed field
|
||||
let a_count = Arc::new(AtomicUsize::new(0));
|
||||
let b_count = Arc::new(AtomicUsize::new(0));
|
||||
let c_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
let a = AtKeyed::new(store.todos(), 10);
|
||||
let b = AtKeyed::new(store.todos(), 11);
|
||||
let c = AtKeyed::new(store.todos(), 12);
|
||||
|
||||
Effect::new_sync({
|
||||
let a_count = Arc::clone(&a_count);
|
||||
move || {
|
||||
a.track();
|
||||
a_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
Effect::new_sync({
|
||||
let b_count = Arc::clone(&b_count);
|
||||
move || {
|
||||
b.track();
|
||||
b_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
Effect::new_sync({
|
||||
let c_count = Arc::clone(&c_count);
|
||||
move || {
|
||||
c.track();
|
||||
c_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
|
||||
tick().await;
|
||||
assert_eq!(a_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(b_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(c_count.load(Ordering::Relaxed), 1);
|
||||
|
||||
// writing at a key doesn't notify siblings
|
||||
*a.label().write() = "Foo".into();
|
||||
tick().await;
|
||||
assert_eq!(a_count.load(Ordering::Relaxed), 2);
|
||||
assert_eq!(b_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(c_count.load(Ordering::Relaxed), 1);
|
||||
|
||||
// the keys can be reorganized
|
||||
store.todos().write().swap(0, 2);
|
||||
let after = store.todos().get_untracked();
|
||||
assert_eq!(
|
||||
after,
|
||||
vec![Todo::new(12, "C"), Todo::new(11, "B"), Todo::new(10, "Foo")]
|
||||
);
|
||||
|
||||
tick().await;
|
||||
assert_eq!(a_count.load(Ordering::Relaxed), 2);
|
||||
assert_eq!(b_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(c_count.load(Ordering::Relaxed), 1);
|
||||
|
||||
// and after we move the keys around, they still update the moved items
|
||||
a.label().set("Bar".into());
|
||||
let after = store.todos().get_untracked();
|
||||
assert_eq!(
|
||||
after,
|
||||
vec![Todo::new(12, "C"), Todo::new(11, "B"), Todo::new(10, "Bar")]
|
||||
);
|
||||
tick().await;
|
||||
assert_eq!(a_count.load(Ordering::Relaxed), 3);
|
||||
assert_eq!(b_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(c_count.load(Ordering::Relaxed), 1);
|
||||
|
||||
// we can remove a key and add a new one
|
||||
store.todos().write().pop();
|
||||
store.todos().write().push(Todo::new(13, "New"));
|
||||
let after = store.todos().get_untracked();
|
||||
assert_eq!(
|
||||
after,
|
||||
vec![Todo::new(12, "C"), Todo::new(11, "B"), Todo::new(13, "New")]
|
||||
);
|
||||
tick().await;
|
||||
assert_eq!(a_count.load(Ordering::Relaxed), 3);
|
||||
assert_eq!(b_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(c_count.load(Ordering::Relaxed), 1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -364,13 +364,18 @@ where
|
||||
})
|
||||
}
|
||||
|
||||
fn update(&mut self, iter: impl IntoIterator<Item = K>) {
|
||||
fn update(
|
||||
&mut self,
|
||||
iter: impl IntoIterator<Item = K>,
|
||||
) -> Vec<(usize, StorePathSegment)> {
|
||||
let new_keys = iter
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(idx, key)| (key, idx))
|
||||
.collect::<FxHashMap<K, usize>>();
|
||||
|
||||
let mut index_keys = Vec::with_capacity(new_keys.len());
|
||||
|
||||
// remove old keys and recycle the slots
|
||||
self.keys.retain(|key, old_entry| match new_keys.get(key) {
|
||||
Some(idx) => {
|
||||
@@ -385,14 +390,17 @@ where
|
||||
|
||||
// add new keys
|
||||
for (key, idx) in new_keys {
|
||||
// the suggestion doesn't compile because we need &mut for self.next_key(),
|
||||
// and we don't want to call that until after the check
|
||||
#[allow(clippy::map_entry)]
|
||||
if !self.keys.contains_key(&key) {
|
||||
let path = self.next_key();
|
||||
self.keys.insert(key, (path, idx));
|
||||
match self.keys.get(&key) {
|
||||
Some((segment, idx)) => index_keys.push((*idx, *segment)),
|
||||
None => {
|
||||
let path = self.next_key();
|
||||
self.keys.insert(key, (path, idx));
|
||||
index_keys.push((idx, path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
index_keys
|
||||
}
|
||||
}
|
||||
|
||||
@@ -415,14 +423,20 @@ type HashMap<K, V> = send_wrapper::SendWrapper<
|
||||
|
||||
/// A map of the keys for a keyed subfield.
|
||||
#[derive(Clone)]
|
||||
pub struct KeyMap(HashMap<StorePath, Box<dyn Any + Send + Sync>>);
|
||||
pub struct KeyMap(
|
||||
HashMap<StorePath, Box<dyn Any + Send + Sync>>,
|
||||
HashMap<(StorePath, usize), StorePathSegment>,
|
||||
);
|
||||
|
||||
impl Default for KeyMap {
|
||||
fn default() -> Self {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
return Self(Default::default());
|
||||
return Self(Default::default(), Default::default());
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
return Self(send_wrapper::SendWrapper::new(Default::default()));
|
||||
return Self(
|
||||
send_wrapper::SendWrapper::new(Default::default()),
|
||||
send_wrapper::SendWrapper::new(Default::default()),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -430,31 +444,70 @@ impl KeyMap {
|
||||
fn with_field_keys<K, T>(
|
||||
&self,
|
||||
path: StorePath,
|
||||
fun: impl FnOnce(&mut FieldKeys<K>) -> T,
|
||||
fun: impl FnOnce(&mut FieldKeys<K>) -> (T, Vec<(usize, StorePathSegment)>),
|
||||
initialize: impl FnOnce() -> Vec<K>,
|
||||
) -> Option<T>
|
||||
where
|
||||
K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static,
|
||||
{
|
||||
let initial_keys = initialize();
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let mut entry = self
|
||||
.0
|
||||
.entry(path)
|
||||
.or_insert_with(|| Box::new(FieldKeys::new(initialize())));
|
||||
.entry(path.clone())
|
||||
.or_insert_with(|| Box::new(FieldKeys::new(initial_keys)));
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let entry = if !self.0.borrow().contains_key(&path) {
|
||||
Some(Box::new(FieldKeys::new(initialize())))
|
||||
Some(Box::new(FieldKeys::new(initial_keys)))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let mut map = self.0.borrow_mut();
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let entry = map.entry(path).or_insert_with(|| entry.unwrap());
|
||||
let entry = map.entry(path.clone()).or_insert_with(|| entry.unwrap());
|
||||
|
||||
let entry = entry.downcast_mut::<FieldKeys<K>>()?;
|
||||
Some(fun(entry))
|
||||
let (result, new_keys) = fun(entry);
|
||||
if !new_keys.is_empty() {
|
||||
for (idx, segment) in new_keys {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
self.1.insert((path.clone(), idx), segment);
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
self.1.borrow_mut().insert((path.clone(), idx), segment);
|
||||
}
|
||||
}
|
||||
Some(result)
|
||||
}
|
||||
|
||||
fn contains_key(&self, key: &StorePath) -> bool {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.0.contains_key(key)
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
self.0.borrow_mut().contains_key(key)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_key_for_index(
|
||||
&self,
|
||||
key: &(StorePath, usize),
|
||||
) -> Option<StorePathSegment> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.1.get(key).as_deref().copied()
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
self.1.borrow().get(key).as_deref().copied()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -832,6 +885,30 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Foo {
|
||||
id: i32,
|
||||
bar: Bar,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Bar {
|
||||
bar_signature: i32,
|
||||
baz: Baz,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baz {
|
||||
more_data: i32,
|
||||
baw: Baw,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baw {
|
||||
more_data: i32,
|
||||
end: i32,
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mutating_field_triggers_effect() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
@@ -1112,30 +1189,6 @@ mod tests {
|
||||
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Foo {
|
||||
id: i32,
|
||||
bar: Bar,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Bar {
|
||||
bar_signature: i32,
|
||||
baz: Baz,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baz {
|
||||
more_data: i32,
|
||||
baw: Baw,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baw {
|
||||
more_data: i32,
|
||||
end: i32,
|
||||
}
|
||||
|
||||
let store = Store::new(Foo {
|
||||
id: 42,
|
||||
bar: Bar {
|
||||
@@ -1219,4 +1272,107 @@ mod tests {
|
||||
assert_eq!(more_data_runs.get_value(), 3);
|
||||
assert_eq!(baz_baw_end_runs.get_value(), 3);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn changing_parent_notifies_subfield() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
let combined_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
let store = Store::new(Foo {
|
||||
id: 42,
|
||||
bar: Bar {
|
||||
bar_signature: 69,
|
||||
baz: Baz {
|
||||
more_data: 9999,
|
||||
baw: Baw {
|
||||
more_data: 22,
|
||||
end: 1112,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
let tracked_field = store.bar().baz().more_data();
|
||||
|
||||
Effect::new_sync({
|
||||
let combined_count = Arc::clone(&combined_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("first run");
|
||||
} else {
|
||||
println!("next run");
|
||||
}
|
||||
|
||||
// we only track `more`, but this should still be notified
|
||||
// when its parent fields `bar` or `baz` change
|
||||
println!("{:?}", *tracked_field.read());
|
||||
combined_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
tick().await;
|
||||
tick().await;
|
||||
|
||||
store.bar().baz().set(Baz {
|
||||
more_data: 42,
|
||||
baw: Baw {
|
||||
more_data: 11,
|
||||
end: 31,
|
||||
},
|
||||
});
|
||||
tick().await;
|
||||
store.bar().set(Bar {
|
||||
bar_signature: 23,
|
||||
baz: Baz {
|
||||
more_data: 32,
|
||||
baw: Baw {
|
||||
more_data: 432,
|
||||
end: 423,
|
||||
},
|
||||
},
|
||||
});
|
||||
tick().await;
|
||||
|
||||
assert_eq!(combined_count.load(Ordering::Relaxed), 3);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn changing_parent_notifies_unkeyed_child() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
let combined_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
let store = Store::new(data());
|
||||
|
||||
let tracked_field = store.todos().at_unkeyed(0);
|
||||
|
||||
Effect::new_sync({
|
||||
let combined_count = Arc::clone(&combined_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("first run");
|
||||
} else {
|
||||
println!("next run");
|
||||
}
|
||||
|
||||
// we only track `more`, but this should still be notified
|
||||
// when its parent fields `bar` or `baz` change
|
||||
println!("{:?}", *tracked_field.read());
|
||||
combined_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
tick().await;
|
||||
tick().await;
|
||||
|
||||
store.todos().write().pop();
|
||||
tick().await;
|
||||
|
||||
store.todos().write().push(Todo {
|
||||
label: "another one".into(),
|
||||
completed: false,
|
||||
});
|
||||
tick().await;
|
||||
|
||||
assert_eq!(combined_count.load(Ordering::Relaxed), 3);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ where
|
||||
// don't track the writer for the whole store
|
||||
writer.untrack();
|
||||
let mut notify = |path: &StorePath| {
|
||||
self.triggers_for_path(path.to_owned()).notify();
|
||||
self.triggers_for_path_unkeyed(path.to_owned()).notify();
|
||||
};
|
||||
writer.patch_field(new, &path, &mut notify);
|
||||
}
|
||||
|
||||
@@ -11,6 +11,15 @@ impl IntoIterator for StorePath {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a StorePath {
|
||||
type Item = &'a StorePathSegment;
|
||||
type IntoIter = std::slice::Iter<'a, StorePathSegment>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<StorePathSegment>> for StorePath {
|
||||
fn from(value: Vec<StorePathSegment>) -> Self {
|
||||
Self(value)
|
||||
@@ -18,6 +27,16 @@ impl From<Vec<StorePathSegment>> for StorePath {
|
||||
}
|
||||
|
||||
impl StorePath {
    /// Creates a new path.
    pub fn new() -> Self {
        Self(Vec::new())
    }

    /// Creates a new path with storage capacity for `capacity` segments.
    pub fn with_capacity(capacity: usize) -> Self {
        Self(Vec::with_capacity(capacity))
    }

    /// Adds a new segment to the path.
    pub fn push(&mut self, segment: impl Into<StorePathSegment>) {
        self.0.push(segment.into());
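A small usage sketch of these constructors (illustrative; it assumes, as the store macros do elsewhere, that a plain `usize` field index converts into a `StorePathSegment`):

    // hypothetical path for `todos -> item at index 2 -> that item's first field`
    let mut path = StorePath::with_capacity(3);
    path.push(0usize); // the `todos` field (assumes `usize: Into<StorePathSegment>`)
    path.push(2usize); // the item at index 2
    path.push(0usize); // that item's first field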
@@ -26,6 +26,14 @@ pub trait StoreField: Sized {
    #[track_caller]
    fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger;

    /// Returns the trigger that tracks access and updates for this field.
    ///
    /// This uses *unkeyed* paths: i.e., if any field in the path is keyed, the index
    /// given in the path is used to look up that item's key, and the trigger for the
    /// keyed item is returned, rather than treating the index as a plain path segment.
    #[track_caller]
    fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger;

    /// The path of this field (see [`StorePath`]).
    #[track_caller]
    fn path(&self) -> impl IntoIterator<Item = StorePathSegment>;
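For orientation, a hedged sketch of how the two flavors surface in user code, reusing the keyed `Todos` test types and `data()` helper defined earlier in this diff: a keyed handle is resolved through the item's key and follows the item when the `Vec` is reordered, while an unkeyed handle is resolved positionally through `get_trigger_unkeyed` and always refers to whatever currently sits at that index.

    // sketch only
    let store = Store::new(data());

    // keyed: resolved via the key (id == 10); survives reordering of the Vec
    let by_key = AtKeyed::new(store.todos(), 10);

    // unkeyed: resolved via the position, using the unkeyed trigger added here
    let by_index = store.todos().at_unkeyed(0);

    // both are tracked and read the same way; only trigger resolution differs
    by_key.track();
    by_index.track();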
@@ -84,6 +92,26 @@ pub trait StoreField: Sized {
|
||||
|
||||
triggers
|
||||
}
|
||||
|
||||
/// Returns triggers for the field at the given path, and all parent fields
|
||||
fn triggers_for_path_unkeyed(&self, path: StorePath) -> Vec<ArcTrigger> {
|
||||
// see notes on triggers_for_path() for additional comments on implementation
|
||||
|
||||
let trigger = self.get_trigger_unkeyed(path.clone());
|
||||
let mut full_path = path;
|
||||
|
||||
let mut triggers = Vec::with_capacity(full_path.len() + 2);
|
||||
triggers.push(trigger.this.clone());
|
||||
triggers.push(trigger.children.clone());
|
||||
while !full_path.is_empty() {
|
||||
full_path.pop();
|
||||
let inner = self.get_trigger_unkeyed(full_path.clone());
|
||||
triggers.push(inner.children.clone());
|
||||
}
|
||||
triggers.reverse();
|
||||
|
||||
triggers
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> StoreField for ArcStore<T>
|
||||
@@ -101,6 +129,26 @@ where
|
||||
trigger
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
let orig_path = path.clone();
|
||||
|
||||
let mut path = StorePath::with_capacity(orig_path.len());
|
||||
for segment in &orig_path {
|
||||
let parent_is_keyed = self.keys.contains_key(&path);
|
||||
|
||||
if parent_is_keyed {
|
||||
let key = self
|
||||
.keys
|
||||
.get_key_for_index(&(path.clone(), segment.0))
|
||||
.expect("could not find key for index");
|
||||
path.push(key);
|
||||
} else {
|
||||
path.push(*segment);
|
||||
}
|
||||
}
|
||||
self.get_trigger(path)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
iter::empty()
|
||||
@@ -141,6 +189,14 @@ where
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|n| n.get_trigger_unkeyed(path))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner
|
||||
|
||||
@@ -88,6 +88,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
Some(Mapped::new_with_guard(inner, self.read))
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router"
|
||||
version = "0.8.7"
|
||||
version = "0.8.10"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -364,6 +364,12 @@ where
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct MatchedRoute(pub String, pub AnyView);
|
||||
|
||||
impl MatchedRoute {
|
||||
fn branch_name(&self) -> String {
|
||||
format!("{:?}", self.1.as_type_id())
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for MatchedRoute {
|
||||
type State = <AnyView as Render>::State;
|
||||
|
||||
@@ -414,8 +420,9 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches: bool,
|
||||
extra_attrs: Vec<AnyAttribute>,
|
||||
) {
|
||||
if mark_branches && escape {
|
||||
buf.open_branch(&self.0);
|
||||
let branch_name = (mark_branches && escape).then(|| self.branch_name());
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.open_branch(bn);
|
||||
}
|
||||
self.1.to_html_with_buf(
|
||||
buf,
|
||||
@@ -424,8 +431,8 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if mark_branches && escape {
|
||||
buf.close_branch(&self.0);
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.close_branch(bn);
|
||||
if *position == Position::NextChildAfterText {
|
||||
*position = Position::NextChild;
|
||||
}
|
||||
@@ -442,8 +449,9 @@ impl RenderHtml for MatchedRoute {
|
||||
) where
|
||||
Self: Sized,
|
||||
{
|
||||
if mark_branches && escape {
|
||||
buf.open_branch(&self.0);
|
||||
let branch_name = (mark_branches && escape).then(|| self.branch_name());
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.open_branch(bn);
|
||||
}
|
||||
self.1.to_html_async_with_buf::<OUT_OF_ORDER>(
|
||||
buf,
|
||||
@@ -452,8 +460,8 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if mark_branches && escape {
|
||||
buf.close_branch(&self.0);
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.close_branch(bn);
|
||||
if *position == Position::NextChildAfterText {
|
||||
*position = Position::NextChild;
|
||||
}
|
||||
|
||||
@@ -105,7 +105,7 @@
|
||||
//! move || params.read().get("id").unwrap_or_default(),
|
||||
//! move |id| contact_data(id)
|
||||
//! );
|
||||
//! todo!()
|
||||
//! // ... return some view
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::{hooks::use_navigate, params::ParamsMap};
|
||||
use core::fmt;
|
||||
use futures::channel::oneshot;
|
||||
use js_sys::{try_iter, Array, JsString};
|
||||
use leptos::prelude::*;
|
||||
use leptos::{ev, prelude::*};
|
||||
use or_poisoned::OrPoisoned;
|
||||
use reactive_graph::{
|
||||
signal::ArcRwSignal,
|
||||
@@ -11,13 +11,12 @@ use reactive_graph::{
|
||||
};
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
boxed::Box,
|
||||
string::String,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
use tachys::dom::{document, window};
|
||||
use wasm_bindgen::{closure::Closure, JsCast, JsValue};
|
||||
use web_sys::{Event, UrlSearchParams};
|
||||
use wasm_bindgen::{JsCast, JsValue};
|
||||
use web_sys::UrlSearchParams;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct BrowserUrl {
|
||||
@@ -116,7 +115,6 @@ impl LocationProvider for BrowserUrl {
|
||||
}
|
||||
|
||||
fn init(&self, base: Option<Cow<'static, str>>) {
|
||||
let window = window();
|
||||
let navigate = {
|
||||
let url = self.url.clone();
|
||||
let pending = Arc::clone(&self.pending_navigation);
|
||||
@@ -159,27 +157,18 @@ impl LocationProvider for BrowserUrl {
|
||||
|
||||
let handle_anchor_click =
|
||||
handle_anchor_click(base, Self::parse_with_base, navigate);
|
||||
let closure = Closure::wrap(Box::new(move |ev: Event| {
|
||||
|
||||
let click_handle = window_event_listener(ev::click, move |ev| {
|
||||
if let Err(e) = handle_anchor_click(ev) {
|
||||
#[cfg(feature = "tracing")]
|
||||
tracing::error!("{e:?}");
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
web_sys::console::error_1(&e);
|
||||
}
|
||||
}) as Box<dyn FnMut(Event)>)
|
||||
.into_js_value();
|
||||
window
|
||||
.add_event_listener_with_callback(
|
||||
"click",
|
||||
closure.as_ref().unchecked_ref(),
|
||||
)
|
||||
.expect(
|
||||
"couldn't add `click` listener to `window` to handle `<a>` \
|
||||
clicks",
|
||||
);
|
||||
});
|
||||
|
||||
// handle popstate event (forward/back navigation)
|
||||
let cb = {
|
||||
let popstate_cb = {
|
||||
let url = self.url.clone();
|
||||
let path_stack = self.path_stack.clone();
|
||||
let is_back = self.is_back.clone();
|
||||
@@ -206,14 +195,14 @@ impl LocationProvider for BrowserUrl {
|
||||
}
|
||||
}
|
||||
};
|
||||
let closure =
|
||||
Closure::wrap(Box::new(cb) as Box<dyn Fn()>).into_js_value();
|
||||
window
|
||||
.add_event_listener_with_callback(
|
||||
"popstate",
|
||||
closure.as_ref().unchecked_ref(),
|
||||
)
|
||||
.expect("couldn't add `popstate` listener to `window`");
|
||||
|
||||
let popstate_handle =
|
||||
window_event_listener(ev::popstate, move |_| popstate_cb());
|
||||
|
||||
on_cleanup(|| {
|
||||
click_handle.remove();
|
||||
popstate_handle.remove();
|
||||
});
|
||||
}
|
||||
|
||||
fn ready_to_complete(&self) {
|
||||
|
||||
@@ -14,7 +14,7 @@ use send_wrapper::SendWrapper;
|
||||
use std::{borrow::Cow, future::Future};
|
||||
use tachys::dom::window;
|
||||
use wasm_bindgen::{JsCast, JsValue};
|
||||
use web_sys::{Event, HtmlAnchorElement, MouseEvent};
|
||||
use web_sys::{HtmlAnchorElement, MouseEvent};
|
||||
|
||||
mod history;
|
||||
mod server;
|
||||
@@ -300,15 +300,14 @@ pub(crate) fn handle_anchor_click<NavFn, NavFut>(
|
||||
router_base: Option<Cow<'static, str>>,
|
||||
parse_with_base: fn(&str, &str) -> Result<Url, JsValue>,
|
||||
navigate: NavFn,
|
||||
) -> Box<dyn Fn(Event) -> Result<(), JsValue>>
|
||||
) -> Box<dyn Fn(MouseEvent) -> Result<(), JsValue>>
|
||||
where
|
||||
NavFn: Fn(Url, LocationChange) -> NavFut + 'static,
|
||||
NavFut: Future<Output = ()> + 'static,
|
||||
{
|
||||
let router_base = router_base.unwrap_or_default();
|
||||
|
||||
Box::new(move |ev: Event| {
|
||||
let ev = ev.unchecked_into::<MouseEvent>();
|
||||
Box::new(move |ev: MouseEvent| {
|
||||
let origin = window().location().origin()?;
|
||||
if ev.default_prevented()
|
||||
|| ev.button() != 0
|
||||
@@ -369,8 +368,8 @@ where
|
||||
ev.prevent_default();
|
||||
let to = path_name
|
||||
+ if url.search.is_empty() { "" } else { "?" }
|
||||
+ &Url::unescape(&url.search)
|
||||
+ &Url::unescape(&url.hash);
|
||||
+ &url.search
|
||||
+ &url.hash;
|
||||
let state = Reflect::get(&a, &JsValue::from_str("state"))
|
||||
.ok()
|
||||
.and_then(|value| {
|
||||
|
||||
@@ -4,7 +4,6 @@ macro_rules! tuples {
|
||||
($first:ident => $($ty:ident),*) => {
|
||||
impl<$first, $($ty),*> PossibleRouteMatch for ($first, $($ty,)*)
|
||||
where
|
||||
Self: core::fmt::Debug,
|
||||
$first: PossibleRouteMatch,
|
||||
$($ty: PossibleRouteMatch),*,
|
||||
{
|
||||
|
||||
@@ -369,7 +369,7 @@ impl ResolvedStaticPath {
|
||||
eprintln!("{e}");
|
||||
}
|
||||
}
|
||||
owner.unset();
|
||||
owner.unset_with_forced_cleanup();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router_macro"
|
||||
version = "0.8.5"
|
||||
version = "0.8.6"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
use proc_macro::{TokenStream, TokenTree};
|
||||
use proc_macro2::Span;
|
||||
use proc_macro_error2::{abort, proc_macro_error, set_dummy};
|
||||
use quote::{quote, ToTokens};
|
||||
use quote::{format_ident, quote, ToTokens};
|
||||
use syn::{
|
||||
spanned::Spanned, FnArg, Ident, ImplItem, ItemImpl, Path, Type, TypePath,
|
||||
};
|
||||
@@ -267,10 +267,7 @@ fn lazy_route_impl(
|
||||
};
|
||||
let lazy_view_ident =
|
||||
Ident::new(&format!("__{ty_name_to_snake}_View"), im.self_ty.span());
|
||||
let preload_lazy_view_ident = Ident::new(
|
||||
&format!("__preload_{lazy_view_ident}"),
|
||||
lazy_view_ident.span(),
|
||||
);
|
||||
let preload_ident = format_ident!("__preload_{lazy_view_ident}");
|
||||
|
||||
im.items.push(
|
||||
syn::parse::<ImplItem>(
|
||||
@@ -280,7 +277,7 @@ fn lazy_route_impl(
|
||||
// we don't split routes for wasm32 ssr
|
||||
// but we don't require a `hydrate`/`csr` feature on leptos_router
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#preload_lazy_view_ident().await;
|
||||
#preload_ident().await;
|
||||
}
|
||||
}
|
||||
.into(),
|
||||
|
||||
@@ -5,7 +5,7 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "RPC for any web framework."
|
||||
readme = "../README.md"
|
||||
version = "0.8.7"
|
||||
version = "0.8.8"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
@@ -64,6 +64,7 @@ http-body-util = { optional = true, workspace = true, default-features = true }
|
||||
rkyv = { optional = true, workspace = true, default-features = true }
|
||||
rmp-serde = { optional = true, workspace = true, default-features = true }
|
||||
base64 = { workspace = true, default-features = true }
|
||||
bitcode = { optional = true, workspace = true, default-features = true }
|
||||
|
||||
# client
|
||||
gloo-net = { optional = true, workspace = true, default-features = true }
|
||||
@@ -126,6 +127,7 @@ cbor = ["dep:ciborium"]
|
||||
rkyv = ["dep:rkyv"]
|
||||
msgpack = ["dep:rmp-serde"]
|
||||
postcard = ["dep:postcard"]
|
||||
bitcode = ["dep:bitcode"]
|
||||
default-tls = ["reqwest?/default-tls"]
|
||||
rustls = ["reqwest?/rustls-tls", "tokio-tungstenite?/rustls"]
|
||||
reqwest = ["dep:reqwest", "dep:tokio-tungstenite", "dep:tokio"]
|
||||
|
||||
server_fn/src/codec/bitcode.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
use super::{Patch, Post, Put};
use crate::{ContentType, Decodes, Encodes, Format, FormatType};
use bytes::Bytes;

/// Serializes and deserializes with [`bitcode`].
pub struct BitcodeEncoding;

impl ContentType for BitcodeEncoding {
    const CONTENT_TYPE: &'static str = "application/bitcode";
}

impl FormatType for BitcodeEncoding {
    const FORMAT_TYPE: Format = Format::Binary;
}

impl<T> Encodes<T> for BitcodeEncoding
where
    T: bitcode::Encode,
{
    type Error = std::convert::Infallible;

    fn encode(value: &T) -> Result<Bytes, Self::Error> {
        Ok(Bytes::from(bitcode::encode(value)))
    }
}

impl<T> Decodes<T> for BitcodeEncoding
where
    T: bitcode::DecodeOwned,
{
    type Error = bitcode::Error;

    fn decode(bytes: Bytes) -> Result<T, Self::Error> {
        bitcode::decode(bytes.as_ref())
    }
}
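A quick round-trip sketch of the contract these impls provide, using the same `bitcode::encode` / `bitcode::decode` calls they delegate to (the `Point` type is purely illustrative):

    #[derive(bitcode::Encode, bitcode::Decode, Debug, PartialEq)]
    struct Point {
        x: i32,
        y: i32,
    }

    let bytes = bitcode::encode(&Point { x: 1, y: 2 });
    let decoded: Point = bitcode::decode(&bytes).expect("valid bitcode");
    assert_eq!(decoded, Point { x: 1, y: 2 });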
/// Pass arguments and receive responses using `bitcode` in a `POST` request.
pub type Bitcode = Post<BitcodeEncoding>;

/// Pass arguments and receive responses using `bitcode` in the body of a `PATCH` request.
/// **Note**: Browser support for `PATCH` requests without JS/WASM may be poor.
/// Consider using a `POST` request if functionality without JS/WASM is required.
pub type PatchBitcode = Patch<BitcodeEncoding>;

/// Pass arguments and receive responses using `bitcode` in the body of a `PUT` request.
/// **Note**: Browser support for `PUT` requests without JS/WASM may be poor.
/// Consider using a `POST` request if functionality without JS/WASM is required.
pub type PutBitcode = Put<BitcodeEncoding>;
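A hedged sketch of how the new codec is selected on a server function, matching the `Some("Bitcode")` arm added to the macro later in this diff (import paths and the function itself are illustrative, and the `bitcode` feature must be enabled):

    use leptos::prelude::*;
    use server_fn::codec::Bitcode;

    // arguments and the return value travel as `application/bitcode` bodies
    #[server(input = Bitcode, output = Bitcode)]
    pub async fn add_point(x: i32, y: i32) -> Result<i32, ServerFnError> {
        Ok(x + y)
    }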
@@ -50,6 +50,11 @@ mod postcard;
|
||||
#[cfg(feature = "postcard")]
|
||||
pub use postcard::*;
|
||||
|
||||
#[cfg(feature = "bitcode")]
|
||||
mod bitcode;
|
||||
#[cfg(feature = "bitcode")]
|
||||
pub use bitcode::*;
|
||||
|
||||
mod patch;
|
||||
pub use patch::*;
|
||||
mod post;
|
||||
|
||||
@@ -133,6 +133,8 @@ pub use ::bytes as bytes_export;
|
||||
#[doc(hidden)]
|
||||
pub use ::http as http_export;
|
||||
use base64::{engine::general_purpose::STANDARD_NO_PAD, DecodeError, Engine};
|
||||
#[cfg(feature = "bitcode")]
|
||||
pub use bitcode;
|
||||
// re-exported to make it possible to implement a custom Client without adding a separate
|
||||
// dependency on `bytes`
|
||||
pub use bytes::Bytes;
|
||||
|
||||
@@ -10,7 +10,8 @@
|
||||
feature = "multipart",
|
||||
feature = "serde-lite",
|
||||
feature = "cbor",
|
||||
feature = "msgpack"
|
||||
feature = "msgpack",
|
||||
feature = "bitcode",
|
||||
))
|
||||
))]
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "RPC for any web framework."
|
||||
readme = "../README.md"
|
||||
version = "0.8.7"
|
||||
version = "0.8.8"
|
||||
edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
|
||||
@@ -331,6 +331,7 @@ impl ServerFnCall {
|
||||
enum PathInfo {
|
||||
Serde,
|
||||
Rkyv,
|
||||
Bitcode,
|
||||
None,
|
||||
}
|
||||
|
||||
@@ -341,6 +342,12 @@ impl ServerFnCall {
|
||||
Clone, #server_fn_path::rkyv::Archive, #server_fn_path::rkyv::Serialize, #server_fn_path::rkyv::Deserialize
|
||||
},
|
||||
),
|
||||
Some("Bitcode") => (
|
||||
PathInfo::Bitcode,
|
||||
quote! {
|
||||
Clone, #server_fn_path::bitcode::Encode, #server_fn_path::bitcode::Decode
|
||||
},
|
||||
),
|
||||
Some("MultipartFormData")
|
||||
| Some("Streaming")
|
||||
| Some("StreamingText") => (PathInfo::None, quote! {}),
|
||||
@@ -376,6 +383,7 @@ impl ServerFnCall {
|
||||
#[serde(crate = #serde_path)]
|
||||
}
|
||||
}
|
||||
PathInfo::Bitcode => quote! {},
|
||||
PathInfo::Rkyv => quote! {},
|
||||
PathInfo::None => quote! {},
|
||||
};
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "tachys"
|
||||
version = "0.2.8"
|
||||
version = "0.2.11"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -205,6 +205,14 @@ where
|
||||
self.add_any_attr(enterkeyhint(value))
|
||||
}
|
||||
|
||||
/// The `exportparts` attribute enables the sharing of parts of an element's shadow DOM with a containing document.
|
||||
fn exportparts(
|
||||
self,
|
||||
value: V,
|
||||
) -> <Self as AddAnyAttr>::Output<Attr<Exportparts, V>> {
|
||||
self.add_any_attr(exportparts(value))
|
||||
}
|
||||
|
||||
/// The `hidden` global attribute is a Boolean attribute indicating that the element is not yet, or is no longer, relevant.
|
||||
fn hidden(self, value: V) -> <Self as AddAnyAttr>::Output<Attr<Hidden, V>> {
|
||||
self.add_any_attr(hidden(value))
|
||||
|
||||
@@ -57,6 +57,10 @@ where
|
||||
_style: &mut String,
|
||||
_inner_html: &mut String,
|
||||
) {
|
||||
// If this is a class="..." attribute (not class:name=value), clear previous value
|
||||
if self.class.should_overwrite() {
|
||||
class.clear();
|
||||
}
|
||||
class.push(' ');
|
||||
self.class.to_html(class);
|
||||
}
|
||||
@@ -156,6 +160,12 @@ pub trait IntoClass: Send {
    /// Renders the class to HTML.
    fn to_html(self, class: &mut String);

    /// Whether this class attribute should overwrite previous class values.
    /// Returns `true` for `class="..."` attributes, `false` for `class:name=value` directives.
    fn should_overwrite(&self) -> bool {
        false
    }
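To make the overwrite rule concrete, a tiny standalone sketch of the buffer handling shown in `to_html` above (not the crate's public API): a `class:name=value` directive only appends its own name, while a plain `class="..."` value reports `should_overwrite() == true` and clears whatever was accumulated first.

    let mut class = String::new();

    // a `class:active=true` directive appends only its own name
    class.push(' ');
    class.push_str("active");

    // a later plain `class="btn"` overwrites: the accumulated value is cleared first
    class.clear();
    class.push(' ');
    class.push_str("btn");

    assert_eq!(class.trim(), "btn");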
/// Renders the class to HTML for a `<template>`.
|
||||
#[allow(unused)] // it's used with `nightly` feature
|
||||
fn to_template(class: &mut String) {}
|
||||
@@ -289,6 +299,10 @@ impl IntoClass for &str {
|
||||
class.push_str(self);
|
||||
}
|
||||
|
||||
fn should_overwrite(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(
|
||||
self,
|
||||
el: &crate::renderer::types::Element,
|
||||
@@ -346,6 +360,10 @@ impl IntoClass for Cow<'_, str> {
|
||||
IntoClass::to_html(&*self, class);
|
||||
}
|
||||
|
||||
fn should_overwrite(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(
|
||||
self,
|
||||
el: &crate::renderer::types::Element,
|
||||
@@ -403,6 +421,10 @@ impl IntoClass for String {
|
||||
IntoClass::to_html(self.as_str(), class);
|
||||
}
|
||||
|
||||
fn should_overwrite(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(
|
||||
self,
|
||||
el: &crate::renderer::types::Element,
|
||||
@@ -460,6 +482,10 @@ impl IntoClass for Arc<str> {
|
||||
IntoClass::to_html(self.as_ref(), class);
|
||||
}
|
||||
|
||||
fn should_overwrite(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(
|
||||
self,
|
||||
el: &crate::renderer::types::Element,
|
||||
|
||||
@@ -47,11 +47,13 @@ pub fn directive<T, P, D>(handler: D, param: P) -> Directive<T, D, P>
where
    D: IntoDirective<T, P>,
{
    Directive(Some(SendWrapper::new(DirectiveInner {
        handler,
        param,
        t: PhantomData,
    })))
    Directive((!cfg!(feature = "ssr")).then(|| {
        SendWrapper::new(DirectiveInner {
            handler,
            param,
            t: PhantomData,
        })
    }))
}
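Here the old unconditional `Some(SendWrapper::new(...))` construction is replaced by the guarded version; the same `(!cfg!(feature = "ssr")).then(...)` pattern is applied to event callbacks and properties further down. A generic sketch of the idea (the `Rc` just stands in for a browser-only, `!Send` value, and the `ssr` feature name matches this crate's): during SSR the value is simply never constructed, so its `SendWrapper` can never be dropped on a different thread.

    use std::rc::Rc;

    // stand-in for a DOM-only value (an event closure, a JS object, ...)
    let client_only = Rc::new(42);

    // `None` when compiled with the `ssr` feature, `Some(SendWrapper::new(...))` otherwise
    let inner = (!cfg!(feature = "ssr"))
        .then(|| send_wrapper::SendWrapper::new(client_only));

    assert_eq!(inner.is_some(), !cfg!(feature = "ssr"));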
|
||||
|
||||
/// Custom logic that runs in the browser when the element is created or hydrated.
|
||||
@@ -151,13 +153,7 @@ where
|
||||
Directive(inner)
|
||||
}
|
||||
|
||||
fn dry_resolve(&mut self) {
|
||||
// dry_resolve() only runs during SSR, and we should use it to
|
||||
// synchronously remove and drop the SendWrapper value
|
||||
// we don't need this value during SSR and leaving it here could drop it
|
||||
// from a different thread
|
||||
self.0.take();
|
||||
}
|
||||
fn dry_resolve(&mut self) {}
|
||||
|
||||
async fn resolve(self) -> Self::AsyncOutput {
|
||||
self
|
||||
|
||||
@@ -212,7 +212,7 @@ html_self_closing_elements! {
|
||||
/// The `<img>` HTML element embeds an image into the document.
|
||||
img HtmlImageElement [alt, attributionsrc, crossorigin, decoding, elementtiming, fetchpriority, height, ismap, loading, referrerpolicy, sizes, src, srcset, usemap, width] true,
|
||||
/// The `<input>` HTML element is used to create interactive controls for web-based forms in order to accept data from the user; a wide variety of types of input data and control widgets are available, depending on the device and user agent. The `<input>` element is one of the most powerful and complex in all of HTML due to the sheer number of combinations of input types and attributes.
|
||||
input HtmlInputElement [accept, alt, autocomplete, capture, checked, disabled, form, formaction, formenctype, formmethod, formnovalidate, formtarget, height, list, max, maxlength, min, minlength, multiple, name, pattern, placeholder, popovertarget, popovertargetaction, readonly, required, size, src, step, r#type, value, width] true,
|
||||
input HtmlInputElement [accept, alt, autocomplete, capture, checked, dirname, disabled, form, formaction, formenctype, formmethod, formnovalidate, formtarget, height, list, max, maxlength, min, minlength, multiple, name, pattern, placeholder, popovertarget, popovertargetaction, readonly, required, size, src, step, r#type, value, width] true,
|
||||
/// The `<link>` HTML element specifies relationships between the current document and an external resource. This element is most commonly used to link to CSS, but is also used to establish site icons (both "favicon" style icons and icons for the home screen and apps on mobile devices) among other things.
|
||||
link HtmlLinkElement [r#as, blocking, crossorigin, fetchpriority, href, hreflang, imagesizes, imagesrcset, integrity, media, rel, referrerpolicy, sizes, r#type] true,
|
||||
/// The `<meta>` HTML element represents Metadata that cannot be represented by other HTML meta-related elements, like base, link, script, style or title.
|
||||
|
||||
@@ -317,6 +317,26 @@ where
|
||||
type State = ElementState<At::State, Ch::State>;
|
||||
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
// check whether the tag is the same, for custom elements
|
||||
// because this is const `false` for all other element types,
|
||||
// the compiler should be able to optimize it out
|
||||
if E::TAG.is_empty() {
|
||||
// see https://github.com/leptos-rs/leptos/issues/4412
|
||||
let new_tag = self.tag.tag();
|
||||
|
||||
// this is not particularly efficient, but it saves us from
|
||||
// having to keep track of the tag name for every element state
|
||||
let old_tag = state.el.tag_name();
|
||||
if new_tag != old_tag {
|
||||
let mut new_state = self.build();
|
||||
state.insert_before_this(&mut new_state);
|
||||
state.unmount();
|
||||
*state = new_state;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// rebuild attributes and children for any element
|
||||
let ElementState {
|
||||
attrs, children, ..
|
||||
} = state;
|
||||
|
||||
@@ -113,7 +113,7 @@ where
|
||||
event,
|
||||
#[cfg(feature = "reactive_graph")]
|
||||
owner: reactive_graph::owner::Owner::current().unwrap_or_default(),
|
||||
cb: Some(SendWrapper::new(cb)),
|
||||
cb: (!cfg!(feature = "ssr")).then(|| SendWrapper::new(cb)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -323,7 +323,9 @@ where
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
let (el, prev_cleanup) = state;
|
||||
if let Some(prev) = prev_cleanup.take() {
|
||||
(prev.into_inner())(el);
|
||||
if let Some(remove) = prev.into_inner() {
|
||||
remove();
|
||||
}
|
||||
}
|
||||
*prev_cleanup = Some(if E::CAPTURE {
|
||||
self.attach_capture(el)
|
||||
@@ -350,13 +352,7 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
fn dry_resolve(&mut self) {
|
||||
// dry_resolve() only runs during SSR, and we should use it to
|
||||
// synchronously remove and drop the SendWrapper value
|
||||
// we don't need this value during SSR and leaving it here could drop it
|
||||
// from a different thread
|
||||
self.cb.take();
|
||||
}
|
||||
fn dry_resolve(&mut self) {}
|
||||
|
||||
async fn resolve(self) -> Self::AsyncOutput {
|
||||
self
|
||||
|
||||
@@ -22,7 +22,7 @@ where
|
||||
{
|
||||
Property {
|
||||
key,
|
||||
value: Some(SendWrapper::new(value)),
|
||||
value: (!cfg!(feature = "ssr")).then(|| SendWrapper::new(value)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -115,13 +115,7 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
fn dry_resolve(&mut self) {
|
||||
// dry_resolve() only runs during SSR, and we should use it to
|
||||
// synchronously remove and drop the SendWrapper value
|
||||
// we don't need this value during SSR and leaving it here could drop it
|
||||
// from a different thread
|
||||
self.value.take();
|
||||
}
|
||||
fn dry_resolve(&mut self) {}
|
||||
|
||||
async fn resolve(self) -> Self::AsyncOutput {
|
||||
self
|
||||
|
||||
@@ -7,6 +7,9 @@ use std::cell::Cell;
|
||||
use std::{cell::RefCell, panic::Location, rc::Rc};
|
||||
use web_sys::{Comment, Element, Node, Text};
|
||||
|
||||
#[cfg(feature = "mark_branches")]
|
||||
const COMMENT_NODE: u16 = 8;
|
||||
|
||||
/// Hydration works by walking over the DOM, adding interactivity as needed.
|
||||
///
|
||||
/// This cursor tracks the location in the DOM that is currently being hydrated. Each that type
|
||||
@@ -43,13 +46,27 @@ where
|
||||
///
|
||||
/// Does nothing if there is no child.
|
||||
pub fn child(&self) {
|
||||
//crate::log("advancing to next child of ");
|
||||
//Rndr::log_node(&self.current());
|
||||
let mut inner = self.0.borrow_mut();
|
||||
if let Some(node) = Rndr::first_child(&inner) {
|
||||
*inner = node;
|
||||
}
|
||||
//drop(inner);
|
||||
|
||||
#[cfg(feature = "mark_branches")]
|
||||
{
|
||||
while inner.node_type() == COMMENT_NODE {
|
||||
if let Some(content) = inner.text_content() {
|
||||
if content.starts_with("bo") || content.starts_with("bc") {
|
||||
if let Some(sibling) = Rndr::next_sibling(&inner) {
|
||||
*inner = sibling;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
// //drop(inner);
|
||||
//crate::log(">> which is ");
|
||||
//Rndr::log_node(&self.current());
|
||||
}
|
||||
@@ -58,12 +75,25 @@ where
|
||||
///
|
||||
/// Does nothing if there is no sibling.
|
||||
pub fn sibling(&self) {
|
||||
//crate::log("advancing to next sibling of ");
|
||||
//Rndr::log_node(&self.current());
|
||||
let mut inner = self.0.borrow_mut();
|
||||
if let Some(node) = Rndr::next_sibling(&inner) {
|
||||
*inner = node;
|
||||
}
|
||||
|
||||
#[cfg(feature = "mark_branches")]
|
||||
{
|
||||
while inner.node_type() == COMMENT_NODE {
|
||||
if let Some(content) = inner.text_content() {
|
||||
if content.starts_with("bo") || content.starts_with("bc") {
|
||||
if let Some(sibling) = Rndr::next_sibling(&inner) {
|
||||
*inner = sibling;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
//drop(inner);
|
||||
//crate::log(">> which is ");
|
||||
//Rndr::log_node(&self.current());
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.