mirror of https://github.com/leptos-rs/leptos.git
synced 2025-12-27 16:54:41 -05:00

Compare commits — 83 commits (SHA1):

b484b39779, a0d657f9b1, cddb24ebd3, e8afd11995, 4d01d95175, 9bf5b22633, da4a7d5285, 2af6c6353c,
7f4b5eb4d1, fbf46ca58c, 0edbd9b3b5, 43359694b6, 9dd5501b1a, 6843f654ff, cb7c648400, d3148ac9c9,
6d7e203efe, b5c69937b4, 0b45ff5116, 13dc6f474d, 21218fc802, 65b5be2748, 7c30bb92f7, edf369f035,
eb02304ee1, 578b672f14, b20902aaa1, d3ad0c67b6, 62d8ec9cc5, 0d2523190d, 338da18ed2, 616aae4c3c,
c6e59eeb43, c025ae59ac, df46feee5d, bbf5bf9170, 7a3556bf34, d13936cab5, b303a35d76, a453b7d1bd,
3b9ccdf57e, 27cd423ebc, b3907baf49, 9a8bb7eb75, 95db8c939e, 2bfa9952af, 4e445f43d6, 5f544f67ae,
68477d2b76, 5bd9469b93, 4bca70dc2f, d0295009cf, 3e8b5c9805, 924efa8ac1, b92a14228c, 68967fdad3,
44bc4fbc31, 646cfc12ed, 62977a68b0, e9ee90c78f, 73e728f145, 6f047a2271, 7c942b8b47, d4bf6d9cb6,
9deb96ea01, d1899cde1c, ee731d7a3a, 59cbcfa0fb, 0939cf63ad, d37512bebd, 7dd44919cf, 3eaabf85ea,
d60c632c90, f5ad4f4b88, f3a053f99b, 06573cbca1, c0ca97e42f, 9a4e93ab07, bee2b5ea1c, 3b058e77f1,
7adb11ec49, 1af5f66ee6, 956f1836ec
.github/workflows/get-example-changed.yml (vendored, 2 changes)
@@ -24,7 +24,7 @@ jobs:
           fetch-depth: 0
       - name: Get example files that changed
         id: changed-files
-        uses: tj-actions/changed-files@v46
+        uses: tj-actions/changed-files@v47
         with:
           files: |
             examples/**
.github/workflows/get-leptos-changed.yml (vendored, 2 changes)
@@ -18,7 +18,7 @@ jobs:
           fetch-depth: 0
       - name: Get source files that changed
        id: changed-source
-        uses: tj-actions/changed-files@v46
+        uses: tj-actions/changed-files@v47
        with:
          files_ignore: |
            .*/**/*
.github/workflows/run-cargo-make-task.yml (vendored, 2 changes)
@@ -88,7 +88,7 @@ jobs:
        run: trunk --version
      - name: Install Node.js
        if: contains(inputs.directory, 'examples')
-       uses: actions/setup-node@v4
+       uses: actions/setup-node@v6
        with:
          node-version: 20
      - uses: pnpm/action-setup@v4
Cargo.lock (generated, 1126 changes) — file diff suppressed because it is too large
Cargo.toml (83 changes)
@@ -45,44 +45,42 @@ rust-version = "1.88"
 
 [workspace.dependencies]
 # members
-throw_error = { path = "./any_error/", version = "0.3.0" }
+throw_error = { path = "./any_error/", version = "0.3.1" }
 any_spawner = { path = "./any_spawner/", version = "0.3.0" }
 const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
 either_of = { path = "./either_of/", version = "0.1.6" }
 hydration_context = { path = "./hydration_context", version = "0.3.0" }
-leptos = { path = "./leptos", version = "0.8.8" }
+leptos = { path = "./leptos", version = "0.8.12" }
 leptos_config = { path = "./leptos_config", version = "0.8.7" }
-leptos_dom = { path = "./leptos_dom", version = "0.8.6" }
+leptos_dom = { path = "./leptos_dom", version = "0.8.7" }
 leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.5" }
-leptos_integration_utils = { path = "./integrations/utils", version = "0.8.5" }
-leptos_macro = { path = "./leptos_macro", version = "0.8.8" }
-leptos_router = { path = "./router", version = "0.8.6" }
-leptos_router_macro = { path = "./router_macro", version = "0.8.5" }
+leptos_integration_utils = { path = "./integrations/utils", version = "0.8.6" }
+leptos_macro = { path = "./leptos_macro", version = "0.8.11" }
+leptos_router = { path = "./router", version = "0.8.9" }
+leptos_router_macro = { path = "./router_macro", version = "0.8.6" }
 leptos_server = { path = "./leptos_server", version = "0.8.5" }
 leptos_meta = { path = "./meta", version = "0.8.5" }
 next_tuple = { path = "./next_tuple", version = "0.1.0" }
 oco_ref = { path = "./oco", version = "0.2.1" }
 or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
-reactive_graph = { path = "./reactive_graph", version = "0.2.6" }
-reactive_stores = { path = "./reactive_stores", version = "0.2.5" }
+reactive_graph = { path = "./reactive_graph", version = "0.2.9" }
+reactive_stores = { path = "./reactive_stores", version = "0.3.0" }
 reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.6" }
-server_fn = { path = "./server_fn", version = "0.8.6" }
-server_fn_macro = { path = "./server_fn_macro", version = "0.8.7" }
+server_fn = { path = "./server_fn", version = "0.8.8" }
+server_fn_macro = { path = "./server_fn_macro", version = "0.8.8" }
 server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.5" }
-tachys = { path = "./tachys", version = "0.2.7" }
-wasm_split_helpers = { path = "./wasm_split", version = "0.1.2" }
-wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.2" }
+tachys = { path = "./tachys", version = "0.2.10" }
 
 # members deps
 async-once-cell = { default-features = false, version = "0.5.3" }
 itertools = { default-features = false, version = "0.14.0" }
 convert_case = { default-features = false, version = "0.8.0" }
-serde_json = { default-features = false, version = "1.0.142" }
+serde_json = { default-features = false, version = "1.0.143" }
 trybuild = { default-features = false, version = "1.0.110" }
-typed-builder = { default-features = false, version = "0.21.0" }
-thiserror = { default-features = false, version = "2.0.12" }
+typed-builder = { default-features = false, version = "0.21.2" }
+thiserror = { default-features = false, version = "2.0.17" }
 wasm-bindgen = { default-features = false, version = "0.2.100" }
-indexmap = { default-features = false, version = "2.9.0" }
+indexmap = { default-features = false, version = "2.11.0" }
 rstml = { default-features = false, version = "0.12.1" }
 rustc_version = { default-features = false, version = "0.4.1" }
 guardian = { default-features = false, version = "1.3.0" }
@@ -97,71 +95,72 @@ send_wrapper = { default-features = false, version = "0.6.0" }
 tokio-test = { default-features = false, version = "0.4.4" }
 html-escape = { default-features = false, version = "0.2.13" }
 proc-macro-error2 = { default-features = false, version = "2.0.1" }
-const_format = { default-features = false, version = "0.2.34" }
+const_format = { default-features = false, version = "0.2.35" }
 gloo-net = { default-features = false, version = "0.6.0" }
 url = { default-features = false, version = "2.5.4" }
 tokio = { default-features = false, version = "1.47.1" }
 base64 = { default-features = false, version = "0.22.1" }
-cfg-if = { default-features = false, version = "1.0.0" }
+cfg-if = { default-features = false, version = "1.0.3" }
 wasm-bindgen-futures = { default-features = false, version = "0.4.50" }
 tower = { default-features = false, version = "0.5.2" }
-proc-macro2 = { default-features = false, version = "1.0.96" }
+proc-macro2 = { default-features = false, version = "1.0.101" }
 serde = { default-features = false, version = "1.0.219" }
-parking_lot = { default-features = false, version = "0.12.4" }
-axum = { default-features = false, version = "0.8.4" }
+parking_lot = { default-features = false, version = "0.12.5" }
+axum = { default-features = false, version = "0.8.6" }
 serde_qs = { default-features = false, version = "0.15.0" }
-syn = { default-features = false, version = "2.0.104" }
+syn = { default-features = false, version = "2.0.106" }
 xxhash-rust = { default-features = false, version = "0.8.15" }
 paste = { default-features = false, version = "1.0.15" }
-quote = { default-features = false, version = "1.0.40" }
+quote = { default-features = false, version = "1.0.41" }
 web-sys = { default-features = false, version = "0.3.77" }
 js-sys = { default-features = false, version = "0.3.77" }
 rand = { default-features = false, version = "0.9.1" }
 serde-lite = { default-features = false, version = "0.5.0" }
-tokio-tungstenite = { default-features = false, version = "0.27.0" }
+tokio-tungstenite = { default-features = false, version = "0.28.0" }
 serial_test = { default-features = false, version = "3.2.0" }
 erased = { default-features = false, version = "0.1.2" }
 glib = { default-features = false, version = "0.20.12" }
-async-trait = { default-features = false, version = "0.1.88" }
+async-trait = { default-features = false, version = "0.1.89" }
 typed-builder-macro = { default-features = false, version = "0.21.0" }
 linear-map = { default-features = false, version = "1.2.0" }
-anyhow = { default-features = false, version = "1.0.98" }
+anyhow = { default-features = false, version = "1.0.100" }
 walkdir = { default-features = false, version = "2.5.0" }
 actix-ws = { default-features = false, version = "0.3.0" }
 tower-http = { default-features = false, version = "0.6.4" }
-prettyplease = { default-features = false, version = "0.2.36" }
-inventory = { default-features = false, version = "0.3.20" }
-config = { default-features = false, version = "0.15.13" }
-camino = { default-features = false, version = "1.1.11" }
+prettyplease = { default-features = false, version = "0.2.37" }
+inventory = { default-features = false, version = "0.3.21" }
+config = { default-features = false, version = "0.15.14" }
+camino = { default-features = false, version = "1.2.1" }
 ciborium = { default-features = false, version = "0.2.2" }
 bitcode = { default-features = false, version = "0.6.6" }
 multer = { default-features = false, version = "3.1.0" }
 leptos-spin-macro = { default-features = false, version = "0.2.0" }
 sledgehammer_utils = { default-features = false, version = "0.3.1" }
 sledgehammer_bindgen = { default-features = false, version = "0.6.0" }
 wasm-streams = { default-features = false, version = "0.4.2" }
-rkyv = { default-features = false, version = "0.8.11" }
+rkyv = { default-features = false, version = "0.8.12" }
 temp-env = { default-features = false, version = "0.3.6" }
 uuid = { default-features = false, version = "1.18.0" }
 bytes = { default-features = false, version = "1.10.1" }
 http = { default-features = false, version = "1.3.1" }
-regex = { default-features = false, version = "1.11.1" }
+regex = { default-features = false, version = "1.11.3" }
 drain_filter_polyfill = { default-features = false, version = "0.1.3" }
-tempfile = { default-features = false, version = "3.20.0" }
+tempfile = { default-features = false, version = "3.23.0" }
 futures-lite = { default-features = false, version = "2.6.1" }
 log = { default-features = false, version = "0.4.27" }
-percent-encoding = { default-features = false, version = "2.3.1" }
+percent-encoding = { default-features = false, version = "2.3.2" }
 async-executor = { default-features = false, version = "1.13.2" }
 const-str = { default-features = false, version = "0.6.4" }
 http-body-util = { default-features = false, version = "0.1.3" }
-hyper = { default-features = false, version = "1.6.0" }
+hyper = { default-features = false, version = "1.7.0" }
 postcard = { default-features = false, version = "1.1.3" }
 rmp-serde = { default-features = false, version = "1.3.0" }
-reqwest = { default-features = false, version = "0.12.22" }
+reqwest = { default-features = false, version = "0.12.23" }
 tower-layer = { default-features = false, version = "0.3.3" }
-attribute-derive = { default-features = false, version = "0.10.3" }
+attribute-derive = { default-features = false, version = "0.10.5" }
 insta = { default-features = false, version = "1.43.1" }
 codee = { default-features = false, version = "0.3.0" }
-actix-http = { default-features = false, version = "3.11.0" }
+actix-http = { default-features = false, version = "3.11.2" }
 wasm-bindgen-test = { default-features = false, version = "0.3.50" }
 rustversion = { default-features = false, version = "1.0.22" }
 getrandom = { default-features = false, version = "0.3.3" }
@@ -170,6 +169,10 @@ async-lock = { default-features = false, version = "3.4.1" }
 base16 = { default-features = false, version = "0.2.1" }
 digest = { default-features = false, version = "0.10.7" }
 sha2 = { default-features = false, version = "0.10.8" }
+subsecond = { default-features = false, version = "0.7.0-rc.0" }
+dioxus-cli-config = { default-features = false, version = "0.7.0-rc.0" }
+dioxus-devtools = { default-features = false, version = "0.7.0-rc.0" }
+wasm_split_helpers = { default-features = false, version = "0.2.0" }
 
 [profile.release]
 codegen-units = 1
@@ -95,7 +95,7 @@ Here are some resources for learning more about Leptos:
 [`cargo-leptos`](https://github.com/leptos-rs/cargo-leptos) is a build tool that's designed to make it easy to build apps that run on both the client and the server, with seamless integration. The best way to get started with a real Leptos project right now is to use `cargo-leptos` and our starter templates for [Actix](https://github.com/leptos-rs/start) or [Axum](https://github.com/leptos-rs/start-axum).
 
 ```bash
-cargo install cargo-leptos
+cargo install cargo-leptos --locked
 cargo leptos new --git https://github.com/leptos-rs/start-axum
 cd [your project name]
 cargo leptos watch
@@ -1,6 +1,6 @@
 [package]
 name = "throw_error"
-version = "0.3.0"
+version = "0.3.1"
 authors = ["Greg Johnston"]
 license = "MIT"
 readme = "../README.md"
@@ -11,3 +11,6 @@ edition.workspace = true
 
 [dependencies]
 pin-project-lite = { workspace = true, default-features = true }
+
+[dev-dependencies]
+anyhow.workspace = true
@@ -45,10 +45,10 @@ impl fmt::Display for Error {
 
 impl<T> From<T> for Error
 where
-    T: error::Error + Send + Sync + 'static,
+    T: Into<Box<dyn error::Error + Send + Sync + 'static>>,
 {
     fn from(value: T) -> Self {
-        Error(Arc::new(value))
+        Error(Arc::from(value.into()))
     }
 }
@@ -158,3 +158,32 @@ where
         this.inner.poll(cx)
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::error::Error as StdError;
+
+    #[derive(Debug)]
+    struct MyError;
+
+    impl Display for MyError {
+        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+            write!(f, "MyError")
+        }
+    }
+
+    impl StdError for MyError {}
+
+    #[test]
+    fn test_from() {
+        let e = MyError;
+        let _le = Error::from(e);
+
+        let e = "some error".to_string();
+        let _le = Error::from(e);
+
+        let e = anyhow::anyhow!("anyhow error");
+        let _le = Error::from(e);
+    }
+}
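To make the effect of the broadened bound concrete, here is a minimal sketch — a hypothetical helper, not part of the diff — that relies only on the blanket `From` impl shown above. Anything convertible into a boxed error (ordinary error types, `String`s, or `anyhow::Error`, as the new test exercises) now converts into `throw_error::Error`, so `?` keeps working unchanged. It assumes `anyhow` is available, which the diff adds as a dev-dependency.

```rust
use throw_error::Error;

// Hypothetical helper used only to illustrate the broadened bound:
// `T: Into<Box<dyn std::error::Error + Send + Sync + 'static>>`.
fn parse_port(input: &str) -> Result<u16, Error> {
    // `std::num::ParseIntError` implements `std::error::Error`, so `?`
    // converts it through the blanket `From<T> for Error` impl above.
    Ok(input.trim().parse::<u16>()?)
}

fn main() {
    assert_eq!(parse_port(" 8080 ").unwrap(), 8080);
    assert!(parse_port("not-a-port").is_err());

    // Plain strings and `anyhow` errors convert as well, as the new test shows.
    let _e: Error = Error::from("upload failed".to_string());
    let _e: Error = Error::from(anyhow::anyhow!("database offline"));
}
```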
examples/regression/e2e/features/issue_4251.feature (new file, 13 lines)
@@ -0,0 +1,13 @@
+@check_issue_4251
+Feature: Check that issue 4251 does not reappear
+
+  Scenario: Clicking a link to the same page you’re currently on should not add the page to the history stack.
+    Given I see the app
+    And I can access regression test 4324
+    When I select the link This page
+    And I select the link This page
+    And I select the link This page
+    Then I see the result is the string Issue4324
+    When I press the back button
+    And I select the link 4324
+    Then I see the result is the string Issue4324
examples/regression/e2e/features/issue_4285.feature (new file, 9 lines)
@@ -0,0 +1,9 @@
+@check_issue_4285
+Feature: Check that issue 4285 does not reappear
+
+  Scenario: Navigating several times to same lazy route does not cause issues.
+    Given I see the app
+    And I can access regression test 4285
+    And I can access regression test 4285
+    And I can access regression test 4285
+    Then I see the result is the string 42
examples/regression/e2e/features/issue_4296.feature (new file, 18 lines)
@@ -0,0 +1,18 @@
+@check_issue_4296
+Feature: Check that issue 4296 does not reappear
+
+  Scenario: Query param signals created in LazyRoute::data() are reactive in ::view().
+    Given I see the app
+    And I can access regression test 4296
+    Then I see the result is the string None
+    When I select the link abc
+    Then I see the result is the string Some("abc")
+    When I select the link def
+    Then I see the result is the string Some("def")
+
+  Scenario: Loading page with query signal works as well.
+    Given I see the app
+    And I can access regression test 4296
+    When I select the link abc
+    When I reload the page
+    Then I see the result is the string Some("abc")
examples/regression/e2e/features/issue_4324.feature
Normal file
11
examples/regression/e2e/features/issue_4324.feature
Normal file
@@ -0,0 +1,11 @@
|
||||
@check_issue_4324
|
||||
Feature: Check that issue 4324 does not reappear
|
||||
|
||||
Scenario: Navigating to the same page after clicking "Back" should set the URL correctly
|
||||
Given I see the app
|
||||
And I can access regression test 4324
|
||||
Then I see the path is /4324/
|
||||
When I press the back button
|
||||
Then I see the path is /
|
||||
When I select the link 4324
|
||||
Then I see the path is /4324/
|
||||
examples/regression/e2e/tests/fixtures/check.rs (vendored, 10 changes)
@@ -43,3 +43,13 @@ pub async fn element_value_is(
     assert_eq!(value.as_deref(), Some(expected));
     Ok(())
 }
+
+pub async fn path_is(client: &Client, expected_path: &str) -> Result<()> {
+    let url = client
+        .current_url()
+        .await
+        .expect("could not access current URL");
+    let path = url.path();
+    assert_eq!(expected_path, path);
+    Ok(())
+}
@@ -45,3 +45,12 @@ async fn i_refresh_the_browser(world: &mut AppWorld) -> Result<()> {
 
     Ok(())
 }
+
+#[given(regex = "^I press the back button$")]
+#[when(regex = "^I press the back button$")]
+async fn i_go_back(world: &mut AppWorld) -> Result<()> {
+    let client = &world.client;
+    client.back().await?;
+
+    Ok(())
+}
@@ -43,3 +43,10 @@ async fn i_see_the_value(
     check::element_value_is(client, &id, &value).await?;
     Ok(())
 }
+
+#[then(regex = r"^I see the path is (.*)$")]
+async fn i_see_the_path(world: &mut AppWorld, path: String) -> Result<()> {
+    let client = &world.client;
+    check::path_is(client, &path).await?;
+    Ok(())
+}
@@ -1,5 +1,6 @@
 use crate::{
     issue_4005::Routes4005, issue_4088::Routes4088, issue_4217::Routes4217,
+    issue_4285::Routes4285, issue_4296::Routes4296, issue_4324::Routes4324,
     pr_4015::Routes4015, pr_4091::Routes4091,
 };
 use leptos::prelude::*;
@@ -31,9 +32,11 @@ pub fn shell(options: LeptosOptions) -> impl IntoView {
 pub fn App() -> impl IntoView {
     provide_meta_context();
     let fallback = || view! { "Page not found." }.into_view();
+    let (_, set_is_routing) = signal(false);
 
     view! {
         <Stylesheet id="leptos" href="/pkg/regression.css"/>
-        <Router>
+        <Router set_is_routing>
             <main>
                 <Routes fallback>
                     <Route path=path!("") view=HomePage/>
@@ -42,6 +45,9 @@ pub fn App() -> impl IntoView {
                     <Routes4088/>
                     <Routes4217/>
                     <Routes4005/>
+                    <Routes4285/>
+                    <Routes4296/>
+                    <Routes4324/>
                 </Routes>
             </main>
         </Router>
@@ -66,6 +72,9 @@ fn HomePage() -> impl IntoView {
                 <li><a href="/4088/">"4088"</a></li>
                 <li><a href="/4217/">"4217"</a></li>
                 <li><a href="/4005/">"4005"</a></li>
+                <li><a href="/4285/">"4285"</a></li>
+                <li><a href="/4296/">"4296"</a></li>
+                <li><a href="/4324/">"4324"</a></li>
             </ul>
         </nav>
     }
examples/regression/src/issue_4285.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
+use leptos::prelude::*;
+use leptos_router::LazyRoute;
+#[allow(unused_imports)]
+use leptos_router::{
+    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
+};
+
+#[component]
+pub fn Routes4285() -> impl MatchNestedRoutes + Clone {
+    view! {
+        <Route path=path!("4285") view={Lazy::<Issue4285>::new()}/>
+    }
+    .into_inner()
+}
+
+struct Issue4285 {
+    data: Resource<Result<i32, ServerFnError>>,
+}
+
+impl LazyRoute for Issue4285 {
+    fn data() -> Self {
+        Self {
+            data: Resource::new(|| (), |_| slow_call()),
+        }
+    }
+
+    async fn view(this: Self) -> AnyView {
+        let Issue4285 { data } = this;
+        view! {
+            <Suspense>
+                {move || {
+                    Suspend::new(async move {
+                        let data = data.await;
+                        view! {
+                            <p id="result">{data}</p>
+                        }
+                    })
+                }}
+            </Suspense>
+        }
+        .into_any()
+    }
+}
+
+#[server]
+async fn slow_call() -> Result<i32, ServerFnError> {
+    tokio::time::sleep(std::time::Duration::from_millis(250)).await;
+    Ok(42)
+}
examples/regression/src/issue_4296.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
+use leptos::prelude::*;
+#[allow(unused_imports)]
+use leptos_router::{
+    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
+};
+use leptos_router::{hooks::use_query_map, LazyRoute};
+
+#[component]
+pub fn Routes4296() -> impl MatchNestedRoutes + Clone {
+    view! {
+        <Route path=path!("4296") view={Lazy::<Issue4296>::new()}/>
+    }
+    .into_inner()
+}
+
+struct Issue4296 {
+    query: Signal<Option<String>>,
+}
+
+impl LazyRoute for Issue4296 {
+    fn data() -> Self {
+        let query = use_query_map();
+        let query = Signal::derive(move || query.read().get("q"));
+        Self { query }
+    }
+
+    async fn view(this: Self) -> AnyView {
+        let Issue4296 { query } = this;
+        view! {
+            <a href="?q=abc">"abc"</a>
+            <a href="?q=def">"def"</a>
+            <p id="result">{move || format!("{:?}", query.get())}</p>
+        }
+        .into_any()
+    }
+}
examples/regression/src/issue_4324.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
+use leptos::prelude::*;
+#[allow(unused_imports)]
+use leptos_router::{
+    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
+};
+
+#[component]
+pub fn Routes4324() -> impl MatchNestedRoutes + Clone {
+    view! {
+        <Route path=path!("4324") view=Issue4324/>
+    }
+    .into_inner()
+}
+
+#[component]
+pub fn Issue4324() -> impl IntoView {
+    view! {
+        <a href="/4324/">"This page"</a>
+        <p id="result">"Issue4324"</p>
+    }
+}
@@ -2,6 +2,9 @@ pub mod app;
 mod issue_4005;
 mod issue_4088;
 mod issue_4217;
+mod issue_4285;
+mod issue_4296;
+mod issue_4324;
 mod pr_4015;
 mod pr_4091;
@@ -440,7 +440,14 @@ pub fn FileUploadWithProgress() -> impl IntoView {
         let mut entry =
             FILES.entry(filename.to_string()).or_insert_with(|| {
                 println!("[{filename}]\tinserting channel");
-                let (tx, rx) = broadcast(128);
+                // NOTE: this channel capacity is set arbitrarily for this demo code.
+                // it allows for up to exactly 1048 chunks to be sent, which sets an upper cap
+                // on upload size (the precise details vary by client)
+                // in a real system, you will want to create some more reasonable ways of
+                // sending and sharing notifications
+                //
+                // see https://github.com/leptos-rs/leptos/issues/4397 for related discussion
+                let (tx, rx) = broadcast(1048);
                 File { total: 0, tx, rx }
             });
         entry.total += len;
examples/subsecond_hot_patch/.gitignore (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
+# Generated by Cargo
+# will have compiled files and executables
+/target
+.DS_Store
+
+# These are backup files generated by rustfmt
+**/*.rs.bk
examples/subsecond_hot_patch/Cargo.toml (new file, 13 lines)
@@ -0,0 +1,13 @@
+[package]
+name = "subsecond_hot_patch"
+version = "0.1.0"
+authors = ["Greg Johnston <greg.johnston@gmail.com>"]
+edition = "2021"
+
+[dependencies]
+leptos = { path = "../../leptos", features = ["csr", "subsecond"] }
+leptos_router = { path = "../../router" }
+
+[features]
+default = ["web"]
+web = []
examples/subsecond_hot_patch/Dioxus.toml (new file, 21 lines)
@@ -0,0 +1,21 @@
+[application]
+
+[web.app]
+
+# HTML title tag content
+title = "ltest"
+
+# include `assets` in web platform
+[web.resource]
+
+# Additional CSS style files
+style = []
+
+# Additional JavaScript files
+script = []
+
+[web.resource.dev]
+
+# Javascript code file
+# serve: [dev-server] only
+script = []
examples/subsecond_hot_patch/Makefile.toml (new file, 1 line)
@@ -0,0 +1 @@
+extend = [{ path = "../cargo-make/main.toml" }]
examples/subsecond_hot_patch/README.md (new file, 31 lines)
@@ -0,0 +1,31 @@
+# Hot Patching with `dx`
+
+This is an experimental example exploring how to combine Leptos with the binary hot-patching provided by Dioxus's `subsecond` library and `dx` CLI.
+
+### Serving Your App
+
+This requires installing the Dioxus CLI version 0.7.0. At the time of writing this README, that does not yet have a stable release. Once `dioxus-cli` 0.7.0 has been released, you should use the latest stable release. Until then, I'd suggest installing from git:
+
+```sh
+cargo install dioxus-cli --git https://github.com/DioxusLabs/dioxus
+```
+
+Then you can run the example with `dx serve --hot-patch --platform web`.
+
+### Hot Patching
+
+Changes to your application should be reflected in your app without a full rebuild and reload.
+
+### Limitations
+
+Currently we only support hot-patching for reactive view functions. You probably want to use `AnyView` (via `.into_any()`) on any views that will be hot-patched, so they can be rebuilt correctly despite their types changing when the structure of the view tree changes.
+
+If you are using `leptos_router` this actually works quite well, as every route’s view is erased to `AnyView` and the router itself is a reactive view function: in other words, changes inside any route should be hot-patched in any case.
+
+Note that any hot-patch will cause all render effects to run again. This means that some client-side state (like the values of signals) will be wiped out.
+
+### Build Tooling
+
+The preference of the Dioxus team is that all hot-patching work built on their `subsecond` library also use `dioxus-cli`. As this demo shows, it's completely possible to use `dioxus-cli` to build and run a Leptos project. We do not plan to build `subsecond` into our own build tooling at this time.
+
+**This is an experiment/POC. It is being published because members of the community have found it useful and have asked for the support to be merged in its current state. Further development and bugfixes are a relatively low priority at this time.**
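As a concrete illustration of the `.into_any()` advice in the Limitations section of that README, here is a minimal sketch — a hypothetical component, not taken from this diff — that erases its view to `AnyView` so it can be rebuilt when a hot patch changes the shape of the view tree:

```rust
use leptos::prelude::*;

// Hypothetical component for illustration only. Erasing the returned view with
// `.into_any()` gives it a stable type (`AnyView`), so the renderer can swap it
// out even if an edit changes the structure of the `view!` tree.
#[component]
fn Dashboard() -> impl IntoView {
    let (count, set_count) = signal(0);
    view! {
        <h1>"Dashboard"</h1>
        <button on:click=move |_| set_count.update(|n| *n += 1)>
            "Clicks: " {count}
        </button>
    }
    .into_any()
}
```

With `dx serve --hot-patch --platform web` running, editing the `view!` body of such a component should be patched in place, though per the README note the `count` signal will reset because render effects run again after a patch.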
examples/subsecond_hot_patch/assets/favicon.ico (new binary file, not shown; 130 KiB)
examples/subsecond_hot_patch/assets/header.svg (new file, 20 lines; diff suppressed because one or more lines are too long; 23 KiB)
examples/subsecond_hot_patch/assets/main.css (new file, 46 lines)
@@ -0,0 +1,46 @@
+/* App-wide styling */
+body {
+    background-color: #0f1116;
+    color: #ffffff;
+    font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
+    margin: 20px;
+}
+
+#hero {
+    margin: 0;
+    display: flex;
+    flex-direction: column;
+    justify-content: center;
+    align-items: center;
+}
+
+#links {
+    width: 400px;
+    text-align: left;
+    font-size: x-large;
+    color: white;
+    display: flex;
+    flex-direction: column;
+}
+
+#links a {
+    color: white;
+    text-decoration: none;
+    margin-top: 20px;
+    margin: 10px 0px;
+    border: white 1px solid;
+    border-radius: 5px;
+    padding: 10px;
+}
+
+#links a:hover {
+    background-color: #1f1f1f;
+    cursor: pointer;
+}
+
+#header {
+    max-width: 1200px;
+}
examples/subsecond_hot_patch/src/main.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
+use leptos::{prelude::*, subsecond::connect_to_hot_patch_messages};
+use leptos_router::{
+    components::{Route, Router, Routes},
+    path,
+};
+
+fn main() {
+    // connect to DX CLI and patch the WASM binary whenever we receive a message
+    connect_to_hot_patch_messages();
+
+    // wrapping App here in a closure so we can hot-reload it, because we only do that
+    // for reactive views right now. changing anything will re-run App and update the view
+    mount_to_body(|| App);
+}
+
+#[component]
+fn App() -> impl IntoView {
+    view! {
+        <nav>
+            <a href="/">"Home"</a>
+            <a href="/about">"About"</a>
+        </nav>
+        <Router>
+            <Routes fallback=|| "Not found">
+                <Route path=path!("/") view=HomePage/>
+                <Route path=path!("/about") view=About/>
+            </Routes>
+        </Router>
+    }
+}
+
+#[component]
+fn HomePage() -> impl IntoView {
+    view! {
+        <h1>"Home Page"</h1>
+    }
+}
+
+#[component]
+fn About() -> impl IntoView {
+    view! {
+        <h1>"About"</h1>
+    }
+}
examples/tailwind_csr/Trunk.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
+[tools]
+tailwindcss = "4.1.13"
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Actix integrations for the Leptos web framework."
-version = "0.8.5"
+version = "0.8.6"
 rust-version.workspace = true
 edition.workspace = true
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Utilities to help build server integrations for the Leptos web framework."
-version = "0.8.5"
+version = "0.8.6"
 rust-version.workspace = true
 edition.workspace = true
@@ -68,7 +68,8 @@ pub trait ExtendResponse: Sized {
         let nonce =
             use_nonce().map(|n| n.to_string()).unwrap_or_default();
         if let Some(manifest) = use_context::<WasmSplitManifest>() {
-            let (pkg_path, manifest) = &*manifest.0.read_value();
+            let (pkg_path, manifest, wasm_split_file) =
+                &*manifest.0.read_value();
             let prefetches = prefetches.0.read_value();
 
             let all_prefetches = prefetches.iter().flat_map(|key| {
@@ -90,7 +91,7 @@ pub trait ExtendResponse: Sized {
                 .to_html();
             }
             _ = view! {
-                <Link rel="modulepreload" href=format!("{pkg_path}/__wasm_split.js") crossorigin=nonce/>
+                <Link rel="modulepreload" href=format!("{pkg_path}/{wasm_split_file}") crossorigin=nonce/>
             }
             .to_html();
         }
@@ -120,7 +121,7 @@ pub trait ExtendResponse: Sized {
             // drop the owner, cleaning up the reactive runtime,
             // once the stream is over
             .chain(once(async move {
-                owner.unset();
+                owner.unset_with_forced_cleanup();
                 Default::default()
             })),
         ));
@@ -1,9 +1,10 @@
 [package]
 name = "leptos"
-version = "0.8.8"
+version = "0.8.12"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 homepage = "https://leptos.dev/"
 description = "Leptos is a full-stack, isomorphic Rust web framework leveraging fine-grained reactivity to build declarative user interfaces."
 readme = "../README.md"
 rust-version.workspace = true
@@ -57,7 +58,10 @@ serde_qs = { workspace = true, default-features = true }
 slotmap = { workspace = true, default-features = true }
 futures = { workspace = true, default-features = true }
 send_wrapper = { workspace = true, default-features = true }
-wasm_split_helpers.workspace = true
+wasm_split_helpers = { workspace = true, default-features = true }
+subsecond = { workspace = true, default-features = true, optional = true }
+dioxus-cli-config = { workspace = true, default-features = true, optional = true }
+dioxus-devtools = { workspace = true, default-features = true, optional = true }
 
 [features]
 hydration = [
@@ -85,6 +89,12 @@ ssr = [
 ]
 nightly = ["leptos_macro/nightly", "reactive_graph/nightly", "tachys/nightly"]
 rkyv = ["server_fn/rkyv", "leptos_server/rkyv"]
 bitcode = ["server_fn/bitcode"]
 serde-lite = ["server_fn/serde-lite", "leptos_server/serde-lite"]
 cbor = ["server_fn/cbor"]
 msgpack = ["server_fn/msgpack"]
 postcard = ["server_fn/postcard"]
 multipart = ["server_fn/multipart"]
 tracing = [
     "dep:tracing",
     "reactive_graph/tracing",
@@ -102,6 +112,16 @@ trace-component-props = [
 ]
 delegation = ["tachys/delegation"]
 islands-router = ["tachys/mark_branches"]
+subsecond = [
+    "reactive_graph/subsecond",
+    "dep:subsecond",
+    "dep:dioxus-cli-config",
+    "dep:dioxus-devtools",
+    "web-sys/Location",
+    "web-sys/MessageEvent",
+    "web-sys/WebSocket",
+    "web-sys/Window",
+]
 
 [dev-dependencies]
 tokio = { features = [
@@ -134,13 +154,18 @@ denylist = [
     "trace-component-props",
     "spin",
     "islands",
     "bitcode",
     "serde-lite",
     "cbor",
     "msgpack",
     "postcard",
     "multipart",
 ]
 skip_feature_sets = [
     ["csr", "ssr"],
     ["csr", "hydrate"],
     ["ssr", "hydrate"],
     ["serde", "serde-lite"],
     ["serde-lite", "miniserde"],
     ["serde", "miniserde"],
     ["serde", "rkyv"],
     ["miniserde", "rkyv"],
@@ -262,6 +262,16 @@ where
     }
 }
 
+impl<C> From<View<C>> for ViewFn
+where
+    C: Clone + Send + Sync + 'static,
+    View<C>: IntoAny,
+{
+    fn from(value: View<C>) -> Self {
+        Self(Arc::new(move || value.clone().into_any()))
+    }
+}
+
 impl ViewFn {
     /// Execute the wrapped function
     pub fn run(&self) -> AnyView {
@@ -289,6 +299,16 @@ where
     }
 }
 
+impl<C> From<View<C>> for ViewFnOnce
+where
+    C: Send + Sync + 'static,
+    View<C>: IntoAny,
+{
+    fn from(value: View<C>) -> Self {
+        Self(Box::new(move || value.into_any()))
+    }
+}
+
 impl ViewFnOnce {
     /// Execute the wrapped function
     pub fn run(self) -> AnyView {
@@ -65,16 +65,56 @@ pub fn HydrationScripts(
     if let Some(splits) = SPLIT_MANIFEST.get_or_init(|| {
         let root = root.clone().unwrap_or_default();
 
+        let (wasm_split_js, wasm_split_manifest) = if options.hash_files {
+            let hash_path = std::env::current_exe()
+                .map(|path| {
+                    path.parent().map(|p| p.to_path_buf()).unwrap_or_default()
+                })
+                .unwrap_or_default()
+                .join(options.hash_file.as_ref());
+            let hashes = std::fs::read_to_string(&hash_path)
+                .expect("failed to read hash file");
+
+            let mut split =
+                "__wasm_split.______________________.js".to_string();
+            let mut manifest = "__wasm_split_manifest.json".to_string();
+            for line in hashes.lines() {
+                let line = line.trim();
+                if !line.is_empty() {
+                    if let Some((file, hash)) = line.split_once(':') {
+                        if file == "manifest" {
+                            manifest.clear();
+                            manifest.push_str("__wasm_split_manifest.");
+                            manifest.push_str(hash.trim());
+                            manifest.push_str(".json");
+                        }
+                        if file == "split" {
+                            split.clear();
+                            split.push_str("__wasm_split.");
+                            split.push_str(hash.trim());
+                            split.push_str(".js");
+                        }
+                    }
+                }
+            }
+            (split, manifest)
+        } else {
+            (
+                "__wasm_split.______________________.js".to_string(),
+                "__wasm_split_manifest.json".to_string(),
+            )
+        };
+
         let site_dir = &options.site_root;
         let pkg_dir = &options.site_pkg_dir;
         let path = PathBuf::from(site_dir.to_string());
-        let path = path
-            .join(pkg_dir.to_string())
-            .join("__wasm_split_manifest.json");
+        let path = path.join(pkg_dir.to_string()).join(wasm_split_manifest);
        let file = std::fs::read_to_string(path).ok()?;
 
        let manifest = WasmSplitManifest(ArcStoredValue::new((
            format!("{root}/{pkg_dir}"),
            serde_json::from_str(&file).expect("could not read manifest file"),
+           wasm_split_js,
        )));
 
        Some(manifest)
@@ -1,3 +1,7 @@
+if (window.location.protocol === 'https:') {
+    protocol = 'wss://';
+}
+
 let host = window.location.hostname;
 let ws = new WebSocket(`${protocol}${host}:${reload_port}/live_reload`);
 ws.onmessage = (ev) => {
@@ -1,5 +1,4 @@
-#![deny(missing_docs)]
 #![forbid(unsafe_code)]
 
 //! # About Leptos
 //!
@@ -85,12 +84,22 @@
 //! # Feature Flags
 //!
 //! - **`nightly`**: On `nightly` Rust, enables the function-call syntax for signal getters and setters.
 //!   Also enables some experimental optimizations that improve the handling of static strings and
 //!   the performance of the `template! {}` macro.
 //! - **`csr`** Client-side rendering: Generate DOM nodes in the browser.
 //! - **`ssr`** Server-side rendering: Generate an HTML string (typically on the server).
 //! - **`islands`** Activates “islands mode,” in which components are not made interactive on the
 //!   client unless they use the `#[island]` macro.
 //! - **`hydrate`** Hydration: use this to add interactivity to an SSRed Leptos app.
-//! - **`rkyv`** In SSR/hydrate mode, uses [`rkyv`](https://docs.rs/rkyv/latest/rkyv/) to serialize resources and send them
-//!   from the server to the client.
 //! - **`nonce`** Adds support for nonces to be added as part of a Content Security Policy.
+//! - **`rkyv`** In SSR/hydrate mode, enables using [`rkyv`](https://docs.rs/rkyv/latest/rkyv/) to serialize resources.
 //! - **`tracing`** Adds support for [`tracing`](https://docs.rs/tracing/latest/tracing/).
 //! - **`trace-component-props`** Adds `tracing` support for component props.
 //! - **`delegation`** Uses event delegation rather than the browser’s native event handling
 //!   system. (This improves the performance of creating large numbers of elements simultaneously,
 //!   in exchange for occasional edge cases in which events behave differently from native browser
 //!   events.)
 //! - **`rustls`** Use `rustls` for server functions.
 //!
 //! **Important Note:** You must enable one of `csr`, `hydrate`, or `ssr` to tell Leptos
 //! which mode your app is operating in. You should only enable one of these per build target,
@@ -296,9 +305,15 @@ pub use tachys::mathml as math;
 #[doc(inline)]
 pub use tachys::svg;
 
+#[cfg(feature = "subsecond")]
+/// Utilities for using binary hot-patching with [`subsecond`].
+pub mod subsecond;
+
 /// Utilities for simple isomorphic logging to the console or terminal.
 pub mod logging {
-    pub use leptos_dom::{debug_warn, error, log, warn};
+    pub use leptos_dom::{
+        debug_error, debug_log, debug_warn, error, log, warn,
+    };
 }
 
 /// Utilities for working with asynchronous tasks.
@@ -350,7 +365,8 @@ pub use serde_json;
 pub use tracing;
 #[doc(hidden)]
 pub use wasm_bindgen;
 pub use wasm_split_helpers;
 #[doc(hidden)]
 pub use wasm_split_helpers as wasm_split;
 #[doc(hidden)]
 pub use web_sys;
@@ -382,7 +398,8 @@ pub fn prefetch_lazy_fn_on_server(id: &'static str) {
 #[derive(Clone, Debug, Default)]
 pub struct WasmSplitManifest(
     pub reactive_graph::owner::ArcStoredValue<(
-        String,
-        std::collections::HashMap<String, Vec<String>>,
+        String, // the pkg root
+        std::collections::HashMap<String, Vec<String>>, // preloads
+        String, // the name of the __wasm_split.js file
     )>,
 );
leptos/src/subsecond.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
+use dioxus_devtools::DevserverMsg;
+use wasm_bindgen::{prelude::Closure, JsCast};
+use web_sys::{js_sys::JsString, MessageEvent, WebSocket};
+
+/// Sets up a websocket connect to the `dx` CLI, waiting for incoming hot-patching messages
+/// and patching the WASM binary appropriately.
+//
+// Note: This is a stripped-down version of Dioxus's `make_ws` from `dioxus_web`
+// It's essentially copy-pasted here because it's not pub there.
+// Would love to just take a dependency on that to be able to use it and deduplicate.
+//
+// https://github.com/DioxusLabs/dioxus/blob/main/packages/web/src/devtools.rs#L36
+pub fn connect_to_hot_patch_messages() {
+    // Get the location of the devserver, using the current location plus the /_dioxus path
+    // The idea here being that the devserver is always located on the /_dioxus behind a proxy
+    let location = web_sys::window().unwrap().location();
+    let url = format!(
+        "{protocol}//{host}/_dioxus?build_id={build_id}",
+        protocol = match location.protocol().unwrap() {
+            prot if prot == "https:" => "wss:",
+            _ => "ws:",
+        },
+        host = location.host().unwrap(),
+        build_id = dioxus_cli_config::build_id(),
+    );
+
+    let ws = WebSocket::new(&url).unwrap();
+
+    ws.set_onmessage(Some(
+        Closure::<dyn FnMut(MessageEvent)>::new(move |e: MessageEvent| {
+            let Ok(text) = e.data().dyn_into::<JsString>() else {
+                return;
+            };
+
+            // The devserver messages have some &'static strs in them, so we need to leak the source string
+            let string: String = text.into();
+            let string = Box::leak(string.into_boxed_str());
+
+            if let Ok(DevserverMsg::HotReload(msg)) =
+                serde_json::from_str::<DevserverMsg>(string)
+            {
+                if let Some(jump_table) = msg.jump_table.as_ref().cloned() {
+                    if msg.for_build_id == Some(dioxus_cli_config::build_id()) {
+                        let our_pid = if cfg!(target_family = "wasm") {
+                            None
+                        } else {
+                            Some(std::process::id())
+                        };
+
+                        if msg.for_pid == our_pid {
+                            unsafe { subsecond::apply_patch(jump_table) }
+                                .unwrap();
+                        }
+                    }
+                }
+            }
+        })
+        .into_js_value()
+        .as_ref()
+        .unchecked_ref(),
+    ));
+}
@@ -32,12 +32,12 @@ use tachys::{
 };
 use throw_error::ErrorHookFuture;
 
-/// If any [`Resource`](leptos_reactive::Resource) is read in the `children` of this
+/// If any [`Resource`](crate::prelude::Resource) is read in the `children` of this
 /// component, it will show the `fallback` while they are loading. Once all are resolved,
 /// it will render the `children`.
 ///
 /// Each time one of the resources is loading again, it will fall back. To keep the current
-/// children instead, use [Transition](crate::Transition).
+/// children instead, use [Transition](crate::prelude::Transition).
 ///
 /// Note that the `children` will be rendered initially (in order to capture the fact that
 /// those resources are read under the suspense), so you cannot assume that resources read
@@ -16,11 +16,11 @@ use reactive_graph::{
 use slotmap::{DefaultKey, SlotMap};
 use tachys::reactive_graph::OwnedView;
 
-/// If any [`Resource`](leptos_reactive::Resource) is read in the `children` of this
+/// If any [`Resource`](crate::prelude::Resource) is read in the `children` of this
 /// component, it will show the `fallback` while they are loading. Once all are resolved,
 /// it will render the `children`.
 ///
-/// Unlike [`Suspense`](crate::Suspense), this will not fall
+/// Unlike [`Suspense`](crate::prelude::Suspense), this will not fall
 /// back to the `fallback` state if there are further changes after the initial load.
 ///
 /// Note that the `children` will be rendered initially (in order to capture the fact that
@@ -221,18 +221,15 @@ fn env_w_default(
 /// An enum that can be used to define the environment Leptos is running in.
 /// Setting this to the `PROD` variant will not include the WebSocket code for `cargo-leptos` watch mode.
 /// Defaults to `DEV`.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
+#[derive(
+    Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, Default,
+)]
 pub enum Env {
     PROD,
+    #[default]
     DEV,
 }
 
-impl Default for Env {
-    fn default() -> Self {
-        Self::DEV
-    }
-}
-
 fn env_from_str(input: &str) -> Result<Env, LeptosConfigError> {
     let sanitized = input.to_lowercase();
     match sanitized.as_ref() {
@@ -279,18 +276,15 @@ impl TryFrom<String> for Env {
 
 /// An enum that can be used to define the websocket protocol Leptos uses for hotreloading
 /// Defaults to `ws`.
-#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq)]
+#[derive(
+    Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq, Eq, Default,
+)]
 pub enum ReloadWSProtocol {
+    #[default]
     WS,
     WSS,
 }
 
-impl Default for ReloadWSProtocol {
-    fn default() -> Self {
-        Self::WS
-    }
-}
-
 fn ws_from_str(input: &str) -> Result<ReloadWSProtocol, LeptosConfigError> {
     let sanitized = input.to_lowercase();
     match sanitized.as_ref() {
@@ -1,6 +1,6 @@
 [package]
 name = "leptos_dom"
-version = "0.8.6"
+version = "0.8.7"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
@@ -463,7 +463,7 @@ pub fn set_interval_with_handle(
 
 #[inline(never)]
 fn si(
-    cb: Box<dyn Fn()>,
+    cb: Box<dyn FnMut()>,
     duration: Duration,
 ) -> Result<IntervalHandle, JsValue> {
     let cb = Closure::wrap(cb).into_js_value();
@@ -1,6 +1,6 @@
 [package]
 name = "leptos_macro"
-version = "0.8.8"
+version = "0.8.11"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
@@ -2,12 +2,12 @@ use convert_case::{Case, Casing};
 use proc_macro::TokenStream;
 use proc_macro2::Ident;
 use proc_macro_error2::abort;
-use quote::quote;
+use quote::{format_ident, quote};
 use std::{
     hash::{DefaultHasher, Hash, Hasher},
     mem,
 };
-use syn::{parse_macro_input, ItemFn};
+use syn::{parse_macro_input, parse_quote, ItemFn, ReturnType, Stmt};
 
 pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
     let name = if !args.is_empty() {
@@ -16,7 +16,7 @@ pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
         None
     };
 
-    let mut fun = syn::parse::<ItemFn>(s).unwrap_or_else(|e| {
+    let fun = syn::parse::<ItemFn>(s).unwrap_or_else(|e| {
         abort!(e.span(), "`lazy` can only be used on a function")
     });
@@ -47,29 +47,50 @@ pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
 
     let is_wasm = cfg!(feature = "csr") || cfg!(feature = "hydrate");
     if is_wasm {
+        let mut fun = fun;
+        let mut return_wrapper = None;
+        if was_async {
+            fun.sig.asyncness = None;
+            let ty = match &fun.sig.output {
+                ReturnType::Default => quote! { () },
+                ReturnType::Type(_, ty) => quote! { #ty },
+            };
+            let sync_output: ReturnType = parse_quote! {
+                -> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = #ty> + ::std::marker::Send + ::std::marker::Sync>>
+            };
+            let async_output = mem::replace(&mut fun.sig.output, sync_output);
+            let stmts = mem::take(&mut fun.block.stmts);
+            fun.block.stmts.push(Stmt::Expr(parse_quote! {
+                ::std::boxed::Box::pin(::leptos::__reexports::send_wrapper::SendWrapper::new(async move {
+                    #( #stmts )*
+                }))
+            }, None));
+            return_wrapper = Some(quote! {
+                return_wrapper(let future = _; { future.await } #async_output),
+            });
+        }
+        let preload_name = format_ident!("__preload_{}", fun.sig.ident);
+
         quote! {
-            #[::leptos::wasm_split_helpers::wasm_split(
+            #[::leptos::wasm_split::wasm_split(
                 #unique_name,
-                ::leptos::__reexports::send_wrapper
+                wasm_split_path = ::leptos::wasm_split,
+                preload(#[doc(hidden)] #[allow(non_snake_case)] #preload_name),
+                #return_wrapper
             )]
            #fun
        }
    } else {
        let mut fun = fun;
+       if !was_async {
+           fun.sig.asyncness = Some(Default::default());
+       }
 
        let statements = &mut fun.block.stmts;
        let old_statements = mem::take(statements);
-       statements.push(
-           syn::parse(
-               quote! {
-                   ::leptos::prefetch_lazy_fn_on_server(#unique_name_str);
-               }
-               .into(),
-           )
-           .unwrap(),
-       );
+       statements.push(parse_quote! {
+           ::leptos::prefetch_lazy_fn_on_server(#unique_name_str);
+       });
        statements.extend(old_statements);
        quote! { #fun }
    }
@@ -25,9 +25,8 @@ use std::{
 use syn::{
     punctuated::Pair::{End, Punctuated},
     spanned::Spanned,
-    Expr,
-    Expr::Tuple,
-    ExprArray, ExprLit, ExprRange, Lit, LitStr, RangeLimits, Stmt,
+    Expr::{self, Tuple},
+    ExprArray, ExprLit, ExprPath, ExprRange, Lit, LitStr, RangeLimits, Stmt,
 };
 
 #[derive(Clone, Copy, PartialEq, Eq)]
@@ -1679,7 +1678,7 @@ fn attribute_value(
 }
 
 // Keep list alphabetized for binary search
-const TYPED_EVENTS: [&str; 126] = [
+const TYPED_EVENTS: [&str; 127] = [
     "DOMContentLoaded",
     "abort",
     "afterprint",
@@ -1775,6 +1774,7 @@ const TYPED_EVENTS: [&str; 126] = [
     "reset",
     "resize",
     "scroll",
+    "scrollend",
     "securitypolicyviolation",
     "seeked",
     "seeking",
@@ -1871,6 +1871,28 @@ pub(crate) fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
     }
 }
 
+pub(crate) fn full_path_from_tag_name(tag_name: &NodeName) -> Option<ExprPath> {
+    match tag_name {
+        NodeName::Path(path) => Some(path.clone()),
+        NodeName::Block(_) => {
+            let span = tag_name.span();
+            proc_macro_error2::emit_error!(
+                span,
+                "blocks not allowed in tag-name position"
+            );
+            None
+        }
+        _ => {
+            let span = tag_name.span();
+            proc_macro_error2::emit_error!(
+                span,
+                "punctuated names not allowed in slots"
+            );
+            None
+        }
+    }
+}
+
 pub(crate) fn directive_call_from_attribute_node(
     attr: &KeyedAttribute,
     directive_name: &str,
@@ -1,6 +1,6 @@
 use super::{
     component_builder::maybe_optimised_component_children,
-    convert_to_snake_case, ident_from_tag_name,
+    convert_to_snake_case, full_path_from_tag_name,
 };
 use crate::view::{fragment_to_tokens, utils::filter_prefixed_attrs, TagType};
 use proc_macro2::{Ident, TokenStream, TokenTree};
@@ -24,7 +24,7 @@ pub(crate) fn slot_to_tokens(
         node.name().to_string()
     });
 
-    let component_name = ident_from_tag_name(node.name());
+    let component_path = full_path_from_tag_name(node.name());
 
     let Some(parent_slots) = parent_slots else {
         proc_macro_error2::emit_error!(
@@ -190,7 +190,7 @@ pub(crate) fn slot_to_tokens(
 
     let slot = quote_spanned! {node.span()=>
         {
-            let slot = #component_name::builder()
+            let slot = #component_path::builder()
                 #(#props)*
                 #(#slots)*
                 #children
@@ -15,19 +15,18 @@
       }
     },
     "node_modules/@playwright/test": {
-      "version": "1.44.1",
-      "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
-      "integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
+      "version": "1.56.1",
+      "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
+      "integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "playwright": "1.44.1"
+        "playwright": "1.56.1"
       },
       "bin": {
         "playwright": "cli.js"
       },
       "engines": {
-        "node": ">=16"
+        "node": ">=18"
       }
     },
     "node_modules/@types/node": {
@@ -46,7 +45,6 @@
       "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
       "dev": true,
      "hasInstallScript": true,
      "license": "MIT",
      "optional": true,
      "os": [
        "darwin"
@@ -56,35 +54,33 @@
      }
    },
    "node_modules/playwright": {
-      "version": "1.44.1",
-      "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
-      "integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
+      "version": "1.56.1",
+      "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
+      "integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
      "dev": true,
      "license": "Apache-2.0",
      "dependencies": {
-        "playwright-core": "1.44.1"
+        "playwright-core": "1.56.1"
      },
      "bin": {
        "playwright": "cli.js"
      },
      "engines": {
-        "node": ">=16"
+        "node": ">=18"
      },
      "optionalDependencies": {
        "fsevents": "2.3.2"
      }
    },
    "node_modules/playwright-core": {
-      "version": "1.44.1",
-      "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
-      "integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
+      "version": "1.56.1",
+      "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
+      "integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
      "dev": true,
      "license": "Apache-2.0",
      "bin": {
        "playwright-core": "cli.js"
      },
      "engines": {
-        "node": ">=16"
+        "node": ">=18"
      }
    },
    "node_modules/typescript": {
@@ -111,12 +107,12 @@
    },
    "dependencies": {
      "@playwright/test": {
-        "version": "1.44.1",
-        "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
-        "integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
+        "version": "1.56.1",
+        "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
+        "integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
        "dev": true,
        "requires": {
-          "playwright": "1.44.1"
+          "playwright": "1.56.1"
        }
      },
      "@types/node": {
@@ -136,19 +132,19 @@
        "optional": true
      },
      "playwright": {
-        "version": "1.44.1",
-        "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
-        "integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
+        "version": "1.56.1",
+        "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
+        "integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
        "dev": true,
        "requires": {
          "fsevents": "2.3.2",
          "playwright-core": "1.44.1"
|
||||
"playwright-core": "1.56.1"
|
||||
}
|
||||
},
|
||||
"playwright-core": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
|
||||
"integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
|
||||
"dev": true
|
||||
},
|
||||
"typescript": {
|
||||
|
||||
@@ -1,6 +1,6 @@
[package]
name = "reactive_graph"
version = "0.2.6"
version = "0.2.9"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
@@ -27,6 +27,7 @@ async-lock = { workspace = true, default-features = true }
send_wrapper = { features = [
"futures",
], workspace = true, default-features = true }
subsecond = { workspace = true, default-features = true, optional = true }
indexmap = { workspace = true, default-features = true }

[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
@@ -51,6 +52,7 @@ hydration = ["dep:hydration_context"]
effects = [
] # whether to run effects: should be disabled for something like server rendering
sandboxed-arenas = []
subsecond = ["dep:subsecond"]

[package.metadata.docs.rs]
all-features = true

@@ -1,7 +1,8 @@
use crate::{
computed::{ArcMemo, Memo},
computed::{ArcMemo, Memo, ScopedFuture},
diagnostics::is_suppressing_resource_load,
owner::{ArcStoredValue, ArenaItem},
graph::untrack,
owner::{ArcStoredValue, ArenaItem, Owner},
send_wrapper_ext::SendOption,
signal::{ArcMappedSignal, ArcRwSignal, MappedSignal, RwSignal},
traits::{DefinedAt, Dispose, Get, GetUntracked, GetValue, Update, Write},
@@ -199,13 +200,18 @@ where
I: Send + Sync,
O: Send + Sync,
{
let owner = Owner::current().unwrap_or_default();
ArcAction {
in_flight: ArcRwSignal::new(0),
input: ArcRwSignal::new(SendOption::new(None)),
value: ArcRwSignal::new(SendOption::new(value)),
version: Default::default(),
dispatched: Default::default(),
action_fn: Arc::new(move |input| Box::pin(action_fn(input))),
action_fn: Arc::new(move |input| {
Box::pin(owner.with(|| {
ScopedFuture::new_untracked(untrack(|| action_fn(input)))
}))
}),
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: Location::caller(),
}
@@ -370,6 +376,7 @@ where
F: Fn(&I) -> Fu + 'static,
Fu: Future<Output = O> + 'static,
{
let owner = Owner::current().unwrap_or_default();
let action_fn = SendWrapper::new(action_fn);
ArcAction {
in_flight: ArcRwSignal::new(0),
@@ -378,7 +385,9 @@ where
version: Default::default(),
dispatched: Default::default(),
action_fn: Arc::new(move |input| {
Box::pin(SendWrapper::new(action_fn(input)))
Box::pin(SendWrapper::new(owner.with(|| {
ScopedFuture::new_untracked(untrack(|| action_fn(input)))
})))
}),
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: Location::caller(),
||||
|
||||
@@ -521,9 +521,10 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
{
|
||||
let fun = move || {
|
||||
let fut = fun();
|
||||
let fut = ScopedFuture::new_untracked(async move {
|
||||
SendOption::new(Some(fut.await))
|
||||
});
|
||||
let fut =
|
||||
ScopedFuture::new_untracked_with_diagnostics(async move {
|
||||
SendOption::new(Some(fut.await))
|
||||
});
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
let fut = Sandboxed::new(fut);
|
||||
fut
|
||||
|
||||
@@ -54,11 +54,55 @@ impl<Fut> ScopedFuture<Fut> {
|
||||
fut,
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn new_untracked_with_diagnostics(
|
||||
fut: Fut,
|
||||
) -> ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
ScopedFutureUntrackedWithDiagnostics {
|
||||
owner,
|
||||
observer: None,
|
||||
fut,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fut: Future> Future for ScopedFuture<Fut> {
|
||||
type Output = Fut::Output;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
this.owner.with(|| {
|
||||
#[cfg(debug_assertions)]
|
||||
let _maybe_guard = if this.observer.is_none() {
|
||||
Some(crate::diagnostics::SpecialNonReactiveZone::enter())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
this.observer.with_observer(|| this.fut.poll(cx))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pin_project! {
|
||||
/// A [`Future`] wrapper that sets the [`Owner`] and [`Observer`] before polling the inner
|
||||
/// `Future`, output of [`ScopedFuture::new_untracked_with_diagnostics`].
|
||||
///
|
||||
/// In leptos 0.9 this will be replaced with `ScopedFuture` itself.
|
||||
#[derive(Clone)]
|
||||
pub struct ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
owner: Owner,
|
||||
observer: Option<AnySubscriber>,
|
||||
#[pin]
|
||||
fut: Fut,
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fut: Future> Future for ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
type Output = Fut::Output;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
this.owner
|
||||
|
||||
@@ -65,6 +65,7 @@ impl Dispose for ImmediateEffect {
|
||||
|
||||
impl ImmediateEffect {
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this requires a `Fn` function because it might recurse.
|
||||
/// Use [Self::new_mut] to pass a `FnMut` function, it'll panic on recursion.
|
||||
@@ -82,6 +83,7 @@ impl ImmediateEffect {
|
||||
Self { inner: Some(inner) }
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics on recursion or if triggered in parallel. Also see [Self::new]
|
||||
@@ -93,8 +95,10 @@ impl ImmediateEffect {
|
||||
Self::new(move || fun.try_lock().expect(MSG)())
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this requires a `Fn` function because it might recurse.
|
||||
/// Use [Self::new_mut_scoped] to pass a `FnMut` function, it'll panic on recursion.
|
||||
/// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
|
||||
#[track_caller]
|
||||
pub fn new_scoped(fun: impl Fn() + Send + Sync + 'static) {
|
||||
@@ -102,6 +106,19 @@ impl ImmediateEffect {
|
||||
|
||||
on_cleanup(move || effect.dispose());
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics on recursion or if triggered in parallel. Also see [Self::new_scoped]
|
||||
#[track_caller]
|
||||
pub fn new_mut_scoped(fun: impl FnMut() + Send + Sync + 'static) {
|
||||
let effect = Self::new_mut(fun);
|
||||
|
||||
on_cleanup(move || effect.dispose());
|
||||
}
|
||||
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
///
|
||||
@@ -130,6 +147,41 @@ impl DefinedAt for ImmediateEffect {
|
||||
}
|
||||
}
|
||||
|
||||
/// Defers any [ImmediateEffect]s from running until the end of the function.
///
/// NOTE: this affects only [ImmediateEffect]s, not other effects.
///
/// NOTE: this is rarely needed, but it is useful for example when multiple signals
/// need to be updated atomically (for example a double-bound signal tree).
pub fn batch<T>(f: impl FnOnce() -> T) -> T {
struct ExecuteOnDrop;
impl Drop for ExecuteOnDrop {
fn drop(&mut self) {
let effects = {
let mut batch = inner::BATCH.write().or_poisoned();
batch.take().unwrap().into_inner().expect("lock poisoned")
};
// TODO: Should we skip the effects if it's panicking?
for effect in effects {
effect.update_if_necessary();
}
}
}
let mut execute_on_drop = None;
{
let mut batch = inner::BATCH.write().or_poisoned();
if batch.is_none() {
execute_on_drop = Some(ExecuteOnDrop);
} else {
// Nested batching has no effect.
}
*batch = Some(batch.take().unwrap_or_default());
}
let ret = f();
drop(execute_on_drop);
ret
}
||||
|
||||
mod inner {
|
||||
use crate::{
|
||||
graph::{
|
||||
@@ -140,6 +192,7 @@ mod inner {
|
||||
owner::Owner,
|
||||
traits::DefinedAt,
|
||||
};
|
||||
use indexmap::IndexSet;
|
||||
use or_poisoned::OrPoisoned;
|
||||
use std::{
|
||||
panic::Location,
|
||||
@@ -147,6 +200,11 @@ mod inner {
|
||||
thread::{self, ThreadId},
|
||||
};
|
||||
|
||||
/// Only the [super::batch] function ever writes to the outer RwLock.
|
||||
/// While the effects will write to the inner one.
|
||||
pub(super) static BATCH: RwLock<Option<RwLock<IndexSet<AnySubscriber>>>> =
|
||||
RwLock::new(None);
|
||||
|
||||
/// Handles subscription logic for effects.
|
||||
///
|
||||
/// To handle parallelism and recursion we assign ordered (1..) ids to each run.
|
||||
@@ -202,6 +260,8 @@ mod inner {
|
||||
fun: impl Fn() + Send + Sync + 'static,
|
||||
) -> Arc<RwLock<EffectInner>> {
|
||||
let owner = Owner::new();
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
let defined_at = Location::caller();
|
||||
|
||||
Arc::new_cyclic(|weak| {
|
||||
let any_subscriber = AnySubscriber(
|
||||
@@ -211,7 +271,7 @@ mod inner {
|
||||
|
||||
RwLock::new(EffectInner {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
defined_at,
|
||||
owner,
|
||||
state: ReactiveNodeState::Dirty,
|
||||
run_count_start: 0,
|
||||
@@ -260,6 +320,17 @@ mod inner {
|
||||
ReactiveNodeState::Dirty => true,
|
||||
};
|
||||
|
||||
{
|
||||
if let Some(batch) = &*BATCH.read().or_poisoned() {
|
||||
let mut batch = batch.write().or_poisoned();
|
||||
let subscriber =
|
||||
self.read().or_poisoned().any_subscriber.clone();
|
||||
|
||||
batch.insert(subscriber);
|
||||
return needs_update;
|
||||
}
|
||||
}
|
||||
|
||||
if needs_update {
|
||||
let mut guard = self.write().or_poisoned();
|
||||
|
||||
|
||||
@@ -9,6 +9,8 @@ use crate::{
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use or_poisoned::OrPoisoned;
|
||||
#[cfg(feature = "subsecond")]
|
||||
use std::sync::Mutex;
|
||||
use std::{
|
||||
fmt::Debug,
|
||||
future::{Future, IntoFuture},
|
||||
@@ -49,13 +51,39 @@ impl<T> Debug for RenderEffect<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "subsecond")]
|
||||
type CurrentHotPtr = Box<dyn Fn() -> Option<subsecond::HotFnPtr> + Send + Sync>;
|
||||
|
||||
impl<T> RenderEffect<T>
|
||||
where
|
||||
T: 'static,
|
||||
{
|
||||
/// Creates a new render effect, which immediately runs `fun`.
|
||||
pub fn new(fun: impl FnMut(Option<T>) -> T + 'static) -> Self {
|
||||
Self::new_with_value_erased(Box::new(fun), None)
|
||||
#[cfg(feature = "subsecond")]
|
||||
let (hot_fn_ptr, fun) = {
|
||||
let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
|
||||
(
|
||||
{
|
||||
let fun = Arc::downgrade(&fun);
|
||||
let wrapped = send_wrapper::SendWrapper::new(move || {
|
||||
fun.upgrade()
|
||||
.map(|n| n.lock().or_poisoned().ptr_address())
|
||||
});
|
||||
// it's not redundant, it's due to the SendWrapper deref
|
||||
#[allow(clippy::redundant_closure)]
|
||||
Box::new(move || wrapped())
|
||||
},
|
||||
move |prev| fun.lock().or_poisoned().call((prev,)),
|
||||
)
|
||||
};
|
||||
|
||||
Self::new_with_value_erased(
|
||||
Box::new(fun),
|
||||
None,
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr,
|
||||
)
|
||||
}
|
||||
|
||||
/// Creates a new render effect with an initial value.
|
||||
@@ -63,7 +91,30 @@ where
|
||||
fun: impl FnMut(Option<T>) -> T + 'static,
|
||||
initial_value: Option<T>,
|
||||
) -> Self {
|
||||
Self::new_with_value_erased(Box::new(fun), initial_value)
|
||||
#[cfg(feature = "subsecond")]
|
||||
let (hot_fn_ptr, fun) = {
|
||||
let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
|
||||
(
|
||||
{
|
||||
let fun = Arc::downgrade(&fun);
|
||||
let wrapped = send_wrapper::SendWrapper::new(move || {
|
||||
fun.upgrade()
|
||||
.map(|n| n.lock().or_poisoned().ptr_address())
|
||||
});
|
||||
// it's not redundant, it's due to the SendWrapper deref
|
||||
#[allow(clippy::redundant_closure)]
|
||||
Box::new(move || wrapped())
|
||||
},
|
||||
move |prev| fun.lock().or_poisoned().call((prev,)),
|
||||
)
|
||||
};
|
||||
|
||||
Self::new_with_value_erased(
|
||||
Box::new(fun),
|
||||
initial_value,
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr,
|
||||
)
|
||||
}
|
||||
|
||||
/// Creates a new render effect, which immediately runs `fun`.
|
||||
@@ -71,6 +122,11 @@ where
|
||||
fun: impl FnMut(Option<T>) -> T + 'static,
|
||||
value: impl IntoFuture<Output = T> + 'static,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
#[cfg(feature = "subsecond")]
|
||||
let fun = move |prev| fun.call((prev,));
|
||||
|
||||
Self::new_with_async_value_erased(
|
||||
Box::new(fun),
|
||||
Box::pin(value.into_future()),
|
||||
@@ -79,8 +135,13 @@ where
|
||||
}
|
||||
|
||||
fn new_with_value_erased(
|
||||
mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
|
||||
#[allow(unused_mut)] mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
|
||||
initial_value: Option<T>,
|
||||
// this argument can be used to invalidate individual effects in the future
|
||||
// in present experiments, I have found that it is not actually granular enough to make a difference
|
||||
#[allow(unused)]
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr: CurrentHotPtr,
|
||||
) -> Self {
|
||||
// codegen optimisation:
|
||||
fn prep() -> (Owner, Arc<RwLock<EffectInner>>, crate::channel::Receiver)
|
||||
@@ -104,12 +165,56 @@ where
|
||||
let _ = initial_value;
|
||||
let _ = owner;
|
||||
let _ = &mut rx;
|
||||
let _ = &mut fun;
|
||||
let _ = fun;
|
||||
}
|
||||
|
||||
#[cfg(feature = "effects")]
|
||||
{
|
||||
let subscriber = inner.to_any_subscriber();
|
||||
|
||||
#[cfg(all(feature = "subsecond", debug_assertions))]
|
||||
let mut fun = {
|
||||
use crate::graph::ReactiveNode;
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::sync::{Arc, LazyLock, Mutex};
|
||||
use subsecond::HotFnPtr;
|
||||
|
||||
static HOT_RELOAD_SUBSCRIBERS: LazyLock<
|
||||
Mutex<FxHashMap<AnySubscriber, (HotFnPtr, CurrentHotPtr)>>,
|
||||
> = LazyLock::new(|| {
|
||||
subsecond::register_handler(Arc::new(|| {
|
||||
HOT_RELOAD_SUBSCRIBERS.lock().or_poisoned().retain(
|
||||
|subscriber, (prev_ptr, hot_fn_ptr)| {
|
||||
match hot_fn_ptr() {
|
||||
None => false,
|
||||
Some(curr_hot_ptr) => {
|
||||
if curr_hot_ptr != *prev_ptr {
|
||||
crate::log_warning(format_args!(
|
||||
"{prev_ptr:?} <> \
|
||||
{curr_hot_ptr:?}",
|
||||
));
|
||||
*prev_ptr = curr_hot_ptr;
|
||||
|
||||
subscriber.mark_dirty();
|
||||
}
|
||||
true
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
}));
|
||||
Default::default()
|
||||
});
|
||||
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
let initial_ptr = hot_fn_ptr().unwrap();
|
||||
HOT_RELOAD_SUBSCRIBERS
|
||||
.lock()
|
||||
.or_poisoned()
|
||||
.insert(subscriber.clone(), (initial_ptr, hot_fn_ptr));
|
||||
move |prev| fun.call((prev,))
|
||||
};
|
||||
|
||||
*value.write().or_poisoned() = Some(
|
||||
owner.with(|| subscriber.with_observer(|| fun(initial_value))),
|
||||
);
|
||||
@@ -230,6 +335,11 @@ where
|
||||
pub fn new_isomorphic(
|
||||
fun: impl FnMut(Option<T>) -> T + Send + Sync + 'static,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
#[cfg(feature = "subsecond")]
|
||||
let fun = move |prev| fun.call((prev,));
|
||||
|
||||
fn erased<T: Send + Sync + 'static>(
|
||||
mut fun: Box<dyn FnMut(Option<T>) -> T + Send + Sync + 'static>,
|
||||
) -> RenderEffect<T> {
|
||||
|
||||
@@ -209,6 +209,25 @@ impl Owner {
|
||||
this
|
||||
}
|
||||
|
||||
/// Returns the parent of this `Owner`, if any.
|
||||
///
|
||||
/// None when:
|
||||
/// - This is a root owner
|
||||
/// - The parent has been dropped
|
||||
pub fn parent(&self) -> Option<Owner> {
|
||||
self.inner
|
||||
.read()
|
||||
.or_poisoned()
|
||||
.parent
|
||||
.as_ref()
|
||||
.and_then(|p| p.upgrade())
|
||||
.map(|inner| Owner {
|
||||
inner,
|
||||
#[cfg(feature = "hydration")]
|
||||
shared_context: self.shared_context.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a new `Owner` that is the child of the current `Owner`, if any.
|
||||
pub fn child(&self) -> Self {
|
||||
let parent = Some(Arc::downgrade(&self.inner));
|
||||
@@ -321,6 +340,8 @@ impl Owner {
|
||||
}
|
||||
|
||||
/// Removes this from its state as the thread-local owner and drops it.
|
||||
/// If there are other holders of this owner, it may not cleanup, if always cleaning up is required,
|
||||
/// see [`Owner::unset_with_forced_cleanup`].
|
||||
pub fn unset(self) {
|
||||
OWNER.with_borrow_mut(|owner| {
|
||||
if owner.as_ref().and_then(|n| n.upgrade()) == Some(self) {
|
||||
@@ -329,6 +350,23 @@ impl Owner {
|
||||
})
|
||||
}
|
||||
|
||||
/// Removes this from its state as the thread-local owner and drops it.
|
||||
/// Unlike [`Owner::unset`], this will always run cleanup on this owner,
|
||||
/// even if there are other holders of this owner.
|
||||
pub fn unset_with_forced_cleanup(self) {
|
||||
OWNER.with_borrow_mut(|owner| {
|
||||
if owner
|
||||
.as_ref()
|
||||
.and_then(|n| n.upgrade())
|
||||
.map(|o| o == self)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
mem::take(owner);
|
||||
}
|
||||
});
|
||||
self.cleanup();
|
||||
}
|
||||
|
||||
/// Returns the current [`SharedContext`], if any.
|
||||
#[cfg(feature = "hydration")]
|
||||
pub fn current_shared_context(
|
||||
|
||||
@@ -257,6 +257,20 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<ReadSignal<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcReadSignal<T>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: ReadSignal<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::ReadSignal(value.into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Send + Sync> From<ArcRwSignal<T>> for ArcSignal<T, SyncStorage> {
|
||||
#[track_caller]
|
||||
fn from(value: ArcRwSignal<T>) -> Self {
|
||||
@@ -268,6 +282,20 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<RwSignal<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcRwSignal<T>> + Storage<ArcReadSignal<T>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: RwSignal<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::ReadSignal(value.read_only().into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<ArcMemo<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
@@ -282,6 +310,20 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<Memo<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcMemo<T, S>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: Memo<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::Memo(value.into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> DefinedAt for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
@@ -1500,7 +1542,6 @@ pub mod read {
|
||||
|
||||
impl<T, S> ReadUntracked for MaybeProp<T, S>
|
||||
where
|
||||
T: Clone,
|
||||
S: Storage<Option<T>> + Storage<SignalTypes<Option<T>, S>>,
|
||||
{
|
||||
type Value = ReadGuard<Option<T>, SignalReadGuard<Option<T>, S>>;
|
||||
|
||||
@@ -225,3 +225,38 @@ fn threaded_chaos_effect() {
let values: Vec<_> = signals.iter().map(|s| s.get_untracked()).collect();
println!("FINAL: {values:?}");
}

#[cfg(feature = "effects")]
#[test]
fn test_batch() {
use imports::*;
use reactive_graph::{effect::batch, owner::StoredValue};

let owner = Owner::new();
owner.set();

let a = RwSignal::new(0);
let b = RwSignal::new(0);

let values = StoredValue::new(Vec::new());

ImmediateEffect::new_scoped(move || {
println!("{} = {}", a.get(), b.get());
values.write_value().push((a.get(), b.get()));
});

a.set(1);
b.set(1);

batch(move || {
a.set(2);
b.set(2);

batch(move || {
a.set(3);
b.set(3);
});
});

assert_eq!(values.get_value(), vec![(0, 0), (1, 0), (1, 1), (3, 3)]);
}

@@ -1,6 +1,6 @@
[package]
name = "reactive_stores"
version = "0.2.5"
version = "0.3.0"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
||||
@@ -30,6 +30,8 @@ where
|
||||
defined_at: &'static Location<'static>,
|
||||
path: Arc<dyn Fn() -> StorePath + Send + Sync>,
|
||||
get_trigger: Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
|
||||
get_trigger_unkeyed:
|
||||
Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
|
||||
read: Arc<dyn Fn() -> Option<StoreFieldReader<T>> + Send + Sync>,
|
||||
pub(crate) write:
|
||||
Arc<dyn Fn() -> Option<StoreFieldWriter<T>> + Send + Sync>,
|
||||
@@ -103,6 +105,10 @@ impl<T> StoreField for ArcField<T> {
|
||||
(self.get_trigger)(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
(self.get_trigger_unkeyed)(path)
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
(self.path)()
|
||||
}
|
||||
@@ -132,6 +138,9 @@ where
|
||||
defined_at: Location::caller(),
|
||||
path: Arc::new(move || value.path().into_iter().collect()),
|
||||
get_trigger: Arc::new(move |path| value.get_trigger(path)),
|
||||
get_trigger_unkeyed: Arc::new(move |path| {
|
||||
value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new(move || value.reader().map(StoreFieldReader::new)),
|
||||
write: Arc::new(move || value.writer().map(StoreFieldWriter::new)),
|
||||
keys: Arc::new(move || value.keys()),
|
||||
@@ -158,6 +167,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -202,6 +215,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -245,6 +262,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -289,6 +310,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -337,6 +362,10 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -368,6 +397,7 @@ impl<T> Clone for ArcField<T> {
|
||||
defined_at: self.defined_at,
|
||||
path: self.path.clone(),
|
||||
get_trigger: Arc::clone(&self.get_trigger),
|
||||
get_trigger_unkeyed: Arc::clone(&self.get_trigger_unkeyed),
|
||||
read: Arc::clone(&self.read),
|
||||
write: Arc::clone(&self.write),
|
||||
keys: Arc::clone(&self.keys),
|
||||
|
||||
@@ -68,6 +68,11 @@ where
|
||||
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner.path()
|
||||
}
|
||||
|
||||
@@ -59,6 +59,13 @@ where
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|inner| inner.get_trigger_unkeyed(path))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
|
||||
@@ -84,6 +84,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
let index = self.index;
|
||||
@@ -109,6 +113,23 @@ where
|
||||
fn keys(&self) -> Option<KeyMap> {
|
||||
self.inner.keys()
|
||||
}
|
||||
|
||||
fn track_field(&self) {
|
||||
let mut full_path = self.path().into_iter().collect::<StorePath>();
|
||||
let trigger = self.get_trigger(self.path().into_iter().collect());
|
||||
trigger.this.track();
|
||||
trigger.children.track();
|
||||
|
||||
// tracks `this` for all ancestors: i.e., it will track any change that is made
|
||||
// directly to one of its ancestors, but not a change made to a *child* of an ancestor
|
||||
// (which would end up with every subfield tracking its own siblings, because they are
|
||||
// children of its parent)
|
||||
while !full_path.is_empty() {
|
||||
full_path.pop();
|
||||
let inner = self.get_trigger(full_path.clone());
|
||||
inner.this.track();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Inner, Prev> DefinedAt for AtIndex<Inner, Prev>
|
||||
|
||||
@@ -110,6 +110,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
Some(Mapped::new_with_guard(inner, self.read))
|
||||
@@ -432,7 +436,7 @@ where
|
||||
let this = keys
|
||||
.with_field_keys(
|
||||
inner.clone(),
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -444,6 +448,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
|
||||
@@ -452,7 +460,7 @@ where
|
||||
let index = keys
|
||||
.with_field_keys(
|
||||
inner_path,
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -476,7 +484,7 @@ where
|
||||
let index = keys
|
||||
.with_field_keys(
|
||||
inner_path.clone(),
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -624,9 +632,7 @@ where
|
||||
let latest = self.latest_keys();
|
||||
keys.with_field_keys(
|
||||
inner_path,
|
||||
|keys| {
|
||||
keys.update(latest);
|
||||
},
|
||||
|keys| ((), keys.update(latest)),
|
||||
|| self.latest_keys(),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -364,13 +364,18 @@ where
|
||||
})
|
||||
}
|
||||
|
||||
fn update(&mut self, iter: impl IntoIterator<Item = K>) {
|
||||
fn update(
|
||||
&mut self,
|
||||
iter: impl IntoIterator<Item = K>,
|
||||
) -> Vec<(usize, StorePathSegment)> {
|
||||
let new_keys = iter
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(idx, key)| (key, idx))
|
||||
.collect::<FxHashMap<K, usize>>();
|
||||
|
||||
let mut index_keys = Vec::with_capacity(new_keys.len());
|
||||
|
||||
// remove old keys and recycle the slots
|
||||
self.keys.retain(|key, old_entry| match new_keys.get(key) {
|
||||
Some(idx) => {
|
||||
@@ -385,14 +390,17 @@ where
|
||||
|
||||
// add new keys
|
||||
for (key, idx) in new_keys {
|
||||
// the suggestion doesn't compile because we need &mut for self.next_key(),
|
||||
// and we don't want to call that until after the check
|
||||
#[allow(clippy::map_entry)]
|
||||
if !self.keys.contains_key(&key) {
|
||||
let path = self.next_key();
|
||||
self.keys.insert(key, (path, idx));
|
||||
match self.keys.get(&key) {
|
||||
Some((segment, idx)) => index_keys.push((*idx, *segment)),
|
||||
None => {
|
||||
let path = self.next_key();
|
||||
self.keys.insert(key, (path, idx));
|
||||
index_keys.push((idx, path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
index_keys
|
||||
}
|
||||
}
|
||||
|
||||
@@ -415,14 +423,20 @@ type HashMap<K, V> = send_wrapper::SendWrapper<
|
||||
|
||||
/// A map of the keys for a keyed subfield.
|
||||
#[derive(Clone)]
|
||||
pub struct KeyMap(HashMap<StorePath, Box<dyn Any + Send + Sync>>);
|
||||
pub struct KeyMap(
|
||||
HashMap<StorePath, Box<dyn Any + Send + Sync>>,
|
||||
HashMap<(StorePath, usize), StorePathSegment>,
|
||||
);
|
||||
|
||||
impl Default for KeyMap {
|
||||
fn default() -> Self {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
return Self(Default::default());
|
||||
return Self(Default::default(), Default::default());
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
return Self(send_wrapper::SendWrapper::new(Default::default()));
|
||||
return Self(
|
||||
send_wrapper::SendWrapper::new(Default::default()),
|
||||
send_wrapper::SendWrapper::new(Default::default()),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -430,31 +444,70 @@ impl KeyMap {
|
||||
fn with_field_keys<K, T>(
|
||||
&self,
|
||||
path: StorePath,
|
||||
fun: impl FnOnce(&mut FieldKeys<K>) -> T,
|
||||
fun: impl FnOnce(&mut FieldKeys<K>) -> (T, Vec<(usize, StorePathSegment)>),
|
||||
initialize: impl FnOnce() -> Vec<K>,
|
||||
) -> Option<T>
|
||||
where
|
||||
K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static,
|
||||
{
|
||||
let initial_keys = initialize();
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let mut entry = self
|
||||
.0
|
||||
.entry(path)
|
||||
.or_insert_with(|| Box::new(FieldKeys::new(initialize())));
|
||||
.entry(path.clone())
|
||||
.or_insert_with(|| Box::new(FieldKeys::new(initial_keys)));
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let entry = if !self.0.borrow().contains_key(&path) {
|
||||
Some(Box::new(FieldKeys::new(initialize())))
|
||||
Some(Box::new(FieldKeys::new(initial_keys)))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let mut map = self.0.borrow_mut();
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let entry = map.entry(path).or_insert_with(|| entry.unwrap());
|
||||
let entry = map.entry(path.clone()).or_insert_with(|| entry.unwrap());
|
||||
|
||||
let entry = entry.downcast_mut::<FieldKeys<K>>()?;
|
||||
Some(fun(entry))
|
||||
let (result, new_keys) = fun(entry);
|
||||
if !new_keys.is_empty() {
|
||||
for (idx, segment) in new_keys {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
self.1.insert((path.clone(), idx), segment);
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
self.1.borrow_mut().insert((path.clone(), idx), segment);
|
||||
}
|
||||
}
|
||||
Some(result)
|
||||
}
|
||||
|
||||
fn contains_key(&self, key: &StorePath) -> bool {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.0.contains_key(key)
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
self.0.borrow_mut().contains_key(key)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_key_for_index(
|
||||
&self,
|
||||
key: &(StorePath, usize),
|
||||
) -> Option<StorePathSegment> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.1.get(key).as_deref().copied()
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
self.1.borrow().get(key).as_deref().copied()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -832,6 +885,30 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Foo {
|
||||
id: i32,
|
||||
bar: Bar,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Bar {
|
||||
bar_signature: i32,
|
||||
baz: Baz,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baz {
|
||||
more_data: i32,
|
||||
baw: Baw,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baw {
|
||||
more_data: i32,
|
||||
end: i32,
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mutating_field_triggers_effect() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
@@ -1112,30 +1189,6 @@ mod tests {
|
||||
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Foo {
|
||||
id: i32,
|
||||
bar: Bar,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Bar {
|
||||
bar_signature: i32,
|
||||
baz: Baz,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baz {
|
||||
more_data: i32,
|
||||
baw: Baw,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baw {
|
||||
more_data: i32,
|
||||
end: i32,
|
||||
}
|
||||
|
||||
let store = Store::new(Foo {
|
||||
id: 42,
|
||||
bar: Bar {
|
||||
@@ -1219,4 +1272,107 @@ mod tests {
|
||||
assert_eq!(more_data_runs.get_value(), 3);
|
||||
assert_eq!(baz_baw_end_runs.get_value(), 3);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn changing_parent_notifies_subfield() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
let combined_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
let store = Store::new(Foo {
|
||||
id: 42,
|
||||
bar: Bar {
|
||||
bar_signature: 69,
|
||||
baz: Baz {
|
||||
more_data: 9999,
|
||||
baw: Baw {
|
||||
more_data: 22,
|
||||
end: 1112,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
let tracked_field = store.bar().baz().more_data();
|
||||
|
||||
Effect::new_sync({
|
||||
let combined_count = Arc::clone(&combined_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("first run");
|
||||
} else {
|
||||
println!("next run");
|
||||
}
|
||||
|
||||
// we only track `more`, but this should still be notified
|
||||
// when its parent fields `bar` or `baz` change
|
||||
println!("{:?}", *tracked_field.read());
|
||||
combined_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
tick().await;
|
||||
tick().await;
|
||||
|
||||
store.bar().baz().set(Baz {
|
||||
more_data: 42,
|
||||
baw: Baw {
|
||||
more_data: 11,
|
||||
end: 31,
|
||||
},
|
||||
});
|
||||
tick().await;
|
||||
store.bar().set(Bar {
|
||||
bar_signature: 23,
|
||||
baz: Baz {
|
||||
more_data: 32,
|
||||
baw: Baw {
|
||||
more_data: 432,
|
||||
end: 423,
|
||||
},
|
||||
},
|
||||
});
|
||||
tick().await;
|
||||
|
||||
assert_eq!(combined_count.load(Ordering::Relaxed), 3);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn changing_parent_notifies_unkeyed_child() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
let combined_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
let store = Store::new(data());
|
||||
|
||||
let tracked_field = store.todos().at_unkeyed(0);
|
||||
|
||||
Effect::new_sync({
|
||||
let combined_count = Arc::clone(&combined_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("first run");
|
||||
} else {
|
||||
println!("next run");
|
||||
}
|
||||
|
||||
// we only track `more`, but this should still be notified
|
||||
// when its parent fields `bar` or `baz` change
|
||||
println!("{:?}", *tracked_field.read());
|
||||
combined_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
tick().await;
|
||||
tick().await;
|
||||
|
||||
store.todos().write().pop();
|
||||
tick().await;
|
||||
|
||||
store.todos().write().push(Todo {
|
||||
label: "another one".into(),
|
||||
completed: false,
|
||||
});
|
||||
tick().await;
|
||||
|
||||
assert_eq!(combined_count.load(Ordering::Relaxed), 3);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ where
|
||||
// don't track the writer for the whole store
|
||||
writer.untrack();
|
||||
let mut notify = |path: &StorePath| {
|
||||
self.triggers_for_path(path.to_owned()).notify();
|
||||
self.triggers_for_path_unkeyed(path.to_owned()).notify();
|
||||
};
|
||||
writer.patch_field(new, &path, &mut notify);
|
||||
}
|
||||
|
||||
@@ -11,6 +11,15 @@ impl IntoIterator for StorePath {
}
}

impl<'a> IntoIterator for &'a StorePath {
type Item = &'a StorePathSegment;
type IntoIter = std::slice::Iter<'a, StorePathSegment>;

fn into_iter(self) -> Self::IntoIter {
self.0.iter()
}
}

impl From<Vec<StorePathSegment>> for StorePath {
fn from(value: Vec<StorePathSegment>) -> Self {
Self(value)
@@ -18,6 +27,16 @@ impl From<Vec<StorePathSegment>> for StorePath {
}

impl StorePath {
/// Creates a new path.
pub fn new() -> Self {
Self(Vec::new())
}

/// Creates a new path with storage capacity for `capacity` segments.
pub fn with_capacity(capacity: usize) -> Self {
Self(Vec::with_capacity(capacity))
}

/// Adds a new segment to the path.
pub fn push(&mut self, segment: impl Into<StorePathSegment>) {
self.0.push(segment.into());
||||
|
||||
@@ -26,6 +26,14 @@ pub trait StoreField: Sized {
|
||||
#[track_caller]
|
||||
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger;
|
||||
|
||||
/// Returns the trigger that tracks access and updates for this field.
|
||||
///
|
||||
/// This uses *unkeyed* paths: i.e., if any field in the path is keyed, it will
|
||||
/// try to look up the key for the item at the index given in the path, rather than
|
||||
/// the keyed item.
|
||||
#[track_caller]
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger;
|
||||
|
||||
/// The path of this field (see [`StorePath`]).
|
||||
#[track_caller]
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment>;
|
||||
@@ -84,6 +92,26 @@ pub trait StoreField: Sized {
|
||||
|
||||
triggers
|
||||
}
|
||||
|
||||
/// Returns triggers for the field at the given path, and all parent fields
|
||||
fn triggers_for_path_unkeyed(&self, path: StorePath) -> Vec<ArcTrigger> {
|
||||
// see notes on triggers_for_path() for additional comments on implementation
|
||||
|
||||
let trigger = self.get_trigger_unkeyed(path.clone());
|
||||
let mut full_path = path;
|
||||
|
||||
let mut triggers = Vec::with_capacity(full_path.len() + 2);
|
||||
triggers.push(trigger.this.clone());
|
||||
triggers.push(trigger.children.clone());
|
||||
while !full_path.is_empty() {
|
||||
full_path.pop();
|
||||
let inner = self.get_trigger_unkeyed(full_path.clone());
|
||||
triggers.push(inner.children.clone());
|
||||
}
|
||||
triggers.reverse();
|
||||
|
||||
triggers
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> StoreField for ArcStore<T>
|
||||
@@ -101,6 +129,26 @@ where
|
||||
trigger
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
let orig_path = path.clone();
|
||||
|
||||
let mut path = StorePath::with_capacity(orig_path.len());
|
||||
for segment in &orig_path {
|
||||
let parent_is_keyed = self.keys.contains_key(&path);
|
||||
|
||||
if parent_is_keyed {
|
||||
let key = self
|
||||
.keys
|
||||
.get_key_for_index(&(path.clone(), segment.0))
|
||||
.expect("could not find key for index");
|
||||
path.push(key);
|
||||
} else {
|
||||
path.push(*segment);
|
||||
}
|
||||
}
|
||||
self.get_trigger(path)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
iter::empty()
|
||||
@@ -141,6 +189,14 @@ where
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|n| n.get_trigger_unkeyed(path))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner
|
||||
|
||||
@@ -88,6 +88,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
Some(Mapped::new_with_guard(inner, self.read))
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router"
|
||||
version = "0.8.6"
|
||||
version = "0.8.9"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -364,6 +364,12 @@ where
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct MatchedRoute(pub String, pub AnyView);
|
||||
|
||||
impl MatchedRoute {
|
||||
fn branch_name(&self) -> String {
|
||||
format!("{:?}", self.1.as_type_id())
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for MatchedRoute {
|
||||
type State = <AnyView as Render>::State;
|
||||
|
||||
@@ -414,8 +420,9 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches: bool,
|
||||
extra_attrs: Vec<AnyAttribute>,
|
||||
) {
|
||||
if mark_branches && escape {
|
||||
buf.open_branch(&self.0);
|
||||
let branch_name = (mark_branches && escape).then(|| self.branch_name());
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.open_branch(bn);
|
||||
}
|
||||
self.1.to_html_with_buf(
|
||||
buf,
|
||||
@@ -424,8 +431,8 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if mark_branches && escape {
|
||||
buf.close_branch(&self.0);
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.close_branch(bn);
|
||||
if *position == Position::NextChildAfterText {
|
||||
*position = Position::NextChild;
|
||||
}
|
||||
@@ -442,8 +449,9 @@ impl RenderHtml for MatchedRoute {
|
||||
) where
|
||||
Self: Sized,
|
||||
{
|
||||
if mark_branches && escape {
|
||||
buf.open_branch(&self.0);
|
||||
let branch_name = (mark_branches && escape).then(|| self.branch_name());
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.open_branch(bn);
|
||||
}
|
||||
self.1.to_html_async_with_buf::<OUT_OF_ORDER>(
|
||||
buf,
|
||||
@@ -452,8 +460,8 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if mark_branches && escape {
|
||||
buf.close_branch(&self.0);
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.close_branch(bn);
|
||||
if *position == Position::NextChildAfterText {
|
||||
*position = Position::NextChild;
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::{hooks::use_navigate, params::ParamsMap};
|
||||
use core::fmt;
|
||||
use futures::channel::oneshot;
|
||||
use js_sys::{try_iter, Array, JsString};
|
||||
use leptos::prelude::*;
|
||||
use leptos::{ev, prelude::*};
|
||||
use or_poisoned::OrPoisoned;
|
||||
use reactive_graph::{
|
||||
signal::ArcRwSignal,
|
||||
@@ -11,13 +11,12 @@ use reactive_graph::{
|
||||
};
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
boxed::Box,
|
||||
string::String,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
use tachys::dom::{document, window};
|
||||
use wasm_bindgen::{closure::Closure, JsCast, JsValue};
|
||||
use web_sys::{Event, UrlSearchParams};
|
||||
use wasm_bindgen::{JsCast, JsValue};
|
||||
use web_sys::UrlSearchParams;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct BrowserUrl {
|
||||
@@ -116,7 +115,6 @@ impl LocationProvider for BrowserUrl {
|
||||
}
|
||||
|
||||
fn init(&self, base: Option<Cow<'static, str>>) {
|
||||
let window = window();
|
||||
let navigate = {
|
||||
let url = self.url.clone();
|
||||
let pending = Arc::clone(&self.pending_navigation);
|
||||
@@ -159,37 +157,32 @@ impl LocationProvider for BrowserUrl {
|
||||
|
||||
let handle_anchor_click =
|
||||
handle_anchor_click(base, Self::parse_with_base, navigate);
|
||||
let closure = Closure::wrap(Box::new(move |ev: Event| {
|
||||
|
||||
let click_handle = window_event_listener(ev::click, move |ev| {
|
||||
if let Err(e) = handle_anchor_click(ev) {
|
||||
#[cfg(feature = "tracing")]
|
||||
tracing::error!("{e:?}");
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
web_sys::console::error_1(&e);
|
||||
}
|
||||
}) as Box<dyn FnMut(Event)>)
|
||||
.into_js_value();
|
||||
window
|
||||
.add_event_listener_with_callback(
|
||||
"click",
|
||||
closure.as_ref().unchecked_ref(),
|
||||
)
|
||||
.expect(
|
||||
"couldn't add `click` listener to `window` to handle `<a>` \
|
||||
clicks",
|
||||
);
|
||||
});
|
||||
|
||||
// handle popstate event (forward/back navigation)
|
||||
let cb = {
|
||||
let popstate_cb = {
|
||||
let url = self.url.clone();
|
||||
let path_stack = self.path_stack.clone();
|
||||
let is_back = self.is_back.clone();
|
||||
move || match Self::current() {
|
||||
Ok(new_url) => {
|
||||
let stack = path_stack.read_value();
|
||||
let mut stack = path_stack.write_value();
|
||||
let is_navigating_back = stack.len() == 1
|
||||
|| (stack.len() >= 2
|
||||
&& stack.get(stack.len() - 2) == Some(&new_url));
|
||||
|
||||
if is_navigating_back {
|
||||
stack.pop();
|
||||
}
|
||||
|
||||
is_back.set(is_navigating_back);
|
||||
|
||||
url.set(new_url);
|
||||
@@ -202,14 +195,14 @@ impl LocationProvider for BrowserUrl {
|
||||
}
|
||||
}
|
||||
};
|
||||
let closure =
|
||||
Closure::wrap(Box::new(cb) as Box<dyn Fn()>).into_js_value();
|
||||
window
|
||||
.add_event_listener_with_callback(
|
||||
"popstate",
|
||||
closure.as_ref().unchecked_ref(),
|
||||
)
|
||||
.expect("couldn't add `popstate` listener to `window`");
|
||||
|
||||
let popstate_handle =
|
||||
window_event_listener(ev::popstate, move |_| popstate_cb());
|
||||
|
||||
on_cleanup(|| {
|
||||
click_handle.remove();
|
||||
popstate_handle.remove();
|
||||
});
|
||||
}
|
||||
|
||||
fn ready_to_complete(&self) {
|
||||
@@ -221,6 +214,13 @@ impl LocationProvider for BrowserUrl {
|
||||
fn complete_navigation(&self, loc: &LocationChange) {
|
||||
let history = window().history().unwrap();
|
||||
|
||||
let current_path = self
|
||||
.path_stack
|
||||
.read_value()
|
||||
.last()
|
||||
.map(|url| url.to_full_path());
|
||||
let add_to_stack = current_path.as_ref() != Some(&loc.value);
|
||||
|
||||
if loc.replace {
|
||||
history
|
||||
.replace_state_with_url(
|
||||
@@ -229,7 +229,7 @@ impl LocationProvider for BrowserUrl {
|
||||
Some(&loc.value),
|
||||
)
|
||||
.unwrap();
|
||||
} else {
|
||||
} else if add_to_stack {
|
||||
// push the "forward direction" marker
|
||||
let state = &loc.state.to_js_value();
|
||||
history
|
||||
@@ -240,7 +240,9 @@ impl LocationProvider for BrowserUrl {
|
||||
// add this URL to the "path stack" for detecting back navigations, and
|
||||
// unset "navigating back" state
|
||||
if let Ok(url) = Self::current() {
|
||||
self.path_stack.write_value().push(url);
|
||||
if add_to_stack {
|
||||
self.path_stack.write_value().push(url);
|
||||
}
|
||||
self.is_back.set(false);
|
||||
}
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ use send_wrapper::SendWrapper;
|
||||
use std::{borrow::Cow, future::Future};
|
||||
use tachys::dom::window;
|
||||
use wasm_bindgen::{JsCast, JsValue};
|
||||
use web_sys::{Event, HtmlAnchorElement, MouseEvent};
|
||||
use web_sys::{HtmlAnchorElement, MouseEvent};
|
||||
|
||||
mod history;
|
||||
mod server;
|
||||
@@ -300,15 +300,14 @@ pub(crate) fn handle_anchor_click<NavFn, NavFut>(
|
||||
router_base: Option<Cow<'static, str>>,
|
||||
parse_with_base: fn(&str, &str) -> Result<Url, JsValue>,
|
||||
navigate: NavFn,
|
||||
) -> Box<dyn Fn(Event) -> Result<(), JsValue>>
|
||||
) -> Box<dyn Fn(MouseEvent) -> Result<(), JsValue>>
|
||||
where
|
||||
NavFn: Fn(Url, LocationChange) -> NavFut + 'static,
|
||||
NavFut: Future<Output = ()> + 'static,
|
||||
{
|
||||
let router_base = router_base.unwrap_or_default();
|
||||
|
||||
Box::new(move |ev: Event| {
|
||||
let ev = ev.unchecked_into::<MouseEvent>();
|
||||
Box::new(move |ev: MouseEvent| {
|
||||
let origin = window().location().origin()?;
|
||||
if ev.default_prevented()
|
||||
|| ev.button() != 0
|
||||
@@ -369,8 +368,8 @@ where
|
||||
ev.prevent_default();
|
||||
let to = path_name
|
||||
+ if url.search.is_empty() { "" } else { "?" }
|
||||
+ &Url::unescape(&url.search)
|
||||
+ &Url::unescape(&url.hash);
|
||||
+ &url.search
|
||||
+ &url.hash;
|
||||
let state = Reflect::get(&a, &JsValue::from_str("state"))
|
||||
.ok()
|
||||
.and_then(|value| {
|
||||
|
||||
@@ -4,7 +4,6 @@ macro_rules! tuples {
|
||||
($first:ident => $($ty:ident),*) => {
|
||||
impl<$first, $($ty),*> PossibleRouteMatch for ($first, $($ty,)*)
|
||||
where
|
||||
Self: core::fmt::Debug,
|
||||
$first: PossibleRouteMatch,
|
||||
$($ty: PossibleRouteMatch),*,
|
||||
{
|
||||
|
@@ -26,7 +26,7 @@ use reactive_graph::{
computed::{ArcMemo, ScopedFuture},
owner::{provide_context, use_context, Owner},
signal::{ArcRwSignal, ArcTrigger},
traits::{Get, GetUntracked, Notify, ReadUntracked, Set, Track},
traits::{Get, GetUntracked, Notify, ReadUntracked, Set, Track, Write},
transition::AsyncTransition,
wrappers::write::SignalSetter,
};
@@ -119,6 +119,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -159,13 +160,14 @@ where
}
return;
}

// since the path didn't match, we'll update the retained path for future diffing
state.path.clear();
state.path.push_str(url_snapshot.path());

let new_match = self.routes.match_route(url_snapshot.path());

state.current_url.set(url_snapshot);
*state.current_url.write_untracked() = url_snapshot;

match new_match {
None => {
@@ -192,6 +194,7 @@ where
&mut state.outlets,
self.set_is_routing.is_some(),
0,
&self.outer_owner,
);

let (abort_handle, abort_registration) =
@@ -369,6 +372,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);

// outlets will not send their views if the loaders are never polled
@@ -422,8 +426,16 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);

let preload_owners = outlets
.iter()
.map(|o| o.preload_owner.clone())
.collect::<Vec<_>>();
outer_owner
.with(|| Owner::on_cleanup(move || drop(preload_owners)));

// outlets will not send their views if the loaders are never polled
// the loaders are async so that they can lazy-load routes in the browser,
// but they should always be synchronously available on the server
@@ -475,6 +487,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -530,6 +543,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -566,6 +580,7 @@ pub(crate) struct RouteContext {
base: Option<Oco<'static, str>>,
view_fn: Arc<Mutex<OutletViewFn>>,
owner: Arc<Mutex<Option<Owner>>>,
preload_owner: Owner,
child: ChildRoute,
}

@@ -597,6 +612,7 @@ impl Clone for RouteContext {
view_fn: Arc::clone(&self.view_fn),
owner: Arc::clone(&self.owner),
child: self.child.clone(),
preload_owner: self.preload_owner.clone(),
}
}
}
@@ -608,6 +624,7 @@ trait AddNestedRoute {
base: Option<Oco<'static, str>>,
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
outlets: &mut Vec<RouteContext>,
outer_owner: &Owner,
);

#[allow(clippy::too_many_arguments)]
@@ -621,6 +638,7 @@ trait AddNestedRoute {
outlets: &mut Vec<RouteContext>,
set_is_routing: bool,
level: u8,
outer_owner: &Owner,
) -> u8;
}

@@ -634,6 +652,7 @@ where
base: Option<Oco<'static, str>>,
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
outlets: &mut Vec<RouteContext>,
outer_owner: &Owner,
) {
let orig_url = url;

@@ -701,6 +720,7 @@ where
base: base.clone(),
child: ChildRoute(Arc::new(Mutex::new(None))),
owner: Arc::new(Mutex::new(None)),
preload_owner: outer_owner.child(),
};
if !outlets.is_empty() {
let prev_index = outlets.len().saturating_sub(1);
@@ -725,7 +745,15 @@ where
provide_context(params.clone());
provide_context(url.clone());
provide_context(matched.clone());
view.preload().await;
outlet
.preload_owner
.with(|| {
provide_context(params.clone());
provide_context(url.clone());
provide_context(matched.clone());
ScopedFuture::new(view.preload())
})
.await;
let child = outlet.child.clone();
*view_fn.lock().or_poisoned() =
Box::new(move |owner_where_used| {
@@ -772,7 +800,13 @@ where
// this is important because to build the view, we need access to the outlet
// and the outlet will be returned from building this child
if let Some(child) = child {
child.build_nested_route(orig_url, base, loaders, outlets);
child.build_nested_route(
orig_url,
base,
loaders,
outlets,
outer_owner,
);
}
}

@@ -787,6 +821,7 @@ where
outlets: &mut Vec<RouteContext>,
set_is_routing: bool,
level: u8,
outer_owner: &Owner,
) -> u8 {
let (parent_params, parent_matches): (Vec<_>, Vec<_>) = outlets
.iter()
@@ -803,7 +838,13 @@ where
match current {
// if there's nothing currently in the routes at this point, build from here
None => {
self.build_nested_route(url, base, preloaders, outlets);
self.build_nested_route(
url,
base,
preloaders,
outlets,
outer_owner,
);
level
}
Some(current) => {
@@ -843,6 +884,10 @@ where
&mut current.matched,
ArcRwSignal::new(new_match),
);
let old_preload_owner = mem::replace(
&mut current.preload_owner,
outer_owner.child(),
);
let matched_including_parents = {
ArcMemo::new({
let matched = current.matched.clone();
@@ -885,11 +930,26 @@ where
let child = outlet.child.clone();
async move {
let child = child.clone();
if set_is_routing {
AsyncTransition::run(|| view.preload()).await;
} else {
view.preload().await;
}
outlet
.preload_owner
.with(|| {
provide_context(
params_including_parents.clone(),
);
provide_context(url.clone());
provide_context(matched.clone());
ScopedFuture::new(async {
if set_is_routing {
AsyncTransition::run(|| {
view.preload()
})
.await;
} else {
view.preload().await;
}
})
})
.await;
*view_fn.lock().or_poisoned() =
Box::new(move |owner_where_used| {
let prev_owner = route_owner
@@ -938,6 +998,7 @@ where
drop(old_params);
drop(old_url);
drop(old_matched);
drop(old_preload_owner);
trigger
}
})));
@@ -948,8 +1009,13 @@ where

// if this children has matches, then rebuild the lower section of the tree
if let Some(child) = child {
child
.build_nested_route(url, base, preloaders, outlets);
child.build_nested_route(
url,
base,
preloaders,
outlets,
outer_owner,
);
} else {
*outlets[*items].child.0.lock().or_poisoned() = None;
}
@@ -973,6 +1039,7 @@ where
outlets,
set_is_routing,
level + 1,
outer_owner,
)
} else {
*current.child.0.lock().or_poisoned() = None;

@@ -369,7 +369,7 @@ impl ResolvedStaticPath {
eprintln!("{e}");
}
}
owner.unset();
owner.unset_with_forced_cleanup();
}
}
});
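For context on the change above: each outlet now carries its own preload owner, preloading runs inside it, and navigation swaps that owner out and drops the old one so anything the previous preload registered is cleaned up. A minimal sketch of that pattern, using the reactive_graph Owner API shown in the diff (the Outlet struct and on_navigation function here are illustrative, not the router's actual code):

// Illustrative sketch only; the names below are not taken from the router.
use reactive_graph::owner::Owner;
use std::mem;

struct Outlet {
    preload_owner: Owner,
}

fn on_navigation(outer_owner: &Owner, outlet: &mut Outlet) {
    // run preload-time setup (provide_context, preload futures) scoped to this owner
    outlet.preload_owner.with(|| {
        // context and resources created here belong to `preload_owner`
    });

    // when the route matches again, swap in a fresh child owner and drop the old
    // one, which disposes whatever the previous preload registered
    let old = mem::replace(&mut outlet.preload_owner, outer_owner.child());
    drop(old);
}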
@@ -1,6 +1,6 @@
[package]
name = "leptos_router_macro"
version = "0.8.5"
version = "0.8.6"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"

@@ -7,7 +7,7 @@
use proc_macro::{TokenStream, TokenTree};
use proc_macro2::Span;
use proc_macro_error2::{abort, proc_macro_error, set_dummy};
use quote::{quote, ToTokens};
use quote::{format_ident, quote, ToTokens};
use syn::{
spanned::Spanned, FnArg, Ident, ImplItem, ItemImpl, Path, Type, TypePath,
};
@@ -267,10 +267,7 @@ fn lazy_route_impl(
};
let lazy_view_ident =
Ident::new(&format!("__{ty_name_to_snake}_View"), im.self_ty.span());
let preload_lazy_view_ident = Ident::new(
&format!("__preload_{lazy_view_ident}"),
lazy_view_ident.span(),
);
let preload_ident = format_ident!("__preload_{lazy_view_ident}");

im.items.push(
syn::parse::<ImplItem>(
@@ -280,7 +277,7 @@ fn lazy_route_impl(
// we don't split routes for wasm32 ssr
// but we don't require a `hydrate`/`csr` feature on leptos_router
#[cfg(target_arch = "wasm32")]
#preload_lazy_view_ident().await;
#preload_ident().await;
}
}
.into(),
@@ -5,7 +5,7 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "RPC for any web framework."
|
||||
readme = "../README.md"
|
||||
version = "0.8.6"
|
||||
version = "0.8.8"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
@@ -64,6 +64,7 @@ http-body-util = { optional = true, workspace = true, default-features = true }
|
||||
rkyv = { optional = true, workspace = true, default-features = true }
|
||||
rmp-serde = { optional = true, workspace = true, default-features = true }
|
||||
base64 = { workspace = true, default-features = true }
|
||||
bitcode = { optional = true, workspace = true, default-features = true }
|
||||
|
||||
# client
|
||||
gloo-net = { optional = true, workspace = true, default-features = true }
|
||||
@@ -126,6 +127,7 @@ cbor = ["dep:ciborium"]
|
||||
rkyv = ["dep:rkyv"]
|
||||
msgpack = ["dep:rmp-serde"]
|
||||
postcard = ["dep:postcard"]
|
||||
bitcode = ["dep:bitcode"]
|
||||
default-tls = ["reqwest?/default-tls"]
|
||||
rustls = ["reqwest?/rustls-tls", "tokio-tungstenite?/rustls"]
|
||||
reqwest = ["dep:reqwest", "dep:tokio-tungstenite", "dep:tokio"]
|
||||
|
||||
server_fn/src/codec/bitcode.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
use super::{Patch, Post, Put};
use crate::{ContentType, Decodes, Encodes, Format, FormatType};
use bytes::Bytes;

/// Serializes and deserializes with [`bitcode`].
pub struct BitcodeEncoding;

impl ContentType for BitcodeEncoding {
const CONTENT_TYPE: &'static str = "application/bitcode";
}

impl FormatType for BitcodeEncoding {
const FORMAT_TYPE: Format = Format::Binary;
}

impl<T> Encodes<T> for BitcodeEncoding
where
T: bitcode::Encode,
{
type Error = std::convert::Infallible;

fn encode(value: &T) -> Result<Bytes, Self::Error> {
Ok(Bytes::from(bitcode::encode(value)))
}
}

impl<T> Decodes<T> for BitcodeEncoding
where
T: bitcode::DecodeOwned,
{
type Error = bitcode::Error;

fn decode(bytes: Bytes) -> Result<T, Self::Error> {
bitcode::decode(bytes.as_ref())
}
}

/// Pass arguments and receive responses using `bitcode` in a `POST` request.
pub type Bitcode = Post<BitcodeEncoding>;

/// Pass arguments and receive responses using `bitcode` in the body of a `PATCH` request.
/// **Note**: Browser support for `PATCH` requests without JS/WASM may be poor.
/// Consider using a `POST` request if functionality without JS/WASM is required.
pub type PatchBitcode = Patch<BitcodeEncoding>;

/// Pass arguments and receive responses using `bitcode` in the body of a `PUT` request.
/// **Note**: Browser support for `PUT` requests without JS/WASM may be poor.
/// Consider using a `POST` request if functionality without JS/WASM is required.
pub type PutBitcode = Put<BitcodeEncoding>;
|
||||
#[cfg(feature = "postcard")]
|
||||
pub use postcard::*;
|
||||
|
||||
#[cfg(feature = "bitcode")]
|
||||
mod bitcode;
|
||||
#[cfg(feature = "bitcode")]
|
||||
pub use bitcode::*;
|
||||
|
||||
mod patch;
|
||||
pub use patch::*;
|
||||
mod post;
|
||||
|
||||
@@ -568,7 +568,7 @@ pub trait FromServerFnError: std::fmt::Debug + Sized + 'static {
/// Converts a [`ServerFnErrorErr`] into the application-specific custom error type.
fn from_server_fn_error(value: ServerFnErrorErr) -> Self;

/// Converts the custom error type to a [`String`].
/// Serializes the custom error type to bytes, according to the encoding given by `Self::Encoding`.
fn ser(&self) -> Bytes {
Self::Encoder::encode(self).unwrap_or_else(|e| {
Self::Encoder::encode(&Self::from_server_fn_error(
@@ -581,7 +581,7 @@ pub trait FromServerFnError: std::fmt::Debug + Sized + 'static {
})
}

/// Deserializes the custom error type from a [`&str`].
/// Deserializes the custom error type, according to the encoding given by `Self::Encoding`.
fn de(data: Bytes) -> Self {
Self::Encoder::decode(data).unwrap_or_else(|e| {
ServerFnErrorErr::Deserialization(e.to_string()).into_app_error()
@@ -133,6 +133,8 @@ pub use ::bytes as bytes_export;
#[doc(hidden)]
pub use ::http as http_export;
use base64::{engine::general_purpose::STANDARD_NO_PAD, DecodeError, Engine};
#[cfg(feature = "bitcode")]
pub use bitcode;
// re-exported to make it possible to implement a custom Client without adding a separate
// dependency on `bytes`
pub use bytes::Bytes;
@@ -307,16 +309,18 @@ pub trait ServerFn: Send + Sized {
.await
.map(|res| (res, None))
.unwrap_or_else(|e| {
(
let mut response =
<<Self as ServerFn>::Server as crate::Server<
Self::Error,
Self::InputStreamError,
Self::OutputStreamError,
>>::Response::error_response(
Self::PATH, e.ser()
),
Some(e),
)
);
let content_type =
<Self::Error as FromServerFnError>::Encoder::CONTENT_TYPE;
response.content_type(content_type);
(response, Some(e))
});

// if it accepts HTML, we'll redirect to the Referer
@@ -72,6 +72,10 @@ mod axum {
let inner = self.call(req);
Box::pin(async move {
inner.await.unwrap_or_else(|e| {
// TODO: This does not set the Content-Type on the response. Doing so will
// require a breaking change in order to get the correct encoding from the
// error's `FromServerFnError::Encoder::CONTENT_TYPE` impl.
// Note: This only applies to middleware errors.
let err =
ser(ServerFnErrorErr::MiddlewareError(e.to_string()));
Response::<Body>::error_response(&path, err)
@@ -149,6 +153,10 @@ mod actix {
let inner = self.call(req);
Box::pin(async move {
inner.await.unwrap_or_else(|e| {
// TODO: This does not set the Content-Type on the response. Doing so will
// require a breaking change in order to get the correct encoding from the
// error's `FromServerFnError::Encoder::CONTENT_TYPE` impl.
// Note: This only applies to middleware errors.
let err =
ser(ServerFnErrorErr::MiddlewareError(e.to_string()));
ActixResponse::error_response(&path, err).take()
@@ -5,7 +5,7 @@ use crate::error::{
use actix_web::{
http::{
header,
header::{HeaderValue, LOCATION},
header::{HeaderValue, CONTENT_TYPE, LOCATION},
StatusCode,
},
HttpResponse,
@@ -80,6 +80,12 @@ impl Res for ActixResponse {
))
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.0.headers_mut().insert(CONTENT_TYPE, content_type);
}
}

fn redirect(&mut self, path: &str) {
if let Ok(path) = HeaderValue::from_str(path) {
*self.0.status_mut() = StatusCode::FOUND;
@@ -100,6 +100,13 @@ impl Res for Response<Body> {
.unwrap()
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.headers_mut()
.insert(header::CONTENT_TYPE, content_type);
}
}

fn redirect(&mut self, path: &str) {
if let Ok(path) = HeaderValue::from_str(path) {
self.headers_mut().insert(header::LOCATION, path);
@@ -60,6 +60,13 @@ impl Res for Response<Body> {
.unwrap()
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.headers_mut()
.insert(header::CONTENT_TYPE, content_type);
}
}

fn redirect(&mut self, path: &str) {
if let Ok(path) = HeaderValue::from_str(path) {
self.headers_mut().insert(header::LOCATION, path);
@@ -37,9 +37,14 @@ where

/// Represents the response as created by the server;
pub trait Res {
/// Converts an error into a response, with a `500` status code and the error text as its body.
/// Converts an error into a response, with a `500` status code and the error as its body.
fn error_response(path: &str, err: Bytes) -> Self;

/// Set the `Content-Type` header for the response.
fn content_type(&mut self, #[allow(unused_variables)] content_type: &str) {
// TODO 0.9: remove this method and default implementation. It is only included here
// to allow setting the `Content-Type` header for error responses without requiring a
// semver-incompatible change.
}
/// Redirect the response by setting a 302 code and Location header.
fn redirect(&mut self, path: &str);
}
@@ -103,6 +108,10 @@ impl Res for BrowserMockRes {
unreachable!()
}

fn content_type(&mut self, _content_type: &str) {
unreachable!()
}

fn redirect(&mut self, _path: &str) {
unreachable!()
}
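The `content_type` method above is added with a default, no-op body so that existing `Res` implementors keep compiling; only the built-in response types override it. A small, generic illustration of that pattern (hypothetical trait, not server_fn's actual one):

// Hypothetical trait, for illustration of the non-breaking default-method pattern.
trait Response {
    fn status(&self) -> u16;

    // added later; the empty default keeps older implementors compiling
    fn content_type(&mut self, _content_type: &str) {}
}

struct Plain(u16);

impl Response for Plain {
    fn status(&self) -> u16 {
        self.0
    }
    // no `content_type` override needed; the default applies
}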
@@ -10,7 +10,8 @@
feature = "multipart",
feature = "serde-lite",
feature = "cbor",
feature = "msgpack"
feature = "msgpack",
feature = "bitcode",
))
))]
@@ -5,7 +5,7 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "RPC for any web framework."
|
||||
readme = "../README.md"
|
||||
version = "0.8.7"
|
||||
version = "0.8.8"
|
||||
edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
|
||||
@@ -331,6 +331,7 @@ impl ServerFnCall {
enum PathInfo {
Serde,
Rkyv,
Bitcode,
None,
}

@@ -341,6 +342,12 @@ impl ServerFnCall {
Clone, #server_fn_path::rkyv::Archive, #server_fn_path::rkyv::Serialize, #server_fn_path::rkyv::Deserialize
},
),
Some("Bitcode") => (
PathInfo::Bitcode,
quote! {
Clone, #server_fn_path::bitcode::Encode, #server_fn_path::bitcode::Decode
},
),
Some("MultipartFormData")
| Some("Streaming")
| Some("StreamingText") => (PathInfo::None, quote! {}),
@@ -376,6 +383,7 @@ impl ServerFnCall {
#[serde(crate = #serde_path)]
}
}
PathInfo::Bitcode => quote! {},
PathInfo::Rkyv => quote! {},
PathInfo::None => quote! {},
};
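For `Bitcode` inputs and outputs, the macro swaps the usual serde derives for bitcode's and attaches no `#[serde(...)]` attributes. Roughly, and only as an illustration (the struct name and fields are hypothetical, and the real expansion contains far more than this), the derives chosen above amount to something like:

// Illustrative only: for a hypothetical `#[server(input = Bitcode)] async fn save(id: u32, note: String)`,
// the generated argument struct gets bitcode derives instead of serde ones.
#[derive(Clone, server_fn::bitcode::Encode, server_fn::bitcode::Decode)]
pub struct Save {
    id: u32,
    note: String,
}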
@@ -1,6 +1,6 @@
[package]
name = "tachys"
version = "0.2.7"
version = "0.2.10"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
@@ -1,6 +1,10 @@
use super::{Attribute, NextAttribute};
use crate::erased::{Erased, ErasedLocal};
use std::{any::TypeId, fmt::Debug};
use crate::{
erased::{Erased, ErasedLocal},
html::attribute::NamedAttributeKey,
renderer::{dom::Element, Rndr},
};
use std::{any::TypeId, fmt::Debug, mem};
#[cfg(feature = "ssr")]
use std::{future::Future, pin::Pin};

@@ -25,6 +29,7 @@ pub struct AnyAttribute {
resolve: fn(Erased) -> Pin<Box<dyn Future<Output = AnyAttribute> + Send>>,
#[cfg(feature = "ssr")]
dry_resolve: fn(&mut Erased),
keys: fn(&Erased) -> Vec<NamedAttributeKey>,
}

impl Clone for AnyAttribute {
@@ -44,6 +49,7 @@ pub struct AnyAttributeState {
type_id: TypeId,
state: ErasedLocal,
el: crate::renderer::types::Element,
keys: Vec<NamedAttributeKey>,
}

/// Converts an [`Attribute`] into [`AnyAttribute`].
@@ -84,6 +90,7 @@ where
) -> AnyAttributeState {
AnyAttributeState {
type_id: TypeId::of::<T>(),
keys: value.get_ref::<T>().keys(),
state: ErasedLocal::new(value.into_inner::<T>().build(&el)),
el,
}
@@ -96,6 +103,7 @@ where
) -> AnyAttributeState {
AnyAttributeState {
type_id: TypeId::of::<T>(),
keys: value.get_ref::<T>().keys(),
state: ErasedLocal::new(
value.into_inner::<T>().hydrate::<true>(&el),
),
@@ -110,6 +118,7 @@ where
) -> AnyAttributeState {
AnyAttributeState {
type_id: TypeId::of::<T>(),
keys: value.get_ref::<T>().keys(),
state: ErasedLocal::new(
value.into_inner::<T>().hydrate::<true>(&el),
),
@@ -140,6 +149,12 @@ where
async move {value.into_inner::<T>().resolve().await.into_any_attr()}.boxed()
}

fn keys<T: Attribute + 'static>(
value: &Erased,
) -> Vec<NamedAttributeKey> {
value.get_ref::<T>().keys()
}

let value = self.into_cloneable_owned();
AnyAttribute {
type_id: TypeId::of::<T::CloneableOwned>(),
@@ -158,6 +173,7 @@ where
resolve: resolve::<T::CloneableOwned>,
#[cfg(feature = "ssr")]
dry_resolve: dry_resolve::<T::CloneableOwned>,
keys: keys::<T::CloneableOwned>,
}
}
}
@@ -268,6 +284,10 @@ impl Attribute for AnyAttribute {
enabled."
);
}

fn keys(&self) -> Vec<NamedAttributeKey> {
(self.keys)(&self.value)
}
}

impl NextAttribute for Vec<AnyAttribute> {
@@ -286,7 +306,7 @@ impl Attribute for Vec<AnyAttribute> {
const MIN_LENGTH: usize = 0;

type AsyncOutput = Vec<AnyAttribute>;
type State = Vec<AnyAttributeState>;
type State = (Element, Vec<AnyAttributeState>);
type Cloneable = Vec<AnyAttribute>;
type CloneableOwned = Vec<AnyAttribute>;

@@ -321,13 +341,19 @@ impl Attribute for Vec<AnyAttribute> {
) -> Self::State {
#[cfg(feature = "hydrate")]
if FROM_SERVER {
self.into_iter()
.map(|attr| attr.hydrate::<true>(el))
.collect()
(
el.clone(),
self.into_iter()
.map(|attr| attr.hydrate::<true>(el))
.collect(),
)
} else {
self.into_iter()
.map(|attr| attr.hydrate::<false>(el))
.collect()
(
el.clone(),
self.into_iter()
.map(|attr| attr.hydrate::<false>(el))
.collect(),
)
}
#[cfg(not(feature = "hydrate"))]
{
@@ -340,13 +366,34 @@ impl Attribute for Vec<AnyAttribute> {
}

fn build(self, el: &crate::renderer::types::Element) -> Self::State {
self.into_iter().map(|attr| attr.build(el)).collect()
(
el.clone(),
self.into_iter().map(|attr| attr.build(el)).collect(),
)
}

fn rebuild(self, state: &mut Self::State) {
for (attr, state) in self.into_iter().zip(state.iter_mut()) {
attr.rebuild(state)
let (el, state) = state;
for old in mem::take(state) {
for key in old.keys {
match key {
NamedAttributeKey::InnerHtml => {
Rndr::set_inner_html(&old.el, "");
}
NamedAttributeKey::Property(prop_name) => {
Rndr::set_property(
&old.el,
&prop_name,
&wasm_bindgen::JsValue::UNDEFINED,
);
}
NamedAttributeKey::Attribute(key) => {
Rndr::remove_attribute(&old.el, &key);
}
}
}
}
*state = self.into_iter().map(|s| s.build(el)).collect();
}

fn into_cloneable(self) -> Self::Cloneable {
@@ -385,4 +432,8 @@ impl Attribute for Vec<AnyAttribute> {
enabled."
);
}

fn keys(&self) -> Vec<NamedAttributeKey> {
self.iter().flat_map(|s| s.keys()).collect()
}
}
@@ -4,7 +4,7 @@ use super::{
use crate::{
html::attribute::{
maybe_next_attr_erasure_macros::next_attr_combine, Attribute,
AttributeValue,
AttributeValue, NamedAttributeKey,
},
view::{add_attr::AddAnyAttr, Position, ToTemplate},
};
@@ -112,6 +112,12 @@ where
value: self.value.resolve().await,
}
}

fn keys(&self) -> Vec<NamedAttributeKey> {
vec![NamedAttributeKey::Attribute(
self.key.as_ref().to_string().into(),
)]
}
}

impl<K, V> NextAttribute for CustomAttr<K, V>
@@ -205,6 +205,14 @@ where
self.add_any_attr(enterkeyhint(value))
}

/// The `exportparts` attribute enables the sharing of parts of an element's shadow DOM with a containing document.
fn exportparts(
self,
value: V,
) -> <Self as AddAnyAttr>::Output<Attr<Exportparts, V>> {
self.add_any_attr(exportparts(value))
}

/// The `hidden` global attribute is a Boolean attribute indicating that the element is not yet, or is no longer, relevant.
fn hidden(self, value: V) -> <Self as AddAnyAttr>::Output<Attr<Hidden, V>> {
self.add_any_attr(hidden(value))
@@ -15,7 +15,7 @@ pub use key::*;
use maybe_next_attr_erasure_macros::{
next_attr_combine, next_attr_output_type,
};
use std::{fmt::Debug, future::Future};
use std::{borrow::Cow, fmt::Debug, future::Future};
pub use value::*;

/// Defines an attribute: anything that can modify an element.
@@ -75,6 +75,25 @@ pub trait Attribute: NextAttribute + Send {

/// “Resolves” this into a type that is not waiting for any asynchronous data.
fn resolve(self) -> impl Future<Output = Self::AsyncOutput> + Send;

/// Returns a set of attribute keys, associated with this attribute, if any.
///
/// This is only used to manage the removal of type-erased attributes, when needed.
fn keys(&self) -> Vec<NamedAttributeKey> {
// TODO: remove default implementation in 0.9, or fix this whole approach
// by making it easier to remove attributes
vec![]
}
}

/// An attribute key can be used to remove an attribute from an element.
pub enum NamedAttributeKey {
/// An ordinary attribute.
Attribute(Cow<'static, str>),
/// A DOM property.
Property(Cow<'static, str>),
/// The `inner_html` pseudo-attribute.
InnerHtml,
}
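A custom `Attribute` implementation can override `keys` to report what it sets, so that a type-erased rebuild can later remove those keys from the element. A sketch of such a key list, assuming `NamedAttributeKey` is exported from `tachys::html::attribute` as the module path in the diff suggests (the attribute and property names are hypothetical):

use std::borrow::Cow;
// assumed public path, matching the module shown in the diff
use tachys::html::attribute::NamedAttributeKey;

// Hypothetical key list for an attribute that sets a `data-state` attribute
// and a `value` DOM property; an inner_html-style attribute would report
// `NamedAttributeKey::InnerHtml` instead.
fn custom_keys() -> Vec<NamedAttributeKey> {
    vec![
        NamedAttributeKey::Attribute(Cow::Borrowed("data-state")),
        NamedAttributeKey::Property(Cow::Borrowed("value")),
    ]
}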
/// Adds another attribute to this one, returning a new attribute.
@@ -133,6 +152,10 @@ impl Attribute for () {
fn dry_resolve(&mut self) {}

async fn resolve(self) -> Self::AsyncOutput {}

fn keys(&self) -> Vec<NamedAttributeKey> {
vec![]
}
}

impl NextAttribute for () {
@@ -249,6 +272,10 @@ where
async fn resolve(self) -> Self::AsyncOutput {
Attr(self.0, self.1.resolve().await)
}

fn keys(&self) -> Vec<NamedAttributeKey> {
vec![NamedAttributeKey::Attribute(K::KEY.into())]
}
}

impl<K, V> NextAttribute for Attr<K, V>
@@ -353,6 +380,14 @@ macro_rules! impl_attr_for_tuples {
$($ty.resolve()),*
)
}

fn keys(&self) -> Vec<NamedAttributeKey> {
#[allow(non_snake_case)]
let ($first, $($ty,)*) = &self;
let mut buf = $first.keys();
$(buf.extend($ty.keys());)*
buf
}
}

impl<$first, $($ty),*> NextAttribute for ($first, $($ty,)*)
@@ -462,6 +497,14 @@ macro_rules! impl_attr_for_tuples_truncate_additional {
$($ty.resolve()),*
)
}

fn keys(&self) -> Vec<NamedAttributeKey> {
#[allow(non_snake_case)]
let ($first, $($ty,)*) = &self;
let mut buf = $first.keys();
$(buf.extend($ty.keys());)*
buf
}
}

impl<$first, $($ty),*> NextAttribute for ($first, $($ty,)*)
@@ -538,6 +581,10 @@ where
async fn resolve(self) -> Self::AsyncOutput {
(self.0.resolve().await,)
}

fn keys(&self) -> Vec<NamedAttributeKey> {
self.0.keys()
}
}

impl<A> NextAttribute for (A,)
@@ -1,6 +1,6 @@
use super::attribute::{
maybe_next_attr_erasure_macros::next_attr_output_type, Attribute,
NextAttribute,
NamedAttributeKey, NextAttribute,
};
use crate::{
html::attribute::maybe_next_attr_erasure_macros::next_attr_combine,
@@ -97,6 +97,10 @@ where
class: self.class.resolve().await,
}
}

fn keys(&self) -> Vec<NamedAttributeKey> {
vec![NamedAttributeKey::Attribute("class".into())]
}
}

impl<C> NextAttribute for Class<C>
@@ -3,7 +3,9 @@ use super::attribute::{
NextAttribute,
};
use crate::{
html::attribute::maybe_next_attr_erasure_macros::next_attr_combine,
html::attribute::{
maybe_next_attr_erasure_macros::next_attr_combine, NamedAttributeKey,
},
prelude::AddAnyAttr,
view::{Position, ToTemplate},
};
@@ -160,6 +162,10 @@ where
async fn resolve(self) -> Self::AsyncOutput {
self
}

fn keys(&self) -> Vec<NamedAttributeKey> {
vec![]
}
}

impl<T, D, P> NextAttribute for Directive<T, D, P>
@@ -212,7 +212,7 @@ html_self_closing_elements! {
/// The `<img>` HTML element embeds an image into the document.
img HtmlImageElement [alt, attributionsrc, crossorigin, decoding, elementtiming, fetchpriority, height, ismap, loading, referrerpolicy, sizes, src, srcset, usemap, width] true,
/// The `<input>` HTML element is used to create interactive controls for web-based forms in order to accept data from the user; a wide variety of types of input data and control widgets are available, depending on the device and user agent. The `<input>` element is one of the most powerful and complex in all of HTML due to the sheer number of combinations of input types and attributes.
input HtmlInputElement [accept, alt, autocomplete, capture, checked, disabled, form, formaction, formenctype, formmethod, formnovalidate, formtarget, height, list, max, maxlength, min, minlength, multiple, name, pattern, placeholder, popovertarget, popovertargetaction, readonly, required, size, src, step, r#type, value, width] true,
input HtmlInputElement [accept, alt, autocomplete, capture, checked, dirname, disabled, form, formaction, formenctype, formmethod, formnovalidate, formtarget, height, list, max, maxlength, min, minlength, multiple, name, pattern, placeholder, popovertarget, popovertargetaction, readonly, required, size, src, step, r#type, value, width] true,
/// The `<link>` HTML element specifies relationships between the current document and an external resource. This element is most commonly used to link to CSS, but is also used to establish site icons (both "favicon" style icons and icons for the home screen and apps on mobile devices) among other things.
link HtmlLinkElement [r#as, blocking, crossorigin, fetchpriority, href, hreflang, imagesizes, imagesrcset, integrity, media, rel, referrerpolicy, sizes, r#type] true,
/// The `<meta>` HTML element represents Metadata that cannot be represented by other HTML meta-related elements, like base, link, script, style or title.
@@ -227,7 +227,7 @@ html_self_closing_elements! {

html_elements! {
/// The `<a>` HTML element (or anchor element), with its href attribute, creates a hyperlink to web pages, files, email addresses, locations in the same page, or anything else a URL can address.
a HtmlAnchorElement [download, href, hreflang, ping, rel, target, r#type ] true,
a HtmlAnchorElement [download, href, hreflang, ping, referrerpolicy, rel, target, r#type ] true,
/// The `<abbr>` HTML element represents an abbreviation or acronym; the optional title attribute can provide an expansion or description for the abbreviation. If present, title must contain this full description and nothing else.
abbr HtmlElement [] true,
/// The `<address>` HTML element indicates that the enclosed HTML provides contact information for a person or people, or for an organization.
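With `dirname` and `referrerpolicy` added to the typed attribute lists above, they can be written like any other statically known attribute in a view. A hypothetical usage sketch (component name and attribute values are illustrative):

use leptos::prelude::*;

// Hypothetical component; values are illustrative only.
#[component]
pub fn ExternalLink() -> impl IntoView {
    view! {
        <a href="https://example.com" referrerpolicy="no-referrer">"Example"</a>
        <input name="comment" dirname="comment.dir"/>
    }
}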
Some files were not shown because too many files have changed in this diff.