Mirror of https://github.com/leptos-rs/leptos.git (synced 2025-12-27 16:54:41 -05:00)
Compare commits
39 Commits
Commit SHAs in this comparison (author and date columns omitted):
0b45ff5116, 13dc6f474d, 21218fc802, 65b5be2748, 7c30bb92f7, edf369f035, eb02304ee1, 578b672f14,
b20902aaa1, d3ad0c67b6, 62d8ec9cc5, 0d2523190d, 338da18ed2, 616aae4c3c, c6e59eeb43, c025ae59ac,
df46feee5d, bbf5bf9170, 7a3556bf34, d13936cab5, b303a35d76, a453b7d1bd, 3b9ccdf57e, 27cd423ebc,
b3907baf49, 9a8bb7eb75, 95db8c939e, 2bfa9952af, 4e445f43d6, 5f544f67ae, 68477d2b76, 5bd9469b93,
4bca70dc2f, d0295009cf, 3e8b5c9805, 924efa8ac1, b92a14228c, 68967fdad3, 44bc4fbc31
.github/workflows/get-example-changed.yml (vendored, 2 changed lines)
@@ -24,7 +24,7 @@ jobs:
         fetch-depth: 0
     - name: Get example files that changed
       id: changed-files
-      uses: tj-actions/changed-files@v46
+      uses: tj-actions/changed-files@v47
       with:
         files: |
           examples/**
.github/workflows/get-leptos-changed.yml (vendored, 2 changed lines)
@@ -18,7 +18,7 @@ jobs:
         fetch-depth: 0
     - name: Get source files that changed
       id: changed-source
-      uses: tj-actions/changed-files@v46
+      uses: tj-actions/changed-files@v47
      with:
        files_ignore: |
          .*/**/*
.github/workflows/run-cargo-make-task.yml (vendored, 2 changed lines)
@@ -88,7 +88,7 @@ jobs:
       run: trunk --version
     - name: Install Node.js
       if: contains(inputs.directory, 'examples')
-      uses: actions/setup-node@v4
+      uses: actions/setup-node@v5
       with:
         node-version: 20
     - uses: pnpm/action-setup@v4
Cargo.lock (generated, 854 changed lines): file diff suppressed because it is too large.
Cargo.toml (45 changed lines)
@@ -50,26 +50,26 @@ any_spawner = { path = "./any_spawner/", version = "0.3.0" }
 const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
 either_of = { path = "./either_of/", version = "0.1.6" }
 hydration_context = { path = "./hydration_context", version = "0.3.0" }
-leptos = { path = "./leptos", version = "0.8.9" }
+leptos = { path = "./leptos", version = "0.8.10" }
 leptos_config = { path = "./leptos_config", version = "0.8.7" }
-leptos_dom = { path = "./leptos_dom", version = "0.8.6" }
+leptos_dom = { path = "./leptos_dom", version = "0.8.7" }
 leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.5" }
-leptos_integration_utils = { path = "./integrations/utils", version = "0.8.5" }
-leptos_macro = { path = "./leptos_macro", version = "0.8.8" }
-leptos_router = { path = "./router", version = "0.8.7" }
+leptos_integration_utils = { path = "./integrations/utils", version = "0.8.6" }
+leptos_macro = { path = "./leptos_macro", version = "0.8.9" }
+leptos_router = { path = "./router", version = "0.8.8" }
 leptos_router_macro = { path = "./router_macro", version = "0.8.5" }
 leptos_server = { path = "./leptos_server", version = "0.8.5" }
 leptos_meta = { path = "./meta", version = "0.8.5" }
 next_tuple = { path = "./next_tuple", version = "0.1.0" }
 oco_ref = { path = "./oco", version = "0.2.1" }
 or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
-reactive_graph = { path = "./reactive_graph", version = "0.2.7" }
-reactive_stores = { path = "./reactive_stores", version = "0.2.5" }
+reactive_graph = { path = "./reactive_graph", version = "0.2.8" }
+reactive_stores = { path = "./reactive_stores", version = "0.3.0" }
 reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.6" }
 server_fn = { path = "./server_fn", version = "0.8.7" }
 server_fn_macro = { path = "./server_fn_macro", version = "0.8.7" }
 server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.5" }
-tachys = { path = "./tachys", version = "0.2.8" }
+tachys = { path = "./tachys", version = "0.2.9" }
 wasm_split_helpers = { path = "./wasm_split", version = "0.1.2" }
 wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.3" }

@@ -80,7 +80,7 @@ convert_case = { default-features = false, version = "0.8.0" }
 serde_json = { default-features = false, version = "1.0.143" }
 trybuild = { default-features = false, version = "1.0.110" }
 typed-builder = { default-features = false, version = "0.21.2" }
-thiserror = { default-features = false, version = "2.0.16" }
+thiserror = { default-features = false, version = "2.0.17" }
 wasm-bindgen = { default-features = false, version = "0.2.100" }
 indexmap = { default-features = false, version = "2.11.0" }
 rstml = { default-features = false, version = "0.12.1" }

@@ -97,7 +97,7 @@ send_wrapper = { default-features = false, version = "0.6.0" }
 tokio-test = { default-features = false, version = "0.4.4" }
 html-escape = { default-features = false, version = "0.2.13" }
 proc-macro-error2 = { default-features = false, version = "2.0.1" }
-const_format = { default-features = false, version = "0.2.34" }
+const_format = { default-features = false, version = "0.2.35" }
 gloo-net = { default-features = false, version = "0.6.0" }
 url = { default-features = false, version = "2.5.4" }
 tokio = { default-features = false, version = "1.47.1" }

@@ -107,46 +107,46 @@ wasm-bindgen-futures = { default-features = false, version = "0.4.50" }
 tower = { default-features = false, version = "0.5.2" }
 proc-macro2 = { default-features = false, version = "1.0.101" }
 serde = { default-features = false, version = "1.0.219" }
-parking_lot = { default-features = false, version = "0.12.4" }
-axum = { default-features = false, version = "0.8.4" }
+parking_lot = { default-features = false, version = "0.12.5" }
+axum = { default-features = false, version = "0.8.6" }
 serde_qs = { default-features = false, version = "0.15.0" }
 syn = { default-features = false, version = "2.0.106" }
 xxhash-rust = { default-features = false, version = "0.8.15" }
 paste = { default-features = false, version = "1.0.15" }
-quote = { default-features = false, version = "1.0.40" }
+quote = { default-features = false, version = "1.0.41" }
 web-sys = { default-features = false, version = "0.3.77" }
 js-sys = { default-features = false, version = "0.3.77" }
 rand = { default-features = false, version = "0.9.1" }
 serde-lite = { default-features = false, version = "0.5.0" }
-tokio-tungstenite = { default-features = false, version = "0.27.0" }
+tokio-tungstenite = { default-features = false, version = "0.28.0" }
 serial_test = { default-features = false, version = "3.2.0" }
 erased = { default-features = false, version = "0.1.2" }
 glib = { default-features = false, version = "0.20.12" }
 async-trait = { default-features = false, version = "0.1.89" }
 typed-builder-macro = { default-features = false, version = "0.21.0" }
 linear-map = { default-features = false, version = "1.2.0" }
-anyhow = { default-features = false, version = "1.0.99" }
+anyhow = { default-features = false, version = "1.0.100" }
 walkdir = { default-features = false, version = "2.5.0" }
 actix-ws = { default-features = false, version = "0.3.0" }
 tower-http = { default-features = false, version = "0.6.4" }
 prettyplease = { default-features = false, version = "0.2.37" }
 inventory = { default-features = false, version = "0.3.21" }
 config = { default-features = false, version = "0.15.14" }
-camino = { default-features = false, version = "1.1.11" }
+camino = { default-features = false, version = "1.2.1" }
 ciborium = { default-features = false, version = "0.2.2" }
 multer = { default-features = false, version = "3.1.0" }
 leptos-spin-macro = { default-features = false, version = "0.2.0" }
 sledgehammer_utils = { default-features = false, version = "0.3.1" }
 sledgehammer_bindgen = { default-features = false, version = "0.6.0" }
 wasm-streams = { default-features = false, version = "0.4.2" }
-rkyv = { default-features = false, version = "0.8.11" }
+rkyv = { default-features = false, version = "0.8.12" }
 temp-env = { default-features = false, version = "0.3.6" }
 uuid = { default-features = false, version = "1.18.0" }
 bytes = { default-features = false, version = "1.10.1" }
 http = { default-features = false, version = "1.3.1" }
-regex = { default-features = false, version = "1.11.2" }
+regex = { default-features = false, version = "1.11.3" }
 drain_filter_polyfill = { default-features = false, version = "0.1.3" }
-tempfile = { default-features = false, version = "3.21.0" }
+tempfile = { default-features = false, version = "3.23.0" }
 futures-lite = { default-features = false, version = "2.6.1" }
 log = { default-features = false, version = "0.4.27" }
 percent-encoding = { default-features = false, version = "2.3.2" }

@@ -158,10 +158,10 @@ postcard = { default-features = false, version = "1.1.3" }
 rmp-serde = { default-features = false, version = "1.3.0" }
 reqwest = { default-features = false, version = "0.12.23" }
 tower-layer = { default-features = false, version = "0.3.3" }
-attribute-derive = { default-features = false, version = "0.10.3" }
+attribute-derive = { default-features = false, version = "0.10.5" }
 insta = { default-features = false, version = "1.43.1" }
 codee = { default-features = false, version = "0.3.0" }
-actix-http = { default-features = false, version = "3.11.1" }
+actix-http = { default-features = false, version = "3.11.2" }
 wasm-bindgen-test = { default-features = false, version = "0.3.50" }
 rustversion = { default-features = false, version = "1.0.22" }
 getrandom = { default-features = false, version = "0.3.3" }

@@ -170,6 +170,9 @@ async-lock = { default-features = false, version = "3.4.1" }
 base16 = { default-features = false, version = "0.2.1" }
 digest = { default-features = false, version = "0.10.7" }
 sha2 = { default-features = false, version = "0.10.8" }
+subsecond = { default-features = false, version = "0.7.0-rc.0" }
+dioxus-cli-config = { default-features = false, version = "0.7.0-rc.0" }
+dioxus-devtools = { default-features = false, version = "0.7.0-rc.0" }

 [profile.release]
 codegen-units = 1
@@ -95,7 +95,7 @@ Here are some resources for learning more about Leptos:
 [`cargo-leptos`](https://github.com/leptos-rs/cargo-leptos) is a build tool that's designed to make it easy to build apps that run on both the client and the server, with seamless integration. The best way to get started with a real Leptos project right now is to use `cargo-leptos` and our starter templates for [Actix](https://github.com/leptos-rs/start) or [Axum](https://github.com/leptos-rs/start-axum).

 ```bash
-cargo install cargo-leptos
+cargo install cargo-leptos --locked
 cargo leptos new --git https://github.com/leptos-rs/start-axum
 cd [your project name]
 cargo leptos watch
@@ -11,3 +11,6 @@ edition.workspace = true

 [dependencies]
 pin-project-lite = { workspace = true, default-features = true }
+
+[dev-dependencies]
+anyhow.workspace = true
@@ -45,10 +45,10 @@ impl fmt::Display for Error {

 impl<T> From<T> for Error
 where
-    T: error::Error + Send + Sync + 'static,
+    T: Into<Box<dyn error::Error + Send + Sync + 'static>>,
 {
     fn from(value: T) -> Self {
-        Error(Arc::new(value))
+        Error(Arc::from(value.into()))
     }
 }
@@ -158,3 +158,32 @@ where
         this.inner.poll(cx)
     }
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::error::Error as StdError;
+
+    #[derive(Debug)]
+    struct MyError;
+
+    impl Display for MyError {
+        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+            write!(f, "MyError")
+        }
+    }
+
+    impl StdError for MyError {}
+
+    #[test]
+    fn test_from() {
+        let e = MyError;
+        let _le = Error::from(e);
+
+        let e = "some error".to_string();
+        let _le = Error::from(e);
+
+        let e = anyhow::anyhow!("anyhow error");
+        let _le = Error::from(e);
+    }
+}
examples/regression/e2e/features/issue_4251.feature (new file, 13 lines)
@@ -0,0 +1,13 @@
+@check_issue_4251
+Feature: Check that issue 4251 does not reappear
+
+  Scenario: Clicking a link to the same page you’re currently on should not add the page to the history stack.
+    Given I see the app
+    And I can access regression test 4324
+    When I select the link This page
+    And I select the link This page
+    And I select the link This page
+    Then I see the result is the string Issue4324
+    When I press the back button
+    And I select the link 4324
+    Then I see the result is the string Issue4324
examples/regression/e2e/features/issue_4324.feature (new file, 11 lines)
@@ -0,0 +1,11 @@
+@check_issue_4324
+Feature: Check that issue 4324 does not reappear
+
+  Scenario: Navigating to the same page after clicking "Back" should set the URL correctly
+    Given I see the app
+    And I can access regression test 4324
+    Then I see the path is /4324/
+    When I press the back button
+    Then I see the path is /
+    When I select the link 4324
+    Then I see the path is /4324/
examples/regression/e2e/tests/fixtures/check.rs (vendored, 10 added lines)
@@ -43,3 +43,13 @@ pub async fn element_value_is(
     assert_eq!(value.as_deref(), Some(expected));
     Ok(())
 }
+
+pub async fn path_is(client: &Client, expected_path: &str) -> Result<()> {
+    let url = client
+        .current_url()
+        .await
+        .expect("could not access current URL");
+    let path = url.path();
+    assert_eq!(expected_path, path);
+    Ok(())
+}
@@ -45,3 +45,12 @@ async fn i_refresh_the_browser(world: &mut AppWorld) -> Result<()> {

     Ok(())
 }
+
+#[given(regex = "^I press the back button$")]
+#[when(regex = "^I press the back button$")]
+async fn i_go_back(world: &mut AppWorld) -> Result<()> {
+    let client = &world.client;
+    client.back().await?;
+
+    Ok(())
+}
@@ -43,3 +43,10 @@ async fn i_see_the_value(
     check::element_value_is(client, &id, &value).await?;
     Ok(())
 }
+
+#[then(regex = r"^I see the path is (.*)$")]
+async fn i_see_the_path(world: &mut AppWorld, path: String) -> Result<()> {
+    let client = &world.client;
+    check::path_is(client, &path).await?;
+    Ok(())
+}
@@ -1,7 +1,7 @@
 use crate::{
     issue_4005::Routes4005, issue_4088::Routes4088, issue_4217::Routes4217,
-    issue_4285::Routes4285, issue_4296::Routes4296, pr_4015::Routes4015,
-    pr_4091::Routes4091,
+    issue_4285::Routes4285, issue_4296::Routes4296, issue_4324::Routes4324,
+    pr_4015::Routes4015, pr_4091::Routes4091,
 };
 use leptos::prelude::*;
 use leptos_meta::{MetaTags, *};

@@ -47,6 +47,7 @@ pub fn App() -> impl IntoView {
                 <Routes4005/>
                 <Routes4285/>
                 <Routes4296/>
+                <Routes4324/>
             </Routes>
         </main>
     </Router>

@@ -73,6 +74,7 @@ fn HomePage() -> impl IntoView {
             <li><a href="/4005/">"4005"</a></li>
             <li><a href="/4285/">"4285"</a></li>
             <li><a href="/4296/">"4296"</a></li>
+            <li><a href="/4324/">"4324"</a></li>
         </ul>
     </nav>
 }
examples/regression/src/issue_4324.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
+use leptos::prelude::*;
+#[allow(unused_imports)]
+use leptos_router::{
+    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
+};
+
+#[component]
+pub fn Routes4324() -> impl MatchNestedRoutes + Clone {
+    view! {
+        <Route path=path!("4324") view=Issue4324/>
+    }
+    .into_inner()
+}
+
+#[component]
+pub fn Issue4324() -> impl IntoView {
+    view! {
+        <a href="/4324/">"This page"</a>
+        <p id="result">"Issue4324"</p>
+    }
+}
@@ -4,6 +4,7 @@ mod issue_4088;
 mod issue_4217;
 mod issue_4285;
 mod issue_4296;
+mod issue_4324;
 mod pr_4015;
 mod pr_4091;
examples/subsecond_hot_patch/.gitignore (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
+# Generated by Cargo
+# will have compiled files and executables
+/target
+.DS_Store
+
+# These are backup files generated by rustfmt
+**/*.rs.bk
examples/subsecond_hot_patch/Cargo.toml (new file, 13 lines)
@@ -0,0 +1,13 @@
+[package]
+name = "subsecond_hot_patch"
+version = "0.1.0"
+authors = ["Greg Johnston <greg.johnston@gmail.com>"]
+edition = "2021"
+
+[dependencies]
+leptos = { path = "../../leptos", features = ["csr", "subsecond"] }
+leptos_router = { path = "../../router" }
+
+[features]
+default = ["web"]
+web = []
examples/subsecond_hot_patch/Dioxus.toml (new file, 21 lines)
@@ -0,0 +1,21 @@
+[application]
+
+[web.app]
+
+# HTML title tag content
+title = "ltest"
+
+# include `assets` in web platform
+[web.resource]
+
+# Additional CSS style files
+style = []
+
+# Additional JavaScript files
+script = []
+
+[web.resource.dev]
+
+# Javascript code file
+# serve: [dev-server] only
+script = []
examples/subsecond_hot_patch/Makefile.toml (new file, 1 line)
@@ -0,0 +1 @@
+extend = [{ path = "../cargo-make/main.toml" }]
examples/subsecond_hot_patch/README.md (new file, 31 lines)
@@ -0,0 +1,31 @@
+# Hot Patching with `dx`
+
+This is an experimental example exploring how to combine Leptos with the binary hot-patching provided by Dioxus's `subsecond` library and `dx` CLI.
+
+### Serving Your App
+
+This requires installing the Dioxus CLI version 0.7.0. At the time I'm writing this README, that does not yet have a stable release. Once `dioxus-cli` 0.7.0 has been released, you should use the latest stable release. Until then, I'd suggest installing from git:
+
+```sh
+cargo install dioxus-cli --git https://github.com/DioxusLabs/dioxus
+```
+
+Then you can run the example with `dx serve --hot-patch --platform web`.
+
+### Hot Patching
+
+Changes to your application should be reflected in your app without a full rebuild and reload.
+
+### Limitations
+
+Currently we only support hot-patching for reactive view functions. You probably want to use `AnyView` (via `.into_any()`) on any views that will be hot-patched, so they can be rebuilt correctly despite their types changing when the structure of the view tree changes.
+
+If you are using `leptos_router` this actually works quite well, as every route’s view is erased to `AnyView` and the router itself is a reactive view function: in other words, changes inside any route should be hot-patched in any case.
+
+Note that any hot-patch will cause all render effects to run again. This means that some client-side state (like the values of signals) will be wiped out.
+
+### Build Tooling
+
+The preference of the Dioxus team is that all hot-patching work that uses their `subsecond` library also use their `dioxus-cli`. As this demo shows, it's completely possible to use `dioxus-cli` to build and run a Leptos project. We do not plan to build `subsecond` into our own build tooling at this time.
+
+**This is an experiment/POC. It is being published because members of the community have found it useful and have asked for the support to be merged in its current state. Further development and bugfixes are a relatively low priority at this time.**
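To make the `.into_any()` advice in this README concrete, here is a minimal sketch (not part of this changeset; the `Dashboard` and `Panel` components are invented for illustration) of a view written so its hot-patched child can be rebuilt even when its structure, and therefore its type, changes:

```rust
use leptos::prelude::*;

#[component]
fn Panel() -> impl IntoView {
    view! { <p>"Edit this text and save to trigger a hot patch."</p> }
}

#[component]
fn Dashboard() -> impl IntoView {
    view! {
        <h1>"Dashboard"</h1>
        // Wrapping the child in a closure makes it a reactive view function,
        // and `.into_any()` erases its concrete view type, so the panel can
        // be rebuilt in place after a patch changes its structure.
        {move || view! { <Panel/> }.into_any()}
    }
}
```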
examples/subsecond_hot_patch/assets/favicon.ico (new binary file, 130 KiB): binary file not shown.
examples/subsecond_hot_patch/assets/header.svg (new file, 20 lines, 23 KiB): diff suppressed because one or more lines are too long.
examples/subsecond_hot_patch/assets/main.css (new file, 46 lines)
@@ -0,0 +1,46 @@
+/* App-wide styling */
+body {
+    background-color: #0f1116;
+    color: #ffffff;
+    font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
+    margin: 20px;
+}
+
+#hero {
+    margin: 0;
+    display: flex;
+    flex-direction: column;
+    justify-content: center;
+    align-items: center;
+}
+
+#links {
+    width: 400px;
+    text-align: left;
+    font-size: x-large;
+    color: white;
+    display: flex;
+    flex-direction: column;
+}
+
+#links a {
+    color: white;
+    text-decoration: none;
+    margin-top: 20px;
+    margin: 10px 0px;
+    border: white 1px solid;
+    border-radius: 5px;
+    padding: 10px;
+}
+
+#links a:hover {
+    background-color: #1f1f1f;
+    cursor: pointer;
+}
+
+#header {
+    max-width: 1200px;
+}
examples/subsecond_hot_patch/src/main.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
+use leptos::{prelude::*, subsecond::connect_to_hot_patch_messages};
+use leptos_router::{
+    components::{Route, Router, Routes},
+    path,
+};
+
+fn main() {
+    // connect to DX CLI and patch the WASM binary whenever we receive a message
+    connect_to_hot_patch_messages();
+
+    // wrapping App here in a closure so we can hot-reload it, because we only do that
+    // for reactive views right now. changing anything will re-run App and update the view
+    mount_to_body(|| App);
+}
+
+#[component]
+fn App() -> impl IntoView {
+    view! {
+        <nav>
+            <a href="/">"Home"</a>
+            <a href="/about">"About"</a>
+        </nav>
+        <Router>
+            <Routes fallback=|| "Not found">
+                <Route path=path!("/") view=HomePage/>
+                <Route path=path!("/about") view=About/>
+            </Routes>
+        </Router>
+    }
+}
+
+#[component]
+fn HomePage() -> impl IntoView {
+    view! {
+        <h1>"Home Page"</h1>
+    }
+}
+
+#[component]
+fn About() -> impl IntoView {
+    view! {
+        <h1>"About"</h1>
+    }
+}
examples/tailwind_csr/Trunk.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
+[tools]
+tailwindcss = "4.1.13"
+
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Utilities to help build server integrations for the Leptos web framework."
-version = "0.8.5"
+version = "0.8.6"
 rust-version.workspace = true
 edition.workspace = true
@@ -68,7 +68,8 @@ pub trait ExtendResponse: Sized {
             let nonce =
                 use_nonce().map(|n| n.to_string()).unwrap_or_default();
             if let Some(manifest) = use_context::<WasmSplitManifest>() {
-                let (pkg_path, manifest) = &*manifest.0.read_value();
+                let (pkg_path, manifest, wasm_split_file) =
+                    &*manifest.0.read_value();
                 let prefetches = prefetches.0.read_value();

                 let all_prefetches = prefetches.iter().flat_map(|key| {

@@ -90,7 +91,7 @@ pub trait ExtendResponse: Sized {
                     .to_html();
                 }
                 _ = view! {
-                    <Link rel="modulepreload" href=format!("{pkg_path}/__wasm_split.js") crossorigin=nonce/>
+                    <Link rel="modulepreload" href=format!("{pkg_path}/{wasm_split_file}") crossorigin=nonce/>
                 }
                 .to_html();
             }
@@ -1,6 +1,6 @@
 [package]
 name = "leptos"
-version = "0.8.9"
+version = "0.8.10"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"

@@ -58,6 +58,9 @@ slotmap = { workspace = true, default-features = true }
 futures = { workspace = true, default-features = true }
 send_wrapper = { workspace = true, default-features = true }
 wasm_split_helpers.workspace = true
+subsecond = { workspace = true, default-features = true, optional = true }
+dioxus-cli-config = { workspace = true, default-features = true, optional = true }
+dioxus-devtools = { workspace = true, default-features = true, optional = true }

 [features]
 hydration = [

@@ -102,6 +105,16 @@ trace-component-props = [
 ]
 delegation = ["tachys/delegation"]
 islands-router = ["tachys/mark_branches"]
+subsecond = [
+  "reactive_graph/subsecond",
+  "dep:subsecond",
+  "dep:dioxus-cli-config",
+  "dep:dioxus-devtools",
+  "web-sys/Location",
+  "web-sys/MessageEvent",
+  "web-sys/WebSocket",
+  "web-sys/Window",
+]

 [dev-dependencies]
 tokio = { features = [
@@ -65,16 +65,56 @@ pub fn HydrationScripts(
     if let Some(splits) = SPLIT_MANIFEST.get_or_init(|| {
         let root = root.clone().unwrap_or_default();

+        let (wasm_split_js, wasm_split_manifest) = if options.hash_files {
+            let hash_path = std::env::current_exe()
+                .map(|path| {
+                    path.parent().map(|p| p.to_path_buf()).unwrap_or_default()
+                })
+                .unwrap_or_default()
+                .join(options.hash_file.as_ref());
+            let hashes = std::fs::read_to_string(&hash_path)
+                .expect("failed to read hash file");
+
+            let mut split =
+                "__wasm_split.______________________.js".to_string();
+            let mut manifest = "__wasm_split_manifest.json".to_string();
+            for line in hashes.lines() {
+                let line = line.trim();
+                if !line.is_empty() {
+                    if let Some((file, hash)) = line.split_once(':') {
+                        if file == "manifest" {
+                            manifest.clear();
+                            manifest.push_str("__wasm_split_manifest.");
+                            manifest.push_str(hash.trim());
+                            manifest.push_str(".json");
+                        }
+                        if file == "split" {
+                            split.clear();
+                            split.push_str("__wasm_split.");
+                            split.push_str(hash.trim());
+                            split.push_str(".js");
+                        }
+                    }
+                }
+            }
+            (split, manifest)
+        } else {
+            (
+                "__wasm_split.______________________.js".to_string(),
+                "__wasm_split_manifest.json".to_string(),
+            )
+        };
+
         let site_dir = &options.site_root;
         let pkg_dir = &options.site_pkg_dir;
         let path = PathBuf::from(site_dir.to_string());
-        let path = path
-            .join(pkg_dir.to_string())
-            .join("__wasm_split_manifest.json");
+        let path = path.join(pkg_dir.to_string()).join(wasm_split_manifest);
         let file = std::fs::read_to_string(path).ok()?;

         let manifest = WasmSplitManifest(ArcStoredValue::new((
             format!("{root}/{pkg_dir}"),
             serde_json::from_str(&file).expect("could not read manifest file"),
+            wasm_split_js,
         )));

         Some(manifest)
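For orientation, the hash-file parsing above expects simple `name:hash` lines; a file along the following lines (the hash values are invented for illustration) would make the split script and manifest resolve to `__wasm_split.5f2a9c1d0b7e.js` and `__wasm_split_manifest.83c40d19aa21.json`:

```
split: 5f2a9c1d0b7e
manifest: 83c40d19aa21
```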
@@ -1,5 +1,4 @@
 #![deny(missing_docs)]
-#![forbid(unsafe_code)]

 //! # About Leptos
 //!

@@ -306,9 +305,15 @@ pub use tachys::mathml as math;
 #[doc(inline)]
 pub use tachys::svg;

+#[cfg(feature = "subsecond")]
+/// Utilities for using binary hot-patching with [`subsecond`].
+pub mod subsecond;
+
 /// Utilities for simple isomorphic logging to the console or terminal.
 pub mod logging {
-    pub use leptos_dom::{debug_warn, error, log, warn};
+    pub use leptos_dom::{
+        debug_error, debug_log, debug_warn, error, log, warn,
+    };
 }

 /// Utilities for working with asynchronous tasks.

@@ -392,7 +397,8 @@ pub fn prefetch_lazy_fn_on_server(id: &'static str) {
 #[derive(Clone, Debug, Default)]
 pub struct WasmSplitManifest(
     pub reactive_graph::owner::ArcStoredValue<(
-        String,
-        std::collections::HashMap<String, Vec<String>>,
+        String,                                         // the pkg root
+        std::collections::HashMap<String, Vec<String>>, // preloads
+        String, // the name of the __wasm_split.js file
     )>,
 );
leptos/src/subsecond.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
+use dioxus_devtools::DevserverMsg;
+use wasm_bindgen::{prelude::Closure, JsCast};
+use web_sys::{js_sys::JsString, MessageEvent, WebSocket};
+
+/// Sets up a WebSocket connection to the `dx` CLI, waiting for incoming hot-patching messages
+/// and patching the WASM binary appropriately.
+//
+// Note: This is a stripped-down version of Dioxus's `make_ws` from `dioxus_web`
+// It's essentially copy-pasted here because it's not pub there.
+// Would love to just take a dependency on that to be able to use it and deduplicate.
+//
+// https://github.com/DioxusLabs/dioxus/blob/main/packages/web/src/devtools.rs#L36
+pub fn connect_to_hot_patch_messages() {
+    // Get the location of the devserver, using the current location plus the /_dioxus path
+    // The idea here being that the devserver is always located on the /_dioxus behind a proxy
+    let location = web_sys::window().unwrap().location();
+    let url = format!(
+        "{protocol}//{host}/_dioxus?build_id={build_id}",
+        protocol = match location.protocol().unwrap() {
+            prot if prot == "https:" => "wss:",
+            _ => "ws:",
+        },
+        host = location.host().unwrap(),
+        build_id = dioxus_cli_config::build_id(),
+    );
+
+    let ws = WebSocket::new(&url).unwrap();
+
+    ws.set_onmessage(Some(
+        Closure::<dyn FnMut(MessageEvent)>::new(move |e: MessageEvent| {
+            let Ok(text) = e.data().dyn_into::<JsString>() else {
+                return;
+            };
+
+            // The devserver messages have some &'static strs in them, so we need to leak the source string
+            let string: String = text.into();
+            let string = Box::leak(string.into_boxed_str());
+
+            if let Ok(DevserverMsg::HotReload(msg)) =
+                serde_json::from_str::<DevserverMsg>(string)
+            {
+                if let Some(jump_table) = msg.jump_table.as_ref().cloned() {
+                    if msg.for_build_id == Some(dioxus_cli_config::build_id()) {
+                        let our_pid = if cfg!(target_family = "wasm") {
+                            None
+                        } else {
+                            Some(std::process::id())
+                        };
+
+                        if msg.for_pid == our_pid {
+                            unsafe { subsecond::apply_patch(jump_table) }
+                                .unwrap();
+                        }
+                    }
+                }
+            }
+        })
+        .into_js_value()
+        .as_ref()
+        .unchecked_ref(),
+    ));
+}
@@ -1,6 +1,6 @@
 [package]
 name = "leptos_dom"
-version = "0.8.6"
+version = "0.8.7"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
@@ -463,7 +463,7 @@ pub fn set_interval_with_handle(

 #[inline(never)]
 fn si(
-    cb: Box<dyn Fn()>,
+    cb: Box<dyn FnMut()>,
     duration: Duration,
 ) -> Result<IntervalHandle, JsValue> {
     let cb = Closure::wrap(cb).into_js_value();
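The switch from `Box<dyn Fn()>` to `Box<dyn FnMut()>` in the private `si` helper suggests interval callbacks can now mutate captured state. A rough sketch of the kind of closure this enables, assuming the public `set_interval_with_handle` accepts `FnMut` as well (only the private helper is visible in this hunk):

```rust
use leptos_dom::helpers::set_interval_with_handle;
use std::time::Duration;

fn start_ticker() {
    // `ticks += 1` requires `FnMut`; with a plain `Fn()` bound this closure
    // would be rejected.
    let mut ticks: u32 = 0;
    let handle = set_interval_with_handle(
        move || {
            ticks += 1;
            leptos_dom::log!("tick #{ticks}");
        },
        Duration::from_secs(1),
    )
    .expect("failed to set interval");

    // Call `handle.clear()` later to stop the interval.
    let _ = handle;
}
```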
@@ -1,6 +1,6 @@
 [package]
 name = "leptos_macro"
-version = "0.8.8"
+version = "0.8.9"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
@@ -25,9 +25,8 @@ use std::{
 use syn::{
     punctuated::Pair::{End, Punctuated},
     spanned::Spanned,
-    Expr,
-    Expr::Tuple,
-    ExprArray, ExprLit, ExprRange, Lit, LitStr, RangeLimits, Stmt,
+    Expr::{self, Tuple},
+    ExprArray, ExprLit, ExprPath, ExprRange, Lit, LitStr, RangeLimits, Stmt,
 };

 #[derive(Clone, Copy, PartialEq, Eq)]

@@ -1679,7 +1678,7 @@ fn attribute_value(
 }

 // Keep list alphabetized for binary search
-const TYPED_EVENTS: [&str; 126] = [
+const TYPED_EVENTS: [&str; 127] = [
     "DOMContentLoaded",
     "abort",
     "afterprint",

@@ -1775,6 +1774,7 @@ const TYPED_EVENTS: [&str; 126] = [
     "reset",
     "resize",
     "scroll",
+    "scrollend",
     "securitypolicyviolation",
     "seeked",
     "seeking",

@@ -1871,6 +1871,28 @@ pub(crate) fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
     }
 }

+pub(crate) fn full_path_from_tag_name(tag_name: &NodeName) -> Option<ExprPath> {
+    match tag_name {
+        NodeName::Path(path) => Some(path.clone()),
+        NodeName::Block(_) => {
+            let span = tag_name.span();
+            proc_macro_error2::emit_error!(
+                span,
+                "blocks not allowed in tag-name position"
+            );
+            None
+        }
+        _ => {
+            let span = tag_name.span();
+            proc_macro_error2::emit_error!(
+                span,
+                "punctuated names not allowed in slots"
+            );
+            None
+        }
+    }
+}
+
 pub(crate) fn directive_call_from_attribute_node(
     attr: &KeyedAttribute,
     directive_name: &str,
@@ -1,6 +1,6 @@
 use super::{
     component_builder::maybe_optimised_component_children,
-    convert_to_snake_case, ident_from_tag_name,
+    convert_to_snake_case, full_path_from_tag_name,
 };
 use crate::view::{fragment_to_tokens, utils::filter_prefixed_attrs, TagType};
 use proc_macro2::{Ident, TokenStream, TokenTree};

@@ -24,7 +24,7 @@ pub(crate) fn slot_to_tokens(
         node.name().to_string()
     });

-    let component_name = ident_from_tag_name(node.name());
+    let component_path = full_path_from_tag_name(node.name());

     let Some(parent_slots) = parent_slots else {
         proc_macro_error2::emit_error!(

@@ -190,7 +190,7 @@ pub(crate) fn slot_to_tokens(

     let slot = quote_spanned! {node.span()=>
         {
-            let slot = #component_name::builder()
+            let slot = #component_path::builder()
                 #(#props)*
                 #(#slots)*
                 #children
@@ -1,6 +1,6 @@
 [package]
 name = "reactive_graph"
-version = "0.2.7"
+version = "0.2.8"
 authors = ["Greg Johnston"]
 license = "MIT"
 readme = "../README.md"

@@ -27,6 +27,7 @@ async-lock = { workspace = true, default-features = true }
 send_wrapper = { features = [
   "futures",
 ], workspace = true, default-features = true }
+subsecond = { workspace = true, default-features = true, optional = true }
 indexmap = { workspace = true, default-features = true }

 [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]

@@ -51,6 +52,7 @@ hydration = ["dep:hydration_context"]
 effects = [
 ] # whether to run effects: should be disabled for something like server rendering
 sandboxed-arenas = []
+subsecond = ["dep:subsecond"]

 [package.metadata.docs.rs]
 all-features = true
@@ -65,6 +65,7 @@ impl Dispose for ImmediateEffect {

 impl ImmediateEffect {
     /// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
+    /// (Unless [batch] is used.)
     ///
     /// NOTE: this requires a `Fn` function because it might recurse.
     /// Use [Self::new_mut] to pass a `FnMut` function, it'll panic on recursion.

@@ -82,6 +83,7 @@ impl ImmediateEffect {
         Self { inner: Some(inner) }
     }

     /// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
+    /// (Unless [batch] is used.)
     ///
     /// # Panics
     /// Panics on recursion or if triggered in parallel. Also see [Self::new]

@@ -93,8 +95,10 @@ impl ImmediateEffect {
         Self::new(move || fun.try_lock().expect(MSG)())
     }

     /// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
+    /// (Unless [batch] is used.)
     ///
     /// NOTE: this requires a `Fn` function because it might recurse.
     /// Use [Self::new_mut_scoped] to pass a `FnMut` function, it'll panic on recursion.
+    ///
     /// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
     #[track_caller]
     pub fn new_scoped(fun: impl Fn() + Send + Sync + 'static) {

@@ -102,6 +106,19 @@ impl ImmediateEffect {
         on_cleanup(move || effect.dispose());
     }

+    /// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
+    /// (Unless [batch] is used.)
+    ///
+    /// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
+    ///
+    /// # Panics
+    /// Panics on recursion or if triggered in parallel. Also see [Self::new_scoped]
+    #[track_caller]
+    pub fn new_mut_scoped(fun: impl FnMut() + Send + Sync + 'static) {
+        let effect = Self::new_mut(fun);
+
+        on_cleanup(move || effect.dispose());
+    }

     /// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
     ///

@@ -130,6 +147,41 @@ impl DefinedAt for ImmediateEffect {
     }
 }

+/// Defers any [ImmediateEffect]s from running until the end of the function.
+///
+/// NOTE: this affects only [ImmediateEffect]s, not other effects.
+///
+/// NOTE: this is rarely needed, but it is useful for example when multiple signals
+/// need to be updated atomically (for example a double-bound signal tree).
+pub fn batch<T>(f: impl FnOnce() -> T) -> T {
+    struct ExecuteOnDrop;
+    impl Drop for ExecuteOnDrop {
+        fn drop(&mut self) {
+            let effects = {
+                let mut batch = inner::BATCH.write().or_poisoned();
+                batch.take().unwrap().into_inner().expect("lock poisoned")
+            };
+            // TODO: Should we skip the effects if it's panicking?
+            for effect in effects {
+                effect.update_if_necessary();
+            }
+        }
+    }
+    let mut execute_on_drop = None;
+    {
+        let mut batch = inner::BATCH.write().or_poisoned();
+        if batch.is_none() {
+            execute_on_drop = Some(ExecuteOnDrop);
+        } else {
+            // Nested batching has no effect.
+        }
+        *batch = Some(batch.take().unwrap_or_default());
+    }
+    let ret = f();
+    drop(execute_on_drop);
+    ret
+}
+
 mod inner {
     use crate::{
         graph::{

@@ -140,6 +192,7 @@ mod inner {
         owner::Owner,
         traits::DefinedAt,
     };
+    use indexmap::IndexSet;
     use or_poisoned::OrPoisoned;
     use std::{
         panic::Location,

@@ -147,6 +200,11 @@ mod inner {
         thread::{self, ThreadId},
     };

+    /// Only the [super::batch] function ever writes to the outer RwLock.
+    /// While the effects will write to the inner one.
+    pub(super) static BATCH: RwLock<Option<RwLock<IndexSet<AnySubscriber>>>> =
+        RwLock::new(None);
+
     /// Handles subscription logic for effects.
     ///
     /// To handle parallelism and recursion we assign ordered (1..) ids to each run.

@@ -202,6 +260,8 @@ mod inner {
         fun: impl Fn() + Send + Sync + 'static,
     ) -> Arc<RwLock<EffectInner>> {
         let owner = Owner::new();
+        #[cfg(any(debug_assertions, leptos_debuginfo))]
+        let defined_at = Location::caller();

         Arc::new_cyclic(|weak| {
             let any_subscriber = AnySubscriber(

@@ -211,7 +271,7 @@ mod inner {

             RwLock::new(EffectInner {
                 #[cfg(any(debug_assertions, leptos_debuginfo))]
-                defined_at: Location::caller(),
+                defined_at,
                 owner,
                 state: ReactiveNodeState::Dirty,
                 run_count_start: 0,

@@ -260,6 +320,17 @@ mod inner {
             ReactiveNodeState::Dirty => true,
         };

+        {
+            if let Some(batch) = &*BATCH.read().or_poisoned() {
+                let mut batch = batch.write().or_poisoned();
+                let subscriber =
+                    self.read().or_poisoned().any_subscriber.clone();
+
+                batch.insert(subscriber);
+                return needs_update;
+            }
+        }
+
         if needs_update {
             let mut guard = self.write().or_poisoned();
@@ -9,6 +9,8 @@ use crate::{
 };
 use futures::StreamExt;
 use or_poisoned::OrPoisoned;
+#[cfg(feature = "subsecond")]
+use std::sync::Mutex;
 use std::{
     fmt::Debug,
     future::{Future, IntoFuture},

@@ -49,13 +51,39 @@ impl<T> Debug for RenderEffect<T> {
     }
 }

+#[cfg(feature = "subsecond")]
+type CurrentHotPtr = Box<dyn Fn() -> Option<subsecond::HotFnPtr> + Send + Sync>;
+
 impl<T> RenderEffect<T>
 where
     T: 'static,
 {
     /// Creates a new render effect, which immediately runs `fun`.
     pub fn new(fun: impl FnMut(Option<T>) -> T + 'static) -> Self {
-        Self::new_with_value_erased(Box::new(fun), None)
+        #[cfg(feature = "subsecond")]
+        let (hot_fn_ptr, fun) = {
+            let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
+            (
+                {
+                    let fun = Arc::downgrade(&fun);
+                    let wrapped = send_wrapper::SendWrapper::new(move || {
+                        fun.upgrade()
+                            .map(|n| n.lock().or_poisoned().ptr_address())
+                    });
+                    // it's not redundant, it's due to the SendWrapper deref
+                    #[allow(clippy::redundant_closure)]
+                    Box::new(move || wrapped())
+                },
+                move |prev| fun.lock().or_poisoned().call((prev,)),
+            )
+        };
+
+        Self::new_with_value_erased(
+            Box::new(fun),
+            None,
+            #[cfg(feature = "subsecond")]
+            hot_fn_ptr,
+        )
     }

     /// Creates a new render effect with an initial value.

@@ -63,7 +91,30 @@ where
         fun: impl FnMut(Option<T>) -> T + 'static,
         initial_value: Option<T>,
     ) -> Self {
-        Self::new_with_value_erased(Box::new(fun), initial_value)
+        #[cfg(feature = "subsecond")]
+        let (hot_fn_ptr, fun) = {
+            let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
+            (
+                {
+                    let fun = Arc::downgrade(&fun);
+                    let wrapped = send_wrapper::SendWrapper::new(move || {
+                        fun.upgrade()
+                            .map(|n| n.lock().or_poisoned().ptr_address())
+                    });
+                    // it's not redundant, it's due to the SendWrapper deref
+                    #[allow(clippy::redundant_closure)]
+                    Box::new(move || wrapped())
+                },
+                move |prev| fun.lock().or_poisoned().call((prev,)),
+            )
+        };
+
+        Self::new_with_value_erased(
+            Box::new(fun),
+            initial_value,
+            #[cfg(feature = "subsecond")]
+            hot_fn_ptr,
+        )
     }

     /// Creates a new render effect, which immediately runs `fun`.

@@ -71,6 +122,11 @@ where
         fun: impl FnMut(Option<T>) -> T + 'static,
         value: impl IntoFuture<Output = T> + 'static,
     ) -> Self {
+        #[cfg(feature = "subsecond")]
+        let mut fun = subsecond::HotFn::current(fun);
+        #[cfg(feature = "subsecond")]
+        let fun = move |prev| fun.call((prev,));
+
         Self::new_with_async_value_erased(
             Box::new(fun),
             Box::pin(value.into_future()),

@@ -79,8 +135,13 @@ where
     }

     fn new_with_value_erased(
-        mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
+        #[allow(unused_mut)] mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
         initial_value: Option<T>,
+        // this argument can be used to invalidate individual effects in the future
+        // in present experiments, I have found that it is not actually granular enough to make a difference
+        #[allow(unused)]
+        #[cfg(feature = "subsecond")]
+        hot_fn_ptr: CurrentHotPtr,
     ) -> Self {
         // codegen optimisation:
         fn prep() -> (Owner, Arc<RwLock<EffectInner>>, crate::channel::Receiver)

@@ -104,12 +165,56 @@ where
             let _ = initial_value;
             let _ = owner;
             let _ = &mut rx;
-            let _ = &mut fun;
+            let _ = fun;
         }

         #[cfg(feature = "effects")]
         {
             let subscriber = inner.to_any_subscriber();

+            #[cfg(all(feature = "subsecond", debug_assertions))]
+            let mut fun = {
+                use crate::graph::ReactiveNode;
+                use rustc_hash::FxHashMap;
+                use std::sync::{Arc, LazyLock, Mutex};
+                use subsecond::HotFnPtr;
+
+                static HOT_RELOAD_SUBSCRIBERS: LazyLock<
+                    Mutex<FxHashMap<AnySubscriber, (HotFnPtr, CurrentHotPtr)>>,
+                > = LazyLock::new(|| {
+                    subsecond::register_handler(Arc::new(|| {
+                        HOT_RELOAD_SUBSCRIBERS.lock().or_poisoned().retain(
+                            |subscriber, (prev_ptr, hot_fn_ptr)| {
+                                match hot_fn_ptr() {
+                                    None => false,
+                                    Some(curr_hot_ptr) => {
+                                        if curr_hot_ptr != *prev_ptr {
+                                            crate::log_warning(format_args!(
+                                                "{prev_ptr:?} <> \
+                                                 {curr_hot_ptr:?}",
+                                            ));
+                                            *prev_ptr = curr_hot_ptr;
+
+                                            subscriber.mark_dirty();
+                                        }
+                                        true
+                                    }
+                                }
+                            },
+                        );
+                    }));
+                    Default::default()
+                });
+
+                let mut fun = subsecond::HotFn::current(fun);
+                let initial_ptr = hot_fn_ptr().unwrap();
+                HOT_RELOAD_SUBSCRIBERS
+                    .lock()
+                    .or_poisoned()
+                    .insert(subscriber.clone(), (initial_ptr, hot_fn_ptr));
+                move |prev| fun.call((prev,))
+            };
+
             *value.write().or_poisoned() = Some(
                 owner.with(|| subscriber.with_observer(|| fun(initial_value))),
             );

@@ -230,6 +335,11 @@ where
     pub fn new_isomorphic(
         fun: impl FnMut(Option<T>) -> T + Send + Sync + 'static,
    ) -> Self {
+        #[cfg(feature = "subsecond")]
+        let mut fun = subsecond::HotFn::current(fun);
+        #[cfg(feature = "subsecond")]
+        let fun = move |prev| fun.call((prev,));
+
         fn erased<T: Send + Sync + 'static>(
             mut fun: Box<dyn FnMut(Option<T>) -> T + Send + Sync + 'static>,
         ) -> RenderEffect<T> {
@@ -209,6 +209,25 @@ impl Owner {
         this
     }

+    /// Returns the parent of this `Owner`, if any.
+    ///
+    /// None when:
+    /// - This is a root owner
+    /// - The parent has been dropped
+    pub fn parent(&self) -> Option<Owner> {
+        self.inner
+            .read()
+            .or_poisoned()
+            .parent
+            .as_ref()
+            .and_then(|p| p.upgrade())
+            .map(|inner| Owner {
+                inner,
+                #[cfg(feature = "hydration")]
+                shared_context: self.shared_context.clone(),
+            })
+    }
+
     /// Creates a new `Owner` that is the child of the current `Owner`, if any.
     pub fn child(&self) -> Self {
         let parent = Some(Arc::downgrade(&self.inner));
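To make the new accessor concrete, here is a small hypothetical helper (not from this changeset) that combines `Owner::current()` with the new `parent()` to count how many ancestors the current reactive owner has:

```rust
use reactive_graph::owner::Owner;

/// Hypothetical helper: counts the ancestors of the current `Owner`.
/// Returns 0 if there is no current owner, or if it is a root with no parent.
fn owner_depth() -> usize {
    let mut depth = 0;
    let mut current = Owner::current();
    while let Some(owner) = current {
        // Walk up the ownership tree until `parent()` returns `None`.
        current = owner.parent();
        if current.is_some() {
            depth += 1;
        }
    }
    depth
}
```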
@@ -1542,7 +1542,6 @@ pub mod read {

     impl<T, S> ReadUntracked for MaybeProp<T, S>
     where
-        T: Clone,
         S: Storage<Option<T>> + Storage<SignalTypes<Option<T>, S>>,
     {
         type Value = ReadGuard<Option<T>, SignalReadGuard<Option<T>, S>>;
@@ -225,3 +225,38 @@ fn threaded_chaos_effect() {
     let values: Vec<_> = signals.iter().map(|s| s.get_untracked()).collect();
     println!("FINAL: {values:?}");
 }
+
+#[cfg(feature = "effects")]
+#[test]
+fn test_batch() {
+    use imports::*;
+    use reactive_graph::{effect::batch, owner::StoredValue};
+
+    let owner = Owner::new();
+    owner.set();
+
+    let a = RwSignal::new(0);
+    let b = RwSignal::new(0);
+
+    let values = StoredValue::new(Vec::new());
+
+    ImmediateEffect::new_scoped(move || {
+        println!("{} = {}", a.get(), b.get());
+        values.write_value().push((a.get(), b.get()));
+    });
+
+    a.set(1);
+    b.set(1);
+
+    batch(move || {
+        a.set(2);
+        b.set(2);
+
+        batch(move || {
+            a.set(3);
+            b.set(3);
+        });
+    });
+
+    assert_eq!(values.get_value(), vec![(0, 0), (1, 0), (1, 1), (3, 3)]);
+}
@@ -1,6 +1,6 @@
 [package]
 name = "reactive_stores"
-version = "0.2.5"
+version = "0.3.0"
 authors = ["Greg Johnston"]
 license = "MIT"
 readme = "../README.md"
@@ -30,6 +30,8 @@ where
     defined_at: &'static Location<'static>,
     path: Arc<dyn Fn() -> StorePath + Send + Sync>,
     get_trigger: Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
+    get_trigger_unkeyed:
+        Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
     read: Arc<dyn Fn() -> Option<StoreFieldReader<T>> + Send + Sync>,
     pub(crate) write:
         Arc<dyn Fn() -> Option<StoreFieldWriter<T>> + Send + Sync>,

@@ -103,6 +105,10 @@ impl<T> StoreField for ArcField<T> {
         (self.get_trigger)(path)
     }

+    fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
+        (self.get_trigger_unkeyed)(path)
+    }
+
     fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
         (self.path)()
     }

@@ -132,6 +138,9 @@ where
             defined_at: Location::caller(),
             path: Arc::new(move || value.path().into_iter().collect()),
             get_trigger: Arc::new(move |path| value.get_trigger(path)),
+            get_trigger_unkeyed: Arc::new(move |path| {
+                value.get_trigger_unkeyed(path)
+            }),
             read: Arc::new(move || value.reader().map(StoreFieldReader::new)),
             write: Arc::new(move || value.writer().map(StoreFieldWriter::new)),
             keys: Arc::new(move || value.keys()),

@@ -158,6 +167,10 @@ where
             get_trigger: Arc::new({
                 let value = value.clone();
                 move |path| value.get_trigger(path)
             }),
+            get_trigger_unkeyed: Arc::new({
+                let value = value.clone();
+                move |path| value.get_trigger_unkeyed(path)
+            }),
             read: Arc::new({
                 let value = value.clone();
                 move || value.reader().map(StoreFieldReader::new)

@@ -202,6 +215,10 @@ where
             get_trigger: Arc::new({
                 let value = value.clone();
                 move |path| value.get_trigger(path)
             }),
+            get_trigger_unkeyed: Arc::new({
+                let value = value.clone();
+                move |path| value.get_trigger_unkeyed(path)
+            }),
             read: Arc::new({
                 let value = value.clone();
                 move || value.reader().map(StoreFieldReader::new)

@@ -245,6 +262,10 @@ where
             get_trigger: Arc::new({
                 let value = value.clone();
                 move |path| value.get_trigger(path)
             }),
+            get_trigger_unkeyed: Arc::new({
+                let value = value.clone();
+                move |path| value.get_trigger_unkeyed(path)
+            }),
             read: Arc::new({
                 let value = value.clone();
                 move || value.reader().map(StoreFieldReader::new)

@@ -289,6 +310,10 @@ where
             get_trigger: Arc::new({
                 let value = value.clone();
                 move |path| value.get_trigger(path)
             }),
+            get_trigger_unkeyed: Arc::new({
+                let value = value.clone();
+                move |path| value.get_trigger_unkeyed(path)
+            }),
             read: Arc::new({
                 let value = value.clone();
                 move || value.reader().map(StoreFieldReader::new)

@@ -337,6 +362,10 @@ where
             get_trigger: Arc::new({
                 let value = value.clone();
                 move |path| value.get_trigger(path)
             }),
+            get_trigger_unkeyed: Arc::new({
+                let value = value.clone();
+                move |path| value.get_trigger_unkeyed(path)
+            }),
             read: Arc::new({
                 let value = value.clone();
                 move || value.reader().map(StoreFieldReader::new)

@@ -368,6 +397,7 @@ impl<T> Clone for ArcField<T> {
             defined_at: self.defined_at,
             path: self.path.clone(),
             get_trigger: Arc::clone(&self.get_trigger),
+            get_trigger_unkeyed: Arc::clone(&self.get_trigger_unkeyed),
             read: Arc::clone(&self.read),
             write: Arc::clone(&self.write),
             keys: Arc::clone(&self.keys),
@@ -68,6 +68,11 @@ where
     fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
         self.inner.get_trigger(path)
     }

+    fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
+        self.inner.get_trigger_unkeyed(path)
+    }
+
     fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
         self.inner.path()
     }
@@ -59,6 +59,13 @@ where
             .unwrap_or_default()
     }

+    fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
+        self.inner
+            .try_get_value()
+            .map(|inner| inner.get_trigger_unkeyed(path))
+            .unwrap_or_default()
+    }
+
     fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
         self.inner
             .try_get_value()
@@ -84,6 +84,10 @@ where
         self.inner.get_trigger(path)
     }

+    fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
+        self.inner.get_trigger_unkeyed(path)
+    }
+
     fn reader(&self) -> Option<Self::Reader> {
         let inner = self.inner.reader()?;
         let index = self.index;

@@ -109,6 +113,23 @@ where
     fn keys(&self) -> Option<KeyMap> {
         self.inner.keys()
     }
+
+    fn track_field(&self) {
+        let mut full_path = self.path().into_iter().collect::<StorePath>();
+        let trigger = self.get_trigger(self.path().into_iter().collect());
+        trigger.this.track();
+        trigger.children.track();
+
+        // tracks `this` for all ancestors: i.e., it will track any change that is made
+        // directly to one of its ancestors, but not a change made to a *child* of an ancestor
+        // (which would end up with every subfield tracking its own siblings, because they are
+        // children of its parent)
+        while !full_path.is_empty() {
+            full_path.pop();
+            let inner = self.get_trigger(full_path.clone());
+            inner.this.track();
+        }
+    }
 }

 impl<Inner, Prev> DefinedAt for AtIndex<Inner, Prev>
@@ -110,6 +110,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
Some(Mapped::new_with_guard(inner, self.read))
|
||||
@@ -432,7 +436,7 @@ where
|
||||
let this = keys
|
||||
.with_field_keys(
|
||||
inner.clone(),
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -444,6 +448,10 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
|
||||
@@ -452,7 +460,7 @@ where
|
||||
let index = keys
|
||||
.with_field_keys(
|
||||
inner_path,
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -476,7 +484,7 @@ where
|
||||
let index = keys
|
||||
.with_field_keys(
|
||||
inner_path.clone(),
|
||||
|keys| keys.get(&self.key),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -624,9 +632,7 @@ where
|
||||
let latest = self.latest_keys();
|
||||
keys.with_field_keys(
|
||||
inner_path,
|
||||
|keys| {
|
||||
keys.update(latest);
|
||||
},
|
||||
|keys| ((), keys.update(latest)),
|
||||
|| self.latest_keys(),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -364,13 +364,18 @@ where
|
||||
})
|
||||
}
|
||||
|
||||
fn update(&mut self, iter: impl IntoIterator<Item = K>) {
|
||||
fn update(
|
||||
&mut self,
|
||||
iter: impl IntoIterator<Item = K>,
|
||||
) -> Vec<(usize, StorePathSegment)> {
|
||||
let new_keys = iter
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(idx, key)| (key, idx))
|
||||
.collect::<FxHashMap<K, usize>>();
|
||||
|
||||
let mut index_keys = Vec::with_capacity(new_keys.len());
|
||||
|
||||
// remove old keys and recycle the slots
|
||||
self.keys.retain(|key, old_entry| match new_keys.get(key) {
|
||||
Some(idx) => {
|
||||
@@ -385,14 +390,17 @@ where
|
||||
|
||||
// add new keys
|
||||
for (key, idx) in new_keys {
|
||||
// the suggestion doesn't compile because we need &mut for self.next_key(),
|
||||
// and we don't want to call that until after the check
|
||||
#[allow(clippy::map_entry)]
|
||||
if !self.keys.contains_key(&key) {
|
||||
let path = self.next_key();
|
||||
self.keys.insert(key, (path, idx));
|
||||
match self.keys.get(&key) {
|
||||
Some((segment, idx)) => index_keys.push((*idx, *segment)),
|
||||
None => {
|
||||
let path = self.next_key();
|
||||
self.keys.insert(key, (path, idx));
|
||||
index_keys.push((idx, path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
index_keys
|
||||
}
|
||||
}
|
||||
|
||||
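
For orientation: the new return value of `update` pairs the position of each newly inserted key with the `StorePathSegment` allocated for it, so the caller can record an index-to-segment mapping. A reduced, self-contained sketch of that shape, using plain standard-library types rather than the real `FieldKeys` (and ignoring the re-indexing of existing keys that the real implementation also performs):

    use std::collections::HashMap;

    // Simplified stand-in for FieldKeys::update: assign a fresh segment (here
    // just a counter) to every key not seen before, and report the
    // (index, segment) pairs for the newly added keys.
    fn update(
        existing: &mut HashMap<String, usize>,
        next_segment: &mut usize,
        keys: &[&str],
    ) -> Vec<(usize, usize)> {
        let mut added = Vec::new();
        for (idx, key) in keys.iter().enumerate() {
            if !existing.contains_key(*key) {
                let segment = *next_segment;
                *next_segment += 1;
                existing.insert((*key).to_string(), segment);
                added.push((idx, segment));
            }
        }
        added
    }

    fn main() {
        let mut keys = HashMap::new();
        let mut next = 0;
        assert_eq!(update(&mut keys, &mut next, &["a", "b"]), vec![(0, 0), (1, 1)]);
        // only the new key "c" (now at index 2) produces an entry
        assert_eq!(update(&mut keys, &mut next, &["a", "b", "c"]), vec![(2, 2)]);
    }
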
@@ -415,14 +423,20 @@ type HashMap<K, V> = send_wrapper::SendWrapper<

/// A map of the keys for a keyed subfield.
#[derive(Clone)]
pub struct KeyMap(HashMap<StorePath, Box<dyn Any + Send + Sync>>);
pub struct KeyMap(
HashMap<StorePath, Box<dyn Any + Send + Sync>>,
HashMap<(StorePath, usize), StorePathSegment>,
);

impl Default for KeyMap {
fn default() -> Self {
#[cfg(not(target_arch = "wasm32"))]
return Self(Default::default());
return Self(Default::default(), Default::default());
#[cfg(target_arch = "wasm32")]
return Self(send_wrapper::SendWrapper::new(Default::default()));
return Self(
send_wrapper::SendWrapper::new(Default::default()),
send_wrapper::SendWrapper::new(Default::default()),
);
}
}

@@ -430,31 +444,70 @@ impl KeyMap {
fn with_field_keys<K, T>(
&self,
path: StorePath,
fun: impl FnOnce(&mut FieldKeys<K>) -> T,
fun: impl FnOnce(&mut FieldKeys<K>) -> (T, Vec<(usize, StorePathSegment)>),
initialize: impl FnOnce() -> Vec<K>,
) -> Option<T>
where
K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static,
{
let initial_keys = initialize();

#[cfg(not(target_arch = "wasm32"))]
let mut entry = self
.0
.entry(path)
.or_insert_with(|| Box::new(FieldKeys::new(initialize())));
.entry(path.clone())
.or_insert_with(|| Box::new(FieldKeys::new(initial_keys)));

#[cfg(target_arch = "wasm32")]
let entry = if !self.0.borrow().contains_key(&path) {
Some(Box::new(FieldKeys::new(initialize())))
Some(Box::new(FieldKeys::new(initial_keys)))
} else {
None
};
#[cfg(target_arch = "wasm32")]
let mut map = self.0.borrow_mut();
#[cfg(target_arch = "wasm32")]
let entry = map.entry(path).or_insert_with(|| entry.unwrap());
let entry = map.entry(path.clone()).or_insert_with(|| entry.unwrap());

let entry = entry.downcast_mut::<FieldKeys<K>>()?;
Some(fun(entry))
let (result, new_keys) = fun(entry);
if !new_keys.is_empty() {
for (idx, segment) in new_keys {
#[cfg(not(target_arch = "wasm32"))]
self.1.insert((path.clone(), idx), segment);

#[cfg(target_arch = "wasm32")]
self.1.borrow_mut().insert((path.clone(), idx), segment);
}
}
Some(result)
}

fn contains_key(&self, key: &StorePath) -> bool {
#[cfg(not(target_arch = "wasm32"))]
{
self.0.contains_key(key)
}

#[cfg(target_arch = "wasm32")]
{
self.0.borrow_mut().contains_key(key)
}
}

fn get_key_for_index(
&self,
key: &(StorePath, usize),
) -> Option<StorePathSegment> {
#[cfg(not(target_arch = "wasm32"))]
{
self.1.get(key).as_deref().copied()
}

#[cfg(target_arch = "wasm32")]
{
self.1.borrow().get(key).as_deref().copied()
}
}
}
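
Reading note (an interpretation of intent, not text from the diff): the second map added to `KeyMap` indexes `(path, index)` pairs to the key segment currently stored at that index, which is what `get_key_for_index` consults when an unkeyed path has to be resolved through a keyed field. The shape of that lookup, reduced to standard-library types with invented values:

    use std::collections::HashMap;

    fn main() {
        // Hypothetical stand-in: the "todos" field is keyed; the key segment for
        // the item currently at index 2 happens to be 7, so an unkeyed lookup of
        // todos[2] can be rewritten to the keyed segment 7.
        let mut index_to_segment: HashMap<(&str, usize), usize> = HashMap::new();
        index_to_segment.insert(("todos", 2), 7);
        assert_eq!(index_to_segment.get(&("todos", 2)).copied(), Some(7));
    }
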
@@ -832,6 +885,30 @@ mod tests {
}
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Foo {
id: i32,
bar: Bar,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Bar {
bar_signature: i32,
baz: Baz,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Baz {
more_data: i32,
baw: Baw,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Baw {
more_data: i32,
end: i32,
}

#[tokio::test]
async fn mutating_field_triggers_effect() {
_ = any_spawner::Executor::init_tokio();
@@ -1112,30 +1189,6 @@ mod tests {

_ = any_spawner::Executor::init_tokio();

#[derive(Debug, Clone, Store, Patch, Default)]
struct Foo {
id: i32,
bar: Bar,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Bar {
bar_signature: i32,
baz: Baz,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Baz {
more_data: i32,
baw: Baw,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Baw {
more_data: i32,
end: i32,
}

let store = Store::new(Foo {
id: 42,
bar: Bar {
@@ -1219,4 +1272,107 @@ mod tests {
assert_eq!(more_data_runs.get_value(), 3);
assert_eq!(baz_baw_end_runs.get_value(), 3);
}

#[tokio::test]
async fn changing_parent_notifies_subfield() {
_ = any_spawner::Executor::init_tokio();

let combined_count = Arc::new(AtomicUsize::new(0));

let store = Store::new(Foo {
id: 42,
bar: Bar {
bar_signature: 69,
baz: Baz {
more_data: 9999,
baw: Baw {
more_data: 22,
end: 1112,
},
},
},
});

let tracked_field = store.bar().baz().more_data();

Effect::new_sync({
let combined_count = Arc::clone(&combined_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("first run");
} else {
println!("next run");
}

// we only track `more`, but this should still be notified
// when its parent fields `bar` or `baz` change
println!("{:?}", *tracked_field.read());
combined_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
tick().await;

store.bar().baz().set(Baz {
more_data: 42,
baw: Baw {
more_data: 11,
end: 31,
},
});
tick().await;
store.bar().set(Bar {
bar_signature: 23,
baz: Baz {
more_data: 32,
baw: Baw {
more_data: 432,
end: 423,
},
},
});
tick().await;

assert_eq!(combined_count.load(Ordering::Relaxed), 3);
}

#[tokio::test]
async fn changing_parent_notifies_unkeyed_child() {
_ = any_spawner::Executor::init_tokio();

let combined_count = Arc::new(AtomicUsize::new(0));

let store = Store::new(data());

let tracked_field = store.todos().at_unkeyed(0);

Effect::new_sync({
let combined_count = Arc::clone(&combined_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("first run");
} else {
println!("next run");
}

// we only track `more`, but this should still be notified
// when its parent fields `bar` or `baz` change
println!("{:?}", *tracked_field.read());
combined_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
tick().await;

store.todos().write().pop();
tick().await;

store.todos().write().push(Todo {
label: "another one".into(),
completed: false,
});
tick().await;

assert_eq!(combined_count.load(Ordering::Relaxed), 3);
}
}

@@ -35,7 +35,7 @@ where
// don't track the writer for the whole store
writer.untrack();
let mut notify = |path: &StorePath| {
self.triggers_for_path(path.to_owned()).notify();
self.triggers_for_path_unkeyed(path.to_owned()).notify();
};
writer.patch_field(new, &path, &mut notify);
}

@@ -11,6 +11,15 @@ impl IntoIterator for StorePath {
}
}

impl<'a> IntoIterator for &'a StorePath {
type Item = &'a StorePathSegment;
type IntoIter = std::slice::Iter<'a, StorePathSegment>;

fn into_iter(self) -> Self::IntoIter {
self.0.iter()
}
}

impl From<Vec<StorePathSegment>> for StorePath {
fn from(value: Vec<StorePathSegment>) -> Self {
Self(value)
@@ -18,6 +27,16 @@ impl From<Vec<StorePathSegment>> for StorePath {
}

impl StorePath {
/// Creates a new path.
pub fn new() -> Self {
Self(Vec::new())
}

/// Creates a new path with storage capacity for `capacity` segments.
pub fn with_capacity(capacity: usize) -> Self {
Self(Vec::with_capacity(capacity))
}

/// Adds a new segment to the path.
pub fn push(&mut self, segment: impl Into<StorePathSegment>) {
self.0.push(segment.into());
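
The `StorePath` additions above (construction, `with_capacity`, `push`, and borrowing iteration) compose in the obvious way. A small hypothetical sketch, not from the changeset, which assumes `StorePath`/`StorePathSegment` are publicly exported and that `StorePathSegment`'s existing `From<usize>` conversion is available:

    use reactive_stores::{StorePath, StorePathSegment};

    fn main() {
        let mut path = StorePath::with_capacity(2);
        path.push(StorePathSegment::from(0)); // assumed From<usize> conversion
        path.push(StorePathSegment::from(3));
        // the new IntoIterator for &StorePath walks the path without consuming it
        assert_eq!((&path).into_iter().count(), 2);
    }
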
@@ -26,6 +26,14 @@ pub trait StoreField: Sized {
#[track_caller]
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger;

/// Returns the trigger that tracks access and updates for this field.
///
/// This uses *unkeyed* paths: i.e., if any field in the path is keyed, it will
/// try to look up the key for the item at the index given in the path, rather than
/// the keyed item.
#[track_caller]
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger;

/// The path of this field (see [`StorePath`]).
#[track_caller]
fn path(&self) -> impl IntoIterator<Item = StorePathSegment>;
@@ -84,6 +92,26 @@ pub trait StoreField: Sized {

triggers
}

/// Returns triggers for the field at the given path, and all parent fields
fn triggers_for_path_unkeyed(&self, path: StorePath) -> Vec<ArcTrigger> {
// see notes on triggers_for_path() for additional comments on implementation

let trigger = self.get_trigger_unkeyed(path.clone());
let mut full_path = path;

let mut triggers = Vec::with_capacity(full_path.len() + 2);
triggers.push(trigger.this.clone());
triggers.push(trigger.children.clone());
while !full_path.is_empty() {
full_path.pop();
let inner = self.get_trigger_unkeyed(full_path.clone());
triggers.push(inner.children.clone());
}
triggers.reverse();

triggers
}
}

impl<T> StoreField for ArcStore<T>
@@ -101,6 +129,26 @@ where
trigger
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
let orig_path = path.clone();

let mut path = StorePath::with_capacity(orig_path.len());
for segment in &orig_path {
let parent_is_keyed = self.keys.contains_key(&path);

if parent_is_keyed {
let key = self
.keys
.get_key_for_index(&(path.clone(), segment.0))
.expect("could not find key for index");
path.push(key);
} else {
path.push(*segment);
}
}
self.get_trigger(path)
}

#[track_caller]
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
iter::empty()
@@ -141,6 +189,14 @@ where
.unwrap_or_default()
}

#[track_caller]
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner
.try_get_value()
.map(|n| n.get_trigger_unkeyed(path))
.unwrap_or_default()
}

#[track_caller]
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner

@@ -88,6 +88,10 @@ where
self.inner.get_trigger(path)
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger_unkeyed(path)
}

fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
Some(Mapped::new_with_guard(inner, self.read))
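
To restate the resolution rule implemented in `get_trigger_unkeyed` above: the path is walked segment by segment, and whenever the segments resolved so far name a keyed collection, the next segment is treated as a plain index and swapped for the key segment currently stored at that index, after which the ordinary keyed `get_trigger` is used. A standalone sketch of that rewrite step, with illustrative names and plain `usize` segments rather than the real API:

    // Illustrative only: `is_keyed` and `key_for_index` stand in for the KeyMap
    // queries; the real code works on StorePath/StorePathSegment.
    fn resolve_unkeyed(
        path: &[usize],
        is_keyed: impl Fn(&[usize]) -> bool,
        key_for_index: impl Fn(&[usize], usize) -> usize,
    ) -> Vec<usize> {
        let mut resolved = Vec::with_capacity(path.len());
        for &segment in path {
            if is_keyed(&resolved) {
                // parent is keyed: replace the index with its current key segment
                resolved.push(key_for_index(&resolved, segment));
            } else {
                resolved.push(segment);
            }
        }
        resolved
    }
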
@@ -1,6 +1,6 @@
[package]
name = "leptos_router"
version = "0.8.7"
version = "0.8.8"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"

@@ -364,6 +364,12 @@ where
#[derive(Debug)]
pub(crate) struct MatchedRoute(pub String, pub AnyView);

impl MatchedRoute {
fn branch_name(&self) -> String {
format!("{:?}", self.1.as_type_id())
}
}

impl Render for MatchedRoute {
type State = <AnyView as Render>::State;

@@ -414,8 +420,9 @@ impl RenderHtml for MatchedRoute {
mark_branches: bool,
extra_attrs: Vec<AnyAttribute>,
) {
if mark_branches && escape {
buf.open_branch(&self.0);
let branch_name = (mark_branches && escape).then(|| self.branch_name());
if let Some(bn) = &branch_name {
buf.open_branch(bn);
}
self.1.to_html_with_buf(
buf,
@@ -424,8 +431,8 @@ impl RenderHtml for MatchedRoute {
mark_branches,
extra_attrs,
);
if mark_branches && escape {
buf.close_branch(&self.0);
if let Some(bn) = &branch_name {
buf.close_branch(bn);
if *position == Position::NextChildAfterText {
*position = Position::NextChild;
}
@@ -442,8 +449,9 @@ impl RenderHtml for MatchedRoute {
) where
Self: Sized,
{
if mark_branches && escape {
buf.open_branch(&self.0);
let branch_name = (mark_branches && escape).then(|| self.branch_name());
if let Some(bn) = &branch_name {
buf.open_branch(bn);
}
self.1.to_html_async_with_buf::<OUT_OF_ORDER>(
buf,
@@ -452,8 +460,8 @@ impl RenderHtml for MatchedRoute {
mark_branches,
extra_attrs,
);
if mark_branches && escape {
buf.close_branch(&self.0);
if let Some(bn) = &branch_name {
buf.close_branch(bn);
if *position == Position::NextChildAfterText {
*position = Position::NextChild;
}
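
The effect of the `MatchedRoute` change above is that SSR branch markers are derived from the rendered view's `TypeId` (through the new `AnyView::as_type_id`) rather than from the matched path string, and are only computed when `mark_branches && escape`. A rough sketch of the naming idea, illustrative only and not the router's actual code:

    use std::any::TypeId;

    // The marker text is the Debug form of a TypeId, which is stable for a given
    // view type within one build of the binary.
    fn branch_name_for<T: 'static>() -> String {
        format!("{:?}", TypeId::of::<T>())
    }

    fn main() {
        struct HomePage;
        struct AboutPage;
        assert_ne!(branch_name_for::<HomePage>(), branch_name_for::<AboutPage>());
    }
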
@@ -3,7 +3,7 @@ use crate::{hooks::use_navigate, params::ParamsMap};
use core::fmt;
use futures::channel::oneshot;
use js_sys::{try_iter, Array, JsString};
use leptos::prelude::*;
use leptos::{ev, prelude::*};
use or_poisoned::OrPoisoned;
use reactive_graph::{
signal::ArcRwSignal,
@@ -11,13 +11,12 @@ use reactive_graph::{
};
use std::{
borrow::Cow,
boxed::Box,
string::String,
sync::{Arc, Mutex},
};
use tachys::dom::{document, window};
use wasm_bindgen::{closure::Closure, JsCast, JsValue};
use web_sys::{Event, UrlSearchParams};
use wasm_bindgen::{JsCast, JsValue};
use web_sys::UrlSearchParams;

#[derive(Clone)]
pub struct BrowserUrl {
@@ -116,7 +115,6 @@ impl LocationProvider for BrowserUrl {
}

fn init(&self, base: Option<Cow<'static, str>>) {
let window = window();
let navigate = {
let url = self.url.clone();
let pending = Arc::clone(&self.pending_navigation);
@@ -159,37 +157,32 @@ impl LocationProvider for BrowserUrl {

let handle_anchor_click =
handle_anchor_click(base, Self::parse_with_base, navigate);
let closure = Closure::wrap(Box::new(move |ev: Event| {

let click_handle = window_event_listener(ev::click, move |ev| {
if let Err(e) = handle_anchor_click(ev) {
#[cfg(feature = "tracing")]
tracing::error!("{e:?}");
#[cfg(not(feature = "tracing"))]
web_sys::console::error_1(&e);
}
}) as Box<dyn FnMut(Event)>)
.into_js_value();
window
.add_event_listener_with_callback(
"click",
closure.as_ref().unchecked_ref(),
)
.expect(
"couldn't add `click` listener to `window` to handle `<a>` \
clicks",
);
});

// handle popstate event (forward/back navigation)
let cb = {
let popstate_cb = {
let url = self.url.clone();
let path_stack = self.path_stack.clone();
let is_back = self.is_back.clone();
move || match Self::current() {
Ok(new_url) => {
let stack = path_stack.read_value();
let mut stack = path_stack.write_value();
let is_navigating_back = stack.len() == 1
|| (stack.len() >= 2
&& stack.get(stack.len() - 2) == Some(&new_url));

if is_navigating_back {
stack.pop();
}

is_back.set(is_navigating_back);

url.set(new_url);
@@ -202,14 +195,14 @@ impl LocationProvider for BrowserUrl {
}
}
};
let closure =
Closure::wrap(Box::new(cb) as Box<dyn Fn()>).into_js_value();
window
.add_event_listener_with_callback(
"popstate",
closure.as_ref().unchecked_ref(),
)
.expect("couldn't add `popstate` listener to `window`");

let popstate_handle =
window_event_listener(ev::popstate, move |_| popstate_cb());

on_cleanup(|| {
click_handle.remove();
popstate_handle.remove();
});
}

fn ready_to_complete(&self) {

@@ -14,7 +14,7 @@ use send_wrapper::SendWrapper;
use std::{borrow::Cow, future::Future};
use tachys::dom::window;
use wasm_bindgen::{JsCast, JsValue};
use web_sys::{Event, HtmlAnchorElement, MouseEvent};
use web_sys::{HtmlAnchorElement, MouseEvent};

mod history;
mod server;
@@ -300,15 +300,14 @@ pub(crate) fn handle_anchor_click<NavFn, NavFut>(
router_base: Option<Cow<'static, str>>,
parse_with_base: fn(&str, &str) -> Result<Url, JsValue>,
navigate: NavFn,
) -> Box<dyn Fn(Event) -> Result<(), JsValue>>
) -> Box<dyn Fn(MouseEvent) -> Result<(), JsValue>>
where
NavFn: Fn(Url, LocationChange) -> NavFut + 'static,
NavFut: Future<Output = ()> + 'static,
{
let router_base = router_base.unwrap_or_default();

Box::new(move |ev: Event| {
let ev = ev.unchecked_into::<MouseEvent>();
Box::new(move |ev: MouseEvent| {
let origin = window().location().origin()?;
if ev.default_prevented()
|| ev.button() != 0
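
The `BrowserUrl::init` changes above swap hand-rolled `Closure` + `addEventListener` wiring for leptos's `window_event_listener`, whose returned handle can later be detached; both handles are removed in `on_cleanup`. A minimal usage sketch in the same style, assuming it runs inside a reactive owner (for example a component body); the function name is invented for illustration:

    use leptos::{ev, prelude::*};

    fn install_listeners() {
        // each handle's remove() detaches the underlying DOM listener
        let click_handle = window_event_listener(ev::click, |_ev| {
            leptos::logging::log!("window click");
        });
        let popstate_handle = window_event_listener(ev::popstate, |_ev| {
            leptos::logging::log!("history navigation");
        });
        on_cleanup(move || {
            click_handle.remove();
            popstate_handle.remove();
        });
    }
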
@@ -1,6 +1,6 @@
[package]
name = "tachys"
version = "0.2.8"
version = "0.2.9"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

@@ -205,6 +205,14 @@ where
self.add_any_attr(enterkeyhint(value))
}

/// The `exportparts` attribute enables the sharing of parts of an element's shadow DOM with a containing document.
fn exportparts(
self,
value: V,
) -> <Self as AddAnyAttr>::Output<Attr<Exportparts, V>> {
self.add_any_attr(exportparts(value))
}

/// The `hidden` global attribute is a Boolean attribute indicating that the element is not yet, or is no longer, relevant.
fn hidden(self, value: V) -> <Self as AddAnyAttr>::Output<Attr<Hidden, V>> {
self.add_any_attr(hidden(value))

@@ -212,7 +212,7 @@ html_self_closing_elements! {
/// The `<img>` HTML element embeds an image into the document.
img HtmlImageElement [alt, attributionsrc, crossorigin, decoding, elementtiming, fetchpriority, height, ismap, loading, referrerpolicy, sizes, src, srcset, usemap, width] true,
/// The `<input>` HTML element is used to create interactive controls for web-based forms in order to accept data from the user; a wide variety of types of input data and control widgets are available, depending on the device and user agent. The `<input>` element is one of the most powerful and complex in all of HTML due to the sheer number of combinations of input types and attributes.
input HtmlInputElement [accept, alt, autocomplete, capture, checked, disabled, form, formaction, formenctype, formmethod, formnovalidate, formtarget, height, list, max, maxlength, min, minlength, multiple, name, pattern, placeholder, popovertarget, popovertargetaction, readonly, required, size, src, step, r#type, value, width] true,
input HtmlInputElement [accept, alt, autocomplete, capture, checked, dirname, disabled, form, formaction, formenctype, formmethod, formnovalidate, formtarget, height, list, max, maxlength, min, minlength, multiple, name, pattern, placeholder, popovertarget, popovertargetaction, readonly, required, size, src, step, r#type, value, width] true,
/// The `<link>` HTML element specifies relationships between the current document and an external resource. This element is most commonly used to link to CSS, but is also used to establish site icons (both "favicon" style icons and icons for the home screen and apps on mobile devices) among other things.
link HtmlLinkElement [r#as, blocking, crossorigin, fetchpriority, href, hreflang, imagesizes, imagesrcset, integrity, media, rel, referrerpolicy, sizes, r#type] true,
/// The `<meta>` HTML element represents Metadata that cannot be represented by other HTML meta-related elements, like base, link, script, style or title.

@@ -323,7 +323,9 @@ where
fn rebuild(self, state: &mut Self::State) {
let (el, prev_cleanup) = state;
if let Some(prev) = prev_cleanup.take() {
(prev.into_inner())(el);
if let Some(remove) = prev.into_inner() {
remove();
}
}
*prev_cleanup = Some(if E::CAPTURE {
self.attach_capture(el)

@@ -258,7 +258,9 @@ where
prop(self.key(), signal).rebuild(attr_state);

if let Some(prev) = prev_cleanup.take() {
(prev.into_inner())(el);
if let Some(remove) = prev.into_inner() {
remove();
}
}
*prev_cleanup = Some(self.attach(el));
}

@@ -7,8 +7,8 @@ use reactive_graph::{
RwSignal,
},
traits::{
DefinedAt, Get, Notify, ReadUntracked, Set, Track, UntrackableGuard,
Write,
DefinedAt, Get, IsDisposed, Notify, ReadUntracked, Set, Track,
UntrackableGuard, Write,
},
};
use send_wrapper::SendWrapper;
@@ -158,6 +158,16 @@ where
}
}

impl<E> IsDisposed for NodeRef<E>
where
E: ElementType,
E::Output: 'static,
{
fn is_disposed(&self) -> bool {
self.0.is_disposed()
}
}

/// Create a [NodeRef].
#[inline(always)]
#[track_caller]

@@ -296,19 +296,20 @@ impl Dom {
// return the remover
RemoveEventHandler::new({
let name = name.to_owned();
let el = el.clone();
// safe to construct this here, because it will only run in the browser
// so it will always be accessed or dropped from the main thread
let cb = send_wrapper::SendWrapper::new(cb);
move |el: &Element| {
let cb = send_wrapper::SendWrapper::new(move || {
or_debug!(
el.remove_event_listener_with_callback(
intern(&name),
cb.as_ref().unchecked_ref()
),
el,
&el,
"removeEventListener"
)
}
});
move || cb()
})
}

@@ -334,20 +335,21 @@ impl Dom {
// return the remover
RemoveEventHandler::new({
let name = name.to_owned();
let el = el.clone();
// safe to construct this here, because it will only run in the browser
// so it will always be accessed or dropped from the main thread
let cb = send_wrapper::SendWrapper::new(cb);
move |el: &Element| {
let cb = send_wrapper::SendWrapper::new(move || {
or_debug!(
el.remove_event_listener_with_callback_and_bool(
intern(&name),
cb.as_ref().unchecked_ref(),
true
),
el,
&el,
"removeEventListener"
)
}
});
move || cb()
})
}

@@ -448,17 +450,19 @@ impl Dom {
// return the remover
RemoveEventHandler::new({
let key = key.to_owned();
let el = el.clone();
// safe to construct this here, because it will only run in the browser
// so it will always be accessed or dropped from the main thread
let cb = send_wrapper::SendWrapper::new(cb);
move |el: &Element| {
drop(cb.take());
let el_cb = send_wrapper::SendWrapper::new((el, cb));
move || {
let (el, cb) = el_cb.take();
drop(cb);
or_debug!(
js_sys::Reflect::delete_property(
el,
&el,
&JsValue::from_str(&key)
),
el,
&el,
"delete property"
);
}
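
The `Dom` changes above all follow one pattern: the element and the JS callback are captured inside `send_wrapper::SendWrapper`, so the remover closure itself is `Send + Sync` even though its captures are not, on the understanding (stated in the comments) that it only ever runs on the main browser thread. A reduced, generic sketch of that pattern, not the actual tachys code:

    use send_wrapper::SendWrapper;

    // Wraps a non-Send value so the returned cleanup closure is Send + Sync.
    // SendWrapper panics if the wrapped value is accessed or dropped on a
    // different thread, which is what makes this sound by convention.
    fn make_remover<T: 'static>(
        value: T,
        cleanup: impl FnOnce(T) + Send + Sync + 'static,
    ) -> impl FnOnce() + Send + Sync {
        let value = SendWrapper::new(value);
        move || cleanup(value.take())
    }

    fn main() {
        let remover = make_remover(String::from("listener"), |v| println!("removing {v}"));
        remover();
    }
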
@@ -1,5 +1,5 @@
use crate::view::{Mountable, ToTemplate};
use std::{borrow::Cow, fmt::Debug};
use std::{borrow::Cow, fmt::Debug, marker::PhantomData};
use wasm_bindgen::JsValue;

/// A DOM renderer.
@@ -120,16 +133,33 @@ pub trait Renderer: Send + Sized + Debug + 'static {
should store it in some other data structure to clean it up \
later to avoid dropping it immediately, or leak it with \
std::mem::forget() to never drop it."]
pub struct RemoveEventHandler<T>(Box<dyn FnOnce(&T) + Send + Sync>);
#[allow(clippy::type_complexity)]
pub struct RemoveEventHandler<T>(
Option<Box<dyn FnOnce() + Send + Sync>>,
// only here to keep the generic, removing which would be a breaking change
// TODO remove generic in 0.9
PhantomData<fn() -> T>,
);

impl<T> RemoveEventHandler<T> {
/// Creates a new container with a function that will be called when it is dropped.
pub(crate) fn new(remove: impl FnOnce(&T) + Send + Sync + 'static) -> Self {
Self(Box::new(remove))
pub(crate) fn new(remove: impl FnOnce() + Send + Sync + 'static) -> Self {
Self(Some(Box::new(remove)), PhantomData)
}

pub(crate) fn into_inner(self) -> Box<dyn FnOnce(&T) + Send + Sync> {
self.0
#[allow(clippy::type_complexity)]
pub(crate) fn into_inner(
mut self,
) -> Option<Box<dyn FnOnce() + Send + Sync>> {
self.0.take()
}
}

impl<T> Drop for RemoveEventHandler<T> {
fn drop(&mut self) {
if let Some(cb) = self.0.take() {
cb()
}
}
}
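
The reworked `RemoveEventHandler` above is essentially a drop guard: the cleanup closure is held in an `Option` so that `into_inner` can hand it out at most once, anything left over runs on `Drop`, and the `PhantomData<fn() -> T>` only preserves the old type parameter. A self-contained sketch of the same guard shape (the type and names here are invented, not the tachys type itself):

    struct CleanupGuard(Option<Box<dyn FnOnce() + Send + Sync>>);

    impl CleanupGuard {
        fn new(f: impl FnOnce() + Send + Sync + 'static) -> Self {
            Self(Some(Box::new(f)))
        }
        // Take the cleanup out to run (or store) it manually; Drop then does nothing.
        fn into_inner(mut self) -> Option<Box<dyn FnOnce() + Send + Sync>> {
            self.0.take()
        }
    }

    impl Drop for CleanupGuard {
        fn drop(&mut self) {
            if let Some(f) = self.0.take() {
                f(); // runs automatically if the guard was simply dropped
            }
        }
    }

    fn main() {
        let guard = CleanupGuard::new(|| println!("cleaned up"));
        drop(guard); // prints "cleaned up"
    }
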
@@ -378,83 +378,91 @@ impl Stream for StreamBuilder {
let next_chunk = this.chunks.pop_front();
match next_chunk {
None => {
// now, handle out-of-order chunks
if let Some(mut pending) = this.pending_ooo.pop_front() {
match pending.as_mut().poll(cx) {
Poll::Ready(OooChunk {
id,
chunks,
replace,
nonce,
}) => {
let opening = format!("<!--s-{id}o-->");
let placeholder_at =
this.sync_buf.find(&opening);
if let Some(start) = placeholder_at {
let closing = format!("<!--s-{id}c-->");
let end =
this.sync_buf.find(&closing).unwrap();
let chunks_iter = chunks.into_iter().rev();
if this.pending_ooo.is_empty() {
if this.sync_buf.is_empty() {
Poll::Ready(None)
} else {
Poll::Ready(Some(mem::take(&mut this.sync_buf)))
}
} else {
// check if *any* pending out-of-order chunk is ready
for mut chunk in mem::take(&mut this.pending_ooo) {
match chunk.as_mut().poll(cx) {
Poll::Ready(OooChunk {
id,
chunks,
replace,
nonce,
}) => {
let opening = format!("<!--s-{id}o-->");
let placeholder_at =
this.sync_buf.find(&opening);
if let Some(start) = placeholder_at {
let closing = format!("<!--s-{id}c-->");
let end = this
.sync_buf
.find(&closing)
.unwrap();
let chunks_iter =
chunks.into_iter().rev();

// TODO can probably make this more efficient
let (before, replaced) =
this.sync_buf.split_at(start);
let (_, after) = replaced
.split_at(end - start + closing.len());
let mut buf = String::new();
buf.push_str(before);
// TODO can probably make this more efficient
let (before, replaced) =
this.sync_buf.split_at(start);
let (_, after) = replaced.split_at(
end - start + closing.len(),
);
let mut buf = String::new();
buf.push_str(before);

let mut held_chunks = VecDeque::new();
for chunk in chunks_iter {
if let StreamChunk::Sync(ready) = chunk
{
buf.push_str(&ready);
} else {
held_chunks.push_front(chunk);
let mut held_chunks = VecDeque::new();
for chunk in chunks_iter {
if let StreamChunk::Sync(ready) =
chunk
{
buf.push_str(&ready);
} else {
held_chunks.push_front(chunk);
}
}
}
buf.push_str(after);
this.sync_buf = buf;
for chunk in held_chunks {
this.chunks.push_front(chunk);
}
} else {
OooChunk::push_start(
&id,
&mut this.sync_buf,
);
for chunk in chunks.into_iter().rev() {
if let StreamChunk::Sync(ready) = chunk
{
this.sync_buf.push_str(&ready);
} else {
buf.push_str(after);
this.sync_buf = buf;
for chunk in held_chunks {
this.chunks.push_front(chunk);
}
} else {
OooChunk::push_start(
&id,
&mut this.sync_buf,
);
for chunk in chunks.into_iter().rev() {
if let StreamChunk::Sync(ready) =
chunk
{
this.sync_buf.push_str(&ready);
} else {
this.chunks.push_front(chunk);
}
}
OooChunk::push_end_with_nonce(
replace,
&id,
&mut this.sync_buf,
nonce.as_deref(),
);
}
OooChunk::push_end_with_nonce(
replace,
&id,
&mut this.sync_buf,
nonce.as_deref(),
);
}
self.poll_next(cx)
}
Poll::Pending => {
this.pending_ooo.push_back(pending);
if this.sync_buf.is_empty() {
Poll::Pending
} else {
Poll::Ready(Some(mem::take(
&mut this.sync_buf,
)))
Poll::Pending => {
this.pending_ooo.push_back(chunk);
}
}
}
} else if this.sync_buf.is_empty() {
Poll::Ready(None)
} else {
Poll::Ready(Some(mem::take(&mut this.sync_buf)))

if this.sync_buf.is_empty() {
Poll::Pending
} else {
Poll::Ready(Some(mem::take(&mut this.sync_buf)))
}
}
}
Some(StreamChunk::Sync(value)) => {
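
The restructured branch above changes the strategy from polling only the front pending out-of-order chunk to draining the whole queue on each wakeup, splicing ready chunks into the buffer and re-queuing the ones that are still `Pending`. The queue-draining idiom, reduced to its core and written with a plain readiness check instead of real futures (illustrative only):

    use std::collections::VecDeque;
    use std::mem;

    // Visit every queued item once, keep the ones that are not ready yet,
    // and act on the ones that are.
    fn drain_ready<T>(
        queue: &mut VecDeque<T>,
        mut is_ready: impl FnMut(&T) -> bool,
        mut on_ready: impl FnMut(T),
    ) {
        for item in mem::take(queue) {
            if is_ready(&item) {
                on_ready(item);
            } else {
                queue.push_back(item); // try again on the next poll
            }
        }
    }
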
@@ -79,6 +79,13 @@ pub struct AnyView {
) -> Pin<Box<dyn Future<Output = AnyViewState>>>,
}

impl AnyView {
#[doc(hidden)]
pub fn as_type_id(&self) -> TypeId {
self.type_id
}
}

impl Debug for AnyView {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("AnyView")