Mirror of https://github.com/leptos-rs/leptos.git, synced 2025-12-28 10:11:56 -05:00

Compare commits — 2 commits

| Author | SHA1 | Date |
|---|---|---|
| | e8d80e0733 | |
| | 0f2168b92c | |
.github/workflows/get-example-changed.yml (vendored) — 2 changes

@@ -24,7 +24,7 @@ jobs:
       fetch-depth: 0
   - name: Get example files that changed
     id: changed-files
-    uses: tj-actions/changed-files@v47
+    uses: tj-actions/changed-files@v46
     with:
       files: |
        examples/**

.github/workflows/get-leptos-changed.yml (vendored) — 2 changes

@@ -18,7 +18,7 @@ jobs:
       fetch-depth: 0
   - name: Get source files that changed
     id: changed-source
-    uses: tj-actions/changed-files@v47
+    uses: tj-actions/changed-files@v46
     with:
       files_ignore: |
        .*/**/*

.github/workflows/run-cargo-make-task.yml (vendored) — 2 changes

@@ -88,7 +88,7 @@ jobs:
     run: trunk --version
   - name: Install Node.js
     if: contains(inputs.directory, 'examples')
-    uses: actions/setup-node@v6
+    uses: actions/setup-node@v4
     with:
       node-version: 20
   - uses: pnpm/action-setup@v4
Cargo.lock (generated) — 1105 changes. File diff suppressed because it is too large.
Cargo.toml — 84 changes

@@ -45,44 +45,44 @@ rust-version = "1.88"

 [workspace.dependencies]
 # members
-throw_error = { path = "./any_error/", version = "0.3.1" }
+throw_error = { path = "./any_error/", version = "0.3.0" }
 any_spawner = { path = "./any_spawner/", version = "0.3.0" }
 const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
 either_of = { path = "./either_of/", version = "0.1.6" }
 hydration_context = { path = "./hydration_context", version = "0.3.0" }
-leptos = { path = "./leptos", version = "0.8.11" }
-leptos_config = { path = "./leptos_config", version = "0.8.7" }
-leptos_dom = { path = "./leptos_dom", version = "0.8.7" }
+leptos = { path = "./leptos", version = "0.8.6" }
+leptos_config = { path = "./leptos_config", version = "0.8.5" }
+leptos_dom = { path = "./leptos_dom", version = "0.8.5" }
 leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.5" }
-leptos_integration_utils = { path = "./integrations/utils", version = "0.8.6" }
-leptos_macro = { path = "./leptos_macro", version = "0.8.10" }
-leptos_router = { path = "./router", version = "0.8.9" }
+leptos_integration_utils = { path = "./integrations/utils", version = "0.8.5" }
+leptos_macro = { path = "./leptos_macro", version = "0.8.6" }
+leptos_router = { path = "./router", version = "0.8.5" }
 leptos_router_macro = { path = "./router_macro", version = "0.8.5" }
 leptos_server = { path = "./leptos_server", version = "0.8.5" }
 leptos_meta = { path = "./meta", version = "0.8.5" }
 next_tuple = { path = "./next_tuple", version = "0.1.0" }
 oco_ref = { path = "./oco", version = "0.2.1" }
 or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
-reactive_graph = { path = "./reactive_graph", version = "0.2.9" }
-reactive_stores = { path = "./reactive_stores", version = "0.3.0" }
-reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.6" }
-server_fn = { path = "./server_fn", version = "0.8.8" }
-server_fn_macro = { path = "./server_fn_macro", version = "0.8.8" }
+reactive_graph = { path = "./reactive_graph", version = "0.2.5" }
+reactive_stores = { path = "./reactive_stores", version = "0.2.5" }
+reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.5" }
+server_fn = { path = "./server_fn", version = "0.8.5" }
+server_fn_macro = { path = "./server_fn_macro", version = "0.8.6" }
 server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.5" }
-tachys = { path = "./tachys", version = "0.2.10" }
+tachys = { path = "./tachys", version = "0.2.6" }
 wasm_split_helpers = { path = "./wasm_split", version = "0.1.2" }
-wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.3" }
+wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.2" }

 # members deps
 async-once-cell = { default-features = false, version = "0.5.3" }
 itertools = { default-features = false, version = "0.14.0" }
 convert_case = { default-features = false, version = "0.8.0" }
-serde_json = { default-features = false, version = "1.0.143" }
+serde_json = { default-features = false, version = "1.0.142" }
 trybuild = { default-features = false, version = "1.0.110" }
-typed-builder = { default-features = false, version = "0.21.2" }
-thiserror = { default-features = false, version = "2.0.17" }
+typed-builder = { default-features = false, version = "0.21.0" }
+thiserror = { default-features = false, version = "2.0.12" }
 wasm-bindgen = { default-features = false, version = "0.2.100" }
-indexmap = { default-features = false, version = "2.11.0" }
+indexmap = { default-features = false, version = "2.9.0" }
 rstml = { default-features = false, version = "0.12.1" }
 rustc_version = { default-features = false, version = "0.4.1" }
 guardian = { default-features = false, version = "1.3.0" }

@@ -97,72 +97,71 @@ send_wrapper = { default-features = false, version = "0.6.0" }
 tokio-test = { default-features = false, version = "0.4.4" }
 html-escape = { default-features = false, version = "0.2.13" }
 proc-macro-error2 = { default-features = false, version = "2.0.1" }
-const_format = { default-features = false, version = "0.2.35" }
+const_format = { default-features = false, version = "0.2.34" }
 gloo-net = { default-features = false, version = "0.6.0" }
 url = { default-features = false, version = "2.5.4" }
 tokio = { default-features = false, version = "1.47.1" }
 base64 = { default-features = false, version = "0.22.1" }
-cfg-if = { default-features = false, version = "1.0.3" }
+cfg-if = { default-features = false, version = "1.0.0" }
 wasm-bindgen-futures = { default-features = false, version = "0.4.50" }
 tower = { default-features = false, version = "0.5.2" }
-proc-macro2 = { default-features = false, version = "1.0.101" }
+proc-macro2 = { default-features = false, version = "1.0.96" }
 serde = { default-features = false, version = "1.0.219" }
-parking_lot = { default-features = false, version = "0.12.5" }
-axum = { default-features = false, version = "0.8.6" }
+parking_lot = { default-features = false, version = "0.12.4" }
+axum = { default-features = false, version = "0.8.4" }
 serde_qs = { default-features = false, version = "0.15.0" }
-syn = { default-features = false, version = "2.0.106" }
+syn = { default-features = false, version = "2.0.104" }
 xxhash-rust = { default-features = false, version = "0.8.15" }
 paste = { default-features = false, version = "1.0.15" }
-quote = { default-features = false, version = "1.0.41" }
+quote = { default-features = false, version = "1.0.40" }
 web-sys = { default-features = false, version = "0.3.77" }
 js-sys = { default-features = false, version = "0.3.77" }
 rand = { default-features = false, version = "0.9.1" }
 serde-lite = { default-features = false, version = "0.5.0" }
-tokio-tungstenite = { default-features = false, version = "0.28.0" }
+tokio-tungstenite = { default-features = false, version = "0.27.0" }
 serial_test = { default-features = false, version = "3.2.0" }
 erased = { default-features = false, version = "0.1.2" }
 glib = { default-features = false, version = "0.20.12" }
-async-trait = { default-features = false, version = "0.1.89" }
+async-trait = { default-features = false, version = "0.1.88" }
 typed-builder-macro = { default-features = false, version = "0.21.0" }
 linear-map = { default-features = false, version = "1.2.0" }
-anyhow = { default-features = false, version = "1.0.100" }
+anyhow = { default-features = false, version = "1.0.98" }
 walkdir = { default-features = false, version = "2.5.0" }
 actix-ws = { default-features = false, version = "0.3.0" }
 tower-http = { default-features = false, version = "0.6.4" }
-prettyplease = { default-features = false, version = "0.2.37" }
-inventory = { default-features = false, version = "0.3.21" }
-config = { default-features = false, version = "0.15.14" }
-camino = { default-features = false, version = "1.2.1" }
+prettyplease = { default-features = false, version = "0.2.36" }
+inventory = { default-features = false, version = "0.3.20" }
+config = { default-features = false, version = "0.15.13" }
+camino = { default-features = false, version = "1.1.11" }
 ciborium = { default-features = false, version = "0.2.2" }
 bitcode = { default-features = false, version = "0.6.6" }
 multer = { default-features = false, version = "3.1.0" }
 leptos-spin-macro = { default-features = false, version = "0.2.0" }
 sledgehammer_utils = { default-features = false, version = "0.3.1" }
 sledgehammer_bindgen = { default-features = false, version = "0.6.0" }
 wasm-streams = { default-features = false, version = "0.4.2" }
-rkyv = { default-features = false, version = "0.8.12" }
+rkyv = { default-features = false, version = "0.8.11" }
 temp-env = { default-features = false, version = "0.3.6" }
 uuid = { default-features = false, version = "1.18.0" }
 bytes = { default-features = false, version = "1.10.1" }
 http = { default-features = false, version = "1.3.1" }
-regex = { default-features = false, version = "1.11.3" }
+regex = { default-features = false, version = "1.11.1" }
 drain_filter_polyfill = { default-features = false, version = "0.1.3" }
-tempfile = { default-features = false, version = "3.23.0" }
+tempfile = { default-features = false, version = "3.20.0" }
 futures-lite = { default-features = false, version = "2.6.1" }
 log = { default-features = false, version = "0.4.27" }
-percent-encoding = { default-features = false, version = "2.3.2" }
+percent-encoding = { default-features = false, version = "2.3.1" }
 async-executor = { default-features = false, version = "1.13.2" }
 const-str = { default-features = false, version = "0.6.4" }
 http-body-util = { default-features = false, version = "0.1.3" }
-hyper = { default-features = false, version = "1.7.0" }
+hyper = { default-features = false, version = "1.6.0" }
 postcard = { default-features = false, version = "1.1.3" }
 rmp-serde = { default-features = false, version = "1.3.0" }
-reqwest = { default-features = false, version = "0.12.23" }
+reqwest = { default-features = false, version = "0.12.22" }
 tower-layer = { default-features = false, version = "0.3.3" }
-attribute-derive = { default-features = false, version = "0.10.5" }
+attribute-derive = { default-features = false, version = "0.10.3" }
 insta = { default-features = false, version = "1.43.1" }
 codee = { default-features = false, version = "0.3.0" }
-actix-http = { default-features = false, version = "3.11.2" }
+actix-http = { default-features = false, version = "3.11.0" }
 wasm-bindgen-test = { default-features = false, version = "0.3.50" }
 rustversion = { default-features = false, version = "1.0.22" }
 getrandom = { default-features = false, version = "0.3.3" }

@@ -171,9 +170,6 @@ async-lock = { default-features = false, version = "3.4.1" }
 base16 = { default-features = false, version = "0.2.1" }
 digest = { default-features = false, version = "0.10.7" }
 sha2 = { default-features = false, version = "0.10.8" }
-subsecond = { default-features = false, version = "0.7.0-rc.0" }
-dioxus-cli-config = { default-features = false, version = "0.7.0-rc.0" }
-dioxus-devtools = { default-features = false, version = "0.7.0-rc.0" }

 [profile.release]
 codegen-units = 1
@@ -95,7 +95,7 @@ Here are some resources for learning more about Leptos:
 [`cargo-leptos`](https://github.com/leptos-rs/cargo-leptos) is a build tool that's designed to make it easy to build apps that run on both the client and the server, with seamless integration. The best way to get started with a real Leptos project right now is to use `cargo-leptos` and our starter templates for [Actix](https://github.com/leptos-rs/start) or [Axum](https://github.com/leptos-rs/start-axum).

 ```bash
-cargo install cargo-leptos --locked
+cargo install cargo-leptos
 cargo leptos new --git https://github.com/leptos-rs/start-axum
 cd [your project name]
 cargo leptos watch
@@ -1,6 +1,6 @@
 [package]
 name = "throw_error"
-version = "0.3.1"
+version = "0.3.0"
 authors = ["Greg Johnston"]
 license = "MIT"
 readme = "../README.md"

@@ -11,6 +11,3 @@ edition.workspace = true
 [dependencies]
 pin-project-lite = { workspace = true, default-features = true }
-
-[dev-dependencies]
-anyhow.workspace = true
@@ -45,10 +45,10 @@ impl fmt::Display for Error {

 impl<T> From<T> for Error
 where
-    T: Into<Box<dyn error::Error + Send + Sync + 'static>>,
+    T: error::Error + Send + Sync + 'static,
 {
     fn from(value: T) -> Self {
-        Error(Arc::from(value.into()))
+        Error(Arc::new(value))
     }
 }

@@ -158,32 +158,3 @@ where
         this.inner.poll(cx)
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use std::error::Error as StdError;
-
-    #[derive(Debug)]
-    struct MyError;
-
-    impl Display for MyError {
-        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-            write!(f, "MyError")
-        }
-    }
-
-    impl StdError for MyError {}
-
-    #[test]
-    fn test_from() {
-        let e = MyError;
-        let _le = Error::from(e);
-
-        let e = "some error".to_string();
-        let _le = Error::from(e);
-
-        let e = anyhow::anyhow!("anyhow error");
-        let _le = Error::from(e);
-    }
-}
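Editorial note on the hunk above: the two `From` bounds are not equivalent. `T: Into<Box<dyn Error + Send + Sync + 'static>>` accepts not just concrete error types but also `String` and `&str`, via the standard library's blanket conversions into boxed errors, which is exactly what the removed test exercises. A minimal, self-contained sketch of that difference, using a stand-in `Error` wrapper rather than the crate's own type:

```rust
use std::{error, fmt, sync::Arc};

/// Stand-in for `throw_error::Error`: an `Arc`-wrapped boxed error.
/// (Local illustration only; not the crate's actual definition.)
#[derive(Clone, Debug)]
struct Error(Arc<dyn error::Error + Send + Sync>);

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

// The broader bound: anything convertible into a boxed error works,
// including `String` and `&str`, because the standard library provides
// `impl From<String> for Box<dyn Error + Send + Sync>` and friends.
impl<T> From<T> for Error
where
    T: Into<Box<dyn error::Error + Send + Sync + 'static>>,
{
    fn from(value: T) -> Self {
        Error(Arc::from(value.into()))
    }
}

fn main() {
    // Compiles only with the broader bound; with
    // `T: error::Error + Send + Sync + 'static` the `String` line would
    // not compile, since `String` does not implement `std::error::Error`.
    let from_string = Error::from("some error".to_string());
    let from_io = Error::from(std::io::Error::other("io failure"));
    println!("{from_string} / {from_io}");
}
```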
@@ -1,7 +0,0 @@
-@check_issue_4005
-Feature: Check that issue 4005 does not reappear
-
-  Scenario: The second item is selected.
-    Given I see the app
-    And I can access regression test 4005
-    Then I see the value of select is 2

@@ -1,9 +0,0 @@
-@check_issue_4217
-Feature: Check that issue 4217 does not reappear
-
-  Scenario: All items are selected.
-    Given I see the app
-    And I can access regression test 4217
-    Then I see option1 is selected
-    And I see option2 is selected
-    And I see option3 is selected

@@ -1,13 +0,0 @@
-@check_issue_4251
-Feature: Check that issue 4251 does not reappear
-
-  Scenario: Clicking a link to the same page you’re currently on should not add the page to the history stack.
-    Given I see the app
-    And I can access regression test 4324
-    When I select the link This page
-    And I select the link This page
-    And I select the link This page
-    Then I see the result is the string Issue4324
-    When I press the back button
-    And I select the link 4324
-    Then I see the result is the string Issue4324

@@ -1,9 +0,0 @@
-@check_issue_4285
-Feature: Check that issue 4285 does not reappear
-
-  Scenario: Navigating several times to same lazy route does not cause issues.
-    Given I see the app
-    And I can access regression test 4285
-    And I can access regression test 4285
-    And I can access regression test 4285
-    Then I see the result is the string 42

@@ -1,18 +0,0 @@
-@check_issue_4296
-Feature: Check that issue 4296 does not reappear
-
-  Scenario: Query param signals created in LazyRoute::data() are reactive in ::view().
-    Given I see the app
-    And I can access regression test 4296
-    Then I see the result is the string None
-    When I select the link abc
-    Then I see the result is the string Some("abc")
-    When I select the link def
-    Then I see the result is the string Some("def")
-
-  Scenario: Loading page with query signal works as well.
-    Given I see the app
-    And I can access regression test 4296
-    When I select the link abc
-    When I reload the page
-    Then I see the result is the string Some("abc")

@@ -1,11 +0,0 @@
-@check_issue_4324
-Feature: Check that issue 4324 does not reappear
-
-  Scenario: Navigating to the same page after clicking "Back" should set the URL correctly
-    Given I see the app
-    And I can access regression test 4324
-    Then I see the path is /4324/
-    When I press the back button
-    Then I see the path is /
-    When I select the link 4324
-    Then I see the path is /4324/
examples/regression/e2e/tests/fixtures/check.rs (vendored) — 35 changes

@@ -18,38 +18,3 @@ pub async fn element_exists(client: &Client, id: &str) -> Result<()> {
         .expect(&format!("could not find element with id `{id}`"));
     Ok(())
 }
-
-pub async fn select_option_is_selected(
-    client: &Client,
-    id: &str,
-) -> Result<()> {
-    let el = find::element_by_id(client, id)
-        .await
-        .expect(&format!("could not find element with id `{id}`"));
-    let selected = el.prop("selected").await?;
-    assert_eq!(selected.as_deref(), Some("true"));
-    Ok(())
-}
-
-pub async fn element_value_is(
-    client: &Client,
-    id: &str,
-    expected: &str,
-) -> Result<()> {
-    let el = find::element_by_id(client, id)
-        .await
-        .expect(&format!("could not find element with id `{id}`"));
-    let value = el.prop("value").await?;
-    assert_eq!(value.as_deref(), Some(expected));
-    Ok(())
-}
-
-pub async fn path_is(client: &Client, expected_path: &str) -> Result<()> {
-    let url = client
-        .current_url()
-        .await
-        .expect("could not access current URL");
-    let path = url.path();
-    assert_eq!(expected_path, path);
-    Ok(())
-}

@@ -45,12 +45,3 @@ async fn i_refresh_the_browser(world: &mut AppWorld) -> Result<()> {

     Ok(())
 }
-
-#[given(regex = "^I press the back button$")]
-#[when(regex = "^I press the back button$")]
-async fn i_go_back(world: &mut AppWorld) -> Result<()> {
-    let client = &world.client;
-    client.back().await?;
-
-    Ok(())
-}

@@ -25,28 +25,3 @@ async fn i_see_the_navbar(world: &mut AppWorld) -> Result<()> {
     check::element_exists(client, "nav").await?;
     Ok(())
 }
-
-#[then(regex = r"^I see ([\d\w]+) is selected$")]
-async fn i_see_the_select(world: &mut AppWorld, id: String) -> Result<()> {
-    let client = &world.client;
-    check::select_option_is_selected(client, &id).await?;
-    Ok(())
-}
-
-#[then(regex = r"^I see the value of (\w+) is (.*)$")]
-async fn i_see_the_value(
-    world: &mut AppWorld,
-    id: String,
-    value: String,
-) -> Result<()> {
-    let client = &world.client;
-    check::element_value_is(client, &id, &value).await?;
-    Ok(())
-}
-
-#[then(regex = r"^I see the path is (.*)$")]
-async fn i_see_the_path(world: &mut AppWorld, path: String) -> Result<()> {
-    let client = &world.client;
-    check::path_is(client, &path).await?;
-    Ok(())
-}
@@ -1,8 +1,4 @@
-use crate::{
-    issue_4005::Routes4005, issue_4088::Routes4088, issue_4217::Routes4217,
-    issue_4285::Routes4285, issue_4296::Routes4296, issue_4324::Routes4324,
-    pr_4015::Routes4015, pr_4091::Routes4091,
-};
+use crate::{issue_4088::Routes4088, pr_4015::Routes4015, pr_4091::Routes4091};
 use leptos::prelude::*;
 use leptos_meta::{MetaTags, *};
 use leptos_router::{

@@ -32,22 +28,15 @@ pub fn shell(options: LeptosOptions) -> impl IntoView {
 pub fn App() -> impl IntoView {
     provide_meta_context();
     let fallback = || view! { "Page not found." }.into_view();
-    let (_, set_is_routing) = signal(false);

     view! {
         <Stylesheet id="leptos" href="/pkg/regression.css"/>
-        <Router set_is_routing>
+        <Router>
             <main>
                 <Routes fallback>
                     <Route path=path!("") view=HomePage/>
                     <Routes4091/>
                     <Routes4015/>
                     <Routes4088/>
-                    <Routes4217/>
-                    <Routes4005/>
-                    <Routes4285/>
-                    <Routes4296/>
-                    <Routes4324/>
                 </Routes>
             </main>
         </Router>

@@ -70,11 +59,6 @@ fn HomePage() -> impl IntoView {
             <li><a href="/4091/">"4091"</a></li>
             <li><a href="/4015/">"4015"</a></li>
             <li><a href="/4088/">"4088"</a></li>
-            <li><a href="/4217/">"4217"</a></li>
-            <li><a href="/4005/">"4005"</a></li>
-            <li><a href="/4285/">"4285"</a></li>
-            <li><a href="/4296/">"4296"</a></li>
-            <li><a href="/4324/">"4324"</a></li>
         </ul>
     </nav>
 }
@@ -1,24 +0,0 @@
-use leptos::prelude::*;
-#[allow(unused_imports)]
-use leptos_router::{
-    components::Route, path, MatchNestedRoutes, NavigateOptions,
-};
-
-#[component]
-pub fn Routes4005() -> impl MatchNestedRoutes + Clone {
-    view! {
-        <Route path=path!("4005") view=Issue4005/>
-    }
-    .into_inner()
-}
-
-#[component]
-fn Issue4005() -> impl IntoView {
-    view! {
-        <select id="select" prop:value="2">
-            <option value="1">"Option 1"</option>
-            <option value="2">"Option 2"</option>
-            <option value="3">"Option 3"</option>
-        </select>
-    }
-}

@@ -1,24 +0,0 @@
-use leptos::prelude::*;
-#[allow(unused_imports)]
-use leptos_router::{
-    components::Route, path, MatchNestedRoutes, NavigateOptions,
-};
-
-#[component]
-pub fn Routes4217() -> impl MatchNestedRoutes + Clone {
-    view! {
-        <Route path=path!("4217") view=Issue4217/>
-    }
-    .into_inner()
-}
-
-#[component]
-fn Issue4217() -> impl IntoView {
-    view! {
-        <select multiple=true>
-            <option id="option1" value="1" selected>"Option 1"</option>
-            <option id="option2" value="2" selected>"Option 2"</option>
-            <option id="option3" value="3" selected>"Option 3"</option>
-        </select>
-    }
-}

@@ -1,49 +0,0 @@
-use leptos::prelude::*;
-use leptos_router::LazyRoute;
-#[allow(unused_imports)]
-use leptos_router::{
-    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
-};
-
-#[component]
-pub fn Routes4285() -> impl MatchNestedRoutes + Clone {
-    view! {
-        <Route path=path!("4285") view={Lazy::<Issue4285>::new()}/>
-    }
-    .into_inner()
-}
-
-struct Issue4285 {
-    data: Resource<Result<i32, ServerFnError>>,
-}
-
-impl LazyRoute for Issue4285 {
-    fn data() -> Self {
-        Self {
-            data: Resource::new(|| (), |_| slow_call()),
-        }
-    }
-
-    async fn view(this: Self) -> AnyView {
-        let Issue4285 { data } = this;
-        view! {
-            <Suspense>
-                {move || {
-                    Suspend::new(async move {
-                        let data = data.await;
-                        view! {
-                            <p id="result">{data}</p>
-                        }
-                    })
-                }}
-            </Suspense>
-        }
-        .into_any()
-    }
-}
-
-#[server]
-async fn slow_call() -> Result<i32, ServerFnError> {
-    tokio::time::sleep(std::time::Duration::from_millis(250)).await;
-    Ok(42)
-}

@@ -1,36 +0,0 @@
-use leptos::prelude::*;
-#[allow(unused_imports)]
-use leptos_router::{
-    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
-};
-use leptos_router::{hooks::use_query_map, LazyRoute};
-
-#[component]
-pub fn Routes4296() -> impl MatchNestedRoutes + Clone {
-    view! {
-        <Route path=path!("4296") view={Lazy::<Issue4296>::new()}/>
-    }
-    .into_inner()
-}
-
-struct Issue4296 {
-    query: Signal<Option<String>>,
-}
-
-impl LazyRoute for Issue4296 {
-    fn data() -> Self {
-        let query = use_query_map();
-        let query = Signal::derive(move || query.read().get("q"));
-        Self { query }
-    }
-
-    async fn view(this: Self) -> AnyView {
-        let Issue4296 { query } = this;
-        view! {
-            <a href="?q=abc">"abc"</a>
-            <a href="?q=def">"def"</a>
-            <p id="result">{move || format!("{:?}", query.get())}</p>
-        }
-        .into_any()
-    }
-}

@@ -1,21 +0,0 @@
-use leptos::prelude::*;
-#[allow(unused_imports)]
-use leptos_router::{
-    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
-};
-
-#[component]
-pub fn Routes4324() -> impl MatchNestedRoutes + Clone {
-    view! {
-        <Route path=path!("4324") view=Issue4324/>
-    }
-    .into_inner()
-}
-
-#[component]
-pub fn Issue4324() -> impl IntoView {
-    view! {
-        <a href="/4324/">"This page"</a>
-        <p id="result">"Issue4324"</p>
-    }
-}
@@ -1,10 +1,5 @@
 pub mod app;
-mod issue_4005;
 mod issue_4088;
-mod issue_4217;
-mod issue_4285;
-mod issue_4296;
-mod issue_4324;
 mod pr_4015;
 mod pr_4091;
examples/subsecond_hot_patch/.gitignore (vendored) — 7 changes

@@ -1,7 +0,0 @@
-# Generated by Cargo
-# will have compiled files and executables
-/target
-.DS_Store
-
-# These are backup files generated by rustfmt
-**/*.rs.bk

@@ -1,13 +0,0 @@
-[package]
-name = "subsecond_hot_patch"
-version = "0.1.0"
-authors = ["Greg Johnston <greg.johnston@gmail.com>"]
-edition = "2021"
-
-[dependencies]
-leptos = { path = "../../leptos", features = ["csr", "subsecond"] }
-leptos_router = { path = "../../router" }
-
-[features]
-default = ["web"]
-web = []

@@ -1,21 +0,0 @@
-[application]
-
-[web.app]
-
-# HTML title tag content
-title = "ltest"
-
-# include `assets` in web platform
-[web.resource]
-
-# Additional CSS style files
-style = []
-
-# Additional JavaScript files
-script = []
-
-[web.resource.dev]
-
-# Javascript code file
-# serve: [dev-server] only
-script = []

@@ -1 +0,0 @@
-extend = [{ path = "../cargo-make/main.toml" }]
@@ -1,31 +0,0 @@
-# Hot Patching with `dx`
-
-This is an experimental example exploring how to combine Leptos with the binary hot-patching provided by Dioxus's `subsecond` library and `dx` CLI.
-
-### Serving Your App
-
-This requires installing the Dioxus CLI version 0.7.0. At the time I'm writing this README, that does not yet have a stable release. Once `dioxus-cli` 0.7.0 has been released, you should use the latest stable release. Until then, I'd suggest installing from git:
-
-```sh
-cargo install dioxus-cli --git https://github.com/DioxusLabs/dioxus
-```
-
-Then you can run the example with `dx serve --hot-patch --platform web`.
-
-### Hot Patching
-
-Changes to your application should be reflected in your app without a full rebuild and reload.
-
-### Limitations
-
-Currently we only support hot-patching for reactive view functions. You probably want to use `AnyView` (via `.into_any()`) on any views that will be hot-patched, so they can be rebuilt correctly despite their types changing when the structure of the view tree changes.
-
-If you are using `leptos_router` this actually works quite well, as every route’s view is erased to `AnyView` and the router itself is a reactive view function: in other words, changes inside any route should be hot-patched in any case.
-
-Note that any hot-patch will cause all render effects to run again. This means that some client-side state (like the values of signals) will be wiped out.
-
-### Build Tooling
-
-The preference of the Dioxus team is that all hot-patching work that uses their `subsecond` also use `dioxus-cli`. As this demo shows, it's completely possible to use `dioxus-cli` to build and run a Leptos project. We do not plan to build `subsecond` into our own build tooling at this time.
-
-**This is an experiment/POC. It is being published because members of the community have found it useful and have asked for the support to be merged in its current state. Further development and bugfixes are a relatively low priority at this time.**
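Editorial note: the `.into_any()` advice in the README above can be illustrated with a tiny sketch. Assumptions: a plain `csr` Leptos component; `HotPatchedView` and its markup are hypothetical names for illustration, not part of the example's actual code.

```rust
use leptos::prelude::*;

// A view we expect to edit while `dx serve --hot-patch` is running.
// Erasing it to `AnyView` with `.into_any()` lets its concrete view type
// change between patches without breaking the surrounding view tree.
#[component]
fn HotPatchedView() -> impl IntoView {
    view! {
        <h1>"Edit me and save to trigger a hot patch"</h1>
        <p>"Add or remove elements here."</p>
    }
    .into_any()
}
```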
Binary file not shown (before: 130 KiB).
File diff suppressed because one or more lines are too long (before: 23 KiB).
@@ -1,46 +0,0 @@
-/* App-wide styling */
-body {
-    background-color: #0f1116;
-    color: #ffffff;
-    font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
-    margin: 20px;
-}
-
-#hero {
-    margin: 0;
-    display: flex;
-    flex-direction: column;
-    justify-content: center;
-    align-items: center;
-}
-
-#links {
-    width: 400px;
-    text-align: left;
-    font-size: x-large;
-    color: white;
-    display: flex;
-    flex-direction: column;
-}
-
-#links a {
-    color: white;
-    text-decoration: none;
-    margin-top: 20px;
-    margin: 10px 0px;
-    border: white 1px solid;
-    border-radius: 5px;
-    padding: 10px;
-}
-
-#links a:hover {
-    background-color: #1f1f1f;
-    cursor: pointer;
-}
-
-#header {
-    max-width: 1200px;
-}
-
-
-

@@ -1,44 +0,0 @@
-use leptos::{prelude::*, subsecond::connect_to_hot_patch_messages};
-use leptos_router::{
-    components::{Route, Router, Routes},
-    path,
-};
-
-fn main() {
-    // connect to DX CLI and patch the WASM binary whenever we receive a message
-    connect_to_hot_patch_messages();
-
-    // wrapping App here in a closure so we can hot-reload it, because we only do that
-    // for reactive views right now. changing anything will re-run App and update the view
-    mount_to_body(|| App);
-}
-
-#[component]
-fn App() -> impl IntoView {
-    view! {
-        <nav>
-            <a href="/">"Home"</a>
-            <a href="/about">"About"</a>
-        </nav>
-        <Router>
-            <Routes fallback=|| "Not found">
-                <Route path=path!("/") view=HomePage/>
-                <Route path=path!("/about") view=About/>
-            </Routes>
-        </Router>
-    }
-}
-
-#[component]
-fn HomePage() -> impl IntoView {
-    view! {
-        <h1>"Home Page"</h1>
-    }
-}
-
-#[component]
-fn About() -> impl IntoView {
-    view! {
-        <h1>"About"</h1>
-    }
-}

@@ -1,3 +0,0 @@
-[tools]
-tailwindcss = "4.1.13"
-
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Actix integrations for the Leptos web framework."
-version = "0.8.6"
+version = "0.8.5"
 rust-version.workspace = true
 edition.workspace = true

@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Axum integrations for the Leptos web framework."
-version = "0.8.6"
+version = "0.8.5"
 rust-version.workspace = true
 edition.workspace = true

@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Utilities to help build server integrations for the Leptos web framework."
-version = "0.8.6"
+version = "0.8.5"
 rust-version.workspace = true
 edition.workspace = true
@@ -68,8 +68,7 @@ pub trait ExtendResponse: Sized {
             let nonce =
                 use_nonce().map(|n| n.to_string()).unwrap_or_default();
             if let Some(manifest) = use_context::<WasmSplitManifest>() {
-                let (pkg_path, manifest, wasm_split_file) =
-                    &*manifest.0.read_value();
+                let (pkg_path, manifest) = &*manifest.0.read_value();
                 let prefetches = prefetches.0.read_value();

                 let all_prefetches = prefetches.iter().flat_map(|key| {

@@ -91,7 +90,7 @@ pub trait ExtendResponse: Sized {
                     .to_html();
             }
             _ = view! {
-                <Link rel="modulepreload" href=format!("{pkg_path}/{wasm_split_file}") crossorigin=nonce/>
+                <Link rel="modulepreload" href=format!("{pkg_path}/__wasm_split.js") crossorigin=nonce/>
             }
             .to_html();
         }
@@ -1,6 +1,6 @@
 [package]
 name = "leptos"
-version = "0.8.11"
+version = "0.8.6"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"

@@ -58,9 +58,6 @@ slotmap = { workspace = true, default-features = true }
 futures = { workspace = true, default-features = true }
 send_wrapper = { workspace = true, default-features = true }
 wasm_split_helpers.workspace = true
-subsecond = { workspace = true, default-features = true, optional = true }
-dioxus-cli-config = { workspace = true, default-features = true, optional = true }
-dioxus-devtools = { workspace = true, default-features = true, optional = true }

 [features]
 hydration = [

@@ -88,12 +85,6 @@ ssr = [
 ]
 nightly = ["leptos_macro/nightly", "reactive_graph/nightly", "tachys/nightly"]
 rkyv = ["server_fn/rkyv", "leptos_server/rkyv"]
-bitcode = ["server_fn/bitcode"]
-serde-lite = ["server_fn/serde-lite", "leptos_server/serde-lite"]
-cbor = ["server_fn/cbor"]
-msgpack = ["server_fn/msgpack"]
-postcard = ["server_fn/postcard"]
-multipart = ["server_fn/multipart"]
 tracing = [
     "dep:tracing",
     "reactive_graph/tracing",

@@ -111,16 +102,6 @@ trace-component-props = [
 ]
 delegation = ["tachys/delegation"]
 islands-router = ["tachys/mark_branches"]
-subsecond = [
-    "reactive_graph/subsecond",
-    "dep:subsecond",
-    "dep:dioxus-cli-config",
-    "dep:dioxus-devtools",
-    "web-sys/Location",
-    "web-sys/MessageEvent",
-    "web-sys/WebSocket",
-    "web-sys/Window",
-]

 [dev-dependencies]
 tokio = { features = [

@@ -153,18 +134,13 @@ denylist = [
     "trace-component-props",
     "spin",
     "islands",
-    "bitcode",
-    "serde-lite",
-    "cbor",
-    "msgpack",
-    "postcard",
-    "multipart",
 ]
 skip_feature_sets = [
     ["csr", "ssr"],
     ["csr", "hydrate"],
     ["ssr", "hydrate"],
     ["serde", "serde-lite"],
     ["serde-lite", "miniserde"],
     ["serde", "miniserde"],
     ["serde", "rkyv"],
     ["miniserde", "rkyv"],
@@ -262,16 +262,6 @@ where
     }
 }

-impl<C> From<View<C>> for ViewFn
-where
-    C: Clone + Send + Sync + 'static,
-    View<C>: IntoAny,
-{
-    fn from(value: View<C>) -> Self {
-        Self(Arc::new(move || value.clone().into_any()))
-    }
-}
-
 impl ViewFn {
     /// Execute the wrapped function
     pub fn run(&self) -> AnyView {

@@ -299,16 +289,6 @@ where
     }
 }

-impl<C> From<View<C>> for ViewFnOnce
-where
-    C: Send + Sync + 'static,
-    View<C>: IntoAny,
-{
-    fn from(value: View<C>) -> Self {
-        Self(Box::new(move || value.into_any()))
-    }
-}
-
 impl ViewFnOnce {
     /// Execute the wrapped function
     pub fn run(self) -> AnyView {
@@ -27,6 +27,7 @@ pub fn AutoReload(
         None => options.reload_port,
     };
     let protocol = match options.reload_ws_protocol {
+        leptos_config::ReloadWSProtocol::Auto => "null",
         leptos_config::ReloadWSProtocol::WS => "'ws://'",
         leptos_config::ReloadWSProtocol::WSS => "'wss://'",
     };

@@ -65,56 +66,16 @@ pub fn HydrationScripts(
     if let Some(splits) = SPLIT_MANIFEST.get_or_init(|| {
         let root = root.clone().unwrap_or_default();

-        let (wasm_split_js, wasm_split_manifest) = if options.hash_files {
-            let hash_path = std::env::current_exe()
-                .map(|path| {
-                    path.parent().map(|p| p.to_path_buf()).unwrap_or_default()
-                })
-                .unwrap_or_default()
-                .join(options.hash_file.as_ref());
-            let hashes = std::fs::read_to_string(&hash_path)
-                .expect("failed to read hash file");
-
-            let mut split =
-                "__wasm_split.______________________.js".to_string();
-            let mut manifest = "__wasm_split_manifest.json".to_string();
-            for line in hashes.lines() {
-                let line = line.trim();
-                if !line.is_empty() {
-                    if let Some((file, hash)) = line.split_once(':') {
-                        if file == "manifest" {
-                            manifest.clear();
-                            manifest.push_str("__wasm_split_manifest.");
-                            manifest.push_str(hash.trim());
-                            manifest.push_str(".json");
-                        }
-                        if file == "split" {
-                            split.clear();
-                            split.push_str("__wasm_split.");
-                            split.push_str(hash.trim());
-                            split.push_str(".js");
-                        }
-                    }
-                }
-            }
-            (split, manifest)
-        } else {
-            (
-                "__wasm_split.______________________.js".to_string(),
-                "__wasm_split_manifest.json".to_string(),
-            )
-        };
-
         let site_dir = &options.site_root;
         let pkg_dir = &options.site_pkg_dir;
         let path = PathBuf::from(site_dir.to_string());
-        let path = path.join(pkg_dir.to_string()).join(wasm_split_manifest);
+        let path = path
+            .join(pkg_dir.to_string())
+            .join("__wasm_split_manifest.json");
         let file = std::fs::read_to_string(path).ok()?;

         let manifest = WasmSplitManifest(ArcStoredValue::new((
             format!("{root}/{pkg_dir}"),
             serde_json::from_str(&file).expect("could not read manifest file"),
-            wasm_split_js,
         )));

         Some(manifest)
@@ -1,8 +1,9 @@
-if (window.location.protocol === 'https:') {
-    protocol = 'wss://';
+let host = window.location.hostname;
+
+if (protocol === null) {
+    protocol = window.location.protocol === 'https:' ? 'wss://' : 'ws://';
 }

-let host = window.location.hostname;
 let ws = new WebSocket(`${protocol}${host}:${reload_port}/live_reload`);
 ws.onmessage = (ev) => {
     let msg = JSON.parse(ev.data);
@@ -1,4 +1,5 @@
 #![deny(missing_docs)]
 #![forbid(unsafe_code)]

 //! # About Leptos
 //!

@@ -84,22 +85,12 @@
 //! # Feature Flags
 //!
 //! - **`nightly`**: On `nightly` Rust, enables the function-call syntax for signal getters and setters.
 //!   Also enables some experimental optimizations that improve the handling of static strings and
 //!   the performance of the `template! {}` macro.
 //! - **`csr`** Client-side rendering: Generate DOM nodes in the browser.
 //! - **`ssr`** Server-side rendering: Generate an HTML string (typically on the server).
 //! - **`islands`** Activates “islands mode,” in which components are not made interactive on the
 //!   client unless they use the `#[island]` macro.
 //! - **`hydrate`** Hydration: use this to add interactivity to an SSRed Leptos app.
 //! - **`nonce`** Adds support for nonces to be added as part of a Content Security Policy.
-//! - **`rkyv`** In SSR/hydrate mode, enables using [`rkyv`](https://docs.rs/rkyv/latest/rkyv/) to serialize resources.
+//! - **`rkyv`** In SSR/hydrate mode, uses [`rkyv`](https://docs.rs/rkyv/latest/rkyv/) to serialize resources and send them
+//!   from the server to the client.
 //! - **`tracing`** Adds support for [`tracing`](https://docs.rs/tracing/latest/tracing/).
 //! - **`trace-component-props`** Adds `tracing` support for component props.
 //! - **`delegation`** Uses event delegation rather than the browser’s native event handling
 //!   system. (This improves the performance of creating large numbers of elements simultaneously,
 //!   in exchange for occasional edge cases in which events behave differently from native browser
 //!   events.)
 //! - **`rustls`** Use `rustls` for server functions.
 //!
 //! **Important Note:** You must enable one of `csr`, `hydrate`, or `ssr` to tell Leptos
 //! which mode your app is operating in. You should only enable one of these per build target,

@@ -305,15 +296,9 @@ pub use tachys::mathml as math;
 #[doc(inline)]
 pub use tachys::svg;

-#[cfg(feature = "subsecond")]
-/// Utilities for using binary hot-patching with [`subsecond`].
-pub mod subsecond;
-
 /// Utilities for simple isomorphic logging to the console or terminal.
 pub mod logging {
-    pub use leptos_dom::{
-        debug_error, debug_log, debug_warn, error, log, warn,
-    };
+    pub use leptos_dom::{debug_warn, error, log, warn};
 }

 /// Utilities for working with asynchronous tasks.

@@ -397,8 +382,7 @@ pub fn prefetch_lazy_fn_on_server(id: &'static str) {
 #[derive(Clone, Debug, Default)]
 pub struct WasmSplitManifest(
     pub reactive_graph::owner::ArcStoredValue<(
-        String, // the pkg root
-        std::collections::HashMap<String, Vec<String>>, // preloads
-        String, // the name of the __wasm_split.js file
+        String,
+        std::collections::HashMap<String, Vec<String>>,
     )>,
 );
@@ -1,62 +0,0 @@
-use dioxus_devtools::DevserverMsg;
-use wasm_bindgen::{prelude::Closure, JsCast};
-use web_sys::{js_sys::JsString, MessageEvent, WebSocket};
-
-/// Sets up a websocket connect to the `dx` CLI, waiting for incoming hot-patching messages
-/// and patching the WASM binary appropriately.
-//
-// Note: This is a stripped-down version of Dioxus's `make_ws` from `dioxus_web`
-// It's essentially copy-pasted here because it's not pub there.
-// Would love to just take a dependency on that to be able to use it and deduplicate.
-//
-// https://github.com/DioxusLabs/dioxus/blob/main/packages/web/src/devtools.rs#L36
-pub fn connect_to_hot_patch_messages() {
-    // Get the location of the devserver, using the current location plus the /_dioxus path
-    // The idea here being that the devserver is always located on the /_dioxus behind a proxy
-    let location = web_sys::window().unwrap().location();
-    let url = format!(
-        "{protocol}//{host}/_dioxus?build_id={build_id}",
-        protocol = match location.protocol().unwrap() {
-            prot if prot == "https:" => "wss:",
-            _ => "ws:",
-        },
-        host = location.host().unwrap(),
-        build_id = dioxus_cli_config::build_id(),
-    );
-
-    let ws = WebSocket::new(&url).unwrap();
-
-    ws.set_onmessage(Some(
-        Closure::<dyn FnMut(MessageEvent)>::new(move |e: MessageEvent| {
-            let Ok(text) = e.data().dyn_into::<JsString>() else {
-                return;
-            };
-
-            // The devserver messages have some &'static strs in them, so we need to leak the source string
-            let string: String = text.into();
-            let string = Box::leak(string.into_boxed_str());
-
-            if let Ok(DevserverMsg::HotReload(msg)) =
-                serde_json::from_str::<DevserverMsg>(string)
-            {
-                if let Some(jump_table) = msg.jump_table.as_ref().cloned() {
-                    if msg.for_build_id == Some(dioxus_cli_config::build_id()) {
-                        let our_pid = if cfg!(target_family = "wasm") {
-                            None
-                        } else {
-                            Some(std::process::id())
-                        };
-
-                        if msg.for_pid == our_pid {
-                            unsafe { subsecond::apply_patch(jump_table) }
-                                .unwrap();
-                        }
-                    }
-                }
-            }
-        })
-        .into_js_value()
-        .as_ref()
-        .unchecked_ref(),
-    ));
-}
@@ -32,12 +32,12 @@ use tachys::{
 };
 use throw_error::ErrorHookFuture;

-/// If any [`Resource`](crate::prelude::Resource) is read in the `children` of this
+/// If any [`Resource`](leptos_reactive::Resource) is read in the `children` of this
 /// component, it will show the `fallback` while they are loading. Once all are resolved,
 /// it will render the `children`.
 ///
 /// Each time one of the resources is loading again, it will fall back. To keep the current
-/// children instead, use [Transition](crate::prelude::Transition).
+/// children instead, use [Transition](crate::Transition).
 ///
 /// Note that the `children` will be rendered initially (in order to capture the fact that
 /// those resources are read under the suspense), so you cannot assume that resources read

@@ -16,11 +16,11 @@ use reactive_graph::{
 use slotmap::{DefaultKey, SlotMap};
 use tachys::reactive_graph::OwnedView;

-/// If any [`Resource`](crate::prelude::Resource) is read in the `children` of this
+/// If any [`Resource`](leptos_reactive::Resource) is read in the `children` of this
 /// component, it will show the `fallback` while they are loading. Once all are resolved,
 /// it will render the `children`.
 ///
-/// Unlike [`Suspense`](crate::prelude::Suspense), this will not fall
+/// Unlike [`Suspense`](crate::Suspense), this will not fall
 /// back to the `fallback` state if there are further changes after the initial load.
 ///
 /// Note that the `children` will be rendered initially (in order to capture the fact that
@@ -5,7 +5,7 @@ license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Configuration for the Leptos web framework."
 readme = "../README.md"
-version = "0.8.7"
+version = "0.8.5"
 rust-version.workspace = true
 edition.workspace = true

@@ -153,7 +153,7 @@ impl LeptosOptions {
             None => None,
         },
         reload_ws_protocol: ws_from_str(
-            env_w_default("LEPTOS_RELOAD_WS_PROTOCOL", "ws")?.as_str(),
+            env_w_default("LEPTOS_RELOAD_WS_PROTOCOL", "auto")?.as_str(),
         )?,
         not_found_path: env_w_default("LEPTOS_NOT_FOUND_PATH", "/404")?
             .into(),
@@ -283,22 +283,24 @@ impl TryFrom<String> for Env {
 pub enum ReloadWSProtocol {
     WS,
     WSS,
     Auto,
 }

 impl Default for ReloadWSProtocol {
     fn default() -> Self {
-        Self::WS
+        Self::Auto
     }
 }

 fn ws_from_str(input: &str) -> Result<ReloadWSProtocol, LeptosConfigError> {
     let sanitized = input.to_lowercase();
     match sanitized.as_ref() {
         "auto" => Ok(ReloadWSProtocol::Auto),
         "ws" | "WS" => Ok(ReloadWSProtocol::WS),
         "wss" | "WSS" => Ok(ReloadWSProtocol::WSS),
         _ => Err(LeptosConfigError::EnvVarError(format!(
-            "{input} is not a supported websocket protocol. Use only `ws` or \
-             `wss`.",
+            "{input} is not a supported websocket protocol. Use only `auto`, \
+             `ws` or `wss`.",
         ))),
     }
 }
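Editorial note: with the `auto` setting above, the actual `ws://` vs `wss://` choice is deferred to the client, which resolves it from the page protocol (see the `live_reload.js` hunk earlier). A minimal sketch of that resolution logic; the enum and function here are local stand-ins for illustration, not the real `leptos_config` API:

```rust
/// Local copy of the reload-protocol setting, for illustration only.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
enum ReloadWSProtocol {
    WS,
    WSS,
    #[default]
    Auto,
}

/// Picks the websocket scheme: `Auto` follows the page protocol, while the
/// other two variants force a scheme explicitly.
fn resolve_ws_scheme(setting: ReloadWSProtocol, page_is_https: bool) -> &'static str {
    match setting {
        ReloadWSProtocol::WS => "ws://",
        ReloadWSProtocol::WSS => "wss://",
        ReloadWSProtocol::Auto if page_is_https => "wss://",
        ReloadWSProtocol::Auto => "ws://",
    }
}

fn main() {
    assert_eq!(resolve_ws_scheme(ReloadWSProtocol::Auto, true), "wss://");
    assert_eq!(resolve_ws_scheme(ReloadWSProtocol::Auto, false), "ws://");
    assert_eq!(resolve_ws_scheme(ReloadWSProtocol::WS, true), "ws://");
}
```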
@@ -1,6 +1,6 @@
 [package]
 name = "leptos_dom"
-version = "0.8.7"
+version = "0.8.5"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
@@ -258,7 +258,15 @@ pub fn request_idle_callback_with_handle(
 ///
 /// <div class="warning">The task is called outside of the ownership tree, this means that if you want to access for example the context you need to reestablish the owner.</div>
 pub fn queue_microtask(task: impl FnOnce() + 'static) {
-    tachys::renderer::dom::queue_microtask(task);
+    use js_sys::{Function, Reflect};
+
+    let task = Closure::once_into_js(task);
+    let window = web_sys::window().expect("window not available");
+    let queue_microtask =
+        Reflect::get(&window, &JsValue::from_str("queueMicrotask"))
+            .expect("queueMicrotask not available");
+    let queue_microtask = queue_microtask.unchecked_into::<Function>();
+    _ = queue_microtask.call1(&JsValue::UNDEFINED, &task);
 }

 /// Handle that is generated by [set_timeout_with_handle] and can be used to clear the timeout.

@@ -463,7 +471,7 @@ pub fn set_interval_with_handle(

 #[inline(never)]
 fn si(
-    cb: Box<dyn FnMut()>,
+    cb: Box<dyn Fn()>,
     duration: Duration,
 ) -> Result<IntervalHandle, JsValue> {
     let cb = Closure::wrap(cb).into_js_value();
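Editorial note: for orientation, this is how the helper changed above is typically used. A minimal sketch, assuming a browser (`csr`/`hydrate`) build and that the function is reachable as `leptos_dom::helpers::queue_microtask`, as in published releases; the logged message is illustrative:

```rust
use leptos_dom::helpers::queue_microtask;

fn defer_logging() {
    // Runs after the current synchronous work completes, but before the
    // browser repaints or handles other queued macrotasks. As the doc
    // comment above warns, the closure runs outside the ownership tree.
    queue_microtask(|| {
        leptos_dom::log!("ran in a microtask");
    });
}
```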
@@ -1,6 +1,6 @@
 [package]
 name = "leptos_macro"
-version = "0.8.10"
+version = "0.8.6"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"

@@ -24,7 +24,9 @@ proc-macro-error2 = { default-features = false, workspace = true }
 proc-macro2 = { workspace = true, default-features = true }
 quote = { workspace = true, default-features = true }
 syn = { features = ["full"], workspace = true, default-features = true }
-rstml = { workspace = true, default-features = true }
+rstml = { workspace = true, default-features = true, features = [
+    "rawtext-stable-hack",
+] }
 leptos_hot_reload = { workspace = true }
 server_fn_macro = { workspace = true }
 convert_case = { workspace = true, default-features = true }
@@ -323,7 +323,14 @@ fn view_macro_impl(tokens: TokenStream, template: bool) -> TokenStream {
             .chain(tokens)
             .collect()
     };
-    let config = rstml::ParserConfig::default().recover_block(true);
+    let macro_call_pattern = if let Some(class) = &global_class {
+        quote!(view! { class = #class, %% })
+    } else {
+        quote!(view! {%%})
+    };
+    let config = rstml::ParserConfig::default()
+        .recover_block(true)
+        .macro_call_pattern(macro_call_pattern);
     let parser = rstml::Parser::new(config);
     let (mut nodes, errors) = parser.parse_recoverable(tokens).split_vec();
     let errors = errors.into_iter().map(|e| e.emit_as_expr_tokens());

@@ -25,8 +25,9 @@ use std::{
 use syn::{
     punctuated::Pair::{End, Punctuated},
     spanned::Spanned,
-    Expr::{self, Tuple},
-    ExprArray, ExprLit, ExprPath, ExprRange, Lit, LitStr, RangeLimits, Stmt,
+    Expr,
+    Expr::Tuple,
+    ExprArray, ExprLit, ExprRange, Lit, LitStr, RangeLimits, Stmt,
 };

 #[derive(Clone, Copy, PartialEq, Eq)]

@@ -1678,7 +1679,7 @@ fn attribute_value(
 }

 // Keep list alphabetized for binary search
-const TYPED_EVENTS: [&str; 127] = [
+const TYPED_EVENTS: [&str; 126] = [
     "DOMContentLoaded",
     "abort",
     "afterprint",

@@ -1774,7 +1775,6 @@ const TYPED_EVENTS: [&str; 127] = [
     "reset",
     "resize",
     "scroll",
-    "scrollend",
     "securitypolicyviolation",
     "seeked",
     "seeking",

@@ -1871,28 +1871,6 @@ pub(crate) fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
     }
 }

-pub(crate) fn full_path_from_tag_name(tag_name: &NodeName) -> Option<ExprPath> {
-    match tag_name {
-        NodeName::Path(path) => Some(path.clone()),
-        NodeName::Block(_) => {
-            let span = tag_name.span();
-            proc_macro_error2::emit_error!(
-                span,
-                "blocks not allowed in tag-name position"
-            );
-            None
-        }
-        _ => {
-            let span = tag_name.span();
-            proc_macro_error2::emit_error!(
-                span,
-                "punctuated names not allowed in slots"
-            );
-            None
-        }
-    }
-}
-
 pub(crate) fn directive_call_from_attribute_node(
     attr: &KeyedAttribute,
     directive_name: &str,
@@ -1,6 +1,6 @@
 use super::{
     component_builder::maybe_optimised_component_children,
-    convert_to_snake_case, full_path_from_tag_name,
+    convert_to_snake_case, ident_from_tag_name,
 };
 use crate::view::{fragment_to_tokens, utils::filter_prefixed_attrs, TagType};
 use proc_macro2::{Ident, TokenStream, TokenTree};

@@ -24,7 +24,7 @@ pub(crate) fn slot_to_tokens(
         node.name().to_string()
     });

-    let component_path = full_path_from_tag_name(node.name());
+    let component_name = ident_from_tag_name(node.name());

     let Some(parent_slots) = parent_slots else {
         proc_macro_error2::emit_error!(

@@ -190,7 +190,7 @@ pub(crate) fn slot_to_tokens(

     let slot = quote_spanned! {node.span()=>
         {
-            let slot = #component_path::builder()
+            let slot = #component_name::builder()
                 #(#props)*
                 #(#slots)*
                 #children
@@ -44,8 +44,7 @@ denylist = ["tracing"]
 max_combination_size = 2

 [package.metadata.docs.rs]
-rustdoc-args = ["--generate-link-to-definition", "--cfg", "docsrs"]
-all-features = true
+rustdoc-args = ["--generate-link-to-definition"]

 [lints.rust]
 unexpected_cfgs = { level = "warn", check-cfg = ['cfg(leptos_debuginfo)'] }
@@ -386,7 +386,6 @@ T: Send + Sync + 'static,
 }

 #[cfg(feature = "serde-wasm-bindgen")]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
 impl<T> ArcOnceResource<T, JsonSerdeWasmCodec>
 where
     T: Send + Sync + 'static,

@@ -419,7 +418,6 @@ fut: impl Future<Output = T> + Send + 'static
     }
 }
 #[cfg(feature = "miniserde")]
-#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
 impl<T> ArcOnceResource<T, MiniserdeCodec>
 where
     T: Send + Sync + 'static,

@@ -453,7 +451,6 @@ where
 }

 #[cfg(feature = "serde-lite")]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
 impl<T> ArcOnceResource<T, SerdeLite<JsonSerdeCodec>>
 where
     T: Send + Sync + 'static,

@@ -487,7 +484,6 @@ fut: impl Future<Output = T> + Send + 'static
 }

 #[cfg(feature = "rkyv")]
-#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
 impl<T> ArcOnceResource<T, RkyvCodec>
 where
     T: Send + Sync + 'static,

@@ -752,7 +748,6 @@ T: Send + Sync + 'static,
 }

 #[cfg(feature = "serde-wasm-bindgen")]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
 impl<T> OnceResource<T, JsonSerdeWasmCodec>
 where
     T: Send + Sync + 'static,

@@ -785,7 +780,6 @@ fut: impl Future<Output = T> + Send + 'static
     }
 }
 #[cfg(feature = "miniserde")]
-#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
 impl<T> OnceResource<T, MiniserdeCodec>
 where
     T: Send + Sync + 'static,

@@ -819,7 +813,6 @@ where
 }

 #[cfg(feature = "serde-lite")]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
 impl<T> OnceResource<T, SerdeLite<JsonSerdeCodec>>
 where
     T: Send + Sync + 'static,

@@ -853,7 +846,6 @@ fut: impl Future<Output = T> + Send + 'static
 }

 #[cfg(feature = "rkyv")]
-#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
 impl<T> OnceResource<T, RkyvCodec>
 where
     T: Send + Sync + 'static,

@@ -709,7 +709,6 @@ where
 }

 #[cfg(feature = "rkyv")]
-#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
 impl<T> ArcResource<T, RkyvCodec>
 where
     RkyvCodec: Encoder<T> + Decoder<T>,

@@ -1049,7 +1048,6 @@ where
 }

 #[cfg(feature = "serde-wasm-bindgen")]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
 impl<T> Resource<T, JsonSerdeWasmCodec>
 where
     JsonSerdeWasmCodec: Encoder<T> + Decoder<T>,

@@ -1107,7 +1105,6 @@ where
 }

 #[cfg(feature = "miniserde")]
-#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
 impl<T> Resource<T, MiniserdeCodec>
 where
     MiniserdeCodec: Encoder<T> + Decoder<T>,

@@ -1167,7 +1164,6 @@ where
 }

 #[cfg(feature = "serde-lite")]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
 impl<T> Resource<T, SerdeLite<JsonSerdeCodec>>
 where
     SerdeLite<JsonSerdeCodec>: Encoder<T> + Decoder<T>,

@@ -1226,7 +1222,6 @@ where
 }

 #[cfg(feature = "rkyv")]
-#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
 impl<T> Resource<T, RkyvCodec>
 where
     RkyvCodec: Encoder<T> + Decoder<T>,

@@ -80,7 +80,6 @@ where
 }

 #[cfg(feature = "serde-lite")]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
 impl<T> SharedValue<T, SerdeLite<JsonSerdeCodec>>
 where
     SerdeLite<JsonSerdeCodec>: Encoder<T> + Decoder<T>,

@@ -103,7 +102,6 @@ where
 }

 #[cfg(feature = "serde-wasm-bindgen")]
-#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
 impl<T> SharedValue<T, JsonSerdeWasmCodec>
 where
     JsonSerdeWasmCodec: Encoder<T> + Decoder<T>,

@@ -126,7 +124,6 @@ where
 }

 #[cfg(feature = "miniserde")]
-#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
 impl<T> SharedValue<T, MiniserdeCodec>
 where
     MiniserdeCodec: Encoder<T> + Decoder<T>,

@@ -149,7 +146,6 @@ where
 }

 #[cfg(feature = "rkyv")]
-#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
 impl<T> SharedValue<T, RkyvCodec>
 where
     RkyvCodec: Encoder<T> + Decoder<T>,
@@ -15,18 +15,19 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@playwright/test": {
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
|
||||
"integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
|
||||
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"playwright": "1.56.1"
|
||||
"playwright": "1.44.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
@@ -45,6 +46,7 @@
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
@@ -54,33 +56,35 @@
|
||||
}
|
||||
},
|
||||
"node_modules/playwright": {
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
|
||||
"integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
|
||||
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"playwright-core": "1.56.1"
|
||||
"playwright-core": "1.44.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
"node": ">=16"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/playwright-core": {
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
|
||||
"integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"playwright-core": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
@@ -107,12 +111,12 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@playwright/test": {
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
|
||||
"integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
|
||||
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"playwright": "1.56.1"
|
||||
"playwright": "1.44.1"
|
||||
}
|
||||
},
|
||||
"@types/node": {
|
||||
@@ -132,19 +136,19 @@
|
||||
"optional": true
|
||||
},
|
||||
"playwright": {
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
|
||||
"integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
|
||||
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"fsevents": "2.3.2",
|
||||
"playwright-core": "1.56.1"
|
||||
"playwright-core": "1.44.1"
|
||||
}
|
||||
},
|
||||
"playwright-core": {
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
|
||||
"integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"dev": true
|
||||
},
|
||||
"typescript": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_graph"
|
||||
version = "0.2.9"
|
||||
version = "0.2.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -27,7 +27,6 @@ async-lock = { workspace = true, default-features = true }
|
||||
send_wrapper = { features = [
|
||||
"futures",
|
||||
], workspace = true, default-features = true }
|
||||
subsecond = { workspace = true, default-features = true, optional = true }
|
||||
indexmap = { workspace = true, default-features = true }
|
||||
|
||||
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
|
||||
@@ -52,7 +51,6 @@ hydration = ["dep:hydration_context"]
|
||||
effects = [
|
||||
] # whether to run effects: should be disabled for something like server rendering
|
||||
sandboxed-arenas = []
|
||||
subsecond = ["dep:subsecond"]
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use crate::{
|
||||
computed::{ArcMemo, Memo, ScopedFuture},
|
||||
computed::{ArcMemo, Memo},
|
||||
diagnostics::is_suppressing_resource_load,
|
||||
graph::untrack,
|
||||
owner::{ArcStoredValue, ArenaItem, Owner},
|
||||
owner::{ArcStoredValue, ArenaItem},
|
||||
send_wrapper_ext::SendOption,
|
||||
signal::{ArcMappedSignal, ArcRwSignal, MappedSignal, RwSignal},
|
||||
traits::{DefinedAt, Dispose, Get, GetUntracked, GetValue, Update, Write},
|
||||
@@ -200,18 +199,13 @@ where
|
||||
I: Send + Sync,
|
||||
O: Send + Sync,
|
||||
{
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
ArcAction {
|
||||
in_flight: ArcRwSignal::new(0),
|
||||
input: ArcRwSignal::new(SendOption::new(None)),
|
||||
value: ArcRwSignal::new(SendOption::new(value)),
|
||||
version: Default::default(),
|
||||
dispatched: Default::default(),
|
||||
action_fn: Arc::new(move |input| {
|
||||
Box::pin(owner.with(|| {
|
||||
ScopedFuture::new_untracked(untrack(|| action_fn(input)))
|
||||
}))
|
||||
}),
|
||||
action_fn: Arc::new(move |input| Box::pin(action_fn(input))),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
@@ -376,7 +370,6 @@ where
|
||||
F: Fn(&I) -> Fu + 'static,
|
||||
Fu: Future<Output = O> + 'static,
|
||||
{
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
let action_fn = SendWrapper::new(action_fn);
|
||||
ArcAction {
|
||||
in_flight: ArcRwSignal::new(0),
|
||||
@@ -385,9 +378,7 @@ where
|
||||
version: Default::default(),
|
||||
dispatched: Default::default(),
|
||||
action_fn: Arc::new(move |input| {
|
||||
Box::pin(SendWrapper::new(owner.with(|| {
|
||||
ScopedFuture::new_untracked(untrack(|| action_fn(input)))
|
||||
})))
|
||||
Box::pin(SendWrapper::new(action_fn(input)))
|
||||
}),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
|
||||
@@ -521,10 +521,9 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
{
|
||||
let fun = move || {
|
||||
let fut = fun();
|
||||
let fut =
|
||||
ScopedFuture::new_untracked_with_diagnostics(async move {
|
||||
SendOption::new(Some(fut.await))
|
||||
});
|
||||
let fut = ScopedFuture::new_untracked(async move {
|
||||
SendOption::new(Some(fut.await))
|
||||
});
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
let fut = Sandboxed::new(fut);
|
||||
fut
|
||||
|
||||
@@ -54,55 +54,11 @@ impl<Fut> ScopedFuture<Fut> {
|
||||
fut,
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn new_untracked_with_diagnostics(
|
||||
fut: Fut,
|
||||
) -> ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
ScopedFutureUntrackedWithDiagnostics {
|
||||
owner,
|
||||
observer: None,
|
||||
fut,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fut: Future> Future for ScopedFuture<Fut> {
|
||||
type Output = Fut::Output;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
this.owner.with(|| {
|
||||
#[cfg(debug_assertions)]
|
||||
let _maybe_guard = if this.observer.is_none() {
|
||||
Some(crate::diagnostics::SpecialNonReactiveZone::enter())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
this.observer.with_observer(|| this.fut.poll(cx))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pin_project! {
|
||||
/// A [`Future`] wrapper that sets the [`Owner`] and [`Observer`] before polling the inner
|
||||
/// `Future`, output of [`ScopedFuture::new_untracked_with_diagnostics`].
|
||||
///
|
||||
/// In leptos 0.9 this will be replaced with `ScopedFuture` itself.
|
||||
#[derive(Clone)]
|
||||
pub struct ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
owner: Owner,
|
||||
observer: Option<AnySubscriber>,
|
||||
#[pin]
|
||||
fut: Fut,
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fut: Future> Future for ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
type Output = Fut::Output;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
this.owner
|
||||
|
||||
@@ -65,7 +65,6 @@ impl Dispose for ImmediateEffect {
|
||||
|
||||
impl ImmediateEffect {
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this requires a `Fn` function because it might recurse.
|
||||
/// Use [Self::new_mut] to pass a `FnMut` function, it'll panic on recursion.
|
||||
@@ -83,7 +82,6 @@ impl ImmediateEffect {
|
||||
Self { inner: Some(inner) }
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics on recursion or if triggered in parallel. Also see [Self::new]
|
||||
@@ -95,10 +93,8 @@ impl ImmediateEffect {
|
||||
Self::new(move || fun.try_lock().expect(MSG)())
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this requires a `Fn` function because it might recurse.
|
||||
/// Use [Self::new_mut_scoped] to pass a `FnMut` function, it'll panic on recursion.
|
||||
/// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
|
||||
#[track_caller]
|
||||
pub fn new_scoped(fun: impl Fn() + Send + Sync + 'static) {
|
||||
@@ -106,19 +102,6 @@ impl ImmediateEffect {
|
||||
|
||||
on_cleanup(move || effect.dispose());
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics on recursion or if triggered in parallel. Also see [Self::new_scoped]
|
||||
#[track_caller]
|
||||
pub fn new_mut_scoped(fun: impl FnMut() + Send + Sync + 'static) {
|
||||
let effect = Self::new_mut(fun);
|
||||
|
||||
on_cleanup(move || effect.dispose());
|
||||
}
|
||||
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
///
|
||||
@@ -147,41 +130,6 @@ impl DefinedAt for ImmediateEffect {
|
||||
}
|
||||
}
|
||||
|
||||
/// Defers any [ImmediateEffect]s from running until the end of the function.
|
||||
///
|
||||
/// NOTE: this affects only [ImmediateEffect]s, not other effects.
|
||||
///
|
||||
/// NOTE: this is rarely needed, but it is useful for example when multiple signals
|
||||
/// need to be updated atomically (for example a double-bound signal tree).
|
||||
pub fn batch<T>(f: impl FnOnce() -> T) -> T {
|
||||
struct ExecuteOnDrop;
|
||||
impl Drop for ExecuteOnDrop {
|
||||
fn drop(&mut self) {
|
||||
let effects = {
|
||||
let mut batch = inner::BATCH.write().or_poisoned();
|
||||
batch.take().unwrap().into_inner().expect("lock poisoned")
|
||||
};
|
||||
// TODO: Should we skip the effects if it's panicking?
|
||||
for effect in effects {
|
||||
effect.update_if_necessary();
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut execute_on_drop = None;
|
||||
{
|
||||
let mut batch = inner::BATCH.write().or_poisoned();
|
||||
if batch.is_none() {
|
||||
execute_on_drop = Some(ExecuteOnDrop);
|
||||
} else {
|
||||
// Nested batching has no effect.
|
||||
}
|
||||
*batch = Some(batch.take().unwrap_or_default());
|
||||
}
|
||||
let ret = f();
|
||||
drop(execute_on_drop);
|
||||
ret
|
||||
}
|
||||
|
||||
mod inner {
|
||||
use crate::{
|
||||
graph::{
|
||||
@@ -192,7 +140,6 @@ mod inner {
|
||||
owner::Owner,
|
||||
traits::DefinedAt,
|
||||
};
|
||||
use indexmap::IndexSet;
|
||||
use or_poisoned::OrPoisoned;
|
||||
use std::{
|
||||
panic::Location,
|
||||
@@ -200,11 +147,6 @@ mod inner {
|
||||
thread::{self, ThreadId},
|
||||
};
|
||||
|
||||
/// Only the [super::batch] function ever writes to the outer RwLock.
|
||||
/// While the effects will write to the inner one.
|
||||
pub(super) static BATCH: RwLock<Option<RwLock<IndexSet<AnySubscriber>>>> =
|
||||
RwLock::new(None);
|
||||
|
||||
/// Handles subscription logic for effects.
|
||||
///
|
||||
/// To handle parallelism and recursion we assign ordered (1..) ids to each run.
|
||||
@@ -260,8 +202,6 @@ mod inner {
|
||||
fun: impl Fn() + Send + Sync + 'static,
|
||||
) -> Arc<RwLock<EffectInner>> {
|
||||
let owner = Owner::new();
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
let defined_at = Location::caller();
|
||||
|
||||
Arc::new_cyclic(|weak| {
|
||||
let any_subscriber = AnySubscriber(
|
||||
@@ -271,7 +211,7 @@ mod inner {
|
||||
|
||||
RwLock::new(EffectInner {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at,
|
||||
defined_at: Location::caller(),
|
||||
owner,
|
||||
state: ReactiveNodeState::Dirty,
|
||||
run_count_start: 0,
|
||||
@@ -320,17 +260,6 @@ mod inner {
|
||||
ReactiveNodeState::Dirty => true,
|
||||
};
|
||||
|
||||
{
|
||||
if let Some(batch) = &*BATCH.read().or_poisoned() {
|
||||
let mut batch = batch.write().or_poisoned();
|
||||
let subscriber =
|
||||
self.read().or_poisoned().any_subscriber.clone();
|
||||
|
||||
batch.insert(subscriber);
|
||||
return needs_update;
|
||||
}
|
||||
}
|
||||
|
||||
if needs_update {
|
||||
let mut guard = self.write().or_poisoned();
|
||||
|
||||
|
||||
@@ -9,8 +9,6 @@ use crate::{
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use or_poisoned::OrPoisoned;
|
||||
#[cfg(feature = "subsecond")]
|
||||
use std::sync::Mutex;
|
||||
use std::{
|
||||
fmt::Debug,
|
||||
future::{Future, IntoFuture},
|
||||
@@ -51,39 +49,13 @@ impl<T> Debug for RenderEffect<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "subsecond")]
|
||||
type CurrentHotPtr = Box<dyn Fn() -> Option<subsecond::HotFnPtr> + Send + Sync>;
|
||||
|
||||
impl<T> RenderEffect<T>
|
||||
where
|
||||
T: 'static,
|
||||
{
|
||||
/// Creates a new render effect, which immediately runs `fun`.
|
||||
pub fn new(fun: impl FnMut(Option<T>) -> T + 'static) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let (hot_fn_ptr, fun) = {
|
||||
let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
|
||||
(
|
||||
{
|
||||
let fun = Arc::downgrade(&fun);
|
||||
let wrapped = send_wrapper::SendWrapper::new(move || {
|
||||
fun.upgrade()
|
||||
.map(|n| n.lock().or_poisoned().ptr_address())
|
||||
});
|
||||
// it's not redundant, it's due to the SendWrapper deref
|
||||
#[allow(clippy::redundant_closure)]
|
||||
Box::new(move || wrapped())
|
||||
},
|
||||
move |prev| fun.lock().or_poisoned().call((prev,)),
|
||||
)
|
||||
};
|
||||
|
||||
Self::new_with_value_erased(
|
||||
Box::new(fun),
|
||||
None,
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr,
|
||||
)
|
||||
Self::new_with_value_erased(Box::new(fun), None)
|
||||
}
|
||||
|
||||
/// Creates a new render effect with an initial value.
|
||||
@@ -91,30 +63,7 @@ where
|
||||
fun: impl FnMut(Option<T>) -> T + 'static,
|
||||
initial_value: Option<T>,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let (hot_fn_ptr, fun) = {
|
||||
let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
|
||||
(
|
||||
{
|
||||
let fun = Arc::downgrade(&fun);
|
||||
let wrapped = send_wrapper::SendWrapper::new(move || {
|
||||
fun.upgrade()
|
||||
.map(|n| n.lock().or_poisoned().ptr_address())
|
||||
});
|
||||
// it's not redundant, it's due to the SendWrapper deref
|
||||
#[allow(clippy::redundant_closure)]
|
||||
Box::new(move || wrapped())
|
||||
},
|
||||
move |prev| fun.lock().or_poisoned().call((prev,)),
|
||||
)
|
||||
};
|
||||
|
||||
Self::new_with_value_erased(
|
||||
Box::new(fun),
|
||||
initial_value,
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr,
|
||||
)
|
||||
Self::new_with_value_erased(Box::new(fun), initial_value)
|
||||
}
|
||||
|
||||
/// Creates a new render effect, which immediately runs `fun`.
|
||||
@@ -122,11 +71,6 @@ where
|
||||
fun: impl FnMut(Option<T>) -> T + 'static,
|
||||
value: impl IntoFuture<Output = T> + 'static,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
#[cfg(feature = "subsecond")]
|
||||
let fun = move |prev| fun.call((prev,));
|
||||
|
||||
Self::new_with_async_value_erased(
|
||||
Box::new(fun),
|
||||
Box::pin(value.into_future()),
|
||||
@@ -135,13 +79,8 @@ where
|
||||
}
|
||||
|
||||
fn new_with_value_erased(
|
||||
#[allow(unused_mut)] mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
|
||||
mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
|
||||
initial_value: Option<T>,
|
||||
// this argument can be used to invalidate individual effects in the future
|
||||
// in present experiments, I have found that it is not actually granular enough to make a difference
|
||||
#[allow(unused)]
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr: CurrentHotPtr,
|
||||
) -> Self {
|
||||
// codegen optimisation:
|
||||
fn prep() -> (Owner, Arc<RwLock<EffectInner>>, crate::channel::Receiver)
|
||||
@@ -165,56 +104,12 @@ where
|
||||
let _ = initial_value;
|
||||
let _ = owner;
|
||||
let _ = &mut rx;
|
||||
let _ = fun;
|
||||
let _ = &mut fun;
|
||||
}
|
||||
|
||||
#[cfg(feature = "effects")]
|
||||
{
|
||||
let subscriber = inner.to_any_subscriber();
|
||||
|
||||
#[cfg(all(feature = "subsecond", debug_assertions))]
|
||||
let mut fun = {
|
||||
use crate::graph::ReactiveNode;
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::sync::{Arc, LazyLock, Mutex};
|
||||
use subsecond::HotFnPtr;
|
||||
|
||||
static HOT_RELOAD_SUBSCRIBERS: LazyLock<
|
||||
Mutex<FxHashMap<AnySubscriber, (HotFnPtr, CurrentHotPtr)>>,
|
||||
> = LazyLock::new(|| {
|
||||
subsecond::register_handler(Arc::new(|| {
|
||||
HOT_RELOAD_SUBSCRIBERS.lock().or_poisoned().retain(
|
||||
|subscriber, (prev_ptr, hot_fn_ptr)| {
|
||||
match hot_fn_ptr() {
|
||||
None => false,
|
||||
Some(curr_hot_ptr) => {
|
||||
if curr_hot_ptr != *prev_ptr {
|
||||
crate::log_warning(format_args!(
|
||||
"{prev_ptr:?} <> \
|
||||
{curr_hot_ptr:?}",
|
||||
));
|
||||
*prev_ptr = curr_hot_ptr;
|
||||
|
||||
subscriber.mark_dirty();
|
||||
}
|
||||
true
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
}));
|
||||
Default::default()
|
||||
});
|
||||
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
let initial_ptr = hot_fn_ptr().unwrap();
|
||||
HOT_RELOAD_SUBSCRIBERS
|
||||
.lock()
|
||||
.or_poisoned()
|
||||
.insert(subscriber.clone(), (initial_ptr, hot_fn_ptr));
|
||||
move |prev| fun.call((prev,))
|
||||
};
|
||||
|
||||
*value.write().or_poisoned() = Some(
|
||||
owner.with(|| subscriber.with_observer(|| fun(initial_value))),
|
||||
);
|
||||
@@ -335,11 +230,6 @@ where
|
||||
pub fn new_isomorphic(
|
||||
fun: impl FnMut(Option<T>) -> T + Send + Sync + 'static,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
#[cfg(feature = "subsecond")]
|
||||
let fun = move |prev| fun.call((prev,));
|
||||
|
||||
fn erased<T: Send + Sync + 'static>(
|
||||
mut fun: Box<dyn FnMut(Option<T>) -> T + Send + Sync + 'static>,
|
||||
) -> RenderEffect<T> {
|
||||
|
||||
@@ -209,25 +209,6 @@ impl Owner {
|
||||
this
|
||||
}
|
||||
|
||||
/// Returns the parent of this `Owner`, if any.
|
||||
///
|
||||
/// None when:
|
||||
/// - This is a root owner
|
||||
/// - The parent has been dropped
|
||||
pub fn parent(&self) -> Option<Owner> {
|
||||
self.inner
|
||||
.read()
|
||||
.or_poisoned()
|
||||
.parent
|
||||
.as_ref()
|
||||
.and_then(|p| p.upgrade())
|
||||
.map(|inner| Owner {
|
||||
inner,
|
||||
#[cfg(feature = "hydration")]
|
||||
shared_context: self.shared_context.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a new `Owner` that is the child of the current `Owner`, if any.
|
||||
pub fn child(&self) -> Self {
|
||||
let parent = Some(Arc::downgrade(&self.inner));
|
||||
|
||||
@@ -257,20 +257,6 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<ReadSignal<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcReadSignal<T>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: ReadSignal<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::ReadSignal(value.into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Send + Sync> From<ArcRwSignal<T>> for ArcSignal<T, SyncStorage> {
|
||||
#[track_caller]
|
||||
fn from(value: ArcRwSignal<T>) -> Self {
|
||||
@@ -282,20 +268,6 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<RwSignal<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcRwSignal<T>> + Storage<ArcReadSignal<T>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: RwSignal<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::ReadSignal(value.read_only().into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<ArcMemo<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
@@ -310,20 +282,6 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<Memo<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcMemo<T, S>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: Memo<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::Memo(value.into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> DefinedAt for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
@@ -1542,6 +1500,7 @@ pub mod read {
|
||||
|
||||
impl<T, S> ReadUntracked for MaybeProp<T, S>
|
||||
where
|
||||
T: Clone,
|
||||
S: Storage<Option<T>> + Storage<SignalTypes<Option<T>, S>>,
|
||||
{
|
||||
type Value = ReadGuard<Option<T>, SignalReadGuard<Option<T>, S>>;
|
||||
|
||||
@@ -225,38 +225,3 @@ fn threaded_chaos_effect() {
|
||||
let values: Vec<_> = signals.iter().map(|s| s.get_untracked()).collect();
|
||||
println!("FINAL: {values:?}");
|
||||
}
|
||||
|
||||
#[cfg(feature = "effects")]
|
||||
#[test]
|
||||
fn test_batch() {
|
||||
use imports::*;
|
||||
use reactive_graph::{effect::batch, owner::StoredValue};
|
||||
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
|
||||
let a = RwSignal::new(0);
|
||||
let b = RwSignal::new(0);
|
||||
|
||||
let values = StoredValue::new(Vec::new());
|
||||
|
||||
ImmediateEffect::new_scoped(move || {
|
||||
println!("{} = {}", a.get(), b.get());
|
||||
values.write_value().push((a.get(), b.get()));
|
||||
});
|
||||
|
||||
a.set(1);
|
||||
b.set(1);
|
||||
|
||||
batch(move || {
|
||||
a.set(2);
|
||||
b.set(2);
|
||||
|
||||
batch(move || {
|
||||
a.set(3);
|
||||
b.set(3);
|
||||
});
|
||||
});
|
||||
|
||||
assert_eq!(values.get_value(), vec![(0, 0), (1, 0), (1, 1), (3, 3)]);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores"
|
||||
version = "0.3.0"
|
||||
version = "0.2.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -30,8 +30,6 @@ where
|
||||
defined_at: &'static Location<'static>,
|
||||
path: Arc<dyn Fn() -> StorePath + Send + Sync>,
|
||||
get_trigger: Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
|
||||
get_trigger_unkeyed:
|
||||
Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
|
||||
read: Arc<dyn Fn() -> Option<StoreFieldReader<T>> + Send + Sync>,
|
||||
pub(crate) write:
|
||||
Arc<dyn Fn() -> Option<StoreFieldWriter<T>> + Send + Sync>,
|
||||
@@ -105,10 +103,6 @@ impl<T> StoreField for ArcField<T> {
|
||||
(self.get_trigger)(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
(self.get_trigger_unkeyed)(path)
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
(self.path)()
|
||||
}
|
||||
@@ -138,9 +132,6 @@ where
|
||||
defined_at: Location::caller(),
|
||||
path: Arc::new(move || value.path().into_iter().collect()),
|
||||
get_trigger: Arc::new(move |path| value.get_trigger(path)),
|
||||
get_trigger_unkeyed: Arc::new(move |path| {
|
||||
value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new(move || value.reader().map(StoreFieldReader::new)),
|
||||
write: Arc::new(move || value.writer().map(StoreFieldWriter::new)),
|
||||
keys: Arc::new(move || value.keys()),
|
||||
@@ -167,10 +158,6 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -215,10 +202,6 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -262,10 +245,6 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -310,10 +289,6 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -362,10 +337,6 @@ where
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
get_trigger_unkeyed: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger_unkeyed(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
@@ -397,7 +368,6 @@ impl<T> Clone for ArcField<T> {
|
||||
defined_at: self.defined_at,
|
||||
path: self.path.clone(),
|
||||
get_trigger: Arc::clone(&self.get_trigger),
|
||||
get_trigger_unkeyed: Arc::clone(&self.get_trigger_unkeyed),
|
||||
read: Arc::clone(&self.read),
|
||||
write: Arc::clone(&self.write),
|
||||
keys: Arc::clone(&self.keys),
|
||||
|
||||
@@ -68,11 +68,6 @@ where
|
||||
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner.path()
|
||||
}
|
||||
|
||||
@@ -59,13 +59,6 @@ where
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|inner| inner.get_trigger_unkeyed(path))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
|
||||
@@ -84,10 +84,6 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
let index = self.index;
|
||||
@@ -113,23 +109,6 @@ where
|
||||
fn keys(&self) -> Option<KeyMap> {
|
||||
self.inner.keys()
|
||||
}
|
||||
|
||||
fn track_field(&self) {
|
||||
let mut full_path = self.path().into_iter().collect::<StorePath>();
|
||||
let trigger = self.get_trigger(self.path().into_iter().collect());
|
||||
trigger.this.track();
|
||||
trigger.children.track();
|
||||
|
||||
// tracks `this` for all ancestors: i.e., it will track any change that is made
|
||||
// directly to one of its ancestors, but not a change made to a *child* of an ancestor
|
||||
// (which would end up with every subfield tracking its own siblings, because they are
|
||||
// children of its parent)
|
||||
while !full_path.is_empty() {
|
||||
full_path.pop();
|
||||
let inner = self.get_trigger(full_path.clone());
|
||||
inner.this.track();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Inner, Prev> DefinedAt for AtIndex<Inner, Prev>
|
||||
|
||||
@@ -110,10 +110,6 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
Some(Mapped::new_with_guard(inner, self.read))
|
||||
@@ -436,7 +432,7 @@ where
|
||||
let this = keys
|
||||
.with_field_keys(
|
||||
inner.clone(),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|keys| keys.get(&self.key),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -448,10 +444,6 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
|
||||
@@ -460,7 +452,7 @@ where
|
||||
let index = keys
|
||||
.with_field_keys(
|
||||
inner_path,
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|keys| keys.get(&self.key),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -484,7 +476,7 @@ where
|
||||
let index = keys
|
||||
.with_field_keys(
|
||||
inner_path.clone(),
|
||||
|keys| (keys.get(&self.key), vec![]),
|
||||
|keys| keys.get(&self.key),
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
@@ -632,7 +624,9 @@ where
|
||||
let latest = self.latest_keys();
|
||||
keys.with_field_keys(
|
||||
inner_path,
|
||||
|keys| ((), keys.update(latest)),
|
||||
|keys| {
|
||||
keys.update(latest);
|
||||
},
|
||||
|| self.latest_keys(),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -364,18 +364,13 @@ where
|
||||
})
|
||||
}
|
||||
|
||||
fn update(
|
||||
&mut self,
|
||||
iter: impl IntoIterator<Item = K>,
|
||||
) -> Vec<(usize, StorePathSegment)> {
|
||||
fn update(&mut self, iter: impl IntoIterator<Item = K>) {
|
||||
let new_keys = iter
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(idx, key)| (key, idx))
|
||||
.collect::<FxHashMap<K, usize>>();
|
||||
|
||||
let mut index_keys = Vec::with_capacity(new_keys.len());
|
||||
|
||||
// remove old keys and recycle the slots
|
||||
self.keys.retain(|key, old_entry| match new_keys.get(key) {
|
||||
Some(idx) => {
|
||||
@@ -390,17 +385,14 @@ where
|
||||
|
||||
// add new keys
|
||||
for (key, idx) in new_keys {
|
||||
match self.keys.get(&key) {
|
||||
Some((segment, idx)) => index_keys.push((*idx, *segment)),
|
||||
None => {
|
||||
let path = self.next_key();
|
||||
self.keys.insert(key, (path, idx));
|
||||
index_keys.push((idx, path));
|
||||
}
|
||||
// the suggestion doesn't compile because we need &mut for self.next_key(),
|
||||
// and we don't want to call that until after the check
|
||||
#[allow(clippy::map_entry)]
|
||||
if !self.keys.contains_key(&key) {
|
||||
let path = self.next_key();
|
||||
self.keys.insert(key, (path, idx));
|
||||
}
|
||||
}
|
||||
|
||||
index_keys
|
||||
}
|
||||
}
|
||||
|
||||
@@ -423,20 +415,14 @@ type HashMap<K, V> = send_wrapper::SendWrapper<
|
||||
|
||||
/// A map of the keys for a keyed subfield.
|
||||
#[derive(Clone)]
|
||||
pub struct KeyMap(
|
||||
HashMap<StorePath, Box<dyn Any + Send + Sync>>,
|
||||
HashMap<(StorePath, usize), StorePathSegment>,
|
||||
);
|
||||
pub struct KeyMap(HashMap<StorePath, Box<dyn Any + Send + Sync>>);
|
||||
|
||||
impl Default for KeyMap {
|
||||
fn default() -> Self {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
return Self(Default::default(), Default::default());
|
||||
return Self(Default::default());
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
return Self(
|
||||
send_wrapper::SendWrapper::new(Default::default()),
|
||||
send_wrapper::SendWrapper::new(Default::default()),
|
||||
);
|
||||
return Self(send_wrapper::SendWrapper::new(Default::default()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -444,70 +430,31 @@ impl KeyMap {
|
||||
fn with_field_keys<K, T>(
|
||||
&self,
|
||||
path: StorePath,
|
||||
fun: impl FnOnce(&mut FieldKeys<K>) -> (T, Vec<(usize, StorePathSegment)>),
|
||||
fun: impl FnOnce(&mut FieldKeys<K>) -> T,
|
||||
initialize: impl FnOnce() -> Vec<K>,
|
||||
) -> Option<T>
|
||||
where
|
||||
K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static,
|
||||
{
|
||||
let initial_keys = initialize();
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let mut entry = self
|
||||
.0
|
||||
.entry(path.clone())
|
||||
.or_insert_with(|| Box::new(FieldKeys::new(initial_keys)));
|
||||
.entry(path)
|
||||
.or_insert_with(|| Box::new(FieldKeys::new(initialize())));
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let entry = if !self.0.borrow().contains_key(&path) {
|
||||
Some(Box::new(FieldKeys::new(initial_keys)))
|
||||
Some(Box::new(FieldKeys::new(initialize())))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let mut map = self.0.borrow_mut();
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let entry = map.entry(path.clone()).or_insert_with(|| entry.unwrap());
|
||||
let entry = map.entry(path).or_insert_with(|| entry.unwrap());
|
||||
|
||||
let entry = entry.downcast_mut::<FieldKeys<K>>()?;
|
||||
let (result, new_keys) = fun(entry);
|
||||
if !new_keys.is_empty() {
|
||||
for (idx, segment) in new_keys {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
self.1.insert((path.clone(), idx), segment);
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
self.1.borrow_mut().insert((path.clone(), idx), segment);
|
||||
}
|
||||
}
|
||||
Some(result)
|
||||
}
|
||||
|
||||
fn contains_key(&self, key: &StorePath) -> bool {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.0.contains_key(key)
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
self.0.borrow_mut().contains_key(key)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_key_for_index(
|
||||
&self,
|
||||
key: &(StorePath, usize),
|
||||
) -> Option<StorePathSegment> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
{
|
||||
self.1.get(key).as_deref().copied()
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
{
|
||||
self.1.borrow().get(key).as_deref().copied()
|
||||
}
|
||||
Some(fun(entry))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -885,30 +832,6 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Foo {
|
||||
id: i32,
|
||||
bar: Bar,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Bar {
|
||||
bar_signature: i32,
|
||||
baz: Baz,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baz {
|
||||
more_data: i32,
|
||||
baw: Baw,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baw {
|
||||
more_data: i32,
|
||||
end: i32,
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn mutating_field_triggers_effect() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
@@ -1189,6 +1112,30 @@ mod tests {
|
||||
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Foo {
|
||||
id: i32,
|
||||
bar: Bar,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Bar {
|
||||
bar_signature: i32,
|
||||
baz: Baz,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baz {
|
||||
more_data: i32,
|
||||
baw: Baw,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baw {
|
||||
more_data: i32,
|
||||
end: i32,
|
||||
}
|
||||
|
||||
let store = Store::new(Foo {
|
||||
id: 42,
|
||||
bar: Bar {
|
||||
@@ -1272,107 +1219,4 @@ mod tests {
|
||||
assert_eq!(more_data_runs.get_value(), 3);
|
||||
assert_eq!(baz_baw_end_runs.get_value(), 3);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn changing_parent_notifies_subfield() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
let combined_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
let store = Store::new(Foo {
|
||||
id: 42,
|
||||
bar: Bar {
|
||||
bar_signature: 69,
|
||||
baz: Baz {
|
||||
more_data: 9999,
|
||||
baw: Baw {
|
||||
more_data: 22,
|
||||
end: 1112,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
let tracked_field = store.bar().baz().more_data();
|
||||
|
||||
Effect::new_sync({
|
||||
let combined_count = Arc::clone(&combined_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("first run");
|
||||
} else {
|
||||
println!("next run");
|
||||
}
|
||||
|
||||
// we only track `more`, but this should still be notified
|
||||
// when its parent fields `bar` or `baz` change
|
||||
println!("{:?}", *tracked_field.read());
|
||||
combined_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
tick().await;
|
||||
tick().await;
|
||||
|
||||
store.bar().baz().set(Baz {
|
||||
more_data: 42,
|
||||
baw: Baw {
|
||||
more_data: 11,
|
||||
end: 31,
|
||||
},
|
||||
});
|
||||
tick().await;
|
||||
store.bar().set(Bar {
|
||||
bar_signature: 23,
|
||||
baz: Baz {
|
||||
more_data: 32,
|
||||
baw: Baw {
|
||||
more_data: 432,
|
||||
end: 423,
|
||||
},
|
||||
},
|
||||
});
|
||||
tick().await;
|
||||
|
||||
assert_eq!(combined_count.load(Ordering::Relaxed), 3);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn changing_parent_notifies_unkeyed_child() {
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
let combined_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
let store = Store::new(data());
|
||||
|
||||
let tracked_field = store.todos().at_unkeyed(0);
|
||||
|
||||
Effect::new_sync({
|
||||
let combined_count = Arc::clone(&combined_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("first run");
|
||||
} else {
|
||||
println!("next run");
|
||||
}
|
||||
|
||||
// we only track `more`, but this should still be notified
|
||||
// when its parent fields `bar` or `baz` change
|
||||
println!("{:?}", *tracked_field.read());
|
||||
combined_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
tick().await;
|
||||
tick().await;
|
||||
|
||||
store.todos().write().pop();
|
||||
tick().await;
|
||||
|
||||
store.todos().write().push(Todo {
|
||||
label: "another one".into(),
|
||||
completed: false,
|
||||
});
|
||||
tick().await;
|
||||
|
||||
assert_eq!(combined_count.load(Ordering::Relaxed), 3);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@ where
|
||||
// don't track the writer for the whole store
|
||||
writer.untrack();
|
||||
let mut notify = |path: &StorePath| {
|
||||
self.triggers_for_path_unkeyed(path.to_owned()).notify();
|
||||
self.triggers_for_path(path.to_owned()).notify();
|
||||
};
|
||||
writer.patch_field(new, &path, &mut notify);
|
||||
}
|
||||
|
||||
@@ -11,15 +11,6 @@ impl IntoIterator for StorePath {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a StorePath {
|
||||
type Item = &'a StorePathSegment;
|
||||
type IntoIter = std::slice::Iter<'a, StorePathSegment>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Vec<StorePathSegment>> for StorePath {
|
||||
fn from(value: Vec<StorePathSegment>) -> Self {
|
||||
Self(value)
|
||||
@@ -27,16 +18,6 @@ impl From<Vec<StorePathSegment>> for StorePath {
|
||||
}
|
||||
|
||||
impl StorePath {
|
||||
/// Creates a new path.
|
||||
pub fn new() -> Self {
|
||||
Self(Vec::new())
|
||||
}
|
||||
|
||||
/// Creates a new path with storage capacity for `capacity` segments.
|
||||
pub fn with_capacity(capacity: usize) -> Self {
|
||||
Self(Vec::with_capacity(capacity))
|
||||
}
|
||||
|
||||
/// Adds a new segment to the path.
|
||||
pub fn push(&mut self, segment: impl Into<StorePathSegment>) {
|
||||
self.0.push(segment.into());
|
||||
|
||||
@@ -26,14 +26,6 @@ pub trait StoreField: Sized {
|
||||
#[track_caller]
|
||||
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger;
|
||||
|
||||
/// Returns the trigger that tracks access and updates for this field.
|
||||
///
|
||||
/// This uses *unkeyed* paths: i.e., if any field in the path is keyed, it will
|
||||
/// try to look up the key for the item at the index given in the path, rather than
|
||||
/// the keyed item.
|
||||
#[track_caller]
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger;
|
||||
|
||||
/// The path of this field (see [`StorePath`]).
|
||||
#[track_caller]
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment>;
|
||||
@@ -92,26 +84,6 @@ pub trait StoreField: Sized {
|
||||
|
||||
triggers
|
||||
}
|
||||
|
||||
/// Returns triggers for the field at the given path, and all parent fields
|
||||
fn triggers_for_path_unkeyed(&self, path: StorePath) -> Vec<ArcTrigger> {
|
||||
// see notes on triggers_for_path() for additional comments on implementation
|
||||
|
||||
let trigger = self.get_trigger_unkeyed(path.clone());
|
||||
let mut full_path = path;
|
||||
|
||||
let mut triggers = Vec::with_capacity(full_path.len() + 2);
|
||||
triggers.push(trigger.this.clone());
|
||||
triggers.push(trigger.children.clone());
|
||||
while !full_path.is_empty() {
|
||||
full_path.pop();
|
||||
let inner = self.get_trigger_unkeyed(full_path.clone());
|
||||
triggers.push(inner.children.clone());
|
||||
}
|
||||
triggers.reverse();
|
||||
|
||||
triggers
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> StoreField for ArcStore<T>
|
||||
@@ -129,26 +101,6 @@ where
|
||||
trigger
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
let orig_path = path.clone();
|
||||
|
||||
let mut path = StorePath::with_capacity(orig_path.len());
|
||||
for segment in &orig_path {
|
||||
let parent_is_keyed = self.keys.contains_key(&path);
|
||||
|
||||
if parent_is_keyed {
|
||||
let key = self
|
||||
.keys
|
||||
.get_key_for_index(&(path.clone(), segment.0))
|
||||
.expect("could not find key for index");
|
||||
path.push(key);
|
||||
} else {
|
||||
path.push(*segment);
|
||||
}
|
||||
}
|
||||
self.get_trigger(path)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
iter::empty()
|
||||
@@ -189,14 +141,6 @@ where
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|n| n.get_trigger_unkeyed(path))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner
|
||||
|
||||
@@ -88,10 +88,6 @@ where
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
|
||||
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger_unkeyed(path)
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
Some(Mapped::new_with_guard(inner, self.read))
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores_macro"
|
||||
version = "0.2.6"
|
||||
version = "0.2.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router"
|
||||
version = "0.8.9"
|
||||
version = "0.8.5"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -364,12 +364,6 @@ where
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct MatchedRoute(pub String, pub AnyView);
|
||||
|
||||
impl MatchedRoute {
|
||||
fn branch_name(&self) -> String {
|
||||
format!("{:?}", self.1.as_type_id())
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for MatchedRoute {
|
||||
type State = <AnyView as Render>::State;
|
||||
|
||||
@@ -420,9 +414,8 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches: bool,
|
||||
extra_attrs: Vec<AnyAttribute>,
|
||||
) {
|
||||
let branch_name = (mark_branches && escape).then(|| self.branch_name());
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.open_branch(bn);
|
||||
if mark_branches && escape {
|
||||
buf.open_branch(&self.0);
|
||||
}
|
||||
self.1.to_html_with_buf(
|
||||
buf,
|
||||
@@ -431,8 +424,8 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.close_branch(bn);
|
||||
if mark_branches && escape {
|
||||
buf.close_branch(&self.0);
|
||||
if *position == Position::NextChildAfterText {
|
||||
*position = Position::NextChild;
|
||||
}
|
||||
@@ -449,9 +442,8 @@ impl RenderHtml for MatchedRoute {
|
||||
) where
|
||||
Self: Sized,
|
||||
{
|
||||
let branch_name = (mark_branches && escape).then(|| self.branch_name());
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.open_branch(bn);
|
||||
if mark_branches && escape {
|
||||
buf.open_branch(&self.0);
|
||||
}
|
||||
self.1.to_html_async_with_buf::<OUT_OF_ORDER>(
|
||||
buf,
|
||||
@@ -460,8 +452,8 @@ impl RenderHtml for MatchedRoute {
|
||||
mark_branches,
|
||||
extra_attrs,
|
||||
);
|
||||
if let Some(bn) = &branch_name {
|
||||
buf.close_branch(bn);
|
||||
if mark_branches && escape {
|
||||
buf.close_branch(&self.0);
|
||||
if *position == Position::NextChildAfterText {
|
||||
*position = Position::NextChild;
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::{hooks::use_navigate, params::ParamsMap};
|
||||
use core::fmt;
|
||||
use futures::channel::oneshot;
|
||||
use js_sys::{try_iter, Array, JsString};
|
||||
use leptos::{ev, prelude::*};
|
||||
use leptos::prelude::*;
|
||||
use or_poisoned::OrPoisoned;
|
||||
use reactive_graph::{
|
||||
signal::ArcRwSignal,
|
||||
@@ -11,12 +11,13 @@ use reactive_graph::{
|
||||
};
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
boxed::Box,
|
||||
string::String,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
use tachys::dom::{document, window};
|
||||
use wasm_bindgen::{JsCast, JsValue};
|
||||
use web_sys::UrlSearchParams;
|
||||
use wasm_bindgen::{closure::Closure, JsCast, JsValue};
|
||||
use web_sys::{Event, UrlSearchParams};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct BrowserUrl {
|
||||
@@ -115,6 +116,7 @@ impl LocationProvider for BrowserUrl {
|
||||
}
|
||||
|
||||
fn init(&self, base: Option<Cow<'static, str>>) {
|
||||
let window = window();
|
||||
let navigate = {
|
||||
let url = self.url.clone();
|
||||
let pending = Arc::clone(&self.pending_navigation);
|
||||
@@ -157,32 +159,37 @@ impl LocationProvider for BrowserUrl {
|
||||
|
||||
let handle_anchor_click =
|
||||
handle_anchor_click(base, Self::parse_with_base, navigate);
|
||||
|
||||
let click_handle = window_event_listener(ev::click, move |ev| {
|
||||
let closure = Closure::wrap(Box::new(move |ev: Event| {
|
||||
if let Err(e) = handle_anchor_click(ev) {
|
||||
#[cfg(feature = "tracing")]
|
||||
tracing::error!("{e:?}");
|
||||
#[cfg(not(feature = "tracing"))]
|
||||
web_sys::console::error_1(&e);
|
||||
}
|
||||
});
|
||||
}) as Box<dyn FnMut(Event)>)
|
||||
.into_js_value();
|
||||
window
|
||||
.add_event_listener_with_callback(
|
||||
"click",
|
||||
closure.as_ref().unchecked_ref(),
|
||||
)
|
||||
.expect(
|
||||
"couldn't add `click` listener to `window` to handle `<a>` \
|
||||
clicks",
|
||||
);
|
||||
|
||||
// handle popstate event (forward/back navigation)
|
||||
let popstate_cb = {
|
||||
let cb = {
|
||||
let url = self.url.clone();
|
||||
let path_stack = self.path_stack.clone();
|
||||
let is_back = self.is_back.clone();
|
||||
move || match Self::current() {
|
||||
Ok(new_url) => {
|
||||
let mut stack = path_stack.write_value();
|
||||
let stack = path_stack.read_value();
|
||||
let is_navigating_back = stack.len() == 1
|
||||
|| (stack.len() >= 2
|
||||
&& stack.get(stack.len() - 2) == Some(&new_url));
|
||||
|
||||
if is_navigating_back {
|
||||
stack.pop();
|
||||
}
|
||||
|
||||
is_back.set(is_navigating_back);
|
||||
|
||||
url.set(new_url);
|
||||
@@ -195,14 +202,14 @@ impl LocationProvider for BrowserUrl {
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let popstate_handle =
|
||||
window_event_listener(ev::popstate, move |_| popstate_cb());
|
||||
|
||||
on_cleanup(|| {
|
||||
click_handle.remove();
|
||||
popstate_handle.remove();
|
||||
});
|
||||
let closure =
|
||||
Closure::wrap(Box::new(cb) as Box<dyn Fn()>).into_js_value();
|
||||
window
|
||||
.add_event_listener_with_callback(
|
||||
"popstate",
|
||||
closure.as_ref().unchecked_ref(),
|
||||
)
|
||||
.expect("couldn't add `popstate` listener to `window`");
|
||||
}
|
||||
|
||||
fn ready_to_complete(&self) {
|
||||
@@ -214,13 +221,6 @@ impl LocationProvider for BrowserUrl {
|
||||
fn complete_navigation(&self, loc: &LocationChange) {
|
||||
let history = window().history().unwrap();
|
||||
|
||||
let current_path = self
|
||||
.path_stack
|
||||
.read_value()
|
||||
.last()
|
||||
.map(|url| url.to_full_path());
|
||||
let add_to_stack = current_path.as_ref() != Some(&loc.value);
|
||||
|
||||
if loc.replace {
|
||||
history
|
||||
.replace_state_with_url(
|
||||
@@ -229,7 +229,7 @@ impl LocationProvider for BrowserUrl {
|
||||
Some(&loc.value),
|
||||
)
|
||||
.unwrap();
|
||||
} else if add_to_stack {
|
||||
} else {
|
||||
// push the "forward direction" marker
|
||||
let state = &loc.state.to_js_value();
|
||||
history
|
||||
@@ -240,9 +240,7 @@ impl LocationProvider for BrowserUrl {
|
||||
// add this URL to the "path stack" for detecting back navigations, and
|
||||
// unset "navigating back" state
|
||||
if let Ok(url) = Self::current() {
|
||||
if add_to_stack {
|
||||
self.path_stack.write_value().push(url);
|
||||
}
|
||||
self.path_stack.write_value().push(url);
|
||||
self.is_back.set(false);
|
||||
}
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ use send_wrapper::SendWrapper;
|
||||
use std::{borrow::Cow, future::Future};
|
||||
use tachys::dom::window;
|
||||
use wasm_bindgen::{JsCast, JsValue};
|
||||
use web_sys::{HtmlAnchorElement, MouseEvent};
|
||||
use web_sys::{Event, HtmlAnchorElement, MouseEvent};
|
||||
|
||||
mod history;
|
||||
mod server;
|
||||
@@ -300,14 +300,15 @@ pub(crate) fn handle_anchor_click<NavFn, NavFut>(
|
||||
router_base: Option<Cow<'static, str>>,
|
||||
parse_with_base: fn(&str, &str) -> Result<Url, JsValue>,
|
||||
navigate: NavFn,
|
||||
) -> Box<dyn Fn(MouseEvent) -> Result<(), JsValue>>
|
||||
) -> Box<dyn Fn(Event) -> Result<(), JsValue>>
|
||||
where
|
||||
NavFn: Fn(Url, LocationChange) -> NavFut + 'static,
|
||||
NavFut: Future<Output = ()> + 'static,
|
||||
{
|
||||
let router_base = router_base.unwrap_or_default();
|
||||
|
||||
Box::new(move |ev: MouseEvent| {
|
||||
Box::new(move |ev: Event| {
|
||||
let ev = ev.unchecked_into::<MouseEvent>();
|
||||
let origin = window().location().origin()?;
|
||||
if ev.default_prevented()
|
||||
|| ev.button() != 0
|
||||
|
||||
@@ -26,7 +26,7 @@ use reactive_graph::{
computed::{ArcMemo, ScopedFuture},
owner::{provide_context, use_context, Owner},
signal::{ArcRwSignal, ArcTrigger},
traits::{Get, GetUntracked, Notify, ReadUntracked, Set, Track, Write},
traits::{Get, GetUntracked, Notify, ReadUntracked, Set, Track},
transition::AsyncTransition,
wrappers::write::SignalSetter,
};
@@ -119,7 +119,6 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -160,14 +159,13 @@ where
}
return;
}

// since the path didn't match, we'll update the retained path for future diffing
state.path.clear();
state.path.push_str(url_snapshot.path());

let new_match = self.routes.match_route(url_snapshot.path());

*state.current_url.write_untracked() = url_snapshot;
state.current_url.set(url_snapshot);

match new_match {
None => {
@@ -194,7 +192,6 @@ where
&mut state.outlets,
self.set_is_routing.is_some(),
0,
&self.outer_owner,
);

let (abort_handle, abort_registration) =
@@ -372,7 +369,6 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);

// outlets will not send their views if the loaders are never polled
@@ -426,16 +422,8 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);

let preload_owners = outlets
.iter()
.map(|o| o.preload_owner.clone())
.collect::<Vec<_>>();
outer_owner
.with(|| Owner::on_cleanup(move || drop(preload_owners)));

// outlets will not send their views if the loaders are never polled
// the loaders are async so that they can lazy-load routes in the browser,
// but they should always be synchronously available on the server
@@ -487,7 +475,6 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -543,7 +530,6 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -580,7 +566,6 @@ pub(crate) struct RouteContext {
base: Option<Oco<'static, str>>,
view_fn: Arc<Mutex<OutletViewFn>>,
owner: Arc<Mutex<Option<Owner>>>,
preload_owner: Owner,
child: ChildRoute,
}

@@ -612,7 +597,6 @@ impl Clone for RouteContext {
view_fn: Arc::clone(&self.view_fn),
owner: Arc::clone(&self.owner),
child: self.child.clone(),
preload_owner: self.preload_owner.clone(),
}
}
}
@@ -624,7 +608,6 @@ trait AddNestedRoute {
base: Option<Oco<'static, str>>,
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
outlets: &mut Vec<RouteContext>,
outer_owner: &Owner,
);

#[allow(clippy::too_many_arguments)]
@@ -638,7 +621,6 @@ trait AddNestedRoute {
outlets: &mut Vec<RouteContext>,
set_is_routing: bool,
level: u8,
outer_owner: &Owner,
) -> u8;
}

@@ -652,7 +634,6 @@ where
base: Option<Oco<'static, str>>,
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
outlets: &mut Vec<RouteContext>,
outer_owner: &Owner,
) {
let orig_url = url;

@@ -720,7 +701,6 @@ where
base: base.clone(),
child: ChildRoute(Arc::new(Mutex::new(None))),
owner: Arc::new(Mutex::new(None)),
preload_owner: outer_owner.child(),
};
if !outlets.is_empty() {
let prev_index = outlets.len().saturating_sub(1);
@@ -745,15 +725,7 @@ where
provide_context(params.clone());
provide_context(url.clone());
provide_context(matched.clone());
outlet
.preload_owner
.with(|| {
provide_context(params.clone());
provide_context(url.clone());
provide_context(matched.clone());
ScopedFuture::new(view.preload())
})
.await;
view.preload().await;
let child = outlet.child.clone();
*view_fn.lock().or_poisoned() =
Box::new(move |owner_where_used| {
@@ -800,13 +772,7 @@ where
// this is important because to build the view, we need access to the outlet
// and the outlet will be returned from building this child
if let Some(child) = child {
child.build_nested_route(
orig_url,
base,
loaders,
outlets,
outer_owner,
);
child.build_nested_route(orig_url, base, loaders, outlets);
}
}

@@ -821,7 +787,6 @@ where
outlets: &mut Vec<RouteContext>,
set_is_routing: bool,
level: u8,
outer_owner: &Owner,
) -> u8 {
let (parent_params, parent_matches): (Vec<_>, Vec<_>) = outlets
.iter()
@@ -838,13 +803,7 @@ where
match current {
// if there's nothing currently in the routes at this point, build from here
None => {
self.build_nested_route(
url,
base,
preloaders,
outlets,
outer_owner,
);
self.build_nested_route(url, base, preloaders, outlets);
level
}
Some(current) => {
@@ -884,10 +843,6 @@ where
&mut current.matched,
ArcRwSignal::new(new_match),
);
let old_preload_owner = mem::replace(
&mut current.preload_owner,
outer_owner.child(),
);
let matched_including_parents = {
ArcMemo::new({
let matched = current.matched.clone();
@@ -930,26 +885,11 @@ where
let child = outlet.child.clone();
async move {
let child = child.clone();
outlet
.preload_owner
.with(|| {
provide_context(
params_including_parents.clone(),
);
provide_context(url.clone());
provide_context(matched.clone());
ScopedFuture::new(async {
if set_is_routing {
AsyncTransition::run(|| {
view.preload()
})
.await;
} else {
view.preload().await;
}
})
})
.await;
if set_is_routing {
AsyncTransition::run(|| view.preload()).await;
} else {
view.preload().await;
}
*view_fn.lock().or_poisoned() =
Box::new(move |owner_where_used| {
let prev_owner = route_owner
@@ -998,7 +938,6 @@ where
drop(old_params);
drop(old_url);
drop(old_matched);
drop(old_preload_owner);
trigger
}
})));
@@ -1009,13 +948,8 @@ where

// if this children has matches, then rebuild the lower section of the tree
if let Some(child) = child {
child.build_nested_route(
url,
base,
preloaders,
outlets,
outer_owner,
);
child
.build_nested_route(url, base, preloaders, outlets);
} else {
*outlets[*items].child.0.lock().or_poisoned() = None;
}
@@ -1039,7 +973,6 @@ where
outlets,
set_is_routing,
level + 1,
outer_owner,
)
} else {
*current.child.0.lock().or_poisoned() = None;

@@ -5,7 +5,7 @@ license = "MIT"
repository = "https://github.com/leptos-rs/leptos"
description = "RPC for any web framework."
readme = "../README.md"
version = "0.8.8"
version = "0.8.5"
rust-version.workspace = true
edition.workspace = true

@@ -64,7 +64,6 @@ http-body-util = { optional = true, workspace = true, default-features = true }
rkyv = { optional = true, workspace = true, default-features = true }
rmp-serde = { optional = true, workspace = true, default-features = true }
base64 = { workspace = true, default-features = true }
bitcode = { optional = true, workspace = true, default-features = true }

# client
gloo-net = { optional = true, workspace = true, default-features = true }
@@ -127,7 +126,6 @@ cbor = ["dep:ciborium"]
rkyv = ["dep:rkyv"]
msgpack = ["dep:rmp-serde"]
postcard = ["dep:postcard"]
bitcode = ["dep:bitcode"]
default-tls = ["reqwest?/default-tls"]
rustls = ["reqwest?/rustls-tls", "tokio-tungstenite?/rustls"]
reqwest = ["dep:reqwest", "dep:tokio-tungstenite", "dep:tokio"]

@@ -1,49 +0,0 @@
use super::{Patch, Post, Put};
use crate::{ContentType, Decodes, Encodes, Format, FormatType};
use bytes::Bytes;

/// Serializes and deserializes with [`bitcode`].
pub struct BitcodeEncoding;

impl ContentType for BitcodeEncoding {
const CONTENT_TYPE: &'static str = "application/bitcode";
}

impl FormatType for BitcodeEncoding {
const FORMAT_TYPE: Format = Format::Binary;
}

impl<T> Encodes<T> for BitcodeEncoding
where
T: bitcode::Encode,
{
type Error = std::convert::Infallible;

fn encode(value: &T) -> Result<Bytes, Self::Error> {
Ok(Bytes::from(bitcode::encode(value)))
}
}

impl<T> Decodes<T> for BitcodeEncoding
where
T: bitcode::DecodeOwned,
{
type Error = bitcode::Error;

fn decode(bytes: Bytes) -> Result<T, Self::Error> {
bitcode::decode(bytes.as_ref())
}
}

/// Pass arguments and receive responses using `bitcode` in a `POST` request.
pub type Bitcode = Post<BitcodeEncoding>;

/// Pass arguments and receive responses using `bitcode` in the body of a `PATCH` request.
/// **Note**: Browser support for `PATCH` requests without JS/WASM may be poor.
/// Consider using a `POST` request if functionality without JS/WASM is required.
pub type PatchBitcode = Patch<BitcodeEncoding>;

/// Pass arguments and receive responses using `bitcode` in the body of a `PUT` request.
/// **Note**: Browser support for `PUT` requests without JS/WASM may be poor.
/// Consider using a `POST` request if functionality without JS/WASM is required.
pub type PutBitcode = Put<BitcodeEncoding>;
@@ -50,11 +50,6 @@ mod postcard;
#[cfg(feature = "postcard")]
pub use postcard::*;

#[cfg(feature = "bitcode")]
mod bitcode;
#[cfg(feature = "bitcode")]
pub use bitcode::*;

mod patch;
pub use patch::*;
mod post;

@@ -133,8 +133,6 @@ pub use ::bytes as bytes_export;
#[doc(hidden)]
pub use ::http as http_export;
use base64::{engine::general_purpose::STANDARD_NO_PAD, DecodeError, Engine};
#[cfg(feature = "bitcode")]
pub use bitcode;
// re-exported to make it possible to implement a custom Client without adding a separate
// dependency on `bytes`
pub use bytes::Bytes;
@@ -309,20 +307,24 @@ pub trait ServerFn: Send + Sized {
.await
.map(|res| (res, None))
.unwrap_or_else(|e| {
let mut response =
(
<<Self as ServerFn>::Server as crate::Server<
Self::Error,
Self::InputStreamError,
Self::OutputStreamError,
>>::Response::error_response(
Self::PATH, e.ser()
);
let content_type =
<Self::Error as FromServerFnError>::Encoder::CONTENT_TYPE;
response.content_type(content_type);
(response, Some(e))
),
Some(e),
)
});

if err.is_some() {
res.set_content_type(
<<Self::Error as FromServerFnError>::Encoder>::CONTENT_TYPE,
);
}

// if it accepts HTML, we'll redirect to the Referer
#[cfg(feature = "form-redirects")]
if accepts_html {

@@ -72,10 +72,8 @@ mod axum {
let inner = self.call(req);
Box::pin(async move {
inner.await.unwrap_or_else(|e| {
// TODO: This does not set the Content-Type on the response. Doing so will
// require a breaking change in order to get the correct encoding from the
// error's `FromServerFnError::Encoder::CONTENT_TYPE` impl.
// Note: This only applies to middleware errors.
// TODO: this needs to set the Content-Type correctly depending on the error type
// note that this only applies to middleware errors
let err =
ser(ServerFnErrorErr::MiddlewareError(e.to_string()));
Response::<Body>::error_response(&path, err)
@@ -153,10 +151,8 @@ mod actix {
let inner = self.call(req);
Box::pin(async move {
inner.await.unwrap_or_else(|e| {
// TODO: This does not set the Content-Type on the response. Doing so will
// require a breaking change in order to get the correct encoding from the
// error's `FromServerFnError::Encoder::CONTENT_TYPE` impl.
// Note: This only applies to middleware errors.
// TODO: this needs to set the Content-Type correctly depending on the error type
// note that this only applies to middleware errors
let err =
ser(ServerFnErrorErr::MiddlewareError(e.to_string()));
ActixResponse::error_response(&path, err).take()

@@ -4,8 +4,7 @@ use crate::error::{
};
use actix_web::{
http::{
header,
header::{HeaderValue, CONTENT_TYPE, LOCATION},
header::{self, HeaderValue, CONTENT_TYPE, LOCATION},
StatusCode,
},
HttpResponse,
@@ -80,10 +79,10 @@ impl Res for ActixResponse {
))
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.0.headers_mut().insert(CONTENT_TYPE, content_type);
}
fn set_content_type(&mut self, content_type: &str) {
self.0
.headers_mut()
.append(CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap());
}

fn redirect(&mut self, path: &str) {

@@ -19,7 +19,10 @@ use crate::error::{
};
use bytes::Bytes;
use futures::{Stream, TryStreamExt};
use http::{header, HeaderValue, Response, StatusCode};
use http::{
header::{self, CONTENT_TYPE},
HeaderValue, Response, StatusCode,
};
use std::pin::Pin;
use throw_error::Error;

@@ -100,11 +103,9 @@ impl Res for Response<Body> {
.unwrap()
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.headers_mut()
.insert(header::CONTENT_TYPE, content_type);
}
fn set_content_type(&mut self, content_type: &str) {
self.headers_mut()
.append(CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap());
}

fn redirect(&mut self, path: &str) {

@@ -6,7 +6,10 @@ use crate::error::{
use axum::body::Body;
use bytes::Bytes;
use futures::{Stream, TryStreamExt};
use http::{header, HeaderValue, Response, StatusCode};
use http::{
header::{self, CONTENT_TYPE},
HeaderValue, Response, StatusCode,
};

impl<E> TryRes<E> for Response<Body>
where
@@ -60,11 +63,9 @@ impl Res for Response<Body> {
.unwrap()
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.headers_mut()
.insert(header::CONTENT_TYPE, content_type);
}
fn set_content_type(&mut self, content_type: &str) {
self.headers_mut()
.append(CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap());
}

fn redirect(&mut self, path: &str) {

@@ -39,12 +39,14 @@ where
pub trait Res {
/// Converts an error into a response, with a `500` status code and the error as its body.
fn error_response(path: &str, err: Bytes) -> Self;
/// Set the `Content-Type` header for the response.
fn content_type(&mut self, #[allow(unused_variables)] content_type: &str) {
// TODO 0.9: remove this method and default implementation. It is only included here
// to allow setting the `Content-Type` header for error responses without requiring a
// semver-incompatible change.

/// Sets the `Content-Type` header on the response.
fn set_content_type(&mut self, content_type: &str) {
// TODO 0.9: remove this method and default implementation
// it is included here to make this change non-breaking
_ = content_type;
}

/// Redirect the response by setting a 302 code and Location header.
fn redirect(&mut self, path: &str);
}
@@ -108,7 +110,8 @@ impl Res for BrowserMockRes {
unreachable!()
}

fn content_type(&mut self, _content_type: &str) {
fn set_content_type(&mut self, content_type: &str) {
_ = content_type;
unreachable!()
}

@@ -10,8 +10,7 @@
feature = "multipart",
feature = "serde-lite",
feature = "cbor",
feature = "msgpack",
feature = "bitcode",
feature = "msgpack"
))
))]

@@ -5,7 +5,7 @@ license = "MIT"
repository = "https://github.com/leptos-rs/leptos"
description = "RPC for any web framework."
readme = "../README.md"
version = "0.8.8"
version = "0.8.6"
edition.workspace = true

[dependencies]

@@ -331,7 +331,6 @@ impl ServerFnCall {
enum PathInfo {
Serde,
Rkyv,
Bitcode,
None,
}

@@ -342,12 +341,6 @@ impl ServerFnCall {
Clone, #server_fn_path::rkyv::Archive, #server_fn_path::rkyv::Serialize, #server_fn_path::rkyv::Deserialize
},
),
Some("Bitcode") => (
PathInfo::Bitcode,
quote! {
Clone, #server_fn_path::bitcode::Encode, #server_fn_path::bitcode::Decode
},
),
Some("MultipartFormData")
| Some("Streaming")
| Some("StreamingText") => (PathInfo::None, quote! {}),
@@ -383,7 +376,6 @@ impl ServerFnCall {
#[serde(crate = #serde_path)]
}
}
PathInfo::Bitcode => quote! {},
PathInfo::Rkyv => quote! {},
PathInfo::None => quote! {},
};

@@ -1,6 +1,6 @@
[package]
name = "tachys"
version = "0.2.10"
version = "0.2.6"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

@@ -1,10 +1,6 @@
use super::{Attribute, NextAttribute};
use crate::{
erased::{Erased, ErasedLocal},
html::attribute::NamedAttributeKey,
renderer::{dom::Element, Rndr},
};
use std::{any::TypeId, fmt::Debug, mem};
use crate::erased::{Erased, ErasedLocal};
use std::{any::TypeId, fmt::Debug};
#[cfg(feature = "ssr")]
use std::{future::Future, pin::Pin};

@@ -29,7 +25,6 @@ pub struct AnyAttribute {
resolve: fn(Erased) -> Pin<Box<dyn Future<Output = AnyAttribute> + Send>>,
#[cfg(feature = "ssr")]
dry_resolve: fn(&mut Erased),
keys: fn(&Erased) -> Vec<NamedAttributeKey>,
}

impl Clone for AnyAttribute {
@@ -49,7 +44,6 @@ pub struct AnyAttributeState {
type_id: TypeId,
state: ErasedLocal,
el: crate::renderer::types::Element,
keys: Vec<NamedAttributeKey>,
}

/// Converts an [`Attribute`] into [`AnyAttribute`].
@@ -90,7 +84,6 @@ where
) -> AnyAttributeState {
AnyAttributeState {
type_id: TypeId::of::<T>(),
keys: value.get_ref::<T>().keys(),
state: ErasedLocal::new(value.into_inner::<T>().build(&el)),
el,
}
@@ -103,7 +96,6 @@ where
) -> AnyAttributeState {
AnyAttributeState {
type_id: TypeId::of::<T>(),
keys: value.get_ref::<T>().keys(),
state: ErasedLocal::new(
value.into_inner::<T>().hydrate::<true>(&el),
),
@@ -118,7 +110,6 @@ where
) -> AnyAttributeState {
AnyAttributeState {
type_id: TypeId::of::<T>(),
keys: value.get_ref::<T>().keys(),
state: ErasedLocal::new(
value.into_inner::<T>().hydrate::<true>(&el),
),
@@ -149,12 +140,6 @@ where
async move {value.into_inner::<T>().resolve().await.into_any_attr()}.boxed()
}

fn keys<T: Attribute + 'static>(
value: &Erased,
) -> Vec<NamedAttributeKey> {
value.get_ref::<T>().keys()
}

let value = self.into_cloneable_owned();
AnyAttribute {
type_id: TypeId::of::<T::CloneableOwned>(),
@@ -173,7 +158,6 @@ where
resolve: resolve::<T::CloneableOwned>,
#[cfg(feature = "ssr")]
dry_resolve: dry_resolve::<T::CloneableOwned>,
keys: keys::<T::CloneableOwned>,
}
}
}
@@ -284,10 +268,6 @@ impl Attribute for AnyAttribute {
enabled."
);
}

fn keys(&self) -> Vec<NamedAttributeKey> {
(self.keys)(&self.value)
}
}

impl NextAttribute for Vec<AnyAttribute> {
@@ -306,7 +286,7 @@ impl Attribute for Vec<AnyAttribute> {
const MIN_LENGTH: usize = 0;

type AsyncOutput = Vec<AnyAttribute>;
type State = (Element, Vec<AnyAttributeState>);
type State = Vec<AnyAttributeState>;
type Cloneable = Vec<AnyAttribute>;
type CloneableOwned = Vec<AnyAttribute>;

@@ -341,19 +321,13 @@ impl Attribute for Vec<AnyAttribute> {
) -> Self::State {
#[cfg(feature = "hydrate")]
if FROM_SERVER {
(
el.clone(),
self.into_iter()
.map(|attr| attr.hydrate::<true>(el))
.collect(),
)
self.into_iter()
.map(|attr| attr.hydrate::<true>(el))
.collect()
} else {
(
el.clone(),
self.into_iter()
.map(|attr| attr.hydrate::<false>(el))
.collect(),
)
self.into_iter()
.map(|attr| attr.hydrate::<false>(el))
.collect()
}
#[cfg(not(feature = "hydrate"))]
{
@@ -366,34 +340,13 @@ impl Attribute for Vec<AnyAttribute> {
}

fn build(self, el: &crate::renderer::types::Element) -> Self::State {
(
el.clone(),
self.into_iter().map(|attr| attr.build(el)).collect(),
)
self.into_iter().map(|attr| attr.build(el)).collect()
}

fn rebuild(self, state: &mut Self::State) {
let (el, state) = state;
for old in mem::take(state) {
for key in old.keys {
match key {
NamedAttributeKey::InnerHtml => {
Rndr::set_inner_html(&old.el, "");
}
NamedAttributeKey::Property(prop_name) => {
Rndr::set_property(
&old.el,
&prop_name,
&wasm_bindgen::JsValue::UNDEFINED,
);
}
NamedAttributeKey::Attribute(key) => {
Rndr::remove_attribute(&old.el, &key);
}
}
}
}
for (attr, state) in self.into_iter().zip(state.iter_mut()) {
attr.rebuild(state)
}
*state = self.into_iter().map(|s| s.build(el)).collect();
}

fn into_cloneable(self) -> Self::Cloneable {
@@ -432,8 +385,4 @@ impl Attribute for Vec<AnyAttribute> {
enabled."
);
}

fn keys(&self) -> Vec<NamedAttributeKey> {
self.iter().flat_map(|s| s.keys()).collect()
}
}

@@ -4,7 +4,7 @@ use super::{
use crate::{
html::attribute::{
maybe_next_attr_erasure_macros::next_attr_combine, Attribute,
AttributeValue, NamedAttributeKey,
AttributeValue,
},
view::{add_attr::AddAnyAttr, Position, ToTemplate},
};
@@ -112,12 +112,6 @@ where
value: self.value.resolve().await,
}
}

fn keys(&self) -> Vec<NamedAttributeKey> {
vec![NamedAttributeKey::Attribute(
self.key.as_ref().to_string().into(),
)]
}
}

impl<K, V> NextAttribute for CustomAttr<K, V>

Some files were not shown because too many files have changed in this diff Show More