Mirror of https://github.com/leptos-rs/leptos.git, synced 2025-12-27 16:54:41 -05:00
Compare commits: wasm-split ... 4397 (129 commits)
Commits (SHA1):
b0e056bdcd, 7f4b5eb4d1, fbf46ca58c, 0edbd9b3b5, 43359694b6, 9dd5501b1a, 6843f654ff, cb7c648400, d3148ac9c9, 6d7e203efe,
b5c69937b4, 0b45ff5116, 13dc6f474d, 21218fc802, 65b5be2748, 7c30bb92f7, edf369f035, eb02304ee1, 578b672f14, b20902aaa1,
d3ad0c67b6, 62d8ec9cc5, 0d2523190d, 338da18ed2, 616aae4c3c, c6e59eeb43, c025ae59ac, df46feee5d, bbf5bf9170, 7a3556bf34,
d13936cab5, b303a35d76, a453b7d1bd, 3b9ccdf57e, 27cd423ebc, b3907baf49, 9a8bb7eb75, 95db8c939e, 2bfa9952af, 4e445f43d6,
5f544f67ae, 68477d2b76, 5bd9469b93, 4bca70dc2f, d0295009cf, 3e8b5c9805, 924efa8ac1, b92a14228c, 68967fdad3, 44bc4fbc31,
646cfc12ed, 62977a68b0, e9ee90c78f, 73e728f145, 6f047a2271, 7c942b8b47, d4bf6d9cb6, 9deb96ea01, d1899cde1c, ee731d7a3a,
59cbcfa0fb, 0939cf63ad, d37512bebd, 7dd44919cf, 3eaabf85ea, d60c632c90, f5ad4f4b88, f3a053f99b, 06573cbca1, 9f4d826533,
a305ae7227, 65557c5723, a529f87ee2, c0ca97e42f, 9a4e93ab07, bee2b5ea1c, 3b058e77f1, 7adb11ec49, 1af5f66ee6, 956f1836ec,
b54f80f529, a48a2994ee, aedcd5148c, 9160d8aaa6, 274fe07dae, 7add26fc41, d9213850f7, db9f323f8d, 1d0f668dc3, a97eceacf1,
3d6ea6d285, 99c3d8f9e9, a394eb211f, ceb7dd8ae5, f50adc00bc, 1340deee96, 8da3011a7f, 959677f018, 03529b3992, 8bfd0ce143,
47199bbbf3, 9ed7e9de61, 26ecbf4df5, b3885c7be4, 436e5aa141, 05cafa8b06, 9e3c0cc402, 30141293f6, 8f623a2d5b, f2fe791f6b,
30dbb7ccc8, b986fe11dc, e2e28ef180, a5e0053bab, 6c04a1cd76, 87fb947465, 5ba818132a, 30b917cfc3, 6cd731cbb1, f1b6b79e27,
623ee08f82, 877849a5dd, fb59da90c2, d33f5c9e77, deb8e96eb0, 181e4d0566, 525379a9b3, 783a233167, 8079956d1b
.github/workflows/autofix.yml (vendored, 11 lines changed)
@@ -17,17 +17,10 @@ env:
|
||||
jobs:
|
||||
autofix:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
with:
|
||||
{
|
||||
toolchain: "nightly-2025-07-16",
|
||||
components: "rustfmt, clippy",
|
||||
target: "wasm32-unknown-unknown",
|
||||
rustflags: "",
|
||||
}
|
||||
with: {toolchain: "nightly-2025-07-16", components: "rustfmt, clippy", target: "wasm32-unknown-unknown", rustflags: ""}
|
||||
- name: Install Glib
|
||||
run: |
|
||||
sudo apt-get update
|
||||
|
||||
.github/workflows/ci.yml (vendored, 2 lines changed)
@@ -63,6 +63,6 @@ jobs:
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libglib2.0-dev
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
- name: Semver Checks
|
||||
uses: obi1kenobi/cargo-semver-checks-action@v2
|
||||
|
||||
.github/workflows/get-example-changed.yml (vendored, 4 lines changed)
@@ -19,12 +19,12 @@ jobs:
|
||||
matrix: ${{ steps.set-example-changed.outputs.matrix }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Get example files that changed
|
||||
id: changed-files
|
||||
uses: tj-actions/changed-files@v46
|
||||
uses: tj-actions/changed-files@v47
|
||||
with:
|
||||
files: |
|
||||
examples/**
|
||||
|
||||
.github/workflows/get-examples-matrix.yml (vendored, 2 lines changed)
@@ -17,7 +17,7 @@ jobs:
|
||||
EXCLUDED_EXAMPLES: cargo-make
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
- name: Install jq
|
||||
run: sudo apt-get install jq
|
||||
- name: Set Matrix
|
||||
|
||||
.github/workflows/get-leptos-changed.yml (vendored, 4 lines changed)
@@ -13,12 +13,12 @@ jobs:
|
||||
leptos_changed: ${{ steps.set-source-changed.outputs.leptos_changed }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Get source files that changed
|
||||
id: changed-source
|
||||
uses: tj-actions/changed-files@v46
|
||||
uses: tj-actions/changed-files@v47
|
||||
with:
|
||||
files_ignore: |
|
||||
.*/**/*
|
||||
|
||||
.github/workflows/get-leptos-matrix.yml (vendored, 2 lines changed)
@@ -13,7 +13,7 @@ jobs:
|
||||
matrix: ${{ steps.set-matrix.outputs.matrix }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v5
|
||||
- name: Install jq
|
||||
run: sudo apt-get install jq
|
||||
- name: Set Matrix
|
||||
|
||||
.github/workflows/publish-book.yml (vendored, 2 lines changed)
@@ -12,7 +12,7 @@ jobs:
|
||||
contents: write # To push a branch
|
||||
pull-requests: write # To create a PR from that branch
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Install mdbook
|
||||
|
||||
.github/workflows/run-cargo-make-task.yml (vendored, 4 lines changed)
@@ -53,7 +53,7 @@ jobs:
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libglib2.0-dev
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v5
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
@@ -88,7 +88,7 @@ jobs:
|
||||
run: trunk --version
|
||||
- name: Install Node.js
|
||||
if: contains(inputs.directory, 'examples')
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: 20
|
||||
- uses: pnpm/action-setup@v4
|
||||
|
||||
Cargo.lock (generated, 1204 lines changed; diff suppressed because it is too large)
Cargo.toml (104 lines changed)
@@ -40,50 +40,47 @@ members = [
|
||||
exclude = ["benchmarks", "examples", "projects"]
|
||||
|
||||
[workspace.package]
|
||||
version = "0.8.5"
|
||||
edition = "2021"
|
||||
rust-version = "1.88"
|
||||
|
||||
[workspace.dependencies]
|
||||
# members
|
||||
throw_error = { path = "./any_error/", version = "0.3.0" }
|
||||
throw_error = { path = "./any_error/", version = "0.3.1" }
|
||||
any_spawner = { path = "./any_spawner/", version = "0.3.0" }
|
||||
const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
|
||||
either_of = { path = "./either_of/", version = "0.1.6" }
|
||||
hydration_context = { path = "./hydration_context", version = "0.3.0" }
|
||||
leptos = { path = "./leptos", version = "0.8.5" }
|
||||
leptos_config = { path = "./leptos_config", version = "0.8.5" }
|
||||
leptos_dom = { path = "./leptos_dom", version = "0.8.5" }
|
||||
leptos = { path = "./leptos", version = "0.8.12" }
|
||||
leptos_config = { path = "./leptos_config", version = "0.8.7" }
|
||||
leptos_dom = { path = "./leptos_dom", version = "0.8.7" }
|
||||
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.5" }
|
||||
leptos_integration_utils = { path = "./integrations/utils", version = "0.8.5" }
|
||||
leptos_macro = { path = "./leptos_macro", version = "0.8.5" }
|
||||
leptos_router = { path = "./router", version = "0.8.5" }
|
||||
leptos_router_macro = { path = "./router_macro", version = "0.8.5" }
|
||||
leptos_integration_utils = { path = "./integrations/utils", version = "0.8.6" }
|
||||
leptos_macro = { path = "./leptos_macro", version = "0.8.11" }
|
||||
leptos_router = { path = "./router", version = "0.8.9" }
|
||||
leptos_router_macro = { path = "./router_macro", version = "0.8.6" }
|
||||
leptos_server = { path = "./leptos_server", version = "0.8.5" }
|
||||
leptos_meta = { path = "./meta", version = "0.8.5" }
|
||||
next_tuple = { path = "./next_tuple", version = "0.1.0" }
|
||||
oco_ref = { path = "./oco", version = "0.2.1" }
|
||||
or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
|
||||
reactive_graph = { path = "./reactive_graph", version = "0.2.5" }
|
||||
reactive_stores = { path = "./reactive_stores", version = "0.2.5" }
|
||||
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.5" }
|
||||
server_fn = { path = "./server_fn", version = "0.8.5" }
|
||||
server_fn_macro = { path = "./server_fn_macro", version = "0.8.5" }
|
||||
reactive_graph = { path = "./reactive_graph", version = "0.2.9" }
|
||||
reactive_stores = { path = "./reactive_stores", version = "0.3.0" }
|
||||
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.6" }
|
||||
server_fn = { path = "./server_fn", version = "0.8.8" }
|
||||
server_fn_macro = { path = "./server_fn_macro", version = "0.8.8" }
|
||||
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.5" }
|
||||
tachys = { path = "./tachys", version = "0.2.6" }
|
||||
wasm_split_helpers = { path = "./wasm_split", version = "0.1.0" }
|
||||
wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.0" }
|
||||
tachys = { path = "./tachys", version = "0.2.10" }
|
||||
|
||||
# members deps
|
||||
async-once-cell = { default-features = false, version = "0.5.3" }
|
||||
itertools = { default-features = false, version = "0.14.0" }
|
||||
convert_case = { default-features = false, version = "0.8.0" }
|
||||
serde_json = { default-features = false, version = "1.0.140" }
|
||||
trybuild = { default-features = false, version = "1.0.106" }
|
||||
typed-builder = { default-features = false, version = "0.21.0" }
|
||||
thiserror = { default-features = false, version = "2.0.12" }
|
||||
serde_json = { default-features = false, version = "1.0.143" }
|
||||
trybuild = { default-features = false, version = "1.0.110" }
|
||||
typed-builder = { default-features = false, version = "0.21.2" }
|
||||
thiserror = { default-features = false, version = "2.0.17" }
|
||||
wasm-bindgen = { default-features = false, version = "0.2.100" }
|
||||
indexmap = { default-features = false, version = "2.9.0" }
|
||||
indexmap = { default-features = false, version = "2.11.0" }
|
||||
rstml = { default-features = false, version = "0.12.1" }
|
||||
rustc_version = { default-features = false, version = "0.4.1" }
|
||||
guardian = { default-features = false, version = "1.3.0" }
|
||||
@@ -98,79 +95,84 @@ send_wrapper = { default-features = false, version = "0.6.0" }
|
||||
tokio-test = { default-features = false, version = "0.4.4" }
|
||||
html-escape = { default-features = false, version = "0.2.13" }
|
||||
proc-macro-error2 = { default-features = false, version = "2.0.1" }
|
||||
const_format = { default-features = false, version = "0.2.34" }
|
||||
const_format = { default-features = false, version = "0.2.35" }
|
||||
gloo-net = { default-features = false, version = "0.6.0" }
|
||||
url = { default-features = false, version = "2.5.4" }
|
||||
tokio = { default-features = false, version = "1.46.1" }
|
||||
tokio = { default-features = false, version = "1.47.1" }
|
||||
base64 = { default-features = false, version = "0.22.1" }
|
||||
cfg-if = { default-features = false, version = "1.0.0" }
|
||||
cfg-if = { default-features = false, version = "1.0.3" }
|
||||
wasm-bindgen-futures = { default-features = false, version = "0.4.50" }
|
||||
tower = { default-features = false, version = "0.5.2" }
|
||||
proc-macro2 = { default-features = false, version = "1.0.95" }
|
||||
proc-macro2 = { default-features = false, version = "1.0.101" }
|
||||
serde = { default-features = false, version = "1.0.219" }
|
||||
parking_lot = { default-features = false, version = "0.12.4" }
|
||||
axum = { default-features = false, version = "0.8.4" }
|
||||
parking_lot = { default-features = false, version = "0.12.5" }
|
||||
axum = { default-features = false, version = "0.8.6" }
|
||||
serde_qs = { default-features = false, version = "0.15.0" }
|
||||
syn = { default-features = false, version = "2.0.104" }
|
||||
syn = { default-features = false, version = "2.0.106" }
|
||||
xxhash-rust = { default-features = false, version = "0.8.15" }
|
||||
paste = { default-features = false, version = "1.0.15" }
|
||||
quote = { default-features = false, version = "1.0.40" }
|
||||
quote = { default-features = false, version = "1.0.41" }
|
||||
web-sys = { default-features = false, version = "0.3.77" }
|
||||
js-sys = { default-features = false, version = "0.3.77" }
|
||||
rand = { default-features = false, version = "0.9.1" }
|
||||
serde-lite = { default-features = false, version = "0.5.0" }
|
||||
tokio-tungstenite = { default-features = false, version = "0.27.0" }
|
||||
tokio-tungstenite = { default-features = false, version = "0.28.0" }
|
||||
serial_test = { default-features = false, version = "3.2.0" }
|
||||
erased = { default-features = false, version = "0.1.2" }
|
||||
glib = { default-features = false, version = "0.20.12" }
|
||||
async-trait = { default-features = false, version = "0.1.88" }
|
||||
async-trait = { default-features = false, version = "0.1.89" }
|
||||
typed-builder-macro = { default-features = false, version = "0.21.0" }
|
||||
linear-map = { default-features = false, version = "1.2.0" }
|
||||
anyhow = { default-features = false, version = "1.0.98" }
|
||||
anyhow = { default-features = false, version = "1.0.100" }
|
||||
walkdir = { default-features = false, version = "2.5.0" }
|
||||
actix-ws = { default-features = false, version = "0.3.0" }
|
||||
tower-http = { default-features = false, version = "0.6.4" }
|
||||
prettyplease = { default-features = false, version = "0.2.35" }
|
||||
inventory = { default-features = false, version = "0.3.20" }
|
||||
config = { default-features = false, version = "0.15.13" }
|
||||
camino = { default-features = false, version = "1.1.9" }
|
||||
prettyplease = { default-features = false, version = "0.2.37" }
|
||||
inventory = { default-features = false, version = "0.3.21" }
|
||||
config = { default-features = false, version = "0.15.14" }
|
||||
camino = { default-features = false, version = "1.2.1" }
|
||||
ciborium = { default-features = false, version = "0.2.2" }
|
||||
bitcode = { default-features = false, version = "0.6.6" }
|
||||
multer = { default-features = false, version = "3.1.0" }
|
||||
leptos-spin-macro = { default-features = false, version = "0.2.0" }
|
||||
sledgehammer_utils = { default-features = false, version = "0.3.1" }
|
||||
sledgehammer_bindgen = { default-features = false, version = "0.6.0" }
|
||||
wasm-streams = { default-features = false, version = "0.4.2" }
|
||||
rkyv = { default-features = false, version = "0.8.10" }
|
||||
rkyv = { default-features = false, version = "0.8.12" }
|
||||
temp-env = { default-features = false, version = "0.3.6" }
|
||||
uuid = { default-features = false, version = "1.17.0" }
|
||||
uuid = { default-features = false, version = "1.18.0" }
|
||||
bytes = { default-features = false, version = "1.10.1" }
|
||||
http = { default-features = false, version = "1.3.1" }
|
||||
regex = { default-features = false, version = "1.11.1" }
|
||||
regex = { default-features = false, version = "1.11.3" }
|
||||
drain_filter_polyfill = { default-features = false, version = "0.1.3" }
|
||||
tempfile = { default-features = false, version = "3.20.0" }
|
||||
futures-lite = { default-features = false, version = "2.6.0" }
|
||||
tempfile = { default-features = false, version = "3.23.0" }
|
||||
futures-lite = { default-features = false, version = "2.6.1" }
|
||||
log = { default-features = false, version = "0.4.27" }
|
||||
percent-encoding = { default-features = false, version = "2.3.1" }
|
||||
percent-encoding = { default-features = false, version = "2.3.2" }
|
||||
async-executor = { default-features = false, version = "1.13.2" }
|
||||
const-str = { default-features = false, version = "0.6.3" }
|
||||
const-str = { default-features = false, version = "0.6.4" }
|
||||
http-body-util = { default-features = false, version = "0.1.3" }
|
||||
hyper = { default-features = false, version = "1.6.0" }
|
||||
postcard = { default-features = false, version = "1.1.1" }
|
||||
hyper = { default-features = false, version = "1.7.0" }
|
||||
postcard = { default-features = false, version = "1.1.3" }
|
||||
rmp-serde = { default-features = false, version = "1.3.0" }
|
||||
reqwest = { default-features = false, version = "0.12.22" }
|
||||
reqwest = { default-features = false, version = "0.12.23" }
|
||||
tower-layer = { default-features = false, version = "0.3.3" }
|
||||
attribute-derive = { default-features = false, version = "0.10.3" }
|
||||
attribute-derive = { default-features = false, version = "0.10.5" }
|
||||
insta = { default-features = false, version = "1.43.1" }
|
||||
codee = { default-features = false, version = "0.3.0" }
|
||||
actix-http = { default-features = false, version = "3.11.0" }
|
||||
actix-http = { default-features = false, version = "3.11.2" }
|
||||
wasm-bindgen-test = { default-features = false, version = "0.3.50" }
|
||||
rustversion = { default-features = false, version = "1.0.21" }
|
||||
rustversion = { default-features = false, version = "1.0.22" }
|
||||
getrandom = { default-features = false, version = "0.3.3" }
|
||||
actix-files = { default-features = false, version = "0.6.6" }
|
||||
async-lock = { default-features = false, version = "3.4.0" }
|
||||
async-lock = { default-features = false, version = "3.4.1" }
|
||||
base16 = { default-features = false, version = "0.2.1" }
|
||||
digest = { default-features = false, version = "0.10.7" }
|
||||
sha2 = { default-features = false, version = "0.10.8" }
|
||||
subsecond = { default-features = false, version = "0.7.0-rc.0" }
|
||||
dioxus-cli-config = { default-features = false, version = "0.7.0-rc.0" }
|
||||
dioxus-devtools = { default-features = false, version = "0.7.0-rc.0" }
|
||||
wasm_split_helpers = { default-features = false, version = "0.2.0" }
|
||||
|
||||
[profile.release]
|
||||
codegen-units = 1
|
||||
|
||||
@@ -95,7 +95,7 @@ Here are some resources for learning more about Leptos:
|
||||
[`cargo-leptos`](https://github.com/leptos-rs/cargo-leptos) is a build tool that's designed to make it easy to build apps that run on both the client and the server, with seamless integration. The best way to get started with a real Leptos project right now is to use `cargo-leptos` and our starter templates for [Actix](https://github.com/leptos-rs/start) or [Axum](https://github.com/leptos-rs/start-axum).
|
||||
|
||||
```bash
|
||||
cargo install cargo-leptos
|
||||
cargo install cargo-leptos --locked
|
||||
cargo leptos new --git https://github.com/leptos-rs/start-axum
|
||||
cd [your project name]
|
||||
cargo leptos watch
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "throw_error"
|
||||
version = "0.3.0"
|
||||
version = "0.3.1"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -11,3 +11,6 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
pin-project-lite = { workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
anyhow.workspace = true
|
||||
|
||||
@@ -45,10 +45,10 @@ impl fmt::Display for Error {
|
||||
|
||||
impl<T> From<T> for Error
|
||||
where
|
||||
T: error::Error + Send + Sync + 'static,
|
||||
T: Into<Box<dyn error::Error + Send + Sync + 'static>>,
|
||||
{
|
||||
fn from(value: T) -> Self {
|
||||
Error(Arc::new(value))
|
||||
Error(Arc::from(value.into()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -158,3 +158,32 @@ where
|
||||
this.inner.poll(cx)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::error::Error as StdError;
|
||||
|
||||
#[derive(Debug)]
|
||||
struct MyError;
|
||||
|
||||
impl Display for MyError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "MyError")
|
||||
}
|
||||
}
|
||||
|
||||
impl StdError for MyError {}
|
||||
|
||||
#[test]
|
||||
fn test_from() {
|
||||
let e = MyError;
|
||||
let _le = Error::from(e);
|
||||
|
||||
let e = "some error".to_string();
|
||||
let _le = Error::from(e);
|
||||
|
||||
let e = anyhow::anyhow!("anyhow error");
|
||||
let _le = Error::from(e);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -305,7 +305,10 @@ impl LazyRoute for ViewD {
|
||||
}
|
||||
}
|
||||
|
||||
// Server functions can be made lazy by combining the two macros,
|
||||
// with `#[server]` coming first, then `#[lazy]`
|
||||
#[server]
|
||||
#[lazy]
|
||||
async fn d_data() -> Result<Vec<i32>, ServerFnError> {
|
||||
tokio::time::sleep(std::time::Duration::from_millis(250)).await;
|
||||
Ok(vec![1, 1, 2, 3, 5, 8, 13])
|
||||
|
||||
examples/regression/e2e/features/issue_4005.feature (new file, 7 lines)
@@ -0,0 +1,7 @@
|
||||
@check_issue_4005
|
||||
Feature: Check that issue 4005 does not reappear
|
||||
|
||||
Scenario: The second item is selected.
|
||||
Given I see the app
|
||||
And I can access regression test 4005
|
||||
Then I see the value of select is 2
|
||||
examples/regression/e2e/features/issue_4217.feature (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
@check_issue_4217
|
||||
Feature: Check that issue 4217 does not reappear
|
||||
|
||||
Scenario: All items are selected.
|
||||
Given I see the app
|
||||
And I can access regression test 4217
|
||||
Then I see option1 is selected
|
||||
And I see option2 is selected
|
||||
And I see option3 is selected
|
||||
examples/regression/e2e/features/issue_4251.feature (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
@check_issue_4251
|
||||
Feature: Check that issue 4251 does not reappear
|
||||
|
||||
Scenario: Clicking a link to the same page you’re currently on should not add the page to the history stack.
|
||||
Given I see the app
|
||||
And I can access regression test 4324
|
||||
When I select the link This page
|
||||
And I select the link This page
|
||||
And I select the link This page
|
||||
Then I see the result is the string Issue4324
|
||||
When I press the back button
|
||||
And I select the link 4324
|
||||
Then I see the result is the string Issue4324
|
||||
examples/regression/e2e/features/issue_4285.feature (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
@check_issue_4285
|
||||
Feature: Check that issue 4285 does not reappear
|
||||
|
||||
Scenario: Navigating several times to same lazy route does not cause issues.
|
||||
Given I see the app
|
||||
And I can access regression test 4285
|
||||
And I can access regression test 4285
|
||||
And I can access regression test 4285
|
||||
Then I see the result is the string 42
|
||||
examples/regression/e2e/features/issue_4296.feature (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
@check_issue_4296
|
||||
Feature: Check that issue 4296 does not reappear
|
||||
|
||||
Scenario: Query param signals created in LazyRoute::data() are reactive in ::view().
|
||||
Given I see the app
|
||||
And I can access regression test 4296
|
||||
Then I see the result is the string None
|
||||
When I select the link abc
|
||||
Then I see the result is the string Some("abc")
|
||||
When I select the link def
|
||||
Then I see the result is the string Some("def")
|
||||
|
||||
Scenario: Loading page with query signal works as well.
|
||||
Given I see the app
|
||||
And I can access regression test 4296
|
||||
When I select the link abc
|
||||
When I reload the page
|
||||
Then I see the result is the string Some("abc")
|
||||
examples/regression/e2e/features/issue_4324.feature (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
@check_issue_4324
|
||||
Feature: Check that issue 4324 does not reappear
|
||||
|
||||
Scenario: Navigating to the same page after clicking "Back" should set the URL correctly
|
||||
Given I see the app
|
||||
And I can access regression test 4324
|
||||
Then I see the path is /4324/
|
||||
When I press the back button
|
||||
Then I see the path is /
|
||||
When I select the link 4324
|
||||
Then I see the path is /4324/
|
||||
examples/regression/e2e/tests/fixtures/check.rs (vendored, 35 lines changed)
@@ -18,3 +18,38 @@ pub async fn element_exists(client: &Client, id: &str) -> Result<()> {
|
||||
.expect(&format!("could not find element with id `{id}`"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn select_option_is_selected(
|
||||
client: &Client,
|
||||
id: &str,
|
||||
) -> Result<()> {
|
||||
let el = find::element_by_id(client, id)
|
||||
.await
|
||||
.expect(&format!("could not find element with id `{id}`"));
|
||||
let selected = el.prop("selected").await?;
|
||||
assert_eq!(selected.as_deref(), Some("true"));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn element_value_is(
|
||||
client: &Client,
|
||||
id: &str,
|
||||
expected: &str,
|
||||
) -> Result<()> {
|
||||
let el = find::element_by_id(client, id)
|
||||
.await
|
||||
.expect(&format!("could not find element with id `{id}`"));
|
||||
let value = el.prop("value").await?;
|
||||
assert_eq!(value.as_deref(), Some(expected));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn path_is(client: &Client, expected_path: &str) -> Result<()> {
|
||||
let url = client
|
||||
.current_url()
|
||||
.await
|
||||
.expect("could not access current URL");
|
||||
let path = url.path();
|
||||
assert_eq!(expected_path, path);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -45,3 +45,12 @@ async fn i_refresh_the_browser(world: &mut AppWorld) -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[given(regex = "^I press the back button$")]
|
||||
#[when(regex = "^I press the back button$")]
|
||||
async fn i_go_back(world: &mut AppWorld) -> Result<()> {
|
||||
let client = &world.client;
|
||||
client.back().await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -25,3 +25,28 @@ async fn i_see_the_navbar(world: &mut AppWorld) -> Result<()> {
|
||||
check::element_exists(client, "nav").await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[then(regex = r"^I see ([\d\w]+) is selected$")]
|
||||
async fn i_see_the_select(world: &mut AppWorld, id: String) -> Result<()> {
|
||||
let client = &world.client;
|
||||
check::select_option_is_selected(client, &id).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[then(regex = r"^I see the value of (\w+) is (.*)$")]
|
||||
async fn i_see_the_value(
|
||||
world: &mut AppWorld,
|
||||
id: String,
|
||||
value: String,
|
||||
) -> Result<()> {
|
||||
let client = &world.client;
|
||||
check::element_value_is(client, &id, &value).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[then(regex = r"^I see the path is (.*)$")]
|
||||
async fn i_see_the_path(world: &mut AppWorld, path: String) -> Result<()> {
|
||||
let client = &world.client;
|
||||
check::path_is(client, &path).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
use crate::{issue_4088::Routes4088, pr_4015::Routes4015, pr_4091::Routes4091};
|
||||
use crate::{
|
||||
issue_4005::Routes4005, issue_4088::Routes4088, issue_4217::Routes4217,
|
||||
issue_4285::Routes4285, issue_4296::Routes4296, issue_4324::Routes4324,
|
||||
pr_4015::Routes4015, pr_4091::Routes4091,
|
||||
};
|
||||
use leptos::prelude::*;
|
||||
use leptos_meta::{MetaTags, *};
|
||||
use leptos_router::{
|
||||
@@ -28,15 +32,22 @@ pub fn shell(options: LeptosOptions) -> impl IntoView {
|
||||
pub fn App() -> impl IntoView {
|
||||
provide_meta_context();
|
||||
let fallback = || view! { "Page not found." }.into_view();
|
||||
let (_, set_is_routing) = signal(false);
|
||||
|
||||
view! {
|
||||
<Stylesheet id="leptos" href="/pkg/regression.css"/>
|
||||
<Router>
|
||||
<Router set_is_routing>
|
||||
<main>
|
||||
<Routes fallback>
|
||||
<Route path=path!("") view=HomePage/>
|
||||
<Routes4091/>
|
||||
<Routes4015/>
|
||||
<Routes4088/>
|
||||
<Routes4217/>
|
||||
<Routes4005/>
|
||||
<Routes4285/>
|
||||
<Routes4296/>
|
||||
<Routes4324/>
|
||||
</Routes>
|
||||
</main>
|
||||
</Router>
|
||||
@@ -59,6 +70,11 @@ fn HomePage() -> impl IntoView {
|
||||
<li><a href="/4091/">"4091"</a></li>
|
||||
<li><a href="/4015/">"4015"</a></li>
|
||||
<li><a href="/4088/">"4088"</a></li>
|
||||
<li><a href="/4217/">"4217"</a></li>
|
||||
<li><a href="/4005/">"4005"</a></li>
|
||||
<li><a href="/4285/">"4285"</a></li>
|
||||
<li><a href="/4296/">"4296"</a></li>
|
||||
<li><a href="/4324/">"4324"</a></li>
|
||||
</ul>
|
||||
</nav>
|
||||
}
|
||||
|
||||
examples/regression/src/issue_4005.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
use leptos::prelude::*;
|
||||
#[allow(unused_imports)]
|
||||
use leptos_router::{
|
||||
components::Route, path, MatchNestedRoutes, NavigateOptions,
|
||||
};
|
||||
|
||||
#[component]
|
||||
pub fn Routes4005() -> impl MatchNestedRoutes + Clone {
|
||||
view! {
|
||||
<Route path=path!("4005") view=Issue4005/>
|
||||
}
|
||||
.into_inner()
|
||||
}
|
||||
|
||||
#[component]
|
||||
fn Issue4005() -> impl IntoView {
|
||||
view! {
|
||||
<select id="select" prop:value="2">
|
||||
<option value="1">"Option 1"</option>
|
||||
<option value="2">"Option 2"</option>
|
||||
<option value="3">"Option 3"</option>
|
||||
</select>
|
||||
}
|
||||
}
|
||||
examples/regression/src/issue_4217.rs (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
use leptos::prelude::*;
|
||||
#[allow(unused_imports)]
|
||||
use leptos_router::{
|
||||
components::Route, path, MatchNestedRoutes, NavigateOptions,
|
||||
};
|
||||
|
||||
#[component]
|
||||
pub fn Routes4217() -> impl MatchNestedRoutes + Clone {
|
||||
view! {
|
||||
<Route path=path!("4217") view=Issue4217/>
|
||||
}
|
||||
.into_inner()
|
||||
}
|
||||
|
||||
#[component]
|
||||
fn Issue4217() -> impl IntoView {
|
||||
view! {
|
||||
<select multiple=true>
|
||||
<option id="option1" value="1" selected>"Option 1"</option>
|
||||
<option id="option2" value="2" selected>"Option 2"</option>
|
||||
<option id="option3" value="3" selected>"Option 3"</option>
|
||||
</select>
|
||||
}
|
||||
}
|
||||
examples/regression/src/issue_4285.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
use leptos::prelude::*;
|
||||
use leptos_router::LazyRoute;
|
||||
#[allow(unused_imports)]
|
||||
use leptos_router::{
|
||||
components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
|
||||
};
|
||||
|
||||
#[component]
|
||||
pub fn Routes4285() -> impl MatchNestedRoutes + Clone {
|
||||
view! {
|
||||
<Route path=path!("4285") view={Lazy::<Issue4285>::new()}/>
|
||||
}
|
||||
.into_inner()
|
||||
}
|
||||
|
||||
struct Issue4285 {
|
||||
data: Resource<Result<i32, ServerFnError>>,
|
||||
}
|
||||
|
||||
impl LazyRoute for Issue4285 {
|
||||
fn data() -> Self {
|
||||
Self {
|
||||
data: Resource::new(|| (), |_| slow_call()),
|
||||
}
|
||||
}
|
||||
|
||||
async fn view(this: Self) -> AnyView {
|
||||
let Issue4285 { data } = this;
|
||||
view! {
|
||||
<Suspense>
|
||||
{move || {
|
||||
Suspend::new(async move {
|
||||
let data = data.await;
|
||||
view! {
|
||||
<p id="result">{data}</p>
|
||||
}
|
||||
})
|
||||
}}
|
||||
</Suspense>
|
||||
}
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
|
||||
#[server]
|
||||
async fn slow_call() -> Result<i32, ServerFnError> {
|
||||
tokio::time::sleep(std::time::Duration::from_millis(250)).await;
|
||||
Ok(42)
|
||||
}
|
||||
examples/regression/src/issue_4296.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
use leptos::prelude::*;
|
||||
#[allow(unused_imports)]
|
||||
use leptos_router::{
|
||||
components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
|
||||
};
|
||||
use leptos_router::{hooks::use_query_map, LazyRoute};
|
||||
|
||||
#[component]
|
||||
pub fn Routes4296() -> impl MatchNestedRoutes + Clone {
|
||||
view! {
|
||||
<Route path=path!("4296") view={Lazy::<Issue4296>::new()}/>
|
||||
}
|
||||
.into_inner()
|
||||
}
|
||||
|
||||
struct Issue4296 {
|
||||
query: Signal<Option<String>>,
|
||||
}
|
||||
|
||||
impl LazyRoute for Issue4296 {
|
||||
fn data() -> Self {
|
||||
let query = use_query_map();
|
||||
let query = Signal::derive(move || query.read().get("q"));
|
||||
Self { query }
|
||||
}
|
||||
|
||||
async fn view(this: Self) -> AnyView {
|
||||
let Issue4296 { query } = this;
|
||||
view! {
|
||||
<a href="?q=abc">"abc"</a>
|
||||
<a href="?q=def">"def"</a>
|
||||
<p id="result">{move || format!("{:?}", query.get())}</p>
|
||||
}
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
examples/regression/src/issue_4324.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
use leptos::prelude::*;
|
||||
#[allow(unused_imports)]
|
||||
use leptos_router::{
|
||||
components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
|
||||
};
|
||||
|
||||
#[component]
|
||||
pub fn Routes4324() -> impl MatchNestedRoutes + Clone {
|
||||
view! {
|
||||
<Route path=path!("4324") view=Issue4324/>
|
||||
}
|
||||
.into_inner()
|
||||
}
|
||||
|
||||
#[component]
|
||||
pub fn Issue4324() -> impl IntoView {
|
||||
view! {
|
||||
<a href="/4324/">"This page"</a>
|
||||
<p id="result">"Issue4324"</p>
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,10 @@
|
||||
pub mod app;
|
||||
mod issue_4005;
|
||||
mod issue_4088;
|
||||
mod issue_4217;
|
||||
mod issue_4285;
|
||||
mod issue_4296;
|
||||
mod issue_4324;
|
||||
mod pr_4015;
|
||||
mod pr_4091;
|
||||
|
||||
|
||||
@@ -440,7 +440,14 @@ pub fn FileUploadWithProgress() -> impl IntoView {
|
||||
let mut entry =
|
||||
FILES.entry(filename.to_string()).or_insert_with(|| {
|
||||
println!("[{filename}]\tinserting channel");
|
||||
let (tx, rx) = broadcast(128);
|
||||
// NOTE: this channel capacity is set arbitrarily for this demo code.
|
||||
// it allows for up to exactly 1048 chunks to be sent, which sets an upper cap
|
||||
// on upload size (the precise details vary by client)
|
||||
// in a real system, you will want to create some more reasonable ways of
|
||||
// sending and sharing notifications
|
||||
//
|
||||
// see https://github.com/leptos-rs/leptos/issues/4397 for related discussion
|
||||
let (tx, rx) = broadcast(1048);
|
||||
File { total: 0, tx, rx }
|
||||
});
|
||||
entry.total += len;
|
||||
|
||||
examples/subsecond_hot_patch/.gitignore (new file, vendored, 7 lines)
@@ -0,0 +1,7 @@
|
||||
# Generated by Cargo
|
||||
# will have compiled files and executables
|
||||
/target
|
||||
.DS_Store
|
||||
|
||||
# These are backup files generated by rustfmt
|
||||
**/*.rs.bk
|
||||
examples/subsecond_hot_patch/Cargo.toml (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "subsecond_hot_patch"
|
||||
version = "0.1.0"
|
||||
authors = ["Greg Johnston <greg.johnston@gmail.com>"]
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
leptos = { path = "../../leptos", features = ["csr", "subsecond"] }
|
||||
leptos_router = { path = "../../router" }
|
||||
|
||||
[features]
|
||||
default = ["web"]
|
||||
web = []
|
||||
examples/subsecond_hot_patch/Dioxus.toml (new file, 21 lines)
@@ -0,0 +1,21 @@
|
||||
[application]
|
||||
|
||||
[web.app]
|
||||
|
||||
# HTML title tag content
|
||||
title = "ltest"
|
||||
|
||||
# include `assets` in web platform
|
||||
[web.resource]
|
||||
|
||||
# Additional CSS style files
|
||||
style = []
|
||||
|
||||
# Additional JavaScript files
|
||||
script = []
|
||||
|
||||
[web.resource.dev]
|
||||
|
||||
# Javascript code file
|
||||
# serve: [dev-server] only
|
||||
script = []
|
||||
examples/subsecond_hot_patch/Makefile.toml (new file, 1 line)
@@ -0,0 +1 @@
|
||||
extend = [{ path = "../cargo-make/main.toml" }]
|
||||
examples/subsecond_hot_patch/README.md (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
# Hot Patching with `dx`
|
||||
|
||||
This is an experimental example exploring how to combine Leptos with the binary hot-patching provided by Dioxus's `subsecond` library and `dx` cli.
|
||||
|
||||
### Serving Your App
|
||||
|
||||
This requires installing the Dioxus CLI version 0.7.0. At the time I'm writing this README, that does not yet have a stable release. Once `dioxus-cli` 0.7.0 has been released, you should use the latest stable release. Until then, I'd suggest installing from git:
|
||||
|
||||
```sh
|
||||
cargo install dioxus-cli --git https://github.com/DioxusLabs/dioxus
|
||||
```
|
||||
|
||||
Then you can run the example with `dx serve --hot-patch --platform web`.
|
||||
|
||||
### Hot Patching
|
||||
|
||||
Changes to your application should be reflected without a full rebuild and reload.
|
||||
|
||||
### Limitations
|
||||
|
||||
Currently we only support hot-patching for reactive view functions. You probably want to use `AnyView` (via `.into_any()`) on any views that will be hot-patched, so they can be rebuilt correctly despite their types changing when the structure of the view tree changes.
|
||||
|
||||
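As a minimal sketch of that suggestion (the `Counter` component and its contents are hypothetical, not part of this example), erasing the returned view keeps its type stable when a patch changes the view tree:

```rust
use leptos::prelude::*;

// Hypothetical component: erase the view to `AnyView` so a hot patch that
// changes the structure of the view tree can still swap the body in cleanly.
#[component]
fn Counter() -> impl IntoView {
    let (count, set_count) = signal(0);
    view! {
        <button on:click=move |_| set_count.update(|n| *n += 1)>
            "Count: " {count}
        </button>
    }
    .into_any() // returns AnyView, so the concrete view type no longer leaks
}
```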
If you are using `leptos_router` this actually works quite well, as every route’s view is erased to `AnyView` and the router itself is a reactive view function: in other words, changes inside any route should be hot-patched in any case.
|
||||
|
||||
Note that any hot-patch will cause all render effects to run again. This means that some client-side state (like the values of signals) will be wiped out.
|
||||
|
||||
### Build Tooling
|
||||
|
||||
The preference of the Dioxus team is that all hot-patching work that uses their `subsecond` also use `dioxus-cli`. As this demo shows, it's completely possible to use `dioxus-cli` to build and run a Leptos project. We do not plan to build `subsecond` into our own build tooling at this time.
|
||||
|
||||
**This is an experiment/POC. It is being published because members of the community have found it useful and have asked for the support to be merged in its current state. Further development and bugfixes are a relatively low priority at this time.**
|
||||
examples/subsecond_hot_patch/assets/favicon.ico (new binary file, 130 KiB; binary file not shown)
examples/subsecond_hot_patch/assets/header.svg (new file, 20 lines, 23 KiB; diff suppressed because one or more lines are too long)
examples/subsecond_hot_patch/assets/main.css (new file, 46 lines)
@@ -0,0 +1,46 @@
|
||||
/* App-wide styling */
|
||||
body {
|
||||
background-color: #0f1116;
|
||||
color: #ffffff;
|
||||
font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
|
||||
margin: 20px;
|
||||
}
|
||||
|
||||
#hero {
|
||||
margin: 0;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
#links {
|
||||
width: 400px;
|
||||
text-align: left;
|
||||
font-size: x-large;
|
||||
color: white;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
#links a {
|
||||
color: white;
|
||||
text-decoration: none;
|
||||
margin-top: 20px;
|
||||
margin: 10px 0px;
|
||||
border: white 1px solid;
|
||||
border-radius: 5px;
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
#links a:hover {
|
||||
background-color: #1f1f1f;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
#header {
|
||||
max-width: 1200px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
examples/subsecond_hot_patch/src/main.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
use leptos::{prelude::*, subsecond::connect_to_hot_patch_messages};
|
||||
use leptos_router::{
|
||||
components::{Route, Router, Routes},
|
||||
path,
|
||||
};
|
||||
|
||||
fn main() {
|
||||
// connect to DX CLI and patch the WASM binary whenever we receive a message
|
||||
connect_to_hot_patch_messages();
|
||||
|
||||
// wrapping App here in a closure so we can hot-reload it, because we only do that
|
||||
// for reactive views right now. changing anything will re-run App and update the view
|
||||
mount_to_body(|| App);
|
||||
}
|
||||
|
||||
#[component]
|
||||
fn App() -> impl IntoView {
|
||||
view! {
|
||||
<nav>
|
||||
<a href="/">"Home"</a>
|
||||
<a href="/about">"About"</a>
|
||||
</nav>
|
||||
<Router>
|
||||
<Routes fallback=|| "Not found">
|
||||
<Route path=path!("/") view=HomePage/>
|
||||
<Route path=path!("/about") view=About/>
|
||||
</Routes>
|
||||
</Router>
|
||||
}
|
||||
}
|
||||
|
||||
#[component]
|
||||
fn HomePage() -> impl IntoView {
|
||||
view! {
|
||||
<h1>"Home Page"</h1>
|
||||
}
|
||||
}
|
||||
|
||||
#[component]
|
||||
fn About() -> impl IntoView {
|
||||
view! {
|
||||
<h1>"About"</h1>
|
||||
}
|
||||
}
|
||||
examples/tailwind_csr/Trunk.toml (new file, 3 lines)
@@ -0,0 +1,3 @@
|
||||
[tools]
|
||||
tailwindcss = "4.1.13"
|
||||
|
||||
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Actix integrations for the Leptos web framework."
|
||||
version = { workspace = true }
|
||||
version = "0.8.6"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
@@ -22,10 +22,10 @@ leptos_meta = { workspace = true, features = ["nonce"] }
|
||||
leptos_router = { workspace = true, features = ["ssr"] }
|
||||
server_fn = { workspace = true, features = ["actix-no-default"] }
|
||||
tachys = { workspace = true }
|
||||
serde_json = { workspace = true , default-features = true }
|
||||
serde_json = { workspace = true, default-features = true }
|
||||
parking_lot = { workspace = true, default-features = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
tokio = { features = ["rt", "fs"] , workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
tokio = { features = ["rt", "fs"], workspace = true, default-features = true }
|
||||
send_wrapper = { workspace = true, default-features = true }
|
||||
dashmap = { workspace = true, default-features = true }
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Axum integrations for the Leptos web framework."
|
||||
version = { workspace = true }
|
||||
version = "0.8.6"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
|
||||
@@ -1177,7 +1177,7 @@ where
|
||||
generate_route_list_with_exclusions_and_ssg(app_fn, None).0
|
||||
}
|
||||
|
||||
/// Generates a list of all routes defined in Leptos's Router in your app. We can then use t.clone()his to automatically
|
||||
/// Generates a list of all routes defined in Leptos's Router in your app. We can then use this to automatically
|
||||
/// create routes in Axum's Router without having to use wildcard matching or fallbacks. Takes in your root app Element
|
||||
/// as an argument so it can walk you app tree. This version is tailored to generate Axum compatible paths.
|
||||
#[cfg_attr(
|
||||
@@ -2061,10 +2061,12 @@ where
|
||||
req,
|
||||
|app, chunks, _supports_ooo| {
|
||||
Box::pin(async move {
|
||||
let app = app
|
||||
.to_html_stream_in_order()
|
||||
.collect::<String>()
|
||||
.await;
|
||||
let app = if cfg!(feature = "islands-router") {
|
||||
app.to_html_stream_in_order_branching()
|
||||
} else {
|
||||
app.to_html_stream_in_order()
|
||||
};
|
||||
let app = app.collect::<String>().await;
|
||||
let chunks = chunks();
|
||||
Box::pin(once(async move { app }).chain(chunks))
|
||||
as PinnedStream<String>
|
||||
|
||||
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Utilities to help build server integrations for the Leptos web framework."
|
||||
version = { workspace = true }
|
||||
version = "0.8.6"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
|
||||
@@ -68,7 +68,8 @@ pub trait ExtendResponse: Sized {
|
||||
let nonce =
|
||||
use_nonce().map(|n| n.to_string()).unwrap_or_default();
|
||||
if let Some(manifest) = use_context::<WasmSplitManifest>() {
|
||||
let (pkg_path, manifest) = &*manifest.0.read_value();
|
||||
let (pkg_path, manifest, wasm_split_file) =
|
||||
&*manifest.0.read_value();
|
||||
let prefetches = prefetches.0.read_value();
|
||||
|
||||
let all_prefetches = prefetches.iter().flat_map(|key| {
|
||||
@@ -90,7 +91,7 @@ pub trait ExtendResponse: Sized {
|
||||
.to_html();
|
||||
}
|
||||
_ = view! {
|
||||
<Link rel="modulepreload" href=format!("{pkg_path}/__wasm_split.js") crossorigin=nonce/>
|
||||
<Link rel="modulepreload" href=format!("{pkg_path}/{wasm_split_file}") crossorigin=nonce/>
|
||||
}
|
||||
.to_html();
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos"
|
||||
version = { workspace = true }
|
||||
version = "0.8.12"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -57,7 +57,10 @@ serde_qs = { workspace = true, default-features = true }
|
||||
slotmap = { workspace = true, default-features = true }
|
||||
futures = { workspace = true, default-features = true }
|
||||
send_wrapper = { workspace = true, default-features = true }
|
||||
wasm_split_helpers.workspace = true
|
||||
wasm_split_helpers = { workspace = true, default-features = true }
|
||||
subsecond = { workspace = true, default-features = true, optional = true }
|
||||
dioxus-cli-config = { workspace = true, default-features = true, optional = true }
|
||||
dioxus-devtools = { workspace = true, default-features = true, optional = true }
|
||||
|
||||
[features]
|
||||
hydration = [
|
||||
@@ -85,6 +88,12 @@ ssr = [
|
||||
]
|
||||
nightly = ["leptos_macro/nightly", "reactive_graph/nightly", "tachys/nightly"]
|
||||
rkyv = ["server_fn/rkyv", "leptos_server/rkyv"]
|
||||
bitcode = ["server_fn/bitcode"]
|
||||
serde-lite = ["server_fn/serde-lite", "leptos_server/serde-lite"]
|
||||
cbor = ["server_fn/cbor"]
|
||||
msgpack = ["server_fn/msgpack"]
|
||||
postcard = ["server_fn/postcard"]
|
||||
multipart = ["server_fn/multipart"]
|
||||
tracing = [
|
||||
"dep:tracing",
|
||||
"reactive_graph/tracing",
|
||||
@@ -102,6 +111,16 @@ trace-component-props = [
|
||||
]
|
||||
delegation = ["tachys/delegation"]
|
||||
islands-router = ["tachys/mark_branches"]
|
||||
subsecond = [
|
||||
"reactive_graph/subsecond",
|
||||
"dep:subsecond",
|
||||
"dep:dioxus-cli-config",
|
||||
"dep:dioxus-devtools",
|
||||
"web-sys/Location",
|
||||
"web-sys/MessageEvent",
|
||||
"web-sys/WebSocket",
|
||||
"web-sys/Window",
|
||||
]
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { features = [
|
||||
@@ -134,13 +153,18 @@ denylist = [
|
||||
"trace-component-props",
|
||||
"spin",
|
||||
"islands",
|
||||
"bitcode",
|
||||
"serde-lite",
|
||||
"cbor",
|
||||
"msgpack",
|
||||
"postcard",
|
||||
"multipart",
|
||||
]
|
||||
skip_feature_sets = [
|
||||
["csr", "ssr"],
|
||||
["csr", "hydrate"],
|
||||
["ssr", "hydrate"],
|
||||
["serde", "serde-lite"],
|
||||
["serde-lite", "miniserde"],
|
||||
["serde", "miniserde"],
|
||||
["serde", "rkyv"],
|
||||
["miniserde", "rkyv"],
|
||||
|
||||
@@ -262,6 +262,16 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<C> From<View<C>> for ViewFn
|
||||
where
|
||||
C: Clone + Send + Sync + 'static,
|
||||
View<C>: IntoAny,
|
||||
{
|
||||
fn from(value: View<C>) -> Self {
|
||||
Self(Arc::new(move || value.clone().into_any()))
|
||||
}
|
||||
}
|
||||
|
||||
impl ViewFn {
|
||||
/// Execute the wrapped function
|
||||
pub fn run(&self) -> AnyView {
|
||||
@@ -289,6 +299,16 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<C> From<View<C>> for ViewFnOnce
|
||||
where
|
||||
C: Send + Sync + 'static,
|
||||
View<C>: IntoAny,
|
||||
{
|
||||
fn from(value: View<C>) -> Self {
|
||||
Self(Box::new(move || value.into_any()))
|
||||
}
|
||||
}
|
||||
|
||||
impl ViewFnOnce {
|
||||
/// Execute the wrapped function
|
||||
pub fn run(self) -> AnyView {
|
||||
|
||||
@@ -65,16 +65,56 @@ pub fn HydrationScripts(
|
||||
if let Some(splits) = SPLIT_MANIFEST.get_or_init(|| {
|
||||
let root = root.clone().unwrap_or_default();
|
||||
|
||||
let (wasm_split_js, wasm_split_manifest) = if options.hash_files {
|
||||
let hash_path = std::env::current_exe()
|
||||
.map(|path| {
|
||||
path.parent().map(|p| p.to_path_buf()).unwrap_or_default()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
.join(options.hash_file.as_ref());
|
||||
let hashes = std::fs::read_to_string(&hash_path)
|
||||
.expect("failed to read hash file");
|
||||
|
||||
let mut split =
|
||||
"__wasm_split.______________________.js".to_string();
|
||||
let mut manifest = "__wasm_split_manifest.json".to_string();
|
||||
for line in hashes.lines() {
|
||||
let line = line.trim();
|
||||
if !line.is_empty() {
|
||||
if let Some((file, hash)) = line.split_once(':') {
|
||||
if file == "manifest" {
|
||||
manifest.clear();
|
||||
manifest.push_str("__wasm_split_manifest.");
|
||||
manifest.push_str(hash.trim());
|
||||
manifest.push_str(".json");
|
||||
}
|
||||
if file == "split" {
|
||||
split.clear();
|
||||
split.push_str("__wasm_split.");
|
||||
split.push_str(hash.trim());
|
||||
split.push_str(".js");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
(split, manifest)
|
||||
} else {
|
||||
(
|
||||
"__wasm_split.______________________.js".to_string(),
|
||||
"__wasm_split_manifest.json".to_string(),
|
||||
)
|
||||
};
|
||||
|
||||
let site_dir = &options.site_root;
|
||||
let pkg_dir = &options.site_pkg_dir;
|
||||
let path = PathBuf::from(site_dir.to_string());
|
||||
let path = path
|
||||
.join(pkg_dir.to_string())
|
||||
.join("__wasm_split_manifest.json");
|
||||
let path = path.join(pkg_dir.to_string()).join(wasm_split_manifest);
|
||||
let file = std::fs::read_to_string(path).ok()?;
|
||||
|
||||
let manifest = WasmSplitManifest(ArcStoredValue::new((
|
||||
format!("{root}/{pkg_dir}"),
|
||||
serde_json::from_str(&file).expect("could not read manifest file"),
|
||||
wasm_split_js,
|
||||
)));
|
||||
|
||||
Some(manifest)
|
||||
|
||||
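For reference, the hash file consumed in the hunk above is read as simple `name:hash` lines, where the `split` entry renames `__wasm_split.<hash>.js` and the `manifest` entry renames `__wasm_split_manifest.<hash>.json`. A hypothetical example of its contents (hash values invented for illustration):

```
split:d41d8cd98f00b204
manifest:9e107d9d372bb682
```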
@@ -1,3 +1,7 @@
|
||||
if (window.location.protocol === 'https:') {
|
||||
protocol = 'wss://';
|
||||
}
|
||||
|
||||
let host = window.location.hostname;
|
||||
let ws = new WebSocket(`${protocol}${host}:${reload_port}/live_reload`);
|
||||
ws.onmessage = (ev) => {
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
#![deny(missing_docs)]
|
||||
#![forbid(unsafe_code)]
|
||||
|
||||
//! # About Leptos
|
||||
//!
|
||||
@@ -85,12 +84,22 @@
|
||||
//! # Feature Flags
|
||||
//!
|
||||
//! - **`nightly`**: On `nightly` Rust, enables the function-call syntax for signal getters and setters.
|
||||
//! Also enables some experimental optimizations that improve the handling of static strings and
|
||||
//! the performance of the `template! {}` macro.
|
||||
//! - **`csr`** Client-side rendering: Generate DOM nodes in the browser.
|
||||
//! - **`ssr`** Server-side rendering: Generate an HTML string (typically on the server).
|
||||
//! - **`islands`** Activates “islands mode,” in which components are not made interactive on the
|
||||
//! client unless they use the `#[island]` macro.
|
||||
//! - **`hydrate`** Hydration: use this to add interactivity to an SSRed Leptos app.
|
||||
//! - **`rkyv`** In SSR/hydrate mode, uses [`rkyv`](https://docs.rs/rkyv/latest/rkyv/) to serialize resources and send them
|
||||
//! from the server to the client.
|
||||
//! - **`nonce`** Adds support for nonces to be added as part of a Content Security Policy.
|
||||
//! - **`rkyv`** In SSR/hydrate mode, enables using [`rkyv`](https://docs.rs/rkyv/latest/rkyv/) to serialize resources.
|
||||
//! - **`tracing`** Adds support for [`tracing`](https://docs.rs/tracing/latest/tracing/).
|
||||
//! - **`trace-component-props`** Adds `tracing` support for component props.
|
||||
//! - **`delegation`** Uses event delegation rather than the browser’s native event handling
|
||||
//! system. (This improves the performance of creating large numbers of elements simultaneously,
|
||||
//! in exchange for occasional edge cases in which events behave differently from native browser
|
||||
//! events.)
|
||||
//! - **`rustls`** Use `rustls` for server functions.
|
||||
//!
|
||||
//! **Important Note:** You must enable one of `csr`, `hydrate`, or `ssr` to tell Leptos
|
||||
//! which mode your app is operating in. You should only enable one of these per build target,
|
||||
@@ -215,12 +224,15 @@ pub mod error {
|
||||
|
||||
/// Control-flow components like `<Show>`, `<For>`, and `<Await>`.
|
||||
pub mod control_flow {
|
||||
pub use crate::{animated_show::*, await_::*, for_loop::*, show::*};
|
||||
pub use crate::{
|
||||
animated_show::*, await_::*, for_loop::*, show::*, show_let::*,
|
||||
};
|
||||
}
|
||||
mod animated_show;
|
||||
mod await_;
|
||||
mod for_loop;
|
||||
mod show;
|
||||
mod show_let;
|
||||
|
||||
/// A component that allows rendering a component somewhere else.
|
||||
pub mod portal;
|
||||
@@ -293,20 +305,31 @@ pub use tachys::mathml as math;
|
||||
#[doc(inline)]
|
||||
pub use tachys::svg;
|
||||
|
||||
#[cfg(feature = "subsecond")]
|
||||
/// Utilities for using binary hot-patching with [`subsecond`].
|
||||
pub mod subsecond;
|
||||
|
||||
/// Utilities for simple isomorphic logging to the console or terminal.
|
||||
pub mod logging {
|
||||
pub use leptos_dom::{debug_warn, error, log, warn};
|
||||
pub use leptos_dom::{
|
||||
debug_error, debug_log, debug_warn, error, log, warn,
|
||||
};
|
||||
}
|
||||
|
||||
/// Utilities for working with asynchronous tasks.
|
||||
pub mod task {
|
||||
use any_spawner::Executor;
|
||||
use reactive_graph::computed::ScopedFuture;
|
||||
use std::future::Future;
|
||||
|
||||
/// Spawns a thread-safe [`Future`].
|
||||
///
|
||||
/// This will be run with the current reactive owner and observer using a [`ScopedFuture`].
|
||||
#[track_caller]
|
||||
#[inline(always)]
|
||||
pub fn spawn(fut: impl Future<Output = ()> + Send + 'static) {
|
||||
let fut = ScopedFuture::new(fut);
|
||||
|
||||
#[cfg(not(target_family = "wasm"))]
|
||||
Executor::spawn(fut);
|
||||
|
||||
@@ -342,12 +365,14 @@ pub use serde_json;
|
||||
pub use tracing;
|
||||
#[doc(hidden)]
|
||||
pub use wasm_bindgen;
|
||||
pub use wasm_split_helpers;
|
||||
#[doc(hidden)]
|
||||
pub use wasm_split_helpers as wasm_split;
|
||||
#[doc(hidden)]
|
||||
pub use web_sys;
|
||||
|
||||
#[doc(hidden)]
|
||||
pub mod __reexports {
|
||||
pub use send_wrapper;
|
||||
pub use wasm_bindgen_futures;
|
||||
}
|
||||
|
||||
@@ -373,7 +398,8 @@ pub fn prefetch_lazy_fn_on_server(id: &'static str) {
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct WasmSplitManifest(
|
||||
pub reactive_graph::owner::ArcStoredValue<(
|
||||
String,
|
||||
std::collections::HashMap<String, Vec<String>>,
|
||||
String, // the pkg root
|
||||
std::collections::HashMap<String, Vec<String>>, // preloads
|
||||
String, // the name of the __wasm_split.js file
|
||||
)>,
|
||||
);
|
||||
|
||||
leptos/src/show_let.rs (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
use crate::{children::ViewFn, IntoView};
|
||||
use leptos_macro::component;
|
||||
use reactive_graph::traits::Get;
|
||||
use std::{marker::PhantomData, sync::Arc};
|
||||
use tachys::either::Either;
|
||||
|
||||
/// Like `<Show>` but for `Option`. This is a shortcut for
|
||||
///
|
||||
/// ```ignore
|
||||
/// value.map(|value| {
|
||||
/// view! { ... }
|
||||
/// })
|
||||
/// ```
|
||||
///
|
||||
/// If you specify a `fallback`, it is equivalent to
|
||||
///
|
||||
/// ```ignore
|
||||
/// value
|
||||
/// .map(
|
||||
/// |value| children(value),
|
||||
/// )
|
||||
/// .unwrap_or_else(fallback)
|
||||
/// ```
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```
|
||||
/// # use leptos::prelude::*;
|
||||
/// #
|
||||
/// # #[component]
|
||||
/// # pub fn Example() -> impl IntoView {
|
||||
/// let (opt_value, set_opt_value) = signal(None::<i32>);
|
||||
///
|
||||
/// view! {
|
||||
/// <ShowLet some=opt_value let:value>
|
||||
/// "We have a value: " {value}
|
||||
/// </ShowLet>
|
||||
/// }
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// You can also specify a fallback:
|
||||
/// ```
|
||||
/// # use leptos::prelude::*;
|
||||
/// #
|
||||
/// # #[component]
|
||||
/// # pub fn Example() -> impl IntoView {
|
||||
/// let (opt_value, set_opt_value) = signal(None::<i32>);
|
||||
///
|
||||
/// view! {
|
||||
/// <ShowLet some=opt_value let:value fallback=|| "Got nothing">
|
||||
/// "We have a value: " {value}
|
||||
/// </ShowLet>
|
||||
/// }
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// In addition to signals you can also use a closure that returns an `Option`:
|
||||
///
|
||||
/// ```
|
||||
/// # use leptos::prelude::*;
|
||||
/// #
|
||||
/// # #[component]
|
||||
/// # pub fn Example() -> impl IntoView {
|
||||
/// let (opt_value, set_opt_value) = signal(None::<i32>);
|
||||
///
|
||||
/// view! {
|
||||
/// <ShowLet some=move || opt_value.get().map(|v| v * 2) let:value>
|
||||
/// "We have a value: " {value}
|
||||
/// </ShowLet>
|
||||
/// }
|
||||
/// # }
|
||||
/// ```
|
||||
#[component]
|
||||
pub fn ShowLet<T, ChFn, V, M>(
|
||||
/// The children will be shown whenever `value` is `Some`.
|
||||
///
|
||||
/// They take the inner value as an argument. Use `let:` to bind the value to a variable.
|
||||
children: ChFn,
|
||||
|
||||
/// A signal of type `Option` or a closure that returns an `Option`.
|
||||
/// If the value is `Some`, the children will be shown.
|
||||
/// Otherwise the fallback will be shown, if present.
|
||||
some: impl IntoOptionGetter<T, M>,
|
||||
|
||||
/// A closure that returns what gets rendered when the value is `None`.
|
||||
/// By default this is the empty view.
|
||||
///
|
||||
/// You can think of it as the closure inside `.unwrap_or_else(|| fallback())`.
|
||||
#[prop(optional, into)]
|
||||
fallback: ViewFn,
|
||||
|
||||
/// Marker for generic parameters. Ignore this.
|
||||
#[prop(optional)]
|
||||
_marker: PhantomData<(T, M)>,
|
||||
) -> impl IntoView
|
||||
where
|
||||
ChFn: Fn(T) -> V + Send + Clone + 'static,
|
||||
V: IntoView + 'static,
|
||||
T: 'static,
|
||||
{
|
||||
let getter = some.into_option_getter();
|
||||
|
||||
move || {
|
||||
let children = children.clone();
|
||||
let fallback = fallback.clone();
|
||||
|
||||
getter
|
||||
.run()
|
||||
.map(move |t| Either::Left(children(t)))
|
||||
.unwrap_or_else(move || Either::Right(fallback.run()))
|
||||
}
|
||||
}
|
||||
|
||||
/// Serves as a wrapper for either an `Option` signal or a closure that returns an `Option`.
|
||||
pub struct OptionGetter<T>(Arc<dyn Fn() -> Option<T> + Send + Sync + 'static>);
|
||||
|
||||
impl<T> Clone for OptionGetter<T> {
|
||||
fn clone(&self) -> Self {
|
||||
Self(Arc::clone(&self.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> OptionGetter<T> {
|
||||
/// Runs the getter and returns the result.
|
||||
pub fn run(&self) -> Option<T> {
|
||||
(self.0)()
|
||||
}
|
||||
}
|
||||
|
||||
/// Conversion trait for creating an `OptionGetter` from a closure or a signal.
|
||||
pub trait IntoOptionGetter<T, M> {
|
||||
/// Converts the given value into an `OptionGetter`.
|
||||
fn into_option_getter(self) -> OptionGetter<T>;
|
||||
}
|
||||
|
||||
/// Marker type for creating an `OptionGetter` from a closure.
|
||||
/// Used so that the compiler doesn't complain about double implementations of the trait `IntoOptionGetter`.
|
||||
pub struct FunctionMarker;
|
||||
|
||||
impl<T, F> IntoOptionGetter<T, FunctionMarker> for F
|
||||
where
|
||||
F: Fn() -> Option<T> + Send + Sync + 'static,
|
||||
{
|
||||
fn into_option_getter(self) -> OptionGetter<T> {
|
||||
OptionGetter(Arc::new(self))
|
||||
}
|
||||
}
|
||||
|
||||
/// Marker type for creating an `OptionGetter` from a signal.
|
||||
/// Used so that the compiler doesn't complain about double implementations of the trait `IntoOptionGetter`.
|
||||
pub struct SignalMarker;
|
||||
|
||||
impl<T, S> IntoOptionGetter<T, SignalMarker> for S
|
||||
where
|
||||
S: Get<Value = Option<T>> + Clone + Send + Sync + 'static,
|
||||
{
|
||||
fn into_option_getter(self) -> OptionGetter<T> {
|
||||
let cloned = self.clone();
|
||||
OptionGetter(Arc::new(move || cloned.get()))
|
||||
}
|
||||
}
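// Illustrative sketch (not part of the diff): the two marker types let both of
// these call sites resolve without overlapping impls. `opt` is a hypothetical
// ReadSignal<Option<i32>> created with `signal(Some(1))`.
let (opt, _set_opt) = signal(Some(1));
let from_signal = opt.into_option_getter(); // uses the SignalMarker impl
let from_closure = (move || opt.get().map(|n| n * 2)).into_option_getter(); // uses the FunctionMarker impl
assert_eq!(from_signal.run(), Some(1));
assert_eq!(from_closure.run(), Some(2));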
|
||||
leptos/src/subsecond.rs (new file, 62 lines added)
@@ -0,0 +1,62 @@
|
||||
use dioxus_devtools::DevserverMsg;
|
||||
use wasm_bindgen::{prelude::Closure, JsCast};
|
||||
use web_sys::{js_sys::JsString, MessageEvent, WebSocket};
|
||||
|
||||
/// Sets up a WebSocket connection to the `dx` CLI, waiting for incoming hot-patching messages
|
||||
/// and patching the WASM binary appropriately.
|
||||
//
|
||||
// Note: This is a stripped-down version of Dioxus's `make_ws` from `dioxus_web`
|
||||
// It's essentially copy-pasted here because it's not pub there.
|
||||
// Would love to just take a dependency on that to be able to use it and deduplicate.
|
||||
//
|
||||
// https://github.com/DioxusLabs/dioxus/blob/main/packages/web/src/devtools.rs#L36
|
||||
pub fn connect_to_hot_patch_messages() {
|
||||
// Get the location of the devserver, using the current location plus the /_dioxus path
|
||||
// The idea being that the devserver is always reachable at the /_dioxus path behind a proxy
|
||||
let location = web_sys::window().unwrap().location();
|
||||
let url = format!(
|
||||
"{protocol}//{host}/_dioxus?build_id={build_id}",
|
||||
protocol = match location.protocol().unwrap() {
|
||||
prot if prot == "https:" => "wss:",
|
||||
_ => "ws:",
|
||||
},
|
||||
host = location.host().unwrap(),
|
||||
build_id = dioxus_cli_config::build_id(),
|
||||
);
|
||||
|
||||
let ws = WebSocket::new(&url).unwrap();
|
||||
|
||||
ws.set_onmessage(Some(
|
||||
Closure::<dyn FnMut(MessageEvent)>::new(move |e: MessageEvent| {
|
||||
let Ok(text) = e.data().dyn_into::<JsString>() else {
|
||||
return;
|
||||
};
|
||||
|
||||
// The devserver messages have some &'static strs in them, so we need to leak the source string
|
||||
let string: String = text.into();
|
||||
let string = Box::leak(string.into_boxed_str());
|
||||
|
||||
if let Ok(DevserverMsg::HotReload(msg)) =
|
||||
serde_json::from_str::<DevserverMsg>(string)
|
||||
{
|
||||
if let Some(jump_table) = msg.jump_table.as_ref().cloned() {
|
||||
if msg.for_build_id == Some(dioxus_cli_config::build_id()) {
|
||||
let our_pid = if cfg!(target_family = "wasm") {
|
||||
None
|
||||
} else {
|
||||
Some(std::process::id())
|
||||
};
|
||||
|
||||
if msg.for_pid == our_pid {
|
||||
unsafe { subsecond::apply_patch(jump_table) }
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.into_js_value()
|
||||
.as_ref()
|
||||
.unchecked_ref(),
|
||||
));
|
||||
}
|
||||
@@ -32,12 +32,12 @@ use tachys::{
|
||||
};
|
||||
use throw_error::ErrorHookFuture;
|
||||
|
||||
/// If any [`Resource`](leptos_reactive::Resource) is read in the `children` of this
|
||||
/// If any [`Resource`](crate::prelude::Resource) is read in the `children` of this
|
||||
/// component, it will show the `fallback` while they are loading. Once all are resolved,
|
||||
/// it will render the `children`.
|
||||
///
|
||||
/// Each time one of the resources is loading again, it will fall back. To keep the current
|
||||
/// children instead, use [Transition](crate::Transition).
|
||||
/// children instead, use [Transition](crate::prelude::Transition).
|
||||
///
|
||||
/// Note that the `children` will be rendered initially (in order to capture the fact that
|
||||
/// those resources are read under the suspense), so you cannot assume that resources read
|
||||
|
||||
@@ -16,11 +16,11 @@ use reactive_graph::{
|
||||
use slotmap::{DefaultKey, SlotMap};
|
||||
use tachys::reactive_graph::OwnedView;
|
||||
|
||||
/// If any [`Resource`](leptos_reactive::Resource) is read in the `children` of this
|
||||
/// If any [`Resource`](crate::prelude::Resource) is read in the `children` of this
|
||||
/// component, it will show the `fallback` while they are loading. Once all are resolved,
|
||||
/// it will render the `children`.
|
||||
///
|
||||
/// Unlike [`Suspense`](crate::Suspense), this will not fall
|
||||
/// Unlike [`Suspense`](crate::prelude::Suspense), this will not fall
|
||||
/// back to the `fallback` state if there are further changes after the initial load.
|
||||
///
|
||||
/// Note that the `children` will be rendered initially (in order to capture the fact that
|
||||
|
||||
@@ -5,7 +5,7 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Configuration for the Leptos web framework."
|
||||
readme = "../README.md"
|
||||
version = { workspace = true }
|
||||
version = "0.8.7"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
@@ -13,16 +13,24 @@ edition.workspace = true
|
||||
config = { default-features = false, features = [
|
||||
"toml",
|
||||
"convert-case",
|
||||
] , workspace = true }
|
||||
], workspace = true }
|
||||
regex = { workspace = true, default-features = true }
|
||||
serde = { features = ["derive", "rc"] , workspace = true, default-features = true }
|
||||
thiserror = { workspace = true , default-features = true }
|
||||
typed-builder = { workspace = true , default-features = true }
|
||||
serde = { features = [
|
||||
"derive",
|
||||
"rc",
|
||||
], workspace = true, default-features = true }
|
||||
thiserror = { workspace = true, default-features = true }
|
||||
typed-builder = { workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { features = ["rt", "macros"] , workspace = true, default-features = true }
|
||||
tokio = { features = [
|
||||
"rt",
|
||||
"macros",
|
||||
], workspace = true, default-features = true }
|
||||
tempfile = { workspace = true, default-features = true }
|
||||
temp-env = { features = ["async_closure"] , workspace = true, default-features = true }
|
||||
temp-env = { features = [
|
||||
"async_closure",
|
||||
], workspace = true, default-features = true }
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["--generate-link-to-definition"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_dom"
|
||||
version = { workspace = true }
|
||||
version = "0.8.7"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -14,10 +14,10 @@ reactive_graph = { workspace = true }
|
||||
or_poisoned = { workspace = true }
|
||||
js-sys = { workspace = true, default-features = true }
|
||||
send_wrapper = { workspace = true, default-features = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true , default-features = true }
|
||||
serde_json = { optional = true , workspace = true, default-features = true }
|
||||
serde = { optional = true , workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, default-features = true }
|
||||
serde_json = { optional = true, workspace = true, default-features = true }
|
||||
serde = { optional = true, workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
leptos = { path = "../leptos" }
|
||||
|
||||
@@ -258,15 +258,7 @@ pub fn request_idle_callback_with_handle(
|
||||
///
|
||||
/// <div class="warning">The task is called outside of the ownership tree; this means that if you want to access, for example, the context, you need to reestablish the owner.</div>
|
||||
pub fn queue_microtask(task: impl FnOnce() + 'static) {
|
||||
use js_sys::{Function, Reflect};
|
||||
|
||||
let task = Closure::once_into_js(task);
|
||||
let window = web_sys::window().expect("window not available");
|
||||
let queue_microtask =
|
||||
Reflect::get(&window, &JsValue::from_str("queueMicrotask"))
|
||||
.expect("queueMicrotask not available");
|
||||
let queue_microtask = queue_microtask.unchecked_into::<Function>();
|
||||
_ = queue_microtask.call1(&JsValue::UNDEFINED, &task);
|
||||
tachys::renderer::dom::queue_microtask(task);
|
||||
}
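// Illustrative sketch (not part of the diff): because the task runs outside the
// ownership tree, capture the current `Owner` and re-enter it when the task
// needs context. `MyContext` is a hypothetical context type.
let owner = Owner::current().expect("called within a reactive owner");
queue_microtask(move || {
    owner.with(|| {
        if let Some(_ctx) = use_context::<MyContext>() {
            // context is reachable again here
        }
    });
});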
|
||||
|
||||
/// Handle that is generated by [set_timeout_with_handle] and can be used to clear the timeout.
|
||||
@@ -471,7 +463,7 @@ pub fn set_interval_with_handle(
|
||||
|
||||
#[inline(never)]
|
||||
fn si(
|
||||
cb: Box<dyn Fn()>,
|
||||
cb: Box<dyn FnMut()>,
|
||||
duration: Duration,
|
||||
) -> Result<IntervalHandle, JsValue> {
|
||||
let cb = Closure::wrap(cb).into_js_value();
|
||||
@@ -593,7 +585,8 @@ impl WindowListenerHandle {
|
||||
}
|
||||
}
|
||||
|
||||
fn is_server() -> bool {
|
||||
/// Returns `true` if the current environment is a server.
|
||||
pub fn is_server() -> bool {
|
||||
#[cfg(feature = "hydration")]
|
||||
{
|
||||
Owner::current_shared_context()
|
||||
@@ -605,3 +598,8 @@ fn is_server() -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the current environment is a browser.
|
||||
pub fn is_browser() -> bool {
|
||||
!is_server()
|
||||
}
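// Illustrative sketch (not part of the diff): branching on the environment now
// that these helpers are public, e.g. to avoid DOM access during server rendering.
if is_browser() {
    // client-only work: web_sys APIs, element measurement, etc.
} else {
    // server-side path: skip anything that needs a real DOM
}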
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_hot_reload"
|
||||
version = { workspace = true }
|
||||
version = "0.8.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -11,17 +11,20 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true, default-features = true }
|
||||
serde = { features = ["derive"] , workspace = true, default-features = true }
|
||||
serde = { features = ["derive"], workspace = true, default-features = true }
|
||||
syn = { features = [
|
||||
"full",
|
||||
"parsing",
|
||||
"extra-traits",
|
||||
"visit",
|
||||
"printing",
|
||||
] , workspace = true, default-features = true }
|
||||
], workspace = true, default-features = true }
|
||||
quote = { workspace = true, default-features = true }
|
||||
rstml = { workspace = true, default-features = true }
|
||||
proc-macro2 = { features = ["span-locations", "nightly"] , workspace = true, default-features = true }
|
||||
proc-macro2 = { features = [
|
||||
"span-locations",
|
||||
"nightly",
|
||||
], workspace = true, default-features = true }
|
||||
parking_lot = { workspace = true, default-features = true }
|
||||
walkdir = { workspace = true, default-features = true }
|
||||
camino = { workspace = true, default-features = true }
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_macro"
|
||||
version = { workspace = true }
|
||||
version = "0.8.11"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -13,26 +13,28 @@ edition.workspace = true
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
attribute-derive = { features = ["syn-full"] , workspace = true, default-features = true }
|
||||
attribute-derive = { features = [
|
||||
"syn-full",
|
||||
], workspace = true, default-features = true }
|
||||
cfg-if = { workspace = true, default-features = true }
|
||||
html-escape = { workspace = true, default-features = true }
|
||||
itertools = { workspace = true , default-features = true }
|
||||
itertools = { workspace = true, default-features = true }
|
||||
prettyplease = { workspace = true, default-features = true }
|
||||
proc-macro-error2 = { default-features = false , workspace = true }
|
||||
proc-macro-error2 = { default-features = false, workspace = true }
|
||||
proc-macro2 = { workspace = true, default-features = true }
|
||||
quote = { workspace = true, default-features = true }
|
||||
syn = { features = ["full"] , workspace = true, default-features = true }
|
||||
syn = { features = ["full"], workspace = true, default-features = true }
|
||||
rstml = { workspace = true, default-features = true }
|
||||
leptos_hot_reload = { workspace = true }
|
||||
server_fn_macro = { workspace = true }
|
||||
convert_case = { workspace = true , default-features = true }
|
||||
uuid = { features = ["v4"] , workspace = true, default-features = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
convert_case = { workspace = true, default-features = true }
|
||||
uuid = { features = ["v4"], workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
log = { workspace = true, default-features = true }
|
||||
typed-builder = { workspace = true, default-features = true }
|
||||
trybuild = { workspace = true , default-features = true }
|
||||
trybuild = { workspace = true, default-features = true }
|
||||
leptos = { path = "../leptos" }
|
||||
leptos_router = { path = "../router", features = ["ssr"] }
|
||||
server_fn = { path = "../server_fn", features = ["cbor"] }
|
||||
|
||||
@@ -548,7 +548,7 @@ impl ToTokens for Model {
|
||||
quote! {
|
||||
#[::leptos::prelude::lazy]
|
||||
#[allow(non_snake_case)]
|
||||
async fn #outer_name (el: ::leptos::web_sys::HtmlElement) {
|
||||
fn #outer_name (el: ::leptos::web_sys::HtmlElement) {
|
||||
#hydrate_fn_inner
|
||||
}
|
||||
|
||||
@@ -1360,7 +1360,10 @@ fn prop_to_doc(
|
||||
}
|
||||
|
||||
pub fn unmodified_fn_name_from_fn_name(ident: &Ident) -> Ident {
|
||||
Ident::new(&format!("__{ident}"), ident.span())
|
||||
Ident::new(
|
||||
&format!("__component_{}", ident.to_string().to_case(Snake)),
|
||||
ident.span(),
|
||||
)
|
||||
}
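// Illustrative note (not part of the diff): with this change, the hidden copy of
// a component function named `MyComponent` is now emitted as
// `__component_my_component` rather than `__MyComponent`, keeping the generated
// identifier snake_case.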
|
||||
|
||||
/// Converts all `impl Trait`s in a function signature to use generic params instead.
|
||||
|
||||
@@ -2,12 +2,12 @@ use convert_case::{Case, Casing};
|
||||
use proc_macro::TokenStream;
|
||||
use proc_macro2::Ident;
|
||||
use proc_macro_error2::abort;
|
||||
use quote::quote;
|
||||
use quote::{format_ident, quote};
|
||||
use std::{
|
||||
hash::{DefaultHasher, Hash, Hasher},
|
||||
mem,
|
||||
};
|
||||
use syn::{parse_macro_input, ItemFn};
|
||||
use syn::{parse_macro_input, parse_quote, ItemFn, ReturnType, Stmt};
|
||||
|
||||
pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
let name = if !args.is_empty() {
|
||||
@@ -16,7 +16,7 @@ pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
None
|
||||
};
|
||||
|
||||
let mut fun = syn::parse::<ItemFn>(s).unwrap_or_else(|e| {
|
||||
let fun = syn::parse::<ItemFn>(s).unwrap_or_else(|e| {
|
||||
abort!(e.span(), "`lazy` can only be used on a function")
|
||||
});
|
||||
|
||||
@@ -47,26 +47,50 @@ pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
|
||||
let is_wasm = cfg!(feature = "csr") || cfg!(feature = "hydrate");
|
||||
if is_wasm {
|
||||
let mut fun = fun;
|
||||
let mut return_wrapper = None;
|
||||
if was_async {
|
||||
fun.sig.asyncness = None;
|
||||
let ty = match &fun.sig.output {
|
||||
ReturnType::Default => quote! { () },
|
||||
ReturnType::Type(_, ty) => quote! { #ty },
|
||||
};
|
||||
let sync_output: ReturnType = parse_quote! {
|
||||
-> ::std::pin::Pin<::std::boxed::Box<dyn ::std::future::Future<Output = #ty> + ::std::marker::Send + ::std::marker::Sync>>
|
||||
};
|
||||
let async_output = mem::replace(&mut fun.sig.output, sync_output);
|
||||
let stmts = mem::take(&mut fun.block.stmts);
|
||||
fun.block.stmts.push(Stmt::Expr(parse_quote! {
|
||||
::std::boxed::Box::pin(::leptos::__reexports::send_wrapper::SendWrapper::new(async move {
|
||||
#( #stmts )*
|
||||
}))
|
||||
}, None));
|
||||
return_wrapper = Some(quote! {
|
||||
return_wrapper(let future = _; { future.await } #async_output),
|
||||
});
|
||||
}
|
||||
let preload_name = format_ident!("__preload_{}", fun.sig.ident);
|
||||
|
||||
quote! {
|
||||
#[::leptos::wasm_split_helpers::wasm_split(#unique_name)]
|
||||
#[::leptos::wasm_split::wasm_split(
|
||||
#unique_name,
|
||||
wasm_split_path = ::leptos::wasm_split,
|
||||
preload(#[doc(hidden)] #[allow(non_snake_case)] #preload_name),
|
||||
#return_wrapper
|
||||
)]
|
||||
#fun
|
||||
}
|
||||
} else {
|
||||
let mut fun = fun;
|
||||
if !was_async {
|
||||
fun.sig.asyncness = Some(Default::default());
|
||||
}
|
||||
|
||||
let statements = &mut fun.block.stmts;
|
||||
let old_statements = mem::take(statements);
|
||||
statements.push(
|
||||
syn::parse(
|
||||
quote! {
|
||||
::leptos::prefetch_lazy_fn_on_server(#unique_name_str);
|
||||
}
|
||||
.into(),
|
||||
)
|
||||
.unwrap(),
|
||||
);
|
||||
statements.push(parse_quote! {
|
||||
::leptos::prefetch_lazy_fn_on_server(#unique_name_str);
|
||||
});
|
||||
statements.extend(old_statements);
|
||||
quote! { #fun }
|
||||
}
|
||||
|
||||
@@ -683,7 +683,11 @@ fn component_macro(
|
||||
let parse_result = syn::parse::<component::Model>(s);
|
||||
|
||||
if let (Ok(ref mut unexpanded), Ok(model)) = (&mut dummy, parse_result) {
|
||||
let expanded = model.is_transparent(is_transparent).is_lazy(is_lazy).with_island(island).into_token_stream();
|
||||
let expanded = model
|
||||
.is_transparent(is_transparent)
|
||||
.is_lazy(is_lazy)
|
||||
.with_island(island)
|
||||
.into_token_stream();
|
||||
if !matches!(unexpanded.vis, Visibility::Public(_)) {
|
||||
unexpanded.vis = Visibility::Public(Pub {
|
||||
span: unexpanded.vis.span(),
|
||||
@@ -696,7 +700,7 @@ fn component_macro(
|
||||
#expanded
|
||||
|
||||
#[doc(hidden)]
|
||||
#[allow(non_snake_case, dead_code, clippy::too_many_arguments, clippy::needless_lifetimes)]
|
||||
#[allow(clippy::too_many_arguments, clippy::needless_lifetimes)]
|
||||
#unexpanded
|
||||
}
|
||||
} else {
|
||||
@@ -705,7 +709,7 @@ fn component_macro(
|
||||
dummy.sig.ident = unmodified_fn_name_from_fn_name(&dummy.sig.ident);
|
||||
quote! {
|
||||
#[doc(hidden)]
|
||||
#[allow(non_snake_case, dead_code, clippy::too_many_arguments, clippy::needless_lifetimes)]
|
||||
#[allow(clippy::too_many_arguments, clippy::needless_lifetimes)]
|
||||
#dummy
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,9 +25,8 @@ use std::{
|
||||
use syn::{
|
||||
punctuated::Pair::{End, Punctuated},
|
||||
spanned::Spanned,
|
||||
Expr,
|
||||
Expr::Tuple,
|
||||
ExprArray, ExprLit, ExprRange, Lit, LitStr, RangeLimits, Stmt,
|
||||
Expr::{self, Tuple},
|
||||
ExprArray, ExprLit, ExprPath, ExprRange, Lit, LitStr, RangeLimits, Stmt,
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
@@ -1679,7 +1678,7 @@ fn attribute_value(
|
||||
}
|
||||
|
||||
// Keep list alphabetized for binary search
|
||||
const TYPED_EVENTS: [&str; 126] = [
|
||||
const TYPED_EVENTS: [&str; 127] = [
|
||||
"DOMContentLoaded",
|
||||
"abort",
|
||||
"afterprint",
|
||||
@@ -1775,6 +1774,7 @@ const TYPED_EVENTS: [&str; 126] = [
|
||||
"reset",
|
||||
"resize",
|
||||
"scroll",
|
||||
"scrollend",
|
||||
"securitypolicyviolation",
|
||||
"seeked",
|
||||
"seeking",
|
||||
@@ -1871,6 +1871,28 @@ pub(crate) fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn full_path_from_tag_name(tag_name: &NodeName) -> Option<ExprPath> {
|
||||
match tag_name {
|
||||
NodeName::Path(path) => Some(path.clone()),
|
||||
NodeName::Block(_) => {
|
||||
let span = tag_name.span();
|
||||
proc_macro_error2::emit_error!(
|
||||
span,
|
||||
"blocks not allowed in tag-name position"
|
||||
);
|
||||
None
|
||||
}
|
||||
_ => {
|
||||
let span = tag_name.span();
|
||||
proc_macro_error2::emit_error!(
|
||||
span,
|
||||
"punctuated names not allowed in slots"
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn directive_call_from_attribute_node(
|
||||
attr: &KeyedAttribute,
|
||||
directive_name: &str,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use super::{
|
||||
component_builder::maybe_optimised_component_children,
|
||||
convert_to_snake_case, ident_from_tag_name,
|
||||
convert_to_snake_case, full_path_from_tag_name,
|
||||
};
|
||||
use crate::view::{fragment_to_tokens, utils::filter_prefixed_attrs, TagType};
|
||||
use proc_macro2::{Ident, TokenStream, TokenTree};
|
||||
@@ -24,7 +24,7 @@ pub(crate) fn slot_to_tokens(
|
||||
node.name().to_string()
|
||||
});
|
||||
|
||||
let component_name = ident_from_tag_name(node.name());
|
||||
let component_path = full_path_from_tag_name(node.name());
|
||||
|
||||
let Some(parent_slots) = parent_slots else {
|
||||
proc_macro_error2::emit_error!(
|
||||
@@ -190,7 +190,7 @@ pub(crate) fn slot_to_tokens(
|
||||
|
||||
let slot = quote_spanned! {node.span()=>
|
||||
{
|
||||
let slot = #component_name::builder()
|
||||
let slot = #component_path::builder()
|
||||
#(#props)*
|
||||
#(#slots)*
|
||||
#children
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_server"
|
||||
version = { workspace = true }
|
||||
version = "0.8.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -11,11 +11,11 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
base64 = { workspace = true, default-features = true }
|
||||
codee = { features = ["json_serde"] , workspace = true, default-features = true }
|
||||
codee = { features = ["json_serde"], workspace = true, default-features = true }
|
||||
hydration_context = { workspace = true }
|
||||
reactive_graph = { workspace = true, features = ["hydration"] }
|
||||
server_fn = { workspace = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
futures = { workspace = true, default-features = true }
|
||||
|
||||
any_spawner = { workspace = true }
|
||||
@@ -25,9 +25,9 @@ send_wrapper = { workspace = true, default-features = true }
|
||||
|
||||
# serialization formats
|
||||
serde = { workspace = true, default-features = true }
|
||||
js-sys = { optional = true , workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, optional = true , default-features = true }
|
||||
serde_json = { workspace = true , default-features = true }
|
||||
js-sys = { optional = true, workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, optional = true, default-features = true }
|
||||
serde_json = { workspace = true, default-features = true }
|
||||
|
||||
[features]
|
||||
ssr = []
|
||||
@@ -44,7 +44,8 @@ denylist = ["tracing"]
|
||||
max_combination_size = 2
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["--generate-link-to-definition"]
|
||||
rustdoc-args = ["--generate-link-to-definition", "--cfg", "docsrs"]
|
||||
all-features = true
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(leptos_debuginfo)'] }
|
||||
|
||||
@@ -386,6 +386,7 @@ T: Send + Sync + 'static,
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-wasm-bindgen")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
|
||||
impl<T> ArcOnceResource<T, JsonSerdeWasmCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -418,6 +419,7 @@ fut: impl Future<Output = T> + Send + 'static
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "miniserde")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
|
||||
impl<T> ArcOnceResource<T, MiniserdeCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -451,6 +453,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-lite")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
|
||||
impl<T> ArcOnceResource<T, SerdeLite<JsonSerdeCodec>>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -484,6 +487,7 @@ fut: impl Future<Output = T> + Send + 'static
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> ArcOnceResource<T, RkyvCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -748,6 +752,7 @@ T: Send + Sync + 'static,
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-wasm-bindgen")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
|
||||
impl<T> OnceResource<T, JsonSerdeWasmCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -780,6 +785,7 @@ fut: impl Future<Output = T> + Send + 'static
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "miniserde")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
|
||||
impl<T> OnceResource<T, MiniserdeCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -813,6 +819,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-lite")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
|
||||
impl<T> OnceResource<T, SerdeLite<JsonSerdeCodec>>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -846,6 +853,7 @@ fut: impl Future<Output = T> + Send + 'static
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> OnceResource<T, RkyvCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
|
||||
@@ -709,6 +709,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> ArcResource<T, RkyvCodec>
|
||||
where
|
||||
RkyvCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -1048,6 +1049,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-wasm-bindgen")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
|
||||
impl<T> Resource<T, JsonSerdeWasmCodec>
|
||||
where
|
||||
JsonSerdeWasmCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -1105,6 +1107,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "miniserde")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
|
||||
impl<T> Resource<T, MiniserdeCodec>
|
||||
where
|
||||
MiniserdeCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -1164,6 +1167,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-lite")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
|
||||
impl<T> Resource<T, SerdeLite<JsonSerdeCodec>>
|
||||
where
|
||||
SerdeLite<JsonSerdeCodec>: Encoder<T> + Decoder<T>,
|
||||
@@ -1222,6 +1226,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> Resource<T, RkyvCodec>
|
||||
where
|
||||
RkyvCodec: Encoder<T> + Decoder<T>,
|
||||
|
||||
@@ -80,6 +80,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-lite")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
|
||||
impl<T> SharedValue<T, SerdeLite<JsonSerdeCodec>>
|
||||
where
|
||||
SerdeLite<JsonSerdeCodec>: Encoder<T> + Decoder<T>,
|
||||
@@ -102,6 +103,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-wasm-bindgen")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
|
||||
impl<T> SharedValue<T, JsonSerdeWasmCodec>
|
||||
where
|
||||
JsonSerdeWasmCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -124,6 +126,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "miniserde")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
|
||||
impl<T> SharedValue<T, MiniserdeCodec>
|
||||
where
|
||||
MiniserdeCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -146,6 +149,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> SharedValue<T, RkyvCodec>
|
||||
where
|
||||
RkyvCodec: Encoder<T> + Decoder<T>,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_meta"
|
||||
version = { workspace = true }
|
||||
version = "0.8.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
|
||||
@@ -15,19 +15,18 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@playwright/test": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
|
||||
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
|
||||
"integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"playwright": "1.44.1"
|
||||
"playwright": "1.56.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
@@ -46,7 +45,6 @@
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
@@ -56,35 +54,33 @@
|
||||
}
|
||||
},
|
||||
"node_modules/playwright": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
|
||||
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
|
||||
"integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"playwright-core": "1.44.1"
|
||||
"playwright-core": "1.56.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
"node": ">=18"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/playwright-core": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
|
||||
"integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"playwright-core": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
@@ -111,12 +107,12 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@playwright/test": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
|
||||
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz",
|
||||
"integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"playwright": "1.44.1"
|
||||
"playwright": "1.56.1"
|
||||
}
|
||||
},
|
||||
"@types/node": {
|
||||
@@ -136,19 +132,19 @@
|
||||
"optional": true
|
||||
},
|
||||
"playwright": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
|
||||
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz",
|
||||
"integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"fsevents": "2.3.2",
|
||||
"playwright-core": "1.44.1"
|
||||
"playwright-core": "1.56.1"
|
||||
}
|
||||
},
|
||||
"playwright-core": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"version": "1.56.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz",
|
||||
"integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==",
|
||||
"dev": true
|
||||
},
|
||||
"typescript": {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_graph"
|
||||
version = "0.2.5"
|
||||
version = "0.2.9"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -27,6 +27,8 @@ async-lock = { workspace = true, default-features = true }
|
||||
send_wrapper = { features = [
|
||||
"futures",
|
||||
], workspace = true, default-features = true }
|
||||
subsecond = { workspace = true, default-features = true, optional = true }
|
||||
indexmap = { workspace = true, default-features = true }
|
||||
|
||||
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
|
||||
web-sys = { version = "0.3.77", features = ["console"] }
|
||||
@@ -50,6 +52,7 @@ hydration = ["dep:hydration_context"]
|
||||
effects = [
|
||||
] # whether to run effects: should be disabled for something like server rendering
|
||||
sandboxed-arenas = []
|
||||
subsecond = ["dep:subsecond"]
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use crate::{
|
||||
computed::{ArcMemo, Memo},
|
||||
computed::{ArcMemo, Memo, ScopedFuture},
|
||||
diagnostics::is_suppressing_resource_load,
|
||||
owner::{ArcStoredValue, ArenaItem},
|
||||
graph::untrack,
|
||||
owner::{ArcStoredValue, ArenaItem, Owner},
|
||||
send_wrapper_ext::SendOption,
|
||||
signal::{ArcMappedSignal, ArcRwSignal, MappedSignal, RwSignal},
|
||||
traits::{DefinedAt, Dispose, Get, GetUntracked, GetValue, Update, Write},
|
||||
@@ -199,13 +200,18 @@ where
|
||||
I: Send + Sync,
|
||||
O: Send + Sync,
|
||||
{
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
ArcAction {
|
||||
in_flight: ArcRwSignal::new(0),
|
||||
input: ArcRwSignal::new(SendOption::new(None)),
|
||||
value: ArcRwSignal::new(SendOption::new(value)),
|
||||
version: Default::default(),
|
||||
dispatched: Default::default(),
|
||||
action_fn: Arc::new(move |input| Box::pin(action_fn(input))),
|
||||
action_fn: Arc::new(move |input| {
|
||||
Box::pin(owner.with(|| {
|
||||
ScopedFuture::new_untracked(untrack(|| action_fn(input)))
|
||||
}))
|
||||
}),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
@@ -370,6 +376,7 @@ where
|
||||
F: Fn(&I) -> Fu + 'static,
|
||||
Fu: Future<Output = O> + 'static,
|
||||
{
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
let action_fn = SendWrapper::new(action_fn);
|
||||
ArcAction {
|
||||
in_flight: ArcRwSignal::new(0),
|
||||
@@ -378,7 +385,9 @@ where
|
||||
version: Default::default(),
|
||||
dispatched: Default::default(),
|
||||
action_fn: Arc::new(move |input| {
|
||||
Box::pin(SendWrapper::new(action_fn(input)))
|
||||
Box::pin(SendWrapper::new(owner.with(|| {
|
||||
ScopedFuture::new_untracked(untrack(|| action_fn(input)))
|
||||
})))
|
||||
}),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
|
||||
@@ -521,9 +521,10 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
{
|
||||
let fun = move || {
|
||||
let fut = fun();
|
||||
let fut = ScopedFuture::new_untracked(async move {
|
||||
SendOption::new(Some(fut.await))
|
||||
});
|
||||
let fut =
|
||||
ScopedFuture::new_untracked_with_diagnostics(async move {
|
||||
SendOption::new(Some(fut.await))
|
||||
});
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
let fut = Sandboxed::new(fut);
|
||||
fut
|
||||
|
||||
@@ -54,11 +54,55 @@ impl<Fut> ScopedFuture<Fut> {
|
||||
fut,
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn new_untracked_with_diagnostics(
|
||||
fut: Fut,
|
||||
) -> ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
ScopedFutureUntrackedWithDiagnostics {
|
||||
owner,
|
||||
observer: None,
|
||||
fut,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fut: Future> Future for ScopedFuture<Fut> {
|
||||
type Output = Fut::Output;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
this.owner.with(|| {
|
||||
#[cfg(debug_assertions)]
|
||||
let _maybe_guard = if this.observer.is_none() {
|
||||
Some(crate::diagnostics::SpecialNonReactiveZone::enter())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
this.observer.with_observer(|| this.fut.poll(cx))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pin_project! {
|
||||
/// A [`Future`] wrapper that sets the [`Owner`] and [`Observer`] before polling the inner
|
||||
/// `Future`, output of [`ScopedFuture::new_untracked_with_diagnostics`].
|
||||
///
|
||||
/// In leptos 0.9 this will be replaced with `ScopedFuture` itself.
|
||||
#[derive(Clone)]
|
||||
pub struct ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
owner: Owner,
|
||||
observer: Option<AnySubscriber>,
|
||||
#[pin]
|
||||
fut: Fut,
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fut: Future> Future for ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
type Output = Fut::Output;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
this.owner
|
||||
|
||||
@@ -65,6 +65,7 @@ impl Dispose for ImmediateEffect {
|
||||
|
||||
impl ImmediateEffect {
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this requires a `Fn` function because it might recurse.
|
||||
/// Use [Self::new_mut] to pass a `FnMut` function; it will panic on recursion.
|
||||
@@ -82,6 +83,7 @@ impl ImmediateEffect {
|
||||
Self { inner: Some(inner) }
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics on recursion or if triggered in parallel. Also see [Self::new]
|
||||
@@ -93,8 +95,10 @@ impl ImmediateEffect {
|
||||
Self::new(move || fun.try_lock().expect(MSG)())
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this requires a `Fn` function because it might recurse.
|
||||
/// Use [Self::new_mut_scoped] to pass a `FnMut` function; it will panic on recursion.
|
||||
/// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
|
||||
#[track_caller]
|
||||
pub fn new_scoped(fun: impl Fn() + Send + Sync + 'static) {
|
||||
@@ -102,6 +106,19 @@ impl ImmediateEffect {
|
||||
|
||||
on_cleanup(move || effect.dispose());
|
||||
}
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
/// (Unless [batch] is used.)
|
||||
///
|
||||
/// NOTE: this effect is automatically cleaned up when the current owner is cleared or disposed.
|
||||
///
|
||||
/// # Panics
|
||||
/// Panics on recursion or if triggered in parallel. Also see [Self::new_scoped]
|
||||
#[track_caller]
|
||||
pub fn new_mut_scoped(fun: impl FnMut() + Send + Sync + 'static) {
|
||||
let effect = Self::new_mut(fun);
|
||||
|
||||
on_cleanup(move || effect.dispose());
|
||||
}
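// Illustrative sketch (not part of the diff): a scoped immediate effect that is
// disposed automatically with its owner. The signal `count` is hypothetical.
ImmediateEffect::new_scoped(move || {
    // runs immediately, and again whenever `count` changes, until the owner is disposed
    let _ = count.get();
});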
|
||||
|
||||
/// Creates a new effect which runs immediately, then again as soon as any tracked signal changes.
|
||||
///
|
||||
@@ -130,6 +147,41 @@ impl DefinedAt for ImmediateEffect {
|
||||
}
|
||||
}
|
||||
|
||||
/// Defers any [ImmediateEffect]s from running until the end of the function.
|
||||
///
|
||||
/// NOTE: this affects only [ImmediateEffect]s, not other effects.
|
||||
///
|
||||
/// NOTE: this is rarely needed, but it is useful for example when multiple signals
|
||||
/// need to be updated atomically (for example a double-bound signal tree).
|
||||
pub fn batch<T>(f: impl FnOnce() -> T) -> T {
|
||||
struct ExecuteOnDrop;
|
||||
impl Drop for ExecuteOnDrop {
|
||||
fn drop(&mut self) {
|
||||
let effects = {
|
||||
let mut batch = inner::BATCH.write().or_poisoned();
|
||||
batch.take().unwrap().into_inner().expect("lock poisoned")
|
||||
};
|
||||
// TODO: Should we skip the effects if it's panicking?
|
||||
for effect in effects {
|
||||
effect.update_if_necessary();
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut execute_on_drop = None;
|
||||
{
|
||||
let mut batch = inner::BATCH.write().or_poisoned();
|
||||
if batch.is_none() {
|
||||
execute_on_drop = Some(ExecuteOnDrop);
|
||||
} else {
|
||||
// Nested batching has no effect.
|
||||
}
|
||||
*batch = Some(batch.take().unwrap_or_default());
|
||||
}
|
||||
let ret = f();
|
||||
drop(execute_on_drop);
|
||||
ret
|
||||
}
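// Illustrative sketch (not part of the diff): two writes observed atomically by
// any ImmediateEffect tracking both signals. `set_width`/`set_height` are hypothetical.
batch(|| {
    set_width.set(800);
    set_height.set(600);
});
// An ImmediateEffect reading both signals runs once here, after both updates,
// rather than once per write with an inconsistent intermediate pair.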
|
||||
|
||||
mod inner {
|
||||
use crate::{
|
||||
graph::{
|
||||
@@ -140,6 +192,7 @@ mod inner {
|
||||
owner::Owner,
|
||||
traits::DefinedAt,
|
||||
};
|
||||
use indexmap::IndexSet;
|
||||
use or_poisoned::OrPoisoned;
|
||||
use std::{
|
||||
panic::Location,
|
||||
@@ -147,6 +200,11 @@ mod inner {
|
||||
thread::{self, ThreadId},
|
||||
};
|
||||
|
||||
    /// Only the [super::batch] function ever writes to the outer RwLock,
    /// while the effects write to the inner one.
|
||||
pub(super) static BATCH: RwLock<Option<RwLock<IndexSet<AnySubscriber>>>> =
|
||||
RwLock::new(None);
|
||||
|
||||
/// Handles subscription logic for effects.
|
||||
///
|
||||
/// To handle parallelism and recursion we assign ordered (1..) ids to each run.
|
||||
@@ -202,6 +260,8 @@ mod inner {
|
||||
fun: impl Fn() + Send + Sync + 'static,
|
||||
) -> Arc<RwLock<EffectInner>> {
|
||||
let owner = Owner::new();
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
let defined_at = Location::caller();
|
||||
|
||||
Arc::new_cyclic(|weak| {
|
||||
let any_subscriber = AnySubscriber(
|
||||
@@ -211,7 +271,7 @@ mod inner {
|
||||
|
||||
RwLock::new(EffectInner {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
defined_at,
|
||||
owner,
|
||||
state: ReactiveNodeState::Dirty,
|
||||
run_count_start: 0,
|
||||
@@ -260,6 +320,17 @@ mod inner {
|
||||
ReactiveNodeState::Dirty => true,
|
||||
};
|
||||
|
||||
{
|
||||
if let Some(batch) = &*BATCH.read().or_poisoned() {
|
||||
let mut batch = batch.write().or_poisoned();
|
||||
let subscriber =
|
||||
self.read().or_poisoned().any_subscriber.clone();
|
||||
|
||||
batch.insert(subscriber);
|
||||
return needs_update;
|
||||
}
|
||||
}
|
||||
|
||||
if needs_update {
|
||||
let mut guard = self.write().or_poisoned();
|
||||
|
||||
|
||||
@@ -9,6 +9,8 @@ use crate::{
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use or_poisoned::OrPoisoned;
|
||||
#[cfg(feature = "subsecond")]
|
||||
use std::sync::Mutex;
|
||||
use std::{
|
||||
fmt::Debug,
|
||||
future::{Future, IntoFuture},
|
||||
@@ -49,13 +51,39 @@ impl<T> Debug for RenderEffect<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "subsecond")]
|
||||
type CurrentHotPtr = Box<dyn Fn() -> Option<subsecond::HotFnPtr> + Send + Sync>;
|
||||
|
||||
impl<T> RenderEffect<T>
|
||||
where
|
||||
T: 'static,
|
||||
{
|
||||
/// Creates a new render effect, which immediately runs `fun`.
|
||||
pub fn new(fun: impl FnMut(Option<T>) -> T + 'static) -> Self {
|
||||
Self::new_with_value_erased(Box::new(fun), None)
|
||||
#[cfg(feature = "subsecond")]
|
||||
let (hot_fn_ptr, fun) = {
|
||||
let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
|
||||
(
|
||||
{
|
||||
let fun = Arc::downgrade(&fun);
|
||||
let wrapped = send_wrapper::SendWrapper::new(move || {
|
||||
fun.upgrade()
|
||||
.map(|n| n.lock().or_poisoned().ptr_address())
|
||||
});
|
||||
// it's not redundant, it's due to the SendWrapper deref
|
||||
#[allow(clippy::redundant_closure)]
|
||||
Box::new(move || wrapped())
|
||||
},
|
||||
move |prev| fun.lock().or_poisoned().call((prev,)),
|
||||
)
|
||||
};
|
||||
|
||||
Self::new_with_value_erased(
|
||||
Box::new(fun),
|
||||
None,
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr,
|
||||
)
|
||||
}
|
||||
|
||||
/// Creates a new render effect with an initial value.
|
||||
@@ -63,7 +91,30 @@ where
|
||||
fun: impl FnMut(Option<T>) -> T + 'static,
|
||||
initial_value: Option<T>,
|
||||
) -> Self {
|
||||
Self::new_with_value_erased(Box::new(fun), initial_value)
|
||||
#[cfg(feature = "subsecond")]
|
||||
let (hot_fn_ptr, fun) = {
|
||||
let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
|
||||
(
|
||||
{
|
||||
let fun = Arc::downgrade(&fun);
|
||||
let wrapped = send_wrapper::SendWrapper::new(move || {
|
||||
fun.upgrade()
|
||||
.map(|n| n.lock().or_poisoned().ptr_address())
|
||||
});
|
||||
// it's not redundant, it's due to the SendWrapper deref
|
||||
#[allow(clippy::redundant_closure)]
|
||||
Box::new(move || wrapped())
|
||||
},
|
||||
move |prev| fun.lock().or_poisoned().call((prev,)),
|
||||
)
|
||||
};
|
||||
|
||||
Self::new_with_value_erased(
|
||||
Box::new(fun),
|
||||
initial_value,
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr,
|
||||
)
|
||||
}
|
||||
|
||||
/// Creates a new render effect, which immediately runs `fun`.
|
||||
@@ -71,6 +122,11 @@ where
|
||||
fun: impl FnMut(Option<T>) -> T + 'static,
|
||||
value: impl IntoFuture<Output = T> + 'static,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
#[cfg(feature = "subsecond")]
|
||||
let fun = move |prev| fun.call((prev,));
|
||||
|
||||
Self::new_with_async_value_erased(
|
||||
Box::new(fun),
|
||||
Box::pin(value.into_future()),
|
||||
@@ -79,8 +135,13 @@ where
|
||||
}
|
||||
|
||||
fn new_with_value_erased(
|
||||
mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
|
||||
#[allow(unused_mut)] mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
|
||||
initial_value: Option<T>,
|
||||
// this argument can be used to invalidate individual effects in the future
|
||||
// in present experiments, I have found that it is not actually granular enough to make a difference
|
||||
#[allow(unused)]
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr: CurrentHotPtr,
|
||||
) -> Self {
|
||||
// codegen optimisation:
|
||||
fn prep() -> (Owner, Arc<RwLock<EffectInner>>, crate::channel::Receiver)
|
||||
@@ -104,12 +165,56 @@ where
|
||||
let _ = initial_value;
|
||||
let _ = owner;
|
||||
let _ = &mut rx;
|
||||
let _ = &mut fun;
|
||||
let _ = fun;
|
||||
}
|
||||
|
||||
#[cfg(feature = "effects")]
|
||||
{
|
||||
let subscriber = inner.to_any_subscriber();
|
||||
|
||||
#[cfg(all(feature = "subsecond", debug_assertions))]
|
||||
let mut fun = {
|
||||
use crate::graph::ReactiveNode;
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::sync::{Arc, LazyLock, Mutex};
|
||||
use subsecond::HotFnPtr;
|
||||
|
||||
static HOT_RELOAD_SUBSCRIBERS: LazyLock<
|
||||
Mutex<FxHashMap<AnySubscriber, (HotFnPtr, CurrentHotPtr)>>,
|
||||
> = LazyLock::new(|| {
|
||||
subsecond::register_handler(Arc::new(|| {
|
||||
HOT_RELOAD_SUBSCRIBERS.lock().or_poisoned().retain(
|
||||
|subscriber, (prev_ptr, hot_fn_ptr)| {
|
||||
match hot_fn_ptr() {
|
||||
None => false,
|
||||
Some(curr_hot_ptr) => {
|
||||
if curr_hot_ptr != *prev_ptr {
|
||||
crate::log_warning(format_args!(
|
||||
"{prev_ptr:?} <> \
|
||||
{curr_hot_ptr:?}",
|
||||
));
|
||||
*prev_ptr = curr_hot_ptr;
|
||||
|
||||
subscriber.mark_dirty();
|
||||
}
|
||||
true
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
}));
|
||||
Default::default()
|
||||
});
|
||||
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
let initial_ptr = hot_fn_ptr().unwrap();
|
||||
HOT_RELOAD_SUBSCRIBERS
|
||||
.lock()
|
||||
.or_poisoned()
|
||||
.insert(subscriber.clone(), (initial_ptr, hot_fn_ptr));
|
||||
move |prev| fun.call((prev,))
|
||||
};
|
||||
|
||||
*value.write().or_poisoned() = Some(
|
||||
owner.with(|| subscriber.with_observer(|| fun(initial_value))),
|
||||
);
|
||||
@@ -230,6 +335,11 @@ where
|
||||
pub fn new_isomorphic(
|
||||
fun: impl FnMut(Option<T>) -> T + Send + Sync + 'static,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
#[cfg(feature = "subsecond")]
|
||||
let fun = move |prev| fun.call((prev,));
|
||||
|
||||
fn erased<T: Send + Sync + 'static>(
|
||||
mut fun: Box<dyn FnMut(Option<T>) -> T + Send + Sync + 'static>,
|
||||
) -> RenderEffect<T> {
|
||||
|
||||
@@ -6,11 +6,14 @@
|
||||
//! a linear search is not significantly more expensive than a hash and lookup.
|
||||
|
||||
use super::{AnySource, AnySubscriber, Source};
|
||||
use core::slice;
|
||||
use std::{mem, vec::IntoIter};
|
||||
use indexmap::IndexSet;
|
||||
use rustc_hash::FxHasher;
|
||||
use std::{hash::BuildHasherDefault, mem};
|
||||
|
||||
type FxIndexSet<T> = IndexSet<T, BuildHasherDefault<FxHasher>>;
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
pub struct SourceSet(Vec<AnySource>);
|
||||
pub struct SourceSet(FxIndexSet<AnySource>);
|
||||
|
||||
impl SourceSet {
|
||||
pub fn new() -> Self {
|
||||
@@ -18,16 +21,14 @@ impl SourceSet {
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, source: AnySource) {
|
||||
self.0.push(source);
|
||||
self.0.insert(source);
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, source: &AnySource) {
|
||||
if let Some(pos) = self.0.iter().position(|s| s == source) {
|
||||
self.0.remove(pos);
|
||||
}
|
||||
self.0.shift_remove(source);
|
||||
}
|
||||
|
||||
pub fn take(&mut self) -> Vec<AnySource> {
|
||||
pub fn take(&mut self) -> FxIndexSet<AnySource> {
|
||||
mem::take(&mut self.0)
|
||||
}
|
||||
|
||||
@@ -44,7 +45,7 @@ impl SourceSet {
|
||||
|
||||
impl IntoIterator for SourceSet {
|
||||
type Item = AnySource;
|
||||
type IntoIter = IntoIter<AnySource>;
|
||||
type IntoIter = <FxIndexSet<AnySource> as IntoIterator>::IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.into_iter()
|
||||
@@ -53,40 +54,36 @@ impl IntoIterator for SourceSet {
|
||||
|
||||
impl<'a> IntoIterator for &'a SourceSet {
|
||||
type Item = &'a AnySource;
|
||||
type IntoIter = slice::Iter<'a, AnySource>;
|
||||
type IntoIter = <&'a FxIndexSet<AnySource> as IntoIterator>::IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.iter()
|
||||
}
|
||||
}
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct SubscriberSet(Vec<AnySubscriber>);
|
||||
pub struct SubscriberSet(FxIndexSet<AnySubscriber>);
|
||||
|
||||
impl SubscriberSet {
|
||||
pub fn new() -> Self {
|
||||
Self(Vec::with_capacity(2))
|
||||
Self(FxIndexSet::with_capacity_and_hasher(2, Default::default()))
|
||||
}
|
||||
|
||||
pub fn subscribe(&mut self, subscriber: AnySubscriber) {
|
||||
if !self.0.contains(&subscriber) {
|
||||
self.0.push(subscriber);
|
||||
}
|
||||
self.0.insert(subscriber);
|
||||
}
|
||||
|
||||
pub fn unsubscribe(&mut self, subscriber: &AnySubscriber) {
|
||||
if let Some(pos) = self.0.iter().position(|s| s == subscriber) {
|
||||
// note: do not use `.swap_remove()` here.
|
||||
// using `.remove()` is slower because it shifts other items
|
||||
// but it maintains the order of the subscribers, which is important
|
||||
// to correctness when you're using this to drive something like a UI,
|
||||
// which can have nested effects, where the inner one assumes the outer
|
||||
// has already run (for example, an outer effect that checks .is_some(),
|
||||
// and an inner effect that unwraps)
|
||||
self.0.remove(pos);
|
||||
}
|
||||
// note: do not use `.swap_remove()` here.
|
||||
// using `.shift_remove()` is slower because it shifts other items
|
||||
// but it maintains the order of the subscribers, which is important
|
||||
// to correctness when you're using this to drive something like a UI,
|
||||
// which can have nested effects, where the inner one assumes the outer
|
||||
// has already run (for example, an outer effect that checks .is_some(),
|
||||
// and an inner effect that unwraps)
|
||||
self.0.shift_remove(subscriber);
|
||||
}

pub fn take(&mut self) -> Vec<AnySubscriber> {
pub fn take(&mut self) -> FxIndexSet<AnySubscriber> {
mem::take(&mut self.0)
}

@@ -97,7 +94,7 @@ impl SubscriberSet {

impl IntoIterator for SubscriberSet {
type Item = AnySubscriber;
type IntoIter = IntoIter<AnySubscriber>;
type IntoIter = <FxIndexSet<AnySubscriber> as IntoIterator>::IntoIter;

fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
@@ -106,7 +103,7 @@ impl IntoIterator for SubscriberSet {

impl<'a> IntoIterator for &'a SubscriberSet {
type Item = &'a AnySubscriber;
type IntoIter = slice::Iter<'a, AnySubscriber>;
type IntoIter = <&'a FxIndexSet<AnySubscriber> as IntoIterator>::IntoIter;

fn into_iter(self) -> Self::IntoIter {
self.0.iter()

@@ -209,6 +209,25 @@ impl Owner {
this
}

/// Returns the parent of this `Owner`, if any.
///
/// None when:
/// - This is a root owner
/// - The parent has been dropped
pub fn parent(&self) -> Option<Owner> {
self.inner
.read()
.or_poisoned()
.parent
.as_ref()
.and_then(|p| p.upgrade())
.map(|inner| Owner {
inner,
#[cfg(feature = "hydration")]
shared_context: self.shared_context.clone(),
})
}
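A hedged sketch of how the new `parent()` accessor relates to the existing `child()`; it assumes only that an `Owner` can be created standalone, as it is in the tests later in this diff:

```rust
use reactive_graph::owner::Owner;

fn main() {
    let root = Owner::new();
    let child = root.child();

    // a root owner has no parent...
    assert!(root.parent().is_none());
    // ...while a child created from it can walk back up the ownership tree
    assert!(child.parent().is_some());
}
```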

/// Creates a new `Owner` that is the child of the current `Owner`, if any.
pub fn child(&self) -> Self {
let parent = Some(Arc::downgrade(&self.inner));

@@ -257,6 +257,20 @@ pub mod read {
}
}

impl<T, S> From<ReadSignal<T, S>> for ArcSignal<T, S>
where
S: Storage<ArcReadSignal<T>> + Storage<T>,
{
#[track_caller]
fn from(value: ReadSignal<T, S>) -> Self {
Self {
inner: SignalTypes::ReadSignal(value.into()),
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: std::panic::Location::caller(),
}
}
}

impl<T: Send + Sync> From<ArcRwSignal<T>> for ArcSignal<T, SyncStorage> {
#[track_caller]
fn from(value: ArcRwSignal<T>) -> Self {
@@ -268,6 +282,20 @@ pub mod read {
}
}

impl<T, S> From<RwSignal<T, S>> for ArcSignal<T, S>
where
S: Storage<ArcRwSignal<T>> + Storage<ArcReadSignal<T>> + Storage<T>,
{
#[track_caller]
fn from(value: RwSignal<T, S>) -> Self {
Self {
inner: SignalTypes::ReadSignal(value.read_only().into()),
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: std::panic::Location::caller(),
}
}
}

impl<T, S> From<ArcMemo<T, S>> for ArcSignal<T, S>
where
S: Storage<T>,
@@ -282,6 +310,20 @@ pub mod read {
}
}

impl<T, S> From<Memo<T, S>> for ArcSignal<T, S>
where
S: Storage<ArcMemo<T, S>> + Storage<T>,
{
#[track_caller]
fn from(value: Memo<T, S>) -> Self {
Self {
inner: SignalTypes::Memo(value.into()),
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: std::panic::Location::caller(),
}
}
}
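Taken together, the new `From` impls let the arena-allocated handles be converted straight into the reference-counted `ArcSignal` wrapper. A sketch of the intended usage, assuming an arena `Owner` is set as in the `test_batch` test further down:

```rust
use reactive_graph::{
    computed::Memo, owner::Owner, signal::RwSignal, traits::Get,
    wrappers::read::ArcSignal,
};

fn main() {
    let owner = Owner::new();
    owner.set();

    let count = RwSignal::new(1);
    let double = Memo::new(move |_| count.get() * 2);

    // arena handles -> ArcSignal, via the impls added above
    let from_read: ArcSignal<i32> = count.read_only().into();
    let from_rw: ArcSignal<i32> = count.into();
    let from_memo: ArcSignal<i32> = double.into();

    assert_eq!(from_read.get(), 1);
    assert_eq!(from_rw.get(), 1);
    assert_eq!(from_memo.get(), 2);
}
```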

impl<T, S> DefinedAt for ArcSignal<T, S>
where
S: Storage<T>,
@@ -1500,7 +1542,6 @@ pub mod read {

impl<T, S> ReadUntracked for MaybeProp<T, S>
where
T: Clone,
S: Storage<Option<T>> + Storage<SignalTypes<Option<T>, S>>,
{
type Value = ReadGuard<Option<T>, SignalReadGuard<Option<T>, S>>;

@@ -225,3 +225,38 @@ fn threaded_chaos_effect() {
let values: Vec<_> = signals.iter().map(|s| s.get_untracked()).collect();
println!("FINAL: {values:?}");
}

#[cfg(feature = "effects")]
#[test]
fn test_batch() {
use imports::*;
use reactive_graph::{effect::batch, owner::StoredValue};

let owner = Owner::new();
owner.set();

let a = RwSignal::new(0);
let b = RwSignal::new(0);

let values = StoredValue::new(Vec::new());

ImmediateEffect::new_scoped(move || {
println!("{} = {}", a.get(), b.get());
values.write_value().push((a.get(), b.get()));
});

a.set(1);
b.set(1);

batch(move || {
a.set(2);
b.set(2);

batch(move || {
a.set(3);
b.set(3);
});
});

assert_eq!(values.get_value(), vec![(0, 0), (1, 0), (1, 1), (3, 3)]);
}

@@ -1,6 +1,6 @@
[package]
name = "reactive_stores"
version = "0.2.5"
version = "0.3.0"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

@@ -30,6 +30,8 @@ where
defined_at: &'static Location<'static>,
path: Arc<dyn Fn() -> StorePath + Send + Sync>,
get_trigger: Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
get_trigger_unkeyed:
Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
read: Arc<dyn Fn() -> Option<StoreFieldReader<T>> + Send + Sync>,
pub(crate) write:
Arc<dyn Fn() -> Option<StoreFieldWriter<T>> + Send + Sync>,
@@ -103,6 +105,10 @@ impl<T> StoreField for ArcField<T> {
(self.get_trigger)(path)
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
(self.get_trigger_unkeyed)(path)
}

fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
(self.path)()
}
@@ -132,6 +138,9 @@ where
defined_at: Location::caller(),
path: Arc::new(move || value.path().into_iter().collect()),
get_trigger: Arc::new(move |path| value.get_trigger(path)),
get_trigger_unkeyed: Arc::new(move |path| {
value.get_trigger_unkeyed(path)
}),
read: Arc::new(move || value.reader().map(StoreFieldReader::new)),
write: Arc::new(move || value.writer().map(StoreFieldWriter::new)),
keys: Arc::new(move || value.keys()),
@@ -158,6 +167,10 @@ where
let value = value.clone();
move |path| value.get_trigger(path)
}),
get_trigger_unkeyed: Arc::new({
let value = value.clone();
move |path| value.get_trigger_unkeyed(path)
}),
read: Arc::new({
let value = value.clone();
move || value.reader().map(StoreFieldReader::new)
@@ -202,6 +215,10 @@ where
let value = value.clone();
move |path| value.get_trigger(path)
}),
get_trigger_unkeyed: Arc::new({
let value = value.clone();
move |path| value.get_trigger_unkeyed(path)
}),
read: Arc::new({
let value = value.clone();
move || value.reader().map(StoreFieldReader::new)
@@ -245,6 +262,10 @@ where
let value = value.clone();
move |path| value.get_trigger(path)
}),
get_trigger_unkeyed: Arc::new({
let value = value.clone();
move |path| value.get_trigger_unkeyed(path)
}),
read: Arc::new({
let value = value.clone();
move || value.reader().map(StoreFieldReader::new)
@@ -289,6 +310,10 @@ where
let value = value.clone();
move |path| value.get_trigger(path)
}),
get_trigger_unkeyed: Arc::new({
let value = value.clone();
move |path| value.get_trigger_unkeyed(path)
}),
read: Arc::new({
let value = value.clone();
move || value.reader().map(StoreFieldReader::new)
@@ -337,6 +362,10 @@ where
let value = value.clone();
move |path| value.get_trigger(path)
}),
get_trigger_unkeyed: Arc::new({
let value = value.clone();
move |path| value.get_trigger_unkeyed(path)
}),
read: Arc::new({
let value = value.clone();
move || value.reader().map(StoreFieldReader::new)
@@ -368,6 +397,7 @@ impl<T> Clone for ArcField<T> {
defined_at: self.defined_at,
path: self.path.clone(),
get_trigger: Arc::clone(&self.get_trigger),
get_trigger_unkeyed: Arc::clone(&self.get_trigger_unkeyed),
read: Arc::clone(&self.read),
write: Arc::clone(&self.write),
keys: Arc::clone(&self.keys),

@@ -68,6 +68,11 @@ where
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger(path)
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger_unkeyed(path)
}

fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner.path()
}

@@ -59,6 +59,13 @@ where
.unwrap_or_default()
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner
.try_get_value()
.map(|inner| inner.get_trigger_unkeyed(path))
.unwrap_or_default()
}

fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner
.try_get_value()

@@ -84,6 +84,10 @@ where
self.inner.get_trigger(path)
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger_unkeyed(path)
}

fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
let index = self.index;
@@ -109,6 +113,23 @@ where
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}

fn track_field(&self) {
let mut full_path = self.path().into_iter().collect::<StorePath>();
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.track();
trigger.children.track();

// tracks `this` for all ancestors: i.e., it will track any change that is made
// directly to one of its ancestors, but not a change made to a *child* of an ancestor
// (which would end up with every subfield tracking its own siblings, because they are
// children of its parent)
while !full_path.is_empty() {
full_path.pop();
let inner = self.get_trigger(full_path.clone());
inner.this.track();
}
}
}

impl<Inner, Prev> DefinedAt for AtIndex<Inner, Prev>

@@ -110,6 +110,10 @@ where
self.inner.get_trigger(path)
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger_unkeyed(path)
}

fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
Some(Mapped::new_with_guard(inner, self.read))
@@ -432,7 +436,7 @@ where
let this = keys
.with_field_keys(
inner.clone(),
|keys| keys.get(&self.key),
|keys| (keys.get(&self.key), vec![]),
|| self.inner.latest_keys(),
)
.flatten()
@@ -444,6 +448,10 @@ where
self.inner.get_trigger(path)
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger_unkeyed(path)
}

fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;

@@ -452,7 +460,7 @@ where
let index = keys
.with_field_keys(
inner_path,
|keys| keys.get(&self.key),
|keys| (keys.get(&self.key), vec![]),
|| self.inner.latest_keys(),
)
.flatten()
@@ -476,7 +484,7 @@ where
let index = keys
.with_field_keys(
inner_path.clone(),
|keys| keys.get(&self.key),
|keys| (keys.get(&self.key), vec![]),
|| self.inner.latest_keys(),
)
.flatten()
@@ -624,9 +632,7 @@ where
let latest = self.latest_keys();
keys.with_field_keys(
inner_path,
|keys| {
keys.update(latest);
},
|keys| ((), keys.update(latest)),
|| self.latest_keys(),
);
}

@@ -364,13 +364,18 @@ where
})
}

fn update(&mut self, iter: impl IntoIterator<Item = K>) {
fn update(
&mut self,
iter: impl IntoIterator<Item = K>,
) -> Vec<(usize, StorePathSegment)> {
let new_keys = iter
.into_iter()
.enumerate()
.map(|(idx, key)| (key, idx))
.collect::<FxHashMap<K, usize>>();

let mut index_keys = Vec::with_capacity(new_keys.len());

// remove old keys and recycle the slots
self.keys.retain(|key, old_entry| match new_keys.get(key) {
Some(idx) => {
@@ -385,14 +390,17 @@ where

// add new keys
for (key, idx) in new_keys {
// the suggestion doesn't compile because we need &mut for self.next_key(),
// and we don't want to call that until after the check
#[allow(clippy::map_entry)]
if !self.keys.contains_key(&key) {
let path = self.next_key();
self.keys.insert(key, (path, idx));
match self.keys.get(&key) {
Some((segment, idx)) => index_keys.push((*idx, *segment)),
None => {
let path = self.next_key();
self.keys.insert(key, (path, idx));
index_keys.push((idx, path));
}
}
}

index_keys
}
}

@@ -415,14 +423,20 @@ type HashMap<K, V> = send_wrapper::SendWrapper<

/// A map of the keys for a keyed subfield.
#[derive(Clone)]
pub struct KeyMap(HashMap<StorePath, Box<dyn Any + Send + Sync>>);
pub struct KeyMap(
HashMap<StorePath, Box<dyn Any + Send + Sync>>,
HashMap<(StorePath, usize), StorePathSegment>,
);

impl Default for KeyMap {
fn default() -> Self {
#[cfg(not(target_arch = "wasm32"))]
return Self(Default::default());
return Self(Default::default(), Default::default());
#[cfg(target_arch = "wasm32")]
return Self(send_wrapper::SendWrapper::new(Default::default()));
return Self(
send_wrapper::SendWrapper::new(Default::default()),
send_wrapper::SendWrapper::new(Default::default()),
);
}
}

@@ -430,31 +444,70 @@ impl KeyMap {
fn with_field_keys<K, T>(
&self,
path: StorePath,
fun: impl FnOnce(&mut FieldKeys<K>) -> T,
fun: impl FnOnce(&mut FieldKeys<K>) -> (T, Vec<(usize, StorePathSegment)>),
initialize: impl FnOnce() -> Vec<K>,
) -> Option<T>
where
K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static,
{
let initial_keys = initialize();

#[cfg(not(target_arch = "wasm32"))]
let mut entry = self
.0
.entry(path)
.or_insert_with(|| Box::new(FieldKeys::new(initialize())));
.entry(path.clone())
.or_insert_with(|| Box::new(FieldKeys::new(initial_keys)));

#[cfg(target_arch = "wasm32")]
let entry = if !self.0.borrow().contains_key(&path) {
Some(Box::new(FieldKeys::new(initialize())))
Some(Box::new(FieldKeys::new(initial_keys)))
} else {
None
};
#[cfg(target_arch = "wasm32")]
let mut map = self.0.borrow_mut();
#[cfg(target_arch = "wasm32")]
let entry = map.entry(path).or_insert_with(|| entry.unwrap());
let entry = map.entry(path.clone()).or_insert_with(|| entry.unwrap());

let entry = entry.downcast_mut::<FieldKeys<K>>()?;
Some(fun(entry))
let (result, new_keys) = fun(entry);
if !new_keys.is_empty() {
for (idx, segment) in new_keys {
#[cfg(not(target_arch = "wasm32"))]
self.1.insert((path.clone(), idx), segment);

#[cfg(target_arch = "wasm32")]
self.1.borrow_mut().insert((path.clone(), idx), segment);
}
}
Some(result)
}

fn contains_key(&self, key: &StorePath) -> bool {
#[cfg(not(target_arch = "wasm32"))]
{
self.0.contains_key(key)
}

#[cfg(target_arch = "wasm32")]
{
self.0.borrow_mut().contains_key(key)
}
}

fn get_key_for_index(
&self,
key: &(StorePath, usize),
) -> Option<StorePathSegment> {
#[cfg(not(target_arch = "wasm32"))]
{
self.1.get(key).as_deref().copied()
}

#[cfg(target_arch = "wasm32")]
{
self.1.borrow().get(key).as_deref().copied()
}
}
}

@@ -832,6 +885,30 @@ mod tests {
}
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Foo {
id: i32,
bar: Bar,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Bar {
bar_signature: i32,
baz: Baz,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Baz {
more_data: i32,
baw: Baw,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Baw {
more_data: i32,
end: i32,
}

#[tokio::test]
async fn mutating_field_triggers_effect() {
_ = any_spawner::Executor::init_tokio();
@@ -1112,30 +1189,6 @@ mod tests {

_ = any_spawner::Executor::init_tokio();

#[derive(Debug, Clone, Store, Patch, Default)]
struct Foo {
id: i32,
bar: Bar,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Bar {
bar_signature: i32,
baz: Baz,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Baz {
more_data: i32,
baw: Baw,
}

#[derive(Debug, Clone, Store, Patch, Default)]
struct Baw {
more_data: i32,
end: i32,
}

let store = Store::new(Foo {
id: 42,
bar: Bar {
@@ -1219,4 +1272,107 @@ mod tests {
assert_eq!(more_data_runs.get_value(), 3);
assert_eq!(baz_baw_end_runs.get_value(), 3);
}

#[tokio::test]
async fn changing_parent_notifies_subfield() {
_ = any_spawner::Executor::init_tokio();

let combined_count = Arc::new(AtomicUsize::new(0));

let store = Store::new(Foo {
id: 42,
bar: Bar {
bar_signature: 69,
baz: Baz {
more_data: 9999,
baw: Baw {
more_data: 22,
end: 1112,
},
},
},
});

let tracked_field = store.bar().baz().more_data();

Effect::new_sync({
let combined_count = Arc::clone(&combined_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("first run");
} else {
println!("next run");
}

// we only track `more`, but this should still be notified
// when its parent fields `bar` or `baz` change
println!("{:?}", *tracked_field.read());
combined_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
tick().await;

store.bar().baz().set(Baz {
more_data: 42,
baw: Baw {
more_data: 11,
end: 31,
},
});
tick().await;
store.bar().set(Bar {
bar_signature: 23,
baz: Baz {
more_data: 32,
baw: Baw {
more_data: 432,
end: 423,
},
},
});
tick().await;

assert_eq!(combined_count.load(Ordering::Relaxed), 3);
}

#[tokio::test]
async fn changing_parent_notifies_unkeyed_child() {
_ = any_spawner::Executor::init_tokio();

let combined_count = Arc::new(AtomicUsize::new(0));

let store = Store::new(data());

let tracked_field = store.todos().at_unkeyed(0);

Effect::new_sync({
let combined_count = Arc::clone(&combined_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("first run");
} else {
println!("next run");
}

// we only track the first todo positionally, but this should still be
// notified when the parent `todos` field changes
println!("{:?}", *tracked_field.read());
combined_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
tick().await;

store.todos().write().pop();
tick().await;

store.todos().write().push(Todo {
label: "another one".into(),
completed: false,
});
tick().await;

assert_eq!(combined_count.load(Ordering::Relaxed), 3);
}
}

@@ -35,7 +35,7 @@ where
// don't track the writer for the whole store
writer.untrack();
let mut notify = |path: &StorePath| {
self.triggers_for_path(path.to_owned()).notify();
self.triggers_for_path_unkeyed(path.to_owned()).notify();
};
writer.patch_field(new, &path, &mut notify);
}

@@ -11,6 +11,15 @@ impl IntoIterator for StorePath {
}
}

impl<'a> IntoIterator for &'a StorePath {
type Item = &'a StorePathSegment;
type IntoIter = std::slice::Iter<'a, StorePathSegment>;

fn into_iter(self) -> Self::IntoIter {
self.0.iter()
}
}

impl From<Vec<StorePathSegment>> for StorePath {
fn from(value: Vec<StorePathSegment>) -> Self {
Self(value)
@@ -18,6 +27,16 @@ impl From<Vec<StorePathSegment>> for StorePath {
}

impl StorePath {
/// Creates a new path.
pub fn new() -> Self {
Self(Vec::new())
}

/// Creates a new path with storage capacity for `capacity` segments.
pub fn with_capacity(capacity: usize) -> Self {
Self(Vec::with_capacity(capacity))
}

/// Adds a new segment to the path.
pub fn push(&mut self, segment: impl Into<StorePathSegment>) {
self.0.push(segment.into());

@@ -26,6 +26,14 @@ pub trait StoreField: Sized {
#[track_caller]
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger;

/// Returns the trigger that tracks access and updates for this field.
///
/// This uses *unkeyed* paths: i.e., if any field in the path is keyed, it will
/// try to look up the key for the item at the index given in the path, rather than
/// the keyed item.
#[track_caller]
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger;

/// The path of this field (see [`StorePath`]).
#[track_caller]
fn path(&self) -> impl IntoIterator<Item = StorePathSegment>;
@@ -84,6 +92,26 @@ pub trait StoreField: Sized {

triggers
}

/// Returns triggers for the field at the given path, and all parent fields
fn triggers_for_path_unkeyed(&self, path: StorePath) -> Vec<ArcTrigger> {
// see notes on triggers_for_path() for additional comments on implementation

let trigger = self.get_trigger_unkeyed(path.clone());
let mut full_path = path;

let mut triggers = Vec::with_capacity(full_path.len() + 2);
triggers.push(trigger.this.clone());
triggers.push(trigger.children.clone());
while !full_path.is_empty() {
full_path.pop();
let inner = self.get_trigger_unkeyed(full_path.clone());
triggers.push(inner.children.clone());
}
triggers.reverse();

triggers
}
}

impl<T> StoreField for ArcStore<T>
@@ -101,6 +129,26 @@ where
trigger
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
let orig_path = path.clone();

let mut path = StorePath::with_capacity(orig_path.len());
for segment in &orig_path {
let parent_is_keyed = self.keys.contains_key(&path);

if parent_is_keyed {
let key = self
.keys
.get_key_for_index(&(path.clone(), segment.0))
.expect("could not find key for index");
path.push(key);
} else {
path.push(*segment);
}
}
self.get_trigger(path)
}
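A hedged sketch of the index-to-key translation performed above, using only the `StorePath` API added in this diff (the key segment value is hypothetical, standing in for whatever the `KeyMap` recorded; it assumes `StorePath` and `StorePathSegment` are re-exported at the crate root as in published versions of `reactive_stores`):

```rust
use reactive_stores::{StorePath, StorePathSegment};

fn main() {
    // Positional ("unkeyed") path: third item of the collection at segment 0.
    let positional: StorePath =
        vec![StorePathSegment::from(0usize), StorePathSegment::from(2usize)].into();

    // get_trigger_unkeyed rebuilds the path segment by segment; when the parent
    // segment is keyed, the index segment (2) is swapped for the stable key
    // segment recorded for (parent path, index).
    let hypothetical_key_segment = StorePathSegment::from(7usize);
    let mut keyed = StorePath::with_capacity(2);
    keyed.push(StorePathSegment::from(0usize));
    keyed.push(hypothetical_key_segment);

    assert_eq!(positional.len(), 2);
    assert_eq!(keyed.len(), 2);
}
```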

#[track_caller]
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
iter::empty()
@@ -141,6 +189,14 @@ where
.unwrap_or_default()
}

#[track_caller]
fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner
.try_get_value()
.map(|n| n.get_trigger_unkeyed(path))
.unwrap_or_default()
}

#[track_caller]
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner

@@ -88,6 +88,10 @@ where
self.inner.get_trigger(path)
}

fn get_trigger_unkeyed(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger_unkeyed(path)
}

fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
Some(Mapped::new_with_guard(inner, self.read))

@@ -1,6 +1,6 @@
[package]
name = "reactive_stores_macro"
version = "0.2.5"
version = "0.2.6"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

@@ -6,8 +6,8 @@ use syn::{
parse::{Parse, ParseStream, Parser},
punctuated::Punctuated,
token::Comma,
ExprClosure, Field, Fields, Generics, Ident, Index, Meta, Result, Token,
Type, Variant, Visibility, WhereClause,
ExprClosure, Field, Fields, GenericParam, Generics, Ident, Index, Meta,
Result, Token, Type, TypeParam, Variant, Visibility, WhereClause,
};

#[proc_macro_error]
@@ -26,6 +26,103 @@ pub fn derive_patch(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
.into()
}

/// Removes all constraints from the generic arguments list.
///
/// # Example
///
/// ```rust,ignore
/// struct Data<
/// 'a,
/// T1: ToString + PatchField,
/// T2: PatchField,
/// T3: 'static + PatchField,
/// T4,
/// >
/// where
/// T3: ToString,
/// T4: ToString + PatchField,
/// {
/// data1: &'a T1,
/// data2: T2,
/// data3: T3,
/// data4: T4,
/// }
/// ```
///
/// For the struct above, `[syn::DeriveInput::parse]` will return an instance of [syn::Generics]
/// which will conceptually look like this
///
/// ```text
/// Generics:
/// params:
/// [
/// 'a,
/// T1: ToString + PatchField,
/// T2: PatchField,
/// T3: 'static + PatchField,
/// T4,
/// ]
/// where_clause:
/// [
/// T3: ToString,
/// T4: ToString + PatchField,
/// ]
/// ```
///
/// This function returns a new instance of [syn::Generics] which will conceptually look like this
///
/// ```text
/// Generics:
/// params:
/// [
/// 'a,
/// T1,
/// T2,
/// T3,
/// T4,
/// ]
/// where_clause:
/// []
/// ```
///
/// This is useful when you want to use the generic arguments list in `impl` blocks for type definitions.
fn remove_constraint_from_generics(generics: &Generics) -> Generics {
let mut new_generics = generics.clone();

// remove constraints directly placed in the generic arguments list
//
// For generics for `struct A<T: MyTrait>` the `T: MyTrait` becomes `T`
for param in new_generics.params.iter_mut() {
match param {
GenericParam::Lifetime(lifetime) => {
lifetime.bounds.clear(); // remove bounds
lifetime.colon_token = None;
}
GenericParam::Type(type_param) => {
type_param.bounds.clear(); // remove bounds
type_param.colon_token = None;
type_param.eq_token = None;
type_param.default = None;
}
GenericParam::Const(const_param) => {
// replaces const generic with type param without bounds which is basically an `ident` token
*param = GenericParam::Type(TypeParam {
attrs: const_param.attrs.clone(),
ident: const_param.ident.clone(),
colon_token: None,
bounds: Punctuated::new(),
eq_token: None,
default: None,
});
}
}
}

new_generics.where_clause = None; // remove where clause

new_generics
}
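A hedged sketch of why the stripped copy matters when generating `impl` blocks; `MyTrait` and the simplified `Data` struct are illustrative only, and the snippet assumes the function above is in scope alongside `quote` and `syn`:

```rust
use quote::quote;
use syn::{parse_quote, DeriveInput};

fn main() {
    let input: DeriveInput = parse_quote! {
        struct Data<'a, T1: ToString, T2> where T2: Clone { a: &'a T1, b: T2 }
    };
    let generics = input.generics;
    let clear_generics = remove_constraint_from_generics(&generics);
    let params = &clear_generics.params;
    let where_clause = &generics.where_clause;

    // Bounds stay on the `impl<...>` list and the where clause; only bare
    // parameter names are valid in the type position `Data<...>`.
    let expanded = quote! {
        impl #generics MyTrait for Data<#params> #where_clause { }
    };
    // -> impl<'a, T1: ToString, T2> MyTrait for Data<'a, T1, T2> where T2: Clone { }
    println!("{expanded}");
}
```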

struct Model {
vis: Visibility,
name: Ident,
@@ -111,7 +208,9 @@ impl ToTokens for Model {
} = &self;
let any_store_field = Ident::new("AnyStoreField", Span::call_site());
let trait_name = Ident::new(&format!("{name}StoreFields"), name.span());
let clear_generics = remove_constraint_from_generics(generics);
let params = &generics.params;
let clear_params = &clear_generics.params;
let generics_with_orig = quote! { <#any_store_field, #params> };
let where_with_orig = {
generics
@@ -124,17 +223,22 @@ impl ToTokens for Model {
} = &w;
quote! {
#where_token
#any_store_field: #library_path::StoreField<Value = #name #generics>,
#any_store_field: #library_path::StoreField<Value = #name < #clear_params > >,
#predicates
}
})
.unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #name #generics> })
.unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #name < #clear_params > > })
};

// define an extension trait that matches this struct
// and implement that trait for all StoreFields
let (trait_fields, read_fields): (Vec<_>, Vec<_>) =
ty.to_field_data(&library_path, generics, &any_store_field, name);
let (trait_fields, read_fields): (Vec<_>, Vec<_>) = ty.to_field_data(
&library_path,
generics,
&clear_generics,
&any_store_field,
name,
);

// read access
tokens.extend(quote! {
@@ -144,7 +248,7 @@ impl ToTokens for Model {
#(#trait_fields)*
}

impl #generics_with_orig #trait_name <AnyStoreField, #params> for AnyStoreField
impl #generics_with_orig #trait_name <AnyStoreField, #clear_params> for AnyStoreField
#where_with_orig
{
#(#read_fields)*
@@ -158,6 +262,7 @@ impl ModelTy {
&self,
library_path: &TokenStream,
generics: &Generics,
clear_generics: &Generics,
any_store_field: &Ident,
name: &Ident,
) -> (Vec<TokenStream>, Vec<TokenStream>) {
@@ -204,6 +309,7 @@ impl ModelTy {
library_path,
ident.as_ref(),
generics,
clear_generics,
any_store_field,
name,
ty,
@@ -215,6 +321,7 @@ impl ModelTy {
library_path,
ident.as_ref(),
generics,
clear_generics,
any_store_field,
name,
ty,
@@ -233,6 +340,7 @@ impl ModelTy {
library_path,
ident,
generics,
clear_generics,
any_store_field,
name,
fields,
@@ -242,6 +350,7 @@ impl ModelTy {
library_path,
ident,
generics,
clear_generics,
any_store_field,
name,
fields,
@@ -260,7 +369,8 @@ fn field_to_tokens(
modes: Option<&[SubfieldMode]>,
library_path: &proc_macro2::TokenStream,
orig_ident: Option<&Ident>,
generics: &Generics,
_generics: &Generics,
clear_generics: &Generics,
any_store_field: &Ident,
name: &Ident,
ty: &Type,
@@ -285,7 +395,7 @@ fn field_to_tokens(
SubfieldMode::Keyed(keyed_by, key_ty) => {
let signature = quote! {
#[track_caller]
fn #ident(self) -> #library_path::KeyedSubfield<#any_store_field, #name #generics, #key_ty, #ty>
fn #ident(self) -> #library_path::KeyedSubfield<#any_store_field, #name #clear_generics, #key_ty, #ty>
};
return if include_body {
quote! {
@@ -318,7 +428,7 @@ fn field_to_tokens(
// default subfield
if include_body {
quote! {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty> {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #clear_generics, #ty> {
#library_path::Subfield::new(
self,
#idx.into(),
@@ -329,7 +439,7 @@ fn field_to_tokens(
}
} else {
quote! {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty>;
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #clear_generics, #ty>;
}
}
}
@@ -339,7 +449,8 @@ fn variant_to_tokens(
include_body: bool,
library_path: &proc_macro2::TokenStream,
ident: &Ident,
generics: &Generics,
_generics: &Generics,
clear_generics: &Generics,
any_store_field: &Ident,
name: &Ident,
fields: &Fields,
@@ -408,7 +519,7 @@ fn variant_to_tokens(
// default subfield
if include_body {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>> {
#library_path::StoreField::track_field(&self);
let reader = #library_path::StoreField::reader(&self);
let matches = reader
@@ -440,7 +551,7 @@ fn variant_to_tokens(
}
} else {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>;
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>>;
}
}
}));
@@ -491,7 +602,7 @@ fn variant_to_tokens(
// default subfield
if include_body {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>> {
#library_path::StoreField::track_field(&self);
let reader = #library_path::StoreField::reader(&self);
let matches = reader
@@ -523,7 +634,7 @@ fn variant_to_tokens(
}
} else {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>;
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>>;
}
}
}));
@@ -665,9 +776,14 @@ impl ToTokens for PatchModel {
}
};

let clear_generics = remove_constraint_from_generics(generics);
let params = clear_generics.params;
let where_clause = &generics.where_clause;

// read access
tokens.extend(quote! {
impl #library_path::PatchField for #name #generics
impl #generics #library_path::PatchField for #name <#params>
#where_clause
{
fn patch_field(
&mut self,

@@ -1,6 +1,6 @@
[package]
name = "leptos_router"
version = { workspace = true }
version = "0.8.9"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"

@@ -364,6 +364,12 @@ where
#[derive(Debug)]
pub(crate) struct MatchedRoute(pub String, pub AnyView);

impl MatchedRoute {
fn branch_name(&self) -> String {
format!("{:?}", self.1.as_type_id())
}
}

impl Render for MatchedRoute {
type State = <AnyView as Render>::State;

@@ -414,8 +420,9 @@ impl RenderHtml for MatchedRoute {
mark_branches: bool,
extra_attrs: Vec<AnyAttribute>,
) {
if mark_branches && escape {
buf.open_branch(&self.0);
let branch_name = (mark_branches && escape).then(|| self.branch_name());
if let Some(bn) = &branch_name {
buf.open_branch(bn);
}
self.1.to_html_with_buf(
buf,
@@ -424,8 +431,8 @@ impl RenderHtml for MatchedRoute {
mark_branches,
extra_attrs,
);
if mark_branches && escape {
buf.close_branch(&self.0);
if let Some(bn) = &branch_name {
buf.close_branch(bn);
if *position == Position::NextChildAfterText {
*position = Position::NextChild;
}
@@ -442,8 +449,9 @@ impl RenderHtml for MatchedRoute {
) where
Self: Sized,
{
if mark_branches && escape {
buf.open_branch(&self.0);
let branch_name = (mark_branches && escape).then(|| self.branch_name());
if let Some(bn) = &branch_name {
buf.open_branch(bn);
}
self.1.to_html_async_with_buf::<OUT_OF_ORDER>(
buf,
@@ -452,8 +460,8 @@ impl RenderHtml for MatchedRoute {
mark_branches,
extra_attrs,
);
if mark_branches && escape {
buf.close_branch(&self.0);
if let Some(bn) = &branch_name {
buf.close_branch(bn);
if *position == Position::NextChildAfterText {
*position = Position::NextChild;
}

@@ -3,7 +3,7 @@ use crate::{hooks::use_navigate, params::ParamsMap};
use core::fmt;
use futures::channel::oneshot;
use js_sys::{try_iter, Array, JsString};
use leptos::prelude::*;
use leptos::{ev, prelude::*};
use or_poisoned::OrPoisoned;
use reactive_graph::{
signal::ArcRwSignal,
@@ -11,13 +11,12 @@ use reactive_graph::{
};
use std::{
borrow::Cow,
boxed::Box,
string::String,
sync::{Arc, Mutex},
};
use tachys::dom::{document, window};
use wasm_bindgen::{closure::Closure, JsCast, JsValue};
use web_sys::{Event, UrlSearchParams};
use wasm_bindgen::{JsCast, JsValue};
use web_sys::UrlSearchParams;

#[derive(Clone)]
pub struct BrowserUrl {
@@ -116,7 +115,6 @@ impl LocationProvider for BrowserUrl {
}

fn init(&self, base: Option<Cow<'static, str>>) {
let window = window();
let navigate = {
let url = self.url.clone();
let pending = Arc::clone(&self.pending_navigation);
@@ -159,37 +157,32 @@ impl LocationProvider for BrowserUrl {

let handle_anchor_click =
handle_anchor_click(base, Self::parse_with_base, navigate);
let closure = Closure::wrap(Box::new(move |ev: Event| {

let click_handle = window_event_listener(ev::click, move |ev| {
if let Err(e) = handle_anchor_click(ev) {
#[cfg(feature = "tracing")]
tracing::error!("{e:?}");
#[cfg(not(feature = "tracing"))]
web_sys::console::error_1(&e);
}
}) as Box<dyn FnMut(Event)>)
.into_js_value();
window
.add_event_listener_with_callback(
"click",
closure.as_ref().unchecked_ref(),
)
.expect(
"couldn't add `click` listener to `window` to handle `<a>` \
clicks",
);
});

// handle popstate event (forward/back navigation)
let cb = {
let popstate_cb = {
let url = self.url.clone();
let path_stack = self.path_stack.clone();
let is_back = self.is_back.clone();
move || match Self::current() {
Ok(new_url) => {
let stack = path_stack.read_value();
let mut stack = path_stack.write_value();
let is_navigating_back = stack.len() == 1
|| (stack.len() >= 2
&& stack.get(stack.len() - 2) == Some(&new_url));

if is_navigating_back {
stack.pop();
}

is_back.set(is_navigating_back);

url.set(new_url);
@@ -202,14 +195,14 @@ impl LocationProvider for BrowserUrl {
}
}
};
let closure =
Closure::wrap(Box::new(cb) as Box<dyn Fn()>).into_js_value();
window
.add_event_listener_with_callback(
"popstate",
closure.as_ref().unchecked_ref(),
)
.expect("couldn't add `popstate` listener to `window`");

let popstate_handle =
window_event_listener(ev::popstate, move |_| popstate_cb());

on_cleanup(|| {
click_handle.remove();
popstate_handle.remove();
});
}
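For reference, a minimal sketch of the `window_event_listener` pattern the rewritten `init` relies on (same imports as above; the returned handle's `remove()` is what the `on_cleanup` block uses):

```rust
use leptos::{ev, prelude::*};

fn setup_click_logger() {
    // runs for every click anywhere on `window`
    let handle = window_event_listener(ev::click, |ev| {
        leptos::logging::log!("click at ({}, {})", ev.client_x(), ev.client_y());
    });

    // remove the listener when the surrounding reactive scope is cleaned up
    on_cleanup(move || handle.remove());
}
```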

fn ready_to_complete(&self) {
@@ -221,6 +214,13 @@ impl LocationProvider for BrowserUrl {
fn complete_navigation(&self, loc: &LocationChange) {
let history = window().history().unwrap();

let current_path = self
.path_stack
.read_value()
.last()
.map(|url| url.to_full_path());
let add_to_stack = current_path.as_ref() != Some(&loc.value);

if loc.replace {
history
.replace_state_with_url(
@@ -229,7 +229,7 @@ impl LocationProvider for BrowserUrl {
Some(&loc.value),
)
.unwrap();
} else {
} else if add_to_stack {
// push the "forward direction" marker
let state = &loc.state.to_js_value();
history
@@ -240,7 +242,9 @@ impl LocationProvider for BrowserUrl {
// add this URL to the "path stack" for detecting back navigations, and
// unset "navigating back" state
if let Ok(url) = Self::current() {
self.path_stack.write_value().push(url);
if add_to_stack {
self.path_stack.write_value().push(url);
}
self.is_back.set(false);
}


@@ -14,7 +14,7 @@ use send_wrapper::SendWrapper;
use std::{borrow::Cow, future::Future};
use tachys::dom::window;
use wasm_bindgen::{JsCast, JsValue};
use web_sys::{Event, HtmlAnchorElement, MouseEvent};
use web_sys::{HtmlAnchorElement, MouseEvent};

mod history;
mod server;
@@ -300,15 +300,14 @@ pub(crate) fn handle_anchor_click<NavFn, NavFut>(
router_base: Option<Cow<'static, str>>,
parse_with_base: fn(&str, &str) -> Result<Url, JsValue>,
navigate: NavFn,
) -> Box<dyn Fn(Event) -> Result<(), JsValue>>
) -> Box<dyn Fn(MouseEvent) -> Result<(), JsValue>>
where
NavFn: Fn(Url, LocationChange) -> NavFut + 'static,
NavFut: Future<Output = ()> + 'static,
{
let router_base = router_base.unwrap_or_default();

Box::new(move |ev: Event| {
let ev = ev.unchecked_into::<MouseEvent>();
Box::new(move |ev: MouseEvent| {
let origin = window().location().origin()?;
if ev.default_prevented()
|| ev.button() != 0

@@ -10,14 +10,23 @@ use crate::{
};
use any_spawner::Executor;
use either_of::{Either, EitherOf3};
use futures::{channel::oneshot, future::join_all, FutureExt};
use leptos::{attr::any_attribute::AnyAttribute, component, oco::Oco};
use futures::{
channel::oneshot,
future::{join_all, AbortHandle, Abortable},
FutureExt,
};
use leptos::{
attr::any_attribute::AnyAttribute,
component,
oco::Oco,
prelude::{ArcStoredValue, WriteValue},
};
use or_poisoned::OrPoisoned;
use reactive_graph::{
computed::{ArcMemo, ScopedFuture},
owner::{provide_context, use_context, Owner},
signal::{ArcRwSignal, ArcTrigger},
traits::{Get, GetUntracked, Notify, ReadUntracked, Set, Track},
traits::{Get, GetUntracked, Notify, ReadUntracked, Set, Track, Write},
transition::AsyncTransition,
wrappers::write::SignalSetter,
};
@@ -68,6 +77,7 @@ where
// held to keep the Owner alive until the router is dropped
#[allow(unused)]
outer_owner: Owner,
abort_navigation: ArcStoredValue<Option<AbortHandle>>,
}

impl<Loc, Defs, FalFn, Fal> Render for NestedRoutesView<Loc, Defs, FalFn>
@@ -109,6 +119,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -134,6 +145,7 @@ where
outlets,
view,
outer_owner,
abort_navigation: Default::default(),
}
}

@@ -148,13 +160,14 @@ where
}
return;
}

// since the path didn't match, we'll update the retained path for future diffing
state.path.clear();
state.path.push_str(url_snapshot.path());

let new_match = self.routes.match_route(url_snapshot.path());

state.current_url.set(url_snapshot);
*state.current_url.write_untracked() = url_snapshot;

match new_match {
None => {
@@ -181,30 +194,51 @@ where
&mut state.outlets,
self.set_is_routing.is_some(),
0,
&self.outer_owner,
);

let (abort_handle, abort_registration) =
AbortHandle::new_pair();

if let Some(prev_handle) =
state.abort_navigation.write_value().replace(abort_handle)
{
prev_handle.abort();
}
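The navigation-cancellation logic added here is the standard `futures` abort pattern: a stale preload is wrapped in `Abortable` and aborted when a newer navigation replaces its handle. A minimal self-contained sketch of that mechanism (using `futures::executor::block_on` only for the demo):

```rust
use futures::{
    executor::block_on,
    future::{AbortHandle, Abortable},
};

fn main() {
    let (handle, registration) = AbortHandle::new_pair();
    let work = Abortable::new(async { 42 }, registration);

    // a newer navigation aborts the one in flight, so its outlets never notify
    handle.abort();
    assert!(block_on(work).is_err()); // resolves to Err(Aborted)
}
```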

let location = self.location.clone();
let is_back = location
.as_ref()
.map(|nav| nav.is_back().get_untracked())
.unwrap_or(false);
Executor::spawn_local(async move {
let triggers = join_all(preloaders).await;
// tell each one of the outlet triggers that it's ready
let notify = move || {
for trigger in triggers {
trigger.notify();
let triggers = Abortable::new(
join_all(preloaders),
abort_registration,
);
if let Ok(triggers) = triggers.await {
// tell each one of the outlet triggers that it's ready
let notify = move || {
for trigger in triggers {
trigger.notify();
}
};
if self.transition {
start_view_transition(
different_level,
is_back,
notify,
);
} else {
notify();
}
};
if self.transition {
start_view_transition(different_level, is_back, notify);
} else {
notify();
}
});

let abort_navigation = state.abort_navigation.clone();
Executor::spawn_local(async move {
join_all(full_loaders).await;
_ = abort_navigation.write_value().take();
if let Some(set_is_routing) = self.set_is_routing {
set_is_routing.set(false);
}

@@ -338,6 +372,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);

// outlets will not send their views if the loaders are never polled

@@ -391,8 +426,16 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);

let preload_owners = outlets
.iter()
.map(|o| o.preload_owner.clone())
.collect::<Vec<_>>();
outer_owner
.with(|| Owner::on_cleanup(move || drop(preload_owners)));

// outlets will not send their views if the loaders are never polled
// the loaders are async so that they can lazy-load routes in the browser,
// but they should always be synchronously available on the server
@@ -444,6 +487,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -463,6 +507,7 @@ where
outlets,
view,
outer_owner,
abort_navigation: Default::default(),
}
}

@@ -498,6 +543,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -514,6 +560,7 @@ where
outlets,
view,
outer_owner,
abort_navigation: Default::default(),
}
}

@@ -533,6 +580,7 @@ pub(crate) struct RouteContext {
base: Option<Oco<'static, str>>,
view_fn: Arc<Mutex<OutletViewFn>>,
owner: Arc<Mutex<Option<Owner>>>,
preload_owner: Owner,
child: ChildRoute,
}

@@ -564,6 +612,7 @@ impl Clone for RouteContext {
view_fn: Arc::clone(&self.view_fn),
owner: Arc::clone(&self.owner),
child: self.child.clone(),
preload_owner: self.preload_owner.clone(),
}
}
}
@@ -575,6 +624,7 @@ trait AddNestedRoute {
base: Option<Oco<'static, str>>,
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
outlets: &mut Vec<RouteContext>,
outer_owner: &Owner,
);

#[allow(clippy::too_many_arguments)]
@@ -588,6 +638,7 @@ trait AddNestedRoute {
outlets: &mut Vec<RouteContext>,
set_is_routing: bool,
level: u8,
outer_owner: &Owner,
) -> u8;
}

@@ -601,6 +652,7 @@ where
base: Option<Oco<'static, str>>,
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
outlets: &mut Vec<RouteContext>,
outer_owner: &Owner,
) {
let orig_url = url;

@@ -668,6 +720,7 @@ where
base: base.clone(),
child: ChildRoute(Arc::new(Mutex::new(None))),
owner: Arc::new(Mutex::new(None)),
preload_owner: outer_owner.child(),
};
if !outlets.is_empty() {
let prev_index = outlets.len().saturating_sub(1);
@@ -692,7 +745,15 @@ where
provide_context(params.clone());
provide_context(url.clone());
provide_context(matched.clone());
view.preload().await;
outlet
.preload_owner
.with(|| {
provide_context(params.clone());
provide_context(url.clone());
provide_context(matched.clone());
ScopedFuture::new(view.preload())
})
.await;
let child = outlet.child.clone();
*view_fn.lock().or_poisoned() =
Box::new(move |owner_where_used| {
@@ -739,7 +800,13 @@ where
// this is important because to build the view, we need access to the outlet
// and the outlet will be returned from building this child
if let Some(child) = child {
child.build_nested_route(orig_url, base, loaders, outlets);
child.build_nested_route(
orig_url,
base,
loaders,
outlets,
outer_owner,
);
}
}

@@ -754,6 +821,7 @@ where
outlets: &mut Vec<RouteContext>,
set_is_routing: bool,
level: u8,
outer_owner: &Owner,
) -> u8 {
let (parent_params, parent_matches): (Vec<_>, Vec<_>) = outlets
.iter()
@@ -770,7 +838,13 @@ where
match current {
// if there's nothing currently in the routes at this point, build from here
None => {
self.build_nested_route(url, base, preloaders, outlets);
self.build_nested_route(
url,
base,
preloaders,
outlets,
outer_owner,
);
level
}
Some(current) => {
@@ -810,6 +884,10 @@ where
&mut current.matched,
ArcRwSignal::new(new_match),
);
let old_preload_owner = mem::replace(
&mut current.preload_owner,
outer_owner.child(),
);
let matched_including_parents = {
ArcMemo::new({
let matched = current.matched.clone();
@@ -852,11 +930,26 @@ where
let child = outlet.child.clone();
async move {
let child = child.clone();
if set_is_routing {
AsyncTransition::run(|| view.preload()).await;
} else {
view.preload().await;
}
outlet
.preload_owner
.with(|| {
provide_context(
params_including_parents.clone(),
);
provide_context(url.clone());
provide_context(matched.clone());
ScopedFuture::new(async {
if set_is_routing {
AsyncTransition::run(|| {
view.preload()
})
.await;
} else {
view.preload().await;
}
})
})
.await;
*view_fn.lock().or_poisoned() =
Box::new(move |owner_where_used| {
let prev_owner = route_owner
@@ -905,6 +998,7 @@ where
drop(old_params);
drop(old_url);
drop(old_matched);
drop(old_preload_owner);
trigger
}
})));
@@ -915,8 +1009,13 @@ where

// if this child has matches, then rebuild the lower section of the tree
if let Some(child) = child {
child
.build_nested_route(url, base, preloaders, outlets);
child.build_nested_route(
url,
base,
preloaders,
outlets,
outer_owner,
);
} else {
*outlets[*items].child.0.lock().or_poisoned() = None;
}
@@ -940,6 +1039,7 @@ where
outlets,
set_is_routing,
level + 1,
outer_owner,
)
} else {
*current.child.0.lock().or_poisoned() = None;

@@ -1,6 +1,6 @@
[package]
name = "leptos_router_macro"
version = { workspace = true }
version = "0.8.6"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"

Some files were not shown because too many files have changed in this diff.