Mirror of https://github.com/leptos-rs/leptos.git, synced 2025-12-28 14:52:35 -05:00.

Compare commits: `wasm-split` ... `4378` (105 commits).
Commits in this comparison (SHA1):

99c6134bdf, c025ae59ac, df46feee5d, bbf5bf9170, a453b7d1bd, 3b9ccdf57e, 27cd423ebc, b3907baf49, 9a8bb7eb75, 95db8c939e, 2bfa9952af, 4e445f43d6, 5f544f67ae, 68477d2b76, 5bd9469b93, 4bca70dc2f, d0295009cf, 3e8b5c9805, 924efa8ac1, b92a14228c, 68967fdad3, 44bc4fbc31, 646cfc12ed, 62977a68b0, e9ee90c78f, 73e728f145, 6f047a2271, 7c942b8b47, d4bf6d9cb6, 9deb96ea01, d1899cde1c, ee731d7a3a, 59cbcfa0fb, 0939cf63ad, d37512bebd, 7dd44919cf, 3eaabf85ea, d60c632c90, f5ad4f4b88, f3a053f99b, 06573cbca1, 9f4d826533, a305ae7227, 65557c5723, a529f87ee2, c0ca97e42f, 9a4e93ab07, bee2b5ea1c, 3b058e77f1, 7adb11ec49, 1af5f66ee6, 956f1836ec, b54f80f529, a48a2994ee, aedcd5148c, 9160d8aaa6, 274fe07dae, 7add26fc41, d9213850f7, db9f323f8d, 1d0f668dc3, a97eceacf1, 3d6ea6d285, 99c3d8f9e9, a394eb211f, ceb7dd8ae5, f50adc00bc, 1340deee96, 8da3011a7f, 959677f018, 03529b3992, 8bfd0ce143, 47199bbbf3, 9ed7e9de61, 26ecbf4df5, b3885c7be4, 436e5aa141, 05cafa8b06, 9e3c0cc402, 30141293f6, 8f623a2d5b, f2fe791f6b, 30dbb7ccc8, b986fe11dc, e2e28ef180, a5e0053bab, 6c04a1cd76, 87fb947465, 5ba818132a, 30b917cfc3, 6cd731cbb1, f1b6b79e27, 623ee08f82, 877849a5dd, fb59da90c2, d33f5c9e77, deb8e96eb0, 181e4d0566, 525379a9b3, 783a233167, 8079956d1b, f5d3fbb091, fbe7cdc482, 14884bc8ac, 5f2d511553
`.github/workflows/autofix.yml` (11 lines changed)

```diff
@@ -17,17 +17,10 @@ env:
 jobs:
   autofix:
     runs-on: ubuntu-latest
     timeout-minutes: 30
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - uses: actions-rust-lang/setup-rust-toolchain@v1
-        with:
-          {
-            toolchain: "nightly-2025-07-16",
-            components: "rustfmt, clippy",
-            target: "wasm32-unknown-unknown",
-            rustflags: "",
-          }
+        with: {toolchain: "nightly-2025-07-16", components: "rustfmt, clippy", target: "wasm32-unknown-unknown", rustflags: ""}
      - name: Install Glib
        run: |
          sudo apt-get update
```
`.github/workflows/ci.yml` (2 lines changed)

```diff
@@ -63,6 +63,6 @@ jobs:
           sudo apt-get update
           sudo apt-get install -y libglib2.0-dev
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
       - name: Semver Checks
         uses: obi1kenobi/cargo-semver-checks-action@v2
```
`.github/workflows/get-example-changed.yml` (4 lines changed)

```diff
@@ -19,12 +19,12 @@ jobs:
       matrix: ${{ steps.set-example-changed.outputs.matrix }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
         with:
           fetch-depth: 0
       - name: Get example files that changed
         id: changed-files
-        uses: tj-actions/changed-files@v46
+        uses: tj-actions/changed-files@v47
         with:
           files: |
             examples/**
```
`.github/workflows/get-examples-matrix.yml` (2 lines changed)

```diff
@@ -17,7 +17,7 @@ jobs:
       EXCLUDED_EXAMPLES: cargo-make
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
       - name: Install jq
         run: sudo apt-get install jq
       - name: Set Matrix
```
`.github/workflows/get-leptos-changed.yml` (4 lines changed)

```diff
@@ -13,12 +13,12 @@ jobs:
       leptos_changed: ${{ steps.set-source-changed.outputs.leptos_changed }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
         with:
           fetch-depth: 0
       - name: Get source files that changed
         id: changed-source
-        uses: tj-actions/changed-files@v46
+        uses: tj-actions/changed-files@v47
         with:
           files_ignore: |
             .*/**/*
```
`.github/workflows/get-leptos-matrix.yml` (2 lines changed)

```diff
@@ -13,7 +13,7 @@ jobs:
       matrix: ${{ steps.set-matrix.outputs.matrix }}
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
       - name: Install jq
         run: sudo apt-get install jq
       - name: Set Matrix
```
`.github/workflows/publish-book.yml` (2 lines changed)

```diff
@@ -12,7 +12,7 @@ jobs:
       contents: write # To push a branch
       pull-requests: write # To create a PR from that branch
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
        with:
          fetch-depth: 0
      - name: Install mdbook
```
`.github/workflows/run-cargo-make-task.yml` (4 lines changed)

```diff
@@ -53,7 +53,7 @@ jobs:
         run: |
           sudo apt-get update
           sudo apt-get install -y libglib2.0-dev
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
       - name: Setup Rust
         uses: dtolnay/rust-toolchain@master
         with:
@@ -88,7 +88,7 @@ jobs:
         run: trunk --version
       - name: Install Node.js
         if: contains(inputs.directory, 'examples')
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v5
         with:
           node-version: 20
       - uses: pnpm/action-setup@v4
```
`Cargo.lock` (generated, 988 lines changed): diff suppressed because it is too large.
`Cargo.toml` (100 lines changed)

```diff
@@ -40,7 +40,6 @@ members = [
 exclude = ["benchmarks", "examples", "projects"]
 
 [workspace.package]
-version = "0.8.4"
 edition = "2021"
 rust-version = "1.88"
@@ -51,39 +50,39 @@ any_spawner = { path = "./any_spawner/", version = "0.3.0" }
 const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
 either_of = { path = "./either_of/", version = "0.1.6" }
 hydration_context = { path = "./hydration_context", version = "0.3.0" }
-leptos = { path = "./leptos", version = "0.8.4" }
-leptos_config = { path = "./leptos_config", version = "0.8.4" }
-leptos_dom = { path = "./leptos_dom", version = "0.8.4" }
-leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.4" }
-leptos_integration_utils = { path = "./integrations/utils", version = "0.8.4" }
-leptos_macro = { path = "./leptos_macro", version = "0.8.4" }
-leptos_router = { path = "./router", version = "0.8.4" }
-leptos_router_macro = { path = "./router_macro", version = "0.8.4" }
-leptos_server = { path = "./leptos_server", version = "0.8.4" }
-leptos_meta = { path = "./meta", version = "0.8.4" }
+leptos = { path = "./leptos", version = "0.8.10" }
+leptos_config = { path = "./leptos_config", version = "0.8.7" }
+leptos_dom = { path = "./leptos_dom", version = "0.8.7" }
+leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.5" }
+leptos_integration_utils = { path = "./integrations/utils", version = "0.8.6" }
+leptos_macro = { path = "./leptos_macro", version = "0.8.9" }
+leptos_router = { path = "./router", version = "0.8.8" }
+leptos_router_macro = { path = "./router_macro", version = "0.8.5" }
+leptos_server = { path = "./leptos_server", version = "0.8.5" }
+leptos_meta = { path = "./meta", version = "0.8.5" }
 next_tuple = { path = "./next_tuple", version = "0.1.0" }
-oco_ref = { path = "./oco", version = "0.2.0" }
+oco_ref = { path = "./oco", version = "0.2.1" }
 or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
-reactive_graph = { path = "./reactive_graph", version = "0.2.4" }
-reactive_stores = { path = "./reactive_stores", version = "0.2.4" }
-reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.4" }
-server_fn = { path = "./server_fn", version = "0.8.4" }
-server_fn_macro = { path = "./server_fn_macro", version = "0.8.4" }
-server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.4" }
-tachys = { path = "./tachys", version = "0.2.5" }
-wasm_split_helpers = { path = "./wasm_split", version = "0.1.0" }
-wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.0" }
+reactive_graph = { path = "./reactive_graph", version = "0.2.8" }
+reactive_stores = { path = "./reactive_stores", version = "0.2.5" }
+reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.6" }
+server_fn = { path = "./server_fn", version = "0.8.7" }
+server_fn_macro = { path = "./server_fn_macro", version = "0.8.7" }
+server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.5" }
+tachys = { path = "./tachys", version = "0.2.9" }
+wasm_split_helpers = { path = "./wasm_split", version = "0.1.2" }
+wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.3" }
 
 # members deps
 async-once-cell = { default-features = false, version = "0.5.3" }
 itertools = { default-features = false, version = "0.14.0" }
 convert_case = { default-features = false, version = "0.8.0" }
-serde_json = { default-features = false, version = "1.0.140" }
-trybuild = { default-features = false, version = "1.0.106" }
-typed-builder = { default-features = false, version = "0.21.0" }
-thiserror = { default-features = false, version = "2.0.12" }
+serde_json = { default-features = false, version = "1.0.143" }
+trybuild = { default-features = false, version = "1.0.110" }
+typed-builder = { default-features = false, version = "0.21.2" }
+thiserror = { default-features = false, version = "2.0.16" }
 wasm-bindgen = { default-features = false, version = "0.2.100" }
-indexmap = { default-features = false, version = "2.9.0" }
+indexmap = { default-features = false, version = "2.11.0" }
 rstml = { default-features = false, version = "0.12.1" }
 rustc_version = { default-features = false, version = "0.4.1" }
 guardian = { default-features = false, version = "1.3.0" }
@@ -101,17 +100,17 @@ proc-macro-error2 = { default-features = false, version = "2.0.1" }
 const_format = { default-features = false, version = "0.2.34" }
 gloo-net = { default-features = false, version = "0.6.0" }
 url = { default-features = false, version = "2.5.4" }
-tokio = { default-features = false, version = "1.46.1" }
+tokio = { default-features = false, version = "1.47.1" }
 base64 = { default-features = false, version = "0.22.1" }
-cfg-if = { default-features = false, version = "1.0.0" }
+cfg-if = { default-features = false, version = "1.0.3" }
 wasm-bindgen-futures = { default-features = false, version = "0.4.50" }
 tower = { default-features = false, version = "0.5.2" }
-proc-macro2 = { default-features = false, version = "1.0.95" }
+proc-macro2 = { default-features = false, version = "1.0.101" }
 serde = { default-features = false, version = "1.0.219" }
 parking_lot = { default-features = false, version = "0.12.4" }
 axum = { default-features = false, version = "0.8.4" }
 serde_qs = { default-features = false, version = "0.15.0" }
-syn = { default-features = false, version = "2.0.104" }
+syn = { default-features = false, version = "2.0.106" }
 xxhash-rust = { default-features = false, version = "0.8.15" }
 paste = { default-features = false, version = "1.0.15" }
 quote = { default-features = false, version = "1.0.40" }
@@ -123,54 +122,57 @@ tokio-tungstenite = { default-features = false, version = "0.27.0" }
 serial_test = { default-features = false, version = "3.2.0" }
 erased = { default-features = false, version = "0.1.2" }
 glib = { default-features = false, version = "0.20.12" }
-async-trait = { default-features = false, version = "0.1.88" }
+async-trait = { default-features = false, version = "0.1.89" }
 typed-builder-macro = { default-features = false, version = "0.21.0" }
 linear-map = { default-features = false, version = "1.2.0" }
-anyhow = { default-features = false, version = "1.0.98" }
+anyhow = { default-features = false, version = "1.0.100" }
 walkdir = { default-features = false, version = "2.5.0" }
 actix-ws = { default-features = false, version = "0.3.0" }
 tower-http = { default-features = false, version = "0.6.4" }
-prettyplease = { default-features = false, version = "0.2.35" }
-inventory = { default-features = false, version = "0.3.20" }
-config = { default-features = false, version = "0.15.13" }
-camino = { default-features = false, version = "1.1.9" }
+prettyplease = { default-features = false, version = "0.2.37" }
+inventory = { default-features = false, version = "0.3.21" }
+config = { default-features = false, version = "0.15.14" }
+camino = { default-features = false, version = "1.1.11" }
 ciborium = { default-features = false, version = "0.2.2" }
 multer = { default-features = false, version = "3.1.0" }
 leptos-spin-macro = { default-features = false, version = "0.2.0" }
 sledgehammer_utils = { default-features = false, version = "0.3.1" }
 sledgehammer_bindgen = { default-features = false, version = "0.6.0" }
 wasm-streams = { default-features = false, version = "0.4.2" }
-rkyv = { default-features = false, version = "0.8.10" }
+rkyv = { default-features = false, version = "0.8.11" }
 temp-env = { default-features = false, version = "0.3.6" }
-uuid = { default-features = false, version = "1.17.0" }
+uuid = { default-features = false, version = "1.18.0" }
 bytes = { default-features = false, version = "1.10.1" }
 http = { default-features = false, version = "1.3.1" }
-regex = { default-features = false, version = "1.11.1" }
+regex = { default-features = false, version = "1.11.2" }
 drain_filter_polyfill = { default-features = false, version = "0.1.3" }
-tempfile = { default-features = false, version = "3.20.0" }
-futures-lite = { default-features = false, version = "2.6.0" }
+tempfile = { default-features = false, version = "3.21.0" }
+futures-lite = { default-features = false, version = "2.6.1" }
 log = { default-features = false, version = "0.4.27" }
-percent-encoding = { default-features = false, version = "2.3.1" }
+percent-encoding = { default-features = false, version = "2.3.2" }
 async-executor = { default-features = false, version = "1.13.2" }
-const-str = { default-features = false, version = "0.6.3" }
+const-str = { default-features = false, version = "0.6.4" }
 http-body-util = { default-features = false, version = "0.1.3" }
-hyper = { default-features = false, version = "1.6.0" }
-postcard = { default-features = false, version = "1.1.1" }
+hyper = { default-features = false, version = "1.7.0" }
+postcard = { default-features = false, version = "1.1.3" }
 rmp-serde = { default-features = false, version = "1.3.0" }
-reqwest = { default-features = false, version = "0.12.22" }
+reqwest = { default-features = false, version = "0.12.23" }
 tower-layer = { default-features = false, version = "0.3.3" }
 attribute-derive = { default-features = false, version = "0.10.3" }
 insta = { default-features = false, version = "1.43.1" }
 codee = { default-features = false, version = "0.3.0" }
-actix-http = { default-features = false, version = "3.11.0" }
+actix-http = { default-features = false, version = "3.11.1" }
 wasm-bindgen-test = { default-features = false, version = "0.3.50" }
-rustversion = { default-features = false, version = "1.0.21" }
+rustversion = { default-features = false, version = "1.0.22" }
 getrandom = { default-features = false, version = "0.3.3" }
 actix-files = { default-features = false, version = "0.6.6" }
-async-lock = { default-features = false, version = "3.4.0" }
+async-lock = { default-features = false, version = "3.4.1" }
 base16 = { default-features = false, version = "0.2.1" }
 digest = { default-features = false, version = "0.10.7" }
 sha2 = { default-features = false, version = "0.10.8" }
+subsecond = { default-features = false, version = "0.7.0-rc.0" }
+dioxus-cli-config = { default-features = false, version = "0.7.0-rc.0" }
+dioxus-devtools = { default-features = false, version = "0.7.0-rc.0" }
 
 [profile.release]
 codegen-units = 1
```
README changes:

```diff
@@ -95,7 +95,7 @@ Here are some resources for learning more about Leptos:
 [`cargo-leptos`](https://github.com/leptos-rs/cargo-leptos) is a build tool that's designed to make it easy to build apps that run on both the client and the server, with seamless integration. The best way to get started with a real Leptos project right now is to use `cargo-leptos` and our starter templates for [Actix](https://github.com/leptos-rs/start) or [Axum](https://github.com/leptos-rs/start-axum).
 
 ```bash
-cargo install cargo-leptos
+cargo install cargo-leptos --locked
 cargo leptos new --git https://github.com/leptos-rs/start-axum
 cd [your project name]
 cargo leptos watch
@@ -305,7 +305,10 @@ impl LazyRoute for ViewD {
 }
 }
 
+// Server functions can be made lazy by combining the two macros,
+// with `#[server]` coming first, then `#[lazy]`
 #[server]
+#[lazy]
 async fn d_data() -> Result<Vec<i32>, ServerFnError> {
     tokio::time::sleep(std::time::Duration::from_millis(250)).await;
     Ok(vec![1, 1, 2, 3, 5, 8, 13])
```
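To show how a lazy server function like `d_data` is then consumed, here is a minimal sketch of a `LazyRoute` implementation, modeled on the `issue_4285` regression example further down this page. The `ViewD` struct and its `data` field are assumptions based on the README excerpt above, not code copied from the repository:

```rust
use leptos::prelude::*;
use leptos_router::LazyRoute;

// Hypothetical route struct, named after the README's `ViewD`.
struct ViewD {
    data: Resource<Result<Vec<i32>, ServerFnError>>,
}

impl LazyRoute for ViewD {
    fn data() -> Self {
        // Start loading immediately; the lazily loaded view code awaits it later.
        Self {
            data: Resource::new(|| (), |_| d_data()),
        }
    }

    async fn view(this: Self) -> AnyView {
        let ViewD { data } = this;
        view! {
            <Suspense>
                {move || Suspend::new(async move {
                    let data = data.await;
                    view! { <p>{format!("{data:?}")}</p> }
                })}
            </Suspense>
        }
        .into_any()
    }
}
```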
`examples/regression/e2e/features/issue_4005.feature` (new file, 7 lines)

```gherkin
@check_issue_4005
Feature: Check that issue 4005 does not reappear

  Scenario: The second item is selected.
    Given I see the app
    And I can access regression test 4005
    Then I see the value of select is 2
```

`examples/regression/e2e/features/issue_4217.feature` (new file, 9 lines)

```gherkin
@check_issue_4217
Feature: Check that issue 4217 does not reappear

  Scenario: All items are selected.
    Given I see the app
    And I can access regression test 4217
    Then I see option1 is selected
    And I see option2 is selected
    And I see option3 is selected
```

`examples/regression/e2e/features/issue_4251.feature` (new file, 13 lines)

```gherkin
@check_issue_4251
Feature: Check that issue 4251 does not reappear

  Scenario: Clicking a link to the same page you’re currently on should not add the page to the history stack.
    Given I see the app
    And I can access regression test 4324
    When I select the link This page
    And I select the link This page
    And I select the link This page
    Then I see the result is the string Issue4324
    When I press the back button
    And I select the link 4324
    Then I see the result is the string Issue4324
```

`examples/regression/e2e/features/issue_4285.feature` (new file, 9 lines)

```gherkin
@check_issue_4285
Feature: Check that issue 4285 does not reappear

  Scenario: Navigating several times to same lazy route does not cause issues.
    Given I see the app
    And I can access regression test 4285
    And I can access regression test 4285
    And I can access regression test 4285
    Then I see the result is the string 42
```

`examples/regression/e2e/features/issue_4296.feature` (new file, 18 lines)

```gherkin
@check_issue_4296
Feature: Check that issue 4296 does not reappear

  Scenario: Query param signals created in LazyRoute::data() are reactive in ::view().
    Given I see the app
    And I can access regression test 4296
    Then I see the result is the string None
    When I select the link abc
    Then I see the result is the string Some("abc")
    When I select the link def
    Then I see the result is the string Some("def")

  Scenario: Loading page with query signal works as well.
    Given I see the app
    And I can access regression test 4296
    When I select the link abc
    When I reload the page
    Then I see the result is the string Some("abc")
```

`examples/regression/e2e/features/issue_4324.feature` (new file, 11 lines)

```gherkin
@check_issue_4324
Feature: Check that issue 4324 does not reappear

  Scenario: Navigating to the same page after clicking "Back" should set the URL correctly
    Given I see the app
    And I can access regression test 4324
    Then I see the path is /4324/
    When I press the back button
    Then I see the path is /
    When I select the link 4324
    Then I see the path is /4324/
```
`examples/regression/e2e/tests/fixtures/check.rs` (35 lines changed)

```diff
@@ -18,3 +18,38 @@ pub async fn element_exists(client: &Client, id: &str) -> Result<()> {
         .expect(&format!("could not find element with id `{id}`"));
     Ok(())
 }
+
+pub async fn select_option_is_selected(
+    client: &Client,
+    id: &str,
+) -> Result<()> {
+    let el = find::element_by_id(client, id)
+        .await
+        .expect(&format!("could not find element with id `{id}`"));
+    let selected = el.prop("selected").await?;
+    assert_eq!(selected.as_deref(), Some("true"));
+    Ok(())
+}
+
+pub async fn element_value_is(
+    client: &Client,
+    id: &str,
+    expected: &str,
+) -> Result<()> {
+    let el = find::element_by_id(client, id)
+        .await
+        .expect(&format!("could not find element with id `{id}`"));
+    let value = el.prop("value").await?;
+    assert_eq!(value.as_deref(), Some(expected));
+    Ok(())
+}
+
+pub async fn path_is(client: &Client, expected_path: &str) -> Result<()> {
+    let url = client
+        .current_url()
+        .await
+        .expect("could not access current URL");
+    let path = url.path();
+    assert_eq!(expected_path, path);
+    Ok(())
+}
```

Additional e2e step-definition hunks:

```diff
@@ -45,3 +45,12 @@ async fn i_refresh_the_browser(world: &mut AppWorld) -> Result<()> {
 
     Ok(())
 }
+
+#[given(regex = "^I press the back button$")]
+#[when(regex = "^I press the back button$")]
+async fn i_go_back(world: &mut AppWorld) -> Result<()> {
+    let client = &world.client;
+    client.back().await?;
+
+    Ok(())
+}
@@ -25,3 +25,28 @@ async fn i_see_the_navbar(world: &mut AppWorld) -> Result<()> {
     check::element_exists(client, "nav").await?;
     Ok(())
 }
+
+#[then(regex = r"^I see ([\d\w]+) is selected$")]
+async fn i_see_the_select(world: &mut AppWorld, id: String) -> Result<()> {
+    let client = &world.client;
+    check::select_option_is_selected(client, &id).await?;
+    Ok(())
+}
+
+#[then(regex = r"^I see the value of (\w+) is (.*)$")]
+async fn i_see_the_value(
+    world: &mut AppWorld,
+    id: String,
+    value: String,
+) -> Result<()> {
+    let client = &world.client;
+    check::element_value_is(client, &id, &value).await?;
+    Ok(())
+}
+
+#[then(regex = r"^I see the path is (.*)$")]
+async fn i_see_the_path(world: &mut AppWorld, path: String) -> Result<()> {
+    let client = &world.client;
+    check::path_is(client, &path).await?;
+    Ok(())
+}
```
Regression app shell changes:

```diff
@@ -1,4 +1,8 @@
-use crate::{issue_4088::Routes4088, pr_4015::Routes4015, pr_4091::Routes4091};
+use crate::{
+    issue_4005::Routes4005, issue_4088::Routes4088, issue_4217::Routes4217,
+    issue_4285::Routes4285, issue_4296::Routes4296, issue_4324::Routes4324,
+    pr_4015::Routes4015, pr_4091::Routes4091,
+};
 use leptos::prelude::*;
 use leptos_meta::{MetaTags, *};
 use leptos_router::{
@@ -28,15 +32,22 @@ pub fn shell(options: LeptosOptions) -> impl IntoView {
 pub fn App() -> impl IntoView {
     provide_meta_context();
     let fallback = || view! { "Page not found." }.into_view();
+    let (_, set_is_routing) = signal(false);
 
     view! {
         <Stylesheet id="leptos" href="/pkg/regression.css"/>
-        <Router>
+        <Router set_is_routing>
             <main>
                 <Routes fallback>
                     <Route path=path!("") view=HomePage/>
                     <Routes4091/>
                     <Routes4015/>
                     <Routes4088/>
+                    <Routes4217/>
+                    <Routes4005/>
+                    <Routes4285/>
+                    <Routes4296/>
+                    <Routes4324/>
                 </Routes>
             </main>
         </Router>
@@ -59,6 +70,11 @@ fn HomePage() -> impl IntoView {
             <li><a href="/4091/">"4091"</a></li>
             <li><a href="/4015/">"4015"</a></li>
             <li><a href="/4088/">"4088"</a></li>
+            <li><a href="/4217/">"4217"</a></li>
+            <li><a href="/4005/">"4005"</a></li>
+            <li><a href="/4285/">"4285"</a></li>
+            <li><a href="/4296/">"4296"</a></li>
+            <li><a href="/4324/">"4324"</a></li>
         </ul>
     </nav>
 }
```
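For context on the `set_is_routing` prop used above: the getter half of that signal is typically read to drive an indicator while navigation is in flight. A minimal hedged sketch of that pattern; this `Shell` component is illustrative, not part of the regression app, and the route tree is elided:

```rust
use leptos::prelude::*;
use leptos_router::components::Router;

#[component]
fn Shell() -> impl IntoView {
    // The router flips this signal on while a navigation is in progress.
    let (is_routing, set_is_routing) = signal(false);
    view! {
        <Show when=move || is_routing.get()>
            <p>"Navigating..."</p>
        </Show>
        <Router set_is_routing>
            <main></main>
        </Router>
    }
}
```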
`examples/regression/src/issue_4005.rs` (new file, 24 lines)

```rust
use leptos::prelude::*;
#[allow(unused_imports)]
use leptos_router::{
    components::Route, path, MatchNestedRoutes, NavigateOptions,
};

#[component]
pub fn Routes4005() -> impl MatchNestedRoutes + Clone {
    view! {
        <Route path=path!("4005") view=Issue4005/>
    }
    .into_inner()
}

#[component]
fn Issue4005() -> impl IntoView {
    view! {
        <select id="select" prop:value="2">
            <option value="1">"Option 1"</option>
            <option value="2">"Option 2"</option>
            <option value="3">"Option 3"</option>
        </select>
    }
}
```

`examples/regression/src/issue_4217.rs` (new file, 24 lines)

```rust
use leptos::prelude::*;
#[allow(unused_imports)]
use leptos_router::{
    components::Route, path, MatchNestedRoutes, NavigateOptions,
};

#[component]
pub fn Routes4217() -> impl MatchNestedRoutes + Clone {
    view! {
        <Route path=path!("4217") view=Issue4217/>
    }
    .into_inner()
}

#[component]
fn Issue4217() -> impl IntoView {
    view! {
        <select multiple=true>
            <option id="option1" value="1" selected>"Option 1"</option>
            <option id="option2" value="2" selected>"Option 2"</option>
            <option id="option3" value="3" selected>"Option 3"</option>
        </select>
    }
}
```

`examples/regression/src/issue_4285.rs` (new file, 49 lines)

```rust
use leptos::prelude::*;
use leptos_router::LazyRoute;
#[allow(unused_imports)]
use leptos_router::{
    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
};

#[component]
pub fn Routes4285() -> impl MatchNestedRoutes + Clone {
    view! {
        <Route path=path!("4285") view={Lazy::<Issue4285>::new()}/>
    }
    .into_inner()
}

struct Issue4285 {
    data: Resource<Result<i32, ServerFnError>>,
}

impl LazyRoute for Issue4285 {
    fn data() -> Self {
        Self {
            data: Resource::new(|| (), |_| slow_call()),
        }
    }

    async fn view(this: Self) -> AnyView {
        let Issue4285 { data } = this;
        view! {
            <Suspense>
                {move || {
                    Suspend::new(async move {
                        let data = data.await;
                        view! {
                            <p id="result">{data}</p>
                        }
                    })
                }}
            </Suspense>
        }
        .into_any()
    }
}

#[server]
async fn slow_call() -> Result<i32, ServerFnError> {
    tokio::time::sleep(std::time::Duration::from_millis(250)).await;
    Ok(42)
}
```

`examples/regression/src/issue_4296.rs` (new file, 36 lines)

```rust
use leptos::prelude::*;
#[allow(unused_imports)]
use leptos_router::{
    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
};
use leptos_router::{hooks::use_query_map, LazyRoute};

#[component]
pub fn Routes4296() -> impl MatchNestedRoutes + Clone {
    view! {
        <Route path=path!("4296") view={Lazy::<Issue4296>::new()}/>
    }
    .into_inner()
}

struct Issue4296 {
    query: Signal<Option<String>>,
}

impl LazyRoute for Issue4296 {
    fn data() -> Self {
        let query = use_query_map();
        let query = Signal::derive(move || query.read().get("q"));
        Self { query }
    }

    async fn view(this: Self) -> AnyView {
        let Issue4296 { query } = this;
        view! {
            <a href="?q=abc">"abc"</a>
            <a href="?q=def">"def"</a>
            <p id="result">{move || format!("{:?}", query.get())}</p>
        }
        .into_any()
    }
}
```

`examples/regression/src/issue_4324.rs` (new file, 21 lines)

```rust
use leptos::prelude::*;
#[allow(unused_imports)]
use leptos_router::{
    components::Route, path, Lazy, MatchNestedRoutes, NavigateOptions,
};

#[component]
pub fn Routes4324() -> impl MatchNestedRoutes + Clone {
    view! {
        <Route path=path!("4324") view=Issue4324/>
    }
    .into_inner()
}

#[component]
pub fn Issue4324() -> impl IntoView {
    view! {
        <a href="/4324/">"This page"</a>
        <p id="result">"Issue4324"</p>
    }
}
```

And the corresponding module declarations:

```diff
@@ -1,5 +1,10 @@
 pub mod app;
+mod issue_4005;
 mod issue_4088;
+mod issue_4217;
+mod issue_4285;
+mod issue_4296;
+mod issue_4324;
 mod pr_4015;
 mod pr_4091;
```
`examples/subsecond_hot_patch/.gitignore` (new file, 7 lines)

```
# Generated by Cargo
# will have compiled files and executables
/target
.DS_Store

# These are backup files generated by rustfmt
**/*.rs.bk
```

`examples/subsecond_hot_patch/Cargo.toml` (new file, 13 lines)

```toml
[package]
name = "subsecond_hot_patch"
version = "0.1.0"
authors = ["Greg Johnston <greg.johnston@gmail.com>"]
edition = "2021"

[dependencies]
leptos = { path = "../../leptos", features = ["csr", "subsecond"] }
leptos_router = { path = "../../router" }

[features]
default = ["web"]
web = []
```

`examples/subsecond_hot_patch/Dioxus.toml` (new file, 21 lines)

```toml
[application]

[web.app]

# HTML title tag content
title = "ltest"

# include `assets` in web platform
[web.resource]

# Additional CSS style files
style = []

# Additional JavaScript files
script = []

[web.resource.dev]

# Javascript code file
# serve: [dev-server] only
script = []
```

`examples/subsecond_hot_patch/Makefile.toml` (new file, 1 line)

```toml
extend = [{ path = "../cargo-make/main.toml" }]
```
`examples/subsecond_hot_patch/README.md` (new file, 31 lines)

````markdown
# Hot Patching with `dx`

This is an experimental example exploring how to combine Leptos with the binary hot-patching provided by Dioxus's `subsecond` library and `dx` CLI.

### Serving Your App

This requires installing the Dioxus CLI version 0.7.0. At the time I'm writing this README, that does not yet have a stable release. Once `dioxus-cli` 0.7.0 has been released, you should use the latest stable release. Until then, I'd suggest installing from git:

```sh
cargo install dioxus-cli --git https://github.com/DioxusLabs/dioxus
```

Then you can run the example with `dx serve --hot-patch --platform web`.

### Hot Patching

Changes to your application should be reflected in your app without a full rebuild and reload.

### Limitations

Currently we only support hot-patching for reactive view functions. You probably want to use `AnyView` (via `.into_any()`) on any views that will be hot-patched, so they can be rebuilt correctly despite their types changing when the structure of the view tree changes.

If you are using `leptos_router` this actually works quite well, as every route’s view is erased to `AnyView` and the router itself is a reactive view function: in other words, changes inside any route should be hot-patched in any case.

Note that any hot-patch will cause all render effects to run again. This means that some client-side state (like the values of signals) will be wiped out.

### Build Tooling

The preference of the Dioxus team is that all hot-patching work that uses their `subsecond` also use `dioxus-cli`. As this demo shows, it's completely possible to use `dioxus-cli` to build and run a Leptos project. We do not plan to build `subsecond` into our own build tooling at this time.

**This is an experiment/POC. It is being published because members of the community have found it useful and have asked for the support to be merged in its current state. Further development and bugfixes are a relatively low priority at this time.**
````
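As a concrete illustration of the `.into_any()` advice in the README above, a minimal sketch; the `Counter` component is hypothetical and not part of this example:

```rust
use leptos::prelude::*;

// A reactive view function whose view is erased to `AnyView`, so it can be
// rebuilt cleanly when a hot patch changes the shape of the view tree.
#[component]
fn Counter() -> impl IntoView {
    let count = RwSignal::new(0);
    view! {
        <button on:click=move |_| *count.write() += 1>
            {move || count.get()}
        </button>
    }
    .into_any()
}
```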
`examples/subsecond_hot_patch/assets/favicon.ico` (new binary file, 130 KiB; not shown)

`examples/subsecond_hot_patch/assets/header.svg` (new file, 20 lines, 23 KiB; diff suppressed because one or more lines are too long)

`examples/subsecond_hot_patch/assets/main.css` (new file, 46 lines)

```css
/* App-wide styling */
body {
    background-color: #0f1116;
    color: #ffffff;
    font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
    margin: 20px;
}

#hero {
    margin: 0;
    display: flex;
    flex-direction: column;
    justify-content: center;
    align-items: center;
}

#links {
    width: 400px;
    text-align: left;
    font-size: x-large;
    color: white;
    display: flex;
    flex-direction: column;
}

#links a {
    color: white;
    text-decoration: none;
    margin-top: 20px;
    margin: 10px 0px;
    border: white 1px solid;
    border-radius: 5px;
    padding: 10px;
}

#links a:hover {
    background-color: #1f1f1f;
    cursor: pointer;
}

#header {
    max-width: 1200px;
}
```

`examples/subsecond_hot_patch/src/main.rs` (new file, 44 lines)

```rust
use leptos::{prelude::*, subsecond::connect_to_hot_patch_messages};
use leptos_router::{
    components::{Route, Router, Routes},
    path,
};

fn main() {
    // connect to DX CLI and patch the WASM binary whenever we receive a message
    connect_to_hot_patch_messages();

    // wrapping App here in a closure so we can hot-reload it, because we only do that
    // for reactive views right now. changing anything will re-run App and update the view
    mount_to_body(|| App);
}

#[component]
fn App() -> impl IntoView {
    view! {
        <nav>
            <a href="/">"Home"</a>
            <a href="/about">"About"</a>
        </nav>
        <Router>
            <Routes fallback=|| "Not found">
                <Route path=path!("/") view=HomePage/>
                <Route path=path!("/about") view=About/>
            </Routes>
        </Router>
    }
}

#[component]
fn HomePage() -> impl IntoView {
    view! {
        <h1>"Home Page"</h1>
    }
}

#[component]
fn About() -> impl IntoView {
    view! {
        <h1>"About"</h1>
    }
}
```

`examples/tailwind_csr/Trunk.toml` (new file, 3 lines)

```toml
[tools]
tailwindcss = "4.1.13"
```
```diff
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Actix integrations for the Leptos web framework."
-version = { workspace = true }
+version = "0.8.5"
 rust-version.workspace = true
 edition.workspace = true
@@ -22,10 +22,10 @@ leptos_meta = { workspace = true, features = ["nonce"] }
 leptos_router = { workspace = true, features = ["ssr"] }
 server_fn = { workspace = true, features = ["actix-no-default"] }
 tachys = { workspace = true }
-serde_json = { workspace = true , default-features = true }
+serde_json = { workspace = true, default-features = true }
 parking_lot = { workspace = true, default-features = true }
-tracing = { optional = true , workspace = true, default-features = true }
-tokio = { features = ["rt", "fs"] , workspace = true, default-features = true }
+tracing = { optional = true, workspace = true, default-features = true }
+tokio = { features = ["rt", "fs"], workspace = true, default-features = true }
 send_wrapper = { workspace = true, default-features = true }
 dashmap = { workspace = true, default-features = true }
```
```diff
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Axum integrations for the Leptos web framework."
-version = { workspace = true }
+version = "0.8.6"
 rust-version.workspace = true
 edition.workspace = true
@@ -1177,7 +1177,7 @@ where
     generate_route_list_with_exclusions_and_ssg(app_fn, None).0
 }
 
-/// Generates a list of all routes defined in Leptos's Router in your app. We can then use t.clone()his to automatically
+/// Generates a list of all routes defined in Leptos's Router in your app. We can then use this to automatically
 /// create routes in Axum's Router without having to use wildcard matching or fallbacks. Takes in your root app Element
 /// as an argument so it can walk you app tree. This version is tailored to generate Axum compatible paths.
 #[cfg_attr(
@@ -2061,10 +2061,12 @@ where
             req,
             |app, chunks, _supports_ooo| {
                 Box::pin(async move {
-                    let app = app
-                        .to_html_stream_in_order()
-                        .collect::<String>()
-                        .await;
+                    let app = if cfg!(feature = "islands-router") {
+                        app.to_html_stream_in_order_branching()
+                    } else {
+                        app.to_html_stream_in_order()
+                    };
+                    let app = app.collect::<String>().await;
                     let chunks = chunks();
                     Box::pin(once(async move { app }).chain(chunks))
                         as PinnedStream<String>
```
```diff
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
 description = "Utilities to help build server integrations for the Leptos web framework."
-version = { workspace = true }
+version = "0.8.6"
 rust-version.workspace = true
 edition.workspace = true
@@ -68,7 +68,8 @@ pub trait ExtendResponse: Sized {
             let nonce =
                 use_nonce().map(|n| n.to_string()).unwrap_or_default();
             if let Some(manifest) = use_context::<WasmSplitManifest>() {
-                let (pkg_path, manifest) = &*manifest.0.read_value();
+                let (pkg_path, manifest, wasm_split_file) =
+                    &*manifest.0.read_value();
                 let prefetches = prefetches.0.read_value();
 
                 let all_prefetches = prefetches.iter().flat_map(|key| {
@@ -90,7 +91,7 @@ pub trait ExtendResponse: Sized {
                     .to_html();
                 }
                 _ = view! {
-                    <Link rel="modulepreload" href=format!("{pkg_path}/__wasm_split.js") crossorigin=nonce/>
+                    <Link rel="modulepreload" href=format!("{pkg_path}/{wasm_split_file}") crossorigin=nonce/>
                 }
                 .to_html();
             }
```
```diff
@@ -1,6 +1,6 @@
 [package]
 name = "leptos"
-version = { workspace = true }
+version = "0.8.10"
 authors = ["Greg Johnston"]
 license = "MIT"
 repository = "https://github.com/leptos-rs/leptos"
@@ -58,6 +58,9 @@ slotmap = { workspace = true, default-features = true }
 futures = { workspace = true, default-features = true }
 send_wrapper = { workspace = true, default-features = true }
 wasm_split_helpers.workspace = true
+subsecond = { workspace = true, default-features = true, optional = true }
+dioxus-cli-config = { workspace = true, default-features = true, optional = true }
+dioxus-devtools = { workspace = true, default-features = true, optional = true }
 
 [features]
 hydration = [
@@ -102,6 +105,16 @@ trace-component-props = [
 ]
 delegation = ["tachys/delegation"]
 islands-router = ["tachys/mark_branches"]
+subsecond = [
+    "reactive_graph/subsecond",
+    "dep:subsecond",
+    "dep:dioxus-cli-config",
+    "dep:dioxus-devtools",
+    "web-sys/Location",
+    "web-sys/MessageEvent",
+    "web-sys/WebSocket",
+    "web-sys/Window",
+]
 
 [dev-dependencies]
 tokio = { features = [
```
```diff
@@ -262,6 +262,16 @@ where
     }
 }
 
+impl<C> From<View<C>> for ViewFn
+where
+    C: Clone + Send + Sync + 'static,
+    View<C>: IntoAny,
+{
+    fn from(value: View<C>) -> Self {
+        Self(Arc::new(move || value.clone().into_any()))
+    }
+}
+
 impl ViewFn {
     /// Execute the wrapped function
     pub fn run(&self) -> AnyView {
@@ -289,6 +299,16 @@ where
     }
 }
 
+impl<C> From<View<C>> for ViewFnOnce
+where
+    C: Send + Sync + 'static,
+    View<C>: IntoAny,
+{
+    fn from(value: View<C>) -> Self {
+        Self(Box::new(move || value.into_any()))
+    }
+}
+
 impl ViewFnOnce {
     /// Execute the wrapped function
     pub fn run(self) -> AnyView {
```
```diff
@@ -65,16 +65,56 @@ pub fn HydrationScripts(
     if let Some(splits) = SPLIT_MANIFEST.get_or_init(|| {
         let root = root.clone().unwrap_or_default();
 
+        let (wasm_split_js, wasm_split_manifest) = if options.hash_files {
+            let hash_path = std::env::current_exe()
+                .map(|path| {
+                    path.parent().map(|p| p.to_path_buf()).unwrap_or_default()
+                })
+                .unwrap_or_default()
+                .join(options.hash_file.as_ref());
+            let hashes = std::fs::read_to_string(&hash_path)
+                .expect("failed to read hash file");
+
+            let mut split =
+                "__wasm_split.______________________.js".to_string();
+            let mut manifest = "__wasm_split_manifest.json".to_string();
+            for line in hashes.lines() {
+                let line = line.trim();
+                if !line.is_empty() {
+                    if let Some((file, hash)) = line.split_once(':') {
+                        if file == "manifest" {
+                            manifest.clear();
+                            manifest.push_str("__wasm_split_manifest.");
+                            manifest.push_str(hash.trim());
+                            manifest.push_str(".json");
+                        }
+                        if file == "split" {
+                            split.clear();
+                            split.push_str("__wasm_split.");
+                            split.push_str(hash.trim());
+                            split.push_str(".js");
+                        }
+                    }
+                }
+            }
+            (split, manifest)
+        } else {
+            (
+                "__wasm_split.______________________.js".to_string(),
+                "__wasm_split_manifest.json".to_string(),
+            )
+        };
+
         let site_dir = &options.site_root;
         let pkg_dir = &options.site_pkg_dir;
         let path = PathBuf::from(site_dir.to_string());
-        let path = path
-            .join(pkg_dir.to_string())
-            .join("__wasm_split_manifest.json");
+        let path = path.join(pkg_dir.to_string()).join(wasm_split_manifest);
         let file = std::fs::read_to_string(path).ok()?;
 
         let manifest = WasmSplitManifest(ArcStoredValue::new((
             format!("{root}/{pkg_dir}"),
             serde_json::from_str(&file).expect("could not read manifest file"),
+            wasm_split_js,
         )));
 
         Some(manifest)
```
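For reference, the hash-file parsing above expects one `name:hash` pair per line, keyed by `split` and `manifest`. A standalone sketch of that expectation; the hash values are placeholders, not real build output:

```rust
fn main() {
    // Hypothetical contents of the hash file read by the code above.
    let hashes = "split:abc123\nmanifest:def456";

    let mut split = "__wasm_split.______________________.js".to_string();
    let mut manifest = "__wasm_split_manifest.json".to_string();
    for line in hashes.lines() {
        if let Some((file, hash)) = line.trim().split_once(':') {
            match file {
                "split" => split = format!("__wasm_split.{}.js", hash.trim()),
                "manifest" => {
                    manifest =
                        format!("__wasm_split_manifest.{}.json", hash.trim())
                }
                _ => {}
            }
        }
    }
    assert_eq!(split, "__wasm_split.abc123.js");
    assert_eq!(manifest, "__wasm_split_manifest.def456.json");
}
```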
```diff
@@ -1,3 +1,7 @@
+if (window.location.protocol === 'https:') {
+  protocol = 'wss://';
+}
+
 let host = window.location.hostname;
 let ws = new WebSocket(`${protocol}${host}:${reload_port}/live_reload`);
 ws.onmessage = (ev) => {
```
```diff
@@ -1,5 +1,4 @@
 #![deny(missing_docs)]
-#![forbid(unsafe_code)]
 
 //! # About Leptos
 //!
@@ -85,12 +84,22 @@
 //! # Feature Flags
 //!
 //! - **`nightly`**: On `nightly` Rust, enables the function-call syntax for signal getters and setters.
 //!   Also enables some experimental optimizations that improve the handling of static strings and
 //!   the performance of the `template! {}` macro.
 //! - **`csr`** Client-side rendering: Generate DOM nodes in the browser.
 //! - **`ssr`** Server-side rendering: Generate an HTML string (typically on the server).
 //! - **`islands`** Activates “islands mode,” in which components are not made interactive on the
 //!   client unless they use the `#[island]` macro.
 //! - **`hydrate`** Hydration: use this to add interactivity to an SSRed Leptos app.
-//! - **`rkyv`** In SSR/hydrate mode, uses [`rkyv`](https://docs.rs/rkyv/latest/rkyv/) to serialize resources and send them
-//!   from the server to the client.
+//! - **`nonce`** Adds support for nonces to be added as part of a Content Security Policy.
+//! - **`rkyv`** In SSR/hydrate mode, enables using [`rkyv`](https://docs.rs/rkyv/latest/rkyv/) to serialize resources.
 //! - **`tracing`** Adds support for [`tracing`](https://docs.rs/tracing/latest/tracing/).
 //! - **`trace-component-props`** Adds `tracing` support for component props.
 //! - **`delegation`** Uses event delegation rather than the browser’s native event handling
 //!   system. (This improves the performance of creating large numbers of elements simultaneously,
 //!   in exchange for occasional edge cases in which events behave differently from native browser
 //!   events.)
 //! - **`rustls`** Use `rustls` for server functions.
 //!
 //! **Important Note:** You must enable one of `csr`, `hydrate`, or `ssr` to tell Leptos
 //! which mode your app is operating in. You should only enable one of these per build target,
@@ -215,12 +224,15 @@ pub mod error {
 
 /// Control-flow components like `<Show>`, `<For>`, and `<Await>`.
 pub mod control_flow {
-    pub use crate::{animated_show::*, await_::*, for_loop::*, show::*};
+    pub use crate::{
+        animated_show::*, await_::*, for_loop::*, show::*, show_let::*,
+    };
 }
 mod animated_show;
 mod await_;
 mod for_loop;
 mod show;
+mod show_let;
 
 /// A component that allows rendering a component somewhere else.
 pub mod portal;
@@ -293,6 +305,10 @@ pub use tachys::mathml as math;
 #[doc(inline)]
 pub use tachys::svg;
 
+#[cfg(feature = "subsecond")]
+/// Utilities for using binary hot-patching with [`subsecond`].
+pub mod subsecond;
+
 /// Utilities for simple isomorphic logging to the console or terminal.
 pub mod logging {
     pub use leptos_dom::{debug_warn, error, log, warn};
@@ -301,12 +317,17 @@ pub mod logging {
 /// Utilities for working with asynchronous tasks.
 pub mod task {
     use any_spawner::Executor;
+    use reactive_graph::computed::ScopedFuture;
     use std::future::Future;
 
     /// Spawns a thread-safe [`Future`].
+    ///
+    /// This will be run with the current reactive owner and observer using a [`ScopedFuture`].
     #[track_caller]
     #[inline(always)]
     pub fn spawn(fut: impl Future<Output = ()> + Send + 'static) {
+        let fut = ScopedFuture::new(fut);
+
         #[cfg(not(target_family = "wasm"))]
         Executor::spawn(fut);
 
@@ -348,6 +369,7 @@ pub use web_sys;
 
 #[doc(hidden)]
 pub mod __reexports {
+    pub use send_wrapper;
     pub use wasm_bindgen_futures;
 }
 
@@ -373,7 +395,8 @@ pub fn prefetch_lazy_fn_on_server(id: &'static str) {
 #[derive(Clone, Debug, Default)]
 pub struct WasmSplitManifest(
     pub reactive_graph::owner::ArcStoredValue<(
-        String,
-        std::collections::HashMap<String, Vec<String>>,
+        String, // the pkg root
+        std::collections::HashMap<String, Vec<String>>, // preloads
+        String, // the name of the __wasm_split.js file
     )>,
 );
```
162
leptos/src/show_let.rs
Normal file
162
leptos/src/show_let.rs
Normal file
@@ -0,0 +1,162 @@
|
||||
use crate::{children::ViewFn, IntoView};
|
||||
use leptos_macro::component;
|
||||
use reactive_graph::traits::Get;
|
||||
use std::{marker::PhantomData, sync::Arc};
|
||||
use tachys::either::Either;
|
||||
|
||||
/// Like `<Show>` but for `Option`. This is a shortcut for
|
||||
///
|
||||
/// ```ignore
|
||||
/// value.map(|value| {
|
||||
/// view! { ... }
|
||||
/// })
|
||||
/// ```
|
||||
///
|
||||
/// If you specify a `fallback` it is equvalent to
|
||||
///
|
||||
/// ```ignore
|
||||
/// value
|
||||
/// .map(
|
||||
/// |value| children(value),
|
||||
/// )
|
||||
/// .unwrap_or_else(fallback)
|
||||
/// ```
|
||||
///
|
||||
/// ## Example
|
||||
///
|
||||
/// ```
|
||||
/// # use leptos::prelude::*;
|
||||
/// #
|
||||
/// # #[component]
|
||||
/// # pub fn Example() -> impl IntoView {
|
||||
/// let (opt_value, set_opt_value) = signal(None::<i32>);
|
||||
///
|
||||
/// view! {
|
||||
/// <ShowLet some=opt_value let:value>
|
||||
/// "We have a value: " {value}
|
||||
/// </ShowLet>
|
||||
/// }
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// You can also specify a fallback:
|
||||
/// ```
|
||||
/// # use leptos::prelude::*;
|
||||
/// #
|
||||
/// # #[component]
|
||||
/// # pub fn Example() -> impl IntoView {
|
||||
/// let (opt_value, set_opt_value) = signal(None::<i32>);
|
||||
///
|
||||
/// view! {
|
||||
/// <ShowLet some=opt_value let:value fallback=|| "Got nothing">
|
||||
/// "We have a value: " {value}
|
||||
/// </ShowLet>
|
||||
/// }
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// In addition to signals you can also use a closure that returns an `Option`:
///
/// ```
/// # use leptos::prelude::*;
/// #
/// # #[component]
/// # pub fn Example() -> impl IntoView {
/// let (opt_value, set_opt_value) = signal(None::<i32>);
///
/// view! {
///     <ShowLet some=move || opt_value.get().map(|v| v * 2) let:value>
///         "We have a value: " {value}
///     </ShowLet>
/// }
/// # }
/// ```
#[component]
pub fn ShowLet<T, ChFn, V, M>(
    /// The children will be shown whenever `value` is `Some`.
    ///
    /// They take the inner value as an argument. Use `let:` to bind the value to a variable.
    children: ChFn,

    /// A signal of type `Option` or a closure that returns an `Option`.
    /// If the value is `Some`, the children will be shown.
    /// Otherwise the fallback will be shown, if present.
    some: impl IntoOptionGetter<T, M>,

    /// A closure that returns what gets rendered when the value is `None`.
    /// By default this is the empty view.
    ///
    /// You can think of it as the closure inside `.unwrap_or_else(|| fallback())`.
    #[prop(optional, into)]
    fallback: ViewFn,

    /// Marker for generic parameters. Ignore this.
    #[prop(optional)]
    _marker: PhantomData<(T, M)>,
) -> impl IntoView
where
    ChFn: Fn(T) -> V + Send + Clone + 'static,
    V: IntoView + 'static,
    T: 'static,
{
    let getter = some.into_option_getter();

    move || {
        let children = children.clone();
        let fallback = fallback.clone();

        getter
            .run()
            .map(move |t| Either::Left(children(t)))
            .unwrap_or_else(move || Either::Right(fallback.run()))
    }
}

/// Serves as a wrapper for both an `Option` signal and a closure that returns an `Option`.
pub struct OptionGetter<T>(Arc<dyn Fn() -> Option<T> + Send + Sync + 'static>);

impl<T> Clone for OptionGetter<T> {
    fn clone(&self) -> Self {
        Self(Arc::clone(&self.0))
    }
}

impl<T> OptionGetter<T> {
    /// Runs the getter and returns the result.
    pub fn run(&self) -> Option<T> {
        (self.0)()
    }
}

/// Conversion trait for creating an `OptionGetter` from a closure or a signal.
pub trait IntoOptionGetter<T, M> {
    /// Converts the given value into an `OptionGetter`.
    fn into_option_getter(self) -> OptionGetter<T>;
}

/// Marker type for creating an `OptionGetter` from a closure.
/// Used so that the compiler doesn't complain about double implementations of the trait `IntoOptionGetter`.
pub struct FunctionMarker;

impl<T, F> IntoOptionGetter<T, FunctionMarker> for F
where
    F: Fn() -> Option<T> + Send + Sync + 'static,
{
    fn into_option_getter(self) -> OptionGetter<T> {
        OptionGetter(Arc::new(self))
    }
}

/// Marker type for creating an `OptionGetter` from a signal.
/// Used so that the compiler doesn't complain about double implementations of the trait `IntoOptionGetter`.
pub struct SignalMarker;

impl<T, S> IntoOptionGetter<T, SignalMarker> for S
where
    S: Get<Value = Option<T>> + Clone + Send + Sync + 'static,
{
    fn into_option_getter(self) -> OptionGetter<T> {
        let cloned = self.clone();
        OptionGetter(Arc::new(move || cloned.get()))
    }
}
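A brief usage sketch (not part of the diff; the component and names below are illustrative, and assume `ShowLet` is in scope, e.g. via the prelude once released) showing both forms the `some` prop accepts, plus the optional `fallback`:

```rust
use leptos::prelude::*;

#[component]
pub fn Demo() -> impl IntoView {
    let (user, _set_user) = signal(Some("Alice".to_string()));

    view! {
        // signal form: resolved through the `SignalMarker` impl of `IntoOptionGetter`
        <ShowLet some=user let:name>
            "Hello, " {name} "!"
        </ShowLet>

        // closure form: resolved through the `FunctionMarker` impl
        <ShowLet
            some=move || user.get().map(|n| n.len())
            fallback=|| "No user."
            let:len
        >
            "Name length: " {len}
        </ShowLet>
    }
}
```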
leptos/src/subsecond.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
use dioxus_devtools::DevserverMsg;
use wasm_bindgen::{prelude::Closure, JsCast};
use web_sys::{js_sys::JsString, MessageEvent, WebSocket};

/// Sets up a WebSocket connection to the `dx` CLI, waiting for incoming hot-patching messages
/// and patching the WASM binary appropriately.
//
// Note: This is a stripped-down version of Dioxus's `make_ws` from `dioxus_web`.
// It's essentially copy-pasted here because it's not pub there.
// Would love to just take a dependency on that to be able to use it and deduplicate.
//
// https://github.com/DioxusLabs/dioxus/blob/main/packages/web/src/devtools.rs#L36
pub fn connect_to_hot_patch_messages() {
    // Get the location of the devserver, using the current location plus the /_dioxus path.
    // The idea is that the devserver is always reachable at /_dioxus behind a proxy.
    let location = web_sys::window().unwrap().location();
    let url = format!(
        "{protocol}//{host}/_dioxus?build_id={build_id}",
        protocol = match location.protocol().unwrap() {
            prot if prot == "https:" => "wss:",
            _ => "ws:",
        },
        host = location.host().unwrap(),
        build_id = dioxus_cli_config::build_id(),
    );

    let ws = WebSocket::new(&url).unwrap();

    ws.set_onmessage(Some(
        Closure::<dyn FnMut(MessageEvent)>::new(move |e: MessageEvent| {
            let Ok(text) = e.data().dyn_into::<JsString>() else {
                return;
            };

            // The devserver messages have some &'static strs in them, so we need to leak the source string
            let string: String = text.into();
            let string = Box::leak(string.into_boxed_str());

            if let Ok(DevserverMsg::HotReload(msg)) =
                serde_json::from_str::<DevserverMsg>(string)
            {
                if let Some(jump_table) = msg.jump_table.as_ref().cloned() {
                    if msg.for_build_id == Some(dioxus_cli_config::build_id()) {
                        let our_pid = if cfg!(target_family = "wasm") {
                            None
                        } else {
                            Some(std::process::id())
                        };

                        if msg.for_pid == our_pid {
                            unsafe { subsecond::apply_patch(jump_table) }
                                .unwrap();
                        }
                    }
                }
            }
        })
        .into_js_value()
        .as_ref()
        .unchecked_ref(),
    ));
}
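A hedged sketch (not from the diff; the entry point shown is an assumption) of where this would typically be called: once, early in client-side startup, so hot-patch messages are handled for the whole lifetime of the app:

```rust
// Assumption: a CSR/hydrate entry point; only wire up hot patching in debug builds.
pub fn main() {
    #[cfg(debug_assertions)]
    connect_to_hot_patch_messages();

    // ...mount the application as usual...
}
```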
@@ -32,12 +32,12 @@ use tachys::{
|
||||
};
|
||||
use throw_error::ErrorHookFuture;
|
||||
|
||||
/// If any [`Resource`](leptos_reactive::Resource) is read in the `children` of this
|
||||
/// If any [`Resource`](crate::prelude::Resource) is read in the `children` of this
|
||||
/// component, it will show the `fallback` while they are loading. Once all are resolved,
|
||||
/// it will render the `children`.
|
||||
///
|
||||
/// Each time one of the resources is loading again, it will fall back. To keep the current
|
||||
/// children instead, use [Transition](crate::Transition).
|
||||
/// children instead, use [Transition](crate::prelude::Transition).
|
||||
///
|
||||
/// Note that the `children` will be rendered initially (in order to capture the fact that
|
||||
/// those resources are read under the suspense), so you cannot assume that resources read
|
||||
|
||||
@@ -16,11 +16,11 @@ use reactive_graph::{
|
||||
use slotmap::{DefaultKey, SlotMap};
|
||||
use tachys::reactive_graph::OwnedView;
|
||||
|
||||
/// If any [`Resource`](leptos_reactive::Resource) is read in the `children` of this
|
||||
/// If any [`Resource`](crate::prelude::Resource) is read in the `children` of this
|
||||
/// component, it will show the `fallback` while they are loading. Once all are resolved,
|
||||
/// it will render the `children`.
|
||||
///
|
||||
/// Unlike [`Suspense`](crate::Suspense), this will not fall
|
||||
/// Unlike [`Suspense`](crate::prelude::Suspense), this will not fall
|
||||
/// back to the `fallback` state if there are further changes after the initial load.
|
||||
///
|
||||
/// Note that the `children` will be rendered initially (in order to capture the fact that
|
||||
|
||||
@@ -5,7 +5,7 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "Configuration for the Leptos web framework."
|
||||
readme = "../README.md"
|
||||
version = { workspace = true }
|
||||
version = "0.8.7"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
@@ -13,16 +13,24 @@ edition.workspace = true
|
||||
config = { default-features = false, features = [
|
||||
"toml",
|
||||
"convert-case",
|
||||
] , workspace = true }
|
||||
], workspace = true }
|
||||
regex = { workspace = true, default-features = true }
|
||||
serde = { features = ["derive", "rc"] , workspace = true, default-features = true }
|
||||
thiserror = { workspace = true , default-features = true }
|
||||
typed-builder = { workspace = true , default-features = true }
|
||||
serde = { features = [
|
||||
"derive",
|
||||
"rc",
|
||||
], workspace = true, default-features = true }
|
||||
thiserror = { workspace = true, default-features = true }
|
||||
typed-builder = { workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tokio = { features = ["rt", "macros"] , workspace = true, default-features = true }
|
||||
tokio = { features = [
|
||||
"rt",
|
||||
"macros",
|
||||
], workspace = true, default-features = true }
|
||||
tempfile = { workspace = true, default-features = true }
|
||||
temp-env = { features = ["async_closure"] , workspace = true, default-features = true }
|
||||
temp-env = { features = [
|
||||
"async_closure",
|
||||
], workspace = true, default-features = true }
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["--generate-link-to-definition"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_dom"
|
||||
version = { workspace = true }
|
||||
version = "0.8.7"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -14,10 +14,10 @@ reactive_graph = { workspace = true }
|
||||
or_poisoned = { workspace = true }
|
||||
js-sys = { workspace = true, default-features = true }
|
||||
send_wrapper = { workspace = true, default-features = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true , default-features = true }
|
||||
serde_json = { optional = true , workspace = true, default-features = true }
|
||||
serde = { optional = true , workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, default-features = true }
|
||||
serde_json = { optional = true, workspace = true, default-features = true }
|
||||
serde = { optional = true, workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
leptos = { path = "../leptos" }
|
||||
|
||||
@@ -258,15 +258,7 @@ pub fn request_idle_callback_with_handle(
|
||||
///
|
||||
/// <div class="warning">The task is called outside of the ownership tree, this means that if you want to access for example the context you need to reestablish the owner.</div>
|
||||
pub fn queue_microtask(task: impl FnOnce() + 'static) {
|
||||
use js_sys::{Function, Reflect};
|
||||
|
||||
let task = Closure::once_into_js(task);
|
||||
let window = web_sys::window().expect("window not available");
|
||||
let queue_microtask =
|
||||
Reflect::get(&window, &JsValue::from_str("queueMicrotask"))
|
||||
.expect("queueMicrotask not available");
|
||||
let queue_microtask = queue_microtask.unchecked_into::<Function>();
|
||||
_ = queue_microtask.call1(&JsValue::UNDEFINED, &task);
|
||||
tachys::renderer::dom::queue_microtask(task);
|
||||
}
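A minimal usage sketch (assumed, not part of the diff): the closure runs after the current synchronous work finishes but before timers or rendering callbacks, and, per the warning above, outside the ownership tree:

```rust
queue_microtask(|| {
    // context/owner must be re-established here if needed
    web_sys::console::log_1(&"microtask ran".into());
});
```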
|
||||
|
||||
/// Handle that is generated by [set_timeout_with_handle] and can be used to clear the timeout.
|
||||
@@ -471,7 +463,7 @@ pub fn set_interval_with_handle(
|
||||
|
||||
#[inline(never)]
|
||||
fn si(
|
||||
cb: Box<dyn Fn()>,
|
||||
cb: Box<dyn FnMut()>,
|
||||
duration: Duration,
|
||||
) -> Result<IntervalHandle, JsValue> {
|
||||
let cb = Closure::wrap(cb).into_js_value();
|
||||
@@ -593,7 +585,8 @@ impl WindowListenerHandle {
|
||||
}
|
||||
}
|
||||
|
||||
fn is_server() -> bool {
|
||||
/// Returns `true` if the current environment is a server.
|
||||
pub fn is_server() -> bool {
|
||||
#[cfg(feature = "hydration")]
|
||||
{
|
||||
Owner::current_shared_context()
|
||||
@@ -605,3 +598,8 @@ fn is_server() -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the current environment is a browser.
|
||||
pub fn is_browser() -> bool {
|
||||
!is_server()
|
||||
}
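A small, hedged sketch (not from the diff) of how the newly public helpers can be used to branch on the rendering environment:

```rust
fn render_location() -> &'static str {
    if is_server() {
        "rendered on the server"
    } else {
        // is_browser() is simply the negation of is_server()
        debug_assert!(is_browser());
        "rendered in the browser"
    }
}
```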
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_hot_reload"
|
||||
version = { workspace = true }
|
||||
version = "0.8.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -11,17 +11,20 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow = { workspace = true, default-features = true }
|
||||
serde = { features = ["derive"] , workspace = true, default-features = true }
|
||||
serde = { features = ["derive"], workspace = true, default-features = true }
|
||||
syn = { features = [
|
||||
"full",
|
||||
"parsing",
|
||||
"extra-traits",
|
||||
"visit",
|
||||
"printing",
|
||||
] , workspace = true, default-features = true }
|
||||
], workspace = true, default-features = true }
|
||||
quote = { workspace = true, default-features = true }
|
||||
rstml = { workspace = true, default-features = true }
|
||||
proc-macro2 = { features = ["span-locations", "nightly"] , workspace = true, default-features = true }
|
||||
proc-macro2 = { features = [
|
||||
"span-locations",
|
||||
"nightly",
|
||||
], workspace = true, default-features = true }
|
||||
parking_lot = { workspace = true, default-features = true }
|
||||
walkdir = { workspace = true, default-features = true }
|
||||
camino = { workspace = true, default-features = true }
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_macro"
|
||||
version = { workspace = true }
|
||||
version = "0.8.9"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -13,26 +13,28 @@ edition.workspace = true
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
attribute-derive = { features = ["syn-full"] , workspace = true, default-features = true }
|
||||
attribute-derive = { features = [
|
||||
"syn-full",
|
||||
], workspace = true, default-features = true }
|
||||
cfg-if = { workspace = true, default-features = true }
|
||||
html-escape = { workspace = true, default-features = true }
|
||||
itertools = { workspace = true , default-features = true }
|
||||
itertools = { workspace = true, default-features = true }
|
||||
prettyplease = { workspace = true, default-features = true }
|
||||
proc-macro-error2 = { default-features = false , workspace = true }
|
||||
proc-macro-error2 = { default-features = false, workspace = true }
|
||||
proc-macro2 = { workspace = true, default-features = true }
|
||||
quote = { workspace = true, default-features = true }
|
||||
syn = { features = ["full"] , workspace = true, default-features = true }
|
||||
syn = { features = ["full"], workspace = true, default-features = true }
|
||||
rstml = { workspace = true, default-features = true }
|
||||
leptos_hot_reload = { workspace = true }
|
||||
server_fn_macro = { workspace = true }
|
||||
convert_case = { workspace = true , default-features = true }
|
||||
uuid = { features = ["v4"] , workspace = true, default-features = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
convert_case = { workspace = true, default-features = true }
|
||||
uuid = { features = ["v4"], workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
log = { workspace = true, default-features = true }
|
||||
typed-builder = { workspace = true, default-features = true }
|
||||
trybuild = { workspace = true , default-features = true }
|
||||
trybuild = { workspace = true, default-features = true }
|
||||
leptos = { path = "../leptos" }
|
||||
leptos_router = { path = "../router", features = ["ssr"] }
|
||||
server_fn = { path = "../server_fn", features = ["cbor"] }
|
||||
|
||||
@@ -548,7 +548,7 @@ impl ToTokens for Model {
|
||||
quote! {
|
||||
#[::leptos::prelude::lazy]
|
||||
#[allow(non_snake_case)]
|
||||
async fn #outer_name (el: ::leptos::web_sys::HtmlElement) {
|
||||
fn #outer_name (el: ::leptos::web_sys::HtmlElement) {
|
||||
#hydrate_fn_inner
|
||||
}
|
||||
|
||||
@@ -1360,7 +1360,10 @@ fn prop_to_doc(
|
||||
}
|
||||
|
||||
pub fn unmodified_fn_name_from_fn_name(ident: &Ident) -> Ident {
|
||||
Ident::new(&format!("__{ident}"), ident.span())
|
||||
Ident::new(
|
||||
&format!("__component_{}", ident.to_string().to_case(Snake)),
|
||||
ident.span(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Converts all `impl Trait`s in a function signature to use generic params instead.
|
||||
|
||||
@@ -48,7 +48,10 @@ pub fn lazy_impl(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
let is_wasm = cfg!(feature = "csr") || cfg!(feature = "hydrate");
|
||||
if is_wasm {
|
||||
quote! {
|
||||
#[::leptos::wasm_split_helpers::wasm_split(#unique_name)]
|
||||
#[::leptos::wasm_split_helpers::wasm_split(
|
||||
#unique_name,
|
||||
::leptos::__reexports::send_wrapper
|
||||
)]
|
||||
#fun
|
||||
}
|
||||
} else {
|
||||
|
||||
@@ -683,7 +683,11 @@ fn component_macro(
|
||||
let parse_result = syn::parse::<component::Model>(s);
|
||||
|
||||
if let (Ok(ref mut unexpanded), Ok(model)) = (&mut dummy, parse_result) {
|
||||
let expanded = model.is_transparent(is_transparent).is_lazy(is_lazy).with_island(island).into_token_stream();
|
||||
let expanded = model
|
||||
.is_transparent(is_transparent)
|
||||
.is_lazy(is_lazy)
|
||||
.with_island(island)
|
||||
.into_token_stream();
|
||||
if !matches!(unexpanded.vis, Visibility::Public(_)) {
|
||||
unexpanded.vis = Visibility::Public(Pub {
|
||||
span: unexpanded.vis.span(),
|
||||
@@ -696,7 +700,7 @@ fn component_macro(
|
||||
#expanded
|
||||
|
||||
#[doc(hidden)]
|
||||
#[allow(non_snake_case, dead_code, clippy::too_many_arguments, clippy::needless_lifetimes)]
|
||||
#[allow(clippy::too_many_arguments, clippy::needless_lifetimes)]
|
||||
#unexpanded
|
||||
}
|
||||
} else {
|
||||
@@ -705,7 +709,7 @@ fn component_macro(
|
||||
dummy.sig.ident = unmodified_fn_name_from_fn_name(&dummy.sig.ident);
|
||||
quote! {
|
||||
#[doc(hidden)]
|
||||
#[allow(non_snake_case, dead_code, clippy::too_many_arguments, clippy::needless_lifetimes)]
|
||||
#[allow(clippy::too_many_arguments, clippy::needless_lifetimes)]
|
||||
#dummy
|
||||
}
|
||||
}
|
||||
@@ -1030,14 +1034,41 @@ pub fn memo(input: TokenStream) -> TokenStream {
|
||||
memo::memo_impl(input)
|
||||
}
|
||||
|
||||
/// The `#[lazy]` macro marks an `async` function as a function that can be lazy-loaded from a
|
||||
/// separate (WebAssembly) binary.
|
||||
/// The `#[lazy]` macro indicates that a function can be lazy-loaded from a separate WebAssembly (WASM) binary.
|
||||
///
|
||||
/// The first time the function is called, calling the function will first load that other binary,
|
||||
/// then call the function. On subsequent call it will be called immediately, but still return
|
||||
/// then call the function. On subsequent calls it will be called immediately, but still return
|
||||
/// asynchronously to maintain the same API.
|
||||
///
|
||||
/// All parameters and output types should be concrete types, with no generics.
|
||||
/// `#[lazy]` can be used to annotate synchronous or `async` functions. In both cases, the final function will be
|
||||
/// `async` and must be called as such.
|
||||
///
|
||||
/// All parameters and output types should be concrete types, with no generics or `impl Trait` types.
|
||||
///
|
||||
/// This should be used in tandem with a suitable build process, such as `cargo leptos --split`.
|
||||
///
|
||||
/// ```rust
|
||||
/// # use leptos_macro::lazy;
|
||||
///
|
||||
/// #[lazy]
|
||||
/// fn lazy_synchronous_function() -> String {
|
||||
/// "Hello, lazy world!".to_string()
|
||||
/// }
|
||||
///
|
||||
/// #[lazy]
|
||||
/// async fn lazy_async_function() -> String {
|
||||
/// /* do something that requires async work */
|
||||
/// "Hello, lazy async world!".to_string()
|
||||
/// }
|
||||
///
|
||||
/// async fn use_lazy_functions() {
|
||||
/// // synchronous function has been converted to async
|
||||
/// let value1 = lazy_synchronous_function().await;
|
||||
///
|
||||
/// // async function is still async
|
||||
/// let value2 = lazy_async_function().await;
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_attribute]
|
||||
#[proc_macro_error]
|
||||
pub fn lazy(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
|
||||
@@ -25,9 +25,8 @@ use std::{
|
||||
use syn::{
|
||||
punctuated::Pair::{End, Punctuated},
|
||||
spanned::Spanned,
|
||||
Expr,
|
||||
Expr::Tuple,
|
||||
ExprArray, ExprLit, ExprRange, Lit, LitStr, RangeLimits, Stmt,
|
||||
Expr::{self, Tuple},
|
||||
ExprArray, ExprLit, ExprPath, ExprRange, Lit, LitStr, RangeLimits, Stmt,
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
@@ -1679,7 +1678,7 @@ fn attribute_value(
|
||||
}
|
||||
|
||||
// Keep list alphabetized for binary search
|
||||
const TYPED_EVENTS: [&str; 126] = [
|
||||
const TYPED_EVENTS: [&str; 127] = [
|
||||
"DOMContentLoaded",
|
||||
"abort",
|
||||
"afterprint",
|
||||
@@ -1775,6 +1774,7 @@ const TYPED_EVENTS: [&str; 126] = [
|
||||
"reset",
|
||||
"resize",
|
||||
"scroll",
|
||||
"scrollend",
|
||||
"securitypolicyviolation",
|
||||
"seeked",
|
||||
"seeking",
|
||||
@@ -1871,6 +1871,28 @@ pub(crate) fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn full_path_from_tag_name(tag_name: &NodeName) -> Option<ExprPath> {
|
||||
match tag_name {
|
||||
NodeName::Path(path) => Some(path.clone()),
|
||||
NodeName::Block(_) => {
|
||||
let span = tag_name.span();
|
||||
proc_macro_error2::emit_error!(
|
||||
span,
|
||||
"blocks not allowed in tag-name position"
|
||||
);
|
||||
None
|
||||
}
|
||||
_ => {
|
||||
let span = tag_name.span();
|
||||
proc_macro_error2::emit_error!(
|
||||
span,
|
||||
"punctuated names not allowed in slots"
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn directive_call_from_attribute_node(
|
||||
attr: &KeyedAttribute,
|
||||
directive_name: &str,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use super::{
|
||||
component_builder::maybe_optimised_component_children,
|
||||
convert_to_snake_case, ident_from_tag_name,
|
||||
convert_to_snake_case, full_path_from_tag_name,
|
||||
};
|
||||
use crate::view::{fragment_to_tokens, utils::filter_prefixed_attrs, TagType};
|
||||
use proc_macro2::{Ident, TokenStream, TokenTree};
|
||||
@@ -24,7 +24,7 @@ pub(crate) fn slot_to_tokens(
|
||||
node.name().to_string()
|
||||
});
|
||||
|
||||
let component_name = ident_from_tag_name(node.name());
|
||||
let component_path = full_path_from_tag_name(node.name());
|
||||
|
||||
let Some(parent_slots) = parent_slots else {
|
||||
proc_macro_error2::emit_error!(
|
||||
@@ -190,7 +190,7 @@ pub(crate) fn slot_to_tokens(
|
||||
|
||||
let slot = quote_spanned! {node.span()=>
|
||||
{
|
||||
let slot = #component_name::builder()
|
||||
let slot = #component_path::builder()
|
||||
#(#props)*
|
||||
#(#slots)*
|
||||
#children
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_server"
|
||||
version = { workspace = true }
|
||||
version = "0.8.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -11,11 +11,11 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
base64 = { workspace = true, default-features = true }
|
||||
codee = { features = ["json_serde"] , workspace = true, default-features = true }
|
||||
codee = { features = ["json_serde"], workspace = true, default-features = true }
|
||||
hydration_context = { workspace = true }
|
||||
reactive_graph = { workspace = true, features = ["hydration"] }
|
||||
server_fn = { workspace = true }
|
||||
tracing = { optional = true , workspace = true, default-features = true }
|
||||
tracing = { optional = true, workspace = true, default-features = true }
|
||||
futures = { workspace = true, default-features = true }
|
||||
|
||||
any_spawner = { workspace = true }
|
||||
@@ -25,9 +25,9 @@ send_wrapper = { workspace = true, default-features = true }
|
||||
|
||||
# serialization formats
|
||||
serde = { workspace = true, default-features = true }
|
||||
js-sys = { optional = true , workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, optional = true , default-features = true }
|
||||
serde_json = { workspace = true , default-features = true }
|
||||
js-sys = { optional = true, workspace = true, default-features = true }
|
||||
wasm-bindgen = { workspace = true, optional = true, default-features = true }
|
||||
serde_json = { workspace = true, default-features = true }
|
||||
|
||||
[features]
|
||||
ssr = []
|
||||
@@ -44,7 +44,8 @@ denylist = ["tracing"]
|
||||
max_combination_size = 2
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["--generate-link-to-definition"]
|
||||
rustdoc-args = ["--generate-link-to-definition", "--cfg", "docsrs"]
|
||||
all-features = true
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(leptos_debuginfo)'] }
|
||||
|
||||
@@ -386,6 +386,7 @@ T: Send + Sync + 'static,
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-wasm-bindgen")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
|
||||
impl<T> ArcOnceResource<T, JsonSerdeWasmCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -418,6 +419,7 @@ fut: impl Future<Output = T> + Send + 'static
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "miniserde")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
|
||||
impl<T> ArcOnceResource<T, MiniserdeCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -451,6 +453,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-lite")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
|
||||
impl<T> ArcOnceResource<T, SerdeLite<JsonSerdeCodec>>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -484,6 +487,7 @@ fut: impl Future<Output = T> + Send + 'static
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> ArcOnceResource<T, RkyvCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -748,6 +752,7 @@ T: Send + Sync + 'static,
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-wasm-bindgen")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
|
||||
impl<T> OnceResource<T, JsonSerdeWasmCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -780,6 +785,7 @@ fut: impl Future<Output = T> + Send + 'static
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "miniserde")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
|
||||
impl<T> OnceResource<T, MiniserdeCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -813,6 +819,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-lite")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
|
||||
impl<T> OnceResource<T, SerdeLite<JsonSerdeCodec>>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -846,6 +853,7 @@ fut: impl Future<Output = T> + Send + 'static
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> OnceResource<T, RkyvCodec>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
|
||||
@@ -709,6 +709,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> ArcResource<T, RkyvCodec>
|
||||
where
|
||||
RkyvCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -1048,6 +1049,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-wasm-bindgen")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
|
||||
impl<T> Resource<T, JsonSerdeWasmCodec>
|
||||
where
|
||||
JsonSerdeWasmCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -1105,6 +1107,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "miniserde")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
|
||||
impl<T> Resource<T, MiniserdeCodec>
|
||||
where
|
||||
MiniserdeCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -1164,6 +1167,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-lite")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
|
||||
impl<T> Resource<T, SerdeLite<JsonSerdeCodec>>
|
||||
where
|
||||
SerdeLite<JsonSerdeCodec>: Encoder<T> + Decoder<T>,
|
||||
@@ -1222,6 +1226,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> Resource<T, RkyvCodec>
|
||||
where
|
||||
RkyvCodec: Encoder<T> + Decoder<T>,
|
||||
|
||||
@@ -80,6 +80,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-lite")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-lite")))]
|
||||
impl<T> SharedValue<T, SerdeLite<JsonSerdeCodec>>
|
||||
where
|
||||
SerdeLite<JsonSerdeCodec>: Encoder<T> + Decoder<T>,
|
||||
@@ -102,6 +103,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "serde-wasm-bindgen")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "serde-wasm-bindgen")))]
|
||||
impl<T> SharedValue<T, JsonSerdeWasmCodec>
|
||||
where
|
||||
JsonSerdeWasmCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -124,6 +126,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "miniserde")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "miniserde")))]
|
||||
impl<T> SharedValue<T, MiniserdeCodec>
|
||||
where
|
||||
MiniserdeCodec: Encoder<T> + Decoder<T>,
|
||||
@@ -146,6 +149,7 @@ where
|
||||
}
|
||||
|
||||
#[cfg(feature = "rkyv")]
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "rkyv")))]
|
||||
impl<T> SharedValue<T, RkyvCodec>
|
||||
where
|
||||
RkyvCodec: Encoder<T> + Decoder<T>,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_meta"
|
||||
version = "0.8.4"
|
||||
version = "0.8.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "oco_ref"
|
||||
version = "0.2.0"
|
||||
version = "0.2.1"
|
||||
authors = ["Danik Vitek", "Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -10,7 +10,7 @@ edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
serde = { workspace = true, default-features = true }
|
||||
thiserror = { workspace = true , default-features = true }
|
||||
thiserror = { workspace = true, default-features = true }
|
||||
|
||||
[dev-dependencies]
|
||||
serde_json = { workspace = true , default-features = true }
|
||||
serde_json = { workspace = true, default-features = true }
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_graph"
|
||||
version = "0.2.4"
|
||||
version = "0.2.8"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -27,6 +27,8 @@ async-lock = { workspace = true, default-features = true }
|
||||
send_wrapper = { features = [
|
||||
"futures",
|
||||
], workspace = true, default-features = true }
|
||||
subsecond = { workspace = true, default-features = true, optional = true }
|
||||
indexmap = { workspace = true, default-features = true }
|
||||
|
||||
[target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies]
|
||||
web-sys = { version = "0.3.77", features = ["console"] }
|
||||
@@ -50,6 +52,7 @@ hydration = ["dep:hydration_context"]
|
||||
effects = [
|
||||
] # whether to run effects: should be disabled for something like server rendering
|
||||
sandboxed-arenas = []
|
||||
subsecond = ["dep:subsecond"]
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use crate::{
|
||||
computed::{ArcMemo, Memo},
|
||||
computed::{ArcMemo, Memo, ScopedFuture},
|
||||
diagnostics::is_suppressing_resource_load,
|
||||
owner::{ArcStoredValue, ArenaItem},
|
||||
graph::untrack,
|
||||
owner::{ArcStoredValue, ArenaItem, Owner},
|
||||
send_wrapper_ext::SendOption,
|
||||
signal::{ArcMappedSignal, ArcRwSignal, MappedSignal, RwSignal},
|
||||
traits::{DefinedAt, Dispose, Get, GetUntracked, GetValue, Update, Write},
|
||||
@@ -199,13 +200,18 @@ where
|
||||
I: Send + Sync,
|
||||
O: Send + Sync,
|
||||
{
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
ArcAction {
|
||||
in_flight: ArcRwSignal::new(0),
|
||||
input: ArcRwSignal::new(SendOption::new(None)),
|
||||
value: ArcRwSignal::new(SendOption::new(value)),
|
||||
version: Default::default(),
|
||||
dispatched: Default::default(),
|
||||
action_fn: Arc::new(move |input| Box::pin(action_fn(input))),
|
||||
action_fn: Arc::new(move |input| {
|
||||
Box::pin(owner.with(|| {
|
||||
ScopedFuture::new_untracked(untrack(|| action_fn(input)))
|
||||
}))
|
||||
}),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
@@ -370,6 +376,7 @@ where
|
||||
F: Fn(&I) -> Fu + 'static,
|
||||
Fu: Future<Output = O> + 'static,
|
||||
{
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
let action_fn = SendWrapper::new(action_fn);
|
||||
ArcAction {
|
||||
in_flight: ArcRwSignal::new(0),
|
||||
@@ -378,7 +385,9 @@ where
|
||||
version: Default::default(),
|
||||
dispatched: Default::default(),
|
||||
action_fn: Arc::new(move |input| {
|
||||
Box::pin(SendWrapper::new(action_fn(input)))
|
||||
Box::pin(SendWrapper::new(owner.with(|| {
|
||||
ScopedFuture::new_untracked(untrack(|| action_fn(input)))
|
||||
})))
|
||||
}),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
|
||||
@@ -521,9 +521,10 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
{
|
||||
let fun = move || {
|
||||
let fut = fun();
|
||||
let fut = ScopedFuture::new_untracked(async move {
|
||||
SendOption::new(Some(fut.await))
|
||||
});
|
||||
let fut =
|
||||
ScopedFuture::new_untracked_with_diagnostics(async move {
|
||||
SendOption::new(Some(fut.await))
|
||||
});
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
let fut = Sandboxed::new(fut);
|
||||
fut
|
||||
|
||||
@@ -54,11 +54,55 @@ impl<Fut> ScopedFuture<Fut> {
|
||||
fut,
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn new_untracked_with_diagnostics(
|
||||
fut: Fut,
|
||||
) -> ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
let owner = Owner::current().unwrap_or_default();
|
||||
ScopedFutureUntrackedWithDiagnostics {
|
||||
owner,
|
||||
observer: None,
|
||||
fut,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fut: Future> Future for ScopedFuture<Fut> {
|
||||
type Output = Fut::Output;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
this.owner.with(|| {
|
||||
#[cfg(debug_assertions)]
|
||||
let _maybe_guard = if this.observer.is_none() {
|
||||
Some(crate::diagnostics::SpecialNonReactiveZone::enter())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
this.observer.with_observer(|| this.fut.poll(cx))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pin_project! {
|
||||
/// A [`Future`] wrapper that sets the [`Owner`] and [`Observer`] before polling the inner
|
||||
/// `Future`, output of [`ScopedFuture::new_untracked_with_diagnostics`].
|
||||
///
|
||||
/// In leptos 0.9 this will be replaced with `ScopedFuture` itself.
|
||||
#[derive(Clone)]
|
||||
pub struct ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
owner: Owner,
|
||||
observer: Option<AnySubscriber>,
|
||||
#[pin]
|
||||
fut: Fut,
|
||||
}
|
||||
}
|
||||
|
||||
impl<Fut: Future> Future for ScopedFutureUntrackedWithDiagnostics<Fut> {
|
||||
type Output = Fut::Output;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
this.owner
|
||||
|
||||
@@ -9,6 +9,8 @@ use crate::{
|
||||
};
|
||||
use futures::StreamExt;
|
||||
use or_poisoned::OrPoisoned;
|
||||
#[cfg(feature = "subsecond")]
|
||||
use std::sync::Mutex;
|
||||
use std::{
|
||||
fmt::Debug,
|
||||
future::{Future, IntoFuture},
|
||||
@@ -49,13 +51,39 @@ impl<T> Debug for RenderEffect<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "subsecond")]
|
||||
type CurrentHotPtr = Box<dyn Fn() -> Option<subsecond::HotFnPtr> + Send + Sync>;
|
||||
|
||||
impl<T> RenderEffect<T>
|
||||
where
|
||||
T: 'static,
|
||||
{
|
||||
/// Creates a new render effect, which immediately runs `fun`.
|
||||
pub fn new(fun: impl FnMut(Option<T>) -> T + 'static) -> Self {
|
||||
Self::new_with_value_erased(Box::new(fun), None)
|
||||
#[cfg(feature = "subsecond")]
|
||||
let (hot_fn_ptr, fun) = {
|
||||
let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
|
||||
(
|
||||
{
|
||||
let fun = Arc::downgrade(&fun);
|
||||
let wrapped = send_wrapper::SendWrapper::new(move || {
|
||||
fun.upgrade()
|
||||
.map(|n| n.lock().or_poisoned().ptr_address())
|
||||
});
|
||||
// it's not redundant, it's due to the SendWrapper deref
|
||||
#[allow(clippy::redundant_closure)]
|
||||
Box::new(move || wrapped())
|
||||
},
|
||||
move |prev| fun.lock().or_poisoned().call((prev,)),
|
||||
)
|
||||
};
|
||||
|
||||
Self::new_with_value_erased(
|
||||
Box::new(fun),
|
||||
None,
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr,
|
||||
)
|
||||
}
|
||||
|
||||
/// Creates a new render effect with an initial value.
|
||||
@@ -63,7 +91,30 @@ where
|
||||
fun: impl FnMut(Option<T>) -> T + 'static,
|
||||
initial_value: Option<T>,
|
||||
) -> Self {
|
||||
Self::new_with_value_erased(Box::new(fun), initial_value)
|
||||
#[cfg(feature = "subsecond")]
|
||||
let (hot_fn_ptr, fun) = {
|
||||
let fun = Arc::new(Mutex::new(subsecond::HotFn::current(fun)));
|
||||
(
|
||||
{
|
||||
let fun = Arc::downgrade(&fun);
|
||||
let wrapped = send_wrapper::SendWrapper::new(move || {
|
||||
fun.upgrade()
|
||||
.map(|n| n.lock().or_poisoned().ptr_address())
|
||||
});
|
||||
// it's not redundant, it's due to the SendWrapper deref
|
||||
#[allow(clippy::redundant_closure)]
|
||||
Box::new(move || wrapped())
|
||||
},
|
||||
move |prev| fun.lock().or_poisoned().call((prev,)),
|
||||
)
|
||||
};
|
||||
|
||||
Self::new_with_value_erased(
|
||||
Box::new(fun),
|
||||
initial_value,
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr,
|
||||
)
|
||||
}
|
||||
|
||||
/// Creates a new render effect, which immediately runs `fun`.
|
||||
@@ -71,6 +122,11 @@ where
|
||||
fun: impl FnMut(Option<T>) -> T + 'static,
|
||||
value: impl IntoFuture<Output = T> + 'static,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
#[cfg(feature = "subsecond")]
|
||||
let fun = move |prev| fun.call((prev,));
|
||||
|
||||
Self::new_with_async_value_erased(
|
||||
Box::new(fun),
|
||||
Box::pin(value.into_future()),
|
||||
@@ -79,8 +135,13 @@ where
|
||||
}
|
||||
|
||||
fn new_with_value_erased(
|
||||
mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
|
||||
#[allow(unused_mut)] mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
|
||||
initial_value: Option<T>,
|
||||
// this argument can be used to invalidate individual effects in the future
|
||||
// in present experiments, I have found that it is not actually granular enough to make a difference
|
||||
#[allow(unused)]
|
||||
#[cfg(feature = "subsecond")]
|
||||
hot_fn_ptr: CurrentHotPtr,
|
||||
) -> Self {
|
||||
// codegen optimisation:
|
||||
fn prep() -> (Owner, Arc<RwLock<EffectInner>>, crate::channel::Receiver)
|
||||
@@ -104,12 +165,56 @@ where
|
||||
let _ = initial_value;
|
||||
let _ = owner;
|
||||
let _ = &mut rx;
|
||||
let _ = &mut fun;
|
||||
let _ = fun;
|
||||
}
|
||||
|
||||
#[cfg(feature = "effects")]
|
||||
{
|
||||
let subscriber = inner.to_any_subscriber();
|
||||
|
||||
#[cfg(all(feature = "subsecond", debug_assertions))]
|
||||
let mut fun = {
|
||||
use crate::graph::ReactiveNode;
|
||||
use rustc_hash::FxHashMap;
|
||||
use std::sync::{Arc, LazyLock, Mutex};
|
||||
use subsecond::HotFnPtr;
|
||||
|
||||
static HOT_RELOAD_SUBSCRIBERS: LazyLock<
|
||||
Mutex<FxHashMap<AnySubscriber, (HotFnPtr, CurrentHotPtr)>>,
|
||||
> = LazyLock::new(|| {
|
||||
subsecond::register_handler(Arc::new(|| {
|
||||
HOT_RELOAD_SUBSCRIBERS.lock().or_poisoned().retain(
|
||||
|subscriber, (prev_ptr, hot_fn_ptr)| {
|
||||
match hot_fn_ptr() {
|
||||
None => false,
|
||||
Some(curr_hot_ptr) => {
|
||||
if curr_hot_ptr != *prev_ptr {
|
||||
crate::log_warning(format_args!(
|
||||
"{prev_ptr:?} <> \
|
||||
{curr_hot_ptr:?}",
|
||||
));
|
||||
*prev_ptr = curr_hot_ptr;
|
||||
|
||||
subscriber.mark_dirty();
|
||||
}
|
||||
true
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
}));
|
||||
Default::default()
|
||||
});
|
||||
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
let initial_ptr = hot_fn_ptr().unwrap();
|
||||
HOT_RELOAD_SUBSCRIBERS
|
||||
.lock()
|
||||
.or_poisoned()
|
||||
.insert(subscriber.clone(), (initial_ptr, hot_fn_ptr));
|
||||
move |prev| fun.call((prev,))
|
||||
};
|
||||
|
||||
*value.write().or_poisoned() = Some(
|
||||
owner.with(|| subscriber.with_observer(|| fun(initial_value))),
|
||||
);
|
||||
@@ -230,6 +335,11 @@ where
|
||||
pub fn new_isomorphic(
|
||||
fun: impl FnMut(Option<T>) -> T + Send + Sync + 'static,
|
||||
) -> Self {
|
||||
#[cfg(feature = "subsecond")]
|
||||
let mut fun = subsecond::HotFn::current(fun);
|
||||
#[cfg(feature = "subsecond")]
|
||||
let fun = move |prev| fun.call((prev,));
|
||||
|
||||
fn erased<T: Send + Sync + 'static>(
|
||||
mut fun: Box<dyn FnMut(Option<T>) -> T + Send + Sync + 'static>,
|
||||
) -> RenderEffect<T> {
|
||||
|
||||
@@ -6,11 +6,14 @@
|
||||
//! a linear search is not significantly more expensive than a hash and lookup.
|
||||
|
||||
use super::{AnySource, AnySubscriber, Source};
|
||||
use core::slice;
|
||||
use std::{mem, vec::IntoIter};
|
||||
use indexmap::IndexSet;
|
||||
use rustc_hash::FxHasher;
|
||||
use std::{hash::BuildHasherDefault, mem};
|
||||
|
||||
type FxIndexSet<T> = IndexSet<T, BuildHasherDefault<FxHasher>>;
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
pub struct SourceSet(Vec<AnySource>);
|
||||
pub struct SourceSet(FxIndexSet<AnySource>);
|
||||
|
||||
impl SourceSet {
|
||||
pub fn new() -> Self {
|
||||
@@ -18,16 +21,14 @@ impl SourceSet {
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, source: AnySource) {
|
||||
self.0.push(source);
|
||||
self.0.insert(source);
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, source: &AnySource) {
|
||||
if let Some(pos) = self.0.iter().position(|s| s == source) {
|
||||
self.0.remove(pos);
|
||||
}
|
||||
self.0.shift_remove(source);
|
||||
}
|
||||
|
||||
pub fn take(&mut self) -> Vec<AnySource> {
|
||||
pub fn take(&mut self) -> FxIndexSet<AnySource> {
|
||||
mem::take(&mut self.0)
|
||||
}
|
||||
|
||||
@@ -44,7 +45,7 @@ impl SourceSet {
|
||||
|
||||
impl IntoIterator for SourceSet {
|
||||
type Item = AnySource;
|
||||
type IntoIter = IntoIter<AnySource>;
|
||||
type IntoIter = <FxIndexSet<AnySource> as IntoIterator>::IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.into_iter()
|
||||
@@ -53,40 +54,36 @@ impl IntoIterator for SourceSet {
|
||||
|
||||
impl<'a> IntoIterator for &'a SourceSet {
|
||||
type Item = &'a AnySource;
|
||||
type IntoIter = slice::Iter<'a, AnySource>;
|
||||
type IntoIter = <&'a FxIndexSet<AnySource> as IntoIterator>::IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.iter()
|
||||
}
|
||||
}
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct SubscriberSet(Vec<AnySubscriber>);
|
||||
pub struct SubscriberSet(FxIndexSet<AnySubscriber>);
|
||||
|
||||
impl SubscriberSet {
|
||||
pub fn new() -> Self {
|
||||
Self(Vec::with_capacity(2))
|
||||
Self(FxIndexSet::with_capacity_and_hasher(2, Default::default()))
|
||||
}
|
||||
|
||||
pub fn subscribe(&mut self, subscriber: AnySubscriber) {
|
||||
if !self.0.contains(&subscriber) {
|
||||
self.0.push(subscriber);
|
||||
}
|
||||
self.0.insert(subscriber);
|
||||
}
|
||||
|
||||
pub fn unsubscribe(&mut self, subscriber: &AnySubscriber) {
|
||||
if let Some(pos) = self.0.iter().position(|s| s == subscriber) {
|
||||
// note: do not use `.swap_remove()` here.
|
||||
// using `.remove()` is slower because it shifts other items
|
||||
// but it maintains the order of the subscribers, which is important
|
||||
// to correctness when you're using this to drive something like a UI,
|
||||
// which can have nested effects, where the inner one assumes the outer
|
||||
// has already run (for example, an outer effect that checks .is_some(),
|
||||
// and an inner effect that unwraps)
|
||||
self.0.remove(pos);
|
||||
}
|
||||
// note: do not use `.swap_remove()` here.
|
||||
// using `.remove()` is slower because it shifts other items
|
||||
// but it maintains the order of the subscribers, which is important
|
||||
// to correctness when you're using this to drive something like a UI,
|
||||
// which can have nested effects, where the inner one assumes the outer
|
||||
// has already run (for example, an outer effect that checks .is_some(),
|
||||
// and an inner effect that unwraps)
|
||||
self.0.shift_remove(subscriber);
|
||||
}
|
||||
|
||||
pub fn take(&mut self) -> Vec<AnySubscriber> {
|
||||
pub fn take(&mut self) -> FxIndexSet<AnySubscriber> {
|
||||
mem::take(&mut self.0)
|
||||
}
|
||||
|
||||
@@ -97,7 +94,7 @@ impl SubscriberSet {
|
||||
|
||||
impl IntoIterator for SubscriberSet {
|
||||
type Item = AnySubscriber;
|
||||
type IntoIter = IntoIter<AnySubscriber>;
|
||||
type IntoIter = <FxIndexSet<AnySubscriber> as IntoIterator>::IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.into_iter()
|
||||
@@ -106,7 +103,7 @@ impl IntoIterator for SubscriberSet {
|
||||
|
||||
impl<'a> IntoIterator for &'a SubscriberSet {
|
||||
type Item = &'a AnySubscriber;
|
||||
type IntoIter = slice::Iter<'a, AnySubscriber>;
|
||||
type IntoIter = <&'a FxIndexSet<AnySubscriber> as IntoIterator>::IntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.0.iter()
|
||||
|
||||
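A small illustration (not part of the diff) of the property the ordering comment above relies on: `IndexSet` keeps insertion order, and `shift_remove` preserves the order of the remaining items, unlike `swap_remove`:

```rust
use indexmap::IndexSet;

fn demo() {
    let mut subscribers: IndexSet<&str> = IndexSet::new();
    subscribers.insert("outer effect");
    subscribers.insert("inner effect");
    // removing the outer effect keeps the inner one in its original position
    subscribers.shift_remove("outer effect");
    assert_eq!(
        subscribers.into_iter().collect::<Vec<_>>(),
        vec!["inner effect"]
    );
}
```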
@@ -209,6 +209,25 @@ impl Owner {
|
||||
this
|
||||
}
|
||||
|
||||
/// Returns the parent of this `Owner`, if any.
|
||||
///
|
||||
/// None when:
|
||||
/// - This is a root owner
|
||||
/// - The parent has been dropped
|
||||
pub fn parent(&self) -> Option<Owner> {
|
||||
self.inner
|
||||
.read()
|
||||
.or_poisoned()
|
||||
.parent
|
||||
.as_ref()
|
||||
.and_then(|p| p.upgrade())
|
||||
.map(|inner| Owner {
|
||||
inner,
|
||||
#[cfg(feature = "hydration")]
|
||||
shared_context: self.shared_context.clone(),
|
||||
})
|
||||
}
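A hedged sketch (not from the diff) of using the new `parent()` accessor to walk up the ownership tree from the current owner:

```rust
fn owner_depth() -> usize {
    let mut depth = 0;
    let mut owner = Owner::current();
    // climbs until we reach a root owner or a dropped parent
    while let Some(parent) = owner.as_ref().and_then(Owner::parent) {
        depth += 1;
        owner = Some(parent);
    }
    depth
}
```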
|
||||
|
||||
/// Creates a new `Owner` that is the child of the current `Owner`, if any.
|
||||
pub fn child(&self) -> Self {
|
||||
let parent = Some(Arc::downgrade(&self.inner));
|
||||
|
||||
@@ -257,6 +257,20 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<ReadSignal<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcReadSignal<T>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: ReadSignal<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::ReadSignal(value.into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Send + Sync> From<ArcRwSignal<T>> for ArcSignal<T, SyncStorage> {
|
||||
#[track_caller]
|
||||
fn from(value: ArcRwSignal<T>) -> Self {
|
||||
@@ -268,6 +282,20 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<RwSignal<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcRwSignal<T>> + Storage<ArcReadSignal<T>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: RwSignal<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::ReadSignal(value.read_only().into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<ArcMemo<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
@@ -282,6 +310,20 @@ pub mod read {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<Memo<T, S>> for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<ArcMemo<T, S>> + Storage<T>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: Memo<T, S>) -> Self {
|
||||
Self {
|
||||
inner: SignalTypes::Memo(value.into()),
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: std::panic::Location::caller(),
|
||||
}
|
||||
}
|
||||
}
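A brief, hedged example (not part of the diff) of what the new `From` impls enable: converting arena-allocated signal types directly into `ArcSignal`:

```rust
fn demo() {
    let count = RwSignal::new(0);
    // previously this required going through `count.read_only()` manually
    let as_arc: ArcSignal<i32> = count.into();
    assert_eq!(as_arc.get(), 0);
}
```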
|
||||
|
||||
impl<T, S> DefinedAt for ArcSignal<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
@@ -1500,7 +1542,6 @@ pub mod read {
|
||||
|
||||
impl<T, S> ReadUntracked for MaybeProp<T, S>
|
||||
where
|
||||
T: Clone,
|
||||
S: Storage<Option<T>> + Storage<SignalTypes<Option<T>, S>>,
|
||||
{
|
||||
type Value = ReadGuard<Option<T>, SignalReadGuard<Option<T>, S>>;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores"
|
||||
version = "0.2.4"
|
||||
version = "0.2.5"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores_macro"
|
||||
version = "0.2.4"
|
||||
version = "0.2.6"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -6,8 +6,8 @@ use syn::{
|
||||
parse::{Parse, ParseStream, Parser},
|
||||
punctuated::Punctuated,
|
||||
token::Comma,
|
||||
ExprClosure, Field, Fields, Generics, Ident, Index, Meta, Result, Token,
|
||||
Type, Variant, Visibility, WhereClause,
|
||||
ExprClosure, Field, Fields, GenericParam, Generics, Ident, Index, Meta,
|
||||
Result, Token, Type, TypeParam, Variant, Visibility, WhereClause,
|
||||
};
|
||||
|
||||
#[proc_macro_error]
|
||||
@@ -26,6 +26,103 @@ pub fn derive_patch(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
.into()
|
||||
}
|
||||
|
||||
/// Removes all constraints from generics arguments list.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```rust,ignore
|
||||
/// struct Data<
|
||||
/// 'a,
|
||||
/// T1: ToString + PatchField,
|
||||
/// T2: PatchField,
|
||||
/// T3: 'static + PatchField,
|
||||
/// T4,
|
||||
/// >
|
||||
/// where
|
||||
/// T3: ToString,
|
||||
/// T4: ToString + PatchField,
|
||||
/// {
|
||||
/// data1: &'a T1,
|
||||
/// data2: T2,
|
||||
/// data3: T3,
|
||||
/// data4: T4,
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// For the struct above, `[syn::DeriveInput::parse]` will return an instance of [syn::Generics]
|
||||
/// which will conceptually look like this
|
||||
///
|
||||
/// ```text
|
||||
/// Generics:
|
||||
/// params:
|
||||
/// [
|
||||
/// 'a,
|
||||
/// T1: ToString + PatchField,
|
||||
/// T2: PatchField,
|
||||
/// T3: 'static + PatchField,
|
||||
/// T4,
|
||||
/// ]
|
||||
/// where_clause:
|
||||
/// [
|
||||
/// T3: ToString,
|
||||
/// T4: ToString + PatchField,
|
||||
/// ]
|
||||
/// ```
|
||||
///
|
||||
/// This function returns a new instance of [syn::Generics] which will conceptually look like this
|
||||
///
|
||||
/// ```text
|
||||
/// Generics:
|
||||
/// params:
|
||||
/// [
|
||||
/// 'a,
|
||||
/// T1,
|
||||
/// T2,
|
||||
/// T3,
|
||||
/// T4,
|
||||
/// ]
|
||||
/// where_clause:
|
||||
/// []
|
||||
/// ```
|
||||
///
|
||||
/// This is useful when you need the bare generic argument list (without bounds) when writing `impl` blocks for the type definition.
|
||||
fn remove_constraint_from_generics(generics: &Generics) -> Generics {
|
||||
let mut new_generics = generics.clone();
|
||||
|
||||
// remove constraints directly placed in the generic arguments list
|
||||
//
|
||||
// For generics for `struct A<T: MyTrait>` the `T: MyTrait` becomes `T`
|
||||
for param in new_generics.params.iter_mut() {
|
||||
match param {
|
||||
GenericParam::Lifetime(lifetime) => {
|
||||
lifetime.bounds.clear(); // remove bounds
|
||||
lifetime.colon_token = None;
|
||||
}
|
||||
GenericParam::Type(type_param) => {
|
||||
type_param.bounds.clear(); // remove bounds
|
||||
type_param.colon_token = None;
|
||||
type_param.eq_token = None;
|
||||
type_param.default = None;
|
||||
}
|
||||
GenericParam::Const(const_param) => {
|
||||
// replaces const generic with type param without bounds which is basically an `ident` token
|
||||
*param = GenericParam::Type(TypeParam {
|
||||
attrs: const_param.attrs.clone(),
|
||||
ident: const_param.ident.clone(),
|
||||
colon_token: None,
|
||||
bounds: Punctuated::new(),
|
||||
eq_token: None,
|
||||
default: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
new_generics.where_clause = None; // remove where clause
|
||||
|
||||
new_generics
|
||||
}
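A hedged illustration (not part of the diff) of the transformation, building the input with `syn::parse_quote`; note that const parameters are reduced to bare identifiers:

```rust
fn demo() {
    let generics: syn::Generics =
        syn::parse_quote!(<'a, T: Clone + Send, const N: usize>);
    let cleared = remove_constraint_from_generics(&generics);
    // renders roughly as `<'a, T, N>`, with no bounds and no where-clause
    println!("{}", quote::quote!(#cleared));
}
```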
|
||||
|
||||
struct Model {
|
||||
vis: Visibility,
|
||||
name: Ident,
|
||||
@@ -111,7 +208,9 @@ impl ToTokens for Model {
|
||||
} = &self;
|
||||
let any_store_field = Ident::new("AnyStoreField", Span::call_site());
|
||||
let trait_name = Ident::new(&format!("{name}StoreFields"), name.span());
|
||||
let clear_generics = remove_constraint_from_generics(generics);
|
||||
let params = &generics.params;
|
||||
let clear_params = &clear_generics.params;
|
||||
let generics_with_orig = quote! { <#any_store_field, #params> };
|
||||
let where_with_orig = {
|
||||
generics
|
||||
@@ -124,17 +223,22 @@ impl ToTokens for Model {
|
||||
} = &w;
|
||||
quote! {
|
||||
#where_token
|
||||
#any_store_field: #library_path::StoreField<Value = #name #generics>,
|
||||
#any_store_field: #library_path::StoreField<Value = #name < #clear_params > >,
|
||||
#predicates
|
||||
}
|
||||
})
|
||||
.unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #name #generics> })
|
||||
.unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #name < #clear_params > > })
|
||||
};
|
||||
|
||||
// define an extension trait that matches this struct
|
||||
// and implement that trait for all StoreFields
|
||||
let (trait_fields, read_fields): (Vec<_>, Vec<_>) =
|
||||
ty.to_field_data(&library_path, generics, &any_store_field, name);
|
||||
let (trait_fields, read_fields): (Vec<_>, Vec<_>) = ty.to_field_data(
|
||||
&library_path,
|
||||
generics,
|
||||
&clear_generics,
|
||||
&any_store_field,
|
||||
name,
|
||||
);
|
||||
|
||||
// read access
|
||||
tokens.extend(quote! {
|
||||
@@ -144,7 +248,7 @@ impl ToTokens for Model {
|
||||
#(#trait_fields)*
|
||||
}
|
||||
|
||||
impl #generics_with_orig #trait_name <AnyStoreField, #params> for AnyStoreField
|
||||
impl #generics_with_orig #trait_name <AnyStoreField, #clear_params> for AnyStoreField
|
||||
#where_with_orig
|
||||
{
|
||||
#(#read_fields)*
|
||||
@@ -158,6 +262,7 @@ impl ModelTy {
|
||||
&self,
|
||||
library_path: &TokenStream,
|
||||
generics: &Generics,
|
||||
clear_generics: &Generics,
|
||||
any_store_field: &Ident,
|
||||
name: &Ident,
|
||||
) -> (Vec<TokenStream>, Vec<TokenStream>) {
|
||||
@@ -204,6 +309,7 @@ impl ModelTy {
|
||||
library_path,
|
||||
ident.as_ref(),
|
||||
generics,
|
||||
clear_generics,
|
||||
any_store_field,
|
||||
name,
|
||||
ty,
|
||||
@@ -215,6 +321,7 @@ impl ModelTy {
|
||||
library_path,
|
||||
ident.as_ref(),
|
||||
generics,
|
||||
clear_generics,
|
||||
any_store_field,
|
||||
name,
|
||||
ty,
|
||||
@@ -233,6 +340,7 @@ impl ModelTy {
|
||||
library_path,
|
||||
ident,
|
||||
generics,
|
||||
clear_generics,
|
||||
any_store_field,
|
||||
name,
|
||||
fields,
|
||||
@@ -242,6 +350,7 @@ impl ModelTy {
|
||||
library_path,
|
||||
ident,
|
||||
generics,
|
||||
clear_generics,
|
||||
any_store_field,
|
||||
name,
|
||||
fields,
|
||||
@@ -260,7 +369,8 @@ fn field_to_tokens(
|
||||
modes: Option<&[SubfieldMode]>,
|
||||
library_path: &proc_macro2::TokenStream,
|
||||
orig_ident: Option<&Ident>,
|
||||
generics: &Generics,
|
||||
_generics: &Generics,
|
||||
clear_generics: &Generics,
|
||||
any_store_field: &Ident,
|
||||
name: &Ident,
|
||||
ty: &Type,
|
||||
@@ -285,7 +395,7 @@ fn field_to_tokens(
|
||||
SubfieldMode::Keyed(keyed_by, key_ty) => {
|
||||
let signature = quote! {
|
||||
#[track_caller]
|
||||
fn #ident(self) -> #library_path::KeyedSubfield<#any_store_field, #name #generics, #key_ty, #ty>
|
||||
fn #ident(self) -> #library_path::KeyedSubfield<#any_store_field, #name #clear_generics, #key_ty, #ty>
|
||||
};
|
||||
return if include_body {
|
||||
quote! {
|
||||
@@ -318,7 +428,7 @@ fn field_to_tokens(
|
||||
// default subfield
|
||||
if include_body {
|
||||
quote! {
|
||||
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty> {
|
||||
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #clear_generics, #ty> {
|
||||
#library_path::Subfield::new(
|
||||
self,
|
||||
#idx.into(),
|
||||
@@ -329,7 +439,7 @@ fn field_to_tokens(
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty>;
|
||||
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #clear_generics, #ty>;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -339,7 +449,8 @@ fn variant_to_tokens(
|
||||
include_body: bool,
|
||||
library_path: &proc_macro2::TokenStream,
|
||||
ident: &Ident,
|
||||
generics: &Generics,
|
||||
_generics: &Generics,
|
||||
clear_generics: &Generics,
|
||||
any_store_field: &Ident,
|
||||
name: &Ident,
|
||||
fields: &Fields,
|
||||
@@ -408,7 +519,7 @@ fn variant_to_tokens(
|
||||
// default subfield
|
||||
if include_body {
|
||||
quote! {
|
||||
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> {
|
||||
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>> {
|
||||
#library_path::StoreField::track_field(&self);
|
||||
let reader = #library_path::StoreField::reader(&self);
|
||||
let matches = reader
|
||||
@@ -440,7 +551,7 @@ fn variant_to_tokens(
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>;
|
||||
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>>;
|
||||
}
|
||||
}
|
||||
}));
|
||||
@@ -491,7 +602,7 @@ fn variant_to_tokens(
|
||||
// default subfield
|
||||
if include_body {
|
||||
quote! {
|
||||
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> {
|
||||
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>> {
|
||||
#library_path::StoreField::track_field(&self);
|
||||
let reader = #library_path::StoreField::reader(&self);
|
||||
let matches = reader
|
||||
@@ -523,7 +634,7 @@ fn variant_to_tokens(
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>;
|
||||
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #clear_generics, #field_ty>>;
|
||||
}
|
||||
}
|
||||
}));
|
||||
@@ -665,9 +776,14 @@ impl ToTokens for PatchModel {
|
||||
}
|
||||
};
|
||||
|
||||
let clear_generics = remove_constraint_from_generics(generics);
|
||||
let params = clear_generics.params;
|
||||
let where_clause = &generics.where_clause;
|
||||
|
||||
// read access
|
||||
tokens.extend(quote! {
|
||||
impl #library_path::PatchField for #name #generics
|
||||
impl #generics #library_path::PatchField for #name <#params>
|
||||
#where_clause
|
||||
{
|
||||
fn patch_field(
|
||||
&mut self,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router"
|
||||
version = "0.8.4"
|
||||
version = "0.8.8"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -364,6 +364,12 @@ where
#[derive(Debug)]
pub(crate) struct MatchedRoute(pub String, pub AnyView);

impl MatchedRoute {
fn branch_name(&self) -> String {
format!("{:?}", self.1.as_type_id())
}
}

impl Render for MatchedRoute {
type State = <AnyView as Render>::State;

@@ -414,8 +420,9 @@ impl RenderHtml for MatchedRoute {
mark_branches: bool,
extra_attrs: Vec<AnyAttribute>,
) {
if mark_branches && escape {
buf.open_branch(&self.0);
let branch_name = (mark_branches && escape).then(|| self.branch_name());
if let Some(bn) = &branch_name {
buf.open_branch(bn);
}
self.1.to_html_with_buf(
buf,
@@ -424,8 +431,8 @@ impl RenderHtml for MatchedRoute {
mark_branches,
extra_attrs,
);
if mark_branches && escape {
buf.close_branch(&self.0);
if let Some(bn) = &branch_name {
buf.close_branch(bn);
if *position == Position::NextChildAfterText {
*position = Position::NextChild;
}
@@ -442,8 +449,9 @@ impl RenderHtml for MatchedRoute {
) where
Self: Sized,
{
if mark_branches && escape {
buf.open_branch(&self.0);
let branch_name = (mark_branches && escape).then(|| self.branch_name());
if let Some(bn) = &branch_name {
buf.open_branch(bn);
}
self.1.to_html_async_with_buf::<OUT_OF_ORDER>(
buf,
@@ -452,8 +460,8 @@ impl RenderHtml for MatchedRoute {
mark_branches,
extra_attrs,
);
if mark_branches && escape {
buf.close_branch(&self.0);
if let Some(bn) = &branch_name {
buf.close_branch(bn);
if *position == Position::NextChildAfterText {
*position = Position::NextChild;
}

@@ -185,11 +185,15 @@ impl LocationProvider for BrowserUrl {
let is_back = self.is_back.clone();
move || match Self::current() {
Ok(new_url) => {
let stack = path_stack.read_value();
let mut stack = path_stack.write_value();
let is_navigating_back = stack.len() == 1
|| (stack.len() >= 2
&& stack.get(stack.len() - 2) == Some(&new_url));

if is_navigating_back {
stack.pop();
}

is_back.set(is_navigating_back);

url.set(new_url);
@@ -221,6 +225,13 @@ impl LocationProvider for BrowserUrl {
fn complete_navigation(&self, loc: &LocationChange) {
let history = window().history().unwrap();

let current_path = self
.path_stack
.read_value()
.last()
.map(|url| url.to_full_path());
let add_to_stack = current_path.as_ref() != Some(&loc.value);

if loc.replace {
history
.replace_state_with_url(
@@ -229,7 +240,7 @@ impl LocationProvider for BrowserUrl {
Some(&loc.value),
)
.unwrap();
} else {
} else if add_to_stack {
// push the "forward direction" marker
let state = &loc.state.to_js_value();
history
@@ -240,7 +251,9 @@ impl LocationProvider for BrowserUrl {
// add this URL to the "path stack" for detecting back navigations, and
// unset "navigating back" state
if let Ok(url) = Self::current() {
self.path_stack.write_value().push(url);
if add_to_stack {
self.path_stack.write_value().push(url);
}
self.is_back.set(false);
}
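A minimal sketch (not from the repository) of the back-navigation heuristic the BrowserUrl hunks above implement, with plain `String`s standing in for the router's URL type: a `popstate` counts as "back" when the new URL equals the second-to-last entry on the path stack, and the top entry is then popped instead of pushed.

```rust
// Sketch only: plain strings stand in for the router's URL values.
fn on_popstate(stack: &mut Vec<String>, new_url: String) -> bool {
    // "back" when the new URL matches the second-to-last stack entry
    // (or the stack is down to a single entry), mirroring the hunk above
    let is_back = stack.len() == 1
        || (stack.len() >= 2 && stack.get(stack.len() - 2) == Some(&new_url));
    if is_back {
        stack.pop();
    } else {
        // in the real code the push happens later, in `complete_navigation`,
        // and only when the URL differs from the current top of the stack
        stack.push(new_url);
    }
    is_back
}

fn main() {
    let mut stack = vec!["/".to_string(), "/posts".to_string()];
    assert!(on_popstate(&mut stack, "/".to_string())); // navigating back to "/"
    assert_eq!(stack, vec!["/".to_string()]);
}
```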
@@ -10,14 +10,23 @@ use crate::{
};
use any_spawner::Executor;
use either_of::{Either, EitherOf3};
use futures::{channel::oneshot, future::join_all, FutureExt};
use leptos::{attr::any_attribute::AnyAttribute, component, oco::Oco};
use futures::{
channel::oneshot,
future::{join_all, AbortHandle, Abortable},
FutureExt,
};
use leptos::{
attr::any_attribute::AnyAttribute,
component,
oco::Oco,
prelude::{ArcStoredValue, WriteValue},
};
use or_poisoned::OrPoisoned;
use reactive_graph::{
computed::{ArcMemo, ScopedFuture},
owner::{provide_context, use_context, Owner},
signal::{ArcRwSignal, ArcTrigger},
traits::{Get, GetUntracked, Notify, ReadUntracked, Set, Track},
traits::{Get, GetUntracked, Notify, ReadUntracked, Set, Track, Write},
transition::AsyncTransition,
wrappers::write::SignalSetter,
};
@@ -68,6 +77,7 @@ where
// held to keep the Owner alive until the router is dropped
#[allow(unused)]
outer_owner: Owner,
abort_navigation: ArcStoredValue<Option<AbortHandle>>,
}

impl<Loc, Defs, FalFn, Fal> Render for NestedRoutesView<Loc, Defs, FalFn>
@@ -109,6 +119,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -134,6 +145,7 @@ where
outlets,
view,
outer_owner,
abort_navigation: Default::default(),
}
}

@@ -148,13 +160,14 @@ where
}
return;
}

// since the path didn't match, we'll update the retained path for future diffing
state.path.clear();
state.path.push_str(url_snapshot.path());

let new_match = self.routes.match_route(url_snapshot.path());

state.current_url.set(url_snapshot);
*state.current_url.write_untracked() = url_snapshot;

match new_match {
None => {
@@ -181,30 +194,51 @@ where
&mut state.outlets,
self.set_is_routing.is_some(),
0,
&self.outer_owner,
);

let (abort_handle, abort_registration) =
AbortHandle::new_pair();

if let Some(prev_handle) =
state.abort_navigation.write_value().replace(abort_handle)
{
prev_handle.abort();
}

let location = self.location.clone();
let is_back = location
.as_ref()
.map(|nav| nav.is_back().get_untracked())
.unwrap_or(false);
Executor::spawn_local(async move {
let triggers = join_all(preloaders).await;
// tell each one of the outlet triggers that it's ready
let notify = move || {
for trigger in triggers {
trigger.notify();
let triggers = Abortable::new(
join_all(preloaders),
abort_registration,
);
if let Ok(triggers) = triggers.await {
// tell each one of the outlet triggers that it's ready
let notify = move || {
for trigger in triggers {
trigger.notify();
}
};
if self.transition {
start_view_transition(
different_level,
is_back,
notify,
);
} else {
notify();
}
};
if self.transition {
start_view_transition(different_level, is_back, notify);
} else {
notify();
}
});

let abort_navigation = state.abort_navigation.clone();
Executor::spawn_local(async move {
join_all(full_loaders).await;
_ = abort_navigation.write_value().take();
if let Some(set_is_routing) = self.set_is_routing {
set_is_routing.set(false);
}
@@ -338,6 +372,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);

// outlets will not send their views if the loaders are never polled
@@ -391,8 +426,16 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);

let preload_owners = outlets
.iter()
.map(|o| o.preload_owner.clone())
.collect::<Vec<_>>();
outer_owner
.with(|| Owner::on_cleanup(move || drop(preload_owners)));

// outlets will not send their views if the loaders are never polled
// the loaders are async so that they can lazy-load routes in the browser,
// but they should always be synchronously available on the server
@@ -444,6 +487,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -463,6 +507,7 @@ where
outlets,
view,
outer_owner,
abort_navigation: Default::default(),
}
}

@@ -498,6 +543,7 @@ where
base,
&mut loaders,
&mut outlets,
&outer_owner,
);
drop(url);

@@ -514,6 +560,7 @@ where
outlets,
view,
outer_owner,
abort_navigation: Default::default(),
}
}

@@ -533,6 +580,7 @@ pub(crate) struct RouteContext {
base: Option<Oco<'static, str>>,
view_fn: Arc<Mutex<OutletViewFn>>,
owner: Arc<Mutex<Option<Owner>>>,
preload_owner: Owner,
child: ChildRoute,
}

@@ -564,6 +612,7 @@ impl Clone for RouteContext {
view_fn: Arc::clone(&self.view_fn),
owner: Arc::clone(&self.owner),
child: self.child.clone(),
preload_owner: self.preload_owner.clone(),
}
}
}
@@ -575,6 +624,7 @@ trait AddNestedRoute {
base: Option<Oco<'static, str>>,
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
outlets: &mut Vec<RouteContext>,
outer_owner: &Owner,
);

#[allow(clippy::too_many_arguments)]
@@ -588,6 +638,7 @@ trait AddNestedRoute {
outlets: &mut Vec<RouteContext>,
set_is_routing: bool,
level: u8,
outer_owner: &Owner,
) -> u8;
}

@@ -601,6 +652,7 @@ where
base: Option<Oco<'static, str>>,
loaders: &mut Vec<Pin<Box<dyn Future<Output = ArcTrigger>>>>,
outlets: &mut Vec<RouteContext>,
outer_owner: &Owner,
) {
let orig_url = url;

@@ -668,6 +720,7 @@ where
base: base.clone(),
child: ChildRoute(Arc::new(Mutex::new(None))),
owner: Arc::new(Mutex::new(None)),
preload_owner: outer_owner.child(),
};
if !outlets.is_empty() {
let prev_index = outlets.len().saturating_sub(1);
@@ -692,7 +745,15 @@ where
provide_context(params.clone());
provide_context(url.clone());
provide_context(matched.clone());
view.preload().await;
outlet
.preload_owner
.with(|| {
provide_context(params.clone());
provide_context(url.clone());
provide_context(matched.clone());
ScopedFuture::new(view.preload())
})
.await;
let child = outlet.child.clone();
*view_fn.lock().or_poisoned() =
Box::new(move |owner_where_used| {
@@ -739,7 +800,13 @@ where
// this is important because to build the view, we need access to the outlet
// and the outlet will be returned from building this child
if let Some(child) = child {
child.build_nested_route(orig_url, base, loaders, outlets);
child.build_nested_route(
orig_url,
base,
loaders,
outlets,
outer_owner,
);
}
}

@@ -754,6 +821,7 @@ where
outlets: &mut Vec<RouteContext>,
set_is_routing: bool,
level: u8,
outer_owner: &Owner,
) -> u8 {
let (parent_params, parent_matches): (Vec<_>, Vec<_>) = outlets
.iter()
@@ -770,7 +838,13 @@ where
match current {
// if there's nothing currently in the routes at this point, build from here
None => {
self.build_nested_route(url, base, preloaders, outlets);
self.build_nested_route(
url,
base,
preloaders,
outlets,
outer_owner,
);
level
}
Some(current) => {
@@ -810,6 +884,10 @@ where
&mut current.matched,
ArcRwSignal::new(new_match),
);
let old_preload_owner = mem::replace(
&mut current.preload_owner,
outer_owner.child(),
);
let matched_including_parents = {
ArcMemo::new({
let matched = current.matched.clone();
@@ -852,11 +930,26 @@ where
let child = outlet.child.clone();
async move {
let child = child.clone();
if set_is_routing {
AsyncTransition::run(|| view.preload()).await;
} else {
view.preload().await;
}
outlet
.preload_owner
.with(|| {
provide_context(
params_including_parents.clone(),
);
provide_context(url.clone());
provide_context(matched.clone());
ScopedFuture::new(async {
if set_is_routing {
AsyncTransition::run(|| {
view.preload()
})
.await;
} else {
view.preload().await;
}
})
})
.await;
*view_fn.lock().or_poisoned() =
Box::new(move |owner_where_used| {
let prev_owner = route_owner
@@ -905,6 +998,7 @@ where
drop(old_params);
drop(old_url);
drop(old_matched);
drop(old_preload_owner);
trigger
}
})));
@@ -915,8 +1009,13 @@ where

// if this children has matches, then rebuild the lower section of the tree
if let Some(child) = child {
child
.build_nested_route(url, base, preloaders, outlets);
child.build_nested_route(
url,
base,
preloaders,
outlets,
outer_owner,
);
} else {
*outlets[*items].child.0.lock().or_poisoned() = None;
}
@@ -940,6 +1039,7 @@ where
outlets,
set_is_routing,
level + 1,
outer_owner,
)
} else {
*current.child.0.lock().or_poisoned() = None;

@@ -1,6 +1,6 @@
[package]
name = "leptos_router_macro"
version = "0.8.4"
version = "0.8.5"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"

@@ -19,6 +19,7 @@ quote = { workspace = true, default-features = true }
syn = { features = ["full"], workspace = true, default-features = true }

[dev-dependencies]
leptos = { path = "../leptos" }
leptos_router = { path = "../router" }
leptos_macro = { path = "../leptos_macro" }
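The NestedRoutesView hunks above keep the latest `AbortHandle` and abort the one it replaces, so a superseded navigation's preloaders stop instead of racing the new ones. A minimal sketch (not from the repository) of that `futures` pattern:

```rust
// Sketch only: uses the `futures` crate's AbortHandle/Abortable pair.
use futures::executor::block_on;
use futures::future::{pending, AbortHandle, Abortable};

fn main() {
    let (handle, registration) = AbortHandle::new_pair();

    // Stand-in for the previous navigation's `join_all(preloaders)`.
    let preloaders = Abortable::new(pending::<()>(), registration);

    // A newer navigation replaces the stored handle and aborts the old one,
    // mirroring `state.abort_navigation.write_value().replace(abort_handle)`.
    let mut stored: Option<AbortHandle> = Some(handle);
    if let Some(prev) = stored.replace(AbortHandle::new_pair().0) {
        prev.abort();
    }

    // The aborted future resolves to Err(Aborted) instead of hanging forever.
    assert!(block_on(preloaders).is_err());
}
```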
@@ -198,6 +198,37 @@ impl ToTokens for Segments {
/// add a [`lazy`] annotation to the `view` method, which will cause the code for the view
/// to lazy-load concurrently with the `data` being loaded for the route.
///
/// ```rust
/// use leptos::prelude::*;
/// use leptos_router::{lazy_route, LazyRoute};
///
/// // the route definition
/// #[derive(Debug)]
/// struct BlogListingRoute {
/// titles: Resource<Vec<String>>
/// }
///
/// #[lazy_route]
/// impl LazyRoute for BlogListingRoute {
/// fn data() -> Self {
/// Self {
/// titles: Resource::new(|| (), |_| async {
/// vec![/* todo: load blog posts */]
/// })
/// }
/// }
///
/// // this function will be lazy-loaded, concurrently with data()
/// fn view(this: Self) -> AnyView {
/// let BlogListingRoute { titles } = this;
///
/// // ... now you can use the `titles` resource with Suspense, etc.,
/// // and return AnyView by calling .into_any() on a view
/// # ().into_any()
/// }
/// }
/// ```
///
/// [`impl LazyRoute`]: https://docs.rs/leptos_router/latest/leptos_router/trait.LazyRoute.html
/// [`lazy`]: https://docs.rs/leptos_macro/latest/leptos_macro/macro.lazy.html
#[proc_macro_attribute]
scripts/bump.sh (new executable file, 46 lines added)
@@ -0,0 +1,46 @@
#!/usr/bin/env bash

set -e

LAST_TAG=$(git describe --tags --abbrev=0 --match "v*")

# Get package name and manifest_path for all members
PACKAGES=$(cargo metadata --no-deps --format-version=1 | jq -r '.packages[] | "\(.name):::\(.manifest_path)"')

for PKG in $PACKAGES; do
NAME="${PKG%%:::*}"
MANIFEST_PATH="${PKG##*:::}"
DIR=$(dirname "$MANIFEST_PATH")

# Look for release commit for this member up to the last tag
RELEASE_COMMIT=$(git log --oneline --grep="^$NAME-v" --format="%H" "$LAST_TAG"..HEAD | head -n1)

if [[ -z "$RELEASE_COMMIT" ]]; then
# No release commit found, use the latest release tag commit
RELEASE_COMMIT=$(git rev-list -n 1 "$LAST_TAG")
fi

# Check if any file in the package directory changed since the member's release commit or latest tag release
if git diff --quiet "$RELEASE_COMMIT"..HEAD -- "$DIR"; then
continue
fi

echo "Changes detected in $NAME ($DIR)"
PS3="Select version bump for $NAME: "
select BUMP in patch minor major; do
if [[ "$BUMP" == "patch" || "$BUMP" == "minor" || "$BUMP" == "major" ]]; then
break
else
echo "Invalid option"
fi
done

if cargo set-version --help >/dev/null 2>&1; then
cargo set-version --bump "$BUMP" --package "$NAME"
else
echo "Please install cargo-edit first."
exit 1
fi

echo "$NAME bumped to $BUMP"
done
@@ -5,7 +5,7 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "RPC for any web framework."
|
||||
readme = "../README.md"
|
||||
version = { workspace = true }
|
||||
version = "0.8.7"
|
||||
rust-version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "The default implementation of the server_fn macro without a context"
|
||||
version = { workspace = true }
|
||||
version = "0.8.5"
|
||||
edition.workspace = true
|
||||
|
||||
[lib]
|
||||
|
||||
@@ -30,8 +30,8 @@ where
|
||||
})?;
|
||||
Request::try_new_patch_bytes(
|
||||
path,
|
||||
accepts,
|
||||
Encoding::CONTENT_TYPE,
|
||||
accepts,
|
||||
data,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -28,7 +28,7 @@ where
|
||||
let data = Encoding::encode(&self).map_err(|e| {
|
||||
ServerFnErrorErr::Serialization(e.to_string()).into_app_error()
|
||||
})?;
|
||||
Request::try_new_post_bytes(path, accepts, Encoding::CONTENT_TYPE, data)
|
||||
Request::try_new_post_bytes(path, Encoding::CONTENT_TYPE, accepts, data)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ where
|
||||
let data = Encoding::encode(&self).map_err(|e| {
|
||||
ServerFnErrorErr::Serialization(e.to_string()).into_app_error()
|
||||
})?;
|
||||
Request::try_new_put_bytes(path, accepts, Encoding::CONTENT_TYPE, data)
|
||||
Request::try_new_put_bytes(path, Encoding::CONTENT_TYPE, accepts, data)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -13,7 +13,7 @@ pub struct GetUrl;
|
||||
/// Pass arguments as the URL-encoded body of a `POST` request.
|
||||
pub struct PostUrl;
|
||||
|
||||
/// Pass arguments as the URL-encoded body of a `DELETE` request.
|
||||
/// Pass arguments as the URL-encoded query string of a `DELETE` request.
|
||||
/// **Note**: Browser support for `DELETE` requests without JS/WASM may be poor.
|
||||
/// Consider using a `POST` request if functionality without JS/WASM is required.
|
||||
pub struct DeleteUrl;
|
||||
@@ -46,7 +46,7 @@ where
|
||||
let data = serde_qs::to_string(&self).map_err(|e| {
|
||||
ServerFnErrorErr::Serialization(e.to_string()).into_app_error()
|
||||
})?;
|
||||
Request::try_new_get(path, accepts, GetUrl::CONTENT_TYPE, &data)
|
||||
Request::try_new_get(path, GetUrl::CONTENT_TYPE, accepts, &data)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,7 +85,7 @@ where
|
||||
let qs = serde_qs::to_string(&self).map_err(|e| {
|
||||
ServerFnErrorErr::Serialization(e.to_string()).into_app_error()
|
||||
})?;
|
||||
Request::try_new_post(path, accepts, PostUrl::CONTENT_TYPE, qs)
|
||||
Request::try_new_post(path, PostUrl::CONTENT_TYPE, accepts, qs)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -124,7 +124,7 @@ where
|
||||
let data = serde_qs::to_string(&self).map_err(|e| {
|
||||
ServerFnErrorErr::Serialization(e.to_string()).into_app_error()
|
||||
})?;
|
||||
Request::try_new_delete(path, accepts, GetUrl::CONTENT_TYPE, &data)
|
||||
Request::try_new_delete(path, DeleteUrl::CONTENT_TYPE, accepts, &data)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -163,7 +163,7 @@ where
|
||||
let data = serde_qs::to_string(&self).map_err(|e| {
|
||||
ServerFnErrorErr::Serialization(e.to_string()).into_app_error()
|
||||
})?;
|
||||
Request::try_new_patch(path, accepts, GetUrl::CONTENT_TYPE, data)
|
||||
Request::try_new_patch(path, PatchUrl::CONTENT_TYPE, accepts, data)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -174,9 +174,9 @@ where
|
||||
E: FromServerFnError,
|
||||
{
|
||||
async fn from_req(req: Request) -> Result<Self, E> {
|
||||
let string_data = req.as_query().unwrap_or_default();
|
||||
let string_data = req.try_into_string().await?;
|
||||
let args = serde_qs::Config::new(5, false)
|
||||
.deserialize_str::<Self>(string_data)
|
||||
.deserialize_str::<Self>(&string_data)
|
||||
.map_err(|e| {
|
||||
ServerFnErrorErr::Args(e.to_string()).into_app_error()
|
||||
})?;
|
||||
@@ -202,7 +202,7 @@ where
|
||||
let data = serde_qs::to_string(&self).map_err(|e| {
|
||||
ServerFnErrorErr::Serialization(e.to_string()).into_app_error()
|
||||
})?;
|
||||
Request::try_new_put(path, accepts, GetUrl::CONTENT_TYPE, data)
|
||||
Request::try_new_put(path, PutUrl::CONTENT_TYPE, accepts, data)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -213,9 +213,9 @@ where
|
||||
E: FromServerFnError,
|
||||
{
|
||||
async fn from_req(req: Request) -> Result<Self, E> {
|
||||
let string_data = req.as_query().unwrap_or_default();
|
||||
let string_data = req.try_into_string().await?;
|
||||
let args = serde_qs::Config::new(5, false)
|
||||
.deserialize_str::<Self>(string_data)
|
||||
.deserialize_str::<Self>(&string_data)
|
||||
.map_err(|e| {
|
||||
ServerFnErrorErr::Args(e.to_string()).into_app_error()
|
||||
})?;
|
||||
|
||||
@@ -568,7 +568,7 @@ pub trait FromServerFnError: std::fmt::Debug + Sized + 'static {
/// Converts a [`ServerFnErrorErr`] into the application-specific custom error type.
fn from_server_fn_error(value: ServerFnErrorErr) -> Self;

/// Converts the custom error type to a [`String`].
/// Serializes the custom error type to bytes, according to the encoding given by `Self::Encoding`.
fn ser(&self) -> Bytes {
Self::Encoder::encode(self).unwrap_or_else(|e| {
Self::Encoder::encode(&Self::from_server_fn_error(
@@ -581,7 +581,7 @@ pub trait FromServerFnError: std::fmt::Debug + Sized + 'static {
})
}

/// Deserializes the custom error type from a [`&str`].
/// Deserializes the custom error type, according to the encoding given by `Self::Encoding`.
fn de(data: Bytes) -> Self {
Self::Encoder::decode(data).unwrap_or_else(|e| {
ServerFnErrorErr::Deserialization(e.to_string()).into_app_error()

@@ -307,16 +307,18 @@ pub trait ServerFn: Send + Sized {
.await
.map(|res| (res, None))
.unwrap_or_else(|e| {
(
let mut response =
<<Self as ServerFn>::Server as crate::Server<
Self::Error,
Self::InputStreamError,
Self::OutputStreamError,
>>::Response::error_response(
Self::PATH, e.ser()
),
Some(e),
)
);
let content_type =
<Self::Error as FromServerFnError>::Encoder::CONTENT_TYPE;
response.content_type(content_type);
(response, Some(e))
});

// if it accepts HTML, we'll redirect to the Referer
@@ -72,6 +72,10 @@ mod axum {
let inner = self.call(req);
Box::pin(async move {
inner.await.unwrap_or_else(|e| {
// TODO: This does not set the Content-Type on the response. Doing so will
// require a breaking change in order to get the correct encoding from the
// error's `FromServerFnError::Encoder::CONTENT_TYPE` impl.
// Note: This only applies to middleware errors.
let err =
ser(ServerFnErrorErr::MiddlewareError(e.to_string()));
Response::<Body>::error_response(&path, err)
@@ -149,6 +153,10 @@ mod actix {
let inner = self.call(req);
Box::pin(async move {
inner.await.unwrap_or_else(|e| {
// TODO: This does not set the Content-Type on the response. Doing so will
// require a breaking change in order to get the correct encoding from the
// error's `FromServerFnError::Encoder::CONTENT_TYPE` impl.
// Note: This only applies to middleware errors.
let err =
ser(ServerFnErrorErr::MiddlewareError(e.to_string()));
ActixResponse::error_response(&path, err).take()

@@ -5,7 +5,7 @@ use crate::error::{
use actix_web::{
http::{
header,
header::{HeaderValue, LOCATION},
header::{HeaderValue, CONTENT_TYPE, LOCATION},
StatusCode,
},
HttpResponse,
@@ -80,6 +80,12 @@ impl Res for ActixResponse {
))
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.0.headers_mut().insert(CONTENT_TYPE, content_type);
}
}

fn redirect(&mut self, path: &str) {
if let Ok(path) = HeaderValue::from_str(path) {
*self.0.status_mut() = StatusCode::FOUND;

@@ -100,6 +100,13 @@ impl Res for Response<Body> {
.unwrap()
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.headers_mut()
.insert(header::CONTENT_TYPE, content_type);
}
}

fn redirect(&mut self, path: &str) {
if let Ok(path) = HeaderValue::from_str(path) {
self.headers_mut().insert(header::LOCATION, path);

@@ -60,6 +60,13 @@ impl Res for Response<Body> {
.unwrap()
}

fn content_type(&mut self, content_type: &str) {
if let Ok(content_type) = HeaderValue::from_str(content_type) {
self.headers_mut()
.insert(header::CONTENT_TYPE, content_type);
}
}

fn redirect(&mut self, path: &str) {
if let Ok(path) = HeaderValue::from_str(path) {
self.headers_mut().insert(header::LOCATION, path);
@@ -37,9 +37,14 @@ where

/// Represents the response as created by the server;
pub trait Res {
/// Converts an error into a response, with a `500` status code and the error text as its body.
/// Converts an error into a response, with a `500` status code and the error as its body.
fn error_response(path: &str, err: Bytes) -> Self;

/// Set the `Content-Type` header for the response.
fn content_type(&mut self, #[allow(unused_variables)] content_type: &str) {
// TODO 0.9: remove this method and default implementation. It is only included here
// to allow setting the `Content-Type` header for error responses without requiring a
// semver-incompatible change.
}
/// Redirect the response by setting a 302 code and Location header.
fn redirect(&mut self, path: &str);
}
@@ -103,6 +108,10 @@ impl Res for BrowserMockRes {
unreachable!()
}

fn content_type(&mut self, _content_type: &str) {
unreachable!()
}

fn redirect(&mut self, _path: &str) {
unreachable!()
}
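The `content_type` method above is added with a default no-op body, so existing `Res` implementors keep compiling unchanged; that default is what keeps the addition semver-compatible until 0.9. A minimal sketch (not from the repository) of that pattern:

```rust
// Sketch of adding a trait method without breaking downstream impls.
trait Res {
    fn error_response(err: String) -> Self;

    // New in a minor release: the default body means implementors written
    // against the old trait definition still compile unchanged.
    fn content_type(&mut self, _content_type: &str) {
        // no-op by default; backends override it to actually set the header
    }
}

struct PlainResponse(String);

// An "old" impl that predates `content_type` still satisfies the trait.
impl Res for PlainResponse {
    fn error_response(err: String) -> Self {
        PlainResponse(err)
    }
}

fn main() {
    let mut res = PlainResponse::error_response("boom".into());
    res.content_type("text/plain"); // falls back to the default no-op
    assert_eq!(res.0, "boom");
}
```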
@@ -5,16 +5,22 @@ license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
description = "RPC for any web framework."
|
||||
readme = "../README.md"
|
||||
version = { workspace = true }
|
||||
version = "0.8.7"
|
||||
edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
quote = { workspace = true, default-features = true }
|
||||
syn = { features = ["full", "parsing", "extra-traits"] , workspace = true, default-features = true }
|
||||
syn = { features = [
|
||||
"full",
|
||||
"parsing",
|
||||
"extra-traits",
|
||||
], workspace = true, default-features = true }
|
||||
proc-macro2 = { workspace = true, default-features = true }
|
||||
xxhash-rust = { features = ["const_xxh64"] , workspace = true, default-features = true }
|
||||
xxhash-rust = { features = [
|
||||
"const_xxh64",
|
||||
], workspace = true, default-features = true }
|
||||
const_format = { workspace = true, default-features = true }
|
||||
convert_case = { workspace = true , default-features = true }
|
||||
convert_case = { workspace = true, default-features = true }
|
||||
|
||||
|
||||
[build-dependencies]
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.