Mirror of https://github.com/leptos-rs/leptos.git (synced 2025-12-27 15:44:42 -05:00)

Compare commits: workspace-... to v0.7.7 (39 commits)
Commits (SHA1):

7be6a9da86
c51e07b5a4
b17a4c92c7
03f9c6cb6d
9e8b8886da
6a6b3dee15
5d71913523
706617ab0a
cd64bb9d67
f881c1877d
8783f6a478
2add714c65
88b1b2d882
9d3a743d33
c6de7c714e
6154199850
32be3a023a
d9043e4f34
e3010c7f1f
1dcc5838f7
1ff373dbc2
c35c42c6e3
d4cbba7c63
9cc8ee3c5a
f0c5ffe55f
586c330995
72f960a026
b62ae56094
9ccefbbd8c
d1108826cc
d6c4cd2a81
f8acdf9168
09bbae2a72
ac3352724b
3413825638
22b2d8ec84
1c3e013a63
9d4b1bc4b7
f303ff9706
.github/workflows/ci-changed-examples.yml (vendored, 2 changes)

@@ -4,10 +4,12 @@ on:
     branches:
       - main
       - leptos_0.6
+      - leptos_0.8
   pull_request:
     branches:
       - main
       - leptos_0.6
+      - leptos_0.8
 jobs:
   get-example-changed:
     uses: ./.github/workflows/get-example-changed.yml

.github/workflows/ci-examples.yml (vendored, 2 changes)

@@ -4,10 +4,12 @@ on:
     branches:
       - main
       - leptos_0.6
+      - leptos_0.8
   pull_request:
     branches:
       - main
       - leptos_0.6
+      - leptos_0.8
 jobs:
   get-leptos-changed:
     uses: ./.github/workflows/get-leptos-changed.yml

.github/workflows/ci-semver.yml (vendored, 2 changes)

@@ -4,10 +4,12 @@ on:
     branches:
       - main
       - leptos_0.6
+      - leptos_0.8
   pull_request:
     branches:
       - main
       - leptos_0.6
+      - leptos_0.8
 env:
   DEBIAN_FRONTEND: noninteractive
 jobs:

.github/workflows/ci.yml (vendored, 2 changes)

@@ -4,10 +4,12 @@ on:
     branches:
       - main
       - leptos_0.6
+      - leptos_0.8
   pull_request:
     branches:
       - main
       - leptos_0.6
+      - leptos_0.8
 jobs:
   get-leptos-changed:
     uses: ./.github/workflows/get-leptos-changed.yml

.github/workflows/run-cargo-make-task.yml (vendored, 2 changes)

@@ -55,7 +55,7 @@ jobs:
       - name: Install wasm-bindgen
         run: cargo binstall wasm-bindgen-cli --no-confirm
       - name: Install cargo-leptos
-        run: cargo binstall cargo-leptos --no-confirm
+        run: cargo binstall cargo-leptos --locked --no-confirm
       - name: Install Trunk
         uses: jetli/trunk-action@v0.5.0
         with:
Cargo.lock (generated, 243 changes)
@@ -52,7 +52,7 @@ dependencies = [
|
||||
"actix-rt",
|
||||
"actix-service",
|
||||
"actix-utils",
|
||||
"ahash 0.8.11",
|
||||
"ahash",
|
||||
"base64",
|
||||
"bitflags",
|
||||
"brotli",
|
||||
@@ -169,7 +169,7 @@ dependencies = [
|
||||
"actix-service",
|
||||
"actix-utils",
|
||||
"actix-web-codegen",
|
||||
"ahash 0.8.11",
|
||||
"ahash",
|
||||
"bytes",
|
||||
"bytestring",
|
||||
"cfg-if",
|
||||
@@ -223,17 +223,6 @@ version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627"
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.7.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9"
|
||||
dependencies = [
|
||||
"getrandom",
|
||||
"once_cell",
|
||||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ahash"
|
||||
version = "0.8.11"
|
||||
@@ -344,9 +333,9 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de"
|
||||
|
||||
[[package]]
|
||||
name = "async-trait"
|
||||
version = "0.1.83"
|
||||
version = "0.1.85"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd"
|
||||
checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -481,18 +470,6 @@ version = "2.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
|
||||
|
||||
[[package]]
|
||||
name = "bitvec"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c"
|
||||
dependencies = [
|
||||
"funty",
|
||||
"radium",
|
||||
"tap",
|
||||
"wyz",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "block-buffer"
|
||||
version = "0.10.4"
|
||||
@@ -529,40 +506,18 @@ version = "3.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
|
||||
|
||||
[[package]]
|
||||
name = "bytecheck"
|
||||
version = "0.6.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "23cdc57ce23ac53c931e88a43d06d070a6fd142f2617be5855eb75efc9beb1c2"
|
||||
dependencies = [
|
||||
"bytecheck_derive 0.6.12",
|
||||
"ptr_meta 0.1.4",
|
||||
"simdutf8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bytecheck"
|
||||
version = "0.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "50c8f430744b23b54ad15161fcbc22d82a29b73eacbe425fea23ec822600bc6f"
|
||||
dependencies = [
|
||||
"bytecheck_derive 0.8.0",
|
||||
"ptr_meta 0.3.0",
|
||||
"bytecheck_derive",
|
||||
"ptr_meta",
|
||||
"rancor",
|
||||
"simdutf8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bytecheck_derive"
|
||||
version = "0.6.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3db406d29fbcd95542e92559bed4d8ad92636d1ca8b3b72ede10b4bcc010e659"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bytecheck_derive"
|
||||
version = "0.8.0"
|
||||
@@ -669,18 +624,18 @@ checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15"
|
||||
|
||||
[[package]]
|
||||
name = "codee"
|
||||
version = "0.2.0"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5d3ad3122b0001c7f140cf4d605ef9a9e2c24d96ab0b4fb4347b76de2425f445"
|
||||
checksum = "0f18d705321923b1a9358e3fc3c57c3b50171196827fc7f5f10b053242aca627"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"miniserde",
|
||||
"rkyv 0.7.45",
|
||||
"rkyv",
|
||||
"serde",
|
||||
"serde-lite",
|
||||
"serde-wasm-bindgen",
|
||||
"serde_json",
|
||||
"thiserror 1.0.69",
|
||||
"thiserror 2.0.9",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
@@ -910,6 +865,12 @@ version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "669a445ee724c5c69b1b06fe0b63e70a1c84bc9bb7d9696cd4f4e3ec45050408"
|
||||
|
||||
[[package]]
|
||||
name = "dyn-clone"
|
||||
version = "1.0.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125"
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.13.0"
|
||||
@@ -918,8 +879,9 @@ checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
|
||||
|
||||
[[package]]
|
||||
name = "either_of"
|
||||
version = "0.1.3"
|
||||
version = "0.1.5"
|
||||
dependencies = [
|
||||
"paste",
|
||||
"pin-project-lite",
|
||||
]
|
||||
|
||||
@@ -1033,12 +995,6 @@ dependencies = [
|
||||
"percent-encoding",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "funty"
|
||||
version = "2.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c"
|
||||
|
||||
[[package]]
|
||||
name = "futures"
|
||||
version = "0.3.31"
|
||||
@@ -1323,15 +1279,6 @@ dependencies = [
|
||||
"byteorder",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.12.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
|
||||
dependencies = [
|
||||
"ahash 0.7.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.14.5"
|
||||
@@ -1771,7 +1718,7 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
||||
|
||||
[[package]]
|
||||
name = "leptos"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"base64",
|
||||
@@ -1822,7 +1769,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_actix"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"actix-files",
|
||||
"actix-http",
|
||||
@@ -1847,7 +1794,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_axum"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"axum",
|
||||
@@ -1870,7 +1817,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_config"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"config",
|
||||
"regex",
|
||||
@@ -1884,7 +1831,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_dom"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"js-sys",
|
||||
"leptos",
|
||||
@@ -1901,7 +1848,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_hot_reload"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"camino",
|
||||
@@ -1917,7 +1864,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_integration_utils"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"futures",
|
||||
"hydration_context",
|
||||
@@ -1930,7 +1877,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_macro"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"attribute-derive",
|
||||
"cfg-if",
|
||||
@@ -1940,6 +1887,7 @@ dependencies = [
|
||||
"itertools",
|
||||
"leptos",
|
||||
"leptos_hot_reload",
|
||||
"leptos_router",
|
||||
"log",
|
||||
"prettyplease",
|
||||
"proc-macro-error2",
|
||||
@@ -1948,7 +1896,7 @@ dependencies = [
|
||||
"rstml",
|
||||
"serde",
|
||||
"server_fn",
|
||||
"server_fn_macro 0.7.4",
|
||||
"server_fn_macro 0.7.7",
|
||||
"syn 2.0.90",
|
||||
"tracing",
|
||||
"trybuild",
|
||||
@@ -1958,7 +1906,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_meta"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"futures",
|
||||
"indexmap",
|
||||
@@ -1973,7 +1921,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_router"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"either_of",
|
||||
@@ -1997,17 +1945,19 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "leptos_router_macro"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"leptos_macro",
|
||||
"leptos_router",
|
||||
"proc-macro-error2",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.90",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "leptos_server"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"base64",
|
||||
@@ -2423,9 +2373,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "pin-project-lite"
|
||||
version = "0.2.15"
|
||||
version = "0.2.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff"
|
||||
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
|
||||
|
||||
[[package]]
|
||||
name = "pin-utils"
|
||||
@@ -2565,33 +2515,13 @@ dependencies = [
|
||||
"yansi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ptr_meta"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1"
|
||||
dependencies = [
|
||||
"ptr_meta_derive 0.1.4",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ptr_meta"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fe9e76f66d3f9606f44e45598d155cb13ecf09f4a28199e48daf8c8fc937ea90"
|
||||
dependencies = [
|
||||
"ptr_meta_derive 0.3.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ptr_meta_derive"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
"ptr_meta_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2688,19 +2618,13 @@ dependencies = [
|
||||
"syn 2.0.90",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "radium"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
|
||||
|
||||
[[package]]
|
||||
name = "rancor"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "caf5f7161924b9d1cea0e4cabc97c372cea92b5f927fc13c6bca67157a0ad947"
|
||||
dependencies = [
|
||||
"ptr_meta 0.3.0",
|
||||
"ptr_meta",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2735,7 +2659,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "reactive_graph"
|
||||
version = "0.1.4"
|
||||
version = "0.1.7"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"async-lock",
|
||||
@@ -2757,11 +2681,12 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "reactive_stores"
|
||||
version = "0.1.3"
|
||||
version = "0.1.7"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"guardian",
|
||||
"itertools",
|
||||
"leptos",
|
||||
"or_poisoned",
|
||||
"paste",
|
||||
"reactive_graph",
|
||||
@@ -2773,7 +2698,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "reactive_stores_macro"
|
||||
version = "0.1.0"
|
||||
version = "0.1.7"
|
||||
dependencies = [
|
||||
"convert_case 0.6.0",
|
||||
"proc-macro-error2",
|
||||
@@ -2826,22 +2751,13 @@ version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
|
||||
|
||||
[[package]]
|
||||
name = "rend"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "71fe3824f5629716b1589be05dacd749f6aa084c87e00e016714a8cdfccc997c"
|
||||
dependencies = [
|
||||
"bytecheck 0.6.12",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rend"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a35e8a6bf28cd121053a66aa2e6a2e3eaffad4a60012179f0e864aa5ffeff215"
|
||||
dependencies = [
|
||||
"bytecheck 0.8.0",
|
||||
"bytecheck",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2907,54 +2823,25 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rkyv"
|
||||
version = "0.7.45"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9008cd6385b9e161d8229e1f6549dd23c3d022f132a2ea37ac3a10ac4935779b"
|
||||
dependencies = [
|
||||
"bitvec",
|
||||
"bytecheck 0.6.12",
|
||||
"bytes",
|
||||
"hashbrown 0.12.3",
|
||||
"ptr_meta 0.1.4",
|
||||
"rend 0.4.2",
|
||||
"rkyv_derive 0.7.45",
|
||||
"seahash",
|
||||
"tinyvec",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rkyv"
|
||||
version = "0.8.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b11a153aec4a6ab60795f8ebe2923c597b16b05bb1504377451e705ef1a45323"
|
||||
dependencies = [
|
||||
"bytecheck 0.8.0",
|
||||
"bytecheck",
|
||||
"bytes",
|
||||
"hashbrown 0.15.2",
|
||||
"indexmap",
|
||||
"munge",
|
||||
"ptr_meta 0.3.0",
|
||||
"ptr_meta",
|
||||
"rancor",
|
||||
"rend 0.5.2",
|
||||
"rkyv_derive 0.8.9",
|
||||
"rend",
|
||||
"rkyv_derive",
|
||||
"tinyvec",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rkyv_derive"
|
||||
version = "0.7.45"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "503d1d27590a2b0a3a4ca4c94755aa2875657196ecbf401a42eff41d7de532c0"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rkyv_derive"
|
||||
version = "0.8.9"
|
||||
@@ -3122,12 +3009,6 @@ version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
||||
|
||||
[[package]]
|
||||
name = "seahash"
|
||||
version = "4.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
|
||||
|
||||
[[package]]
|
||||
name = "security-framework"
|
||||
version = "2.11.1"
|
||||
@@ -3274,7 +3155,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "server_fn"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"actix-web",
|
||||
"axum",
|
||||
@@ -3294,7 +3175,7 @@ dependencies = [
|
||||
"pin-project-lite",
|
||||
"postcard",
|
||||
"reqwest",
|
||||
"rkyv 0.8.9",
|
||||
"rkyv",
|
||||
"rmp-serde",
|
||||
"send_wrapper",
|
||||
"serde",
|
||||
@@ -3330,7 +3211,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "server_fn_macro"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"const_format",
|
||||
"convert_case 0.6.0",
|
||||
@@ -3342,9 +3223,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "server_fn_macro_default"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
dependencies = [
|
||||
"server_fn_macro 0.7.4",
|
||||
"server_fn_macro 0.7.7",
|
||||
"syn 2.0.90",
|
||||
]
|
||||
|
||||
@@ -3477,7 +3358,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
@@ -3539,11 +3419,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tachys"
|
||||
version = "0.1.4"
|
||||
version = "0.1.7"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"async-trait",
|
||||
"const_str_slice_concat",
|
||||
"drain_filter_polyfill",
|
||||
"dyn-clone",
|
||||
"either_of",
|
||||
"futures",
|
||||
"html-escape",
|
||||
@@ -3572,12 +3454,6 @@ dependencies = [
|
||||
"web-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tap"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
|
||||
|
||||
[[package]]
|
||||
name = "target-lexicon"
|
||||
version = "0.12.16"
|
||||
@@ -4345,15 +4221,6 @@ version = "0.5.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
|
||||
|
||||
[[package]]
|
||||
name = "wyz"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
|
||||
dependencies = [
|
||||
"tap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xxhash-rust"
|
||||
version = "0.8.13"
|
||||
|
||||
Cargo.toml (38 changes)

@@ -40,36 +40,36 @@ members = [
 exclude = ["benchmarks", "examples", "projects"]

 [workspace.package]
-version = "0.7.4"
+version = "0.7.7"
 edition = "2021"
 rust-version = "1.76"

 [workspace.dependencies]
 throw_error = { path = "./any_error/", version = "0.2.0" }
 any_spawner = { path = "./any_spawner/", version = "0.2.0" }
-const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1.0" }
+const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
 either_of = { path = "./either_of/", version = "0.1.0" }
 hydration_context = { path = "./hydration_context", version = "0.2.0" }
-leptos = { path = "./leptos", version = "0.7.4" }
-leptos_config = { path = "./leptos_config", version = "0.7.4" }
-leptos_dom = { path = "./leptos_dom", version = "0.7.4" }
-leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.7.4" }
-leptos_integration_utils = { path = "./integrations/utils", version = "0.7.4" }
-leptos_macro = { path = "./leptos_macro", version = "0.7.4" }
-leptos_router = { path = "./router", version = "0.7.4" }
-leptos_router_macro = { path = "./router_macro", version = "0.7.4" }
-leptos_server = { path = "./leptos_server", version = "0.7.4" }
-leptos_meta = { path = "./meta", version = "0.7.4" }
+leptos = { path = "./leptos", version = "0.7.7" }
+leptos_config = { path = "./leptos_config", version = "0.7.7" }
+leptos_dom = { path = "./leptos_dom", version = "0.7.7" }
+leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.7.7" }
+leptos_integration_utils = { path = "./integrations/utils", version = "0.7.7" }
+leptos_macro = { path = "./leptos_macro", version = "0.7.7" }
+leptos_router = { path = "./router", version = "0.7.7" }
+leptos_router_macro = { path = "./router_macro", version = "0.7.7" }
+leptos_server = { path = "./leptos_server", version = "0.7.7" }
+leptos_meta = { path = "./meta", version = "0.7.7" }
 next_tuple = { path = "./next_tuple", version = "0.1.0" }
 oco_ref = { path = "./oco", version = "0.2.0" }
 or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
-reactive_graph = { path = "./reactive_graph", version = "0.1.0" }
-reactive_stores = { path = "./reactive_stores", version = "0.1.0" }
-reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.1.0" }
-server_fn = { path = "./server_fn", version = "0.7.4" }
-server_fn_macro = { path = "./server_fn_macro", version = "0.7.4" }
-server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.7.4" }
-tachys = { path = "./tachys", version = "0.1.0" }
+reactive_graph = { path = "./reactive_graph", version = "0.1.7" }
+reactive_stores = { path = "./reactive_stores", version = "0.1.7" }
+reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.1.7" }
+server_fn = { path = "./server_fn", version = "0.7.7" }
+server_fn_macro = { path = "./server_fn_macro", version = "0.7.7" }
+server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.7.7" }
+tachys = { path = "./tachys", version = "0.1.7" }

 [profile.release]
 codegen-units = 1
README.md

@@ -5,6 +5,7 @@

 [](https://crates.io/crates/leptos)
 [](https://docs.rs/leptos)

 [](https://discord.gg/YdRAhS7eQB)
 [](https://matrix.to/#/#leptos:matrix.org)

@@ -167,14 +168,14 @@ Yew is the most-used library for Rust web UI development, but there are several
 - **Performance:** This has huge performance implications: Leptos is simply much faster at both creating and updating the UI than Yew is.
 - **Server integration:** Yew was created in an era in which browser-rendered single-page apps (SPAs) were the dominant paradigm. While Leptos supports client-side rendering, it also focuses on integrating with the server side of your application via server functions and multiple modes of serving HTML, including out-of-order streaming.

-- ### How is this different from Dioxus?
+### How is this different from Dioxus?

 Like Leptos, Dioxus is a framework for building UIs using web technologies. However, there are significant differences in approach and features.

 - **VDOM vs. fine-grained:** While Dioxus has a performant virtual DOM (VDOM), it still uses coarse-grained/component-scoped reactivity: changing a stateful value reruns the component function and diffs the old UI against the new one. Leptos components use a different mental model, creating (and returning) actual DOM nodes and setting up a reactive system to update those DOM nodes.
 - **Web vs. desktop priorities:** Dioxus uses Leptos server functions in its fullstack mode, but does not have the same `<Suspense>`-based support for things like streaming HTML rendering, or share the same focus on holistic web performance. Leptos tends to prioritize holistic web performance (streaming HTML rendering, smaller WASM binary sizes, etc.), whereas Dioxus has an unparalleled experience when building desktop apps, because your application logic runs as a native Rust binary.

-- ### How is this different from Sycamore?
+### How is this different from Sycamore?

 Sycamore and Leptos are both heavily influenced by SolidJS. At this point, Leptos has a larger community and ecosystem and is more actively developed. Other differences:
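To make the fine-grained reactivity point above concrete, here is a minimal counter sketch in the style of the Leptos 0.7 API. It is illustrative only and not part of this change set; the component and signal names are placeholders. The idea is that clicking the button re-runs only the closure that reads `count`, not the whole component.

```rust
use leptos::prelude::*;

/// A minimal illustrative sketch (not from this diff): `count` is a signal,
/// and only the view fragment that reads it updates when the value changes.
#[component]
pub fn Counter() -> impl IntoView {
    let (count, set_count) = signal(0);

    view! {
        <button on:click=move |_| set_count.update(|n| *n += 1)>
            "Clicks: " {move || count.get()}
        </button>
    }
}
```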
either_of/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "either_of"
-version = "0.1.3"
+version = "0.1.5"
 authors = ["Greg Johnston"]
 license = "MIT"
 readme = "../README.md"

@@ -10,4 +10,9 @@ rust-version.workspace = true
 edition.workspace = true

 [dependencies]
-pin-project-lite = "0.2.15"
+pin-project-lite = "0.2.16"
+paste = "1.0.15"
+
+[features]
+default = ["no_std"]
+no_std = []
either_of/src/lib.rs

@@ -1,33 +1,489 @@
-#![no_std]
+#![cfg_attr(feature = "no_std", no_std)]
 #![forbid(unsafe_code)]

 //! Utilities for working with enumerated types that contain one of `2..n` other types.

 use core::{
     cmp::Ordering,
     fmt::Display,
     future::Future,
     iter::{Product, Sum},
     pin::Pin,
     task::{Context, Poll},
 };
+use paste::paste;
 use pin_project_lite::pin_project;
+#[cfg(not(feature = "no_std"))]
+use std::error::Error; // TODO: replace with core::error::Error once MSRV is >= 1.81.0

-#[derive(Debug, Clone, Copy)]
-pub enum Either<A, B> {
-    Left(A),
-    Right(B),
macro_rules! tuples {
|
||||
($name:ident + $fut_name:ident + $fut_proj:ident {
|
||||
$($ty:ident => ($($rest_variant:ident),*) + <$($mapped_ty:ident),+>),+$(,)?
|
||||
}) => {
|
||||
tuples!($name + $fut_name + $fut_proj {
|
||||
$($ty($ty) => ($($rest_variant),*) + <$($mapped_ty),+>),+
|
||||
});
|
||||
};
|
||||
($name:ident + $fut_name:ident + $fut_proj:ident {
|
||||
$($variant:ident($ty:ident) => ($($rest_variant:ident),*) + <$($mapped_ty:ident),+>),+$(,)?
|
||||
}) => {
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
|
||||
pub enum $name<$($ty),+> {
|
||||
$($variant ($ty),)+
|
||||
}
|
||||
|
||||
impl<$($ty),+> $name<$($ty),+> {
|
||||
paste! {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn map<$([<F $ty>]),+, $([<$ty 1>]),+>(self, $([<$variant:lower>]: [<F $ty>]),+) -> $name<$([<$ty 1>]),+>
|
||||
where
|
||||
$([<F $ty>]: FnOnce($ty) -> [<$ty 1>],)+
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(inner) => $name::$variant([<$variant:lower>](inner)),)+
|
||||
}
|
||||
}
|
||||
|
||||
$(
|
||||
pub fn [<map_ $variant:lower>]<Fun, [<$ty 1>]>(self, f: Fun) -> $name<$($mapped_ty),+>
|
||||
where
|
||||
Fun: FnOnce($ty) -> [<$ty 1>],
|
||||
{
|
||||
match self {
|
||||
$name::$variant(inner) => $name::$variant(f(inner)),
|
||||
$($name::$rest_variant(inner) => $name::$rest_variant(inner),)*
|
||||
}
|
||||
}
|
||||
|
||||
pub fn [<inspect_ $variant:lower>]<Fun, [<$ty 1>]>(self, f: Fun) -> Self
|
||||
where
|
||||
Fun: FnOnce(&$ty),
|
||||
{
|
||||
if let $name::$variant(inner) = &self {
|
||||
f(inner);
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
pub fn [<is_ $variant:lower>](&self) -> bool {
|
||||
matches!(self, $name::$variant(_))
|
||||
}
|
||||
|
||||
pub fn [<as_ $variant:lower>](&self) -> Option<&$ty> {
|
||||
match self {
|
||||
$name::$variant(inner) => Some(inner),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn [<as_ $variant:lower _mut>](&mut self) -> Option<&mut $ty> {
|
||||
match self {
|
||||
$name::$variant(inner) => Some(inner),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn [<unwrap_ $variant:lower>](self) -> $ty {
|
||||
match self {
|
||||
$name::$variant(inner) => inner,
|
||||
_ => panic!(concat!(
|
||||
"called `unwrap_", stringify!([<$variant:lower>]), "()` on a non-`", stringify!($variant), "` variant of `", stringify!($name), "`"
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn [<into_ $variant:lower>](self) -> Result<$ty, Self> {
|
||||
match self {
|
||||
$name::$variant(inner) => Ok(inner),
|
||||
_ => Err(self),
|
||||
}
|
||||
}
|
||||
)+
|
||||
}
|
||||
}
|
||||
|
||||
impl<$($ty),+> Display for $name<$($ty),+>
|
||||
where
|
||||
$($ty: Display,)+
|
||||
{
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
$($name::$variant(this) => this.fmt(f),)+
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "no_std"))]
|
||||
impl<$($ty),+> Error for $name<$($ty),+>
|
||||
where
|
||||
$($ty: Error,)+
|
||||
{
|
||||
fn source(&self) -> Option<&(dyn Error + 'static)> {
|
||||
match self {
|
||||
$($name::$variant(this) => this.source(),)+
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Item, $($ty),+> Iterator for $name<$($ty),+>
|
||||
where
|
||||
$($ty: Iterator<Item = Item>,)+
|
||||
{
|
||||
type Item = Item;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
$($name::$variant(i) => i.next(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
match self {
|
||||
$($name::$variant(i) => i.size_hint(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.count(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn last(self) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.last(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
match self {
|
||||
$($name::$variant(i) => i.nth(n),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn for_each<Fun>(self, f: Fun)
|
||||
where
|
||||
Self: Sized,
|
||||
Fun: FnMut(Self::Item),
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.for_each(f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn collect<Col: FromIterator<Self::Item>>(self) -> Col
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.collect(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn partition<Col, Fun>(self, f: Fun) -> (Col, Col)
|
||||
where
|
||||
Self: Sized,
|
||||
Col: Default + Extend<Self::Item>,
|
||||
Fun: FnMut(&Self::Item) -> bool,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.partition(f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn fold<Acc, Fun>(self, init: Acc, f: Fun) -> Acc
|
||||
where
|
||||
Self: Sized,
|
||||
Fun: FnMut(Acc, Self::Item) -> Acc,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.fold(init, f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn reduce<Fun>(self, f: Fun) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
Fun: FnMut(Self::Item, Self::Item) -> Self::Item,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.reduce(f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn all<Fun>(&mut self, f: Fun) -> bool
|
||||
where
|
||||
Self: Sized,
|
||||
Fun: FnMut(Self::Item) -> bool,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.all(f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn any<Fun>(&mut self, f: Fun) -> bool
|
||||
where
|
||||
Self: Sized,
|
||||
Fun: FnMut(Self::Item) -> bool,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.any(f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn find<Pre>(&mut self, predicate: Pre) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
Pre: FnMut(&Self::Item) -> bool,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.find(predicate),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn find_map<Out, Fun>(&mut self, f: Fun) -> Option<Out>
|
||||
where
|
||||
Self: Sized,
|
||||
Fun: FnMut(Self::Item) -> Option<Out>,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.find_map(f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn position<Pre>(&mut self, predicate: Pre) -> Option<usize>
|
||||
where
|
||||
Self: Sized,
|
||||
Pre: FnMut(Self::Item) -> bool,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.position(predicate),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn max(self) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
Self::Item: Ord,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.max(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn min(self) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
Self::Item: Ord,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.min(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn max_by_key<Key: Ord, Fun>(self, f: Fun) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
Fun: FnMut(&Self::Item) -> Key,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.max_by_key(f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn max_by<Cmp>(self, compare: Cmp) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
Cmp: FnMut(&Self::Item, &Self::Item) -> Ordering,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.max_by(compare),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn min_by_key<Key: Ord, Fun>(self, f: Fun) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
Fun: FnMut(&Self::Item) -> Key,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.min_by_key(f),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn min_by<Cmp>(self, compare: Cmp) -> Option<Self::Item>
|
||||
where
|
||||
Self: Sized,
|
||||
Cmp: FnMut(&Self::Item, &Self::Item) -> Ordering,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.min_by(compare),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn sum<Out>(self) -> Out
|
||||
where
|
||||
Self: Sized,
|
||||
Out: Sum<Self::Item>,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.sum(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn product<Out>(self) -> Out
|
||||
where
|
||||
Self: Sized,
|
||||
Out: Product<Self::Item>,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.product(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn cmp<Other>(self, other: Other) -> Ordering
|
||||
where
|
||||
Other: IntoIterator<Item = Self::Item>,
|
||||
Self::Item: Ord,
|
||||
Self: Sized,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.cmp(other),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn partial_cmp<Other>(self, other: Other) -> Option<Ordering>
|
||||
where
|
||||
Other: IntoIterator,
|
||||
Self::Item: PartialOrd<Other::Item>,
|
||||
Self: Sized,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.partial_cmp(other),)+
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: uncomment once MSRV is >= 1.82.0
|
||||
// fn is_sorted(self) -> bool
|
||||
// where
|
||||
// Self: Sized,
|
||||
// Self::Item: PartialOrd,
|
||||
// {
|
||||
// match self {
|
||||
// $($name::$variant(i) => i.is_sorted(),)+
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// fn is_sorted_by<Cmp>(self, compare: Cmp) -> bool
|
||||
// where
|
||||
// Self: Sized,
|
||||
// Cmp: FnMut(&Self::Item, &Self::Item) -> bool,
|
||||
// {
|
||||
// match self {
|
||||
// $($name::$variant(i) => i.is_sorted_by(compare),)+
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// fn is_sorted_by_key<Fun, Key>(self, f: Fun) -> bool
|
||||
// where
|
||||
// Self: Sized,
|
||||
// Fun: FnMut(Self::Item) -> Key,
|
||||
// Key: PartialOrd,
|
||||
// {
|
||||
// match self {
|
||||
// $($name::$variant(i) => i.is_sorted_by_key(f),)+
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
impl<Item, $($ty),+> ExactSizeIterator for $name<$($ty),+>
|
||||
where
|
||||
$($ty: ExactSizeIterator<Item = Item>,)+
|
||||
{
|
||||
fn len(&self) -> usize {
|
||||
match self {
|
||||
$($name::$variant(i) => i.len(),)+
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Item, $($ty),+> DoubleEndedIterator for $name<$($ty),+>
|
||||
where
|
||||
$($ty: DoubleEndedIterator<Item = Item>,)+
|
||||
{
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
$($name::$variant(i) => i.next_back(),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
match self {
|
||||
$($name::$variant(i) => i.nth_back(n),)+
|
||||
}
|
||||
}
|
||||
|
||||
fn rfind<Pre>(&mut self, predicate: Pre) -> Option<Self::Item>
|
||||
where
|
||||
Pre: FnMut(&Self::Item) -> bool,
|
||||
{
|
||||
match self {
|
||||
$($name::$variant(i) => i.rfind(predicate),)+
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pin_project! {
|
||||
#[project = $fut_proj]
|
||||
pub enum $fut_name<$($ty),+> {
|
||||
$($variant { #[pin] inner: $ty },)+
|
||||
}
|
||||
}
|
||||
|
||||
impl<$($ty),+> Future for $fut_name<$($ty),+>
|
||||
where
|
||||
$($ty: Future,)+
|
||||
{
|
||||
type Output = $name<$($ty::Output),+>;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
match this {
|
||||
$($fut_proj::$variant { inner } => match inner.poll(cx) {
|
||||
Poll::Pending => Poll::Pending,
|
||||
Poll::Ready(inner) => Poll::Ready($name::$variant(inner)),
|
||||
},)+
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Item, A, B> Iterator for Either<A, B>
|
||||
where
|
||||
A: Iterator<Item = Item>,
|
||||
B: Iterator<Item = Item>,
|
||||
{
|
||||
type Item = Item;
|
||||
tuples!(Either + EitherFuture + EitherFutureProj {
|
||||
Left(A) => (Right) + <A1, B>,
|
||||
Right(B) => (Left) + <A, B1>,
|
||||
});
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
impl<A, B> Either<A, B> {
|
||||
pub fn swap(self) -> Either<B, A> {
|
||||
match self {
|
||||
Either::Left(i) => i.next(),
|
||||
Either::Right(i) => i.next(),
|
||||
Either::Left(a) => Either::Right(a),
|
||||
Either::Right(b) => Either::Left(b),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, B> From<Result<A, B>> for Either<A, B> {
|
||||
fn from(value: Result<A, B>) -> Self {
|
||||
match value {
|
||||
Ok(left) => Either::Left(left),
|
||||
Err(right) => Either::Right(right),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -90,6 +546,20 @@ impl<T, E> EitherOr for Result<T, E> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, B> EitherOr for Either<A, B> {
|
||||
type Left = A;
|
||||
type Right = B;
|
||||
|
||||
#[inline]
|
||||
fn either_or<FA, A1, FB, B1>(self, a: FA, b: FB) -> Either<A1, B1>
|
||||
where
|
||||
FA: FnOnce(<Self as EitherOr>::Left) -> A1,
|
||||
FB: FnOnce(<Self as EitherOr>::Right) -> B1,
|
||||
{
|
||||
self.map(a, b)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_either_or() {
|
||||
let right = false.either_or(|_| 'a', |_| 12);
|
||||
@@ -110,109 +580,177 @@ fn test_either_or() {
|
||||
let result: Result<i32, _> = Err("12");
|
||||
let right = result.either_or(|a| a, |b| b.chars().next());
|
||||
assert!(matches!(right, Either::Right(Some('1'))));
|
||||
|
||||
let either = Either::<i32, char>::Left(12);
|
||||
let left = either.either_or(|a| a, |b| b);
|
||||
assert!(matches!(left, Either::Left(12)));
|
||||
|
||||
let either = Either::<i32, char>::Right('a');
|
||||
let right = either.either_or(|a| a, |b| b);
|
||||
assert!(matches!(right, Either::Right('a')));
|
||||
}
|
||||
|
||||
pin_project! {
|
||||
#[project = EitherFutureProj]
|
||||
pub enum EitherFuture<A, B> {
|
||||
Left { #[pin] inner: A },
|
||||
Right { #[pin] inner: B },
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, B> Future for EitherFuture<A, B>
|
||||
where
|
||||
A: Future,
|
||||
B: Future,
|
||||
{
|
||||
type Output = Either<A::Output, B::Output>;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
match this {
|
||||
EitherFutureProj::Left { inner } => match inner.poll(cx) {
|
||||
Poll::Pending => Poll::Pending,
|
||||
Poll::Ready(inner) => Poll::Ready(Either::Left(inner)),
|
||||
},
|
||||
EitherFutureProj::Right { inner } => match inner.poll(cx) {
|
||||
Poll::Pending => Poll::Pending,
|
||||
Poll::Ready(inner) => Poll::Ready(Either::Right(inner)),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! tuples {
|
||||
($name:ident + $fut_name:ident + $fut_proj:ident => $($ty:ident),*) => {
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
|
||||
pub enum $name<$($ty,)*> {
|
||||
$($ty ($ty),)*
|
||||
}
|
||||
|
||||
impl<$($ty,)*> Display for $name<$($ty,)*>
|
||||
where
|
||||
$($ty: Display,)*
|
||||
{
|
||||
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
|
||||
match self {
|
||||
$($name::$ty(this) => this.fmt(f),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Item, $($ty,)*> Iterator for $name<$($ty,)*>
|
||||
where
|
||||
$($ty: Iterator<Item = Item>,)*
|
||||
{
|
||||
type Item = Item;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
$($name::$ty(i) => i.next(),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pin_project! {
|
||||
#[project = $fut_proj]
|
||||
pub enum $fut_name<$($ty,)*> {
|
||||
$($ty { #[pin] inner: $ty },)*
|
||||
}
|
||||
}
|
||||
|
||||
impl<$($ty,)*> Future for $fut_name<$($ty,)*>
|
||||
where
|
||||
$($ty: Future,)*
|
||||
{
|
||||
type Output = $name<$($ty::Output,)*>;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
match this {
|
||||
$($fut_proj::$ty { inner } => match inner.poll(cx) {
|
||||
Poll::Pending => Poll::Pending,
|
||||
Poll::Ready(inner) => Poll::Ready($name::$ty(inner)),
|
||||
},)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tuples!(EitherOf3 + EitherOf3Future + EitherOf3FutureProj => A, B, C);
|
||||
tuples!(EitherOf4 + EitherOf4Future + EitherOf4FutureProj => A, B, C, D);
|
||||
tuples!(EitherOf5 + EitherOf5Future + EitherOf5FutureProj => A, B, C, D, E);
|
||||
tuples!(EitherOf6 + EitherOf6Future + EitherOf6FutureProj => A, B, C, D, E, F);
|
||||
tuples!(EitherOf7 + EitherOf7Future + EitherOf7FutureProj => A, B, C, D, E, F, G);
|
||||
tuples!(EitherOf8 + EitherOf8Future + EitherOf8FutureProj => A, B, C, D, E, F, G, H);
|
||||
tuples!(EitherOf9 + EitherOf9Future + EitherOf9FutureProj => A, B, C, D, E, F, G, H, I);
|
||||
tuples!(EitherOf10 + EitherOf10Future + EitherOf10FutureProj => A, B, C, D, E, F, G, H, I, J);
|
||||
tuples!(EitherOf11 + EitherOf11Future + EitherOf11FutureProj => A, B, C, D, E, F, G, H, I, J, K);
|
||||
tuples!(EitherOf12 + EitherOf12Future + EitherOf12FutureProj => A, B, C, D, E, F, G, H, I, J, K, L);
|
||||
tuples!(EitherOf13 + EitherOf13Future + EitherOf13FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M);
|
||||
tuples!(EitherOf14 + EitherOf14Future + EitherOf14FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N);
|
||||
tuples!(EitherOf15 + EitherOf15Future + EitherOf15FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
|
||||
tuples!(EitherOf16 + EitherOf16Future + EitherOf16FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);
|
||||
tuples!(EitherOf3 + EitherOf3Future + EitherOf3FutureProj {
|
||||
A => (B, C) + <A1, B, C>,
|
||||
B => (A, C) + <A, B1, C>,
|
||||
C => (A, B) + <A, B, C1>,
|
||||
});
|
||||
tuples!(EitherOf4 + EitherOf4Future + EitherOf4FutureProj {
|
||||
A => (B, C, D) + <A1, B, C, D>,
|
||||
B => (A, C, D) + <A, B1, C, D>,
|
||||
C => (A, B, D) + <A, B, C1, D>,
|
||||
D => (A, B, C) + <A, B, C, D1>,
|
||||
});
|
||||
tuples!(EitherOf5 + EitherOf5Future + EitherOf5FutureProj {
|
||||
A => (B, C, D, E) + <A1, B, C, D, E>,
|
||||
B => (A, C, D, E) + <A, B1, C, D, E>,
|
||||
C => (A, B, D, E) + <A, B, C1, D, E>,
|
||||
D => (A, B, C, E) + <A, B, C, D1, E>,
|
||||
E => (A, B, C, D) + <A, B, C, D, E1>,
|
||||
});
|
||||
tuples!(EitherOf6 + EitherOf6Future + EitherOf6FutureProj {
|
||||
A => (B, C, D, E, F) + <A1, B, C, D, E, F>,
|
||||
B => (A, C, D, E, F) + <A, B1, C, D, E, F>,
|
||||
C => (A, B, D, E, F) + <A, B, C1, D, E, F>,
|
||||
D => (A, B, C, E, F) + <A, B, C, D1, E, F>,
|
||||
E => (A, B, C, D, F) + <A, B, C, D, E1, F>,
|
||||
F => (A, B, C, D, E) + <A, B, C, D, E, F1>,
|
||||
});
|
||||
tuples!(EitherOf7 + EitherOf7Future + EitherOf7FutureProj {
|
||||
A => (B, C, D, E, F, G) + <A1, B, C, D, E, F, G>,
|
||||
B => (A, C, D, E, F, G) + <A, B1, C, D, E, F, G>,
|
||||
C => (A, B, D, E, F, G) + <A, B, C1, D, E, F, G>,
|
||||
D => (A, B, C, E, F, G) + <A, B, C, D1, E, F, G>,
|
||||
E => (A, B, C, D, F, G) + <A, B, C, D, E1, F, G>,
|
||||
F => (A, B, C, D, E, G) + <A, B, C, D, E, F1, G>,
|
||||
G => (A, B, C, D, E, F) + <A, B, C, D, E, F, G1>,
|
||||
});
|
||||
tuples!(EitherOf8 + EitherOf8Future + EitherOf8FutureProj {
|
||||
A => (B, C, D, E, F, G, H) + <A1, B, C, D, E, F, G, H>,
|
||||
B => (A, C, D, E, F, G, H) + <A, B1, C, D, E, F, G, H>,
|
||||
C => (A, B, D, E, F, G, H) + <A, B, C1, D, E, F, G, H>,
|
||||
D => (A, B, C, E, F, G, H) + <A, B, C, D1, E, F, G, H>,
|
||||
E => (A, B, C, D, F, G, H) + <A, B, C, D, E1, F, G, H>,
|
||||
F => (A, B, C, D, E, G, H) + <A, B, C, D, E, F1, G, H>,
|
||||
G => (A, B, C, D, E, F, H) + <A, B, C, D, E, F, G1, H>,
|
||||
H => (A, B, C, D, E, F, G) + <A, B, C, D, E, F, G, H1>,
|
||||
});
|
||||
tuples!(EitherOf9 + EitherOf9Future + EitherOf9FutureProj {
|
||||
A => (B, C, D, E, F, G, H, I) + <A1, B, C, D, E, F, G, H, I>,
|
||||
B => (A, C, D, E, F, G, H, I) + <A, B1, C, D, E, F, G, H, I>,
|
||||
C => (A, B, D, E, F, G, H, I) + <A, B, C1, D, E, F, G, H, I>,
|
||||
D => (A, B, C, E, F, G, H, I) + <A, B, C, D1, E, F, G, H, I>,
|
||||
E => (A, B, C, D, F, G, H, I) + <A, B, C, D, E1, F, G, H, I>,
|
||||
F => (A, B, C, D, E, G, H, I) + <A, B, C, D, E, F1, G, H, I>,
|
||||
G => (A, B, C, D, E, F, H, I) + <A, B, C, D, E, F, G1, H, I>,
|
||||
H => (A, B, C, D, E, F, G, I) + <A, B, C, D, E, F, G, H1, I>,
|
||||
I => (A, B, C, D, E, F, G, H) + <A, B, C, D, E, F, G, H, I1>,
|
||||
});
|
||||
tuples!(EitherOf10 + EitherOf10Future + EitherOf10FutureProj {
|
||||
A => (B, C, D, E, F, G, H, I, J) + <A1, B, C, D, E, F, G, H, I, J>,
|
||||
B => (A, C, D, E, F, G, H, I, J) + <A, B1, C, D, E, F, G, H, I, J>,
|
||||
C => (A, B, D, E, F, G, H, I, J) + <A, B, C1, D, E, F, G, H, I, J>,
|
||||
D => (A, B, C, E, F, G, H, I, J) + <A, B, C, D1, E, F, G, H, I, J>,
|
||||
E => (A, B, C, D, F, G, H, I, J) + <A, B, C, D, E1, F, G, H, I, J>,
|
||||
F => (A, B, C, D, E, G, H, I, J) + <A, B, C, D, E, F1, G, H, I, J>,
|
||||
G => (A, B, C, D, E, F, H, I, J) + <A, B, C, D, E, F, G1, H, I, J>,
|
||||
H => (A, B, C, D, E, F, G, I, J) + <A, B, C, D, E, F, G, H1, I, J>,
|
||||
I => (A, B, C, D, E, F, G, H, J) + <A, B, C, D, E, F, G, H, I1, J>,
|
||||
J => (A, B, C, D, E, F, G, H, I) + <A, B, C, D, E, F, G, H, I, J1>,
|
||||
});
|
||||
tuples!(EitherOf11 + EitherOf11Future + EitherOf11FutureProj {
|
||||
A => (B, C, D, E, F, G, H, I, J, K) + <A1, B, C, D, E, F, G, H, I, J, K>,
|
||||
B => (A, C, D, E, F, G, H, I, J, K) + <A, B1, C, D, E, F, G, H, I, J, K>,
|
||||
C => (A, B, D, E, F, G, H, I, J, K) + <A, B, C1, D, E, F, G, H, I, J, K>,
|
||||
D => (A, B, C, E, F, G, H, I, J, K) + <A, B, C, D1, E, F, G, H, I, J, K>,
|
||||
E => (A, B, C, D, F, G, H, I, J, K) + <A, B, C, D, E1, F, G, H, I, J, K>,
|
||||
F => (A, B, C, D, E, G, H, I, J, K) + <A, B, C, D, E, F1, G, H, I, J, K>,
|
||||
G => (A, B, C, D, E, F, H, I, J, K) + <A, B, C, D, E, F, G1, H, I, J, K>,
|
||||
H => (A, B, C, D, E, F, G, I, J, K) + <A, B, C, D, E, F, G, H1, I, J, K>,
|
||||
I => (A, B, C, D, E, F, G, H, J, K) + <A, B, C, D, E, F, G, H, I1, J, K>,
|
||||
J => (A, B, C, D, E, F, G, H, I, K) + <A, B, C, D, E, F, G, H, I, J1, K>,
|
||||
K => (A, B, C, D, E, F, G, H, I, J) + <A, B, C, D, E, F, G, H, I, J, K1>,
|
||||
});
|
||||
tuples!(EitherOf12 + EitherOf12Future + EitherOf12FutureProj {
|
||||
A => (B, C, D, E, F, G, H, I, J, K, L) + <A1, B, C, D, E, F, G, H, I, J, K, L>,
|
||||
B => (A, C, D, E, F, G, H, I, J, K, L) + <A, B1, C, D, E, F, G, H, I, J, K, L>,
|
||||
C => (A, B, D, E, F, G, H, I, J, K, L) + <A, B, C1, D, E, F, G, H, I, J, K, L>,
|
||||
D => (A, B, C, E, F, G, H, I, J, K, L) + <A, B, C, D1, E, F, G, H, I, J, K, L>,
|
||||
E => (A, B, C, D, F, G, H, I, J, K, L) + <A, B, C, D, E1, F, G, H, I, J, K, L>,
|
||||
F => (A, B, C, D, E, G, H, I, J, K, L) + <A, B, C, D, E, F1, G, H, I, J, K, L>,
|
||||
G => (A, B, C, D, E, F, H, I, J, K, L) + <A, B, C, D, E, F, G1, H, I, J, K, L>,
|
||||
H => (A, B, C, D, E, F, G, I, J, K, L) + <A, B, C, D, E, F, G, H1, I, J, K, L>,
|
||||
I => (A, B, C, D, E, F, G, H, J, K, L) + <A, B, C, D, E, F, G, H, I1, J, K, L>,
|
||||
J => (A, B, C, D, E, F, G, H, I, K, L) + <A, B, C, D, E, F, G, H, I, J1, K, L>,
|
||||
K => (A, B, C, D, E, F, G, H, I, J, L) + <A, B, C, D, E, F, G, H, I, J, K1, L>,
|
||||
L => (A, B, C, D, E, F, G, H, I, J, K) + <A, B, C, D, E, F, G, H, I, J, K, L1>,
|
||||
});
|
||||
tuples!(EitherOf13 + EitherOf13Future + EitherOf13FutureProj {
|
||||
A => (B, C, D, E, F, G, H, I, J, K, L, M) + <A1, B, C, D, E, F, G, H, I, J, K, L, M>,
|
||||
B => (A, C, D, E, F, G, H, I, J, K, L, M) + <A, B1, C, D, E, F, G, H, I, J, K, L, M>,
|
||||
C => (A, B, D, E, F, G, H, I, J, K, L, M) + <A, B, C1, D, E, F, G, H, I, J, K, L, M>,
|
||||
D => (A, B, C, E, F, G, H, I, J, K, L, M) + <A, B, C, D1, E, F, G, H, I, J, K, L, M>,
|
||||
E => (A, B, C, D, F, G, H, I, J, K, L, M) + <A, B, C, D, E1, F, G, H, I, J, K, L, M>,
|
||||
F => (A, B, C, D, E, G, H, I, J, K, L, M) + <A, B, C, D, E, F1, G, H, I, J, K, L, M>,
|
||||
G => (A, B, C, D, E, F, H, I, J, K, L, M) + <A, B, C, D, E, F, G1, H, I, J, K, L, M>,
|
||||
H => (A, B, C, D, E, F, G, I, J, K, L, M) + <A, B, C, D, E, F, G, H1, I, J, K, L, M>,
|
||||
I => (A, B, C, D, E, F, G, H, J, K, L, M) + <A, B, C, D, E, F, G, H, I1, J, K, L, M>,
|
||||
J => (A, B, C, D, E, F, G, H, I, K, L, M) + <A, B, C, D, E, F, G, H, I, J1, K, L, M>,
|
||||
K => (A, B, C, D, E, F, G, H, I, J, L, M) + <A, B, C, D, E, F, G, H, I, J, K1, L, M>,
|
||||
L => (A, B, C, D, E, F, G, H, I, J, K, M) + <A, B, C, D, E, F, G, H, I, J, K, L1, M>,
|
||||
M => (A, B, C, D, E, F, G, H, I, J, K, L) + <A, B, C, D, E, F, G, H, I, J, K, L, M1>,
|
||||
});
|
||||
tuples!(EitherOf14 + EitherOf14Future + EitherOf14FutureProj {
|
||||
A => (B, C, D, E, F, G, H, I, J, K, L, M, N) + <A1, B, C, D, E, F, G, H, I, J, K, L, M, N>,
|
||||
B => (A, C, D, E, F, G, H, I, J, K, L, M, N) + <A, B1, C, D, E, F, G, H, I, J, K, L, M, N>,
|
||||
C => (A, B, D, E, F, G, H, I, J, K, L, M, N) + <A, B, C1, D, E, F, G, H, I, J, K, L, M, N>,
|
||||
D => (A, B, C, E, F, G, H, I, J, K, L, M, N) + <A, B, C, D1, E, F, G, H, I, J, K, L, M, N>,
|
||||
E => (A, B, C, D, F, G, H, I, J, K, L, M, N) + <A, B, C, D, E1, F, G, H, I, J, K, L, M, N>,
|
||||
F => (A, B, C, D, E, G, H, I, J, K, L, M, N) + <A, B, C, D, E, F1, G, H, I, J, K, L, M, N>,
|
||||
G => (A, B, C, D, E, F, H, I, J, K, L, M, N) + <A, B, C, D, E, F, G1, H, I, J, K, L, M, N>,
|
||||
H => (A, B, C, D, E, F, G, I, J, K, L, M, N) + <A, B, C, D, E, F, G, H1, I, J, K, L, M, N>,
|
||||
I => (A, B, C, D, E, F, G, H, J, K, L, M, N) + <A, B, C, D, E, F, G, H, I1, J, K, L, M, N>,
|
||||
J => (A, B, C, D, E, F, G, H, I, K, L, M, N) + <A, B, C, D, E, F, G, H, I, J1, K, L, M, N>,
|
||||
K => (A, B, C, D, E, F, G, H, I, J, L, M, N) + <A, B, C, D, E, F, G, H, I, J, K1, L, M, N>,
|
||||
L => (A, B, C, D, E, F, G, H, I, J, K, M, N) + <A, B, C, D, E, F, G, H, I, J, K, L1, M, N>,
|
||||
M => (A, B, C, D, E, F, G, H, I, J, K, L, N) + <A, B, C, D, E, F, G, H, I, J, K, L, M1, N>,
|
||||
N => (A, B, C, D, E, F, G, H, I, J, K, L, M) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N1>,
|
||||
});
|
||||
tuples!(EitherOf15 + EitherOf15Future + EitherOf15FutureProj {
|
||||
A => (B, C, D, E, F, G, H, I, J, K, L, M, N, O) + <A1, B, C, D, E, F, G, H, I, J, K, L, M, N, O>,
|
||||
B => (A, C, D, E, F, G, H, I, J, K, L, M, N, O) + <A, B1, C, D, E, F, G, H, I, J, K, L, M, N, O>,
|
||||
C => (A, B, D, E, F, G, H, I, J, K, L, M, N, O) + <A, B, C1, D, E, F, G, H, I, J, K, L, M, N, O>,
|
||||
D => (A, B, C, E, F, G, H, I, J, K, L, M, N, O) + <A, B, C, D1, E, F, G, H, I, J, K, L, M, N, O>,
|
||||
E => (A, B, C, D, F, G, H, I, J, K, L, M, N, O) + <A, B, C, D, E1, F, G, H, I, J, K, L, M, N, O>,
|
||||
F => (A, B, C, D, E, G, H, I, J, K, L, M, N, O) + <A, B, C, D, E, F1, G, H, I, J, K, L, M, N, O>,
|
||||
G => (A, B, C, D, E, F, H, I, J, K, L, M, N, O) + <A, B, C, D, E, F, G1, H, I, J, K, L, M, N, O>,
|
||||
H => (A, B, C, D, E, F, G, I, J, K, L, M, N, O) + <A, B, C, D, E, F, G, H1, I, J, K, L, M, N, O>,
|
||||
I => (A, B, C, D, E, F, G, H, J, K, L, M, N, O) + <A, B, C, D, E, F, G, H, I1, J, K, L, M, N, O>,
|
||||
J => (A, B, C, D, E, F, G, H, I, K, L, M, N, O) + <A, B, C, D, E, F, G, H, I, J1, K, L, M, N, O>,
|
||||
K => (A, B, C, D, E, F, G, H, I, J, L, M, N, O) + <A, B, C, D, E, F, G, H, I, J, K1, L, M, N, O>,
|
||||
L => (A, B, C, D, E, F, G, H, I, J, K, M, N, O) + <A, B, C, D, E, F, G, H, I, J, K, L1, M, N, O>,
|
||||
M => (A, B, C, D, E, F, G, H, I, J, K, L, N, O) + <A, B, C, D, E, F, G, H, I, J, K, L, M1, N, O>,
|
||||
N => (A, B, C, D, E, F, G, H, I, J, K, L, M, O) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N1, O>,
|
||||
O => (A, B, C, D, E, F, G, H, I, J, K, L, M, N) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N, O1>,
|
||||
});
|
||||
tuples!(EitherOf16 + EitherOf16Future + EitherOf16FutureProj {
|
||||
A => (B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) + <A1, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P>,
|
||||
B => (A, C, D, E, F, G, H, I, J, K, L, M, N, O, P) + <A, B1, C, D, E, F, G, H, I, J, K, L, M, N, O, P>,
|
||||
C => (A, B, D, E, F, G, H, I, J, K, L, M, N, O, P) + <A, B, C1, D, E, F, G, H, I, J, K, L, M, N, O, P>,
|
||||
D => (A, B, C, E, F, G, H, I, J, K, L, M, N, O, P) + <A, B, C, D1, E, F, G, H, I, J, K, L, M, N, O, P>,
|
||||
E => (A, B, C, D, F, G, H, I, J, K, L, M, N, O, P) + <A, B, C, D, E1, F, G, H, I, J, K, L, M, N, O, P>,
|
||||
F => (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P) + <A, B, C, D, E, F1, G, H, I, J, K, L, M, N, O, P>,
|
||||
G => (A, B, C, D, E, F, H, I, J, K, L, M, N, O, P) + <A, B, C, D, E, F, G1, H, I, J, K, L, M, N, O, P>,
|
||||
H => (A, B, C, D, E, F, G, I, J, K, L, M, N, O, P) + <A, B, C, D, E, F, G, H1, I, J, K, L, M, N, O, P>,
|
||||
I => (A, B, C, D, E, F, G, H, J, K, L, M, N, O, P) + <A, B, C, D, E, F, G, H, I1, J, K, L, M, N, O, P>,
|
||||
J => (A, B, C, D, E, F, G, H, I, K, L, M, N, O, P) + <A, B, C, D, E, F, G, H, I, J1, K, L, M, N, O, P>,
|
||||
K => (A, B, C, D, E, F, G, H, I, J, L, M, N, O, P) + <A, B, C, D, E, F, G, H, I, J, K1, L, M, N, O, P>,
|
||||
L => (A, B, C, D, E, F, G, H, I, J, K, M, N, O, P) + <A, B, C, D, E, F, G, H, I, J, K, L1, M, N, O, P>,
|
||||
M => (A, B, C, D, E, F, G, H, I, J, K, L, N, O, P) + <A, B, C, D, E, F, G, H, I, J, K, L, M1, N, O, P>,
|
||||
N => (A, B, C, D, E, F, G, H, I, J, K, L, M, O, P) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N1, O, P>,
|
||||
O => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, P) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N, O1, P>,
|
||||
P => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P1>,
|
||||
});
|
||||
|
||||
/// Matches over the first expression and returns an either ([`Either`], [`EitherOf3`], ... [`EitherOf8`])
|
||||
/// composed of the values returned by the match arms.
|
||||
@@ -303,56 +841,67 @@ macro_rules! either {
|
||||
}; // if you need more eithers feel free to open a PR ;-)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // compile time test
    #[test]
    fn either_macro() {
        let _: Either<&str, f64> = either!(12,
            12 => "12",
            _ => 0.0,
        );
        let _: EitherOf3<&str, f64, i32> = either!(12,
            12 => "12",
            13 => 0.0,
            _ => 12,
        );
        let _: EitherOf4<&str, f64, char, i32> = either!(12,
            12 => "12",
            13 => 0.0,
            14 => ' ',
            _ => 12,
        );
        let _: EitherOf5<&str, f64, char, f32, i32> = either!(12,
            12 => "12",
            13 => 0.0,
            14 => ' ',
            15 => 0.0f32,
            _ => 12,
        );
        let _: EitherOf6<&str, f64, char, f32, u8, i32> = either!(12,
            12 => "12",
            13 => 0.0,
            14 => ' ',
            15 => 0.0f32,
            16 => 24u8,
            _ => 12,
        );
        let _: EitherOf7<&str, f64, char, f32, u8, i8, i32> = either!(12,
            12 => "12",
            13 => 0.0,
            14 => ' ',
            15 => 0.0f32,
            16 => 24u8,
            17 => 2i8,
            _ => 12,
        );
        let _: EitherOf8<&str, f64, char, f32, u8, i8, u32, i32> = either!(12,
            12 => "12",
            13 => 0.0,
            14 => ' ',
            15 => 0.0f32,
            16 => 24u8,
            17 => 2i8,
            18 => 42u32,
            _ => 12,
        );
    }

    #[test]
    #[should_panic]
    fn unwrap_wrong_either() {
        Either::<i32, &str>::Left(0).unwrap_right();
    }
}

@@ -1,5 +1,6 @@
[tasks.install-cargo-leptos]
install_crate = { crate_name = "cargo-leptos", binary = "cargo-leptos", test_arg = "--help" }
args = ["--locked"]

[tasks.cargo-leptos-e2e]
command = "cargo"

@@ -1981,7 +1981,8 @@ where
/// This is provided as a convenience, but is a fairly simple function. If you need to adapt it,
/// simply reuse the source code of this function in your own application.
#[cfg(feature = "default")]
pub fn file_and_error_handler<S, IV>(
pub fn file_and_error_handler_with_context<S, IV>(
    additional_context: impl Fn() + 'static + Clone + Send,
    shell: fn(LeptosOptions) -> IV,
) -> impl Fn(
    Uri,
@@ -1997,40 +1998,68 @@ where
    LeptosOptions: FromRef<S>,
{
    move |uri: Uri, State(state): State<S>, req: Request<Body>| {
        Box::pin({
            let additional_context = additional_context.clone();
            async move {
                let options = LeptosOptions::from_ref(&state);
                let res =
                    get_static_file(uri, &options.site_root, req.headers());
                let res = res.await.unwrap();

                if res.status() == StatusCode::OK {
                    res.into_response()
                } else {
                    let mut res = handle_response_inner(
                        move || {
                            additional_context();
                            provide_context(state.clone());
                        },
                        move || shell(options),
                        req,
                        |app, chunks| {
                            Box::pin(async move {
                                let app = app
                                    .to_html_stream_in_order()
                                    .collect::<String>()
                                    .await;
                                let chunks = chunks();
                                Box::pin(once(async move { app }).chain(chunks))
                                    as PinnedStream<String>
                            })
                        },
                    )
                    .await;
                    *res.status_mut() = StatusCode::NOT_FOUND;
                    res
                }
            }
        })
    }
}

/// A reasonable handler for serving static files (like JS/WASM/CSS) and 404 errors.
///
/// This is provided as a convenience, but is a fairly simple function. If you need to adapt it,
/// simply reuse the source code of this function in your own application.
#[cfg(feature = "default")]
pub fn file_and_error_handler<S, IV>(
    shell: fn(LeptosOptions) -> IV,
) -> impl Fn(
    Uri,
    State<S>,
    Request<Body>,
) -> Pin<Box<dyn Future<Output = Response<Body>> + Send + 'static>>
       + Clone
       + Send
       + 'static
where
    IV: IntoView + 'static,
    S: Send + Sync + Clone + 'static,
    LeptosOptions: FromRef<S>,
{
    file_and_error_handler_with_context(move || (), shell)
}

#[cfg(feature = "default")]
async fn get_static_file(
    uri: Uri,

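For context, a hedged sketch of how the new `file_and_error_handler_with_context` variant might be wired into an Axum router; `ServerState`, `ExtraContext`, `shell`, and `server_state` are placeholders, not part of this diff:

```rust
use axum::Router;
use leptos::prelude::provide_context;

// Sketch only: same role as `file_and_error_handler`, but the extra context provided by
// the closure is also available while the 404 / error shell is being rendered.
let fallback = leptos_axum::file_and_error_handler_with_context::<ServerState, _>(
    move || provide_context(ExtraContext::default()),
    shell,
);
let app = Router::new().fallback(fallback).with_state(server_state);
```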
@@ -80,6 +80,16 @@ impl<In, Out> UnsyncCallback<In, Out> {
    {
        Self(StoredValue::new_local(Rc::new(f)))
    }

    /// Returns `true` if both callbacks wrap the same underlying function pointer.
    #[inline]
    pub fn matches(&self, other: &Self) -> bool {
        self.0.with_value(|self_value| {
            other
                .0
                .with_value(|other_value| Rc::ptr_eq(self_value, other_value))
        })
    }
}

impl<In: 'static, Out: 'static> Callable<In, Out> for UnsyncCallback<In, Out> {
@@ -212,6 +222,19 @@ impl<In: 'static, Out: 'static> Callback<In, Out> {
    {
        Self(StoredValue::new(Arc::new(fun)))
    }

    /// Returns `true` if both callbacks wrap the same underlying function pointer.
    #[inline]
    pub fn matches(&self, other: &Self) -> bool {
        self.0
            .try_with_value(|self_value| {
                other.0.try_with_value(|other_value| {
                    Arc::ptr_eq(self_value, other_value)
                })
            })
            .flatten()
            .unwrap_or(false)
    }
}

#[cfg(test)]
@@ -246,4 +269,32 @@ mod tests {
        let _callback: UnsyncCallback<(i32, String), String> =
            (|num, s| format!("{num} {s}")).into();
    }

    #[test]
    fn callback_matches_same() {
        let callback1 = Callback::new(|x: i32| x * 2);
        let callback2 = callback1.clone();
        assert!(callback1.matches(&callback2));
    }

    #[test]
    fn callback_matches_different() {
        let callback1 = Callback::new(|x: i32| x * 2);
        let callback2 = Callback::new(|x: i32| x + 1);
        assert!(!callback1.matches(&callback2));
    }

    #[test]
    fn unsync_callback_matches_same() {
        let callback1 = UnsyncCallback::new(|x: i32| x * 2);
        let callback2 = callback1.clone();
        assert!(callback1.matches(&callback2));
    }

    #[test]
    fn unsync_callback_matches_different() {
        let callback1 = UnsyncCallback::new(|x: i32| x * 2);
        let callback2 = UnsyncCallback::new(|x: i32| x + 1);
        assert!(!callback1.matches(&callback2));
    }
}

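Beyond the unit tests above, a hedged sketch of the kind of check `matches` enables; the helper function here is hypothetical:

```rust
use leptos::prelude::Callback;

// Hypothetical helper: only swap the stored handler when the incoming callback is
// actually a different one (pointer identity of the wrapped closure, not deep equality).
fn update_handler(current: &mut Callback<i32>, incoming: Callback<i32>) {
    if !current.matches(&incoming) {
        *current = incoming;
    }
}
```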
@@ -1,5 +1,6 @@
use oco_ref::Oco;
use std::sync::Arc;
use tachys::prelude::IntoAttributeValue;

/// Describes a value that is either a static or a reactive string, i.e.,
/// a [`String`], a [`&str`], or a reactive `Fn() -> String`.
@@ -73,3 +74,11 @@ impl Default for TextProp {
        Self(Arc::new(|| Oco::Borrowed("")))
    }
}

impl IntoAttributeValue for TextProp {
    type Output = Oco<'static, str>;

    fn into_attribute_value(self) -> Self::Output {
        self.get()
    }
}

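A small illustrative sketch (not part of the diff) of what the new impl allows: resolving a `TextProp` wherever an attribute value is expected. The import paths are assumptions about the re-exports.

```rust
use leptos::prelude::*;
use leptos::tachys::prelude::IntoAttributeValue;

// Illustrative only: a TextProp can now be turned into an attribute value directly;
// `into_attribute_value()` is equivalent to calling `.get()`.
let label = TextProp::from(String::from("hello"));
let value = label.into_attribute_value();
assert_eq!(&*value, "hello");
```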
@@ -1,6 +1,6 @@
[package]
name = "leptos_macro"
version = "0.7.4"
version = { workspace = true }
authors = ["Greg Johnston"]
license = "MIT"
repository = "https://github.com/leptos-rs/leptos"
@@ -34,6 +34,7 @@ log = "0.4.22"
typed-builder = "0.20.0"
trybuild = "1.0"
leptos = { path = "../leptos" }
leptos_router = { path = "../router", features = ["ssr"] }
server_fn = { path = "../server_fn", features = ["cbor"] }
insta = "1.41"
serde = "1.0"

@@ -144,8 +144,6 @@ impl ToTokens for Model {
        let (impl_generics, generics, where_clause) =
            body.sig.generics.split_for_impl();

        let lifetimes = body.sig.generics.lifetimes();

        let props_name = format_ident!("{name}Props");
        let props_builder_name = format_ident!("{name}PropsBuilder");
        let props_serialized_name = format_ident!("{name}PropsSerialized");
@@ -301,7 +299,7 @@ impl ToTokens for Model {
        } else if cfg!(erase_components) {
            quote! {
                ::leptos::prelude::IntoAny::into_any(
                    ::leptos::prelude::untrack(
                    ::leptos::reactive::graph::untrack_with_diagnostics(
                        move || {
                            #tracing_guard_expr
                            #tracing_props_expr
@@ -312,7 +310,7 @@ impl ToTokens for Model {
            }
        } else {
            quote! {
                ::leptos::prelude::untrack(
                ::leptos::reactive::graph::untrack_with_diagnostics(
                    move || {
                        #tracing_guard_expr
                        #tracing_props_expr
@@ -570,7 +568,7 @@ impl ToTokens for Model {
        #tracing_instrument_attr
        #vis fn #name #impl_generics (
            #props_arg
        ) #ret #(+ #lifetimes)*
        ) #ret
        #where_clause
        {
            #body

32
leptos_macro/src/lazy.rs
Normal file
@@ -0,0 +1,32 @@
use convert_case::{Case, Casing};
use proc_macro::TokenStream;
use proc_macro2::Ident;
use proc_macro_error2::abort;
use quote::quote;
use syn::{spanned::Spanned, ItemFn};

pub fn lazy_impl(
    _args: proc_macro::TokenStream,
    s: TokenStream,
) -> TokenStream {
    let fun = syn::parse::<ItemFn>(s).unwrap_or_else(|e| {
        abort!(e.span(), "`lazy` can only be used on a function")
    });
    if fun.sig.asyncness.is_none() {
        abort!(
            fun.sig.asyncness.span(),
            "`lazy` can only be used on an async function"
        )
    }

    let converted_name = Ident::new(
        &fun.sig.ident.to_string().to_case(Case::Snake),
        fun.sig.ident.span(),
    );

    quote! {
        #[cfg_attr(feature = "split", wasm_split::wasm_split(#converted_name))]
        #fun
    }
    .into()
}
@@ -23,6 +23,7 @@ mod params;
mod view;
use crate::component::unmodified_fn_name_from_fn_name;
mod component;
mod lazy;
mod memo;
mod slice;
mod slot;
@@ -676,17 +677,21 @@ fn component_macro(
        #[allow(non_snake_case, dead_code, clippy::too_many_arguments, clippy::needless_lifetimes)]
        #unexpanded
    }
} else if let Ok(mut dummy) = dummy {
    dummy.sig.ident = unmodified_fn_name_from_fn_name(&dummy.sig.ident);
    quote! {
        #[doc(hidden)]
        #[allow(non_snake_case, dead_code, clippy::too_many_arguments, clippy::needless_lifetimes)]
        #dummy
    }
} else {
    quote! {}
}
.into()
match dummy {
    Ok(mut dummy) => {
        dummy.sig.ident = unmodified_fn_name_from_fn_name(&dummy.sig.ident);
        quote! {
            #[doc(hidden)]
            #[allow(non_snake_case, dead_code, clippy::too_many_arguments, clippy::needless_lifetimes)]
            #dummy
        }
    }
    Err(e) => {
        proc_macro_error2::abort!(e.span(), e);
    }
}
}.into()
}

/// Annotates a struct so that it can be used with your Component as a `slot`.
@@ -1002,3 +1007,17 @@ pub fn slice(input: TokenStream) -> TokenStream {
pub fn memo(input: TokenStream) -> TokenStream {
    memo::memo_impl(input)
}

/// The `#[lazy]` macro marks an `async` function as a function that can be lazy-loaded from a
/// separate (WebAssembly) binary.
///
/// The first time the function is called, calling the function will first load that other binary,
/// then call the function. On subsequent calls it will be called immediately, but still return
/// asynchronously to maintain the same API.
///
/// All parameters and output types should be concrete types, with no generics.
#[proc_macro_attribute]
#[proc_macro_error]
pub fn lazy(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
    lazy::lazy_impl(args, s)
}

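A hedged sketch of how `#[lazy]` is intended to be used; the function and its caller are made-up examples, and the exact re-export path of the attribute may differ from what is shown here:

```rust
use leptos::prelude::*;

// Hypothetical example: with `#[lazy]`, this async function's body can be split into a
// separate WebAssembly binary that is only fetched the first time the function runs.
#[lazy]
async fn heavy_computation(input: String) -> String {
    input.to_uppercase()
}

// The call site stays an ordinary `.await`, whether or not the split module is loaded yet.
async fn caller() -> String {
    heavy_computation("hello".into()).await
}
```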
@@ -13,7 +13,13 @@ pub fn params_impl(ast: &syn::DeriveInput) -> proc_macro::TokenStream {
        .named
        .iter()
        .map(|field| {
            let field_name_string = &field.ident.as_ref().expect("expected named struct fields").to_string();
            let field_name_string = &field
                .ident
                .as_ref()
                .expect("expected named struct fields")
                .to_string()
                .trim_start_matches("r#")
                .to_owned();
            let ident = &field.ident;
            let ty = &field.ty;
            let span = field.span();

@@ -171,7 +171,7 @@ pub(crate) fn component_to_tokens(

    let spreads = (!(spreads.is_empty())).then(|| {
        quote! {
            .add_any_attr((#(#spreads,)*))
            .add_any_attr((#(#spreads,)*).into_attr())
        }
    });

@@ -154,7 +154,12 @@ fn is_inert_element(orig_node: &Node<impl CustomNode>) -> bool {
                Some(value) => {
                    matches!(&value.value, KVAttributeValue::Expr(expr) if {
                        if let Expr::Lit(lit) = expr {
                            matches!(&lit.lit, Lit::Str(_))
                            let key = attr.key.to_string();
                            if key.starts_with("style:") || key.starts_with("prop:") || key.starts_with("on:") || key.starts_with("use:") || key.starts_with("bind") {
                                false
                            } else {
                                matches!(&lit.lit, Lit::Str(_))
                            }
                        } else {
                            false
                        }
@@ -1129,6 +1134,11 @@ pub(crate) fn attribute_absolute(
                    ::leptos::tachys::html::attribute::custom::custom_attribute(#name, #value)
                }
            }
            else if name == "node_ref" {
                quote! {
                    ::leptos::tachys::html::node_ref::#key(#value)
                }
            }
            else {
                quote! {
                    ::leptos::tachys::html::attribute::#key(#value)
@@ -1169,8 +1179,7 @@ pub(crate) fn event_type_and_handler(
) -> (TokenStream, TokenStream, TokenStream) {
    let handler = attribute_value(node, false);

    let (event_type, is_custom, is_force_undelegated, is_targeted) =
        parse_event_name(name);
    let (event_type, is_custom, options) = parse_event_name(name);

    let event_name_ident = match &node.key {
        NodeName::Punctuated(parts) => {
@@ -1188,11 +1197,17 @@ pub(crate) fn event_type_and_handler(
        }
        _ => unreachable!(),
    };
    let capture_ident = match &node.key {
        NodeName::Punctuated(parts) => {
            parts.iter().find(|part| part.to_string() == "capture")
        }
        _ => unreachable!(),
    };
    let on = match &node.key {
        NodeName::Punctuated(parts) => &parts[0],
        _ => unreachable!(),
    };
    let on = if is_targeted {
    let on = if options.targeted {
        Ident::new("on_target", on.span()).to_token_stream()
    } else {
        on.to_token_stream()
@@ -1205,15 +1220,29 @@ pub(crate) fn event_type_and_handler(
        event_type
    };

    let event_type = if is_force_undelegated {
    let event_type = quote! {
        ::leptos::tachys::html::event::#event_type
    };
    let event_type = if options.captured {
        let capture = if let Some(capture) = capture_ident {
            quote! { #capture }
        } else {
            quote! { capture }
        };
        quote! { ::leptos::tachys::html::event::#capture(#event_type) }
    } else {
        event_type
    };

    let event_type = if options.undelegated {
        let undelegated = if let Some(undelegated) = undelegated_ident {
            quote! { #undelegated }
        } else {
            quote! { undelegated }
        };
        quote! { ::leptos::tachys::html::event::#undelegated(::leptos::tachys::html::event::#event_type) }
        quote! { ::leptos::tachys::html::event::#undelegated(#event_type) }
    } else {
        quote! { ::leptos::tachys::html::event::#event_type }
        event_type
    };

    (on, event_type, handler)
@@ -1419,13 +1448,22 @@ fn is_ambiguous_element(tag: &str) -> bool {
    tag == "a" || tag == "script" || tag == "title"
}

fn parse_event(event_name: &str) -> (String, bool, bool) {
    let is_undelegated = event_name.contains(":undelegated");
    let is_targeted = event_name.contains(":target");
fn parse_event(event_name: &str) -> (String, EventNameOptions) {
    let undelegated = event_name.contains(":undelegated");
    let targeted = event_name.contains(":target");
    let captured = event_name.contains(":capture");
    let event_name = event_name
        .replace(":undelegated", "")
        .replace(":target", "");
    (event_name, is_undelegated, is_targeted)
        .replace(":target", "")
        .replace(":capture", "");
    (
        event_name,
        EventNameOptions {
            undelegated,
            targeted,
            captured,
        },
    )
}

/// Escapes Rust keywords that are also HTML attribute names
@@ -1617,8 +1655,17 @@ const TYPED_EVENTS: [&str; 126] = [

const CUSTOM_EVENT: &str = "Custom";

pub(crate) fn parse_event_name(name: &str) -> (TokenStream, bool, bool, bool) {
    let (name, is_force_undelegated, is_targeted) = parse_event(name);
#[derive(Debug)]
pub(crate) struct EventNameOptions {
    undelegated: bool,
    targeted: bool,
    captured: bool,
}

pub(crate) fn parse_event_name(
    name: &str,
) -> (TokenStream, bool, EventNameOptions) {
    let (name, options) = parse_event(name);

    let (event_type, is_custom) = TYPED_EVENTS
        .binary_search(&name.as_str())
@@ -1634,7 +1681,7 @@ pub(crate) fn parse_event_name(name: &str) -> (TokenStream, bool, bool, bool) {
    } else {
        event_type
    };
    (event_type, is_custom, is_force_undelegated, is_targeted)
    (event_type, is_custom, options)
}

fn convert_to_snake_case(name: String) -> String {

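These parser changes add a `:capture` modifier alongside the existing `:undelegated` and `:target` ones. A hedged sketch of how that reads at a call site; the component and handler are illustrative, not taken from this diff:

```rust
use leptos::prelude::*;

#[component]
fn ClickLogger() -> impl IntoView {
    view! {
        // The handler runs in the capture phase, before any child handlers fire;
        // modifiers can be combined, e.g. `on:click:capture:undelegated=...`.
        <div on:click:capture=move |_| leptos::logging::log!("captured click")>
            <button>"Click me"</button>
        </div>
    }
}
```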
@@ -104,3 +104,18 @@ fn component_nostrip() {
        />
    };
}

#[component]
fn WithLifetime<'a>(data: &'a str) -> impl IntoView {
    _ = data;
    "static lifetime"
}

#[test]
fn returns_static_lifetime() {
    #[allow(unused)]
    fn can_return_impl_intoview_from_body() -> impl IntoView {
        let val = String::from("non_static_lifetime");
        WithLifetime(WithLifetimeProps::builder().data(&val).build())
    }
}

28
leptos_macro/tests/params.rs
Normal file
@@ -0,0 +1,28 @@
use leptos::prelude::*;
use leptos_router::params::Params;

#[derive(PartialEq, Debug, Params)]
struct UserInfo {
    user_id: Option<String>,
    email: Option<String>,
    r#type: Option<i32>,
    not_found: Option<i32>,
}

#[test]
fn params_test() {
    let mut map = leptos_router::params::ParamsMap::new();
    map.insert("user_id", "12".to_owned());
    map.insert("email", "em@il".to_owned());
    map.insert("type", "12".to_owned());
    let user_info = UserInfo::from_map(&map).unwrap();
    assert_eq!(
        UserInfo {
            email: Some("em@il".to_owned()),
            user_id: Some("12".to_owned()),
            r#type: Some(12),
            not_found: None,
        },
        user_info
    );
}

@@ -11,7 +11,7 @@ edition.workspace = true

[dependencies]
base64 = "0.22.1"
codee = { version = "0.2.0", features = ["json_serde"] }
codee = { version = "0.3.0", features = ["json_serde"] }
hydration_context = { workspace = true }
reactive_graph = { workspace = true, features = ["hydration"] }
server_fn = { workspace = true }

@@ -1,6 +1,6 @@
[package]
name = "leptos_meta"
version = "0.7.4"
version = "0.7.7"
authors = ["Greg Johnston"]
license = "MIT"
repository = "https://github.com/leptos-rs/leptos"

14
projects/hexagonal-architecture/.gitignore
vendored
Normal file
@@ -0,0 +1,14 @@
# Generated by Cargo
# will have compiled files and executables
debug/
target/

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
111
projects/hexagonal-architecture/Cargo.toml
Normal file
111
projects/hexagonal-architecture/Cargo.toml
Normal file
@@ -0,0 +1,111 @@
|
||||
[package]
|
||||
name = "leptos-hexagonal-design"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
leptos = { version = "0.7.0" }
|
||||
leptos_router = { version = "0.7.0" }
|
||||
axum = { version = "0.7", optional = true }
|
||||
console_error_panic_hook = "0.1"
|
||||
leptos_axum = { version = "0.7.0", optional = true }
|
||||
leptos_meta = { version = "0.7.0" }
|
||||
tokio = { version = "1", features = ["rt-multi-thread"], optional = true }
|
||||
tower = { version = "0.4", optional = true }
|
||||
tower-http = { version = "0.5", features = ["fs"], optional = true }
|
||||
wasm-bindgen = "=0.2.99"
|
||||
thiserror = "1"
|
||||
tracing = { version = "0.1", optional = true }
|
||||
http = "1"
|
||||
mockall = "0.13.1"
|
||||
cfg-if = "1.0.0"
|
||||
serde = "1.0.215"
|
||||
pin-project-lite = "0.2.15"
|
||||
|
||||
[features]
|
||||
config_1 = []
|
||||
hydrate = ["leptos/hydrate"]
|
||||
ssr = [
|
||||
"dep:axum",
|
||||
"dep:tokio",
|
||||
"dep:tower",
|
||||
"dep:tower-http",
|
||||
"dep:leptos_axum",
|
||||
"leptos/ssr",
|
||||
"leptos_meta/ssr",
|
||||
"leptos_router/ssr",
|
||||
"dep:tracing",
|
||||
]
|
||||
|
||||
# Defines a size-optimized profile for the WASM bundle in release mode
|
||||
[profile.wasm-release]
|
||||
inherits = "release"
|
||||
opt-level = 'z'
|
||||
lto = true
|
||||
codegen-units = 1
|
||||
panic = "abort"
|
||||
|
||||
[package.metadata.leptos]
|
||||
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
|
||||
output-name = "leptos-hexagonal-design"
|
||||
|
||||
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
|
||||
site-root = "target/site"
|
||||
|
||||
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
|
||||
# Defaults to pkg
|
||||
site-pkg-dir = "pkg"
|
||||
|
||||
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
|
||||
style-file = "style/main.scss"
|
||||
# Assets source dir. All files found here will be copied and synchronized to site-root.
|
||||
# The assets-dir cannot have a sub directory with the same name/path as site-pkg-dir.
|
||||
#
|
||||
# Optional. Env: LEPTOS_ASSETS_DIR.
|
||||
assets-dir = "public"
|
||||
|
||||
# The IP and port (ex: 127.0.0.1:3000) where the server serves the content. Use it in your server setup.
|
||||
site-addr = "127.0.0.1:3000"
|
||||
|
||||
# The port to use for automatic reload monitoring
|
||||
reload-port = 3001
|
||||
|
||||
# [Optional] Command to use when running end2end tests. It will run in the end2end dir.
|
||||
# [Windows] for non-WSL use "npx.cmd playwright test"
|
||||
# This binary name can be checked in Powershell with Get-Command npx
|
||||
end2end-cmd = "npx playwright test"
|
||||
end2end-dir = "end2end"
|
||||
|
||||
# The browserlist query used for optimizing the CSS.
|
||||
browserquery = "defaults"
|
||||
|
||||
# The environment Leptos will run in, usually either "DEV" or "PROD"
|
||||
env = "DEV"
|
||||
|
||||
# The features to use when compiling the bin target
|
||||
#
|
||||
# Optional. Can be over-ridden with the command line parameter --bin-features
|
||||
bin-features = ["ssr"]
|
||||
|
||||
# If the --no-default-features flag should be used when compiling the bin target
|
||||
#
|
||||
# Optional. Defaults to false.
|
||||
bin-default-features = false
|
||||
|
||||
# The features to use when compiling the lib target
|
||||
#
|
||||
# Optional. Can be over-ridden with the command line parameter --lib-features
|
||||
lib-features = ["hydrate"]
|
||||
|
||||
# If the --no-default-features flag should be used when compiling the lib target
|
||||
#
|
||||
# Optional. Defaults to false.
|
||||
lib-default-features = false
|
||||
|
||||
# The profile to use for the lib target when compiling for release
|
||||
#
|
||||
# Optional. Defaults to "release".
|
||||
lib-profile-release = "wasm-release"
|
||||
24
projects/hexagonal-architecture/LICENSE
Normal file
24
projects/hexagonal-architecture/LICENSE
Normal file
@@ -0,0 +1,24 @@
|
||||
This is free and unencumbered software released into the public domain.
|
||||
|
||||
Anyone is free to copy, modify, publish, use, compile, sell, or
|
||||
distribute this software, either in source code form or as a compiled
|
||||
binary, for any purpose, commercial or non-commercial, and by any
|
||||
means.
|
||||
|
||||
In jurisdictions that recognize copyright laws, the author or authors
|
||||
of this software dedicate any and all copyright interest in the
|
||||
software to the public domain. We make this dedication for the benefit
|
||||
of the public at large and to the detriment of our heirs and
|
||||
successors. We intend this dedication to be an overt act of
|
||||
relinquishment in perpetuity of all present and future rights to this
|
||||
software under copyright law.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
||||
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
||||
OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
For more information, please refer to <https://unlicense.org>
|
||||
142
projects/hexagonal-architecture/README.md
Normal file
@@ -0,0 +1,142 @@
### Leptos Hexagonal Design

This blog post / GitHub repository is about applying the principles of hexagonal design:

- Isolating business logic from sub-domains
- Decoupling the design to improve flexibility and testability
- Applying the principles hierarchically, so that sub-domains which talk to external services also implement hexagonal architecture

There are specific constraints that guide our design decisions:

- Server functions can't be generic
- Boxed trait objects have overhead, so we want to stay generic as far as possible and avoid trait objects

The way this works is that we define the functionality of our program in the main domain (i.e., the business problem and processes our app is trying to solve / proceduralize). We then create sub-domains and external services, although they are represented the same way. External services are usually the end nodes of your app's architectural graph. Our main application builds its service layout using configuration flags.

```rust
pub fn config() -> MainAppHandlerAlias {
    cfg_if::cfg_if! {
        if #[cfg(feature="open_ai_wrapper")] {
            fn server_handler_config_1() -> MainAppHandler<
                AuthService<PostgresDb, Redis>,
                AiMessageGen<PostgresDb, OpenAiWrapper>,
            > {
                MainAppHandler::new_with_postgres_and_redis_open_ai()
            }
            server_handler_config_1()
        } else {
            fn server_handler_config_2() -> MainAppHandler<
                AuthService<MySql, MemCache>,
                OtherAiMessageGen<MySql, HuggingFaceWrapper>,
            > {
                MainAppHandler::new_with_my_sql_memcache_hugging_face()
            }
            server_handler_config_2()
        }
    }
}
```

And we pass in our handler, which implements a trait:

```rust
pub trait HandlerServerFn {
    fn server_fn_1_inner(&self);
}

impl<S: SubDomain1Trait, S2: SubDomain2Trait> HandlerServerFn for MainAppHandler<S, S2> {
    fn server_fn_1_inner(&self) {
        // do thing
    }
}
```

In our main fn we produce our application's service graph and pass it to our Leptos router.

```rust
async fn main() {
    let leptos_options = conf.leptos_options;
    let routes = generate_route_list(crate::app::App);
    // our feature-flag based config function.
    let handler = config();
    let handler_c = handler.clone();
    // we implement FromRef<ServerState> for LeptosOptions
    let server_state = ServerState {
        handler,
        leptos_options: leptos_options.clone(),
    };
    let app = Router::new()
        .leptos_routes_with_context(
            &server_state,
            routes,
            // We pass in the MainAppHandler struct as context so we can fetch it anywhere context is available on the server.
            // This includes in middleware we define on server functions (see middleware.rs)
            move || provide_context(handler_c.clone()),
            {
                let leptos_options = leptos_options.clone();
                move || shell(leptos_options.clone())
            },
        )
        .fallback(leptos_axum::file_and_error_handler::<
            ServerState<HandlerStructAlias>,
            _,
        >(shell))
        .with_state(server_state);
}
```

And then in our server functions:

```rust
#[server]
pub async fn server_fn_1() -> Result<(), ServerFnError> {
    // we type-alias every variation of our services we plan on configuring. The alternative is using Box<dyn Trait>, which isn't bad - just slower.
    Ok(expect_context::<MainAppHandlerAlias>().server_fn_1_inner())
}
```

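A hedged sketch of what such a type alias might look like; these aliases are hypothetical and simply mirror the names used in the config snippet above:

```rust
// Hypothetical aliases matching the feature-flag config above, so server functions can
// call `expect_context::<MainAppHandlerAlias>()` without being generic themselves.
#[cfg(feature = "open_ai_wrapper")]
pub type MainAppHandlerAlias =
    MainAppHandler<AuthService<PostgresDb, Redis>, AiMessageGen<PostgresDb, OpenAiWrapper>>;

#[cfg(not(feature = "open_ai_wrapper"))]
pub type MainAppHandlerAlias =
    MainAppHandler<AuthService<MySql, MemCache>, OtherAiMessageGen<MySql, HuggingFaceWrapper>>;
```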
And then we can mock any service trait in any combination, like so:

```rust
#[tokio::test]
pub async fn test_subdomain_1_with_mocks() -> Result<(), Box<dyn Error>> {
    let mut mock_external_service_1 = MockExternalServiceTrait1::new();
    mock_external_service_1
        .expect_external_service_1_method()
        .returning(|| {
            println!("Mock external service 1");
            Ok(ExternalService1Data)
        });
    let mut mock_external_service_2 = MockExternalServiceTrait2::new();
    mock_external_service_2
        .expect_external_service_2_method()
        .returning(|| {
            println!("Mock external service 2");
            Ok(ExternalService2Data)
        });
    let real_subdomain_1_with_mock_externals = SubDomainStruct1 {
        external_service_1: mock_external_service_1,
        external_service_2: mock_external_service_2,
    };
    let data = real_subdomain_1_with_mock_externals
        .sub_domain_1_method()
        .await?;
    assert_eq!(data, SubDomain1Data);
    Ok(())
}
```

Check out the code in the repository for a working example.

Run the tests with `cargo test --features ssr`; otherwise run `cargo leptos serve` and navigate to `127.0.0.1:3000`.

Here's a picture:

167
projects/hexagonal-architecture/end2end/package-lock.json
generated
Normal file
167
projects/hexagonal-architecture/end2end/package-lock.json
generated
Normal file
@@ -0,0 +1,167 @@
|
||||
{
|
||||
"name": "end2end",
|
||||
"version": "1.0.0",
|
||||
"lockfileVersion": 2,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "end2end",
|
||||
"version": "1.0.0",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@types/node": "^20.12.12",
|
||||
"typescript": "^5.4.5"
|
||||
}
|
||||
},
|
||||
"node_modules/@playwright/test": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
|
||||
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"playwright": "1.44.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node": {
|
||||
"version": "20.12.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.12.tgz",
|
||||
"integrity": "sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"undici-types": "~5.26.4"
|
||||
}
|
||||
},
|
||||
"node_modules/fsevents": {
|
||||
"version": "2.3.2",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"dev": true,
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/playwright": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
|
||||
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"playwright-core": "1.44.1"
|
||||
},
|
||||
"bin": {
|
||||
"playwright": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"fsevents": "2.3.2"
|
||||
}
|
||||
},
|
||||
"node_modules/playwright-core": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"playwright-core": "cli.js"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
},
|
||||
"node_modules/typescript": {
|
||||
"version": "5.4.5",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
|
||||
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"bin": {
|
||||
"tsc": "bin/tsc",
|
||||
"tsserver": "bin/tsserver"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=14.17"
|
||||
}
|
||||
},
|
||||
"node_modules/undici-types": {
|
||||
"version": "5.26.5",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
|
||||
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
|
||||
"dev": true,
|
||||
"license": "MIT"
|
||||
}
|
||||
},
|
||||
"dependencies": {
|
||||
"@playwright/test": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz",
|
||||
"integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"playwright": "1.44.1"
|
||||
}
|
||||
},
|
||||
"@types/node": {
|
||||
"version": "20.12.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.12.tgz",
|
||||
"integrity": "sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"undici-types": "~5.26.4"
|
||||
}
|
||||
},
|
||||
"fsevents": {
|
||||
"version": "2.3.2",
|
||||
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
|
||||
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"playwright": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz",
|
||||
"integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"fsevents": "2.3.2",
|
||||
"playwright-core": "1.44.1"
|
||||
}
|
||||
},
|
||||
"playwright-core": {
|
||||
"version": "1.44.1",
|
||||
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz",
|
||||
"integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==",
|
||||
"dev": true
|
||||
},
|
||||
"typescript": {
|
||||
"version": "5.4.5",
|
||||
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz",
|
||||
"integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==",
|
||||
"dev": true
|
||||
},
|
||||
"undici-types": {
|
||||
"version": "5.26.5",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
|
||||
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
}
|
||||
15
projects/hexagonal-architecture/end2end/package.json
Normal file
15
projects/hexagonal-architecture/end2end/package.json
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "end2end",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"devDependencies": {
|
||||
"@playwright/test": "^1.44.1",
|
||||
"@types/node": "^20.12.12",
|
||||
"typescript": "^5.4.5"
|
||||
}
|
||||
}
|
||||
105
projects/hexagonal-architecture/end2end/playwright.config.ts
Normal file
105
projects/hexagonal-architecture/end2end/playwright.config.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import type { PlaywrightTestConfig } from "@playwright/test";
|
||||
import { devices, defineConfig } from "@playwright/test";
|
||||
|
||||
/**
|
||||
* Read environment variables from file.
|
||||
* https://github.com/motdotla/dotenv
|
||||
*/
|
||||
// require('dotenv').config();
|
||||
|
||||
/**
|
||||
* See https://playwright.dev/docs/test-configuration.
|
||||
*/
|
||||
export default defineConfig({
|
||||
testDir: "./tests",
|
||||
/* Maximum time one test can run for. */
|
||||
timeout: 30 * 1000,
|
||||
expect: {
|
||||
/**
|
||||
* Maximum time expect() should wait for the condition to be met.
|
||||
* For example in `await expect(locator).toHaveText();`
|
||||
*/
|
||||
timeout: 5000,
|
||||
},
|
||||
/* Run tests in files in parallel */
|
||||
fullyParallel: true,
|
||||
/* Fail the build on CI if you accidentally left test.only in the source code. */
|
||||
forbidOnly: !!process.env.CI,
|
||||
/* Retry on CI only */
|
||||
retries: process.env.CI ? 2 : 0,
|
||||
/* Opt out of parallel tests on CI. */
|
||||
workers: process.env.CI ? 1 : undefined,
|
||||
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
|
||||
reporter: "html",
|
||||
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
|
||||
use: {
|
||||
/* Maximum time each action such as `click()` can take. Defaults to 0 (no limit). */
|
||||
actionTimeout: 0,
|
||||
/* Base URL to use in actions like `await page.goto('/')`. */
|
||||
// baseURL: 'http://localhost:3000',
|
||||
|
||||
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
|
||||
trace: "on-first-retry",
|
||||
},
|
||||
|
||||
/* Configure projects for major browsers */
|
||||
projects: [
|
||||
{
|
||||
name: "chromium",
|
||||
use: {
|
||||
...devices["Desktop Chrome"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "firefox",
|
||||
use: {
|
||||
...devices["Desktop Firefox"],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
name: "webkit",
|
||||
use: {
|
||||
...devices["Desktop Safari"],
|
||||
},
|
||||
},
|
||||
|
||||
/* Test against mobile viewports. */
|
||||
// {
|
||||
// name: 'Mobile Chrome',
|
||||
// use: {
|
||||
// ...devices['Pixel 5'],
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// name: 'Mobile Safari',
|
||||
// use: {
|
||||
// ...devices['iPhone 12'],
|
||||
// },
|
||||
// },
|
||||
|
||||
/* Test against branded browsers. */
|
||||
// {
|
||||
// name: 'Microsoft Edge',
|
||||
// use: {
|
||||
// channel: 'msedge',
|
||||
// },
|
||||
// },
|
||||
// {
|
||||
// name: 'Google Chrome',
|
||||
// use: {
|
||||
// channel: 'chrome',
|
||||
// },
|
||||
// },
|
||||
],
|
||||
|
||||
/* Folder for test artifacts such as screenshots, videos, traces, etc. */
|
||||
// outputDir: 'test-results/',
|
||||
|
||||
/* Run your local dev server before starting the tests */
|
||||
// webServer: {
|
||||
// command: 'npm run start',
|
||||
// port: 3000,
|
||||
// },
|
||||
});
|
||||
@@ -0,0 +1,9 @@
|
||||
import { test, expect } from "@playwright/test";
|
||||
|
||||
test("homepage has title and heading text", async ({ page }) => {
|
||||
await page.goto("http://localhost:3000/");
|
||||
|
||||
await expect(page).toHaveTitle("Welcome to Leptos");
|
||||
|
||||
await expect(page.locator("h1")).toHaveText("Welcome to Leptos!");
|
||||
});
|
||||
109
projects/hexagonal-architecture/end2end/tsconfig.json
Normal file
109
projects/hexagonal-architecture/end2end/tsconfig.json
Normal file
@@ -0,0 +1,109 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
/* Visit https://aka.ms/tsconfig to read more about this file */
|
||||
|
||||
/* Projects */
|
||||
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
|
||||
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
|
||||
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
|
||||
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
|
||||
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
|
||||
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
|
||||
|
||||
/* Language and Environment */
|
||||
"target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
|
||||
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
|
||||
// "jsx": "preserve", /* Specify what JSX code is generated. */
|
||||
// "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
|
||||
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
|
||||
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
|
||||
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
|
||||
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
|
||||
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
|
||||
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
|
||||
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
|
||||
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
|
||||
|
||||
/* Modules */
|
||||
"module": "commonjs", /* Specify what module code is generated. */
|
||||
// "rootDir": "./", /* Specify the root folder within your source files. */
|
||||
// "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
|
||||
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
|
||||
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
|
||||
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
|
||||
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
|
||||
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
|
||||
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
||||
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
|
||||
// "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
|
||||
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
|
||||
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
|
||||
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
|
||||
// "resolveJsonModule": true, /* Enable importing .json files. */
|
||||
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
|
||||
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
|
||||
|
||||
/* JavaScript Support */
|
||||
// "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
|
||||
// "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
|
||||
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
|
||||
|
||||
/* Emit */
|
||||
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
|
||||
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
|
||||
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
|
||||
// "sourceMap": true, /* Create source map files for emitted JavaScript files. */
|
||||
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
|
||||
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
|
||||
// "outDir": "./", /* Specify an output folder for all emitted files. */
|
||||
// "removeComments": true, /* Disable emitting comments. */
|
||||
// "noEmit": true, /* Disable emitting files from a compilation. */
|
||||
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
|
||||
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
|
||||
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
|
||||
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
|
||||
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
|
||||
// "newLine": "crlf", /* Set the newline character for emitting files. */
|
||||
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
|
||||
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
|
||||
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
|
||||
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
|
||||
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
|
||||
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
|
||||
|
||||
/* Interop Constraints */
|
||||
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
|
||||
// "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
|
||||
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
|
||||
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
|
||||
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
|
||||
"forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
|
||||
|
||||
/* Type Checking */
|
||||
"strict": true, /* Enable all strict type-checking options. */
|
||||
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
|
||||
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
|
||||
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
|
||||
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
|
||||
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
|
||||
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
|
||||
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
|
||||
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
|
||||
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
|
||||
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
|
||||
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
|
||||
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
|
||||
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
|
||||
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
|
||||
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
|
||||
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
|
||||
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
|
||||
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
|
||||
|
||||
/* Completeness */
|
||||
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
|
||||
"skipLibCheck": true /* Skip type checking all .d.ts files. */
|
||||
}
|
||||
}
|
||||
Binary file not shown. (Size: 292 KiB)
BIN
projects/hexagonal-architecture/public/favicon.ico
Normal file
Binary file not shown. (Size: 15 KiB)
87
projects/hexagonal-architecture/src/app.rs
Normal file
87
projects/hexagonal-architecture/src/app.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use leptos::prelude::*;
|
||||
use leptos_meta::{provide_meta_context, MetaTags, Stylesheet, Title};
|
||||
use leptos_router::{
|
||||
components::{Route, Router, Routes},
|
||||
StaticSegment,
|
||||
};
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
use super::{server_types::HandlerStructAlias, traits::HandlerTrait};
|
||||
use crate::ui_types::*;
|
||||
pub fn shell(options: LeptosOptions) -> impl IntoView {
|
||||
view! {
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1"/>
|
||||
<AutoReload options=options.clone() />
|
||||
<HydrationScripts options/>
|
||||
<MetaTags/>
|
||||
</head>
|
||||
<body>
|
||||
<App/>
|
||||
</body>
|
||||
</html>
|
||||
}
|
||||
}
|
||||
|
||||
#[component]
|
||||
pub fn App() -> impl IntoView {
|
||||
// Provides context that manages stylesheets, titles, meta tags, etc.
|
||||
provide_meta_context();
|
||||
|
||||
view! {
|
||||
// injects a stylesheet into the document <head>
|
||||
// id=leptos means cargo-leptos will hot-reload this stylesheet
|
||||
<Stylesheet id="leptos" href="/pkg/leptos-hexagonal-design.css"/>
|
||||
|
||||
// sets the document title
|
||||
<Title text="Welcome to Leptos"/>
|
||||
|
||||
// content for this welcome page
|
||||
<Router>
|
||||
<main>
|
||||
<Routes fallback=|| "Page not found.".into_view()>
|
||||
<Route path=StaticSegment("") view=HomePage/>
|
||||
</Routes>
|
||||
</main>
|
||||
</Router>
|
||||
}
|
||||
}
|
||||
|
||||
/// Renders the home page of your application.
|
||||
#[component]
|
||||
fn HomePage() -> impl IntoView {
|
||||
let server_fn_1 = ServerAction::<ServerFunction1>::new();
|
||||
let server_fn_2 = ServerAction::<ServerFunction2>::new();
|
||||
let server_fn_3 = ServerAction::<ServerFunction3>::new();
|
||||
Effect::new(move |_| {
|
||||
server_fn_1.dispatch(ServerFunction1 {});
|
||||
server_fn_2.dispatch(ServerFunction2 {});
|
||||
server_fn_3.dispatch(ServerFunction3 {});
|
||||
});
|
||||
}
|
||||
|
||||
#[server]
|
||||
#[middleware(crate::middleware::SubDomain1Layer)]
|
||||
pub async fn server_function_1() -> Result<UiMappingFromDomainData, ServerFnError> {
|
||||
Ok(expect_context::<HandlerStructAlias>()
|
||||
.server_fn_1()
|
||||
.await?
|
||||
.into())
|
||||
}
|
||||
#[server]
|
||||
pub async fn server_function_2() -> Result<UiMappingFromDomainData2, ServerFnError> {
|
||||
Ok(expect_context::<HandlerStructAlias>()
|
||||
.server_fn_2()
|
||||
.await?
|
||||
.into())
|
||||
}
|
||||
#[server]
|
||||
pub async fn server_function_3() -> Result<UiMappingFromDomainData3, ServerFnError> {
|
||||
Ok(expect_context::<HandlerStructAlias>()
|
||||
.server_fn_3()
|
||||
.await?
|
||||
.into())
|
||||
}
|
||||
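The `HomePage` component above only dispatches the three server actions; it never reads their results. As a hedged illustration of how the returned values could be consumed, the sketch below assumes the `ServerFunction1` type generated by `#[server]` above and the `value()` signal from the leptos 0.7 `Action` API; the component name `ServerFn1Status` is hypothetical.

```rust
// Illustrative sketch only, not part of this example project.
use leptos::prelude::*;

#[component]
fn ServerFn1Status() -> impl IntoView {
    let server_fn_1 = ServerAction::<ServerFunction1>::new();
    Effect::new(move |_| {
        server_fn_1.dispatch(ServerFunction1 {});
    });

    // `value()` holds `Option<Result<UiMappingFromDomainData, ServerFnError>>`.
    move || match &*server_fn_1.value().read() {
        None => "server_fn_1 has not returned yet".to_string(),
        Some(Ok(_)) => "server_fn_1 returned successfully".to_string(),
        Some(Err(e)) => format!("server_fn_1 failed: {e}"),
    }
}
```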
23  projects/hexagonal-architecture/src/config.rs  (new file)
@@ -0,0 +1,23 @@
|
||||
use super::server_types::*;
|
||||
|
||||
pub fn config() -> HandlerStructAlias {
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(feature="config_1")] {
|
||||
fn server_handler_config_1() -> HandlerStruct<
|
||||
SubDomainStruct1<ExternalService1_1, ExternalService2_1>,
|
||||
SubDomainStruct2<ExternalService1_1>,
|
||||
> {
|
||||
HandlerStruct::default()
|
||||
}
|
||||
server_handler_config_1()
|
||||
} else {
|
||||
fn server_handler_config_2() -> HandlerStruct<
|
||||
SubDomainStruct1<ExternalService1_2, ExternalService2_2>,
|
||||
SubDomainStruct2<ExternalService1_2>,
|
||||
> {
|
||||
HandlerStruct::new()
|
||||
}
|
||||
server_handler_config_2()
|
||||
}
|
||||
}
|
||||
}
|
||||
150  projects/hexagonal-architecture/src/lib.rs  (new file)
@@ -0,0 +1,150 @@
|
||||
pub mod app;
|
||||
|
||||
pub mod ui_types;
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
pub mod config;
|
||||
#[cfg(feature = "ssr")]
|
||||
pub mod middleware;
|
||||
#[cfg(feature = "ssr")]
|
||||
pub mod server_types;
|
||||
#[cfg(feature = "ssr")]
|
||||
pub mod trait_impl;
|
||||
#[cfg(feature = "ssr")]
|
||||
pub mod traits;
|
||||
|
||||
#[cfg(feature = "hydrate")]
|
||||
#[wasm_bindgen::prelude::wasm_bindgen]
|
||||
pub fn hydrate() {
|
||||
use crate::app::*;
|
||||
console_error_panic_hook::set_once();
|
||||
leptos::mount::hydrate_body(App);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod tests {
|
||||
use super::server_types::*;
|
||||
use super::traits::*;
|
||||
use std::error::Error;
|
||||
|
||||
#[tokio::test]
|
||||
pub async fn test_subdomain_1_with_mocks() -> Result<(), Box<dyn Error>> {
|
||||
let mut mock_external_service_1 = MockExternalServiceTrait1::new();
|
||||
mock_external_service_1
|
||||
.expect_external_service_1_method()
|
||||
.returning(|| {
|
||||
println!("Mock external service 1");
|
||||
Ok(ExternalService1Data)
|
||||
});
|
||||
let mut mock_external_service_2 = MockExternalServiceTrait2::new();
|
||||
mock_external_service_2
|
||||
.expect_external_service_2_method()
|
||||
.returning(|| {
|
||||
println!("Mock external service 2");
|
||||
Ok(ExternalService2Data)
|
||||
});
|
||||
let real_subdomain_1_with_mock_externals = SubDomainStruct1 {
|
||||
external_service_1: mock_external_service_1,
|
||||
external_service_2: mock_external_service_2,
|
||||
};
|
||||
let data = real_subdomain_1_with_mock_externals
|
||||
.sub_domain_1_method()
|
||||
.await?;
|
||||
assert_eq!(data, SubDomain1Data);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
pub async fn test_subdomain_2_with_mocks() -> Result<(), Box<dyn Error>> {
|
||||
let mut mock_external_service_1 = MockExternalServiceTrait1::new();
|
||||
mock_external_service_1
|
||||
.expect_external_service_1_method()
|
||||
.returning(|| {
|
||||
println!("Mock external service 1 AGAIN");
|
||||
Ok(ExternalService1Data)
|
||||
});
|
||||
let real_subdomain_2_with_mock_externals = SubDomainStruct2 {
|
||||
external_service_1: mock_external_service_1,
|
||||
};
|
||||
let data = real_subdomain_2_with_mock_externals
|
||||
.sub_domain_2_method()
|
||||
.await?;
|
||||
assert_eq!(data, SubDomain2Data);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
pub async fn test_handler_with_mocks() -> Result<(), Box<dyn Error>> {
|
||||
let mut mock_subdomain_1_trait = MockSubDomainTrait1::new();
|
||||
mock_subdomain_1_trait
|
||||
.expect_sub_domain_1_method()
|
||||
.returning(|| {
|
||||
println!("Mock Subdomain 1");
|
||||
Ok(SubDomain1Data)
|
||||
});
|
||||
let mut mock_subdomain_2_trait = MockSubDomainTrait2::new();
|
||||
mock_subdomain_2_trait
|
||||
.expect_sub_domain_2_method()
|
||||
.returning(|| {
|
||||
println!("Mock Subdomain 2");
|
||||
Ok(SubDomain2Data)
|
||||
});
|
||||
let real_handler_with_mock_subdomains = HandlerStruct {
|
||||
sub_domain_1: mock_subdomain_1_trait,
|
||||
sub_domain_2: mock_subdomain_2_trait,
|
||||
};
|
||||
let data = real_handler_with_mock_subdomains.server_fn_1().await?;
|
||||
assert_eq!(data, DomainData);
|
||||
let data = real_handler_with_mock_subdomains.server_fn_2().await?;
|
||||
assert_eq!(data, DomainData2);
|
||||
let data = real_handler_with_mock_subdomains.server_fn_3().await?;
|
||||
assert_eq!(data, DomainData3);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn mock_subdomain_1() -> SubDomainStruct1<MockExternalServiceTrait1, MockExternalServiceTrait2>
|
||||
{
|
||||
let mut mock_external_service_1 = MockExternalServiceTrait1::new();
|
||||
mock_external_service_1
|
||||
.expect_external_service_1_method()
|
||||
.returning(|| {
|
||||
println!("Mock external service 1");
|
||||
Ok(ExternalService1Data)
|
||||
});
|
||||
let mut mock_external_service_2 = MockExternalServiceTrait2::new();
|
||||
mock_external_service_2
|
||||
.expect_external_service_2_method()
|
||||
.returning(|| {
|
||||
println!("Mock external service 2");
|
||||
Ok(ExternalService2Data)
|
||||
});
|
||||
let real_subdomain_1_with_mock_externals = SubDomainStruct1 {
|
||||
external_service_1: mock_external_service_1,
|
||||
external_service_2: mock_external_service_2,
|
||||
};
|
||||
real_subdomain_1_with_mock_externals
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
pub async fn test_handler_with_mock_and_real_mix() -> Result<(), Box<dyn Error>> {
|
||||
let sub_domain_1 = mock_subdomain_1();
|
||||
let mut mock_subdomain_2_trait = MockSubDomainTrait2::new();
|
||||
mock_subdomain_2_trait
|
||||
.expect_sub_domain_2_method()
|
||||
.returning(|| {
|
||||
println!("Mock Subdomain 2");
|
||||
Ok(SubDomain2Data)
|
||||
});
|
||||
let real_handler = HandlerStruct {
|
||||
sub_domain_1,
|
||||
sub_domain_2: mock_subdomain_2_trait,
|
||||
};
|
||||
let data = real_handler.server_fn_1().await?;
|
||||
assert_eq!(data, DomainData);
|
||||
let data = real_handler.server_fn_2().await?;
|
||||
assert_eq!(data, DomainData2);
|
||||
let data = real_handler.server_fn_3().await?;
|
||||
assert_eq!(data, DomainData3);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
52  projects/hexagonal-architecture/src/main.rs  (new file)
@@ -0,0 +1,52 @@
|
||||
#[cfg(feature = "ssr")]
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
use axum::Router;
|
||||
use leptos::logging::log;
|
||||
use leptos::prelude::*;
|
||||
use leptos_axum::{generate_route_list, LeptosRoutes};
|
||||
use leptos_hexagonal_design::{
|
||||
app::*,
|
||||
config::config,
|
||||
server_types::{HandlerStructAlias, ServerState},
|
||||
};
|
||||
|
||||
let conf = get_configuration(None).unwrap();
|
||||
let addr = conf.leptos_options.site_addr;
|
||||
let leptos_options = conf.leptos_options;
|
||||
let routes = generate_route_list(App);
|
||||
let handler = config();
|
||||
let handler_c = handler.clone();
|
||||
let server_state = ServerState {
|
||||
handler,
|
||||
leptos_options: leptos_options.clone(),
|
||||
};
|
||||
let app = Router::new()
|
||||
.leptos_routes_with_context(
|
||||
&server_state,
|
||||
routes,
|
||||
move || provide_context(handler_c.clone()),
|
||||
{
|
||||
let leptos_options = leptos_options.clone();
|
||||
move || shell(leptos_options.clone())
|
||||
},
|
||||
)
|
||||
.fallback(leptos_axum::file_and_error_handler::<
|
||||
ServerState<HandlerStructAlias>,
|
||||
_,
|
||||
>(shell))
|
||||
.with_state(server_state);
|
||||
|
||||
log!("listening on http://{}", &addr);
|
||||
let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
|
||||
axum::serve(listener, app.into_make_service())
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "ssr"))]
|
||||
pub fn main() {
|
||||
// no client-side main function
|
||||
// unless we want this to work with e.g., Trunk for pure client-side testing
|
||||
// see lib.rs for hydration function instead
|
||||
}
|
||||
84  projects/hexagonal-architecture/src/middleware.rs  (new file)
@@ -0,0 +1,84 @@
|
||||
use axum::{
|
||||
body::Body,
|
||||
http::{Request, Response},
|
||||
};
|
||||
use leptos::prelude::expect_context;
|
||||
use std::{
|
||||
future::Future,
|
||||
pin::Pin,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
use tower::{Layer, Service};
|
||||
|
||||
use crate::{
|
||||
server_types::{HandlerStructAlias, ServerState},
|
||||
traits::SubDomainTrait1,
|
||||
};
|
||||
use pin_project_lite::pin_project;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubDomain1Layer;
|
||||
|
||||
impl<S> Layer<S> for SubDomain1Layer {
|
||||
type Service = SubDomain1MiddleWare<S>;
|
||||
|
||||
fn layer(&self, inner: S) -> Self::Service {
|
||||
SubDomain1MiddleWare { inner }
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SubDomain1MiddleWare<S> {
|
||||
inner: S,
|
||||
}
|
||||
|
||||
impl<S, ReqBody> Service<Request<ReqBody>> for SubDomain1MiddleWare<S>
|
||||
where
|
||||
S: Service<Request<ReqBody>, Response = Response<Body>>,
|
||||
S::Error: std::fmt::Debug,
|
||||
S::Future: Send + 'static,
|
||||
{
|
||||
type Response = S::Response;
|
||||
type Error = S::Error;
|
||||
type Future = SubDomain1Future<S::Future>;
|
||||
|
||||
fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
|
||||
self.inner.poll_ready(cx)
|
||||
}
|
||||
|
||||
fn call(&mut self, req: Request<ReqBody>) -> Self::Future {
|
||||
let req_fut = self.inner.call(req);
|
||||
SubDomain1Future { req_fut }
|
||||
}
|
||||
}
|
||||
pin_project! {
|
||||
pub struct SubDomain1Future<F> {
|
||||
#[pin]
|
||||
req_fut: F,
|
||||
}
|
||||
}
|
||||
|
||||
impl<F, Err> Future for SubDomain1Future<F>
|
||||
where
|
||||
F: Future<Output = Result<Response<Body>, Err>>,
|
||||
{
|
||||
type Output = Result<Response<Body>, Err>;
|
||||
|
||||
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
|
||||
let this = self.project();
|
||||
let subdomain_1 = expect_context::<ServerState<HandlerStructAlias>>()
|
||||
.handler
|
||||
.sub_domain_1;
|
||||
let mut subdomain_1_fut = subdomain_1.sub_domain_1_method();
|
||||
match Pin::as_mut(&mut subdomain_1_fut).poll(cx) {
|
||||
Poll::Ready(Ok(_)) => {
|
||||
println!("Middleware for Subdomain 1 Passed, calling request...");
|
||||
this.req_fut.poll(cx)
|
||||
}
|
||||
Poll::Ready(Err(_)) => Poll::Ready(Ok(Response::builder()
|
||||
.status(http::StatusCode::FORBIDDEN)
|
||||
.body(Body::from("Access denied"))
|
||||
.unwrap())),
|
||||
Poll::Pending => Poll::Pending,
|
||||
}
|
||||
}
|
||||
}
|
||||
102  projects/hexagonal-architecture/src/server_types.rs  (new file)
@@ -0,0 +1,102 @@
|
||||
use super::traits::*;
|
||||
use leptos::config::LeptosOptions;
|
||||
use thiserror::Error;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ServerState<Handler: HandlerTrait> {
|
||||
pub handler: Handler,
|
||||
pub leptos_options: LeptosOptions,
|
||||
}
|
||||
|
||||
#[cfg(feature = "config_1")]
|
||||
pub type HandlerStructAlias = HandlerStruct<
|
||||
SubDomainStruct1<ExternalService1_1, ExternalService2_1>,
|
||||
SubDomainStruct2<ExternalService1_1>,
|
||||
>;
|
||||
#[cfg(not(feature = "config_1"))]
|
||||
pub type HandlerStructAlias = HandlerStruct<
|
||||
SubDomainStruct1<ExternalService1_2, ExternalService2_2>,
|
||||
SubDomainStruct2<ExternalService1_2>,
|
||||
>;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct HandlerStruct<SubDomain1: SubDomainTrait1, SubDomain2: SubDomainTrait2> {
|
||||
pub sub_domain_1: SubDomain1,
|
||||
pub sub_domain_2: SubDomain2,
|
||||
}
|
||||
#[derive(Clone, Default)]
|
||||
pub struct SubDomainStruct1<
|
||||
ExternalService1: ExternalServiceTrait1,
|
||||
ExternalService2: ExternalServiceTrait2,
|
||||
> {
|
||||
pub external_service_1: ExternalService1,
|
||||
pub external_service_2: ExternalService2,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct SubDomainStruct2<ExternalService1: ExternalServiceTrait1> {
|
||||
pub external_service_1: ExternalService1,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct ExternalService1_1;
|
||||
#[derive(Clone, Default)]
|
||||
pub struct ExternalService1_2;
|
||||
#[derive(Clone, Default)]
|
||||
pub struct ExternalService2_1;
|
||||
#[derive(Clone, Default)]
|
||||
pub struct ExternalService2_2;
|
||||
#[derive(Clone, Default)]
|
||||
pub struct ExternalService1;
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
pub struct DomainData;
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
pub struct DomainData2;
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
pub struct DomainData3;
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
pub struct SubDomain1Data;
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
pub struct SubDomain2Data;
|
||||
#[derive(Clone)]
|
||||
pub struct ExternalService1Data;
|
||||
#[derive(Clone)]
|
||||
pub struct ExternalService2Data;
|
||||
|
||||
#[derive(Clone, Error, Debug)]
|
||||
pub enum DomainError {
|
||||
#[error("Underlying Subdomain 1 Error")]
|
||||
SubDomain1Error(#[from] SubDomain1Error),
|
||||
#[error("Underlying Subdomain 2 Error")]
|
||||
SubDomain2Error(#[from] SubDomain2Error),
|
||||
}
|
||||
|
||||
#[derive(Clone, Error, Debug)]
|
||||
pub enum SubDomain1Error {
|
||||
#[error("Sub Domain 1 Error")]
|
||||
SubDomain1Error,
|
||||
#[error("Underlying Service 1")]
|
||||
ExternalService1Error(#[from] ExternalService1Error),
|
||||
#[error("Underlying Service 2")]
|
||||
ExternalService2Error(#[from] ExternalService2Error),
|
||||
}
|
||||
#[derive(Clone, Error, Debug)]
|
||||
pub enum SubDomain2Error {
|
||||
#[error("Sub Domain 2 Error")]
|
||||
SubDomain2Error,
|
||||
#[error("Underlying Service 1")]
|
||||
ExternalService1Error(#[from] ExternalService1Error),
|
||||
}
|
||||
|
||||
#[derive(Clone, Error, Debug)]
|
||||
pub enum ExternalService1Error {
|
||||
#[error("Service 1 Error")]
|
||||
Error,
|
||||
}
|
||||
|
||||
#[derive(Clone, Error, Debug)]
|
||||
pub enum ExternalService2Error {
|
||||
#[error("Service 2 Error")]
|
||||
Error,
|
||||
}
|
||||
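The error enums above form a layered `#[from]` chain: an external-service failure converts into a subdomain error, which in turn converts into `DomainError`, so each layer can simply use `?`. A minimal sketch of that propagation, using the types defined above (the free function names here are illustrative, not part of the project):

```rust
// Hedged sketch: `?` converts errors through thiserror's generated From impls.
fn external_call() -> Result<(), ExternalService1Error> {
    Err(ExternalService1Error::Error)
}

fn sub_domain_call() -> Result<(), SubDomain1Error> {
    external_call()?; // ExternalService1Error -> SubDomain1Error via #[from]
    Ok(())
}

fn domain_call() -> Result<(), DomainError> {
    sub_domain_call()?; // SubDomain1Error -> DomainError via #[from]
    Ok(())
}
```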
149  projects/hexagonal-architecture/src/trait_impl.rs  (new file)
@@ -0,0 +1,149 @@
|
||||
use crate::ui_types::*;
|
||||
|
||||
use super::server_types::*;
|
||||
use super::traits::*;
|
||||
use axum::async_trait;
|
||||
use axum::extract::FromRef;
|
||||
use leptos::config::LeptosOptions;
|
||||
|
||||
// So we can pass our server state as state into our leptos router.
|
||||
impl<Handler: HandlerTrait + Clone> FromRef<ServerState<Handler>> for LeptosOptions {
|
||||
fn from_ref(input: &ServerState<Handler>) -> Self {
|
||||
input.leptos_options.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<SubDomain1, SubDomain2> HandlerTrait for HandlerStruct<SubDomain1, SubDomain2>
|
||||
where
|
||||
SubDomain1: SubDomainTrait1 + Send + Sync,
|
||||
SubDomain2: SubDomainTrait2 + Send + Sync,
|
||||
{
|
||||
async fn server_fn_1(&self) -> Result<DomainData, DomainError> {
|
||||
Ok(self.sub_domain_1.sub_domain_1_method().await?.into())
|
||||
}
|
||||
|
||||
async fn server_fn_2(&self) -> Result<DomainData2, DomainError> {
|
||||
Ok(self.sub_domain_2.sub_domain_2_method().await?.into())
|
||||
}
|
||||
|
||||
async fn server_fn_3(&self) -> Result<DomainData3, DomainError> {
|
||||
Ok((
|
||||
self.sub_domain_1.sub_domain_1_method().await?,
|
||||
self.sub_domain_2.sub_domain_2_method().await?,
|
||||
)
|
||||
.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<ExternalService1, ExternalService2> SubDomainTrait1
|
||||
for SubDomainStruct1<ExternalService1, ExternalService2>
|
||||
where
|
||||
ExternalService1: ExternalServiceTrait1 + Send + Sync,
|
||||
ExternalService2: ExternalServiceTrait2 + Send + Sync,
|
||||
{
|
||||
async fn sub_domain_1_method(&self) -> Result<SubDomain1Data, SubDomain1Error> {
|
||||
Ok((
|
||||
self.external_service_1.external_service_1_method().await?,
|
||||
self.external_service_2.external_service_2_method().await?,
|
||||
)
|
||||
.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<ExternalService1> SubDomainTrait2 for SubDomainStruct2<ExternalService1>
|
||||
where
|
||||
ExternalService1: ExternalServiceTrait1 + Send + Sync,
|
||||
{
|
||||
async fn sub_domain_2_method(&self) -> Result<SubDomain2Data, SubDomain2Error> {
|
||||
Ok(self
|
||||
.external_service_1
|
||||
.external_service_1_method()
|
||||
.await?
|
||||
.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl ExternalServiceTrait1 for ExternalService1_1 {
|
||||
async fn external_service_1_method(
|
||||
&self,
|
||||
) -> Result<ExternalService1Data, ExternalService1Error> {
|
||||
println!("External Service 1 From External Service 1_1");
|
||||
Ok(ExternalService1Data)
|
||||
}
|
||||
}
|
||||
#[async_trait]
|
||||
impl ExternalServiceTrait1 for ExternalService1_2 {
|
||||
async fn external_service_1_method(
|
||||
&self,
|
||||
) -> Result<ExternalService1Data, ExternalService1Error> {
|
||||
println!("External Service 1 From External Service 1_2");
|
||||
Ok(ExternalService1Data)
|
||||
}
|
||||
}
|
||||
#[async_trait]
|
||||
impl ExternalServiceTrait2 for ExternalService2_1 {
|
||||
async fn external_service_2_method(
|
||||
&self,
|
||||
) -> Result<ExternalService2Data, ExternalService2Error> {
|
||||
println!("External Service 2 From External Service 2_1");
|
||||
Ok(ExternalService2Data)
|
||||
}
|
||||
}
|
||||
#[async_trait]
|
||||
impl ExternalServiceTrait2 for ExternalService2_2 {
|
||||
async fn external_service_2_method(
|
||||
&self,
|
||||
) -> Result<ExternalService2Data, ExternalService2Error> {
|
||||
println!("External Service 2 From External Service 2_2");
|
||||
Ok(ExternalService2Data)
|
||||
}
|
||||
}
|
||||
|
||||
// Sub Domain mapping
|
||||
impl From<(ExternalService1Data, ExternalService2Data)> for SubDomain1Data {
|
||||
fn from(_: (ExternalService1Data, ExternalService2Data)) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
impl From<ExternalService1Data> for SubDomain2Data {
|
||||
fn from(_: ExternalService1Data) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
// Domain Mapping
|
||||
impl From<SubDomain1Data> for DomainData {
|
||||
fn from(_: SubDomain1Data) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
impl From<SubDomain2Data> for DomainData2 {
|
||||
fn from(_: SubDomain2Data) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
impl From<(SubDomain1Data, SubDomain2Data)> for DomainData3 {
|
||||
fn from(_: (SubDomain1Data, SubDomain2Data)) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
|
||||
// Ui Mapping
|
||||
impl From<DomainData> for UiMappingFromDomainData {
|
||||
fn from(_: DomainData) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
impl From<DomainData2> for UiMappingFromDomainData2 {
|
||||
fn from(_: DomainData2) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
impl From<DomainData3> for UiMappingFromDomainData3 {
|
||||
fn from(_: DomainData3) -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
42  projects/hexagonal-architecture/src/traits.rs  (new file)
@@ -0,0 +1,42 @@
|
||||
use super::server_types::*;
|
||||
use axum::async_trait;
|
||||
use mockall::automock;
|
||||
pub trait New {
|
||||
fn new() -> Self;
|
||||
}
|
||||
|
||||
#[automock]
|
||||
#[async_trait]
|
||||
pub trait HandlerTrait {
|
||||
async fn server_fn_1(&self) -> Result<DomainData, DomainError>;
|
||||
async fn server_fn_2(&self) -> Result<DomainData2, DomainError>;
|
||||
async fn server_fn_3(&self) -> Result<DomainData3, DomainError>;
|
||||
}
|
||||
|
||||
#[automock]
|
||||
#[async_trait]
|
||||
pub trait SubDomainTrait1 {
|
||||
async fn sub_domain_1_method(&self) -> Result<SubDomain1Data, SubDomain1Error>;
|
||||
}
|
||||
|
||||
#[automock]
|
||||
#[async_trait]
|
||||
pub trait SubDomainTrait2 {
|
||||
async fn sub_domain_2_method(&self) -> Result<SubDomain2Data, SubDomain2Error>;
|
||||
}
|
||||
|
||||
#[automock]
|
||||
#[async_trait]
|
||||
pub trait ExternalServiceTrait1 {
|
||||
async fn external_service_1_method(
|
||||
&self,
|
||||
) -> Result<ExternalService1Data, ExternalService1Error>;
|
||||
}
|
||||
|
||||
#[automock]
|
||||
#[async_trait]
|
||||
pub trait ExternalServiceTrait2 {
|
||||
async fn external_service_2_method(
|
||||
&self,
|
||||
) -> Result<ExternalService2Data, ExternalService2Error>;
|
||||
}
|
||||
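Because every layer is a trait behind `#[automock]`, each one can also be mocked directly. A minimal, illustrative test (not part of the project) that exercises the generated `MockHandlerTrait` without touching any subdomains or external services:

```rust
// Hedged sketch: uses MockHandlerTrait generated by #[automock] above and the
// DomainData/DomainError types from server_types.rs.
#[tokio::test]
async fn handler_trait_can_be_mocked_directly() -> Result<(), Box<dyn std::error::Error>> {
    let mut handler = MockHandlerTrait::new();
    handler.expect_server_fn_1().returning(|| Ok(DomainData));

    assert_eq!(handler.server_fn_1().await?, DomainData);
    Ok(())
}
```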
8  projects/hexagonal-architecture/src/ui_types.rs  (new file)
@@ -0,0 +1,8 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct UiMappingFromDomainData;
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct UiMappingFromDomainData2;
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct UiMappingFromDomainData3;
|
||||
4  projects/hexagonal-architecture/style/main.scss  (new file)
@@ -0,0 +1,4 @@
|
||||
body {
|
||||
font-family: sans-serif;
|
||||
text-align: center;
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_graph"
|
||||
version = "0.1.4"
|
||||
version = "0.1.7"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -939,7 +939,8 @@ where
|
||||
#[track_caller]
|
||||
pub fn dispatch(&self, input: I) -> ActionAbortHandle {
|
||||
self.inner
|
||||
.try_with_value(|inner| inner.dispatch(input))
|
||||
.try_get_value()
|
||||
.map(|inner| inner.dispatch(input))
|
||||
.unwrap_or_else(unwrap_signal!(self))
|
||||
}
|
||||
}
|
||||
@@ -954,7 +955,8 @@ where
|
||||
#[track_caller]
|
||||
pub fn dispatch_local(&self, input: I) -> ActionAbortHandle {
|
||||
self.inner
|
||||
.try_with_value(|inner| inner.dispatch_local(input))
|
||||
.try_get_value()
|
||||
.map(|inner| inner.dispatch_local(input))
|
||||
.unwrap_or_else(unwrap_signal!(self))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@ use crate::{
|
||||
};
|
||||
pub use arc_memo::*;
|
||||
pub use async_derived::*;
|
||||
pub(crate) use inner::MemoInner;
|
||||
pub use memo::*;
|
||||
pub use selector::*;
|
||||
|
||||
|
||||
@@ -12,11 +12,10 @@ use crate::{
|
||||
traits::{DefinedAt, Get, IsDisposed, ReadUntracked},
|
||||
};
|
||||
use core::fmt::Debug;
|
||||
use or_poisoned::OrPoisoned;
|
||||
use std::{
|
||||
hash::Hash,
|
||||
panic::Location,
|
||||
sync::{Arc, RwLock, Weak},
|
||||
sync::{Arc, Weak},
|
||||
};
|
||||
|
||||
/// An efficient derived reactive value based on other reactive values.
|
||||
@@ -95,7 +94,7 @@ where
|
||||
{
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: &'static Location<'static>,
|
||||
inner: Arc<RwLock<MemoInner<T, S>>>,
|
||||
inner: Arc<MemoInner<T, S>>,
|
||||
}
|
||||
|
||||
impl<T: 'static> ArcMemo<T, SyncStorage>
|
||||
@@ -161,7 +160,7 @@ where
|
||||
Weak::clone(weak) as Weak<dyn Subscriber + Send + Sync>,
|
||||
);
|
||||
|
||||
RwLock::new(MemoInner::new(Arc::new(fun), subscriber))
|
||||
MemoInner::new(Arc::new(fun), subscriber)
|
||||
});
|
||||
Self {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
@@ -312,15 +311,11 @@ where
|
||||
S: Storage<T>,
|
||||
{
|
||||
fn add_source(&self, source: AnySource) {
|
||||
self.inner.write().or_poisoned().sources.insert(source);
|
||||
self.inner.add_source(source);
|
||||
}
|
||||
|
||||
fn clear_sources(&self, subscriber: &AnySubscriber) {
|
||||
self.inner
|
||||
.write()
|
||||
.or_poisoned()
|
||||
.sources
|
||||
.clear_sources(subscriber);
|
||||
self.inner.clear_sources(subscriber);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -328,15 +323,15 @@ impl<T: 'static, S> ReadUntracked for ArcMemo<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
{
|
||||
type Value = ReadGuard<T, Mapped<Plain<MemoInner<T, S>>, T>>;
|
||||
type Value = ReadGuard<T, Mapped<Plain<Option<S::Wrapped>>, T>>;
|
||||
|
||||
fn try_read_untracked(&self) -> Option<Self::Value> {
|
||||
self.update_if_necessary();
|
||||
|
||||
Mapped::try_new(Arc::clone(&self.inner), |t| {
|
||||
Mapped::try_new(Arc::clone(&self.inner.value), |t| {
|
||||
// safe to unwrap here because update_if_necessary
|
||||
// guarantees the value is Some
|
||||
t.value.as_ref().unwrap().as_borrowed()
|
||||
t.as_ref().unwrap().as_borrowed()
|
||||
})
|
||||
.map(ReadGuard::new)
|
||||
}
|
||||
|
||||
@@ -15,10 +15,15 @@ pub struct MemoInner<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
{
|
||||
pub(crate) value: Option<S::Wrapped>,
|
||||
/// Must always be acquired *after* the reactivity lock
|
||||
pub(crate) value: Arc<RwLock<Option<S::Wrapped>>>,
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub(crate) fun: Arc<dyn Fn(Option<T>) -> (T, bool) + Send + Sync>,
|
||||
pub(crate) owner: Owner,
|
||||
pub(crate) reactivity: RwLock<MemoInnerReactivity>,
|
||||
}
|
||||
|
||||
pub(crate) struct MemoInnerReactivity {
|
||||
pub(crate) state: ReactiveNodeState,
|
||||
pub(crate) sources: SourceSet,
|
||||
pub(crate) subscribers: SubscriberSet,
|
||||
@@ -44,40 +49,44 @@ where
|
||||
any_subscriber: AnySubscriber,
|
||||
) -> Self {
|
||||
Self {
|
||||
value: None,
|
||||
value: Arc::new(RwLock::new(None)),
|
||||
fun,
|
||||
owner: Owner::new(),
|
||||
state: ReactiveNodeState::Dirty,
|
||||
sources: Default::default(),
|
||||
subscribers: SubscriberSet::new(),
|
||||
any_subscriber,
|
||||
reactivity: RwLock::new(MemoInnerReactivity {
|
||||
state: ReactiveNodeState::Dirty,
|
||||
sources: Default::default(),
|
||||
subscribers: SubscriberSet::new(),
|
||||
any_subscriber,
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static, S> ReactiveNode for RwLock<MemoInner<T, S>>
|
||||
impl<T: 'static, S> ReactiveNode for MemoInner<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
{
|
||||
fn mark_dirty(&self) {
|
||||
self.write().or_poisoned().state = ReactiveNodeState::Dirty;
|
||||
self.reactivity.write().or_poisoned().state = ReactiveNodeState::Dirty;
|
||||
self.mark_subscribers_check();
|
||||
}
|
||||
|
||||
fn mark_check(&self) {
|
||||
{
|
||||
let mut lock = self.write().or_poisoned();
|
||||
let mut lock = self.reactivity.write().or_poisoned();
|
||||
if lock.state != ReactiveNodeState::Dirty {
|
||||
lock.state = ReactiveNodeState::Check;
|
||||
}
|
||||
}
|
||||
for sub in (&self.read().or_poisoned().subscribers).into_iter() {
|
||||
for sub in
|
||||
(&self.reactivity.read().or_poisoned().subscribers).into_iter()
|
||||
{
|
||||
sub.mark_check();
|
||||
}
|
||||
}
|
||||
|
||||
fn mark_subscribers_check(&self) {
|
||||
let lock = self.read().or_poisoned();
|
||||
let lock = self.reactivity.read().or_poisoned();
|
||||
for sub in (&lock.subscribers).into_iter() {
|
||||
sub.mark_check();
|
||||
}
|
||||
@@ -85,7 +94,7 @@ where
|
||||
|
||||
fn update_if_necessary(&self) -> bool {
|
||||
let (state, sources) = {
|
||||
let inner = self.read().or_poisoned();
|
||||
let inner = self.reactivity.read().or_poisoned();
|
||||
(inner.state, inner.sources.clone())
|
||||
};
|
||||
|
||||
@@ -94,32 +103,37 @@ where
|
||||
ReactiveNodeState::Dirty => true,
|
||||
ReactiveNodeState::Check => (&sources).into_iter().any(|source| {
|
||||
source.update_if_necessary()
|
||||
|| self.read().or_poisoned().state
|
||||
|| self.reactivity.read().or_poisoned().state
|
||||
== ReactiveNodeState::Dirty
|
||||
}),
|
||||
};
|
||||
|
||||
if needs_update {
|
||||
let (fun, value, owner) = {
|
||||
let mut lock = self.write().or_poisoned();
|
||||
(lock.fun.clone(), lock.value.take(), lock.owner.clone())
|
||||
};
|
||||
let fun = self.fun.clone();
|
||||
let owner = self.owner.clone();
|
||||
// No deadlock risk, because we only hold the value lock.
|
||||
let value = self.value.write().or_poisoned().take();
|
||||
|
||||
let any_subscriber =
|
||||
{ self.read().or_poisoned().any_subscriber.clone() };
|
||||
{ self.reactivity.read().or_poisoned().any_subscriber.clone() };
|
||||
any_subscriber.clear_sources(&any_subscriber);
|
||||
let (new_value, changed) = owner.with_cleanup(|| {
|
||||
any_subscriber
|
||||
.with_observer(|| fun(value.map(StorageAccess::into_taken)))
|
||||
});
|
||||
|
||||
let mut lock = self.write().or_poisoned();
|
||||
lock.value = Some(S::wrap(new_value));
|
||||
lock.state = ReactiveNodeState::Clean;
|
||||
// Two locks are acquired, so order matters.
|
||||
let mut reactivity_lock = self.reactivity.write().or_poisoned();
|
||||
{
|
||||
// Safety: can block endlessly if the user is holding a ReadGuard on the value
|
||||
let mut value_lock = self.value.write().or_poisoned();
|
||||
*value_lock = Some(S::wrap(new_value));
|
||||
}
|
||||
reactivity_lock.state = ReactiveNodeState::Clean;
|
||||
|
||||
if changed {
|
||||
let subs = lock.subscribers.clone();
|
||||
drop(lock);
|
||||
let subs = reactivity_lock.subscribers.clone();
|
||||
drop(reactivity_lock);
|
||||
for sub in subs {
|
||||
// don't trigger reruns of effects/memos
|
||||
// basically: if one of the observers has triggered this memo to
|
||||
@@ -128,49 +142,54 @@ where
|
||||
sub.mark_dirty();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
drop(reactivity_lock);
|
||||
}
|
||||
|
||||
changed
|
||||
} else {
|
||||
if let Ok(mut lock) = self.try_write() {
|
||||
lock.state = ReactiveNodeState::Clean;
|
||||
}
|
||||
let mut lock = self.reactivity.write().or_poisoned();
|
||||
lock.state = ReactiveNodeState::Clean;
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static, S> Source for RwLock<MemoInner<T, S>>
|
||||
impl<T: 'static, S> Source for MemoInner<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
{
|
||||
fn add_subscriber(&self, subscriber: AnySubscriber) {
|
||||
if let Ok(mut lock) = self.try_write() {
|
||||
lock.subscribers.subscribe(subscriber);
|
||||
}
|
||||
let mut lock = self.reactivity.write().or_poisoned();
|
||||
lock.subscribers.subscribe(subscriber);
|
||||
}
|
||||
|
||||
fn remove_subscriber(&self, subscriber: &AnySubscriber) {
|
||||
self.write()
|
||||
self.reactivity
|
||||
.write()
|
||||
.or_poisoned()
|
||||
.subscribers
|
||||
.unsubscribe(subscriber);
|
||||
}
|
||||
|
||||
fn clear_subscribers(&self) {
|
||||
self.write().or_poisoned().subscribers.take();
|
||||
self.reactivity.write().or_poisoned().subscribers.take();
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static, S> Subscriber for RwLock<MemoInner<T, S>>
|
||||
impl<T: 'static, S> Subscriber for MemoInner<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
{
|
||||
fn add_source(&self, source: AnySource) {
|
||||
self.write().or_poisoned().sources.insert(source);
|
||||
self.reactivity.write().or_poisoned().sources.insert(source);
|
||||
}
|
||||
|
||||
fn clear_sources(&self, subscriber: &AnySubscriber) {
|
||||
self.write().or_poisoned().sources.clear_sources(subscriber);
|
||||
self.reactivity
|
||||
.write()
|
||||
.or_poisoned()
|
||||
.sources
|
||||
.clear_sources(subscriber);
|
||||
}
|
||||
}
|
||||
|
||||
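The refactor above splits `MemoInner` into a reactivity lock and a separate value lock, with a fixed acquisition order (reactivity first, then value, released promptly) so that a caller holding a read guard on the value cannot deadlock the graph bookkeeping. A minimal sketch of that ordering rule using plain `std::sync` types (illustrative only, not the actual memo code):

```rust
use std::sync::RwLock;

// Stand-ins for the two halves of MemoInner.
struct Inner {
    reactivity: RwLock<u32>,       // graph state: sources, subscribers, dirtiness
    value: RwLock<Option<String>>, // the memoized value itself
}

fn recompute(inner: &Inner, new_value: String) {
    // 1. Take the reactivity lock first; it is held for the whole update.
    let mut reactivity = inner.reactivity.write().unwrap();
    {
        // 2. Take the value lock second and keep its scope short, so a reader
        //    holding a value guard only ever blocks this one assignment.
        let mut value = inner.value.write().unwrap();
        *value = Some(new_value);
    }
    // 3. Update graph state (e.g. mark the node clean) after the value is stored.
    *reactivity += 1;
}
```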
@@ -1,4 +1,4 @@
|
||||
use super::{inner::MemoInner, ArcMemo};
|
||||
use super::ArcMemo;
|
||||
use crate::{
|
||||
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
|
||||
signal::{
|
||||
@@ -306,7 +306,8 @@ where
|
||||
T: 'static,
|
||||
S: Storage<ArcMemo<T, S>> + Storage<T>,
|
||||
{
|
||||
type Value = ReadGuard<T, Mapped<Plain<MemoInner<T, S>>, T>>;
|
||||
type Value =
|
||||
ReadGuard<T, Mapped<Plain<Option<<S as Storage<T>>::Wrapped>>, T>>;
|
||||
|
||||
fn try_read_untracked(&self) -> Option<Self::Value> {
|
||||
self.inner
|
||||
|
||||
@@ -135,6 +135,13 @@ pub fn untrack<T>(fun: impl FnOnce() -> T) -> T {
|
||||
fun()
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn untrack_with_diagnostics<T>(fun: impl FnOnce() -> T) -> T {
|
||||
let _prev = Observer::take();
|
||||
fun()
|
||||
}
|
||||
|
||||
/// Converts a [`Subscriber`] to a type-erased [`AnySubscriber`].
|
||||
pub trait ToAnySubscriber {
|
||||
/// Converts this type to its type-erased equivalent.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use super::subscriber_traits::AsSubscriberSet;
|
||||
use crate::{
|
||||
graph::{ReactiveNode, SubscriberSet},
|
||||
traits::{DefinedAt, IsDisposed, Notify},
|
||||
traits::{DefinedAt, IsDisposed, Notify, Track},
|
||||
};
|
||||
use std::{
|
||||
fmt::{Debug, Formatter, Result},
|
||||
@@ -69,6 +69,22 @@ impl AsSubscriberSet for ArcTrigger {
|
||||
}
|
||||
}
|
||||
|
||||
impl Notify for Vec<ArcTrigger> {
|
||||
fn notify(&self) {
|
||||
for trigger in self {
|
||||
trigger.notify();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Track for Vec<ArcTrigger> {
|
||||
fn track(&self) {
|
||||
for trigger in self {
|
||||
trigger.track();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DefinedAt for ArcTrigger {
|
||||
#[inline(always)]
|
||||
fn defined_at(&self) -> Option<&'static Location<'static>> {
|
||||
|
||||
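The new `Notify` and `Track` impls above let a `Vec<ArcTrigger>` be driven as a single unit. A hedged usage sketch (assuming `ArcTrigger` is exported from `reactive_graph::signal` and the traits come from `reactive_graph::traits`):

```rust
use reactive_graph::{
    signal::ArcTrigger,
    traits::{Notify, Track},
};

fn demo() {
    // Collect several triggers so they can be tracked and notified together.
    let triggers: Vec<ArcTrigger> = (0..3).map(|_| ArcTrigger::new()).collect();

    // Inside a reactive context, this subscribes the current observer to every trigger.
    triggers.track();

    // One call wakes the subscribers of every trigger in the Vec.
    triggers.notify();
}
```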
@@ -3,7 +3,7 @@
|
||||
/// Types that abstract over signals with values that can be read.
|
||||
pub mod read {
|
||||
use crate::{
|
||||
computed::{ArcMemo, Memo, MemoInner},
|
||||
computed::{ArcMemo, Memo},
|
||||
graph::untrack,
|
||||
owner::{
|
||||
ArcStoredValue, ArenaItem, FromLocal, LocalStorage, Storage,
|
||||
@@ -1735,22 +1735,40 @@ pub mod read {
|
||||
}
|
||||
|
||||
/// The content of a [`Signal`] wrapper read guard, which varies depending on the signal type.
|
||||
#[derive(Debug)]
|
||||
pub enum SignalReadGuard<T: 'static, S: Storage<T>> {
|
||||
/// A read signal guard.
|
||||
Read(ReadGuard<T, Plain<T>>),
|
||||
#[allow(clippy::type_complexity)]
|
||||
/// A memo guard.
|
||||
Memo(ReadGuard<T, Mapped<Plain<MemoInner<T, S>>, T>>),
|
||||
Memo(
|
||||
ReadGuard<T, Mapped<Plain<Option<<S as Storage<T>>::Wrapped>>, T>>,
|
||||
),
|
||||
/// A pseudo-guard for derived signals: the content had to be cloned, so this is not a true guard, but it behaves like one.
|
||||
Owned(T),
|
||||
}
|
||||
|
||||
impl<T: 'static + std::fmt::Debug, S: Storage<T> + std::fmt::Debug>
|
||||
std::fmt::Debug for SignalReadGuard<T, S>
|
||||
where
|
||||
<S as Storage<T>>::Wrapped: std::fmt::Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Read(arg0) => f.debug_tuple("Read").field(arg0).finish(),
|
||||
Self::Memo(arg0) => f.debug_tuple("Memo").field(arg0).finish(),
|
||||
Self::Owned(arg0) => {
|
||||
f.debug_tuple("Owned").field(arg0).finish()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> Clone for SignalReadGuard<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
T: Clone,
|
||||
Plain<T>: Clone,
|
||||
Mapped<Plain<MemoInner<T, S>>, T>: Clone,
|
||||
Mapped<Plain<Option<<S as Storage<T>>::Wrapped>>, T>: Clone,
|
||||
{
|
||||
fn clone(&self) -> Self {
|
||||
match self {
|
||||
|
||||
@@ -444,6 +444,69 @@ fn unsync_derived_signal_and_memo() {
|
||||
assert_eq!(f.get_untracked(), 6);
|
||||
}
|
||||
|
||||
#[cfg(feature = "effects")]
|
||||
#[tokio::test]
|
||||
async fn test_memo_multiple_read_guards() {
|
||||
// regression test for https://github.com/leptos-rs/leptos/issues/3158
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
use imports::*;
|
||||
|
||||
_ = Executor::init_tokio();
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
task::LocalSet::new()
|
||||
.run_until(async {
|
||||
let memo = Memo::<i32>::new_with_compare(|_| 42, |_, _| true);
|
||||
|
||||
Effect::new(move |_| {
|
||||
let guard_a = memo.read();
|
||||
let guard_b = memo.read();
|
||||
assert_eq!(guard_a, 42);
|
||||
assert_eq!(guard_b, 42);
|
||||
});
|
||||
Executor::tick().await;
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(feature = "effects")]
|
||||
#[tokio::test]
|
||||
async fn test_memo_read_guard_held() {
|
||||
// regression test for https://github.com/leptos-rs/leptos/issues/3252
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
use imports::*;
|
||||
|
||||
_ = Executor::init_tokio();
|
||||
let owner = Owner::new();
|
||||
owner.set();
|
||||
task::LocalSet::new()
|
||||
.run_until(async {
|
||||
let source = RwSignal::new(0);
|
||||
|
||||
let directly_derived =
|
||||
Memo::new_with_compare(move |_| source.get(), |_, _| true);
|
||||
let indirect = Memo::new_with_compare(
|
||||
move |_| directly_derived.get(),
|
||||
|_, _| true,
|
||||
);
|
||||
|
||||
Effect::new(move |_| {
|
||||
let direct_value = directly_derived.read();
|
||||
let indirect_value = indirect.get();
|
||||
assert_eq!(direct_value, indirect_value);
|
||||
});
|
||||
|
||||
Executor::tick().await;
|
||||
source.set(1);
|
||||
Executor::tick().await;
|
||||
source.set(2);
|
||||
Executor::tick().await;
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn memo_updates_even_if_not_read_until_later() {
|
||||
#![allow(clippy::bool_assert_comparison)]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores"
|
||||
version = "0.1.3"
|
||||
version = "0.1.7"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -23,6 +23,7 @@ tokio = { version = "1.41", features = ["rt-multi-thread", "macros"] }
|
||||
tokio-test = { version = "0.4.4" }
|
||||
any_spawner = { workspace = true, features = ["futures-executor", "tokio"] }
|
||||
reactive_graph = { workspace = true, features = ["effects"] }
|
||||
leptos = { path = "../leptos", features = ["csr"] }
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(leptos_debuginfo)'] }
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::{
|
||||
path::{StorePath, StorePathSegment},
|
||||
ArcStore, AtIndex, AtKeyed, KeyMap, KeyedSubfield, Store, StoreField,
|
||||
StoreFieldTrigger, Subfield,
|
||||
ArcStore, AtIndex, AtKeyed, DerefedField, KeyMap, KeyedSubfield, Store,
|
||||
StoreField, StoreFieldTrigger, Subfield,
|
||||
};
|
||||
use reactive_graph::{
|
||||
owner::Storage,
|
||||
@@ -202,6 +202,43 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<Inner, T> From<DerefedField<Inner>> for ArcField<T>
|
||||
where
|
||||
Inner: Clone + StoreField + Send + Sync + 'static,
|
||||
Inner::Value: Deref<Target = T> + DerefMut,
|
||||
T: Sized + 'static,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: DerefedField<Inner>) -> Self {
|
||||
ArcField {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
path: value.path().into_iter().collect(),
|
||||
trigger: value.get_trigger(value.path().into_iter().collect()),
|
||||
get_trigger: Arc::new({
|
||||
let value = value.clone();
|
||||
move |path| value.get_trigger(path)
|
||||
}),
|
||||
read: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.reader().map(StoreFieldReader::new)
|
||||
}),
|
||||
write: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.writer().map(StoreFieldWriter::new)
|
||||
}),
|
||||
keys: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.keys()
|
||||
}),
|
||||
track_field: Arc::new({
|
||||
let value = value.clone();
|
||||
move || value.track_field()
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Inner, Prev> From<AtIndex<Inner, Prev>> for ArcField<Prev::Output>
|
||||
where
|
||||
AtIndex<Inner, Prev>: Clone,
|
||||
|
||||
165  reactive_stores/src/deref.rs  (new file)
@@ -0,0 +1,165 @@
|
||||
use crate::{
|
||||
path::{StorePath, StorePathSegment},
|
||||
store_field::StoreField,
|
||||
KeyMap, StoreFieldTrigger,
|
||||
};
|
||||
use reactive_graph::{
|
||||
signal::guards::{Mapped, MappedMut},
|
||||
traits::{
|
||||
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
|
||||
Write,
|
||||
},
|
||||
};
|
||||
use std::{
|
||||
ops::{Deref, DerefMut},
|
||||
panic::Location,
|
||||
};
|
||||
|
||||
/// Maps a store field containing a smart pointer to a store field of its dereferenced target type.
|
||||
pub trait DerefField
|
||||
where
|
||||
Self: StoreField,
|
||||
Self::Value: Deref + DerefMut,
|
||||
<Self::Value as Deref>::Target: Sized + 'static,
|
||||
{
|
||||
/// Returns a new store field with the value mapped to the target type of dereferencing this
|
||||
/// field
|
||||
///
|
||||
/// For example, if you have a store field with a `Box<T>`, `.deref_field()` will return a
|
||||
/// new store field containing a `T`.
|
||||
fn deref_field(self) -> DerefedField<Self>;
|
||||
}
|
||||
|
||||
impl<S> DerefField for S
|
||||
where
|
||||
S: StoreField,
|
||||
S::Value: Deref + DerefMut,
|
||||
<S::Value as Deref>::Target: Sized + 'static,
|
||||
{
|
||||
#[track_caller]
|
||||
fn deref_field(self) -> DerefedField<Self> {
|
||||
DerefedField {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
inner: self,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Wraps a store field containing a smart pointer, exposing it as a store field of that
/// smart pointer's dereferenced target type.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct DerefedField<S> {
|
||||
inner: S,
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: &'static Location<'static>,
|
||||
}
|
||||
|
||||
impl<S> StoreField for DerefedField<S>
|
||||
where
|
||||
S: StoreField,
|
||||
S::Value: Deref + DerefMut,
|
||||
<S::Value as Deref>::Target: Sized + 'static,
|
||||
{
|
||||
type Value = <S::Value as Deref>::Target;
|
||||
type Reader = Mapped<S::Reader, Self::Value>;
|
||||
type Writer = MappedMut<S::Writer, Self::Value>;
|
||||
|
||||
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
|
||||
self.inner.get_trigger(path)
|
||||
}
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner.path()
|
||||
}
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
let inner = self.inner.reader()?;
|
||||
Some(Mapped::new_with_guard(inner, |n| n.deref()))
|
||||
}
|
||||
fn writer(&self) -> Option<Self::Writer> {
|
||||
let inner = self.inner.writer()?;
|
||||
Some(MappedMut::new(inner, |n| n.deref(), |n| n.deref_mut()))
|
||||
}
|
||||
#[inline(always)]
|
||||
fn keys(&self) -> Option<KeyMap> {
|
||||
self.inner.keys()
|
||||
}
|
||||
}
|
||||
|
||||
impl<S> DefinedAt for DerefedField<S>
|
||||
where
|
||||
S: StoreField,
|
||||
S::Value: Deref + DerefMut,
|
||||
<S::Value as Deref>::Target: Sized + 'static,
|
||||
{
|
||||
fn defined_at(&self) -> Option<&'static Location<'static>> {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
Some(self.defined_at)
|
||||
}
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<S> IsDisposed for DerefedField<S>
|
||||
where
|
||||
S: IsDisposed,
|
||||
{
|
||||
fn is_disposed(&self) -> bool {
|
||||
self.inner.is_disposed()
|
||||
}
|
||||
}
|
||||
impl<S> Notify for DerefedField<S>
|
||||
where
|
||||
S: StoreField,
|
||||
S::Value: Deref + DerefMut,
|
||||
<S::Value as Deref>::Target: Sized + 'static,
|
||||
{
|
||||
fn notify(&self) {
|
||||
let trigger = self.get_trigger(self.path().into_iter().collect());
|
||||
trigger.this.notify();
|
||||
trigger.children.notify();
|
||||
}
|
||||
}
|
||||
impl<S> Track for DerefedField<S>
|
||||
where
|
||||
S: StoreField,
|
||||
S::Value: Deref + DerefMut,
|
||||
<S::Value as Deref>::Target: Sized + 'static,
|
||||
{
|
||||
fn track(&self) {
|
||||
self.track_field();
|
||||
}
|
||||
}
|
||||
impl<S> ReadUntracked for DerefedField<S>
|
||||
where
|
||||
S: StoreField,
|
||||
S::Value: Deref + DerefMut,
|
||||
<S::Value as Deref>::Target: Sized + 'static,
|
||||
{
|
||||
type Value = <Self as StoreField>::Reader;
|
||||
fn try_read_untracked(&self) -> Option<Self::Value> {
|
||||
self.reader()
|
||||
}
|
||||
}
|
||||
impl<S> Write for DerefedField<S>
|
||||
where
|
||||
S: StoreField,
|
||||
S::Value: Deref + DerefMut,
|
||||
<S::Value as Deref>::Target: Sized + 'static,
|
||||
{
|
||||
type Value = <S::Value as Deref>::Target;
|
||||
|
||||
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
|
||||
self.writer()
|
||||
}
|
||||
fn try_write_untracked(
|
||||
&self,
|
||||
) -> Option<impl DerefMut<Target = Self::Value>> {
|
||||
self.writer().map(|mut writer| {
|
||||
writer.untrack();
|
||||
writer
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
use crate::{
|
||||
arc_field::{StoreFieldReader, StoreFieldWriter},
|
||||
path::{StorePath, StorePathSegment},
|
||||
ArcField, ArcStore, AtIndex, AtKeyed, KeyMap, KeyedSubfield, Store,
|
||||
StoreField, StoreFieldTrigger, Subfield,
|
||||
ArcField, ArcStore, AtIndex, AtKeyed, DerefedField, KeyMap, KeyedSubfield,
|
||||
Store, StoreField, StoreFieldTrigger, Subfield,
|
||||
};
|
||||
use reactive_graph::{
|
||||
owner::{ArenaItem, Storage, SyncStorage},
|
||||
@@ -10,12 +10,11 @@ use reactive_graph::{
|
||||
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
|
||||
Write,
|
||||
},
|
||||
unwrap_signal,
|
||||
};
|
||||
use std::{
|
||||
fmt::Debug,
|
||||
hash::Hash,
|
||||
ops::{DerefMut, IndexMut},
|
||||
ops::{Deref, DerefMut, IndexMut},
|
||||
panic::Location,
|
||||
};
|
||||
|
||||
@@ -44,14 +43,14 @@ where
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|inner| inner.get_trigger(path))
|
||||
.unwrap_or_else(unwrap_signal!(self))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|inner| inner.path().into_iter().collect::<Vec<_>>())
|
||||
.unwrap_or_else(unwrap_signal!(self))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn reader(&self) -> Option<Self::Reader> {
|
||||
@@ -82,6 +81,21 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<ArcField<T>> for Field<T, S>
|
||||
where
|
||||
T: 'static,
|
||||
S: Storage<ArcField<T>>,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: ArcField<T>) -> Self {
|
||||
Field {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
inner: ArenaItem::new_with_storage(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> From<ArcStore<T>> for Field<T, S>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
@@ -115,6 +129,22 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<Inner, T> From<DerefedField<Inner>> for Field<T>
|
||||
where
|
||||
Inner: Clone + StoreField + Send + Sync + 'static,
|
||||
Inner::Value: Deref<Target = T> + DerefMut,
|
||||
T: Sized + 'static,
|
||||
{
|
||||
#[track_caller]
|
||||
fn from(value: DerefedField<Inner>) -> Self {
|
||||
Field {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
defined_at: Location::caller(),
|
||||
inner: ArenaItem::new_with_storage(value.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Inner, Prev, S> From<AtIndex<Inner, Prev>> for Field<Prev::Output, S>
|
||||
where
|
||||
AtIndex<Inner, Prev>: Clone,
|
||||
|
||||
@@ -148,11 +148,8 @@ where
|
||||
{
|
||||
fn latest_keys(&self) -> Vec<K> {
|
||||
self.reader()
|
||||
.expect("trying to update keys")
|
||||
.deref()
|
||||
.into_iter()
|
||||
.map(|n| (self.key_fn)(n))
|
||||
.collect()
|
||||
.map(|r| r.deref().into_iter().map(|n| (self.key_fn)(n)).collect())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -483,8 +480,7 @@ where
|
||||
|| self.inner.latest_keys(),
|
||||
)
|
||||
.flatten()
|
||||
.map(|(_, idx)| idx)
|
||||
.expect("reading from a keyed field that has not yet been created");
|
||||
.map(|(_, idx)| idx)?;
|
||||
|
||||
Some(WriteGuard::new(
|
||||
trigger.children,
|
||||
@@ -654,13 +650,15 @@ where
|
||||
self.track_field();
|
||||
|
||||
// get the current length of the field by accessing slice
|
||||
let reader = self
|
||||
.reader()
|
||||
.expect("creating iterator from unavailable store field");
|
||||
let reader = self.reader();
|
||||
|
||||
let keys = reader
|
||||
.into_iter()
|
||||
.map(|item| (self.key_fn)(item))
|
||||
.collect::<VecDeque<_>>();
|
||||
.map(|r| {
|
||||
r.into_iter()
|
||||
.map(|item| (self.key_fn)(item))
|
||||
.collect::<VecDeque<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
// return the iterator
|
||||
StoreFieldKeyedIter { inner: self, keys }
|
||||
|
||||
@@ -67,7 +67,141 @@
|
||||
//! completed: false,
|
||||
//! });
|
||||
//! ```
|
||||
//! ### Generated traits
|
||||
//! The [`Store`](macro@Store) macro generates traits for each `struct` to which it is applied. When working
|
||||
//! within a single file or module, this is not an issue. However, when working with multiple modules
|
||||
//! or files, one needs to `use` the generated traits. The general pattern is that for each `struct`
|
||||
//! named `Foo`, the macro generates a trait named `FooStoreFields`. For example:
|
||||
//! ```rust
|
||||
//! pub mod foo {
|
||||
//! use reactive_stores::Store;
|
||||
|
||||
//! #[derive(Store)]
|
||||
//! pub struct Foo {
|
||||
//! field: i32,
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! pub mod user {
|
||||
//! use leptos::prelude::*;
|
||||
//! use reactive_stores::Field;
|
||||
//! // Using the generated FooStoreFields trait here.
|
||||
//! use crate::foo::{ Foo, FooStoreFields };
|
||||
//!
|
||||
//! #[component]
|
||||
//! pub fn UseFoo(foo: Field<Foo>) {
|
||||
//! // Without FooStoreFields, foo.field() would fail to compile.
|
||||
//! println!("field: {}", foo.field().read());
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! # fn main() {
|
||||
//! # }
|
||||
//! ```
|
||||
//!
|
||||
//! ### Additional field types
|
||||
//!
|
||||
//! Most of the time, your structs will have fields as in the example above: the struct is comprised
|
||||
//! of primitive types, builtin types like [String], or other structs that implement [Store](struct@Store) or [Field].
|
||||
//! However, there are some special cases that require some additional understanding.
|
||||
//!
|
||||
//! #### Option
|
||||
//! [`Option<T>`](std::option::Option) behaves pretty much as you would expect, utilizing [.is_some()](std::option::Option::is_some)
|
||||
//! and [.is_none()](std::option::Option::is_none) to check the value, and the [.unwrap()](OptionStoreExt::unwrap) method to access the inner value. The [OptionStoreExt]
|
||||
//! trait is required to use the [.unwrap()](OptionStoreExt::unwrap) method. Here is a quick example:
|
||||
//! ```rust
|
||||
//! // Including the trait OptionStoreExt here is required to use unwrap()
|
||||
//! use reactive_stores::{OptionStoreExt, Store};
|
||||
//! use reactive_graph::traits::{Get, Read};
|
||||
//!
|
||||
//! #[derive(Store)]
|
||||
//! struct StructWithOption {
|
||||
//! opt_field: Option<i32>,
|
||||
//! }
|
||||
//!
|
||||
//! fn describe(store: &Store<StructWithOption>) -> String {
|
||||
//! if store.opt_field().read().is_some() {
|
||||
//! // Note here we need to use OptionStoreExt or unwrap() would not compile
|
||||
//! format!("store has a value {}", store.opt_field().unwrap().get())
|
||||
//! } else {
|
||||
//! format!("store has no value")
|
||||
//! }
|
||||
//! }
|
||||
//! let none_store = Store::new(StructWithOption { opt_field: None });
|
||||
//! let some_store = Store::new(StructWithOption { opt_field: Some(42)});
|
||||
//!
|
||||
//! assert_eq!(describe(&none_store), "store has no value");
|
||||
//! assert_eq!(describe(&some_store), "store has a value 42");
|
||||
//! ```
|
||||
//! #### Vec
|
||||
//! [`Vec<T>`](std::vec::Vec) requires some special treatment when trying to access
|
||||
//! elements of the vector directly. Use the [StoreFieldIterator::at_unkeyed()] method to
|
||||
//! access a particular value in a [struct@Store] or [Field] for a [std::vec::Vec]. For example:
|
||||
//! ```rust
|
||||
//! # use reactive_stores::Store;
|
||||
//! // Needed to use at_unkeyed() on Vec
|
||||
//! use reactive_stores::StoreFieldIter;
|
||||
//! use crate::reactive_stores::StoreFieldIterator;
|
||||
//! use reactive_graph::traits::Read;
|
||||
//! use reactive_graph::traits::Get;
|
||||
//!
|
||||
//! #[derive(Store)]
|
||||
//! struct StructWithVec {
|
||||
//! vec_field: Vec<i32>,
|
||||
//! }
|
||||
//!
|
||||
//! let store = Store::new(StructWithVec { vec_field: vec![1, 2, 3] });
|
||||
//!
|
||||
//! assert_eq!(store.vec_field().at_unkeyed(0).get(), 1);
|
||||
//! assert_eq!(store.vec_field().at_unkeyed(1).get(), 2);
|
||||
//! assert_eq!(store.vec_field().at_unkeyed(2).get(), 3);
|
||||
//! ```
|
||||
//! #### Enum
|
||||
//! Enumerated types behave a bit differently, as the [`Store`](macro@Store) macro generates accessor traits rather than alternate
//! enumerated structures. Each variant of an `Enum` generates methods to access it in the store: a
//! method named after the variant returns a boolean indicating whether the `Enum` is currently that variant, plus accessor
|
||||
//! methods for anonymous fields of that variant. For example:
|
||||
//! ```rust
|
||||
//! use reactive_stores::Store;
|
||||
//! use reactive_graph::traits::{Read, Get};
|
||||
//!
|
||||
//! #[derive(Store)]
|
||||
//! enum Choices {
|
||||
//! First,
|
||||
//! Second(String),
|
||||
//! }
|
||||
//!
|
||||
//! let choice_one = Store::new(Choices::First);
|
||||
//! let choice_two = Store::new(Choices::Second("hello".to_string()));
|
||||
//!
|
||||
//! assert!(choice_one.first());
|
||||
//! assert!(!choice_one.second());
|
||||
//! // Note the use of the accessor method here .second_0()
|
||||
//! assert_eq!(choice_two.second_0().unwrap().get(), "hello");
|
||||
//! ```
|
||||
//! #### Box
|
||||
//! [`Box<T>`](std::boxed::Box) also requires some special treatment in how you dereference elements of the Box, especially
|
||||
//! when trying to build a recursive data structure. [DerefField](trait@DerefField) provides a [.deref_field()](DerefField::deref_field) method to access
|
||||
//! the inner value. For example:
|
||||
//! ```rust
|
||||
//! // Note here we need to use DerefField to use deref_field() and OptionStoreExt to use unwrap()
|
||||
//! use reactive_stores::{Store, DerefField, OptionStoreExt};
|
||||
//! use reactive_graph::traits::{ Read, Get };
|
||||
//!
|
||||
//! #[derive(Store)]
|
||||
//! struct List {
|
||||
//! value: i32,
|
||||
//! #[store]
|
||||
//! child: Option<Box<List>>,
|
||||
//! }
|
||||
//!
|
||||
//! let tree = Store::new(List {
|
||||
//! value: 1,
|
||||
//! child: Some(Box::new(List { value: 2, child: None })),
|
||||
//! });
|
||||
//!
|
||||
//! assert_eq!(tree.child().unwrap().deref_field().value().get(), 2);
|
||||
//! ```
|
||||
//! ### Implementation Notes
|
||||
//!
|
||||
//! Every struct field can be understood as an index. For example, given the following definition
|
||||
@@ -131,6 +265,7 @@ use std::{
|
||||
};
|
||||
|
||||
mod arc_field;
|
||||
mod deref;
|
||||
mod field;
|
||||
mod iter;
|
||||
mod keyed;
|
||||
@@ -141,6 +276,7 @@ mod store_field;
|
||||
mod subfield;
|
||||
|
||||
pub use arc_field::ArcField;
|
||||
pub use deref::*;
|
||||
pub use field::Field;
|
||||
pub use iter::*;
|
||||
pub use keyed::*;
|
||||
@@ -621,6 +757,7 @@ mod tests {
|
||||
use crate::{self as reactive_stores, Patch, Store, StoreFieldIterator};
|
||||
use reactive_graph::{
|
||||
effect::Effect,
|
||||
owner::StoredValue,
|
||||
traits::{Read, ReadUntracked, Set, Update, Write},
|
||||
};
|
||||
use std::sync::{
|
||||
@@ -951,4 +1088,118 @@ mod tests {
|
||||
pub struct StructWithOption {
|
||||
opt_field: Option<Todo>,
|
||||
}
|
||||
|
||||
// regression test for https://github.com/leptos-rs/leptos/issues/3523
|
||||
#[tokio::test]
|
||||
async fn notifying_all_descendants() {
|
||||
use reactive_graph::traits::*;
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Foo {
|
||||
id: i32,
|
||||
bar: Bar,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Bar {
|
||||
bar_signature: i32,
|
||||
baz: Baz,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baz {
|
||||
more_data: i32,
|
||||
baw: Baw,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch, Default)]
|
||||
struct Baw {
|
||||
more_data: i32,
|
||||
end: i32,
|
||||
}
|
||||
|
||||
let store = Store::new(Foo {
|
||||
id: 42,
|
||||
bar: Bar {
|
||||
bar_signature: 69,
|
||||
baz: Baz {
|
||||
more_data: 9999,
|
||||
baw: Baw {
|
||||
more_data: 22,
|
||||
end: 1112,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
let store_runs = StoredValue::new(0);
|
||||
let id_runs = StoredValue::new(0);
|
||||
let bar_runs = StoredValue::new(0);
|
||||
let bar_signature_runs = StoredValue::new(0);
|
||||
let bar_baz_runs = StoredValue::new(0);
|
||||
let more_data_runs = StoredValue::new(0);
|
||||
let baz_baw_end_runs = StoredValue::new(0);
|
||||
|
||||
Effect::new_sync(move |_| {
|
||||
println!("foo: {:?}", store.get());
|
||||
*store_runs.write_value() += 1;
|
||||
});
|
||||
|
||||
Effect::new_sync(move |_| {
|
||||
println!("foo.id: {:?}", store.id().get());
|
||||
*id_runs.write_value() += 1;
|
||||
});
|
||||
|
||||
Effect::new_sync(move |_| {
|
||||
println!("foo.bar: {:?}", store.bar().get());
|
||||
*bar_runs.write_value() += 1;
|
||||
});
|
||||
|
||||
Effect::new_sync(move |_| {
|
||||
println!(
|
||||
"foo.bar.bar_signature: {:?}",
|
||||
store.bar().bar_signature().get()
|
||||
);
|
||||
*bar_signature_runs.write_value() += 1;
|
||||
});
|
||||
|
||||
Effect::new_sync(move |_| {
|
||||
println!("foo.bar.baz: {:?}", store.bar().baz().get());
|
||||
*bar_baz_runs.write_value() += 1;
|
||||
});
|
||||
|
||||
Effect::new_sync(move |_| {
|
||||
println!(
|
||||
"foo.bar.baz.more_data: {:?}",
|
||||
store.bar().baz().more_data().get()
|
||||
);
|
||||
*more_data_runs.write_value() += 1;
|
||||
});
|
||||
|
||||
Effect::new_sync(move |_| {
|
||||
println!(
|
||||
"foo.bar.baz.baw.end: {:?}",
|
||||
store.bar().baz().baw().end().get()
|
||||
);
|
||||
*baz_baw_end_runs.write_value() += 1;
|
||||
});
|
||||
|
||||
println!("[INITIAL EFFECT RUN]");
|
||||
tick().await;
|
||||
println!("\n\n[SETTING STORE]");
|
||||
store.set(Default::default());
|
||||
tick().await;
|
||||
println!("\n\n[SETTING STORE.BAR.BAZ]");
|
||||
store.bar().baz().set(Default::default());
|
||||
tick().await;
|
||||
|
||||
assert_eq!(store_runs.get_value(), 3);
|
||||
assert_eq!(id_runs.get_value(), 2);
|
||||
assert_eq!(bar_runs.get_value(), 3);
|
||||
assert_eq!(bar_signature_runs.get_value(), 2);
|
||||
assert_eq!(bar_baz_runs.get_value(), 3);
|
||||
assert_eq!(more_data_runs.get_value(), 3);
|
||||
assert_eq!(baz_baw_end_runs.get_value(), 3);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -77,11 +77,12 @@ where
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::{self as reactive_stores, Store};
|
||||
use crate::{self as reactive_stores, Patch as _, Store};
|
||||
use reactive_graph::{
|
||||
effect::Effect,
|
||||
traits::{Get, Read, ReadUntracked, Set, Write},
|
||||
};
|
||||
use reactive_stores_macro::Patch;
|
||||
use std::sync::{
|
||||
atomic::{AtomicUsize, Ordering},
|
||||
Arc,
|
||||
@@ -237,4 +238,115 @@ mod tests {
|
||||
assert_eq!(parent_count.load(Ordering::Relaxed), 3);
|
||||
assert_eq!(inner_count.load(Ordering::Relaxed), 3);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn patch() {
|
||||
use crate::OptionStoreExt;
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch)]
|
||||
struct Outer {
|
||||
inner: Option<Inner>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Store, Patch)]
|
||||
struct Inner {
|
||||
first: String,
|
||||
second: String,
|
||||
}
|
||||
|
||||
let store = Store::new(Outer {
|
||||
inner: Some(Inner {
|
||||
first: "A".to_owned(),
|
||||
second: "B".to_owned(),
|
||||
}),
|
||||
});
|
||||
|
||||
_ = any_spawner::Executor::init_tokio();
|
||||
|
||||
let parent_count = Arc::new(AtomicUsize::new(0));
|
||||
let inner_first_count = Arc::new(AtomicUsize::new(0));
|
||||
let inner_second_count = Arc::new(AtomicUsize::new(0));
|
||||
|
||||
Effect::new_sync({
|
||||
let parent_count = Arc::clone(&parent_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("parent: first run");
|
||||
} else {
|
||||
println!("parent: next run");
|
||||
}
|
||||
|
||||
println!(" value = {:?}", store.inner().get());
|
||||
parent_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
Effect::new_sync({
|
||||
let inner_first_count = Arc::clone(&inner_first_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("inner_first: first run");
|
||||
} else {
|
||||
println!("inner_first: next run");
|
||||
}
|
||||
|
||||
println!(
|
||||
" value = {:?}",
|
||||
store.inner().map(|inner| inner.first().get())
|
||||
);
|
||||
inner_first_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
Effect::new_sync({
|
||||
let inner_second_count = Arc::clone(&inner_second_count);
|
||||
move |prev: Option<()>| {
|
||||
if prev.is_none() {
|
||||
println!("inner_second: first run");
|
||||
} else {
|
||||
println!("inner_second: next run");
|
||||
}
|
||||
|
||||
println!(
|
||||
" value = {:?}",
|
||||
store.inner().map(|inner| inner.second().get())
|
||||
);
|
||||
inner_second_count.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
});
|
||||
|
||||
tick().await;
|
||||
assert_eq!(parent_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(inner_first_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(inner_second_count.load(Ordering::Relaxed), 1);
|
||||
|
||||
store.patch(Outer {
|
||||
inner: Some(Inner {
|
||||
first: "A".to_string(),
|
||||
second: "C".to_string(),
|
||||
}),
|
||||
});
|
||||
|
||||
tick().await;
|
||||
assert_eq!(parent_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(inner_first_count.load(Ordering::Relaxed), 1);
|
||||
assert_eq!(inner_second_count.load(Ordering::Relaxed), 2);
|
||||
|
||||
store.patch(Outer { inner: None });
|
||||
|
||||
tick().await;
|
||||
assert_eq!(parent_count.load(Ordering::Relaxed), 2);
|
||||
assert_eq!(inner_first_count.load(Ordering::Relaxed), 2);
|
||||
assert_eq!(inner_second_count.load(Ordering::Relaxed), 3);
|
||||
|
||||
store.patch(Outer {
|
||||
inner: Some(Inner {
|
||||
first: "A".to_string(),
|
||||
second: "B".to_string(),
|
||||
}),
|
||||
});
|
||||
|
||||
tick().await;
|
||||
assert_eq!(parent_count.load(Ordering::Relaxed), 3);
|
||||
assert_eq!(inner_first_count.load(Ordering::Relaxed), 3);
|
||||
assert_eq!(inner_second_count.load(Ordering::Relaxed), 4);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -114,6 +114,35 @@ patch_primitives! {
|
||||
NonZeroUsize
|
||||
}
|
||||
|
||||
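// Patching an `Option` notifies at this path whenever it switches between `Some` and `None`;
// when both the old and new values are `Some`, the inner value is patched recursively under
// child index 0, so only the fields that actually changed are notified.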
impl<T> PatchField for Option<T>
|
||||
where
|
||||
T: PatchField,
|
||||
{
|
||||
fn patch_field(
|
||||
&mut self,
|
||||
new: Self,
|
||||
path: &StorePath,
|
||||
notify: &mut dyn FnMut(&StorePath),
|
||||
) {
|
||||
match (self, new) {
|
||||
(None, None) => {}
|
||||
(old @ Some(_), None) => {
|
||||
old.take();
|
||||
notify(path);
|
||||
}
|
||||
(old @ None, new @ Some(_)) => {
|
||||
*old = new;
|
||||
notify(path);
|
||||
}
|
||||
(Some(old), Some(new)) => {
|
||||
let mut new_path = path.to_owned();
|
||||
new_path.push(0);
|
||||
old.patch_field(new, &new_path, notify);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PatchField for Vec<T>
|
||||
where
|
||||
T: PatchField,
|
||||
|
||||
@@ -34,6 +34,16 @@ impl StorePath {
|
||||
*last = segment.into();
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the path contains no elements.
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
/// Returns the number of elements in the path.
|
||||
pub fn len(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
}
|
||||
|
||||
/// One segment of a [`StorePath`].
|
||||
|
||||
@@ -9,8 +9,7 @@ use reactive_graph::{
|
||||
guards::{Plain, UntrackedWriteGuard, WriteGuard},
|
||||
ArcTrigger,
|
||||
},
|
||||
traits::{DefinedAt, Track, UntrackableGuard},
|
||||
unwrap_signal,
|
||||
traits::{Track, UntrackableGuard},
|
||||
};
|
||||
use std::{iter, ops::Deref, sync::Arc};
|
||||
|
||||
@@ -105,7 +104,7 @@ where
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|n| n.get_trigger(path))
|
||||
.unwrap_or_else(unwrap_signal!(self))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
@@ -113,7 +112,7 @@ where
|
||||
self.inner
|
||||
.try_get_value()
|
||||
.map(|n| n.path().into_iter().collect::<Vec<_>>())
|
||||
.unwrap_or_else(unwrap_signal!(self))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
|
||||
@@ -74,7 +74,7 @@ where
|
||||
{
|
||||
type Value = T;
|
||||
type Reader = Mapped<Inner::Reader, T>;
|
||||
type Writer = MappedMut<WriteGuard<ArcTrigger, Inner::Writer>, T>;
|
||||
type Writer = MappedMut<WriteGuard<Vec<ArcTrigger>, Inner::Writer>, T>;
|
||||
|
||||
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
|
||||
self.inner
|
||||
@@ -94,8 +94,24 @@ where
|
||||
|
||||
fn writer(&self) -> Option<Self::Writer> {
|
||||
let trigger = self.get_trigger(self.path().into_iter().collect());
|
||||
let inner = WriteGuard::new(trigger.children, self.inner.writer()?);
|
||||
Some(MappedMut::new(inner, self.read, self.write))
|
||||
let mut parent = self.inner.writer()?;
|
||||
parent.untrack();
|
||||
|
||||
let mut full_path = self.path().into_iter().collect::<StorePath>();
|
||||
full_path.pop();
|
||||
let mut triggers = Vec::with_capacity(full_path.len());
|
||||
triggers.push(trigger.this.clone());
|
||||
loop {
|
||||
let inner = self.get_trigger(full_path.clone());
|
||||
triggers.push(inner.children.clone());
|
||||
if full_path.is_empty() {
|
||||
break;
|
||||
}
|
||||
full_path.pop();
|
||||
}
|
||||
let guard = WriteGuard::new(triggers, parent);
|
||||
|
||||
Some(MappedMut::new(guard, self.read, self.write))
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
@@ -105,10 +121,19 @@ where
|
||||
|
||||
#[track_caller]
|
||||
fn track_field(&self) {
|
||||
let inner = self
|
||||
.inner
|
||||
.get_trigger(self.inner.path().into_iter().collect());
|
||||
inner.this.track();
|
||||
let mut full_path = self.path().into_iter().collect::<StorePath>();
|
||||
// tracks `this` for all ancestors: i.e., it will track any change that is made
|
||||
// directly to one of its ancestors, but not a change made to a *child* of an ancestor
|
||||
// (which would end up with every subfield tracking its own siblings, because they are
|
||||
// children of its parent)
|
||||
loop {
|
||||
let inner = self.get_trigger(full_path.clone());
|
||||
inner.this.track();
|
||||
if full_path.is_empty() {
|
||||
break;
|
||||
}
|
||||
full_path.pop();
|
||||
}
|
||||
let trigger = self.get_trigger(self.path().into_iter().collect());
|
||||
trigger.this.track();
|
||||
trigger.children.track();
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_stores_macro"
|
||||
version = "0.1.0"
|
||||
version = "0.1.7"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -87,15 +87,15 @@ impl Parse for SubfieldMode {
|
||||
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
|
||||
let mode: Ident = input.parse()?;
|
||||
if mode == "key" {
|
||||
let _col: Token!(:) = input.parse()?;
|
||||
let _col: Token![:] = input.parse()?;
|
||||
let ty: Type = input.parse()?;
|
||||
let _eq: Token!(=) = input.parse()?;
|
||||
let ident: ExprClosure = input.parse()?;
|
||||
Ok(SubfieldMode::Keyed(ident, ty))
|
||||
let _eq: Token![=] = input.parse()?;
|
||||
let closure: ExprClosure = input.parse()?;
|
||||
Ok(SubfieldMode::Keyed(closure, ty))
|
||||
} else if mode == "skip" {
|
||||
Ok(SubfieldMode::Skip)
|
||||
} else {
|
||||
Err(input.error("expected `key = <ident>: <Type>`"))
|
||||
Err(input.error("expected `key: <Type> = <closure>`"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_router_macro"
|
||||
version = "0.7.4"
|
||||
version = "0.7.7"
|
||||
authors = ["Greg Johnston", "Ben Wishovich"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -16,9 +16,11 @@ proc-macro = true
|
||||
proc-macro-error2 = { version = "2.0", default-features = false }
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
||||
syn = { version = "2.0", features = ["full"] }
|
||||
|
||||
[dev-dependencies]
|
||||
leptos_router = { path = "../router" }
|
||||
leptos_macro = { path = "../leptos_macro" }
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(leptos_debuginfo)'] }
|
||||
|
||||
@@ -1,16 +1,21 @@
|
||||
//! A macro to make path definitions easier with [`leptos_router`].
|
||||
//!
|
||||
//! [`leptos_router`]: https://docs.rs/leptos_router/latest/leptos_router/components/fn.Route.html
|
||||
|
||||
#![deny(missing_docs)]
|
||||
|
||||
use proc_macro::{TokenStream, TokenTree};
|
||||
use proc_macro2::Span;
|
||||
use proc_macro_error2::abort;
|
||||
use proc_macro_error2::{abort, proc_macro_error};
|
||||
use quote::{quote, ToTokens};
|
||||
use syn::{
|
||||
spanned::Spanned, Block, Ident, ImplItem, ItemImpl, Path, Type, TypePath,
|
||||
};
|
||||
|
||||
const RFC3986_UNRESERVED: [char; 4] = ['-', '.', '_', '~'];
|
||||
const RFC3986_PCHAR_OTHER: [char; 1] = ['@'];
|
||||
|
||||
/// Constructs a path for use in a [`leptos_router::Route`] definition.
|
||||
/// Constructs a path for use in a [`Route`] definition.
|
||||
///
|
||||
/// Note that this is an optional convenience. Manually defining route segments
|
||||
/// is equivalent.
|
||||
@@ -33,6 +38,7 @@ const RFC3986_PCHAR_OTHER: [char; 1] = ['@'];
|
||||
///
|
||||
/// assert_eq!(path, output);
|
||||
/// ```
|
||||
/// [`Route`]: https://docs.rs/leptos_router/latest/leptos_router/components/fn.Route.html
|
||||
#[proc_macro_error2::proc_macro_error]
|
||||
#[proc_macro]
|
||||
pub fn path(tokens: TokenStream) -> TokenStream {
|
||||
@@ -187,3 +193,79 @@ impl ToTokens for Segments {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// When added to an [`impl LazyRoute`] implementation block, this will automatically
|
||||
/// add a [`lazy`] annotation to the `view` method, which will cause the code for the view
|
||||
/// to lazy-load concurrently with the `data` being loaded for the route.
|
||||
///
|
||||
/// [`impl LazyRoute`]: https://docs.rs/leptos_router/latest/leptos_router/trait.LazyRoute.html
|
||||
/// [`lazy`]: https://docs.rs/leptos_macro/latest/leptos_macro/macro.lazy.html
|
||||
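/// A hedged usage sketch (the import paths and the exact `LazyRoute` trait surface shown
/// here are assumptions, not verbatim from the trait definition):
///
/// ```rust,ignore
/// use leptos::prelude::*;
/// use leptos_router::{lazy_route, LazyRoute};
///
/// struct AboutPage {
///     // data loaded for this route
/// }
///
/// #[lazy_route]
/// impl LazyRoute for AboutPage {
///     // ... `data` constructor elided ...
///     async fn view(self) -> AnyView {
///         view! { <p>"Loaded lazily"</p> }.into_any()
///     }
/// }
/// ```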
#[proc_macro_attribute]
|
||||
#[proc_macro_error]
|
||||
pub fn lazy_route(
|
||||
args: proc_macro::TokenStream,
|
||||
s: TokenStream,
|
||||
) -> TokenStream {
|
||||
lazy_route_impl(args, s)
|
||||
}
|
||||
|
||||
fn lazy_route_impl(
|
||||
_args: proc_macro::TokenStream,
|
||||
s: TokenStream,
|
||||
) -> TokenStream {
|
||||
let mut im = syn::parse::<ItemImpl>(s).unwrap_or_else(|e| {
|
||||
abort!(e.span(), "`lazy_route` can only be used on an `impl` block")
|
||||
});
|
||||
if im.trait_.is_none() {
|
||||
abort!(
|
||||
im.span(),
|
||||
"`lazy_route` can only be used on an `impl LazyRoute for ...` \
|
||||
block"
|
||||
)
|
||||
}
|
||||
|
||||
let self_ty = im.self_ty.clone();
|
||||
let ty_name_to_snake = match &*self_ty {
|
||||
Type::Path(TypePath {
|
||||
path: Path { segments, .. },
|
||||
..
|
||||
}) => segments.last().unwrap().ident.to_string(),
|
||||
_ => abort!(self_ty.span(), "only path types are supported"),
|
||||
};
|
||||
let lazy_view_ident = Ident::new(&ty_name_to_snake, im.self_ty.span());
|
||||
|
||||
let item = im.items.iter_mut().find_map(|item| match item {
|
||||
ImplItem::Fn(inner) => {
|
||||
if inner.sig.ident.to_string().as_str() == "view" {
|
||||
Some(inner)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
});
|
||||
match item {
|
||||
None => abort!(im.span(), "must contain a fn called `view`"),
|
||||
Some(fun) => {
|
||||
let body = fun.block.clone();
|
||||
let new_block = quote! {{
|
||||
#[cfg_attr(feature = "split", wasm_split::wasm_split(#lazy_view_ident))]
|
||||
async fn view(this: #self_ty) -> ::leptos::prelude::AnyView {
|
||||
#body
|
||||
}
|
||||
|
||||
view(self).await
|
||||
}};
|
||||
let block =
|
||||
syn::parse::<Block>(new_block.into()).unwrap_or_else(|e| {
|
||||
abort!(
|
||||
e.span(),
|
||||
"`lazy_route` can only be used on an `impl` block"
|
||||
)
|
||||
});
|
||||
fun.block = block;
|
||||
}
|
||||
}
|
||||
|
||||
quote! { #im }.into()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "tachys"
|
||||
version = "0.1.4"
|
||||
version = "0.1.7"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
@@ -20,6 +20,8 @@ reactive_graph = { workspace = true, optional = true }
|
||||
reactive_stores = { workspace = true, optional = true }
|
||||
slotmap = { version = "1.0", optional = true }
|
||||
oco_ref = { workspace = true, optional = true }
|
||||
async-trait = "0.1.81"
|
||||
dyn-clone = "1.0.17"
|
||||
once_cell = "1.20"
|
||||
paste = "1.0"
|
||||
wasm-bindgen = "0.2.97"
|
||||
@@ -198,4 +200,7 @@ skip_feature_sets = [
|
||||
]
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = { level = "warn", check-cfg = ['cfg(leptos_debuginfo)'] }
|
||||
unexpected_cfgs = { level = "warn", check-cfg = [
|
||||
'cfg(leptos_debuginfo)',
|
||||
'cfg(erase_components)',
|
||||
] }
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use super::{Attribute, NextAttribute};
|
||||
use dyn_clone::DynClone;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
fmt::Debug,
|
||||
@@ -6,31 +7,64 @@ use std::{
|
||||
#[cfg(feature = "ssr")]
|
||||
use std::{future::Future, pin::Pin};
|
||||
|
||||
trait DynAttr: DynClone + Any + Send + 'static {
|
||||
fn into_any(self: Box<Self>) -> Box<dyn Any>;
|
||||
#[cfg(feature = "ssr")]
|
||||
fn as_any_mut(&mut self) -> &mut dyn Any;
|
||||
}
|
||||
|
||||
dyn_clone::clone_trait_object!(DynAttr);
|
||||
|
||||
impl<T: Clone> DynAttr for T
|
||||
where
|
||||
T: Attribute + 'static,
|
||||
{
|
||||
fn into_any(self: Box<Self>) -> Box<dyn Any> {
|
||||
self
|
||||
}
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
fn as_any_mut(&mut self) -> &mut dyn Any {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// A type-erased container for any [`Attribute`].
|
||||
#[derive(Clone)]
|
||||
pub struct AnyAttribute {
|
||||
type_id: TypeId,
|
||||
html_len: usize,
|
||||
value: Box<dyn Any + Send>,
|
||||
value: Box<dyn DynAttr>,
|
||||
#[cfg(feature = "ssr")]
|
||||
to_html:
|
||||
fn(Box<dyn Any>, &mut String, &mut String, &mut String, &mut String),
|
||||
to_html: fn(
|
||||
Box<dyn DynAttr>,
|
||||
&mut String,
|
||||
&mut String,
|
||||
&mut String,
|
||||
&mut String,
|
||||
),
|
||||
build: fn(
|
||||
Box<dyn Any>,
|
||||
Box<dyn DynAttr>,
|
||||
el: &crate::renderer::types::Element,
|
||||
) -> AnyAttributeState,
|
||||
rebuild: fn(TypeId, Box<dyn Any>, &mut AnyAttributeState),
|
||||
rebuild: fn(TypeId, Box<dyn DynAttr>, &mut AnyAttributeState),
|
||||
#[cfg(feature = "hydrate")]
|
||||
hydrate_from_server:
|
||||
fn(Box<dyn Any>, &crate::renderer::types::Element) -> AnyAttributeState,
|
||||
hydrate_from_server: fn(
|
||||
Box<dyn DynAttr>,
|
||||
&crate::renderer::types::Element,
|
||||
) -> AnyAttributeState,
|
||||
#[cfg(feature = "hydrate")]
|
||||
hydrate_from_template:
|
||||
fn(Box<dyn Any>, &crate::renderer::types::Element) -> AnyAttributeState,
|
||||
hydrate_from_template: fn(
|
||||
Box<dyn DynAttr>,
|
||||
&crate::renderer::types::Element,
|
||||
) -> AnyAttributeState,
|
||||
#[cfg(feature = "ssr")]
|
||||
#[allow(clippy::type_complexity)]
|
||||
resolve:
|
||||
fn(Box<dyn Any>) -> Pin<Box<dyn Future<Output = AnyAttribute> + Send>>,
|
||||
resolve: fn(
|
||||
Box<dyn DynAttr>,
|
||||
) -> Pin<Box<dyn Future<Output = AnyAttribute> + Send>>,
|
||||
#[cfg(feature = "ssr")]
|
||||
dry_resolve: fn(&mut Box<dyn Any + Send>),
|
||||
dry_resolve: fn(&mut Box<dyn DynAttr>),
|
||||
}
|
||||
|
||||
impl Debug for AnyAttribute {
|
||||
@@ -55,145 +89,138 @@ pub trait IntoAnyAttribute {
|
||||
impl<T> IntoAnyAttribute for T
|
||||
where
|
||||
Self: Send,
|
||||
T: Attribute + 'static,
|
||||
T::State: 'static,
|
||||
T: Attribute,
|
||||
crate::renderer::types::Element: Clone,
|
||||
{
|
||||
// inlining allows the compiler to remove the unused functions
|
||||
// i.e., doesn't ship HTML-generating code that isn't used
|
||||
#[inline(always)]
|
||||
fn into_any_attr(self) -> AnyAttribute {
|
||||
let html_len = self.html_len();
|
||||
|
||||
let value = Box::new(self) as Box<dyn Any + Send>;
|
||||
|
||||
match value.downcast::<AnyAttribute>() {
|
||||
let value =
|
||||
Box::new(self.into_cloneable_owned()) as Box<dyn Any + Send>;
|
||||
let value = match (value as Box<dyn Any>).downcast::<AnyAttribute>() {
|
||||
// if it's already an AnyAttribute, we don't need to double-wrap it
|
||||
Ok(any_attribute) => *any_attribute,
|
||||
Err(value) => {
|
||||
#[cfg(feature = "ssr")]
|
||||
let to_html =
|
||||
|value: Box<dyn Any>,
|
||||
buf: &mut String,
|
||||
class: &mut String,
|
||||
style: &mut String,
|
||||
inner_html: &mut String| {
|
||||
let value = value.downcast::<T>().expect(
|
||||
"AnyAttribute::to_html could not be downcast",
|
||||
);
|
||||
value.to_html(buf, class, style, inner_html);
|
||||
};
|
||||
let build =
|
||||
|value: Box<dyn Any>,
|
||||
Ok(any_attribute) => return *any_attribute,
|
||||
Err(value) => value.downcast::<T::CloneableOwned>().unwrap(),
|
||||
};
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
let to_html = |value: Box<dyn DynAttr>,
|
||||
buf: &mut String,
|
||||
class: &mut String,
|
||||
style: &mut String,
|
||||
inner_html: &mut String| {
|
||||
let value = value
|
||||
.into_any()
|
||||
.downcast::<T::CloneableOwned>()
|
||||
.expect("AnyAttribute::to_html could not be downcast");
|
||||
value.to_html(buf, class, style, inner_html);
|
||||
};
|
||||
let build = |value: Box<dyn DynAttr>,
|
||||
el: &crate::renderer::types::Element| {
|
||||
let value = value
|
||||
.downcast::<T>()
|
||||
.expect("AnyAttribute::build couldn't downcast");
|
||||
let state = Box::new(value.build(el));
|
||||
let value = value
|
||||
.into_any()
|
||||
.downcast::<T::CloneableOwned>()
|
||||
.expect("AnyAttribute::build couldn't downcast");
|
||||
let state = Box::new(value.build(el));
|
||||
|
||||
AnyAttributeState {
|
||||
type_id: TypeId::of::<T>(),
|
||||
state,
|
||||
el: el.clone(),
|
||||
}
|
||||
};
|
||||
#[cfg(feature = "hydrate")]
|
||||
let hydrate_from_server =
|
||||
|value: Box<dyn Any>,
|
||||
el: &crate::renderer::types::Element| {
|
||||
let value = value.downcast::<T>().expect(
|
||||
"AnyAttribute::hydrate_from_server couldn't \
|
||||
downcast",
|
||||
);
|
||||
let state = Box::new(value.hydrate::<true>(el));
|
||||
|
||||
AnyAttributeState {
|
||||
type_id: TypeId::of::<T>(),
|
||||
state,
|
||||
el: el.clone(),
|
||||
}
|
||||
};
|
||||
#[cfg(feature = "hydrate")]
|
||||
let hydrate_from_template =
|
||||
|value: Box<dyn Any>,
|
||||
el: &crate::renderer::types::Element| {
|
||||
let value = value.downcast::<T>().expect(
|
||||
"AnyAttribute::hydrate_from_server couldn't \
|
||||
downcast",
|
||||
);
|
||||
let state = Box::new(value.hydrate::<true>(el));
|
||||
|
||||
AnyAttributeState {
|
||||
type_id: TypeId::of::<T>(),
|
||||
state,
|
||||
el: el.clone(),
|
||||
}
|
||||
};
|
||||
let rebuild =
|
||||
|new_type_id: TypeId,
|
||||
value: Box<dyn Any>,
|
||||
state: &mut AnyAttributeState| {
|
||||
let value = value.downcast::<T>().expect(
|
||||
"AnyAttribute::rebuild couldn't downcast value",
|
||||
);
|
||||
if new_type_id == state.type_id {
|
||||
let state = state.state.downcast_mut().expect(
|
||||
"AnyAttribute::rebuild couldn't downcast state",
|
||||
);
|
||||
value.rebuild(state);
|
||||
} else {
|
||||
let new = value.into_any_attr().build(&state.el);
|
||||
*state = new;
|
||||
}
|
||||
};
|
||||
#[cfg(feature = "ssr")]
|
||||
let dry_resolve = |value: &mut Box<dyn Any + Send>| {
|
||||
let value = value
|
||||
.downcast_mut::<T>()
|
||||
.expect("AnyView::resolve could not be downcast");
|
||||
value.dry_resolve();
|
||||
};
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
let resolve = |value: Box<dyn Any>| {
|
||||
let value = value
|
||||
.downcast::<T>()
|
||||
.expect("AnyView::resolve could not be downcast");
|
||||
Box::pin(
|
||||
async move { value.resolve().await.into_any_attr() },
|
||||
)
|
||||
as Pin<Box<dyn Future<Output = AnyAttribute> + Send>>
|
||||
};
|
||||
AnyAttribute {
|
||||
type_id: TypeId::of::<T>(),
|
||||
html_len,
|
||||
value,
|
||||
#[cfg(feature = "ssr")]
|
||||
to_html,
|
||||
build,
|
||||
rebuild,
|
||||
#[cfg(feature = "hydrate")]
|
||||
hydrate_from_server,
|
||||
#[cfg(feature = "hydrate")]
|
||||
hydrate_from_template,
|
||||
#[cfg(feature = "ssr")]
|
||||
resolve,
|
||||
#[cfg(feature = "ssr")]
|
||||
dry_resolve,
|
||||
}
|
||||
AnyAttributeState {
|
||||
type_id: TypeId::of::<T::CloneableOwned>(),
|
||||
state,
|
||||
el: el.clone(),
|
||||
}
|
||||
};
|
||||
#[cfg(feature = "hydrate")]
|
||||
let hydrate_from_server =
|
||||
|value: Box<dyn DynAttr>, el: &crate::renderer::types::Element| {
|
||||
let value =
|
||||
value.into_any().downcast::<T::CloneableOwned>().expect(
|
||||
"AnyAttribute::hydrate_from_server couldn't downcast",
|
||||
);
|
||||
let state = Box::new(value.hydrate::<true>(el));
|
||||
|
||||
AnyAttributeState {
|
||||
type_id: TypeId::of::<T::CloneableOwned>(),
|
||||
state,
|
||||
el: el.clone(),
|
||||
}
|
||||
};
|
||||
#[cfg(feature = "hydrate")]
|
||||
let hydrate_from_template =
|
||||
|value: Box<dyn DynAttr>, el: &crate::renderer::types::Element| {
|
||||
let value =
|
||||
value.into_any().downcast::<T::CloneableOwned>().expect(
|
||||
"AnyAttribute::hydrate_from_server couldn't downcast",
|
||||
);
|
||||
let state = Box::new(value.hydrate::<true>(el));
|
||||
|
||||
AnyAttributeState {
|
||||
type_id: TypeId::of::<T::CloneableOwned>(),
|
||||
state,
|
||||
el: el.clone(),
|
||||
}
|
||||
};
|
||||
let rebuild = |new_type_id: TypeId,
|
||||
value: Box<dyn DynAttr>,
|
||||
state: &mut AnyAttributeState| {
|
||||
let value = value
|
||||
.into_any()
|
||||
.downcast::<T::CloneableOwned>()
|
||||
.expect("AnyAttribute::rebuild couldn't downcast value");
|
||||
if new_type_id == state.type_id {
|
||||
let state = state
|
||||
.state
|
||||
.downcast_mut()
|
||||
.expect("AnyAttribute::rebuild couldn't downcast state");
|
||||
value.rebuild(state);
|
||||
} else {
|
||||
let new = value.into_any_attr().build(&state.el);
|
||||
*state = new;
|
||||
}
|
||||
};
|
||||
#[cfg(feature = "ssr")]
|
||||
let dry_resolve = |value: &mut Box<dyn DynAttr>| {
|
||||
let value = value
|
||||
.as_any_mut()
|
||||
.downcast_mut::<T::CloneableOwned>()
|
||||
.expect("AnyView::resolve could not be downcast");
|
||||
value.dry_resolve();
|
||||
};
|
||||
|
||||
#[cfg(feature = "ssr")]
|
||||
let resolve = |value: Box<dyn DynAttr>| {
|
||||
let value = value
|
||||
.into_any()
|
||||
.downcast::<T::CloneableOwned>()
|
||||
.expect("AnyView::resolve could not be downcast");
|
||||
Box::pin(async move { value.resolve().await.into_any_attr() })
|
||||
as Pin<Box<dyn Future<Output = AnyAttribute> + Send>>
|
||||
};
|
||||
AnyAttribute {
|
||||
type_id: TypeId::of::<T::CloneableOwned>(),
|
||||
html_len: value.html_len(),
|
||||
value,
|
||||
#[cfg(feature = "ssr")]
|
||||
to_html,
|
||||
build,
|
||||
rebuild,
|
||||
#[cfg(feature = "hydrate")]
|
||||
hydrate_from_server,
|
||||
#[cfg(feature = "hydrate")]
|
||||
hydrate_from_template,
|
||||
#[cfg(feature = "ssr")]
|
||||
resolve,
|
||||
#[cfg(feature = "ssr")]
|
||||
dry_resolve,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl NextAttribute for AnyAttribute {
|
||||
type Output<NewAttr: Attribute> = (Self, NewAttr);
|
||||
type Output<NewAttr: Attribute> = Vec<AnyAttribute>;
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
(self, new_attr)
|
||||
vec![self, new_attr.into_any_attr()]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -202,8 +229,8 @@ impl Attribute for AnyAttribute {
|
||||
|
||||
type AsyncOutput = AnyAttribute;
|
||||
type State = AnyAttributeState;
|
||||
type Cloneable = ();
|
||||
type CloneableOwned = ();
|
||||
type Cloneable = AnyAttribute;
|
||||
type CloneableOwned = AnyAttribute;
|
||||
|
||||
fn html_len(&self) -> usize {
|
||||
self.html_len
|
||||
@@ -257,11 +284,11 @@ impl Attribute for AnyAttribute {
|
||||
}
|
||||
|
||||
fn into_cloneable(self) -> Self::Cloneable {
|
||||
todo!()
|
||||
self
|
||||
}
|
||||
|
||||
fn into_cloneable_owned(self) -> Self::CloneableOwned {
|
||||
todo!()
|
||||
self
|
||||
}
|
||||
|
||||
fn dry_resolve(&mut self) {
|
||||
@@ -288,3 +315,120 @@ impl Attribute for AnyAttribute {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
impl NextAttribute for Vec<AnyAttribute> {
|
||||
type Output<NewAttr: Attribute> = Self;
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
mut self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
self.push(new_attr.into_any_attr());
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Attribute for Vec<AnyAttribute> {
|
||||
const MIN_LENGTH: usize = 0;
|
||||
|
||||
type AsyncOutput = Vec<AnyAttribute>;
|
||||
type State = Vec<AnyAttributeState>;
|
||||
type Cloneable = Vec<AnyAttribute>;
|
||||
type CloneableOwned = Vec<AnyAttribute>;
|
||||
|
||||
fn html_len(&self) -> usize {
|
||||
self.iter().map(|attr| attr.html_len()).sum()
|
||||
}
|
||||
|
||||
#[allow(unused)] // they are used in SSR
|
||||
fn to_html(
|
||||
self,
|
||||
buf: &mut String,
|
||||
class: &mut String,
|
||||
style: &mut String,
|
||||
inner_html: &mut String,
|
||||
) {
|
||||
#[cfg(feature = "ssr")]
|
||||
{
|
||||
for mut attr in self {
|
||||
attr.to_html(buf, class, style, inner_html)
|
||||
}
|
||||
}
|
||||
#[cfg(not(feature = "ssr"))]
|
||||
panic!(
|
||||
"You are rendering AnyAttribute to HTML without the `ssr` feature \
|
||||
enabled."
|
||||
);
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(
|
||||
self,
|
||||
el: &crate::renderer::types::Element,
|
||||
) -> Self::State {
|
||||
#[cfg(feature = "hydrate")]
|
||||
if FROM_SERVER {
|
||||
self.into_iter()
|
||||
.map(|attr| attr.hydrate::<true>(el))
|
||||
.collect()
|
||||
} else {
|
||||
self.into_iter()
|
||||
.map(|attr| attr.hydrate::<false>(el))
|
||||
.collect()
|
||||
}
|
||||
#[cfg(not(feature = "hydrate"))]
|
||||
{
|
||||
_ = el;
|
||||
panic!(
|
||||
"You are trying to hydrate AnyAttribute without the `hydrate` \
|
||||
feature enabled."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn build(self, el: &crate::renderer::types::Element) -> Self::State {
|
||||
self.into_iter().map(|attr| attr.build(el)).collect()
|
||||
}
|
||||
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
for (attr, state) in self.into_iter().zip(state.iter_mut()) {
|
||||
attr.rebuild(state)
|
||||
}
|
||||
}
|
||||
|
||||
fn into_cloneable(self) -> Self::Cloneable {
|
||||
self
|
||||
}
|
||||
|
||||
fn into_cloneable_owned(self) -> Self::CloneableOwned {
|
||||
self
|
||||
}
|
||||
|
||||
fn dry_resolve(&mut self) {
|
||||
#[cfg(feature = "ssr")]
|
||||
{
|
||||
for attr in self.iter_mut() {
|
||||
attr.dry_resolve()
|
||||
}
|
||||
}
|
||||
#[cfg(not(feature = "ssr"))]
|
||||
panic!(
|
||||
"You are rendering AnyAttribute to HTML without the `ssr` feature \
|
||||
enabled."
|
||||
);
|
||||
}
|
||||
|
||||
async fn resolve(self) -> Self::AsyncOutput {
|
||||
#[cfg(feature = "ssr")]
|
||||
{
|
||||
futures::future::join_all(
|
||||
self.into_iter().map(|attr| attr.resolve()),
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(feature = "ssr"))]
|
||||
panic!(
|
||||
"You are rendering AnyAttribute to HTML without the `ssr` feature \
|
||||
enabled."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
use super::NextAttribute;
|
||||
use super::{
|
||||
maybe_next_attr_erasure_macros::next_attr_output_type, NextAttribute,
|
||||
};
|
||||
use crate::{
|
||||
html::attribute::{Attribute, AttributeValue},
|
||||
html::attribute::{
|
||||
maybe_next_attr_erasure_macros::next_attr_combine, Attribute,
|
||||
AttributeValue,
|
||||
},
|
||||
view::{add_attr::AddAnyAttr, Position, ToTemplate},
|
||||
};
|
||||
use std::{borrow::Cow, sync::Arc};
|
||||
@@ -114,13 +119,13 @@ where
|
||||
K: CustomAttributeKey,
|
||||
V: AttributeValue,
|
||||
{
|
||||
type Output<NewAttr: Attribute> = (Self, NewAttr);
|
||||
next_attr_output_type!(Self, NewAttr);
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
(self, new_attr)
|
||||
next_attr_combine!(self, new_attr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
27
tachys/src/html/attribute/maybe_next_attr_erasure_macros.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
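// Expands to the `Output` associated type used by `NextAttribute` implementations:
// a tuple of the two attribute types by default, or a type-erased
// `Vec<AnyAttribute>` when the `erase_components` cfg flag is enabled.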
macro_rules! next_attr_output_type {
|
||||
($current:ty, $next:ty) => {
|
||||
#[cfg(not(erase_components))]
|
||||
type Output<NewAttr: Attribute> = ($current, $next);
|
||||
|
||||
#[cfg(erase_components)]
|
||||
type Output<NewAttr: Attribute> =
|
||||
Vec<$crate::html::attribute::any_attribute::AnyAttribute>;
|
||||
};
|
||||
}
|
||||
|
||||
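// Combines the current attribute with the next one, mirroring `next_attr_output_type!`:
// builds a tuple by default, or a `Vec<AnyAttribute>` containing both attributes
// when the `erase_components` cfg flag is enabled.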
macro_rules! next_attr_combine {
|
||||
($self:expr, $next_attr:expr) => {{
|
||||
#[cfg(not(erase_components))]
|
||||
{
|
||||
($self, $next_attr)
|
||||
}
|
||||
#[cfg(erase_components)]
|
||||
{
|
||||
use $crate::html::attribute::any_attribute::IntoAnyAttribute;
|
||||
vec![$self.into_any_attr(), $next_attr.into_any_attr()]
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
pub(crate) use next_attr_combine;
|
||||
pub(crate) use next_attr_output_type;
|
||||
@@ -7,10 +7,15 @@ pub mod custom;
|
||||
/// Traits to define global attribute methods on all HTML elements.
|
||||
pub mod global;
|
||||
mod key;
|
||||
pub(crate) mod maybe_next_attr_erasure_macros;
|
||||
pub(crate) mod panic_on_clone_attribute;
|
||||
mod value;
|
||||
|
||||
use crate::view::{Position, ToTemplate};
|
||||
pub use key::*;
|
||||
use maybe_next_attr_erasure_macros::{
|
||||
next_attr_combine, next_attr_output_type,
|
||||
};
|
||||
use std::{fmt::Debug, future::Future};
|
||||
pub use value::*;
|
||||
|
||||
@@ -73,6 +78,25 @@ pub trait Attribute: NextAttribute + Send {
|
||||
fn resolve(self) -> impl Future<Output = Self::AsyncOutput> + Send;
|
||||
}
|
||||
|
||||
/// A type that can be converted into an attribute.
|
||||
///
|
||||
/// Used to type-erase attrs and tuples of attrs into [`Vec<AnyAttribute>`] as early as possible, to prevent type explosion.
|
||||
pub trait IntoAttribute {
|
||||
/// The type of the attribute.
|
||||
type Output: Attribute;
|
||||
|
||||
/// Converts this into an attribute.
|
||||
fn into_attr(self) -> Self::Output;
|
||||
}
|
||||
|
||||
impl<T: Attribute> IntoAttribute for T {
|
||||
type Output = T;
|
||||
|
||||
fn into_attr(self) -> Self::Output {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds another attribute to this one, returning a new attribute.
|
||||
///
|
||||
/// This is typically achieved by creating or extending a tuple of attributes.
|
||||
@@ -132,13 +156,27 @@ impl Attribute for () {
|
||||
}
|
||||
|
||||
impl NextAttribute for () {
|
||||
#[cfg(not(erase_components))]
|
||||
type Output<NewAttr: Attribute> = (NewAttr,);
|
||||
|
||||
#[cfg(erase_components)]
|
||||
type Output<NewAttr: Attribute> =
|
||||
Vec<crate::html::attribute::any_attribute::AnyAttribute>;
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
(new_attr,)
|
||||
#[cfg(not(erase_components))]
|
||||
{
|
||||
(new_attr,)
|
||||
}
|
||||
#[cfg(erase_components)]
|
||||
{
|
||||
use crate::html::attribute::any_attribute::IntoAnyAttribute;
|
||||
|
||||
vec![new_attr.into_any_attr()]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -238,28 +276,28 @@ where
|
||||
K: AttributeKey,
|
||||
V: AttributeValue,
|
||||
{
|
||||
type Output<NewAttr: Attribute> = (Self, NewAttr);
|
||||
next_attr_output_type!(Self, NewAttr);
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
(self, new_attr)
|
||||
next_attr_combine!(self, new_attr)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_attr_for_tuples {
|
||||
($first:ident, $($ty:ident),* $(,)?) => {
|
||||
impl<$first, $($ty),*> Attribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: Attribute,
|
||||
$($ty: Attribute),*,
|
||||
|
||||
{
|
||||
($first:ident, $($ty:ident),* $(,)?) => {
|
||||
#[cfg(not(erase_components))]
|
||||
impl<$first, $($ty),*> Attribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: Attribute,
|
||||
$($ty: Attribute),*,
|
||||
{
|
||||
const MIN_LENGTH: usize = $first::MIN_LENGTH $(+ $ty::MIN_LENGTH)*;
|
||||
|
||||
type AsyncOutput = ($first::AsyncOutput, $($ty::AsyncOutput,)*);
|
||||
type State = ($first::State, $($ty::State,)*);
|
||||
type AsyncOutput = ($first::AsyncOutput, $($ty::AsyncOutput,)*);
|
||||
type State = ($first::State, $($ty::State,)*);
|
||||
type Cloneable = ($first::Cloneable, $($ty::Cloneable,)*);
|
||||
type CloneableOwned = ($first::CloneableOwned, $($ty::CloneableOwned,)*);
|
||||
|
||||
@@ -269,39 +307,39 @@ macro_rules! impl_attr_for_tuples {
|
||||
$first.html_len() $(+ $ty.html_len())*
|
||||
}
|
||||
|
||||
fn to_html(self, buf: &mut String, class: &mut String, style: &mut String, inner_html: &mut String,) {
|
||||
fn to_html(self, buf: &mut String, class: &mut String, style: &mut String, inner_html: &mut String,) {
|
||||
#[allow(non_snake_case)]
|
||||
let ($first, $($ty,)* ) = self;
|
||||
$first.to_html(buf, class, style, inner_html);
|
||||
$($ty.to_html(buf, class, style, inner_html));*
|
||||
}
|
||||
let ($first, $($ty,)* ) = self;
|
||||
$first.to_html(buf, class, style, inner_html);
|
||||
$($ty.to_html(buf, class, style, inner_html));*
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(self, el: &crate::renderer::types::Element) -> Self::State {
|
||||
fn hydrate<const FROM_SERVER: bool>(self, el: &crate::renderer::types::Element) -> Self::State {
|
||||
#[allow(non_snake_case)]
|
||||
let ($first, $($ty,)* ) = self;
|
||||
(
|
||||
$first.hydrate::<FROM_SERVER>(el),
|
||||
$($ty.hydrate::<FROM_SERVER>(el)),*
|
||||
)
|
||||
}
|
||||
let ($first, $($ty,)* ) = self;
|
||||
(
|
||||
$first.hydrate::<FROM_SERVER>(el),
|
||||
$($ty.hydrate::<FROM_SERVER>(el)),*
|
||||
)
|
||||
}
|
||||
|
||||
fn build(self, el: &crate::renderer::types::Element) -> Self::State {
|
||||
#[allow(non_snake_case)]
|
||||
let ($first, $($ty,)*) = self;
|
||||
let ($first, $($ty,)*) = self;
|
||||
(
|
||||
$first.build(el),
|
||||
$($ty.build(el)),*
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
paste::paste! {
|
||||
let ([<$first:lower>], $([<$ty:lower>],)*) = self;
|
||||
let ([<view_ $first:lower>], $([<view_ $ty:lower>],)*) = state;
|
||||
[<$first:lower>].rebuild([<view_ $first:lower>]);
|
||||
$([<$ty:lower>].rebuild([<view_ $ty:lower>]));*
|
||||
}
|
||||
}
|
||||
let ([<$first:lower>], $([<$ty:lower>],)*) = self;
|
||||
let ([<view_ $first:lower>], $([<view_ $ty:lower>],)*) = state;
|
||||
[<$first:lower>].rebuild([<view_ $first:lower>]);
|
||||
$([<$ty:lower>].rebuild([<view_ $ty:lower>]));*
|
||||
}
|
||||
}
|
||||
|
||||
fn into_cloneable(self) -> Self::Cloneable {
|
||||
#[allow(non_snake_case)]
|
||||
@@ -338,10 +376,11 @@ macro_rules! impl_attr_for_tuples {
|
||||
}
|
||||
}
|
||||
|
||||
impl<$first, $($ty),*> NextAttribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: Attribute,
|
||||
$($ty: Attribute),*,
|
||||
#[cfg(not(erase_components))]
|
||||
impl<$first, $($ty),*> NextAttribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: Attribute,
|
||||
$($ty: Attribute),*,
|
||||
|
||||
{
|
||||
type Output<NewAttr: Attribute> = ($first, $($ty,)* NewAttr);
|
||||
@@ -354,22 +393,44 @@ macro_rules! impl_attr_for_tuples {
|
||||
let ($first, $($ty,)*) = self;
|
||||
($first, $($ty,)* new_attr)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
#[cfg(erase_components)]
|
||||
impl<$first, $($ty),*> IntoAttribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: IntoAttribute,
|
||||
$($ty: IntoAttribute),*,
|
||||
{
|
||||
type Output = Vec<$crate::html::attribute::any_attribute::AnyAttribute>;
|
||||
|
||||
fn into_attr(self) -> Self::Output {
|
||||
use crate::html::attribute::any_attribute::IntoAnyAttribute;
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
let ($first, $($ty,)*) = self;
|
||||
vec![
|
||||
$first.into_attr().into_any_attr(),
|
||||
$($ty.into_attr().into_any_attr(),)*
|
||||
]
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! impl_attr_for_tuples_truncate_additional {
|
||||
($first:ident, $($ty:ident),* $(,)?) => {
|
||||
impl<$first, $($ty),*> Attribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: Attribute,
|
||||
$($ty: Attribute),*,
|
||||
($first:ident, $($ty:ident),* $(,)?) => {
|
||||
#[cfg(not(erase_components))]
|
||||
impl<$first, $($ty),*> Attribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: Attribute,
|
||||
$($ty: Attribute),*,
|
||||
|
||||
{
|
||||
{
|
||||
const MIN_LENGTH: usize = $first::MIN_LENGTH $(+ $ty::MIN_LENGTH)*;
|
||||
|
||||
type AsyncOutput = ($first::AsyncOutput, $($ty::AsyncOutput,)*);
|
||||
type State = ($first::State, $($ty::State,)*);
|
||||
type AsyncOutput = ($first::AsyncOutput, $($ty::AsyncOutput,)*);
|
||||
type State = ($first::State, $($ty::State,)*);
|
||||
type Cloneable = ($first::Cloneable, $($ty::Cloneable,)*);
|
||||
type CloneableOwned = ($first::CloneableOwned, $($ty::CloneableOwned,)*);
|
||||
|
||||
@@ -379,21 +440,21 @@ macro_rules! impl_attr_for_tuples_truncate_additional {
|
||||
$first.html_len() $(+ $ty.html_len())*
|
||||
}
|
||||
|
||||
fn to_html(self, buf: &mut String, class: &mut String, style: &mut String, inner_html: &mut String,) {
|
||||
fn to_html(self, buf: &mut String, class: &mut String, style: &mut String, inner_html: &mut String,) {
|
||||
#[allow(non_snake_case)]
|
||||
let ($first, $($ty,)* ) = self;
|
||||
$first.to_html(buf, class, style, inner_html);
|
||||
$($ty.to_html(buf, class, style, inner_html));*
|
||||
}
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(self, el: &crate::renderer::types::Element) -> Self::State {
|
||||
fn hydrate<const FROM_SERVER: bool>(self, el: &crate::renderer::types::Element) -> Self::State {
|
||||
#[allow(non_snake_case)]
|
||||
let ($first, $($ty,)* ) = self;
|
||||
(
|
||||
$first.hydrate::<FROM_SERVER>(el),
|
||||
$($ty.hydrate::<FROM_SERVER>(el)),*
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn build(self, el: &crate::renderer::types::Element) -> Self::State {
|
||||
#[allow(non_snake_case)]
|
||||
@@ -402,16 +463,16 @@ macro_rules! impl_attr_for_tuples_truncate_additional {
|
||||
$first.build(el),
|
||||
$($ty.build(el)),*
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
paste::paste! {
|
||||
let ([<$first:lower>], $([<$ty:lower>],)*) = self;
|
||||
let ([<view_ $first:lower>], $([<view_ $ty:lower>],)*) = state;
|
||||
[<$first:lower>].rebuild([<view_ $first:lower>]);
|
||||
$([<$ty:lower>].rebuild([<view_ $ty:lower>]));*
|
||||
}
|
||||
}
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
paste::paste! {
|
||||
let ([<$first:lower>], $([<$ty:lower>],)*) = self;
|
||||
let ([<view_ $first:lower>], $([<view_ $ty:lower>],)*) = state;
|
||||
[<$first:lower>].rebuild([<view_ $first:lower>]);
|
||||
$([<$ty:lower>].rebuild([<view_ $ty:lower>]));*
|
||||
}
|
||||
}
|
||||
|
||||
fn into_cloneable(self) -> Self::Cloneable {
|
||||
#[allow(non_snake_case)]
|
||||
@@ -448,10 +509,11 @@ macro_rules! impl_attr_for_tuples_truncate_additional {
|
||||
}
|
||||
}
|
||||
|
||||
impl<$first, $($ty),*> NextAttribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: Attribute,
|
||||
$($ty: Attribute),*,
|
||||
#[cfg(not(erase_components))]
|
||||
impl<$first, $($ty),*> NextAttribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: Attribute,
|
||||
$($ty: Attribute),*,
|
||||
|
||||
{
|
||||
type Output<NewAttr: Attribute> = ($first, $($ty,)*);
|
||||
@@ -463,10 +525,39 @@ macro_rules! impl_attr_for_tuples_truncate_additional {
|
||||
todo!("adding more than 26 attributes is not supported");
|
||||
//($first, $($ty,)*)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(erase_components)]
|
||||
impl<$first, $($ty),*> IntoAttribute for ($first, $($ty,)*)
|
||||
where
|
||||
$first: IntoAttribute,
|
||||
$($ty: IntoAttribute),*,
|
||||
{
|
||||
type Output = $crate::html::attribute::any_attribute::AnyAttribute;
|
||||
|
||||
fn into_attr(self) -> Self::Output {
|
||||
todo!("adding more than 26 attributes is not supported");
|
||||
//crate::html::attribute::any_attribute::IntoAnyAttribute::into_any_attr(self)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(erase_components)]
|
||||
impl<A> IntoAttribute for (A,)
|
||||
where
|
||||
A: IntoAttribute,
|
||||
{
|
||||
type Output = Vec<crate::html::attribute::any_attribute::AnyAttribute>;
|
||||
|
||||
fn into_attr(self) -> Self::Output {
|
||||
use crate::html::attribute::any_attribute::IntoAnyAttribute;
|
||||
|
||||
vec![self.0.into_attr().into_any_attr()]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(erase_components))]
|
||||
impl<A> Attribute for (A,)
|
||||
where
|
||||
A: Attribute,
|
||||
@@ -524,17 +615,18 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(erase_components))]
|
||||
impl<A> NextAttribute for (A,)
|
||||
where
|
||||
A: Attribute,
|
||||
{
|
||||
type Output<NewAttr: Attribute> = (A, NewAttr);
|
||||
next_attr_output_type!(A, NewAttr);
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
(self.0, new_attr)
|
||||
next_attr_combine!(self.0, new_attr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
88
tachys/src/html/attribute/panic_on_clone_attribute.rs
Normal file
@@ -0,0 +1,88 @@
|
||||
use super::{Attribute, NextAttribute};
|
||||
|
||||
/// When type erasing with `AnyAttribute`, the underlying attribute must be cloneable.
///
/// For most attributes this is possible, but for some, like `NodeRef`, it is not.
///
/// This allows a panic to be raised if a non-cloneable attribute is actually cloned, while still presenting it as cloneable.
pub struct PanicOnCloneAttr<T: Attribute + 'static> {
|
||||
msg: &'static str,
|
||||
attr: T,
|
||||
}
|
||||
|
||||
impl<T: Attribute + 'static> PanicOnCloneAttr<T> {
|
||||
pub(crate) fn new(attr: T, msg: &'static str) -> Self {
|
||||
Self { msg, attr }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Attribute + 'static> Clone for PanicOnCloneAttr<T> {
|
||||
fn clone(&self) -> Self {
|
||||
panic!("{}", self.msg)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Attribute + 'static> NextAttribute for PanicOnCloneAttr<T> {
|
||||
type Output<NewAttr: Attribute> = <T as NextAttribute>::Output<NewAttr>;
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
self.attr.add_any_attr(new_attr)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Attribute + 'static> Attribute for PanicOnCloneAttr<T> {
|
||||
const MIN_LENGTH: usize = T::MIN_LENGTH;
|
||||
|
||||
type State = T::State;
|
||||
type AsyncOutput = T::AsyncOutput;
|
||||
type Cloneable = Self;
|
||||
type CloneableOwned = Self;
|
||||
|
||||
fn html_len(&self) -> usize {
|
||||
self.attr.html_len()
|
||||
}
|
||||
|
||||
fn to_html(
|
||||
self,
|
||||
buf: &mut String,
|
||||
class: &mut String,
|
||||
style: &mut String,
|
||||
inner_html: &mut String,
|
||||
) {
|
||||
self.attr.to_html(buf, class, style, inner_html)
|
||||
}
|
||||
|
||||
fn hydrate<const FROM_SERVER: bool>(
|
||||
self,
|
||||
el: &crate::renderer::types::Element,
|
||||
) -> Self::State {
|
||||
self.attr.hydrate::<FROM_SERVER>(el)
|
||||
}
|
||||
|
||||
fn build(self, el: &crate::renderer::types::Element) -> Self::State {
|
||||
self.attr.build(el)
|
||||
}
|
||||
|
||||
fn rebuild(self, state: &mut Self::State) {
|
||||
self.attr.rebuild(state)
|
||||
}
|
||||
|
||||
fn into_cloneable(self) -> Self::Cloneable {
|
||||
self
|
||||
}
|
||||
|
||||
fn into_cloneable_owned(self) -> Self::CloneableOwned {
|
||||
self
|
||||
}
|
||||
|
||||
fn dry_resolve(&mut self) {
|
||||
self.attr.dry_resolve()
|
||||
}
|
||||
|
||||
async fn resolve(self) -> Self::AsyncOutput {
|
||||
self.attr.resolve().await
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,9 @@
|
||||
use super::attribute::{Attribute, NextAttribute};
|
||||
use super::attribute::{
|
||||
maybe_next_attr_erasure_macros::next_attr_output_type, Attribute,
|
||||
NextAttribute,
|
||||
};
|
||||
use crate::{
|
||||
html::attribute::maybe_next_attr_erasure_macros::next_attr_combine,
|
||||
renderer::Rndr,
|
||||
view::{Position, ToTemplate},
|
||||
};
|
||||
@@ -99,13 +103,13 @@ impl<C> NextAttribute for Class<C>
|
||||
where
|
||||
C: IntoClass,
|
||||
{
|
||||
type Output<NewAttr: Attribute> = (Self, NewAttr);
|
||||
next_attr_output_type!(Self, NewAttr);
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
(self, new_attr)
|
||||
next_attr_combine!(self, new_attr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
use super::attribute::{Attribute, NextAttribute};
|
||||
use super::attribute::{
|
||||
maybe_next_attr_erasure_macros::next_attr_output_type, Attribute,
|
||||
NextAttribute,
|
||||
};
|
||||
use crate::{
|
||||
html::attribute::maybe_next_attr_erasure_macros::next_attr_combine,
|
||||
prelude::AddAnyAttr,
|
||||
view::{Position, ToTemplate},
|
||||
};
|
||||
@@ -164,13 +168,13 @@ where
|
||||
P: Clone + 'static,
|
||||
T: 'static,
|
||||
{
|
||||
type Output<NewAttr: Attribute> = (Self, NewAttr);
|
||||
next_attr_output_type!(Self, NewAttr);
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
(self, new_attr)
|
||||
next_attr_combine!(self, new_attr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ where
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
|
||||
pub struct Custom<E>(E);
|
||||
|
||||
impl<E> ElementType for Custom<E>
|
||||
impl<E: 'static> ElementType for Custom<E>
|
||||
where
|
||||
E: AsRef<str> + Send,
|
||||
{
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
use crate::{
|
||||
html::{
|
||||
attribute::{Attr, Attribute, AttributeValue},
|
||||
attribute::{Attr, Attribute, AttributeValue, NextAttribute},
|
||||
element::{ElementType, ElementWithChildren, HtmlElement},
|
||||
},
|
||||
view::Render,
|
||||
};
|
||||
use next_tuple::NextTuple;
|
||||
use std::fmt::Debug;
|
||||
|
||||
macro_rules! html_element_inner {
|
||||
@@ -48,13 +47,13 @@ macro_rules! html_element_inner {
|
||||
#[doc = concat!("The [`", stringify!($attr), "`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/", stringify!($tag), "#", stringify!($attr) ,") attribute on `<", stringify!($tag), ">`.")]
|
||||
pub fn $attr<V>(self, value: V) -> HtmlElement <
|
||||
$struct_name,
|
||||
<At as NextTuple>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
|
||||
<At as NextAttribute>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
|
||||
Ch
|
||||
>
|
||||
where
|
||||
V: AttributeValue,
|
||||
At: NextTuple,
|
||||
<At as NextTuple>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>: Attribute,
|
||||
At: NextAttribute,
|
||||
<At as NextAttribute>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>: Attribute,
|
||||
{
|
||||
let HtmlElement {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
@@ -68,7 +67,7 @@ macro_rules! html_element_inner {
|
||||
defined_at,
|
||||
tag,
|
||||
children,
|
||||
attributes: attributes.next_tuple($crate::html::attribute::$attr(value)),
|
||||
attributes: attributes.add_any_attr($crate::html::attribute::$attr(value)),
|
||||
}
|
||||
}
|
||||
)*
|
||||
@@ -153,13 +152,13 @@ macro_rules! html_self_closing_elements {
|
||||
#[doc = concat!("The [`", stringify!($attr), "`](https://developer.mozilla.org/en-US/docs/Web/HTML/Element/", stringify!($tag), "#", stringify!($attr) ,") attribute on `<", stringify!($tag), ">`.")]
|
||||
pub fn $attr<V>(self, value: V) -> HtmlElement<
|
||||
[<$tag:camel>],
|
||||
<At as NextTuple>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
|
||||
<At as NextAttribute>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
|
||||
(),
|
||||
>
|
||||
where
|
||||
V: AttributeValue,
|
||||
At: NextTuple,
|
||||
<At as NextTuple>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>: Attribute,
|
||||
At: NextAttribute,
|
||||
<At as NextAttribute>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>: Attribute,
|
||||
{
|
||||
let HtmlElement {
|
||||
#[cfg(any(debug_assertions, leptos_debuginfo))]
|
||||
@@ -173,7 +172,7 @@ macro_rules! html_self_closing_elements {
|
||||
defined_at,
|
||||
tag,
|
||||
children,
|
||||
attributes: attributes.next_tuple($crate::html::attribute::$attr(value)),
|
||||
attributes: attributes.add_any_attr($crate::html::attribute::$attr(value)),
|
||||
}
|
||||
}
|
||||
)*
|
||||
@@ -284,7 +283,7 @@ html_elements! {
|
||||
/// The `<em>` HTML element marks text that has stress emphasis. The `<em>` element can be nested, with each level of nesting indicating a greater degree of emphasis.
|
||||
em HtmlElement [] true,
|
||||
/// The `<fieldset>` HTML element is used to group several controls as well as labels (label) within a web form.
|
||||
fieldset HtmlFieldSetElement [] true,
|
||||
fieldset HtmlFieldSetElement [disabled, form, name] true,
|
||||
/// The `<figcaption>` HTML element represents a caption or legend describing the rest of the contents of its parent figure element.
|
||||
figcaption HtmlElement [] true,
|
||||
/// The `<figure>` HTML element represents self-contained content, potentially with an optional caption, which is specified using the figcaption element. The figure, its caption, and its contents are referenced as a single unit.
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
use super::{ElementWithChildren, HtmlElement};
|
||||
use crate::{
|
||||
html::attribute::{Attribute, NextAttribute},
|
||||
html::attribute::{
|
||||
maybe_next_attr_erasure_macros::{
|
||||
next_attr_combine, next_attr_output_type,
|
||||
},
|
||||
Attribute, NextAttribute,
|
||||
},
|
||||
renderer::Rndr,
|
||||
view::add_attr::AddAnyAttr,
|
||||
};
|
||||
@@ -106,13 +111,13 @@ impl<T> NextAttribute for InnerHtml<T>
|
||||
where
|
||||
T: InnerHtmlValue,
|
||||
{
|
||||
type Output<NewAttr: Attribute> = (Self, NewAttr);
|
||||
next_attr_output_type!(Self, NewAttr);
|
||||
|
||||
fn add_any_attr<NewAttr: Attribute>(
|
||||
self,
|
||||
new_attr: NewAttr,
|
||||
) -> Self::Output<NewAttr> {
|
||||
(self, new_attr)
|
||||
next_attr_combine!(self, new_attr)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -141,7 +141,7 @@ where
|
||||
}
|
||||
|
||||
/// An HTML element.
|
||||
pub trait ElementType: Send {
|
||||
pub trait ElementType: Send + 'static {
|
||||
/// The underlying native widget type that this represents.
|
||||
type Output;
|
||||
|
||||
|
||||
@@ -1,5 +1,7 @@
use crate::{
html::attribute::Attribute,
html::attribute::{
maybe_next_attr_erasure_macros::next_attr_combine, Attribute,
},
renderer::{CastFrom, RemoveEventHandler, Rndr},
view::{Position, ToTemplate},
};
@@ -165,6 +167,9 @@ where
el: &crate::renderer::types::Element,
cb: Box<dyn FnMut(crate::renderer::types::Event)>,
name: Cow<'static, str>,
// TODO investigate: does passing this as an option
// (rather than, say, having a const DELEGATED: bool)
// add to binary size?
delegation_key: Option<Cow<'static, str>>,
) -> RemoveEventHandler<crate::renderer::types::Element> {
match delegation_key {
@@ -199,6 +204,39 @@ where
.then(|| self.event.event_delegation_key()),
)
}

/// Attaches the event listener to the element as a listener that is triggered during the capture phase,
/// meaning it will fire before any event listeners further down in the DOM.
pub fn attach_capture(
self,
el: &crate::renderer::types::Element,
) -> RemoveEventHandler<crate::renderer::types::Element> {
fn attach_inner(
el: &crate::renderer::types::Element,
cb: Box<dyn FnMut(crate::renderer::types::Event)>,
name: Cow<'static, str>,
) -> RemoveEventHandler<crate::renderer::types::Element> {
Rndr::add_event_listener_use_capture(el, &name, cb)
}

let mut cb = self.cb.expect("callback removed before attaching").take();

#[cfg(feature = "tracing")]
let span = tracing::Span::current();

let cb = Box::new(move |ev: crate::renderer::types::Event| {
#[cfg(all(debug_assertions, feature = "reactive_graph"))]
let _rx_guard =
reactive_graph::diagnostics::SpecialNonReactiveZone::enter();
#[cfg(feature = "tracing")]
let _tracing_guard = span.enter();

let ev = E::EventType::from(ev);
cb.invoke(ev);
}) as Box<dyn FnMut(crate::renderer::types::Event)>;

attach_inner(el, cb, self.event.name())
}
}

impl<E, F> Debug for On<E, F>
@@ -248,13 +286,21 @@ where
self,
el: &crate::renderer::types::Element,
) -> Self::State {
let cleanup = self.attach(el);
let cleanup = if E::CAPTURE {
self.attach_capture(el)
} else {
self.attach(el)
};
(el.clone(), Some(cleanup))
}

#[inline(always)]
fn build(self, el: &crate::renderer::types::Element) -> Self::State {
let cleanup = self.attach(el);
let cleanup = if E::CAPTURE {
self.attach_capture(el)
} else {
self.attach(el)
};
(el.clone(), Some(cleanup))
}

@@ -264,7 +310,11 @@ where
if let Some(prev) = prev_cleanup.take() {
(prev.into_inner())(el);
}
*prev_cleanup = Some(self.attach(el));
*prev_cleanup = Some(if E::CAPTURE {
self.attach_capture(el)
} else {
self.attach(el)
});
}

fn into_cloneable(self) -> Self::Cloneable {
@@ -302,13 +352,13 @@ where

E::EventType: From<crate::renderer::types::Event>,
{
type Output<NewAttr: Attribute> = (Self, NewAttr);
next_attr_output_type!(Self, NewAttr);

fn add_any_attr<NewAttr: Attribute>(
self,
new_attr: NewAttr,
) -> Self::Output<NewAttr> {
(self, new_attr)
next_attr_combine!(self, new_attr)
}
}

@@ -332,10 +382,13 @@ pub trait EventDescriptor: Clone {
/// Indicates if this event bubbles. For example, `click` bubbles,
/// but `focus` does not.
///
/// If this is true, then the event will be delegated globally,
/// otherwise, event listeners will be directly attached to the element.
/// If this is true, then the event will be delegated globally if the `delegation`
/// feature is enabled. Otherwise, event listeners will be directly attached to the element.
const BUBBLES: bool;

/// Indicates if this event should be handled during the capture phase.
const CAPTURE: bool = false;

/// The name of the event, such as `click` or `mouseover`.
fn name(&self) -> Cow<'static, str>;

@@ -350,6 +403,32 @@ pub trait EventDescriptor: Clone {
}
}

/// A wrapper that tells the framework to handle an event during the capture phase.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Capture<E> {
inner: E,
}

/// Wraps an event to indicate that it should be handled during the capture phase.
pub fn capture<E>(event: E) -> Capture<E> {
Capture { inner: event }
}

impl<E: EventDescriptor> EventDescriptor for Capture<E> {
type EventType = E::EventType;

const CAPTURE: bool = true;
const BUBBLES: bool = E::BUBBLES;

fn name(&self) -> Cow<'static, str> {
self.inner.name()
}

fn event_delegation_key(&self) -> Cow<'static, str> {
self.inner.event_delegation_key()
}
}

/// A custom event.
#[derive(Debug)]
pub struct Custom<E: FromWasmAbi = web_sys::Event> {
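Taken together, the `CAPTURE` constant, the `Capture` wrapper, and the `attach_capture` path above let a listener run during the capture phase. A minimal usage sketch, assuming the `on` builder and the generated `click` descriptor from this module (the call site itself is illustrative, not part of the diff):

    use tachys::html::event::{capture, click, on};

    // Wrapping the descriptor sets `CAPTURE = true`, so when this attribute is
    // built or hydrated it goes through `attach_capture` and registers a
    // capture-phase DOM listener instead of the bubbling/delegated one.
    let listener = on(capture(click), |_ev| {
        // runs before click handlers on descendant elements
    });
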
@@ -671,7 +750,12 @@ generate_event_types! {
}

// Export `web_sys` event types
use super::{attribute::NextAttribute, element::HasElementType};
use super::{
attribute::{
maybe_next_attr_erasure_macros::next_attr_output_type, NextAttribute,
},
element::HasElementType,
};
#[doc(no_inline)]
pub use web_sys::{
AnimationEvent, BeforeUnloadEvent, CompositionEvent, CustomEvent,

@@ -1,14 +1,22 @@
use super::{
attribute::{Attribute, NextAttribute},
attribute::{
maybe_next_attr_erasure_macros::next_attr_output_type,
panic_on_clone_attribute::PanicOnCloneAttr, Attribute, NextAttribute,
},
element::ElementType,
};
use crate::{
html::element::HtmlElement, prelude::Render, view::add_attr::AddAnyAttr,
html::{
attribute::maybe_next_attr_erasure_macros::next_attr_combine,
element::HtmlElement,
},
prelude::Render,
view::add_attr::AddAnyAttr,
};
use std::marker::PhantomData;

/// Describes a container that can be used to hold a reference to an HTML element.
pub trait NodeRefContainer<E>: Send + Clone
pub trait NodeRefContainer<E>: Send + Clone + 'static
where
E: ElementType,
{
@@ -57,8 +65,8 @@ where
const MIN_LENGTH: usize = 0;
type AsyncOutput = Self;
type State = crate::renderer::types::Element;
type Cloneable = ();
type CloneableOwned = ();
type Cloneable = PanicOnCloneAttr<Self>;
type CloneableOwned = PanicOnCloneAttr<Self>;

#[inline(always)]
fn html_len(&self) -> usize {
@@ -92,11 +100,17 @@ where
}

fn into_cloneable(self) -> Self::Cloneable {
panic!("node_ref should not be spread across multiple elements.");
PanicOnCloneAttr::new(
self,
"node_ref should not be spread across multiple elements.",
)
}

fn into_cloneable_owned(self) -> Self::Cloneable {
panic!("node_ref should not be spread across multiple elements.");
PanicOnCloneAttr::new(
self,
"node_ref should not be spread across multiple elements.",
)
}

fn dry_resolve(&mut self) {}
@@ -113,13 +127,13 @@ where

crate::renderer::types::Element: PartialEq,
{
type Output<NewAttr: Attribute> = (Self, NewAttr);
next_attr_output_type!(Self, NewAttr);

fn add_any_attr<NewAttr: Attribute>(
self,
new_attr: NewAttr,
) -> Self::Output<NewAttr> {
(self, new_attr)
next_attr_combine!(self, new_attr)
}
}


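The node_ref hunk above replaces the eager panic in `into_cloneable`/`into_cloneable_owned` with a `PanicOnCloneAttr` wrapper, so the conversion itself succeeds and the panic only fires if the attribute is actually cloned (i.e., spread across multiple elements). A hypothetical illustration of that wrapper's idea, not the real implementation:

    // Sketch only: the wrapped attribute stays usable, but cloning it aborts
    // with the stored message.
    struct PanicOnClone<T> {
        inner: T,
        msg: &'static str,
    }

    impl<T> PanicOnClone<T> {
        fn new(inner: T, msg: &'static str) -> Self {
            Self { inner, msg }
        }
    }

    impl<T> Clone for PanicOnClone<T> {
        fn clone(&self) -> Self {
            panic!("{}", self.msg)
        }
    }
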
@@ -1,5 +1,9 @@
use super::attribute::{Attribute, NextAttribute};
use super::attribute::{
maybe_next_attr_erasure_macros::next_attr_output_type, Attribute,
NextAttribute,
};
use crate::{
html::attribute::maybe_next_attr_erasure_macros::next_attr_combine,
renderer::Rndr,
view::{Position, ToTemplate},
};
@@ -127,13 +131,13 @@ where
K: AsRef<str> + Send,
P: IntoProperty,
{
type Output<NewAttr: Attribute> = (Self, NewAttr);
next_attr_output_type!(Self, NewAttr);

fn add_any_attr<NewAttr: Attribute>(
self,
new_attr: NewAttr,
) -> Self::Output<NewAttr> {
(self, new_attr)
next_attr_combine!(self, new_attr)
}
}


@@ -1,7 +1,11 @@
use super::attribute::{Attribute, NextAttribute};
use super::attribute::{
maybe_next_attr_erasure_macros::next_attr_output_type, Attribute,
NextAttribute,
};
#[cfg(feature = "nightly")]
use crate::view::static_types::Static;
use crate::{
html::attribute::maybe_next_attr_erasure_macros::next_attr_combine,
renderer::Rndr,
view::{Position, ToTemplate},
};
@@ -102,13 +106,13 @@ impl<S> NextAttribute for Style<S>
where
S: IntoStyle,
{
type Output<NewAttr: Attribute> = (Self, NewAttr);
next_attr_output_type!(Self, NewAttr);

fn add_any_attr<NewAttr: Attribute>(
self,
new_attr: NewAttr,
) -> Self::Output<NewAttr> {
(self, new_attr)
next_attr_combine!(self, new_attr)
}
}


@@ -21,7 +21,7 @@ pub mod prelude {
OnAttribute, OnTargetAttribute, PropAttribute,
StyleAttribute,
},
IntoAttributeValue,
IntoAttribute, IntoAttributeValue,
},
directive::DirectiveAttribute,
element::{ElementChild, ElementExt, InnerHtmlAttribute},

@@ -1,11 +1,10 @@
use crate::{
html::{
attribute::{Attr, Attribute, AttributeValue},
attribute::{Attr, Attribute, AttributeValue, NextAttribute},
element::{ElementType, ElementWithChildren, HtmlElement},
},
view::Render,
};
use next_tuple::NextTuple;
use std::fmt::Debug;

macro_rules! mathml_global {
@@ -14,13 +13,13 @@ macro_rules! mathml_global {
/// A MathML attribute.
pub fn $attr<V>(self, value: V) -> HtmlElement <
[<$tag:camel>],
<At as NextTuple>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
<At as NextAttribute>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
Ch
>
where
V: AttributeValue,
At: NextTuple,
<At as NextTuple>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>: Attribute,
At: NextAttribute,
<At as NextAttribute>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>: Attribute,
{
let HtmlElement {
#[cfg(any(debug_assertions, leptos_debuginfo))]
@@ -34,7 +33,7 @@ macro_rules! mathml_global {
defined_at,
tag,
children,
attributes: attributes.next_tuple($crate::html::attribute::$attr(value)),
attributes: attributes.add_any_attr($crate::html::attribute::$attr(value)),
}
}
}
@@ -84,13 +83,13 @@ macro_rules! mathml_elements {
/// A MathML attribute.
pub fn $attr<V>(self, value: V) -> HtmlElement <
[<$tag:camel>],
<At as NextTuple>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
<At as NextAttribute>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
Ch
>
where
V: AttributeValue,
At: NextTuple,
<At as NextTuple>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>: Attribute,
At: NextAttribute,
<At as NextAttribute>::Output<Attr<$crate::html::attribute::[<$attr:camel>], V>>: Attribute,
{
let HtmlElement {
#[cfg(any(debug_assertions, leptos_debuginfo))]
@@ -104,7 +103,7 @@ macro_rules! mathml_elements {
defined_at,
tag,
children,
attributes: attributes.next_tuple($crate::html::attribute::$attr(value)),
attributes: attributes.add_any_attr($crate::html::attribute::$attr(value)),
}
}
)*

@@ -1,7 +1,12 @@
use crate::{
dom::{event_target_checked, event_target_value},
html::{
attribute::{Attribute, AttributeKey, AttributeValue, NextAttribute},
attribute::{
maybe_next_attr_erasure_macros::{
next_attr_combine, next_attr_output_type,
},
Attribute, AttributeKey, AttributeValue, NextAttribute,
},
event::{change, input, on},
property::{prop, IntoProperty},
},
@@ -276,13 +281,13 @@ where
W: Update<Value = T> + Clone + Send + 'static,
Element: ChangeEvent + GetValue<T>,
{
type Output<NewAttr: Attribute> = (Self, NewAttr);
next_attr_output_type!(Self, NewAttr);

fn add_any_attr<NewAttr: Attribute>(
self,
new_attr: NewAttr,
) -> Self::Output<NewAttr> {
(self, new_attr)
next_attr_combine!(self, new_attr)
}
}


@@ -58,7 +58,11 @@ where

#[track_caller]
fn build(mut self) -> Self::State {
let hook = throw_error::get_error_hook();
RenderEffect::new(move |prev| {
let _guard = hook
.as_ref()
.map(|h| throw_error::set_error_hook(Arc::clone(h)));
let value = self.invoke();
if let Some(mut state) = prev {
value.rebuild(&mut state);
@@ -175,7 +179,11 @@ where
) -> Self::State {
let cursor = cursor.clone();
let position = position.clone();
let hook = throw_error::get_error_hook();
RenderEffect::new(move |prev| {
let _guard = hook
.as_ref()
.map(|h| throw_error::set_error_hook(Arc::clone(h)));
let value = self.invoke();
if let Some(mut state) = prev {
value.rebuild(&mut state);

@@ -13,7 +13,7 @@ use once_cell::unsync::Lazy;
use rustc_hash::FxHashSet;
use std::{any::TypeId, borrow::Cow, cell::RefCell};
use wasm_bindgen::{intern, prelude::Closure, JsCast, JsValue};
use web_sys::{Comment, HtmlTemplateElement};
use web_sys::{AddEventListenerOptions, Comment, HtmlTemplateElement};

/// A [`Renderer`](crate::renderer::Renderer) that uses `web-sys` to manipulate DOM elements in the browser.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
@@ -245,6 +245,44 @@ impl Dom {
})
}

pub fn add_event_listener_use_capture(
el: &Element,
name: &str,
cb: Box<dyn FnMut(Event)>,
) -> RemoveEventHandler<Element> {
let cb = wasm_bindgen::closure::Closure::wrap(cb);
let name = intern(name);
let options = AddEventListenerOptions::new();
options.set_capture(true);
or_debug!(
el.add_event_listener_with_callback_and_add_event_listener_options(
name,
cb.as_ref().unchecked_ref(),
&options
),
el,
"addEventListenerUseCapture"
);

// return the remover
RemoveEventHandler::new({
let name = name.to_owned();
// safe to construct this here, because it will only run in the browser
// so it will always be accessed or dropped from the main thread
let cb = send_wrapper::SendWrapper::new(cb);
move |el: &Element| {
or_debug!(
el.remove_event_listener_with_callback(
intern(&name),
cb.as_ref().unchecked_ref()
),
el,
"removeEventListener"
)
}
})
}

pub fn event_target<T>(ev: &Event) -> T
where
T: CastFrom<Element>,

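The new `add_event_listener_use_capture` renderer method is a thin wrapper over the standard `web-sys` options API. A stripped-down sketch of the same pattern outside the renderer (assumes a `wasm-bindgen`/`web-sys` target; `el` and the event name are placeholders):

    use wasm_bindgen::{closure::Closure, JsCast};
    use web_sys::AddEventListenerOptions;

    fn listen_capture(el: &web_sys::Element) {
        let cb = Closure::<dyn FnMut(web_sys::Event)>::new(|_ev: web_sys::Event| {
            // handle the event during the capture phase
        });
        let options = AddEventListenerOptions::new();
        options.set_capture(true);
        el.add_event_listener_with_callback_and_add_event_listener_options(
            "click",
            cb.as_ref().unchecked_ref(),
            &options,
        )
        .expect("failed to add listener");
        // Keep the closure alive; real code would store the Closure (as the
        // renderer does via RemoveEventHandler) rather than leaking it.
        cb.forget();
    }
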
@@ -40,13 +40,13 @@ macro_rules! svg_elements {
$(
pub fn $attr<V>(self, value: V) -> HtmlElement <
[<$tag:camel>],
<At as NextTuple<Attr<$crate::html::attribute::[<$attr:camel>], V>>>::Output,
<At as $crate::html::attribute::NextAttribute<Attr<$crate::html::attribute::[<$attr:camel>], V>>>::Output,
Ch
>
where
V: AttributeValue,
At: NextTuple<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
<At as NextTuple<Attr<$crate::html::attribute::[<$attr:camel>], V>>>::Output: Attribute,
At: $crate::html::attribute::NextAttribute<Attr<$crate::html::attribute::[<$attr:camel>], V>>,
<At as $crate::html::attribute::NextAttribute<Attr<$crate::html::attribute::[<$attr:camel>], V>>>::Output: Attribute,
{
let HtmlElement { tag, children, attributes,
#[cfg(any(debug_assertions, leptos_debuginfo))]
@@ -56,7 +56,7 @@ macro_rules! svg_elements {
tag,

children,
attributes: attributes.next_tuple($crate::html::attribute::$attr(value)),
attributes: attributes.add_any_attr($crate::html::attribute::$attr(value)),
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at
}

@@ -29,6 +29,12 @@ pub struct AnyView {
value: Box<dyn Any + Send>,
build: fn(Box<dyn Any>) -> AnyViewState,
rebuild: fn(TypeId, Box<dyn Any>, &mut AnyViewState),
// Without erasure, tuples of attrs created by default cause too much type explosion to enable.
#[cfg(erase_components)]
add_any_attr: fn(
Box<dyn Any>,
crate::html::attribute::any_attribute::AnyAttribute,
) -> AnyView,
// The fields below are cfg-gated so they will not be included in WASM bundles if not needed.
// Ordinarily, the compiler can simply omit this dead code because the methods are not called.
// With this type-erased wrapper, however, the compiler is not *always* able to correctly
@@ -128,9 +134,6 @@ where
T: RenderHtml + 'static,
T::State: 'static,
{
// inlining allows the compiler to remove the unused functions
// i.e., doesn't ship HTML-generating code that isn't used
#[inline(always)]
fn into_any(self) -> AnyView {
#[cfg(feature = "ssr")]
let html_len = self.html_len();
@@ -282,11 +285,23 @@ where
}
};

// Without erasure, tuples of attrs created by default cause too much type explosion to enable.
#[cfg(erase_components)]
let add_any_attr = |value: Box<dyn Any>, attr: crate::html::attribute::any_attribute::AnyAttribute| {
let value = value
.downcast::<T>()
.expect("AnyView::add_any_attr could not be downcast");
value.add_any_attr(attr).into_any()
};

AnyView {
type_id: TypeId::of::<T>(),
value,
build,
rebuild,
// Without erasure, tuples of attrs created by default cause too much type explosion to enable.
#[cfg(erase_components)]
add_any_attr,
#[cfg(feature = "ssr")]
resolve,
#[cfg(feature = "ssr")]
@@ -322,14 +337,26 @@ impl Render for AnyView {
impl AddAnyAttr for AnyView {
type Output<SomeNewAttr: Attribute> = Self;

#[allow(unused_variables)]
fn add_any_attr<NewAttr: Attribute>(
self,
_attr: NewAttr,
attr: NewAttr,
) -> Self::Output<NewAttr>
where
Self::Output<NewAttr>: RenderHtml,
{
self
// Without erasure, tuples of attrs created by default cause too much type explosion to enable.
#[cfg(erase_components)]
{
use crate::html::attribute::any_attribute::IntoAnyAttribute;

let attr = attr.into_cloneable_owned();
(self.add_any_attr)(self.value, attr.into_any_attr())
}
#[cfg(not(erase_components))]
{
self
}
}
}


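With the new `add_any_attr` function pointer, an attribute added after a view has been type-erased is forwarded to the inner view when `erase_components` is enabled, instead of being dropped as before. A rough usage sketch (import paths are approximate and `some_view()` is a placeholder for any statically typed view):

    use tachys::html::attribute::id;

    // `into_any()` erases the concrete view type; under `erase_components`,
    // `add_any_attr` now pushes the attribute through to the erased view.
    let erased = some_view().into_any();
    let with_id = erased.add_any_attr(id("sidebar"));
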
@@ -3,7 +3,9 @@ use super::{
Render, RenderHtml,
};
use crate::{
html::attribute::Attribute, hydration::Cursor, ssr::StreamBuilder,
html::attribute::{Attribute, NextAttribute},
hydration::Cursor,
ssr::StreamBuilder,
};
use either_of::*;
use futures::future::join;
@@ -114,6 +116,150 @@ const fn max_usize(vals: &[usize]) -> usize {
max
}

#[cfg(not(erase_components))]
impl<A, B> NextAttribute for Either<A, B>
where
B: NextAttribute,
A: NextAttribute,
{
type Output<NewAttr: Attribute> = Either<
<A as NextAttribute>::Output<NewAttr>,
<B as NextAttribute>::Output<NewAttr>,
>;

fn add_any_attr<NewAttr: Attribute>(
self,
new_attr: NewAttr,
) -> Self::Output<NewAttr> {
match self {
Either::Left(left) => Either::Left(left.add_any_attr(new_attr)),
Either::Right(right) => Either::Right(right.add_any_attr(new_attr)),
}
}
}

#[cfg(erase_components)]
use crate::html::attribute::any_attribute::{AnyAttribute, IntoAnyAttribute};

#[cfg(erase_components)]
impl<A, B> NextAttribute for Either<A, B>
where
B: IntoAnyAttribute,
A: IntoAnyAttribute,
{
type Output<NewAttr: Attribute> = Vec<AnyAttribute>;

fn add_any_attr<NewAttr: Attribute>(
self,
new_attr: NewAttr,
) -> Self::Output<NewAttr> {
vec![
match self {
Either::Left(left) => left.into_any_attr(),
Either::Right(right) => right.into_any_attr(),
},
new_attr.into_any_attr(),
]
}
}

impl<A, B> Attribute for Either<A, B>
where
B: Attribute,
A: Attribute,
{
const MIN_LENGTH: usize = max_usize(&[A::MIN_LENGTH, B::MIN_LENGTH]);

type AsyncOutput = Either<A::AsyncOutput, B::AsyncOutput>;
type State = Either<A::State, B::State>;
type Cloneable = Either<A::Cloneable, B::Cloneable>;
type CloneableOwned = Either<A::CloneableOwned, B::CloneableOwned>;

fn html_len(&self) -> usize {
match self {
Either::Left(left) => left.html_len(),
Either::Right(right) => right.html_len(),
}
}

fn to_html(
self,
buf: &mut String,
class: &mut String,
style: &mut String,
inner_html: &mut String,
) {
match self {
Either::Left(left) => left.to_html(buf, class, style, inner_html),
Either::Right(right) => {
right.to_html(buf, class, style, inner_html)
}
}
}

fn hydrate<const FROM_SERVER: bool>(
self,
el: &crate::renderer::types::Element,
) -> Self::State {
match self {
Either::Left(left) => Either::Left(left.hydrate::<FROM_SERVER>(el)),
Either::Right(right) => {
Either::Right(right.hydrate::<FROM_SERVER>(el))
}
}
}

fn build(self, el: &crate::renderer::types::Element) -> Self::State {
match self {
Either::Left(left) => Either::Left(left.build(el)),
Either::Right(right) => Either::Right(right.build(el)),
}
}

fn rebuild(self, state: &mut Self::State) {
match self {
Either::Left(left) => {
if let Some(state) = state.as_left_mut() {
left.rebuild(state)
}
}
Either::Right(right) => {
if let Some(state) = state.as_right_mut() {
right.rebuild(state)
}
}
}
}

fn into_cloneable(self) -> Self::Cloneable {
match self {
Either::Left(left) => Either::Left(left.into_cloneable()),
Either::Right(right) => Either::Right(right.into_cloneable()),
}
}

fn into_cloneable_owned(self) -> Self::CloneableOwned {
match self {
Either::Left(left) => Either::Left(left.into_cloneable_owned()),
Either::Right(right) => Either::Right(right.into_cloneable_owned()),
}
}

fn dry_resolve(&mut self) {
match self {
Either::Left(left) => left.dry_resolve(),
Either::Right(right) => right.dry_resolve(),
}
}

async fn resolve(self) -> Self::AsyncOutput {
match self {
Either::Left(left) => Either::Left(left.resolve().await),
Either::Right(right) => Either::Right(right.resolve().await),
}
}
}

impl<A, B> RenderHtml for Either<A, B>
where
A: RenderHtml,

@@ -3,7 +3,12 @@ use super::{
RenderHtml, ToTemplate,
};
use crate::{
html::attribute::{Attribute, AttributeKey, AttributeValue, NextAttribute},
html::attribute::{
maybe_next_attr_erasure_macros::{
next_attr_combine, next_attr_output_type,
},
Attribute, AttributeKey, AttributeValue, NextAttribute,
},
hydration::Cursor,
renderer::{CastFrom, Rndr},
};
@@ -111,13 +116,13 @@ impl<K, const V: &'static str> NextAttribute for StaticAttr<K, V>
where
K: AttributeKey,
{
type Output<NewAttr: Attribute> = (Self, NewAttr);
next_attr_output_type!(Self, NewAttr);

fn add_any_attr<NewAttr: Attribute>(
self,
new_attr: NewAttr,
) -> Self::Output<NewAttr> {
(StaticAttr::<K, V> { ty: PhantomData }, new_attr)
next_attr_combine!(StaticAttr::<K, V> { ty: PhantomData }, new_attr)
}
}