Compare commits

...

47 Commits

Author SHA1 Message Date
Greg Johnston
9ff58d5afd docs: add note about file hashing in Stylesheet docs 2025-04-28 21:00:48 -04:00
Greg Johnston
e2ea4277bc fix(examples): broken favicons in hackernews examples (closes #3890) (#3891) 2025-04-28 09:52:15 -04:00
Greg Johnston
171c8e7ff7 v0.8.0-rc3 2025-04-26 15:53:20 -04:00
Greg Johnston
53ffbeeb67 chore: fmt 2025-04-26 15:49:32 -04:00
Greg Johnston
ee86844077 Merge pull request #3882 from leptos-rs/3871
fix: remove event listeners from Suspense fallback during SSR (closes #3871)
2025-04-26 12:48:45 -04:00
Greg Johnston
1cee3f2f52 Merge branch 'main' into 3871 2025-04-26 12:48:40 -04:00
Greg Johnston
23c89dbfe1 Merge pull request #3878 from leptos-rs/3828
fix: correctly calculate starting index for first new key (closes #3828)
2025-04-26 12:48:12 -04:00
Greg Johnston
9f71f39f89 Merge branch 'main' into 3828 2025-04-26 12:47:56 -04:00
Greg Johnston
ef1d0f108a Merge pull request #3880 from sabify/websocket-example
fix(examples): websocket example tests fail on latency
2025-04-26 12:47:17 -04:00
Greg Johnston
a7a78317b7 Merge pull request #3879 from huuff/prelude-textprop
chore: put `TextProp` in the prelude (closes #3877)
2025-04-26 12:46:53 -04:00
autofix-ci[bot]
5005cc3587 [autofix.ci] apply automated fixes (attempt 2/3) 2025-04-26 16:15:06 +00:00
autofix-ci[bot]
08708f3388 [autofix.ci] apply automated fixes 2025-04-26 16:03:45 +00:00
Greg Johnston
c19c1b32f1 fix: remove event listeners from Suspense fallback during SSR (closes #3871) 2025-04-26 11:48:59 -04:00
Saber Haj Rabiee
e70cc08e96 fix(examples): websocket example tests fail on latency 2025-04-26 07:06:50 -07:00
autofix-ci[bot]
97175663ef [autofix.ci] apply automated fixes 2025-04-26 00:05:33 +00:00
Haf
92524a93cd chore: put TextProp in the prelude (closes #3877) 2025-04-25 23:50:08 +02:00
autofix-ci[bot]
9449f41ca9 [autofix.ci] apply automated fixes (attempt 2/3) 2025-04-25 20:31:56 +00:00
autofix-ci[bot]
d979055b70 [autofix.ci] apply automated fixes 2025-04-25 20:18:03 +00:00
Greg Johnston
97686f71a5 fix: improve support for keyed fields in ArcField<_> 2025-04-25 16:04:01 -04:00
Greg Johnston
06a0c768dc fix: correctly calculate starting index for first new key (closes #3828) 2025-04-25 15:57:30 -04:00
Saber Haj Rabiee
fff6a508fc feat(examples): add WebSocket example (#3853) 2025-04-24 20:17:08 -04:00
Greg Johnston
e65fc23fc7 fix: prevent infinite loop when sending Result over websocket, remove Display bound (#3848)
* chore: easing `Display` bound on `FromServerFnError`, #3811 follow-up

* fix: send/receive websocket data

* fix: clippy warnings

* fix: server_fn_axum example

* fix: make de/serialize_result functions public

* fix: make websocket result ser/de private

* chore: make the doc a comment and remove allow dead_code
2025-04-23 07:52:42 -04:00
Saber Haj Rabiee
f83b14d76c feat: enhancing ByteStream error handling (#3869)
* feat: enhancing `ByteStream` error handling

* fix: easing unnecessary trait bound over some `TextStream` methods
2025-04-23 07:38:39 -04:00
zakstucke
62dac6fb8a fix: prevent ScopedFuture stopping owner cleanup (#3863) 2025-04-23 07:37:12 -04:00
Rakshith Ravi
b36dec8269 feat: add header generation method to BrowserResponse (#3873) 2025-04-23 07:33:18 -04:00
Fea
0c50852251 fix: Use stabilized ClipboardEvent (#3849) 2025-04-23 07:27:24 -04:00
Nikolai Morin
50cb6005a8 chore(examples): complete the migration to Tailwind 4 (#3861)
The tailwind_csr example was not working yet with version 4, and the
tailwind_actix example still had the no-longer-needed config file.

This also brings the three tailwind examples back in sync, mostly. I
don't know why the axum example has a packages.json and
packages-lock.json file, to be honest.
2025-04-23 07:26:56 -04:00
Sathish
b725291ce9 chore: forward lint attributes used with #[component] macro (#3864)
* Forward lint attributes used with #[component] macro

* [autofix.ci] apply automated fixes

---------

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-04-23 07:26:26 -04:00
Greg Johnston
ed6d45d92d Merge pull request #3870 from leptos-rs/3851
Error boundary fixes
2025-04-23 07:25:41 -04:00
Greg Johnston
73b5587738 fix: correctly insert out-of-order stream chunks when Ok 2025-04-21 13:12:30 -04:00
Greg Johnston
68813a5918 fix: clear old errors when going from error state to error state (closes #3850) 2025-04-21 12:10:47 -04:00
Greg Johnston
8f6a96341e fix: wait for any inner Suspense before rendering ErrorBoundary (closes #3851) 2025-04-21 09:29:33 -04:00
Greg Johnston
046d5286c3 fix: correctly flush synchronous parts of SSR stream when appending another StreamBuilder 2025-04-21 09:28:14 -04:00
Greg Johnston
b45f982feb fix: close Actix websocket stream when browser disconnects (closes #3865) (#3866) 2025-04-19 16:47:09 -04:00
Greg Johnston
2b50ddc0db v0.8.0-rc2 2025-04-18 08:43:16 -04:00
Saber Haj Rabiee
c743f0641c chore: make the doc a comment and remove allow dead_code 2025-04-17 06:45:40 -07:00
Saber Haj Rabiee
078c252e2e fix: make websocket result ser/de private 2025-04-17 03:38:04 -07:00
martin frances
410aedbba8 chore: ran cargo outdated. (#3722)
* chore: ran cargo outdated fixed issues.

In bumping the rand crate, calls to thread_rng() becomes rng().

* backed out changes to rand.
2025-04-16 16:42:01 -07:00
Saber Haj Rabiee
00e474599f fix: reactive_graph keymap impl and clippy warnings (#3843) 2025-04-16 16:39:07 -07:00
Greg Johnston
8f38559aa2 chore(nightly): update proc-macro span file name method name (#3852) 2025-04-16 16:35:59 -07:00
Greg Johnston
3934c8b162 Update issue templates 2025-04-16 09:15:10 -04:00
Saber Haj Rabiee
de3a558203 fix: make de/serialize_result functions public 2025-04-15 09:24:43 -07:00
Saber Haj Rabiee
4d20105760 fix: server_fn_axum example 2025-04-15 09:23:32 -07:00
Saber Haj Rabiee
b95e827b8b fix: clippy warnings 2025-04-15 08:31:42 -07:00
Saber Haj Rabiee
30c445a419 fix: send/receive websocket data 2025-04-15 07:43:47 -07:00
Saber Haj Rabiee
6d5ab73594 chore: easing Display bound on FromServerFnError, #3811 follow-up 2025-04-14 23:52:02 -07:00
Greg Johnston
e0bf5ec480 fix: don't try to move keyed elements within the DOM if they're not yet mounted (closes #3844) (#3846) 2025-04-14 20:06:31 -04:00
86 changed files with 1284 additions and 415 deletions

View File

@@ -29,11 +29,14 @@ Steps to reproduce the behavior:
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Next Steps**
[ ] I will make a PR
[ ] I would like to make a PR, but need help getting started
[ ] I want someone else to take the time to fix this
[ ] This is a low priority for me and is just shared for your information
**Additional context**
Add any other context about the problem here.

96
Cargo.lock generated
View File

@@ -263,7 +263,7 @@ dependencies = [
[[package]]
name = "any_spawner"
version = "0.3.0-rc1"
version = "0.3.0-rc3"
dependencies = [
"async-executor",
"futures",
@@ -687,9 +687,9 @@ dependencies = [
[[package]]
name = "const-str"
version = "0.5.7"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3618cccc083bb987a415d85c02ca6c9994ea5b44731ec28b9ecf09658655fba9"
checksum = "9e991226a70654b49d34de5ed064885f0bef0348a8e70018b8ff1ac80aa984a2"
[[package]]
name = "const_format"
@@ -732,9 +732,9 @@ dependencies = [
[[package]]
name = "convert_case"
version = "0.7.1"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7"
checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f"
dependencies = [
"unicode-segmentation",
]
@@ -1781,7 +1781,7 @@ checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
[[package]]
name = "leptos"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"any_spawner",
"base64",
@@ -1813,8 +1813,8 @@ dependencies = [
"thiserror 2.0.12",
"throw_error",
"tracing",
"typed-builder",
"typed-builder-macro",
"typed-builder 0.21.0",
"typed-builder-macro 0.21.0",
"wasm-bindgen",
"web-sys",
]
@@ -1833,7 +1833,7 @@ dependencies = [
[[package]]
name = "leptos_actix"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"actix-files",
"actix-http",
@@ -1859,7 +1859,7 @@ dependencies = [
[[package]]
name = "leptos_axum"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"any_spawner",
"axum",
@@ -1883,7 +1883,7 @@ dependencies = [
[[package]]
name = "leptos_config"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"config",
"regex",
@@ -1892,12 +1892,12 @@ dependencies = [
"tempfile",
"thiserror 2.0.12",
"tokio",
"typed-builder",
"typed-builder 0.21.0",
]
[[package]]
name = "leptos_dom"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"js-sys",
"leptos",
@@ -1914,7 +1914,7 @@ dependencies = [
[[package]]
name = "leptos_hot_reload"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"anyhow",
"camino",
@@ -1930,7 +1930,7 @@ dependencies = [
[[package]]
name = "leptos_integration_utils"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"futures",
"hydration_context",
@@ -1943,11 +1943,11 @@ dependencies = [
[[package]]
name = "leptos_macro"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"attribute-derive",
"cfg-if",
"convert_case 0.7.1",
"convert_case 0.8.0",
"html-escape",
"insta",
"itertools",
@@ -1963,17 +1963,17 @@ dependencies = [
"rustc_version",
"serde",
"server_fn",
"server_fn_macro 0.8.0-rc1",
"server_fn_macro 0.8.0-rc3",
"syn 2.0.100",
"tracing",
"trybuild",
"typed-builder",
"typed-builder 0.20.1",
"uuid",
]
[[package]]
name = "leptos_meta"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"futures",
"indexmap",
@@ -1988,7 +1988,7 @@ dependencies = [
[[package]]
name = "leptos_router"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"any_spawner",
"either_of",
@@ -2013,7 +2013,7 @@ dependencies = [
[[package]]
name = "leptos_router_macro"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"leptos_macro",
"leptos_router",
@@ -2025,7 +2025,7 @@ dependencies = [
[[package]]
name = "leptos_server"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"any_spawner",
"base64",
@@ -2759,7 +2759,7 @@ dependencies = [
[[package]]
name = "reactive_graph"
version = "0.2.0-rc1"
version = "0.2.0-rc3"
dependencies = [
"any_spawner",
"async-lock",
@@ -2782,9 +2782,10 @@ dependencies = [
[[package]]
name = "reactive_stores"
version = "0.2.0-rc1"
version = "0.2.0-rc3"
dependencies = [
"any_spawner",
"dashmap",
"guardian",
"itertools",
"leptos",
@@ -2793,15 +2794,16 @@ dependencies = [
"reactive_graph",
"reactive_stores_macro",
"rustc-hash 2.1.1",
"send_wrapper",
"tokio",
"tokio-test",
]
[[package]]
name = "reactive_stores_macro"
version = "0.2.0-rc1"
version = "0.2.0-rc3"
dependencies = [
"convert_case 0.7.1",
"convert_case 0.8.0",
"proc-macro-error2",
"proc-macro2",
"quote",
@@ -3239,13 +3241,13 @@ dependencies = [
[[package]]
name = "serde_qs"
version = "0.13.0"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd34f36fe4c5ba9654417139a9b3a20d2e1de6012ee678ad14d240c22c78d8d6"
checksum = "8b417bedc008acbdf6d6b4bc482d29859924114bbe2650b7921fb68a261d0aa6"
dependencies = [
"percent-encoding",
"serde",
"thiserror 1.0.69",
"thiserror 2.0.12",
]
[[package]]
@@ -3296,7 +3298,7 @@ dependencies = [
[[package]]
name = "server_fn"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"actix-web",
"actix-ws",
@@ -3359,10 +3361,10 @@ dependencies = [
[[package]]
name = "server_fn_macro"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"const_format",
"convert_case 0.6.0",
"convert_case 0.8.0",
"proc-macro2",
"quote",
"rustc_version",
@@ -3372,9 +3374,9 @@ dependencies = [
[[package]]
name = "server_fn_macro_default"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
dependencies = [
"server_fn_macro 0.8.0-rc1",
"server_fn_macro 0.8.0-rc3",
"syn 2.0.100",
]
@@ -3568,7 +3570,7 @@ dependencies = [
[[package]]
name = "tachys"
version = "0.2.0-rc1"
version = "0.2.0-rc3"
dependencies = [
"any_spawner",
"async-trait",
@@ -4015,7 +4017,16 @@ version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd9d30e3a08026c78f246b173243cf07b3696d274debd26680773b6773c2afc7"
dependencies = [
"typed-builder-macro",
"typed-builder-macro 0.20.1",
]
[[package]]
name = "typed-builder"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce63bcaf7e9806c206f7d7b9c1f38e0dce8bb165a80af0898161058b19248534"
dependencies = [
"typed-builder-macro 0.21.0",
]
[[package]]
@@ -4029,6 +4040,17 @@ dependencies = [
"syn 2.0.100",
]
[[package]]
name = "typed-builder-macro"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60d8d828da2a3d759d3519cdf29a5bac49c77d039ad36d0782edadbf9cd5415b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.100",
]
[[package]]
name = "typenum"
version = "1.18.0"

View File

@@ -40,41 +40,44 @@ members = [
exclude = ["benchmarks", "examples", "projects"]
[workspace.package]
version = "0.8.0-rc1"
version = "0.8.0-rc3"
edition = "2021"
rust-version = "1.76"
[workspace.dependencies]
convert_case = "0.8"
throw_error = { path = "./any_error/", version = "0.3.0" }
any_spawner = { path = "./any_spawner/", version = "0.3.0-rc1" }
any_spawner = { path = "./any_spawner/", version = "0.3.0-rc3" }
const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
either_of = { path = "./either_of/", version = "0.1.5" }
hydration_context = { path = "./hydration_context", version = "0.3.0" }
itertools = "0.14.0"
leptos = { path = "./leptos", version = "0.8.0-rc1" }
leptos_config = { path = "./leptos_config", version = "0.8.0-rc1" }
leptos_dom = { path = "./leptos_dom", version = "0.8.0-rc1" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.0-rc1" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.8.0-rc1" }
leptos_macro = { path = "./leptos_macro", version = "0.8.0-rc1" }
leptos_router = { path = "./router", version = "0.8.0-rc1" }
leptos_router_macro = { path = "./router_macro", version = "0.8.0-rc1" }
leptos_server = { path = "./leptos_server", version = "0.8.0-rc1" }
leptos_meta = { path = "./meta", version = "0.8.0-rc1" }
leptos = { path = "./leptos", version = "0.8.0-rc3" }
leptos_config = { path = "./leptos_config", version = "0.8.0-rc3" }
leptos_dom = { path = "./leptos_dom", version = "0.8.0-rc3" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.0-rc3" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.8.0-rc3" }
leptos_macro = { path = "./leptos_macro", version = "0.8.0-rc3" }
leptos_router = { path = "./router", version = "0.8.0-rc3" }
leptos_router_macro = { path = "./router_macro", version = "0.8.0-rc3" }
leptos_server = { path = "./leptos_server", version = "0.8.0-rc3" }
leptos_meta = { path = "./meta", version = "0.8.0-rc3" }
next_tuple = { path = "./next_tuple", version = "0.1.0" }
oco_ref = { path = "./oco", version = "0.2.0" }
or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
reactive_graph = { path = "./reactive_graph", version = "0.2.0-rc1" }
reactive_stores = { path = "./reactive_stores", version = "0.2.0-rc1" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.0-rc1" }
reactive_graph = { path = "./reactive_graph", version = "0.2.0-rc3" }
reactive_stores = { path = "./reactive_stores", version = "0.2.0-rc3" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.0-rc3" }
rustversion = "1"
serde_json = "1.0.0"
server_fn = { path = "./server_fn", version = "0.8.0-rc1" }
server_fn_macro = { path = "./server_fn_macro", version = "0.8.0-rc1" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.0-rc1" }
tachys = { path = "./tachys", version = "0.2.0-rc1" }
server_fn = { path = "./server_fn", version = "0.8.0-rc3" }
server_fn_macro = { path = "./server_fn_macro", version = "0.8.0-rc3" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.0-rc3" }
tachys = { path = "./tachys", version = "0.2.0-rc3" }
trybuild = "1"
wasm-bindgen = { version = "0.2.100" }
typed-builder = "0.21.0"
thiserror = "2.0.12"
wasm-bindgen = "0.2.100"
[profile.release]
codegen-units = 1

View File

@@ -1,6 +1,6 @@
[package]
name = "any_spawner"
version = "0.3.0-rc1"
version = "0.3.0-rc3"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
@@ -12,7 +12,7 @@ edition.workspace = true
async-executor = { version = "1.13.1", optional = true }
futures = "0.3.31"
glib = { version = "0.20.6", optional = true }
thiserror = "2.0"
thiserror = { workspace = true }
tokio = { version = "1.41", optional = true, default-features = false, features = [
"rt",
] }

View File

@@ -472,9 +472,8 @@ fn handle_uninitialized_spawn(_fut: PinnedFuture<()>) {
#[cfg(all(debug_assertions, not(feature = "tracing")))]
{
panic!(
"At {}, tried to spawn a Future with Executor::spawn() before a \
global executor was initialized.",
caller
"At {caller}, tried to spawn a Future with Executor::spawn() \
before a global executor was initialized."
);
}
// In release builds (without tracing), call the specific no-op function.
@@ -503,9 +502,8 @@ fn handle_uninitialized_spawn_local(_fut: PinnedLocalFuture<()>) {
#[cfg(all(debug_assertions, not(feature = "tracing")))]
{
panic!(
"At {}, tried to spawn a Future with Executor::spawn_local() \
before a global executor was initialized.",
caller
"At {caller}, tried to spawn a Future with \
Executor::spawn_local() before a global executor was initialized."
);
}
// In release builds (without tracing), call the specific no-op function (which usually panics).

View File

@@ -1,7 +1,7 @@
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
use axum::Router;
use axum::{routing::get, Router};
use hackernews_axum::{shell, App};
use leptos::config::get_configuration;
use leptos_axum::{generate_route_list, LeptosRoutes};
@@ -13,6 +13,15 @@ async fn main() {
// build our application with a route
let app = Router::new()
.route(
"/favicon.ico",
get(|| async {
(
[("content-type", "image/x-icon")],
include_bytes!("../public/favicon.ico"),
)
}),
)
.leptos_routes(&leptos_options, routes, {
let leptos_options = leptos_options.clone();
move || shell(leptos_options.clone())

View File

@@ -1,6 +1,7 @@
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
use axum::routing::get;
pub use axum::Router;
use hackernews_islands::*;
pub use leptos::config::get_configuration;
@@ -25,6 +26,7 @@ async fn main() {
// build our application with a route
let app = Router::new()
.route("/favicon.ico", get(fallback::file_and_error_handler))
.leptos_routes(&leptos_options, routes, {
let leptos_options = leptos_options.clone();
move || shell(leptos_options.clone())

View File

@@ -945,9 +945,7 @@ pub fn CustomClientExample() -> impl IntoView {
Item = Result<server_fn::Bytes, server_fn::Bytes>,
> + Send
+ 'static,
impl Sink<Result<server_fn::Bytes, server_fn::Bytes>>
+ Send
+ 'static,
impl Sink<server_fn::Bytes> + Send + 'static,
),
E,
>,

View File

@@ -57,7 +57,7 @@ site-pkg-dir = "pkg"
# The tailwind input file.
#
# Optional, Activates the tailwind build
tailwind-input-file = "style/tailwind.css"
tailwind-input-file = "input.css"
assets-dir = "public"
# The IP and port (ex: 127.0.0.1:3000) where the server serves the content. Use it in your server setup.
site-addr = "127.0.0.1:3000"

View File

@@ -1,13 +0,0 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
content: {
files: ["*.html", "./src/**/*.rs"],
transform: {
rs: (content) => content.replace(/(?:^|\s)class:/g, ' '),
},
},
theme: {
extend: {},
},
plugins: [],
}

View File

@@ -4,7 +4,7 @@
<meta charset="utf-8" />
<link data-trunk rel="rust" data-wasm-opt="z" />
<link data-trunk rel="icon" type="image/ico" href="/public/favicon.ico" />
<link data-trunk rel="tailwind-css" href="/style/tailwind.css" />
<link data-trunk rel="tailwind-css" href="input.css" />
<title>Leptos • Counter with Tailwind</title>
</head>

View File

@@ -0,0 +1 @@
@import "tailwindcss";

View File

@@ -1,3 +0,0 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

View File

@@ -1,13 +0,0 @@
/** @type {import('tailwindcss').Config} */
module.exports = {
content: {
files: ["*.html", "./src/**/*.rs"],
transform: {
rs: (content) => content.replace(/(?:^|\s)class:/g, ' '),
},
},
theme: {
extend: {},
},
plugins: [],
}

View File

@@ -0,0 +1,74 @@
[package]
name = "websocket"
version = "0.1.0"
edition = "2021"
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
console_log = "1.0"
console_error_panic_hook = "0.1.7"
futures = "0.3.30"
leptos = { path = "../../leptos", features = ["tracing"] }
leptos_axum = { path = "../../integrations/axum", optional = true }
log = "0.4.22"
simple_logger = "5.0"
serde = { version = "1.0", features = ["derive"] }
axum = { version = "0.8.1", optional = true }
tokio = { version = "1.39", features = ["full"], optional = true }
thiserror = "2.0"
wasm-bindgen = "0.2.100"
[features]
hydrate = ["leptos/hydrate"]
ssr = ["dep:axum", "dep:tokio", "leptos/ssr", "dep:leptos_axum"]
[package.metadata.cargo-all-features]
denylist = ["axum", "tokio", "leptos_axum"]
skip_feature_sets = [["csr", "ssr"], ["csr", "hydrate"], ["ssr", "hydrate"], []]
[package.metadata.leptos]
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
output-name = "websocket"
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
site-root = "target/site"
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
# Defaults to pkg
site-pkg-dir = "pkg"
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
style-file = "./style.css"
# [Optional] Files in the asset-dir will be copied to the site-root directory
assets-dir = "public"
# The IP and port (ex: 127.0.0.1:3000) where the server serves the content. Use it in your server setup.
site-addr = "127.0.0.1:3000"
# The port to use for automatic reload monitoring
reload-port = 3001
# [Optional] Command to use when running end2end tests. It will run in the end2end dir.
end2end-cmd = "cargo make test-ui"
end2end-dir = "e2e"
# The browserlist query used for optimizing the CSS.
browserquery = "defaults"
# Set by cargo-leptos watch when building with that tool. Controls whether autoreload JS will be included in the head
watch = false
# The environment Leptos will run in, usually either "DEV" or "PROD"
env = "DEV"
# The features to use when compiling the bin target
#
# Optional. Can be over-ridden with the command line parameter --bin-features
bin-features = ["ssr"]
# If the --no-default-features flag should be used when compiling the bin target
#
# Optional. Defaults to false.
bin-default-features = false
# The features to use when compiling the lib target
#
# Optional. Can be over-ridden with the command line parameter --lib-features
lib-features = ["hydrate"]
# If the --no-default-features flag should be used when compiling the lib target
#
# Optional. Defaults to false.
lib-default-features = false

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022 Greg Johnston
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,12 @@
extend = [
{ path = "../cargo-make/main.toml" },
{ path = "../cargo-make/cargo-leptos-webdriver-test.toml" },
]
[env]
CLIENT_PROCESS_NAME = "websocket"
[tasks.test-ui]
cwd = "./e2e"
command = "cargo"
args = ["make", "test-ui", "${@}"]

View File

@@ -0,0 +1,19 @@
# Leptos WebSocket
This example creates a basic WebSocket echo app.
## Getting Started
See the [Examples README](../README.md) for setup and run instructions.
## E2E Testing
See the [E2E README](./e2e/README.md) for more information about the testing strategy.
## Rendering
See the [SSR Notes](../SSR_NOTES.md) for more information about Server Side Rendering.
## Quick Start
Run `cargo leptos watch` to run this example.

View File

@@ -0,0 +1,18 @@
[package]
name = "websocket_e2e"
version = "0.1.0"
edition = "2021"
[dev-dependencies]
anyhow = "1.0"
async-trait = "0.1.81"
cucumber = "0.21.1"
fantoccini = "0.21.1"
pretty_assertions = "1.4"
serde_json = "1.0"
tokio = { version = "1.39", features = ["macros", "rt-multi-thread", "time"] }
url = "2.5"
[[test]]
name = "app_suite"
harness = false # Allow Cucumber to print output instead of libtest

View File

@@ -0,0 +1,20 @@
extend = { path = "../../cargo-make/main.toml" }
[tasks.test]
env = { RUN_AUTOMATICALLY = false }
condition = { env_true = ["RUN_AUTOMATICALLY"] }
[tasks.ci]
[tasks.test-ui]
command = "cargo"
args = [
"test",
"--test",
"app_suite",
"--",
"--retry",
"2",
"--fail-fast",
"${@}",
]

View File

@@ -0,0 +1,34 @@
# E2E Testing
This example demonstrates e2e testing with Rust using executable requirements.
## Testing Stack
| | Role | Description |
|---|---|---|
| [Cucumber](https://github.com/cucumber-rs/cucumber/tree/main) | Test Runner | Run [Gherkin](https://cucumber.io/docs/gherkin/reference/) specifications as Rust tests |
| [Fantoccini](https://github.com/jonhoo/fantoccini/tree/main) | Browser Client | Interact with web pages through WebDriver |
| [Cargo Leptos](https://github.com/leptos-rs/cargo-leptos) | Build Tool | Compile example and start the server and end-2-end tests |
| [chromedriver](https://chromedriver.chromium.org/downloads) | WebDriver | Provide WebDriver for Chrome |
## Testing Organization
Testing is organized around what a user can do and see/not see. Test scenarios are grouped by the **user action** and the **object** of that action. This makes it easier to locate and reason about requirements.
Here is a brief overview of how things fit together.
```bash
features
└── {action}_{object}.feature # Specify test scenarios
tests
├── fixtures
│ ├── action.rs # Perform a user action (click, type, etc.)
│ ├── check.rs # Assert what a user can see/not see
│ ├── find.rs # Query page elements
│ ├── mod.rs
│ └── world
│ ├── action_steps.rs # Map Gherkin steps to user actions
│ ├── check_steps.rs # Map Gherkin steps to user expectations
│ └── mod.rs
└── app_suite.rs # Test main
```

View File

@@ -0,0 +1,10 @@
@echo_client_error
Feature: Echo Client Error
Background:
Given I see the app
@echo_client_error-see-fifth-input-error
Scenario: Should see the client error
Given I add a text as abcde
Then I see the label of the input is Error(ServerFnErrorWrapper(Registration("Error generated from client")))

View File

@@ -0,0 +1,10 @@
@echo_server_error
Feature: Echo Server Error
Background:
Given I see the app
@echo_server_error-see-third-input-error
Scenario: Should see the server error
Given I add a text as abc
Then I see the label of the input is Error(ServerFnErrorWrapper(Registration("Error generated from server")))

View File

@@ -0,0 +1,17 @@
@echo_text
Feature: Echo Text
Background:
Given I see the app
@echo_text-see-first-input
Scenario: Should see the label
Given I add a text as a
Then I see the label of the input is A
@add_text-see-second-input
Scenario: Should see the label
Given I add a text as ab
Then I see the label of the input is AB

View File

@@ -0,0 +1,7 @@
@open_app
Feature: Open App
@open_app-title
Scenario: Should see the home page title
When I open the app
Then I see the page title is Simple Echo WebSocket Communication

View File

@@ -0,0 +1,14 @@
mod fixtures;
use anyhow::Result;
use cucumber::World;
use fixtures::world::AppWorld;
#[tokio::main]
async fn main() -> Result<()> {
AppWorld::cucumber()
.fail_on_skipped()
.run_and_exit("./features")
.await;
Ok(())
}

View File

@@ -0,0 +1,28 @@
use super::{find, world::HOST};
use anyhow::Result;
use fantoccini::Client;
use std::result::Result::Ok;
pub async fn goto_path(client: &Client, path: &str) -> Result<()> {
let url = format!("{}{}", HOST, path);
client.goto(&url).await?;
Ok(())
}
pub async fn add_text(client: &Client, text: &str) -> Result<String> {
fill_input(client, text).await?;
get_label(client).await
}
pub async fn fill_input(client: &Client, text: &str) -> Result<()> {
let textbox = find::input(client).await;
textbox.send_keys(text).await?;
Ok(())
}
pub async fn get_label(client: &Client) -> Result<String> {
let label = find::label(client).await;
Ok(label.text().await?)
}

View File

@@ -0,0 +1,22 @@
use anyhow::{Ok, Result};
use fantoccini::{Client, Locator};
use pretty_assertions::assert_eq;
pub async fn text_on_element(
client: &Client,
selector: &str,
expected_text: &str,
) -> Result<()> {
let element = client
.wait()
.for_element(Locator::Css(selector))
.await
.unwrap_or_else(|_| {
panic!("Element not found by Css selector `{}`", selector)
});
let actual = element.text().await?;
assert_eq!(&actual, expected_text);
Ok(())
}

View File

@@ -0,0 +1,21 @@
use fantoccini::{elements::Element, Client, Locator};
pub async fn input(client: &Client) -> Element {
let textbox = client
.wait()
.for_element(Locator::Css("input"))
.await
.expect("websocket textbox not found");
textbox
}
pub async fn label(client: &Client) -> Element {
let label = client
.wait()
.for_element(Locator::Css("p"))
.await
.expect("");
label
}

View File

@@ -0,0 +1,4 @@
pub mod action;
pub mod check;
pub mod find;
pub mod world;

View File

@@ -0,0 +1,20 @@
use crate::fixtures::{action, world::AppWorld};
use anyhow::{Ok, Result};
use cucumber::{given, when};
#[given("I see the app")]
#[when("I open the app")]
async fn i_open_the_app(world: &mut AppWorld) -> Result<()> {
let client = &world.client;
action::goto_path(client, "").await?;
Ok(())
}
#[given(regex = "^I add a text as (.*)$")]
async fn i_add_a_text(world: &mut AppWorld, text: String) -> Result<()> {
let client = &world.client;
action::add_text(client, text.as_str()).await?;
Ok(())
}

View File

@@ -0,0 +1,28 @@
use crate::fixtures::{check, world::AppWorld};
use anyhow::{Ok, Result};
use cucumber::then;
use std::time::Duration;
use tokio::time::sleep;
#[then(regex = "^I see the page title is (.*)$")]
async fn i_see_the_page_title_is(
world: &mut AppWorld,
text: String,
) -> Result<()> {
let client = &world.client;
check::text_on_element(client, "h1", &text).await?;
Ok(())
}
#[then(regex = "^I see the label of the input is (.*)$")]
async fn i_see_the_label_of_the_input_is(
world: &mut AppWorld,
text: String,
) -> Result<()> {
sleep(Duration::from_millis(50)).await;
let client = &world.client;
check::text_on_element(client, "p", &text).await?;
Ok(())
}

View File

@@ -0,0 +1,39 @@
pub mod action_steps;
pub mod check_steps;
use anyhow::Result;
use cucumber::World;
use fantoccini::{
error::NewSessionError, wd::Capabilities, Client, ClientBuilder,
};
// Base URL of the app under test; presumably joined with relative paths by
// `action::goto_path` — confirm in the action module.
pub const HOST: &str = "http://127.0.0.1:3000";
/// Shared cucumber world state: each scenario gets a WebDriver session.
#[derive(Debug, World)]
#[world(init = Self::new)] // cucumber builds the world via `AppWorld::new`
pub struct AppWorld {
    // Active fantoccini WebDriver client used by every step definition.
    pub client: Client,
}
impl AppWorld {
    /// Initialize the world by opening a fresh WebDriver session.
    async fn new() -> Result<Self, anyhow::Error> {
        Ok(Self {
            client: build_client().await?,
        })
    }
}
/// Connect to a locally running WebDriver server (port 4444) and start a
/// headless browser session.
///
/// # Errors
/// Returns [`NewSessionError`] if the WebDriver server is unreachable or
/// rejects the session.
async fn build_client() -> Result<Client, NewSessionError> {
    let mut cap = Capabilities::new();
    // Build the capability value directly with `json!` instead of parsing a
    // hand-written JSON string and unwrapping — the macro cannot fail at
    // runtime, so the `unwrap()` goes away.
    let arg = serde_json::json!({ "args": ["-headless"] });
    cap.insert("goog:chromeOptions".to_string(), arg);
    let client = ClientBuilder::native()
        .capabilities(cap)
        .connect("http://localhost:4444")
        .await?;
    Ok(client)
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

View File

@@ -0,0 +1,9 @@
pub mod websocket;

/// Client-side entry point: hydrates the server-rendered `<body>`.
/// Only compiled in the `hydrate` (WASM) build and exported to JavaScript
/// via wasm-bindgen.
#[cfg(feature = "hydrate")]
#[wasm_bindgen::prelude::wasm_bindgen]
pub fn hydrate() {
    use crate::websocket::App;
    // Route panics to the browser console instead of silently aborting.
    console_error_panic_hook::set_once();
    leptos::mount::hydrate_body(App);
}

View File

@@ -0,0 +1,44 @@
/// Server-side entry point (`ssr` feature): serves the app over axum.
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
    use axum::Router;
    use leptos::prelude::*;
    use leptos_axum::{generate_route_list, LeptosRoutes};
    use websocket::websocket::{shell, App};

    simple_logger::init_with_level(log::Level::Error)
        .expect("couldn't initialize logging");

    // Setting this to None means we'll be using cargo-leptos and its env vars
    let conf = get_configuration(None).unwrap();
    let leptos_options = conf.leptos_options;
    let addr = leptos_options.site_addr;
    // Collect the routes declared by the <App/> component before building the router.
    let routes = generate_route_list(App);

    // build our application with a route
    let app = Router::new()
        .leptos_routes(&leptos_options, routes, {
            // The shell closure needs its own copy of the options.
            let leptos_options = leptos_options.clone();
            move || shell(leptos_options.clone())
        })
        // Anything not matched by a leptos route falls back to static files / error page.
        .fallback(leptos_axum::file_and_error_handler(shell))
        .with_state(leptos_options);

    // run our app with hyper
    // `axum::Server` is a re-export of `hyper::Server`
    let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
    println!("listening on http://{}", &addr);
    axum::serve(listener, app.into_make_service())
        .await
        .unwrap();
}
/// Pure client-side (CSR) entry point, used when the `ssr` feature is off.
#[cfg(not(feature = "ssr"))]
pub fn main() {
    use websocket::websocket::App;
    // Surface panics in the browser console, then set up console logging.
    console_error_panic_hook::set_once();
    let _ = console_log::init_with_level(log::Level::Debug);
    leptos::mount::mount_to_body(App);
}

View File

@@ -0,0 +1,123 @@
use leptos::{prelude::*, task::spawn_local};
use server_fn::{codec::JsonEncoding, BoxedStream, ServerFnError, Websocket};
/// HTML document shell rendered on the server: emits the full `<html>`
/// skeleton, injects the hydration scripts, and mounts [`App`] in the body.
pub fn shell(options: LeptosOptions) -> impl IntoView {
    view! {
        <!DOCTYPE html>
        <html lang="en">
            <head>
                <meta charset="utf-8" />
                <meta name="viewport" content="width=device-width, initial-scale=1" />
                // AutoReload needs its own clone; HydrationScripts takes `options` by value.
                <AutoReload options=options.clone() />
                <HydrationScripts options />
                <link rel="shortcut icon" type="image/ico" href="/favicon.ico" />
            </head>
            <body>
                <App />
            </body>
        </html>
    }
}
// The websocket protocol can be used on any server function that accepts and
// returns a [`BoxedStream`] whose items can be encoded by the input and output
// encoding generics.
//
// Here both encodings are [`JsonEncoding`], so the items must implement
// [`Serialize`] and [`Deserialize`].
#[server(protocol = Websocket<JsonEncoding, JsonEncoding>)]
async fn echo_websocket(
    input: BoxedStream<String, ServerFnError>,
) -> Result<BoxedStream<String, ServerFnError>, ServerFnError> {
    use futures::{channel::mpsc, SinkExt, StreamExt};

    let mut incoming = input; // FIXME :-) server fn fields should pass mut through to destructure

    // Outgoing channel: `rx` is handed back to the caller, so every message
    // pushed into `tx` travels to the client over the websocket.
    let (mut tx, rx) = mpsc::channel(1);

    // Background task that drains the incoming message stream.
    tokio::spawn(async move {
        let mut count = 0;
        while let Some(msg) = incoming.next().await {
            count += 1;
            println!("In server: {} {:?}", count, msg);
            // Every third message, reply with an error to exercise the
            // client's error path; otherwise echo the text back uppercased.
            if count % 3 == 0 {
                let _ = tx
                    .send(Err(ServerFnError::Registration(
                        "Error generated from server".to_string(),
                    )))
                    .await;
            } else {
                let _ = tx.send(msg.map(|msg| msg.to_ascii_uppercase())).await;
            }
        }
    });

    Ok(rx.into())
}
/// Demo UI: an input whose keystrokes are streamed to the server through the
/// `echo_websocket` server fn; the server's replies (uppercased text, or a
/// deliberate error) are rendered below inside an `ErrorBoundary`.
#[component]
pub fn App() -> impl IntoView {
    use futures::{channel::mpsc, StreamExt};

    // `tx` feeds outgoing messages; `rx` is moved into the server fn call.
    let (mut tx, rx) = mpsc::channel(1);
    // Latest message received from the server (Ok text, or a server error
    // that the ErrorBoundary below will display).
    let latest = RwSignal::new(Ok("".into()));

    // we'll only listen for websocket messages on the client
    if cfg!(feature = "hydrate") {
        spawn_local(async move {
            match echo_websocket(rx.into()).await {
                Ok(mut messages) => {
                    while let Some(msg) = messages.next().await {
                        leptos::logging::log!("{:?}", msg);
                        latest.set(msg);
                    }
                }
                Err(e) => leptos::logging::warn!("{e}"),
            }
        });
    }

    // Keystroke counter captured by the input handler below; every fifth
    // input sends a client-side error to exercise the error path.
    let mut x = 0;
    view! {
        <h1>Simple Echo WebSocket Communication</h1>
        <input
            type="text"
            on:input:target=move |ev| {
                x += 1;
                let msg = ev.target().value();
                leptos::logging::log!("In client: {} {:?}", x, msg);
                if x % 5 == 0 {
                    let _ = tx
                        .try_send(
                            Err(
                                ServerFnError::Registration(
                                    "Error generated from client".to_string(),
                                ),
                            ),
                        );
                } else {
                    let _ = tx.try_send(Ok(msg));
                }
            }
        />
        <div>
            // Render any Err value stored in `latest` as joined debug strings.
            <ErrorBoundary fallback=|errors| {
                view! {
                    <p>
                        {move || {
                            errors
                                .get()
                                .into_iter()
                                .map(|(_, e)| format!("{e:?}"))
                                .collect::<Vec<String>>()
                                .join(" ")
                        }}
                    </p>
                }
            }>
                <p>{latest}</p>
            </ErrorBoundary>
        </div>
    }
}

View File

View File

@@ -14,7 +14,7 @@ throw_error = { workspace = true }
or_poisoned = { workspace = true }
futures = "0.3.31"
serde = { version = "1.0", features = ["derive"] }
wasm-bindgen = { version = "0.2.100", optional = true }
wasm-bindgen = { workspace = true, optional = true }
js-sys = { version = "0.3.74", optional = true }
once_cell = "1.20"
pin-project-lite = "0.2.15"

View File

@@ -1230,7 +1230,7 @@ fn static_path(options: &LeptosOptions, path: &str) -> String {
// If the path ends with a trailing slash, we generate the path
// as a directory with a index.html file inside.
if path != "/" && path.ends_with("/") {
static_file_path(options, &format!("{}index", path))
static_file_path(options, &format!("{path}index"))
} else {
static_file_path(options, path)
}

View File

@@ -4,7 +4,7 @@ authors = ["Greg Johnston"]
license = "MIT"
repository = "https://github.com/leptos-rs/leptos"
description = "Axum integrations for the Leptos web framework."
version = "0.8.0-rc1"
version = "0.8.0-rc3"
rust-version.workspace = true
edition.workspace = true

View File

@@ -1543,7 +1543,7 @@ fn static_path(options: &LeptosOptions, path: &str) -> String {
// If the path ends with a trailing slash, we generate the path
// as a directory with a index.html file inside.
if path != "/" && path.ends_with("/") {
static_file_path(options, &format!("{}index", path))
static_file_path(options, &format!("{path}index"))
} else {
static_file_path(options, path)
}

View File

@@ -39,10 +39,10 @@ tachys = { workspace = true, features = [
"reactive_stores",
"oco",
] }
thiserror = "2.0"
thiserror = { workspace = true }
tracing = { version = "0.1.41", optional = true }
typed-builder = "0.20.0"
typed-builder-macro = "0.20.0"
typed-builder = { workspace = true }
typed-builder-macro = "0.21.0"
serde = "1.0"
serde_json = { version = "1.0", optional = true }
server_fn = { workspace = true, features = ["form-redirects", "browser"] }
@@ -52,7 +52,7 @@ web-sys = { version = "0.3.72", features = [
"ShadowRootMode",
] }
wasm-bindgen = { workspace = true }
serde_qs = "0.13.0"
serde_qs = "0.14.0"
slotmap = "1.0"
futures = "0.3.31"
send_wrapper = "0.6.0"
@@ -107,7 +107,7 @@ rustc_version = "0.4.1"
# https://github.com/rust-lang/cargo/issues/4423
# TLDR proc macros will ignore RUSTFLAGS when --target is specified on the cargo command.
# This works around the issue by the non proc-macro crate which does see RUSTFLAGS enabling the replacement feature on the proc-macro crate, which wouldn't.
# This is automatic as long as the leptos crate is depended upon,
# This is automatic as long as the leptos crate is depended upon,
# downstream usage should never manually enable this feature.
[target.'cfg(erase_components)'.dependencies]
leptos_macro = { workspace = true, features = ["__internal_erase_components"] }

View File

@@ -1,20 +1,21 @@
use crate::{children::TypedChildren, IntoView};
use futures::{channel::oneshot, future::join_all};
use hydration_context::{SerializedDataId, SharedContext};
use leptos_macro::component;
use reactive_graph::{
computed::ArcMemo,
effect::RenderEffect,
owner::{provide_context, Owner},
owner::{provide_context, ArcStoredValue, Owner},
signal::ArcRwSignal,
traits::{Get, Update, With, WithUntracked},
traits::{Get, Update, With, WithUntracked, WriteValue},
};
use rustc_hash::FxHashMap;
use std::{fmt::Debug, sync::Arc};
use std::{collections::VecDeque, fmt::Debug, mem, sync::Arc};
use tachys::{
html::attribute::{any_attribute::AnyAttribute, Attribute},
hydration::Cursor,
reactive_graph::OwnedView,
ssr::StreamBuilder,
ssr::{StreamBuilder, StreamChunk},
view::{
add_attr::AddAnyAttr, Mountable, Position, PositionState, Render,
RenderHtml,
@@ -96,10 +97,12 @@ where
let hook = hook as Arc<dyn ErrorHook>;
let _guard = throw_error::set_error_hook(Arc::clone(&hook));
let suspended_children = ErrorBoundarySuspendedChildren::default();
let owner = Owner::new();
let children = owner.with(|| {
provide_context(Arc::clone(&hook));
provide_context(suspended_children.clone());
children.into_inner()()
});
@@ -111,11 +114,15 @@ where
children,
errors,
fallback,
suspended_children,
},
owner,
)
}
pub(crate) type ErrorBoundarySuspendedChildren =
ArcStoredValue<Vec<oneshot::Receiver<()>>>;
struct ErrorBoundaryView<Chil, FalFn> {
hook: Arc<dyn ErrorHook>,
boundary_id: SerializedDataId,
@@ -123,6 +130,7 @@ struct ErrorBoundaryView<Chil, FalFn> {
children: Chil,
fallback: FalFn,
errors: ArcRwSignal<Errors>,
suspended_children: ErrorBoundarySuspendedChildren,
}
struct ErrorBoundaryViewState<Chil, Fal> {
@@ -257,6 +265,7 @@ where
children,
fallback,
errors,
suspended_children,
} = self;
ErrorBoundaryView {
hook,
@@ -265,6 +274,7 @@ where
children: children.add_any_attr(attr.into_cloneable_owned()),
fallback,
errors,
suspended_children,
}
}
}
@@ -292,6 +302,7 @@ where
children,
fallback,
errors,
suspended_children,
..
} = self;
ErrorBoundaryView {
@@ -301,6 +312,7 @@ where
children: children.resolve().await,
fallback,
errors,
suspended_children,
}
}
@@ -349,7 +361,8 @@ where
) where
Self: Sized,
{
let _hook = throw_error::set_error_hook(self.hook);
let _hook = throw_error::set_error_hook(Arc::clone(&self.hook));
// first, attempt to serialize the children to HTML, then check for errors
let mut new_buf = StreamBuilder::new(buf.clone_id());
let mut new_pos = *position;
@@ -361,20 +374,76 @@ where
extra_attrs.clone(),
);
// any thrown errors would've been caught here
if self.errors.with_untracked(|map| map.is_empty()) {
buf.append(new_buf);
let suspense_children =
mem::take(&mut *self.suspended_children.write_value());
// not waiting for any suspended children: just render
if suspense_children.is_empty() {
// any thrown errors would've been caught here
if self.errors.with_untracked(|map| map.is_empty()) {
buf.append(new_buf);
} else {
// otherwise, serialize the fallback instead
let mut fallback = String::with_capacity(Fal::MIN_LENGTH);
(self.fallback)(self.errors).to_html_with_buf(
&mut fallback,
position,
escape,
mark_branches,
extra_attrs,
);
buf.push_sync(&fallback);
}
} else {
// otherwise, serialize the fallback instead
let mut fallback = String::with_capacity(Fal::MIN_LENGTH);
(self.fallback)(self.errors).to_html_with_buf(
&mut fallback,
position,
escape,
mark_branches,
extra_attrs,
);
buf.push_sync(&fallback);
let mut position = *position;
// if we're waiting for suspended children, we'll first wait for them to load
// in this implementation, an ErrorBoundary that *contains* Suspense essentially acts
// like a Suspense: it will wait for (all top-level) child Suspense to load before rendering anything
let mut view_buf = StreamBuilder::new(new_buf.clone_id());
view_buf.next_id();
let hook = Arc::clone(&self.hook);
view_buf.push_async(async move {
let _hook = throw_error::set_error_hook(Arc::clone(&hook));
let _ = join_all(suspense_children).await;
let mut my_chunks = VecDeque::new();
for chunk in new_buf.take_chunks() {
match chunk {
StreamChunk::Sync(data) => {
my_chunks.push_back(StreamChunk::Sync(data))
}
StreamChunk::Async { chunks } => {
let chunks = chunks.await;
my_chunks.extend(chunks);
}
StreamChunk::OutOfOrder { chunks } => {
let chunks = chunks.await;
my_chunks.push_back(StreamChunk::OutOfOrder {
chunks: Box::pin(async move { chunks }),
});
}
}
}
if self.errors.with_untracked(|map| map.is_empty()) {
// if no errors, just go ahead with the stream
my_chunks
} else {
// otherwise, serialize the fallback instead
let mut fallback = String::with_capacity(Fal::MIN_LENGTH);
(self.fallback)(self.errors).to_html_with_buf(
&mut fallback,
&mut position,
escape,
mark_branches,
extra_attrs,
);
my_chunks.clear();
my_chunks.push_back(StreamChunk::Sync(fallback));
my_chunks
}
});
buf.append(view_buf);
}
}

View File

@@ -162,6 +162,7 @@ pub mod prelude {
pub use crate::{
callback::*, children::*, component::*, control_flow::*, error::*,
form::*, hydration::*, into_view::*, mount::*, suspense::*,
text_prop::*,
};
pub use leptos_config::*;
pub use leptos_dom::helpers::*;

View File

@@ -1,8 +1,9 @@
use crate::{
children::{TypedChildren, ViewFnOnce},
error::ErrorBoundarySuspendedChildren,
IntoView,
};
use futures::{select, FutureExt};
use futures::{channel::oneshot, select, FutureExt};
use hydration_context::SerializedDataId;
use leptos_macro::component;
use reactive_graph::{
@@ -13,7 +14,7 @@ use reactive_graph::{
effect::RenderEffect,
owner::{provide_context, use_context, Owner},
signal::ArcRwSignal,
traits::{Dispose, Get, Read, Track, With},
traits::{Dispose, Get, Read, Track, With, WriteValue},
};
use slotmap::{DefaultKey, SlotMap};
use std::sync::Arc;
@@ -99,6 +100,8 @@ pub fn Suspense<Chil>(
where
Chil: IntoView + Send + 'static,
{
let error_boundary_parent = use_context::<ErrorBoundarySuspendedChildren>();
let owner = Owner::new();
owner.with(|| {
let (starts_local, id) = {
@@ -129,6 +132,7 @@ where
none_pending,
fallback,
children,
error_boundary_parent,
})
})
}
@@ -150,6 +154,7 @@ pub(crate) struct SuspenseBoundary<const TRANSITION: bool, Fal, Chil> {
pub none_pending: ArcMemo<bool>,
pub fallback: Fal,
pub children: Chil,
pub error_boundary_parent: Option<ErrorBoundarySuspendedChildren>,
}
impl<const TRANSITION: bool, Fal, Chil> Render
@@ -228,12 +233,14 @@ where
none_pending,
fallback,
children,
error_boundary_parent,
} = self;
SuspenseBoundary {
id,
none_pending,
fallback,
children: children.add_any_attr(attr),
error_boundary_parent,
}
}
}
@@ -288,6 +295,13 @@ where
let suspense_context = use_context::<SuspenseContext>().unwrap();
let owner = Owner::current().unwrap();
let mut notify_error_boundary =
self.error_boundary_parent.map(|children| {
let (tx, rx) = oneshot::channel();
children.write_value().push(rx);
tx
});
// we need to wait for one of two things: either
// 1. all tasks are finished loading, or
// 2. we read from a local resource, meaning this Suspense can never resolve on the server
@@ -318,6 +332,9 @@ where
// dropped, so it doesn't matter if we manage to send this.
_ = tx.send(());
}
if let Some(tx) = notify_error_boundary.take() {
_ = tx.send(());
}
}
}
}
@@ -413,6 +430,11 @@ where
extra_attrs,
);
} else {
// calling this will walk over the tree, removing all event listeners
// and other single-threaded values from the view tree. this needs to be
// done because the fallback can be shifted to another thread in push_async below.
self.fallback.dry_resolve();
buf.push_async({
let mut position = *position;
async move {

View File

@@ -1,5 +1,6 @@
use crate::{
children::{TypedChildren, ViewFnOnce},
error::ErrorBoundarySuspendedChildren,
suspense_component::SuspenseBoundary,
IntoView,
};
@@ -7,7 +8,7 @@ use leptos_macro::component;
use reactive_graph::{
computed::{suspense::SuspenseContext, ArcMemo},
effect::Effect,
owner::{provide_context, Owner},
owner::{provide_context, use_context, Owner},
signal::ArcRwSignal,
traits::{Get, Set, Track, With},
wrappers::write::SignalSetter,
@@ -85,6 +86,8 @@ pub fn Transition<Chil>(
where
Chil: IntoView + Send + 'static,
{
let error_boundary_parent = use_context::<ErrorBoundarySuspendedChildren>();
let owner = Owner::new();
owner.with(|| {
let (starts_local, id) = {
@@ -123,6 +126,7 @@ where
none_pending,
fallback,
children,
error_boundary_parent,
})
})
}

View File

@@ -16,8 +16,8 @@ config = { version = "0.15.8", default-features = false, features = [
] }
regex = "1.11"
serde = { version = "1.0", features = ["derive", "rc"] }
thiserror = "2.0"
typed-builder = "0.20.0"
thiserror = { workspace = true }
typed-builder = { workspace = true }
[dev-dependencies]
tokio = { version = "1.43", features = ["rt", "macros"] }

View File

@@ -25,7 +25,7 @@ syn = { version = "2.0", features = ["full"] }
rstml = "0.12.0"
leptos_hot_reload = { workspace = true }
server_fn_macro = { workspace = true }
convert_case = "0.7"
convert_case = { workspace = true }
uuid = { version = "1.11", features = ["v4"] }
tracing = { version = "0.1.41", optional = true }

View File

@@ -645,7 +645,9 @@ impl Parse for DummyModel {
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
let mut attrs = input.call(Attribute::parse_outer)?;
// Drop unknown attributes like #[deprecated]
drain_filter(&mut attrs, |attr| !attr.path().is_ident("doc"));
drain_filter(&mut attrs, |attr| {
!(attr.path().is_ident("doc") || attr.path().is_ident("allow"))
});
let vis: Visibility = input.parse()?;
let mut sig: Signature = input.parse()?;
@@ -939,6 +941,10 @@ impl UnknownAttrs {
}
}
if attr.path().is_ident("allow") {
return None;
}
Some((attr.into_token_stream(), attr.span()))
})
.collect_vec();

View File

@@ -361,7 +361,7 @@ fn normalized_call_site(site: proc_macro::Span) -> Option<String> {
cfg_if::cfg_if! {
if #[cfg(all(debug_assertions, feature = "nightly", rustc_nightly))] {
Some(leptos_hot_reload::span_to_stable_id(
site.source_file().path(),
site.file(),
site.start().line()
))
} else {

View File

@@ -1,8 +1,6 @@
[package]
name = "leptos_server"
# TODO revert to { workspace = true } before 0.8.0 release
# this is a hack because I missing bumping the hydration_context version number before publishing
version = "0.8.0-rc1"
version = { workspace = true }
authors = ["Greg Johnston"]
license = "MIT"
repository = "https://github.com/leptos-rs/leptos"
@@ -28,7 +26,7 @@ send_wrapper = "0.6"
# serialization formats
serde = { version = "1.0" }
js-sys = { version = "0.3.74", optional = true }
wasm-bindgen = { version = "0.2.100", optional = true }
wasm-bindgen = { workspace = true, optional = true }
serde_json = { workspace = true }
[features]

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_meta"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
authors = ["Greg Johnston"]
license = "MIT"
repository = "https://github.com/leptos-rs/leptos"

View File

@@ -446,7 +446,7 @@ where
tracing::warn!("{}", msg);
#[cfg(not(feature = "tracing"))]
eprintln!("{}", msg);
eprintln!("{msg}");
}
}

View File

@@ -7,6 +7,9 @@ use leptos::{
/// Injects an [`HTMLLinkElement`](https://developer.mozilla.org/en-US/docs/Web/API/HTMLLinkElement) into the document
/// head that loads a stylesheet from the URL given by the `href` property.
///
/// Note that this does *not* work with the `cargo-leptos` `hash-files` feature: if you are using file
/// hashing, you should use [`HashedStylesheet`](crate::HashedStylesheet).
///
/// ```
/// use leptos::prelude::*;
/// use leptos_meta::*;

View File

@@ -10,7 +10,7 @@ edition.workspace = true
[dependencies]
serde = "1.0"
thiserror = "2.0"
thiserror = { workspace = true }
[dev-dependencies]
serde_json = { workspace = true }

View File

@@ -1,6 +1,6 @@
[package]
name = "reactive_graph"
version = "0.2.0-rc1"
version = "0.2.0-rc3"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
@@ -18,7 +18,7 @@ pin-project-lite = "0.2.15"
rustc-hash = "2.0"
serde = { version = "1.0", features = ["derive"], optional = true }
slotmap = "1.0"
thiserror = "2.0"
thiserror = { workspace = true }
tracing = { version = "0.1.41", optional = true }
guardian = "1.2"
async-lock = "3.4.0"

View File

@@ -560,7 +560,7 @@ impl<T: 'static> ArcAsyncDerived<T> {
};
let initial_value = SendOption::new_local(initial_value);
let (this, _) = spawn_derived!(
crate::spawn_local_scoped,
crate::spawn_local,
initial_value,
fun,
true,
@@ -595,7 +595,7 @@ impl<T: 'static> ArcAsyncDerived<T> {
async move { SendOption::new_local(Some(fut.await)) }
};
let (this, _) = spawn_derived!(
crate::spawn_local_scoped,
crate::spawn_local,
initial,
fun,
false,

View File

@@ -369,7 +369,7 @@ mod inner {
const MSG: &str = "ImmediateEffect recursed more than once.";
match effect.defined_at() {
Some(defined_at) => {
log_warning(format_args!("{MSG} Defined at: {}", defined_at));
log_warning(format_args!("{MSG} Defined at: {defined_at}"));
}
None => {
log_warning(format_args!("{MSG}"));

View File

@@ -121,7 +121,7 @@ pub fn log_warning(text: Arguments) {
not(all(target_arch = "wasm32", target_os = "unknown"))
))]
{
eprintln!("{}", text);
eprintln!("{text}");
}
}
@@ -138,6 +138,15 @@ pub fn spawn(task: impl Future<Output = ()> + Send + 'static) {
any_spawner::Executor::spawn_local(task);
}
/// Calls [`Executor::spawn_local`](any_spawner::Executor::spawn_local), but ensures that the task also runs in the current arena, if
/// multithreaded arena sandboxing is enabled.
pub fn spawn_local(task: impl Future<Output = ()> + 'static) {
#[cfg(feature = "sandboxed-arenas")]
let task = owner::Sandboxed::new(task);
any_spawner::Executor::spawn_local(task);
}
/// Calls [`Executor::spawn_local`](any_spawner::Executor), but ensures that the task runs under the current reactive [`Owner`](crate::owner::Owner) and observer.
///
/// Does not cancel the task if the owner is cleaned up.

View File

@@ -181,10 +181,10 @@ impl<T: Debug> Debug for SendOption<T> {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match &self.inner {
Inner::Threadsafe(value) => {
write!(f, "SendOption::Threadsafe({:?})", value)
write!(f, "SendOption::Threadsafe({value:?})")
}
Inner::Local(value) => {
write!(f, "SendOption::Local({:?})", value)
write!(f, "SendOption::Local({value:?})")
}
}
}

View File

@@ -1133,7 +1133,7 @@ pub mod read {
/// ```
#[derive(Debug, PartialEq, Eq)]
#[deprecated(
since = "0.7.0-rc1",
since = "0.7.0-rc3",
note = "`MaybeSignal<T>` is deprecated in favour of `Signal<T>` which \
is `Copy`, now has a more efficient From<T> implementation \
and other benefits in 0.7."

View File

@@ -1,6 +1,6 @@
[package]
name = "reactive_stores"
version = "0.2.0-rc1"
version = "0.2.0-rc3"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
@@ -17,6 +17,8 @@ paste = "1.0"
reactive_graph = { workspace = true }
rustc-hash = "2.0"
reactive_stores_macro = { workspace = true }
dashmap = "6.1"
send_wrapper = "0.6.0"
[dev-dependencies]
tokio = { version = "1.43", features = ["rt-multi-thread", "macros"] }

View File

@@ -28,14 +28,14 @@ where
{
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: &'static Location<'static>,
path: StorePath,
trigger: StoreFieldTrigger,
path: Arc<dyn Fn() -> StorePath + Send + Sync>,
get_trigger: Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
read: Arc<dyn Fn() -> Option<StoreFieldReader<T>> + Send + Sync>,
pub(crate) write:
Arc<dyn Fn() -> Option<StoreFieldWriter<T>> + Send + Sync>,
keys: Arc<dyn Fn() -> Option<KeyMap> + Send + Sync>,
track_field: Arc<dyn Fn() + Send + Sync>,
notify: Arc<dyn Fn() + Send + Sync>,
}
impl<T> Debug for ArcField<T>
@@ -46,9 +46,7 @@ where
let mut f = f.debug_struct("ArcField");
#[cfg(any(debug_assertions, leptos_debuginfo))]
let f = f.field("defined_at", &self.defined_at);
f.field("path", &self.path)
.field("trigger", &self.trigger)
.finish()
f.finish_non_exhaustive()
}
}
@@ -106,7 +104,7 @@ impl<T> StoreField for ArcField<T> {
}
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.path.clone()
(self.path)()
}
fn reader(&self) -> Option<Self::Reader> {
@@ -132,13 +130,13 @@ where
ArcField {
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: Location::caller(),
path: value.path().into_iter().collect(),
trigger: value.get_trigger(value.path().into_iter().collect()),
path: Arc::new(move || value.path().into_iter().collect()),
get_trigger: Arc::new(move |path| value.get_trigger(path)),
read: Arc::new(move || value.reader().map(StoreFieldReader::new)),
write: Arc::new(move || value.writer().map(StoreFieldWriter::new)),
keys: Arc::new(move || value.keys()),
track_field: Arc::new(move || value.track_field()),
notify: Arc::new(move || value.notify()),
}
}
}
@@ -152,8 +150,10 @@ where
ArcField {
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: Location::caller(),
path: value.path().into_iter().collect(),
trigger: value.get_trigger(value.path().into_iter().collect()),
path: Arc::new({
let value = value.clone();
move || value.path().into_iter().collect()
}),
get_trigger: Arc::new({
let value = value.clone();
move |path| value.get_trigger(path)
@@ -174,6 +174,10 @@ where
let value = value.clone();
move || value.track_field()
}),
notify: Arc::new({
let value = value.clone();
move || value.notify()
}),
}
}
}
@@ -190,8 +194,10 @@ where
ArcField {
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: Location::caller(),
path: value.path().into_iter().collect(),
trigger: value.get_trigger(value.path().into_iter().collect()),
path: Arc::new({
let value = value.clone();
move || value.path().into_iter().collect()
}),
get_trigger: Arc::new({
let value = value.clone();
move |path| value.get_trigger(path)
@@ -212,6 +218,10 @@ where
let value = value.clone();
move || value.track_field()
}),
notify: Arc::new({
let value = value.clone();
move || value.notify()
}),
}
}
}
@@ -227,8 +237,10 @@ where
ArcField {
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: Location::caller(),
path: value.path().into_iter().collect(),
trigger: value.get_trigger(value.path().into_iter().collect()),
path: Arc::new({
let value = value.clone();
move || value.path().into_iter().collect()
}),
get_trigger: Arc::new({
let value = value.clone();
move |path| value.get_trigger(path)
@@ -249,6 +261,10 @@ where
let value = value.clone();
move || value.track_field()
}),
notify: Arc::new({
let value = value.clone();
move || value.notify()
}),
}
}
}
@@ -265,8 +281,10 @@ where
ArcField {
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: Location::caller(),
path: value.path().into_iter().collect(),
trigger: value.get_trigger(value.path().into_iter().collect()),
path: Arc::new({
let value = value.clone();
move || value.path().into_iter().collect()
}),
get_trigger: Arc::new({
let value = value.clone();
move |path| value.get_trigger(path)
@@ -287,6 +305,10 @@ where
let value = value.clone();
move || value.track_field()
}),
notify: Arc::new({
let value = value.clone();
move || value.notify()
}),
}
}
}
@@ -307,8 +329,10 @@ where
ArcField {
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: Location::caller(),
path: value.path().into_iter().collect(),
trigger: value.get_trigger(value.path().into_iter().collect()),
path: Arc::new({
let value = value.clone();
move || value.path().into_iter().collect()
}),
get_trigger: Arc::new({
let value = value.clone();
move |path| value.get_trigger(path)
@@ -329,6 +353,10 @@ where
let value = value.clone();
move || value.track_field()
}),
notify: Arc::new({
let value = value.clone();
move || value.notify()
}),
}
}
}
@@ -339,12 +367,12 @@ impl<T> Clone for ArcField<T> {
#[cfg(any(debug_assertions, leptos_debuginfo))]
defined_at: self.defined_at,
path: self.path.clone(),
trigger: self.trigger.clone(),
get_trigger: Arc::clone(&self.get_trigger),
read: Arc::clone(&self.read),
write: Arc::clone(&self.write),
keys: Arc::clone(&self.keys),
track_field: Arc::clone(&self.track_field),
notify: Arc::clone(&self.notify),
}
}
}
@@ -364,7 +392,7 @@ impl<T> DefinedAt for ArcField<T> {
impl<T> Notify for ArcField<T> {
fn notify(&self) {
self.trigger.this.notify();
(self.notify)()
}
}

View File

@@ -239,7 +239,6 @@
//! field in the signal inner `Arc<RwLock<_>>`, and tracks the trigger that corresponds with its
//! path; calling `.write()` returns a writeable guard, and notifies that same trigger.
use or_poisoned::OrPoisoned;
use reactive_graph::{
owner::{ArenaItem, LocalStorage, Storage, SyncStorage},
signal::{
@@ -255,7 +254,6 @@ pub use reactive_stores_macro::{Patch, Store};
use rustc_hash::FxHashMap;
use std::{
any::Any,
collections::HashMap,
fmt::Debug,
hash::Hash,
ops::DerefMut,
@@ -345,7 +343,7 @@ where
Self {
spare_keys: Vec::new(),
current_key: 0,
current_key: keys.len().saturating_sub(1),
keys,
}
}
@@ -408,9 +406,25 @@ impl<K> Default for FieldKeys<K> {
}
}
#[cfg(not(target_arch = "wasm32"))]
type HashMap<K, V> = Arc<dashmap::DashMap<K, V>>;
#[cfg(target_arch = "wasm32")]
type HashMap<K, V> = send_wrapper::SendWrapper<
std::rc::Rc<std::cell::RefCell<std::collections::HashMap<K, V>>>,
>;
/// A map of the keys for a keyed subfield.
#[derive(Default, Clone)]
pub struct KeyMap(Arc<RwLock<HashMap<StorePath, Box<dyn Any + Send + Sync>>>>);
#[derive(Clone)]
pub struct KeyMap(HashMap<StorePath, Box<dyn Any + Send + Sync>>);
impl Default for KeyMap {
fn default() -> Self {
#[cfg(not(target_arch = "wasm32"))]
return Self(Default::default());
#[cfg(target_arch = "wasm32")]
return Self(send_wrapper::SendWrapper::new(Default::default()));
}
}
impl KeyMap {
fn with_field_keys<K, T>(
@@ -422,26 +436,25 @@ impl KeyMap {
where
K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static,
{
// this incredibly defensive mechanism takes the guard twice
// on initialization. unfortunately, this is because `initialize`, on
// a nested keyed field can, when being initialized), can in fact try
// to take the lock again, as we try to insert the keys of the parent
// while inserting the keys on this child.
//
// see here https://github.com/leptos-rs/leptos/issues/3086
let mut guard = self.0.write().or_poisoned();
if guard.contains_key(&path) {
let entry = guard.get_mut(&path)?;
let entry = entry.downcast_mut::<FieldKeys<K>>()?;
Some(fun(entry))
#[cfg(not(target_arch = "wasm32"))]
let mut entry = self
.0
.entry(path)
.or_insert_with(|| Box::new(FieldKeys::new(initialize())));
#[cfg(target_arch = "wasm32")]
let entry = if !self.0.borrow().contains_key(&path) {
Some(Box::new(FieldKeys::new(initialize())))
} else {
drop(guard);
let keys = Box::new(FieldKeys::new(initialize()));
let mut guard = self.0.write().or_poisoned();
let entry = guard.entry(path).or_insert(keys);
let entry = entry.downcast_mut::<FieldKeys<K>>()?;
Some(fun(entry))
}
None
};
#[cfg(target_arch = "wasm32")]
let mut map = self.0.borrow_mut();
#[cfg(target_arch = "wasm32")]
let entry = map.entry(path).or_insert_with(|| entry.unwrap());
let entry = entry.downcast_mut::<FieldKeys<K>>()?;
Some(fun(entry))
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "reactive_stores_macro"
version = "0.2.0-rc1"
version = "0.2.0-rc3"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
@@ -13,7 +13,7 @@ edition.workspace = true
proc-macro = true
[dependencies]
convert_case = "0.7"
convert_case = { workspace = true }
proc-macro-error2 = "2.0"
proc-macro2 = "1.0"
quote = "1.0"

View File

@@ -79,7 +79,7 @@ impl Parse for Model {
#[derive(Clone)]
enum SubfieldMode {
Keyed(ExprClosure, Box<Type>),
Keyed(Box<ExprClosure>, Box<Type>),
Skip,
}
@@ -91,7 +91,7 @@ impl Parse for SubfieldMode {
let ty: Type = input.parse()?;
let _eq: Token![=] = input.parse()?;
let closure: ExprClosure = input.parse()?;
Ok(SubfieldMode::Keyed(closure, Box::new(ty)))
Ok(SubfieldMode::Keyed(Box::new(closure), Box::new(ty)))
} else if mode == "skip" {
Ok(SubfieldMode::Skip)
} else {
@@ -403,7 +403,7 @@ fn variant_to_tokens(
let field_ident = field.ident.as_ref().unwrap();
let field_ty = &field.ty;
let combined_ident = Ident::new(
&format!("{}_{}", ident, field_ident),
&format!("{ident}_{field_ident}"),
field_ident.span(),
);
@@ -481,7 +481,7 @@ fn variant_to_tokens(
let field_ident = idx;
let field_ty = &field.ty;
let combined_ident = Ident::new(
&format!("{}_{}", ident, field_ident),
&format!("{ident}_{field_ident}"),
ident.span(),
);

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_router"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"
@@ -24,7 +24,7 @@ wasm-bindgen = { workspace = true }
tracing = { version = "0.1.41", optional = true }
once_cell = "1.20"
send_wrapper = "0.6.0"
thiserror = "2.0"
thiserror = { workspace = true }
percent-encoding = { version = "2.3", optional = true }
gloo-net = "0.6.0"

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_router_macro"
version = "0.8.0-rc1"
version = "0.8.0-rc3"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"

View File

@@ -14,13 +14,13 @@ throw_error = { workspace = true }
server_fn_macro_default = { workspace = true }
# used for hashing paths in #[server] macro
const_format = "0.2.33"
const-str = "0.5.7"
const-str = "0.6.2"
xxhash-rust = { version = "0.8.12", features = ["const_xxh64"] }
rustversion = { workspace = true}
# used across multiple features
serde = { version = "1.0", features = ["derive"] }
send_wrapper = { version = "0.6.0", features = ["futures"], optional = true }
thiserror = "2.0"
thiserror = { workspace = true }
# registration system
inventory = { version = "0.3.15", optional = true }
@@ -40,7 +40,7 @@ tower = { version = "0.5.1", optional = true }
tower-layer = { version = "0.3.3", optional = true }
## input encodings
serde_qs = { version = "0.13.0" }
serde_qs = { version = "0.14.0" }
multer = { version = "3.1", optional = true }
## output encodings
@@ -61,7 +61,7 @@ base64 = { version = "0.22.1" }
# client
gloo-net = { version = "0.6.0", optional = true }
js-sys = { version = "0.3.74", optional = true }
wasm-bindgen = { version = "0.2.100", optional = true }
wasm-bindgen = { workspace = true, optional = true }
wasm-bindgen-futures = { version = "0.4.50", optional = true }
wasm-streams = { version = "0.4.2", optional = true }
web-sys = { version = "0.3.72", optional = true, features = [

View File

@@ -42,7 +42,7 @@ pub trait Client<Error, InputStreamError = Error, OutputStreamError = Error> {
Output = Result<
(
impl Stream<Item = Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Bytes> + Send + 'static,
),
Error,
>,
@@ -62,8 +62,8 @@ pub mod browser {
response::browser::BrowserResponse,
};
use bytes::Bytes;
use futures::{Sink, SinkExt, StreamExt, TryStreamExt};
use gloo_net::websocket::{events::CloseEvent, Message, WebSocketError};
use futures::{Sink, SinkExt, StreamExt};
use gloo_net::websocket::{Message, WebSocketError};
use send_wrapper::SendWrapper;
use std::future::Future;
@@ -115,7 +115,7 @@ pub mod browser {
impl futures::Stream<Item = Result<Bytes, Bytes>>
+ Send
+ 'static,
impl futures::Sink<Result<Bytes, Bytes>> + Send + 'static,
impl futures::Sink<Bytes> + Send + 'static,
),
Error,
>,
@@ -131,18 +131,19 @@ pub mod browser {
})?;
let (sink, stream) = websocket.split();
let stream = stream
.map_err(|err| {
web_sys::console::error_1(&err.to_string().into());
OutputStreamError::from_server_fn_error(
ServerFnErrorErr::Request(err.to_string()),
)
.ser()
})
.map_ok(move |msg| match msg {
let stream = stream.map(|message| match message {
Ok(message) => Ok(match message {
Message::Text(text) => Bytes::from(text),
Message::Bytes(bytes) => Bytes::from(bytes),
});
}),
Err(err) => {
web_sys::console::error_1(&err.to_string().into());
Err(OutputStreamError::from_server_fn_error(
ServerFnErrorErr::Request(err.to_string()),
)
.ser())
}
});
let stream = SendWrapper::new(stream);
struct SendWrapperSink<S> {
@@ -195,26 +196,11 @@ pub mod browser {
}
}
let sink =
sink.with(|message: Result<Bytes, Bytes>| async move {
match message {
Ok(message) => Ok(Message::Bytes(message.into())),
Err(err) => {
let err = InputStreamError::de(err);
web_sys::console::error_1(
&js_sys::JsString::from(err.to_string()),
);
const CLOSE_CODE_ERROR: u16 = 1011;
Err(WebSocketError::ConnectionClose(
CloseEvent {
code: CLOSE_CODE_ERROR,
reason: err.to_string(),
was_clean: true,
},
))
}
}
});
let sink = sink.with(|message: Bytes| async move {
Ok::<Message, WebSocketError>(Message::Bytes(
message.into(),
))
});
let sink = SendWrapperSink::new(Box::pin(sink));
Ok((stream, sink))
@@ -243,13 +229,19 @@ pub mod reqwest {
/// Implements [`Client`] for a request made by [`reqwest`].
pub struct ReqwestClient;
impl<E: FromServerFnError + Send + 'static> Client<E> for ReqwestClient {
impl<
Error: FromServerFnError,
InputStreamError: FromServerFnError,
OutputStreamError: FromServerFnError,
> Client<Error, InputStreamError, OutputStreamError> for ReqwestClient
{
type Request = Request;
type Response = Response;
fn send(
req: Self::Request,
) -> impl Future<Output = Result<Self::Response, E>> + Send {
) -> impl Future<Output = Result<Self::Response, Error>> + Send
{
CLIENT.execute(req).map_err(|e| {
ServerFnErrorErr::Request(e.to_string()).into_app_error()
})
@@ -259,26 +251,24 @@ pub mod reqwest {
path: &str,
) -> Result<
(
impl futures::Stream<Item = Result<bytes::Bytes, Bytes>>
+ Send
+ 'static,
impl futures::Sink<Result<bytes::Bytes, Bytes>> + Send + 'static,
impl futures::Stream<Item = Result<Bytes, Bytes>> + Send + 'static,
impl futures::Sink<Bytes> + Send + 'static,
),
E,
Error,
> {
let mut websocket_server_url = get_server_url().to_string();
if let Some(postfix) = websocket_server_url.strip_prefix("http://")
{
websocket_server_url = format!("ws://{}", postfix);
websocket_server_url = format!("ws://{postfix}");
} else if let Some(postfix) =
websocket_server_url.strip_prefix("https://")
{
websocket_server_url = format!("wss://{}", postfix);
websocket_server_url = format!("wss://{postfix}");
}
let url = format!("{}{}", websocket_server_url, path);
let url = format!("{websocket_server_url}{path}");
let (ws_stream, _) =
tokio_tungstenite::connect_async(url).await.map_err(|e| {
E::from_server_fn_error(ServerFnErrorErr::Request(
Error::from_server_fn_error(ServerFnErrorErr::Request(
e.to_string(),
))
})?;
@@ -288,25 +278,18 @@ pub mod reqwest {
Ok((
read.map(|msg| match msg {
Ok(msg) => Ok(msg.into_data()),
Err(e) => Err(E::from_server_fn_error(
Err(e) => Err(OutputStreamError::from_server_fn_error(
ServerFnErrorErr::Request(e.to_string()),
)
.ser()),
}),
write.with(|msg: Result<Bytes, Bytes>| async move {
match msg {
Ok(msg) => {
Ok(tokio_tungstenite::tungstenite::Message::Binary(
msg,
))
}
Err(err) => {
let err = E::de(err);
Err(tokio_tungstenite::tungstenite::Error::Io(
std::io::Error::other(err.to_string()),
))
}
}
write.with(|msg: Bytes| async move {
Ok::<
tokio_tungstenite::tungstenite::Message,
tokio_tungstenite::tungstenite::Error,
>(
tokio_tungstenite::tungstenite::Message::Binary(msg)
)
}),
))
}

View File

@@ -6,7 +6,7 @@ use crate::{
ContentType, IntoRes, ServerFnError,
};
use bytes::Bytes;
use futures::{Stream, StreamExt};
use futures::{Stream, StreamExt, TryStreamExt};
use http::Method;
use std::{fmt::Debug, pin::Pin};
@@ -35,7 +35,8 @@ impl Encoding for Streaming {
impl<E, T, Request> IntoReq<Streaming, Request, E> for T
where
Request: ClientReq<E>,
T: Stream<Item = Bytes> + Send + Sync + 'static,
T: Stream<Item = Bytes> + Send + 'static,
E: FromServerFnError,
{
fn into_req(self, path: &str, accepts: &str) -> Result<Request, E> {
Request::try_new_post_streaming(
@@ -50,11 +51,12 @@ where
impl<E, T, Request> FromReq<Streaming, Request, E> for T
where
Request: Req<E> + Send + 'static,
T: From<ByteStream> + 'static,
T: From<ByteStream<E>> + 'static,
E: FromServerFnError,
{
async fn from_req(req: Request) -> Result<Self, E> {
let data = req.try_into_stream()?;
let s = ByteStream::new(data);
let s = ByteStream::new(data.map_err(|e| E::de(e)));
Ok(s.into())
}
}
@@ -71,37 +73,36 @@ where
/// end before the output will begin.
///
/// Streaming requests are only allowed over HTTP2 or HTTP3.
pub struct ByteStream(Pin<Box<dyn Stream<Item = Result<Bytes, Bytes>> + Send>>);
pub struct ByteStream<E = ServerFnError>(
Pin<Box<dyn Stream<Item = Result<Bytes, E>> + Send>>,
);
impl ByteStream {
impl<E> ByteStream<E> {
/// Consumes the wrapper, returning a stream of bytes.
pub fn into_inner(self) -> impl Stream<Item = Result<Bytes, Bytes>> + Send {
pub fn into_inner(self) -> impl Stream<Item = Result<Bytes, E>> + Send {
self.0
}
}
impl Debug for ByteStream {
impl<E> Debug for ByteStream<E> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("ByteStream").finish()
}
}
impl ByteStream {
impl<E> ByteStream<E> {
/// Creates a new `ByteStream` from the given stream.
pub fn new<T, E>(
pub fn new<T>(
value: impl Stream<Item = Result<T, E>> + Send + 'static,
) -> Self
where
T: Into<Bytes>,
E: Into<Bytes>,
{
Self(Box::pin(
value.map(|value| value.map(Into::into).map_err(Into::into)),
))
Self(Box::pin(value.map(|value| value.map(Into::into))))
}
}
impl<S, T> From<S> for ByteStream
impl<E, S, T> From<S> for ByteStream<E>
where
S: Stream<Item = T> + Send + 'static,
T: Into<Bytes>,
@@ -111,23 +112,27 @@ where
}
}
impl<E, Response> IntoRes<Streaming, Response, E> for ByteStream
impl<E, Response> IntoRes<Streaming, Response, E> for ByteStream<E>
where
Response: TryRes<E>,
E: 'static,
E: FromServerFnError,
{
async fn into_res(self) -> Result<Response, E> {
Response::try_from_stream(Streaming::CONTENT_TYPE, self.into_inner())
Response::try_from_stream(
Streaming::CONTENT_TYPE,
self.into_inner().map_err(|e| e.ser()),
)
}
}
impl<E, Response> FromRes<Streaming, Response, E> for ByteStream
impl<E, Response> FromRes<Streaming, Response, E> for ByteStream<E>
where
Response: ClientRes<E> + Send,
E: FromServerFnError,
{
async fn from_res(res: Response) -> Result<Self, E> {
let stream = res.try_into_stream()?;
Ok(ByteStream(Box::pin(stream)))
Ok(ByteStream::new(stream.map_err(|e| E::de(e))))
}
}
@@ -169,14 +174,14 @@ pub struct TextStream<E = ServerFnError>(
Pin<Box<dyn Stream<Item = Result<String, E>> + Send>>,
);
impl<E: FromServerFnError> Debug for TextStream<E> {
impl<E> Debug for TextStream<E> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("TextStream").finish()
}
}
impl<E: FromServerFnError> TextStream<E> {
/// Creates a new `ByteStream` from the given stream.
impl<E> TextStream<E> {
/// Creates a new `TextStream` from the given stream.
pub fn new(
value: impl Stream<Item = Result<String, E>> + Send + 'static,
) -> Self {
@@ -184,7 +189,7 @@ impl<E: FromServerFnError> TextStream<E> {
}
}
impl<E: FromServerFnError> TextStream<E> {
impl<E> TextStream<E> {
/// Consumes the wrapper, returning a stream of text.
pub fn into_inner(self) -> impl Stream<Item = Result<String, E>> + Send {
self.0
@@ -195,7 +200,6 @@ impl<E, S, T> From<S> for TextStream<E>
where
S: Stream<Item = T> + Send + 'static,
T: Into<String>,
E: FromServerFnError,
{
fn from(value: S) -> Self {
Self(Box::pin(value.map(|data| Ok(data.into()))))

View File

@@ -311,7 +311,7 @@ where
fn decode(bytes: Bytes) -> Result<ServerFnError<CustErr>, Self::Error> {
let data = String::from_utf8(bytes.to_vec())
.map_err(|err| format!("UTF-8 conversion error: {}", err))?;
.map_err(|err| format!("UTF-8 conversion error: {err}"))?;
data.split_once('|')
.ok_or_else(|| {
@@ -561,9 +561,7 @@ impl<E: FromServerFnError> FromStr for ServerFnErrorWrapper<E> {
}
/// A trait for types that can be returned from a server function.
pub trait FromServerFnError:
std::fmt::Debug + Sized + Display + 'static
{
pub trait FromServerFnError: std::fmt::Debug + Sized + 'static {
/// The encoding strategy used to serialize and deserialize this error type. Must implement the [`Encodes`](server_fn::Encodes) trait for references to the error type.
type Encoder: Encodes<Self> + Decodes<Self>;

View File

@@ -136,6 +136,7 @@ use base64::{engine::general_purpose::STANDARD_NO_PAD, DecodeError, Engine};
// re-exported to make it possible to implement a custom Client without adding a separate
// dependency on `bytes`
pub use bytes::Bytes;
use bytes::{BufMut, BytesMut};
use client::Client;
use codec::{Encoding, FromReq, FromRes, IntoReq, IntoRes};
#[doc(hidden)]
@@ -635,15 +636,19 @@ where
{
let (request_bytes, response_stream, response) =
request.try_into_websocket().await?;
let input = request_bytes.map(|request_bytes| match request_bytes {
Ok(request_bytes) => {
InputEncoding::decode(request_bytes).map_err(|e| {
InputStreamError::from_server_fn_error(
ServerFnErrorErr::Deserialization(e.to_string()),
)
})
let input = request_bytes.map(|request_bytes| {
let request_bytes = request_bytes
.map(|bytes| deserialize_result::<InputStreamError>(bytes))
.unwrap_or_else(Err);
match request_bytes {
Ok(request_bytes) => InputEncoding::decode(request_bytes)
.map_err(|e| {
InputStreamError::from_server_fn_error(
ServerFnErrorErr::Deserialization(e.to_string()),
)
}),
Err(err) => Err(InputStreamError::de(err)),
}
Err(err) => Err(InputStreamError::de(err)),
});
let boxed = Box::pin(input)
as Pin<
@@ -656,14 +661,17 @@ where
let output = server_fn(input.into()).await?;
let output = output.stream.map(|output| match output {
Ok(output) => OutputEncoding::encode(&output).map_err(|e| {
OutputStreamError::from_server_fn_error(
ServerFnErrorErr::Serialization(e.to_string()),
)
.ser()
}),
Err(err) => Err(err.ser()),
let output = output.stream.map(|output| {
let result = match output {
Ok(output) => OutputEncoding::encode(&output).map_err(|e| {
OutputStreamError::from_server_fn_error(
ServerFnErrorErr::Serialization(e.to_string()),
)
.ser()
}),
Err(err) => Err(err.ser()),
};
serialize_result(result)
});
Server::spawn(async move {
@@ -695,37 +703,42 @@ where
pin_mut!(input);
pin_mut!(sink);
while let Some(input) = input.stream.next().await {
if sink
.send(
input
.and_then(|input| {
InputEncoding::encode(&input).map_err(|e| {
InputStreamError::from_server_fn_error(
ServerFnErrorErr::Serialization(
e.to_string(),
),
)
})
})
.map_err(|e| e.ser()),
)
.await
.is_err()
{
let result = match input {
Ok(input) => {
InputEncoding::encode(&input).map_err(|e| {
InputStreamError::from_server_fn_error(
ServerFnErrorErr::Serialization(
e.to_string(),
),
)
.ser()
})
}
Err(err) => Err(err.ser()),
};
let result = serialize_result(result);
if sink.send(result).await.is_err() {
break;
}
}
});
// Return the output stream
let stream = stream.map(|request_bytes| match request_bytes {
Ok(request_bytes) => OutputEncoding::decode(request_bytes)
.map_err(|e| {
OutputStreamError::from_server_fn_error(
ServerFnErrorErr::Deserialization(e.to_string()),
)
}),
Err(err) => Err(OutputStreamError::de(err)),
let stream = stream.map(|request_bytes| {
let request_bytes = request_bytes
.map(|bytes| deserialize_result::<OutputStreamError>(bytes))
.unwrap_or_else(Err);
match request_bytes {
Ok(request_bytes) => OutputEncoding::decode(request_bytes)
.map_err(|e| {
OutputStreamError::from_server_fn_error(
ServerFnErrorErr::Deserialization(
e.to_string(),
),
)
}),
Err(err) => Err(OutputStreamError::de(err)),
}
});
let boxed = Box::pin(stream)
as Pin<
@@ -740,6 +753,51 @@ where
}
}
// Serializes a Result<Bytes, Bytes> into a single Bytes instance.
// Format: [tag: u8][content: Bytes]
// - Tag 0: Ok variant
// - Tag 1: Err variant
fn serialize_result(result: Result<Bytes, Bytes>) -> Bytes {
match result {
Ok(bytes) => {
let mut buf = BytesMut::with_capacity(1 + bytes.len());
buf.put_u8(0); // Tag for Ok variant
buf.extend_from_slice(&bytes);
buf.freeze()
}
Err(bytes) => {
let mut buf = BytesMut::with_capacity(1 + bytes.len());
buf.put_u8(1); // Tag for Err variant
buf.extend_from_slice(&bytes);
buf.freeze()
}
}
}
// Deserializes a Bytes instance back into a Result<Bytes, Bytes>.
fn deserialize_result<E: FromServerFnError>(
bytes: Bytes,
) -> Result<Bytes, Bytes> {
if bytes.is_empty() {
return Err(E::from_server_fn_error(
ServerFnErrorErr::Deserialization("Data is empty".into()),
)
.ser());
}
let tag = bytes[0];
let content = bytes.slice(1..);
match tag {
0 => Ok(content),
1 => Err(content),
_ => Err(E::from_server_fn_error(ServerFnErrorErr::Deserialization(
"Invalid data tag".into(),
))
.ser()), // Invalid tag
}
}
/// Encode format type
pub enum Format {
/// Binary representation
@@ -1218,3 +1276,45 @@ pub mod mock {
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::codec::JsonEncoding;
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
enum TestError {
ServerFnError(ServerFnErrorErr),
}
impl FromServerFnError for TestError {
type Encoder = JsonEncoding;
fn from_server_fn_error(value: ServerFnErrorErr) -> Self {
Self::ServerFnError(value)
}
}
#[test]
fn test_result_serialization() {
// Test Ok variant
let ok_result: Result<Bytes, Bytes> =
Ok(Bytes::from_static(b"success data"));
let serialized = serialize_result(ok_result);
let deserialized = deserialize_result::<TestError>(serialized);
assert!(deserialized.is_ok());
assert_eq!(deserialized.unwrap(), Bytes::from_static(b"success data"));
// Test Err variant
let err_result: Result<Bytes, Bytes> =
Err(Bytes::from_static(b"error details"));
let serialized = serialize_result(err_result);
let deserialized = deserialize_result::<TestError>(serialized);
assert!(deserialized.is_err());
assert_eq!(
deserialized.unwrap_err(),
Bytes::from_static(b"error details")
);
}
}

View File

@@ -117,7 +117,7 @@ where
) -> Result<
(
impl Stream<Item = Result<Bytes, Bytes>> + Send + 'static,
impl futures::Sink<Result<Bytes, Bytes>> + Send + 'static,
impl futures::Sink<Bytes> + Send + 'static,
Self::WebsocketResponse,
),
Error,
@@ -133,7 +133,7 @@ where
let (mut response_stream_tx, response_stream_rx) =
futures::channel::mpsc::channel(2048);
let (response_sink_tx, mut response_sink_rx) =
futures::channel::mpsc::channel::<Result<Bytes, Bytes>>(2048);
futures::channel::mpsc::channel::<Bytes>(2048);
actix_web::rt::spawn(async move {
loop {
@@ -142,16 +142,9 @@ where
let Some(incoming) = incoming else {
break;
};
match incoming {
Ok(message) => {
if let Err(err) = session.binary(message).await {
if let Err(err) = session.binary(incoming).await {
_ = response_stream_tx.start_send(Err(InputStreamError::from_server_fn_error(ServerFnErrorErr::Request(err.to_string())).ser()));
}
}
Err(err) => {
_ = response_stream_tx.start_send(Err(err));
}
}
},
outgoing = msg_stream.next().fuse() => {
let Some(outgoing) = outgoing else {
@@ -172,6 +165,9 @@ where
Ok(Message::Text(text)) => {
_ = response_stream_tx.start_send(Ok(text.into_bytes()));
}
Ok(Message::Close(_)) => {
break;
}
Ok(_other) => {
}
Err(e) => {

View File

@@ -79,7 +79,7 @@ where
) -> Result<
(
impl Stream<Item = Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Bytes> + Send + 'static,
Self::WebsocketResponse,
),
Error,
@@ -91,7 +91,7 @@ where
futures::stream::Once<
std::future::Ready<Result<Bytes, Bytes>>,
>,
futures::sink::Drain<Result<Bytes, Bytes>>,
futures::sink::Drain<Bytes>,
Self::WebsocketResponse,
),
Error,
@@ -117,9 +117,9 @@ where
))
})?;
let (mut outgoing_tx, outgoing_rx) =
futures::channel::mpsc::channel(2048);
let (incoming_tx, mut incoming_rx) =
futures::channel::mpsc::channel::<Result<Bytes, Bytes>>(2048);
let (incoming_tx, mut incoming_rx) =
futures::channel::mpsc::channel::<Bytes>(2048);
let response = upgrade
.on_failed_upgrade({
let mut outgoing_tx = outgoing_tx.clone();
@@ -134,18 +134,11 @@ where
let Some(incoming) = incoming else {
break;
};
match incoming {
Ok(message) => {
if let Err(err) = session.send(Message::Binary(message)).await {
_ = outgoing_tx.start_send(Err(InputStreamError::from_server_fn_error(ServerFnErrorErr::Request(err.to_string())).ser()));
}
}
Err(err) => {
_ = outgoing_tx.start_send(Err(err));
}
if let Err(err) = session.send(Message::Binary(incoming)).await {
_ = outgoing_tx.start_send(Err(InputStreamError::from_server_fn_error(ServerFnErrorErr::Request(err.to_string())).ser()));
}
},
outgoing = session.recv().fuse() => {
outgoing = session.recv().fuse() => {
let Some(outgoing) = outgoing else {
break;
};
@@ -159,6 +152,11 @@ where
Ok(Message::Text(text)) => {
_ = outgoing_tx.start_send(Ok(Bytes::from(text)));
}
Ok(Message::Ping(bytes)) => {
if session.send(Message::Pong(bytes)).await.is_err() {
break;
}
}
Ok(_other) => {}
Err(e) => {
_ = outgoing_tx.start_send(Err(InputStreamError::from_server_fn_error(ServerFnErrorErr::Response(e.to_string())).ser()));

View File

@@ -79,7 +79,7 @@ where
) -> Result<
(
impl Stream<Item = Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Bytes> + Send + 'static,
Self::WebsocketResponse,
),
Error,
@@ -87,7 +87,7 @@ where
Err::<
(
futures::stream::Once<std::future::Ready<Result<Bytes, Bytes>>>,
futures::sink::Drain<Result<Bytes, Bytes>>,
futures::sink::Drain<Bytes>,
Self::WebsocketResponse,
),
_,

View File

@@ -360,7 +360,7 @@ where
Output = Result<
(
impl Stream<Item = Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Bytes> + Send + 'static,
Self::WebsocketResponse,
),
Error,
@@ -415,7 +415,7 @@ where
) -> Result<
(
impl Stream<Item = Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Result<Bytes, Bytes>> + Send + 'static,
impl Sink<Bytes> + Send + 'static,
Self::WebsocketResponse,
),
Error,
@@ -424,7 +424,7 @@ where
Err::<
(
futures::stream::Once<std::future::Ready<Result<Bytes, Bytes>>>,
futures::sink::Drain<Result<Bytes, Bytes>>,
futures::sink::Drain<Bytes>,
Self::WebsocketResponse,
),
_,

View File

@@ -6,15 +6,34 @@ use crate::{
use bytes::Bytes;
use futures::{Stream, StreamExt};
pub use gloo_net::http::Response;
use http::{HeaderMap, HeaderName, HeaderValue};
use js_sys::Uint8Array;
use send_wrapper::SendWrapper;
use std::future::Future;
use std::{future::Future, str::FromStr};
use wasm_bindgen::JsCast;
use wasm_streams::ReadableStream;
/// The response to a `fetch` request made in the browser.
pub struct BrowserResponse(pub(crate) SendWrapper<Response>);
impl BrowserResponse {
/// Generate the headers from the internal [`Response`] object.
/// This is a workaround for the fact that the `Response` object does not
/// have a [`HeaderMap`] directly. This function will iterate over the
/// headers and convert them to a [`HeaderMap`].
pub fn generate_headers(&self) -> HeaderMap {
self.0
.headers()
.entries()
.filter_map(|(key, value)| {
let key = HeaderName::from_str(&key).ok()?;
let value = HeaderValue::from_str(&value).ok()?;
Some((key, value))
})
.collect()
}
}
impl<E: FromServerFnError> ClientRes<E> for BrowserResponse {
fn try_into_string(self) -> impl Future<Output = Result<String, E>> + Send {
// the browser won't send this async work between threads (because it's single-threaded)

View File

@@ -14,7 +14,7 @@ syn = { version = "2.0", features = ["full", "parsing", "extra-traits"] }
proc-macro2 = "1.0"
xxhash-rust = { version = "0.8.12", features = ["const_xxh64"] }
const_format = "0.2.33"
convert_case = "0.6.0"
convert_case = { workspace = true }
[build-dependencies]

View File

@@ -1,6 +1,6 @@
[package]
name = "tachys"
version = "0.2.0-rc1"
version = "0.2.0-rc3"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

View File

@@ -733,9 +733,9 @@ generate_event_types! {
// =========================================================
// DocumentAndElementEventHandlersEventMap
// =========================================================
copy: Event, // ClipboardEvent is unstable
cut: Event, // ClipboardEvent is unstable
paste: Event, // ClipboardEvent is unstable
copy: ClipboardEvent,
cut: ClipboardEvent,
paste: ClipboardEvent,
// =========================================================
// DocumentEventMap
@@ -758,11 +758,11 @@ use super::{
};
#[doc(no_inline)]
pub use web_sys::{
AnimationEvent, BeforeUnloadEvent, CompositionEvent, CustomEvent,
DeviceMotionEvent, DeviceOrientationEvent, DragEvent, ErrorEvent, Event,
FocusEvent, GamepadEvent, HashChangeEvent, InputEvent, KeyboardEvent,
MessageEvent, MouseEvent, PageTransitionEvent, PointerEvent, PopStateEvent,
ProgressEvent, PromiseRejectionEvent, SecurityPolicyViolationEvent,
StorageEvent, SubmitEvent, TouchEvent, TransitionEvent, UiEvent,
WheelEvent,
AnimationEvent, BeforeUnloadEvent, ClipboardEvent, CompositionEvent,
CustomEvent, DeviceMotionEvent, DeviceOrientationEvent, DragEvent,
ErrorEvent, Event, FocusEvent, GamepadEvent, HashChangeEvent, InputEvent,
KeyboardEvent, MessageEvent, MouseEvent, PageTransitionEvent, PointerEvent,
PopStateEvent, ProgressEvent, PromiseRejectionEvent,
SecurityPolicyViolationEvent, StorageEvent, SubmitEvent, TouchEvent,
TransitionEvent, UiEvent, WheelEvent,
};

View File

@@ -82,6 +82,10 @@ impl StreamBuilder {
/// Appends another stream to this one.
pub fn append(&mut self, mut other: StreamBuilder) {
if !self.sync_buf.is_empty() {
self.chunks
.push_back(StreamChunk::Sync(mem::take(&mut self.sync_buf)));
}
self.chunks.append(&mut other.chunks);
self.sync_buf.push_str(&other.sync_buf);
}
@@ -149,7 +153,7 @@ impl StreamBuilder {
self.sync_buf.reserve(11 + (id.len() * 2));
self.sync_buf.push_str("<!--s-");
for piece in id {
write!(&mut self.sync_buf, "{}-", piece).unwrap();
write!(&mut self.sync_buf, "{piece}-").unwrap();
}
if opening {
self.sync_buf.push_str("o-->");
@@ -202,7 +206,7 @@ impl StreamBuilder {
let mut id = String::new();
if let Some(ids) = &subbuilder.id {
for piece in ids {
write!(&mut id, "{}-", piece).unwrap();
write!(&mut id, "{piece}-").unwrap();
}
}
if let Some(id) = subbuilder.id.as_mut() {
@@ -319,6 +323,11 @@ impl OooChunk {
}
buf.push_str("})()</script>");
}
/// Consumes this structure and returns its inner chunks of the stream.
pub fn take_chunks(self) -> VecDeque<StreamChunk> {
self.chunks
}
}
impl Debug for StreamChunk {

View File

@@ -33,7 +33,11 @@ where
match (&mut state.state, self) {
// both errors: throw the new error and replace
(Either::Right(_), Err(new)) => {
state.error = Some(throw_error::throw(new.into()))
if let Some(old_error) =
state.error.replace(throw_error::throw(new.into()))
{
throw_error::clear(&old_error);
}
}
// both Ok: need to rebuild child
(Either::Left(old), Ok(new)) => {

View File

@@ -148,9 +148,7 @@ where
let cmds = diff(hashed_items, &new_hashed_items);
apply_diff(
parent
.as_ref()
.expect("Keyed list rebuilt before being mounted."),
parent.as_ref(),
marker,
cmds,
rendered_items,
@@ -589,7 +587,7 @@ impl Default for DiffOpAddMode {
}
fn apply_diff<T, VFS, V>(
parent: &crate::renderer::types::Element,
parent: Option<&crate::renderer::types::Element>,
marker: &crate::renderer::types::Placeholder,
diff: Diff,
children: &mut Vec<Option<(VFS, V::State)>>,
@@ -649,16 +647,18 @@ fn apply_diff<T, VFS, V>(
{
let (set_index, mut each_item) = moved_children[i].take().unwrap();
if let Some(Some((_, state))) =
children.get_next_closest_mounted_sibling(to)
{
state.insert_before_this_or_marker(
parent,
&mut each_item,
Some(marker.as_ref()),
)
} else {
each_item.mount(parent, Some(marker.as_ref()));
if let Some(parent) = parent {
if let Some(Some((_, state))) =
children.get_next_closest_mounted_sibling(to)
{
state.insert_before_this_or_marker(
parent,
&mut each_item,
Some(marker.as_ref()),
)
} else {
each_item.mount(parent, Some(marker.as_ref()));
}
}
set_index(to);
@@ -670,23 +670,25 @@ fn apply_diff<T, VFS, V>(
let (set_index, item) = view_fn(at, item);
let mut item = item.build();
match mode {
DiffOpAddMode::Normal => {
if let Some(Some((_, state))) =
children.get_next_closest_mounted_sibling(at)
{
state.insert_before_this_or_marker(
parent,
&mut item,
Some(marker.as_ref()),
)
} else {
if let Some(parent) = parent {
match mode {
DiffOpAddMode::Normal => {
if let Some(Some((_, state))) =
children.get_next_closest_mounted_sibling(at)
{
state.insert_before_this_or_marker(
parent,
&mut item,
Some(marker.as_ref()),
)
} else {
item.mount(parent, Some(marker.as_ref()));
}
}
DiffOpAddMode::Append => {
item.mount(parent, Some(marker.as_ref()));
}
}
DiffOpAddMode::Append => {
item.mount(parent, Some(marker.as_ref()));
}
}
children[at] = Some((set_index, item));