Compare commits


40 Commits
v0.3.1 ... 533

Author SHA1 Message Date
Greg Johnston
99139ac505 starting work on committing adds and moves 2023-05-27 07:21:29 -04:00
Jose Quesada
2e2d500610 saving changes 2023-05-26 13:39:34 -04:00
Jose Quesada
694e94db97 only one thing left to do with apply_cmds 2023-05-26 13:39:34 -04:00
Jose Quesada
7e0bf3514f applied [C, A, B] optimization, along with fixing another one 2023-05-26 13:39:34 -04:00
Jose Quesada
010bd8c05e reworked impl of diff for the Each component 2023-05-26 13:39:34 -04:00
Greg Johnston
4d3dd7a6e6 feat: add Axum extract() function (#1093) 2023-05-25 11:16:58 -04:00
yuuma03
cc68d20758 fix: duplicate headers (like Set-Cookie) on the actix integration (#1086) 2023-05-25 11:16:29 -04:00
Matt Joiner
20682e63ef examples: fix fetch example (#1096) 2023-05-25 11:15:47 -04:00
Andrew Wheeler(Genusis)
40363df4a1 examples: updated axum_database_sessions to axum_session along with axum_sessions_auth to axum_session_auth (#1090) 2023-05-24 17:21:24 -04:00
Greg Johnston
e3ea889d5f feat: add <Await/> component to improve ergonomics of loading async blocks (#1091) 2023-05-24 14:05:36 -04:00
Greg Johnston
7f14da3026 fix: missing ? in navigation now that removed (#1092) 2023-05-24 12:12:57 -04:00
Ben Wishovich
06d28f7d67 feat: use Axum SubStates to enable .with_state in Axum router (#1085) 2023-05-24 08:34:17 -04:00
sjud
27f2a672ba docs: added a hint for a common error when using use_navigate (#1063) 2023-05-23 19:51:03 -04:00
Greg Johnston
23f9d537e9 fix: correctly handle new navigations while in the middle of an async navigation (#1084) 2023-05-23 17:21:12 -04:00
Rushi
d86339bae3 feat: manually implement Debug, PartialEq, Eq and Hash for reactive types (#1080) (closes #1074) 2023-05-22 16:52:59 -04:00
Greg Johnston
846c338491 docs: clarify difference between set() and update() (#1082) 2023-05-22 15:34:15 -04:00
Greg Johnston
2d418dae93 fix: debug-mode bugs in <For/> (closes #955, #1075, #1076) (#1078) 2023-05-22 06:49:13 -04:00
Greg Johnston
91e0fcdc1b fix/change: remove ? prefix from search in browser (matching server behavior) - closes #1071 (#1077) 2023-05-21 22:06:38 -04:00
Greg Johnston
a9ed8461d1 feat: add "async routing" feature (#1055)
* add "async routing" feature that waits for async resources to resolve before navigating
* add support for Outlet
* add `<RoutingProgress/>` component
2023-05-21 06:46:23 -04:00
Vladimir Motylenko
5a71ca797a feat: RSX parser with recovery after errors, and unquoted text (#1054)
* Feat: Upgrade to new local version of syn-rsx

* chore: Make macro more IDE friendly

1. Add quotation to RawText node.
2. Replace vec! macro with [].to_vec().
Cons:
1. Temporary remove allow(unused_braces) from expressions, to allow completion after dot in rust-analyzer.

* chore: Change dependency from syn-rsx to rstml

* chore: Fix value_to_string usage, pr comments, and fmt.
2023-05-21 06:45:53 -04:00
agilarity
70eb07d7d6 test: setup e2e automatically (#1067) 2023-05-20 20:46:06 -04:00
Greg Johnston
71ee69af01 fix: avoid potential already-borrowed issues with resources nested in suspense 2023-05-20 20:42:06 -04:00
Ben Wishovich
dd41c0586c feat: allow specifying exact server function paths (#1069) 2023-05-19 16:47:28 -04:00
Greg Johnston
aaf63dbf5c docs: clarify SSR/WASM binary size comments (#1070) 2023-05-19 15:46:26 -04:00
Greg Johnston
87f6802967 docs: update notes on WASM binary size to work with SSR too (closes #1059) (#1068) 2023-05-19 15:08:32 -04:00
Greg Johnston
2cbf3581c5 fix: docs note on style refers to class (#1066) 2023-05-19 13:42:16 -04:00
agilarity
5a67e208fd test: verify tailwind example with playwright tests (#1062)
* chore: ignore playwright output

* fix: could not run playwright test

* test: should see the welcome message

* build: clean playwright output

* build: run playwright web tests

* build: setup e2e dependencies
2023-05-19 13:04:06 -04:00
Greg Johnston
3391a4a035 examples: fix todo_app_sqlite_axum (#1064) 2023-05-19 13:02:52 -04:00
Daniel Santana
076aa363a4 feat: added Debug, PartialEq and Eq derives to trigger. (#1060) 2023-05-18 20:32:25 -04:00
agilarity
2cb68c0bd4 fix: todomvc example style errors (#1058) 2023-05-18 15:49:34 -04:00
Greg Johnston
6eb24b5017 tests: fix broken SSR doctests (#1056) 2023-05-18 10:17:14 -04:00
yuuma03
b2faa6b86c feat: allow multipart forms on server fns (Actix) (#1048) 2023-05-17 19:53:55 -04:00
sjud
43990b5b67 docs: include link to book, Discord, examples (#1053) 2023-05-17 13:07:17 -04:00
kasbuunk
9453164dd2 docs: fix typo in view fn (#1050) 2023-05-16 14:34:37 -04:00
Greg Johnston
00fcd1c65e docs: fix small docs issues (closes #1045) (#1049) 2023-05-16 13:01:29 -04:00
Greg Johnston
85ad7b0f38 fix: <Suspense/> hydration when no resources are read under it (#1046) 2023-05-16 12:20:23 -04:00
Greg Johnston
f0a9940364 fix: leak in todomvc example (closes #706) 2023-05-15 14:53:39 -04:00
Mark Catley
b472aaf6a0 fix: typo in actix extract documentation (#1043) 2023-05-15 08:57:49 -04:00
Greg Johnston
059c1bf61c cargo fmt 2023-05-14 06:55:05 -04:00
Matt Crane
add13fd6a4 change: migrate Axum integration to use with_state over layer(Extension) (#1032) 2023-05-14 06:37:39 -04:00
86 changed files with 2403 additions and 1019 deletions

View File

@@ -25,22 +25,22 @@ members = [
exclude = ["benchmarks", "examples"]
[workspace.package]
version = "0.3.1"
version = "0.3.0"
[workspace.dependencies]
leptos = { path = "./leptos", default-features = false, version = "0.3.1" }
leptos_dom = { path = "./leptos_dom", default-features = false, version = "0.3.1" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.3.1" }
leptos_macro = { path = "./leptos_macro", default-features = false, version = "0.3.1" }
leptos_reactive = { path = "./leptos_reactive", default-features = false, version = "0.3.1" }
leptos_server = { path = "./leptos_server", default-features = false, version = "0.3.1" }
server_fn = { path = "./server_fn", default-features = false, version = "0.3.1" }
server_fn_macro = { path = "./server_fn_macro", default-features = false, version = "0.3.1" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", default-features = false, version = "0.3.1" }
leptos_config = { path = "./leptos_config", default-features = false, version = "0.3.1" }
leptos_router = { path = "./router", version = "0.3.1" }
leptos_meta = { path = "./meta", default-features = false, version = "0.3.1" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.3.1" }
leptos = { path = "./leptos", default-features = false, version = "0.3.0" }
leptos_dom = { path = "./leptos_dom", default-features = false, version = "0.3.0" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.3.0" }
leptos_macro = { path = "./leptos_macro", default-features = false, version = "0.3.0" }
leptos_reactive = { path = "./leptos_reactive", default-features = false, version = "0.3.0" }
leptos_server = { path = "./leptos_server", default-features = false, version = "0.3.0" }
server_fn = { path = "./server_fn", default-features = false, version = "0.3.0" }
server_fn_macro = { path = "./server_fn_macro", default-features = false, version = "0.3.0" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", default-features = false, version = "0.3.0" }
leptos_config = { path = "./leptos_config", default-features = false, version = "0.3.0" }
leptos_router = { path = "./router", version = "0.3.0" }
leptos_meta = { path = "./meta", default-features = false, version = "0.3.0" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.3.0" }
[profile.release]
codegen-units = 1

View File

@@ -41,6 +41,14 @@ build-std = ["std", "panic_abort", "core", "alloc"]
build-std-features = ["panic_immediate_abort"]
```
Note that if you're using this with SSR too, the same Cargo profile will be applied. You'll need to explicitly specify your target:
```toml
[build]
target = "x86_64-unknown-linux-gnu" # or whatever
```
And you'll need to add `panic = "abort"` to `[profile.release]` in `Cargo.toml`. Note that this applies the same `build-std` and panic settings to your server binary, which may not be desirable. Some further exploration is probably needed here.
5. One of the sources of binary size in WASM binaries can be `serde` serialization/deserialization code. Leptos uses `serde` by default to serialize and deserialize resources created with `create_resource`. You might try experimenting with the `miniserde` and `serde-lite` features, which allow you to use those crates for serialization and deserialization instead; each only implements a subset of `serde`'s functionality, but typically optimizes for size over speed.
## Things to Avoid

View File

@@ -90,7 +90,8 @@ view! { cx,
<button
// define an event listener with on:
on:click=move |_| {
set_count.update(|n| *n += 1);
// on stable, this is set_count.set(3);
set_count(3);
}
>
// text nodes are wrapped in quotation marks
@@ -142,6 +143,16 @@ in a function, telling the framework to update the view every time `count` chang
`{count()}` access the value of `count` once, and passes an `i32` into the view,
rendering it once, unreactively. You can see the difference in the CodeSandbox below!
Let's make one final change. `set_count(3)` is a pretty useless thing for a click handler to do. Let's replace “set this value to 3” with “increment this value by 1”:
```rust
move |_| {
set_count.update(|n| *n += 1);
}
```
You can see here that while `set_count` just sets the value, `set_count.update()` gives us a mutable reference and mutates the value in place. Either one will trigger a reactive update in our UI.
> Throughout this tutorial, we'll use CodeSandbox to show interactive examples. To
> show the browser in the sandbox, you may need to click `Add DevTools >
> Other Previews > 8080.` Hover over any of the variables to show Rust-Analyzer details
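
To make the `set`-versus-`update` distinction above concrete, here is a minimal, self-contained counter sketch (an editorial illustration, not part of this diff); it uses the stable-Rust method syntax (`.set()`, `.get()`) that the snippet above mentions for stable:
```rust
use leptos::*;

#[component]
fn Counter(cx: Scope) -> impl IntoView {
    let (count, set_count) = create_signal(cx, 0);
    view! { cx,
        // overwrite the value wholesale (on nightly this is `set_count(3)`)
        <button on:click=move |_| set_count.set(3)>"Set to 3"</button>
        // mutate the value in place through a mutable reference;
        // either call triggers the same reactive update in the UI
        <button on:click=move |_| set_count.update(|n| *n += 1)>"Increment"</button>
        <p>{move || count.get()}</p>
    }
}
```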

View File

@@ -15,7 +15,7 @@ You _could_ do this by just creating two `<progress>` elements:
let (count, set_count) = create_signal(cx, 0);
let double_count = move || count() * 2;
view! {
view! { cx,
<progress
max="50"
value=count

View File

@@ -0,0 +1,5 @@
[tasks.web-test]
dependencies = ["auto-setup", "cargo-leptos-e2e"]
[tasks.clean-all]
dependencies = ["clean-cargo", "clean-node_modules", "clean-playwright"]

View File

@@ -1,3 +1,6 @@
[env]
END2END_DIR = "end2end"
[tasks.pre-clippy]
env = { CARGO_MAKE_CLIPPY_ARGS = "--all-targets --all-features -- -D warnings" }
@@ -5,6 +8,9 @@ env = { CARGO_MAKE_CLIPPY_ARGS = "--all-targets --all-features -- -D warnings" }
description = "Check for style violations"
dependencies = ["check-format-flow", "clippy-flow"]
[tasks.check-format]
args = ["fmt", "--", "--check", "--config-path", "../../"]
[tasks.verify-local]
description = "Run all quality checks and tests from an example directory"
dependencies = ["check-style", "test-local"]
@@ -13,11 +19,69 @@ dependencies = ["check-style", "test-local"]
description = "Run all tests from an example directory"
dependencies = ["test", "web-test"]
[tasks.clean-cargo]
description = "Runs the cargo clean command."
category = "Cleanup"
command = "cargo"
args = ["clean"]
[tasks.clean-trunk]
description = "Runs the trunk clean command."
category = "Cleanup"
command = "trunk"
args = ["clean"]
[tasks.clean-node_modules]
description = "Delete all node_modules directories"
category = "Cleanup"
script = '''
find . -type d -name node_modules | xargs rm -rf
'''
[tasks.clean-playwright]
description = "Delete playwright directories"
category = "Cleanup"
cwd = "${END2END_DIR}"
command = "rm"
args = ["-rf", "playwright", "playwright/.cache", "test-results"]
[tasks.clean-all]
dependencies = ["clean", "clean-trunk"]
description = "Delete all temporary directories"
category = "Cleanup"
dependencies = ["clean-cargo"]
[tasks.wasm-web-test]
env = { CARGO_MAKE_WASM_TEST_ARGS = "--headless --chrome" }
command = "cargo"
args = ["make", "wasm-pack-test"]
[tasks.cargo-leptos-e2e]
description = "Runs end to end tests with cargo leptos"
command = "cargo"
args = ["leptos", "end-to-end"]
[tasks.setup]
description = "Setup e2e dependencies"
cwd = "${END2END_DIR}"
script = '''
BOLD="\e[1m"
GREEN="\e[0;32m"
RED="\e[0;31m"
RESET="\e[0m"
if command -v pnpm; then
pnpm install
elif command -v npm; then
npm install
else
echo "${RED}${BOLD}ERROR${RESET} - pnpm or npm is required by this task"
exit 1
fi
'''
[tasks.auto-setup]
script = '''
if [ ! -d "${END2END_DIR}/node_modules" ]; then
cargo make setup
fi
'''

View File

@@ -1,4 +1,5 @@
[tasks.web-test]
env = { CARGO_MAKE_WASM_TEST_ARGS = "--headless --chrome" }
command = "cargo"
args = ["make", "wasm-pack-test"]
dependencies = ["wasm-web-test"]
[tasks.clean-all]
dependencies = ["clean-cargo", "clean-trunk"]

View File

@@ -10,12 +10,12 @@ crate-type = ["cdylib", "rlib"]
console_log = "1.0.0"
console_error_panic_hook = "0.1.7"
cfg-if = "1.0.0"
leptos = { path = "../../../leptos/leptos", default-features = false, features = [
leptos = { path = "../../leptos", default-features = false, features = [
"serde",
] }
leptos_axum = { path = "../../../leptos/integrations/axum", default-features = false, optional = true }
leptos_meta = { path = "../../../leptos/meta", default-features = false }
leptos_router = { path = "../../../leptos/router", default-features = false }
leptos_axum = { path = "../../integrations/axum", default-features = false, optional = true }
leptos_meta = { path = "../../meta", default-features = false }
leptos_router = { path = "../../router", default-features = false }
log = "0.4.17"
serde = { version = "1", features = ["derive"] }
simple_logger = "4.0.0"

View File

@@ -3,19 +3,17 @@ use cfg_if::cfg_if;
cfg_if! { if #[cfg(feature = "ssr")] {
use axum::{
body::{boxed, Body, BoxBody},
extract::Extension,
extract::State,
response::IntoResponse,
http::{Request, Response, StatusCode, Uri},
};
use axum::response::Response as AxumResponse;
use tower::ServiceExt;
use tower_http::services::ServeDir;
use std::sync::Arc;
use leptos::{LeptosOptions, view};
use crate::landing::App;
pub async fn file_and_error_handler(uri: Uri, Extension(options): Extension<Arc<LeptosOptions>>, req: Request<Body>) -> AxumResponse {
let options = &*options;
pub async fn file_and_error_handler(uri: Uri, State(options): State<LeptosOptions>, req: Request<Body>) -> AxumResponse {
let root = options.site_root.clone();
let res = get_static_file(uri.clone(), &root).await.unwrap();

View File

@@ -5,7 +5,7 @@ cfg_if! { if #[cfg(feature = "ssr")] {
use crate::landing::*;
use axum::body::Body as AxumBody;
use axum::{
extract::{Extension, Path},
extract::{State, Path},
http::Request,
response::{IntoResponse, Response},
routing::{get, post},
@@ -14,18 +14,17 @@ cfg_if! { if #[cfg(feature = "ssr")] {
use errors_axum::*;
use leptos::*;
use leptos_axum::{generate_route_list, LeptosRoutes};
use std::sync::Arc;
}}
//Define a handler to test extractor with state
#[cfg(feature = "ssr")]
async fn custom_handler(
Path(id): Path<String>,
Extension(options): Extension<Arc<LeptosOptions>>,
State(options): State<LeptosOptions>,
req: Request<AxumBody>,
) -> Response {
let handler = leptos_axum::render_app_to_stream_with_context(
(*options).clone(),
options.clone(),
move |cx| {
provide_context(cx, id.clone());
},
@@ -53,12 +52,12 @@ async fn main() {
.route("/api/*fn_name", post(leptos_axum::handle_server_fns))
.route("/special/:id", get(custom_handler))
.leptos_routes(
leptos_options.clone(),
&leptos_options,
routes,
|cx| view! { cx, <App/> },
)
.fallback(file_and_error_handler)
.layer(Extension(Arc::new(leptos_options)));
.with_state(leptos_options);
// run our app with hyper
// `axum::Server` is a re-export of `hyper::Server`

View File

@@ -17,7 +17,9 @@ pub enum FetchError {
Json,
}
async fn fetch_cats(count: u32) -> Result<Vec<String>, FetchError> {
type CatCount = usize;
async fn fetch_cats(count: CatCount) -> Result<Vec<String>, FetchError> {
if count > 0 {
// make the request
let res = reqwasm::http::Request::get(&format!(
@@ -32,6 +34,7 @@ async fn fetch_cats(count: u32) -> Result<Vec<String>, FetchError> {
.map_err(|_| FetchError::Json)?
// extract the URL field for each cat
.into_iter()
.take(count)
.map(|cat| cat.url)
.collect::<Vec<_>>();
Ok(res)
@@ -41,7 +44,7 @@ async fn fetch_cats(count: u32) -> Result<Vec<String>, FetchError> {
}
pub fn fetch_example(cx: Scope) -> impl IntoView {
let (cat_count, set_cat_count) = create_signal::<u32>(cx, 0);
let (cat_count, set_cat_count) = create_signal::<CatCount>(cx, 0);
// we use local_resource here because
// 1) our error type isn't serializable/deserializable
@@ -75,7 +78,7 @@ pub fn fetch_example(cx: Scope) -> impl IntoView {
cats.read(cx).map(|data| {
data.map(|data| {
data.iter()
.map(|s| view! { cx, <span>{s}</span> })
.map(|s| view! { cx, <p><img src={s}/></p> })
.collect_view(cx)
})
})
@@ -89,7 +92,7 @@ pub fn fetch_example(cx: Scope) -> impl IntoView {
type="number"
prop:value=move || cat_count.get().to_string()
on:input=move |ev| {
let val = event_target_value(&ev).parse::<u32>().unwrap_or(0);
let val = event_target_value(&ev).parse::<CatCount>().unwrap_or(0);
set_cat_count(val);
}
/>
@@ -98,7 +101,9 @@ pub fn fetch_example(cx: Scope) -> impl IntoView {
<Transition fallback=move || {
view! { cx, <div>"Loading (Suspense Fallback)..."</div> }
}>
<div>
{cats_view}
</div>
</Transition>
</ErrorBoundary>
</div>

View File

@@ -1,5 +1,5 @@
use cfg_if::cfg_if;
use leptos::{component, view, IntoView, Scope};
use leptos::*;
use leptos_meta::*;
use leptos_router::*;
mod api;
@@ -9,23 +9,25 @@ use routes::{nav::*, stories::*, story::*, users::*};
#[component]
pub fn App(cx: Scope) -> impl IntoView {
provide_meta_context(cx);
view! {
cx,
<>
<Stylesheet id="leptos" href="/pkg/hackernews.css"/>
<Link rel="shortcut icon" type_="image/ico" href="/favicon.ico"/>
<Meta name="description" content="Leptos implementation of a HackerNews demo."/>
<Router>
<Nav />
<main>
<Routes>
<Route path="users/:id" view=|cx| view! { cx, <User/> }/>
<Route path="stories/:id" view=|cx| view! { cx, <Story/> }/>
<Route path=":stories?" view=|cx| view! { cx, <Stories/> }/>
</Routes>
</main>
</Router>
</>
let (is_routing, set_is_routing) = create_signal(cx, false);
view! { cx,
<Stylesheet id="leptos" href="/pkg/hackernews.css"/>
<Link rel="shortcut icon" type_="image/ico" href="/favicon.ico"/>
<Meta name="description" content="Leptos implementation of a HackerNews demo."/>
// adding `set_is_routing` causes the router to wait for async data to load on new pages
<Router set_is_routing>
// shows a progress bar while async data are loading
<RoutingProgress is_routing max_time=std::time::Duration::from_millis(250)/>
<Nav />
<main>
<Routes>
<Route path="users/:id" view=|cx| view! { cx, <User/> }/>
<Route path="stories/:id" view=|cx| view! { cx, <Story/> }/>
<Route path=":stories?" view=|cx| view! { cx, <Stories/> }/>
</Routes>
</main>
</Router>
}
}

View File

@@ -4,19 +4,17 @@ cfg_if! {
if #[cfg(feature = "ssr")] {
use axum::{
body::{boxed, Body, BoxBody},
extract::Extension,
extract::State,
response::IntoResponse,
http::{Request, Response, StatusCode, Uri},
};
use axum::response::Response as AxumResponse;
use tower::ServiceExt;
use tower_http::services::ServeDir;
use std::sync::Arc;
use leptos::{LeptosOptions};
use crate::error_template::error_template;
pub async fn file_and_error_handler(uri: Uri, Extension(options): Extension<Arc<LeptosOptions>>, req: Request<Body>) -> AxumResponse {
let options = &*options;
pub async fn file_and_error_handler(uri: Uri, State(options): State<LeptosOptions>, req: Request<Body>) -> AxumResponse {
let root = options.site_root.clone();
let res = get_static_file(uri.clone(), &root).await.unwrap();

View File

@@ -7,10 +7,8 @@ if #[cfg(feature = "ssr")] {
use axum::{
Router,
routing::get,
extract::Extension,
};
use leptos_axum::{generate_route_list, LeptosRoutes};
use std::sync::Arc;
use hackernews_axum::fallback::file_and_error_handler;
#[tokio::main]
@@ -27,9 +25,9 @@ if #[cfg(feature = "ssr")] {
// build our application with a route
let app = Router::new()
.route("/favicon.ico", get(file_and_error_handler))
.leptos_routes(leptos_options.clone(), routes, |cx| view! { cx, <App/> } )
.leptos_routes(&leptos_options, routes, |cx| view! { cx, <App/> } )
.fallback(file_and_error_handler)
.layer(Extension(Arc::new(leptos_options)));
.with_state(leptos_options);
// run our app with hyper
// `axum::Server` is a re-export of `hyper::Server`

View File

@@ -22,7 +22,7 @@ leptos_router = { path = "../../router", default-features = false }
log = "0.4.17"
simple_logger = "4.0.0"
serde = { version = "1.0.148", features = ["derive"] }
axum = { version = "0.6.1", optional = true }
axum = { version = "0.6.1", optional = true, features=["macros"] }
tower = { version = "0.4.13", optional = true }
tower-http = { version = "0.4", features = ["fs"], optional = true }
tokio = { version = "1.22.0", features = ["full"], optional = true }
@@ -33,10 +33,10 @@ sqlx = { version = "0.6.2", features = [
], optional = true }
thiserror = "1.0.38"
wasm-bindgen = "0.2"
axum_sessions_auth = { version = "7.0.0", features = [
axum_session_auth = { version = "0.2.1", features = [
"sqlite-rustls",
], optional = true }
axum_database_sessions = { version = "7.0.0", features = [
axum_session = { version = "0.2.3", features = [
"sqlite-rustls",
], optional = true }
bcrypt = { version = "0.14", optional = true }
@@ -49,8 +49,8 @@ ssr = [
"dep:tower",
"dep:tower-http",
"dep:tokio",
"dep:axum_sessions_auth",
"dep:axum_database_sessions",
"dep:axum_session_auth",
"dep:axum_session",
"dep:async-trait",
"dep:sqlx",
"dep:bcrypt",

View File

@@ -6,10 +6,10 @@ use std::collections::HashSet;
cfg_if! {
if #[cfg(feature = "ssr")] {
use sqlx::SqlitePool;
use axum_sessions_auth::{SessionSqlitePool, Authentication, HasPermission};
use axum_session_auth::{SessionSqlitePool, Authentication, HasPermission};
use bcrypt::{hash, verify, DEFAULT_COST};
use crate::todo::{pool, auth};
pub type AuthSession = axum_sessions_auth::AuthSession<User, i64, SessionSqlitePool, SqlitePool>;
pub type AuthSession = axum_session_auth::AuthSession<User, i64, SessionSqlitePool, SqlitePool>;
}}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]

View File

@@ -4,20 +4,18 @@ cfg_if! {
if #[cfg(feature = "ssr")] {
use axum::{
body::{boxed, Body, BoxBody},
extract::Extension,
extract::State,
response::IntoResponse,
http::{Request, Response, StatusCode, Uri},
};
use axum::response::Response as AxumResponse;
use tower::ServiceExt;
use tower_http::services::ServeDir;
use std::sync::Arc;
use leptos::{LeptosOptions, Errors, view};
use crate::error_template::ErrorTemplate;
use crate::errors::TodoAppError;
pub async fn file_and_error_handler(uri: Uri, Extension(options): Extension<Arc<LeptosOptions>>, req: Request<Body>) -> AxumResponse {
let options = &*options;
pub async fn file_and_error_handler(uri: Uri, State(options): State<LeptosOptions>, req: Request<Body>) -> AxumResponse {
let root = options.site_root.clone();
let res = get_static_file(uri.clone(), &root).await.unwrap();

View File

@@ -4,6 +4,7 @@ pub mod auth;
pub mod error_template;
pub mod errors;
pub mod fallback;
pub mod state;
pub mod todo;
// Needs to be in lib.rs AFAIK because wasm-bindgen needs us to be compiling a lib. I may be wrong.

View File

@@ -6,38 +6,38 @@ if #[cfg(feature = "ssr")] {
use axum::{
response::{Response, IntoResponse},
routing::get,
extract::{Path, Extension, RawQuery},
extract::{Path, State, RawQuery},
http::{Request, header::HeaderMap},
body::Body as AxumBody,
Router,
};
use session_auth_axum::todo::*;
use session_auth_axum::auth::*;
use session_auth_axum::state::AppState;
use session_auth_axum::*;
use session_auth_axum::fallback::file_and_error_handler;
use leptos_axum::{generate_route_list, LeptosRoutes, handle_server_fns_with_context};
use leptos::{log, view, provide_context, LeptosOptions, get_configuration};
use std::sync::Arc;
use leptos::{log, view, provide_context, get_configuration};
use sqlx::{SqlitePool, sqlite::SqlitePoolOptions};
use axum_database_sessions::{SessionConfig, SessionLayer, SessionStore};
use axum_sessions_auth::{AuthSessionLayer, AuthConfig, SessionSqlitePool};
use axum_session::{SessionConfig, SessionLayer, SessionStore};
use axum_session_auth::{AuthSessionLayer, AuthConfig, SessionSqlitePool};
async fn server_fn_handler(Extension(pool): Extension<SqlitePool>, auth_session: AuthSession, path: Path<String>, headers: HeaderMap, raw_query: RawQuery,
async fn server_fn_handler(State(app_state): State<AppState>, auth_session: AuthSession, path: Path<String>, headers: HeaderMap, raw_query: RawQuery,
request: Request<AxumBody>) -> impl IntoResponse {
log!("{:?}", path);
handle_server_fns_with_context(path, headers, raw_query, move |cx| {
provide_context(cx, auth_session.clone());
provide_context(cx, pool.clone());
provide_context(cx, app_state.pool.clone());
}, request).await
}
async fn leptos_routes_handler(Extension(pool): Extension<SqlitePool>, auth_session: AuthSession, Extension(options): Extension<Arc<LeptosOptions>>, req: Request<AxumBody>) -> Response{
let handler = leptos_axum::render_app_to_stream_with_context((*options).clone(),
async fn leptos_routes_handler(auth_session: AuthSession, State(app_state): State<AppState>, req: Request<AxumBody>) -> Response{
let handler = leptos_axum::render_app_to_stream_with_context(app_state.leptos_options.clone(),
move |cx| {
provide_context(cx, auth_session.clone());
provide_context(cx, pool.clone());
provide_context(cx, app_state.pool.clone());
},
|cx| view! { cx, <TodoApp/> }
);
@@ -72,16 +72,20 @@ if #[cfg(feature = "ssr")] {
let addr = leptos_options.site_addr;
let routes = generate_route_list(|cx| view! { cx, <TodoApp/> }).await;
let app_state = AppState{
leptos_options,
pool: pool.clone(),
};
// build our application with a route
let app = Router::new()
.route("/api/*fn_name", get(server_fn_handler).post(server_fn_handler))
.leptos_routes_with_handler(routes, get(leptos_routes_handler) )
.fallback(file_and_error_handler)
.layer(AuthSessionLayer::<User, i64, SessionSqlitePool, SqlitePool>::new(Some(pool.clone()))
.with_config(auth_config))
.with_config(auth_config))
.layer(SessionLayer::new(session_store))
.layer(Extension(Arc::new(leptos_options)))
.layer(Extension(pool));
.with_state(app_state);
// run our app with hyper
// `axum::Server` is a re-export of `hyper::Server`

View File

@@ -0,0 +1,17 @@
use cfg_if::cfg_if;
cfg_if! {
if #[cfg(feature = "ssr")] {
use leptos::LeptosOptions;
use sqlx::SqlitePool;
use axum::extract::FromRef;
/// This takes advantage of Axum's SubStates feature by deriving FromRef. This is the only way to have more than one
/// item in Axum's State. Leptos requires you to have LeptosOptions in your State struct for the Leptos route handlers.
#[derive(FromRef, Debug, Clone)]
pub struct AppState{
pub leptos_options: LeptosOptions,
pub pool: SqlitePool
}
}
}
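
As a hedged illustration of what the `FromRef` derive above enables (this handler is not part of the changeset): on a `Router<AppState>` built with `.with_state(app_state)`, a handler can extract any single field of `AppState` as its own sub-state.
```rust
// Hypothetical handler, assuming the Router<AppState> set up in main.rs above.
#[cfg(feature = "ssr")]
async fn site_root(
    // `LeptosOptions` is pulled out of `AppState` via the derived `FromRef` impl;
    // `State<SqlitePool>` would work the same way for `AppState::pool`.
    axum::extract::State(options): axum::extract::State<leptos::LeptosOptions>,
) -> String {
    options.site_root.clone()
}
```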

View File

@@ -3,7 +3,7 @@ use cfg_if::cfg_if;
cfg_if! { if #[cfg(feature = "ssr")] {
use axum::{
body::{boxed, Body, BoxBody},
extract::Extension,
extract::State,
response::IntoResponse,
http::{Request, Response, StatusCode, Uri},
};
@@ -14,8 +14,7 @@ cfg_if! { if #[cfg(feature = "ssr")] {
use leptos::{LeptosOptions, view};
use crate::app::App;
pub async fn file_and_error_handler(uri: Uri, Extension(options): Extension<Arc<LeptosOptions>>, req: Request<Body>) -> AxumResponse {
let options = &*options;
pub async fn file_and_error_handler(uri: Uri, State(options): State<LeptosOptions>, req: Request<Body>) -> AxumResponse {
let root = options.site_root.clone();
let res = get_static_file(uri.clone(), &root).await.unwrap();

View File

@@ -1,11 +1,10 @@
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
use axum::{extract::Extension, routing::post, Router};
use axum::{routing::post, Router};
use leptos::*;
use leptos_axum::{generate_route_list, LeptosRoutes};
use ssr_modes_axum::{app::*, fallback::file_and_error_handler};
use std::sync::Arc;
let conf = get_configuration(None).await.unwrap();
let addr = conf.leptos_options.site_addr;
@@ -19,12 +18,12 @@ async fn main() {
let app = Router::new()
.route("/api/*fn_name", post(leptos_axum::handle_server_fns))
.leptos_routes(
leptos_options.clone(),
&leptos_options,
routes,
|cx| view! { cx, <App/> },
)
.fallback(file_and_error_handler)
.layer(Extension(Arc::new(leptos_options)));
.with_state(leptos_options);
// run our app with hyper
// `axum::Server` is a re-export of `hyper::Server`

View File

@@ -8,3 +8,10 @@ Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk
# Support playwright testing
node_modules/
test-results/
end2end/playwright-report/
playwright/.cache/
pnpm-lock.yaml

View File

@@ -96,6 +96,7 @@ site-addr = "127.0.0.1:3000"
reload-port = 3001
# [Optional] Command to use when running end2end tests. It will run in the end2end dir.
end2end-cmd = "npx playwright test"
end2end-dir = "end2end"
# The browserlist query used for optimizing the CSS.
browserquery = "defaults"
# Set by cargo-leptos watch when building with that tool. Controls whether autoreload JS will be included in the head

View File

@@ -1,4 +1,7 @@
extend = [{ path = "../cargo-make/common.toml" }]
extend = [
{ path = "../cargo-make/common.toml" },
{ path = "../cargo-make/cargo-leptos-web-test.toml" },
]
[tasks.build]
command = "cargo"

View File

@@ -104,3 +104,8 @@ You'll need to install trunk to client side render this bundle.
## Attribution
Many thanks to GreatGreg for putting together this guide. You can find the original, with added details, [here](https://github.com/leptos-rs/leptos/discussions/125).
## Playwright Testing
- Run `cargo make setup` to install dependencies
- Run `cargo leptos test` or `cargo leptos end-to-end` to run the tests

View File

@@ -1,9 +1,7 @@
import { test, expect } from "@playwright/test";
test("homepage has title and links to intro page", async ({ page }) => {
test("should see the welcome message", async ({ page }) => {
await page.goto("http://localhost:3000/");
await expect(page).toHaveTitle("Cargo Leptos");
await expect(page.locator("h1")).toHaveText("Hi from your Leptos WASM!");
await expect(page.locator("h2")).toHaveText("Welcome to Leptos with Tailwind");
});

View File

@@ -4,20 +4,18 @@ cfg_if! {
if #[cfg(feature = "ssr")] {
use axum::{
body::{boxed, Body, BoxBody},
extract::Extension,
extract::State,
response::IntoResponse,
http::{Request, Response, StatusCode, Uri},
};
use axum::response::Response as AxumResponse;
use tower::ServiceExt;
use tower_http::services::ServeDir;
use std::sync::Arc;
use leptos::{LeptosOptions, Errors, view};
use crate::error_template::ErrorTemplate;
use crate::errors::TodoAppError;
pub async fn file_and_error_handler(uri: Uri, Extension(options): Extension<Arc<LeptosOptions>>, req: Request<Body>) -> AxumResponse {
let options = &*options;
pub async fn file_and_error_handler(uri: Uri, State(options): State<LeptosOptions>, req: Request<Body>) -> AxumResponse {
let root = options.site_root.clone();
let res = get_static_file(uri.clone(), &root).await.unwrap();

View File

@@ -5,7 +5,7 @@ cfg_if! {
use leptos::*;
use axum::{
routing::{post, get},
extract::{Extension, Path},
extract::{State, Path},
http::Request,
response::{IntoResponse, Response},
Router,
@@ -15,11 +15,10 @@ cfg_if! {
use todo_app_sqlite_axum::*;
use crate::fallback::file_and_error_handler;
use leptos_axum::{generate_route_list, LeptosRoutes};
use std::sync::Arc;
//Define a handler to test extractor with state
async fn custom_handler(Path(id): Path<String>, Extension(options): Extension<Arc<LeptosOptions>>, req: Request<AxumBody>) -> Response{
let handler = leptos_axum::render_app_to_stream_with_context((*options).clone(),
async fn custom_handler(Path(id): Path<String>, State(options): State<LeptosOptions>, req: Request<AxumBody>) -> Response{
let handler = leptos_axum::render_app_to_stream_with_context(options,
move |cx| {
provide_context(cx, id.clone());
},
@@ -50,9 +49,9 @@ cfg_if! {
let app = Router::new()
.route("/api/*fn_name", post(leptos_axum::handle_server_fns))
.route("/special/:id", get(custom_handler))
.leptos_routes(leptos_options.clone(), routes, |cx| view! { cx, <TodoApp/> } )
.leptos_routes(&leptos_options, routes, |cx| view! { cx, <TodoApp/> } )
.fallback(file_and_error_handler)
.layer(Extension(Arc::new(leptos_options)));
.with_state(leptos_options);
// run our app with hyper
// `axum::Server` is a re-export of `hyper::Server`

View File

@@ -18,7 +18,6 @@ cfg_if! {
_ = GetTodos::register();
_ = AddTodo::register();
_ = DeleteTodo::register();
_ = FormDataHandler::register();
}
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, sqlx::FromRow)]
@@ -108,30 +107,6 @@ pub async fn delete_todo(id: u16) -> Result<(), ServerFnError> {
.map_err(|e| ServerFnError::ServerError(e.to_string()))
}
#[derive(Serialize, Deserialize, Debug, Clone, Default)]
pub struct FormData {
hi: String,
}
#[server(FormDataHandler, "/api")]
pub async fn form_data(cx: Scope) -> Result<FormData, ServerFnError> {
use axum::extract::FromRequest;
let req = use_context::<leptos_axum::LeptosRequest<axum::body::Body>>(cx)
.and_then(|req| req.take_request())
.unwrap();
if req.method() == http::Method::POST {
let form = axum::Form::from_request(req, &())
.await
.map_err(|e| ServerFnError::ServerError(e.to_string()))?;
Ok(form.0)
} else {
Err(ServerFnError::ServerError(
"wrong form fields submitted".to_string(),
))
}
}
#[component]
pub fn TodoApp(cx: Scope) -> impl IntoView {
//let id = use_context::<String>(cx);
@@ -146,29 +121,9 @@ pub fn TodoApp(cx: Scope) -> impl IntoView {
</header>
<main>
<Routes>
<Route path="" view=|cx| view! {
cx,
<ErrorBoundary fallback=|cx, errors| view!{cx, <ErrorTemplate errors=errors/>}>
<Todos/>
</ErrorBoundary>
}/> //Route
<Route path="weird" methods=&[Method::Get, Method::Post]
ssr=SsrMode::Async
view=|cx| {
let res = create_resource(cx, || (), move |_| async move {
form_data(cx).await
});
view! { cx,
<Suspense fallback=|| ()>
<pre>
{move || {
res.with(cx, |body| format!("{body:#?}"))
}}
</pre>
</Suspense>
}
}
/>
<Route path="" view=|cx| view! { cx,
<Todos/>
}/>
</Routes>
</main>
</Router>
@@ -203,63 +158,65 @@ pub fn Todos(cx: Scope) -> impl IntoView {
<input type="submit" value="Add"/>
</MultiActionForm>
<Transition fallback=move || view! {cx, <p>"Loading..."</p> }>
{move || {
let existing_todos = {
move || {
todos.read(cx)
.map(move |todos| match todos {
Err(e) => {
view! { cx, <pre class="error">"Server Error: " {e.to_string()}</pre>}.into_view(cx)
}
Ok(todos) => {
if todos.is_empty() {
view! { cx, <p>"No tasks were found."</p> }.into_view(cx)
} else {
todos
.into_iter()
.map(move |todo| {
view! {
cx,
<li>
{todo.title}
<ActionForm action=delete_todo>
<input type="hidden" name="id" value={todo.id}/>
<input type="submit" value="X"/>
</ActionForm>
</li>
}
})
.collect_view(cx)
<ErrorBoundary fallback=|cx, errors| view!{cx, <ErrorTemplate errors=errors/>}>
{move || {
let existing_todos = {
move || {
todos.read(cx)
.map(move |todos| match todos {
Err(e) => {
view! { cx, <pre class="error">"Server Error: " {e.to_string()}</pre>}.into_view(cx)
}
}
})
.unwrap_or_default()
}
};
let pending_todos = move || {
submissions
.get()
.into_iter()
.filter(|submission| submission.pending().get())
.map(|submission| {
view! {
cx,
<li class="pending">{move || submission.input.get().map(|data| data.title) }</li>
Ok(todos) => {
if todos.is_empty() {
view! { cx, <p>"No tasks were found."</p> }.into_view(cx)
} else {
todos
.into_iter()
.map(move |todo| {
view! {
cx,
<li>
{todo.title}
<ActionForm action=delete_todo>
<input type="hidden" name="id" value={todo.id}/>
<input type="submit" value="X"/>
</ActionForm>
</li>
}
})
.collect_view(cx)
}
}
})
.unwrap_or_default()
}
})
.collect_view(cx)
};
};
view! {
cx,
<ul>
{existing_todos}
{pending_todos}
</ul>
let pending_todos = move || {
submissions
.get()
.into_iter()
.filter(|submission| submission.pending().get())
.map(|submission| {
view! {
cx,
<li class="pending">{move || submission.input.get().map(|data| data.title) }</li>
}
})
.collect_view(cx)
};
view! {
cx,
<ul>
{existing_todos}
{pending_todos}
</ul>
}
}
}
}
</ErrorBoundary>
</Transition>
</div>
}

View File

@@ -7,8 +7,8 @@ if #[cfg(feature = "ssr")] {
errors::TodoAppError,
};
use http::Uri;
use leptos::{view, Errors, LeptosOptions};
use std::sync::Arc;
use leptos::{view, Errors, LeptosOptions};
use viz::{
handlers::serve, header::HeaderMap, types::RouteInfo, Body, Error, Handler,
Request, RequestExt, Response, ResponseExt, Result,
@@ -18,7 +18,7 @@ if #[cfg(feature = "ssr")] {
let uri = req.uri().clone();
let headers = req.headers().clone();
let route_info = req.route_info().clone();
let options = &*req.state::<Arc<LeptosOptions>>().ok_or(
let options = req.state::<LeptosOptions>().ok_or(
Error::Responder(Response::text("missing state type LeptosOptions")),
)?;
let root = &options.site_root;

View File

@@ -7,7 +7,6 @@ cfg_if! {
use crate::fallback::file_and_error_handler;
use crate::todo::*;
use leptos_viz::{generate_route_list, LeptosRoutes};
use std::sync::Arc;
use todo_app_sqlite_viz::*;
use viz::{
types::{State, StateError},
@@ -17,8 +16,8 @@ cfg_if! {
//Define a handler to test extractor with state
async fn custom_handler(req: Request) -> Result<Response> {
let id = req.params::<String>()?;
let options = &*req
.state::<Arc<LeptosOptions>>()
let options = req
.state::<LeptosOptions>()
.ok_or(StateError::new::<LeptosOptions>())?;
let handler = leptos_viz::render_app_to_stream_with_context(
options.clone(),
@@ -59,7 +58,7 @@ cfg_if! {
|cx| view! { cx, <TodoApp/> },
)
.get("/*", file_and_error_handler)
.with(State(Arc::new(leptos_options)));
.with(State(leptos_options));
// run our app with hyper
// `viz::Server` is a re-export of `hyper::Server`

View File

@@ -43,7 +43,7 @@ impl Todos {
}
pub fn remove(&mut self, id: Uuid) {
self.0.retain(|todo| todo.id != id);
self.retain(|todo| todo.id != id);
}
pub fn remaining(&self) -> usize {
@@ -76,7 +76,23 @@ impl Todos {
}
fn clear_completed(&mut self) {
self.0.retain(|todo| !todo.completed.get());
self.retain(|todo| !todo.completed.get());
}
fn retain(&mut self, mut f: impl FnMut(&Todo) -> bool) {
self.0.retain(|todo| {
let retain = f(todo);
// because these signals are created at the top level,
// they are owned by the <TodoMVC/> component and not
// by the individual <Todo/> components. This means
// that if they are not manually disposed when removed, they
// will be held onto until the <TodoMVC/> is unmounted.
if !retain {
todo.title.dispose();
todo.completed.dispose();
}
retain
})
}
}
@@ -136,7 +152,7 @@ pub fn TodoMVC(cx: Scope) -> impl IntoView {
// Handle the three filter modes: All, Active, and Completed
let (mode, set_mode) = create_signal(cx, Mode::All);
window_event_listener_untyped("hashchange", move |_| {
window_event_listener(ev::hashchange, move |_| {
let new_mode =
location_hash().map(|hash| route(&hash)).unwrap_or_default();
set_mode(new_mode);

View File

@@ -8,6 +8,7 @@ repository = "https://github.com/leptos-rs/leptos"
description = "Actix integrations for the Leptos web framework."
[dependencies]
actix-http = "3"
actix-web = "4"
futures = "0.3"
leptos = { workspace = true, features = ["ssr"] }

View File

@@ -185,7 +185,7 @@ pub fn handle_server_fns_with_context(
.and_then(|value| value.to_str().ok());
if let Some(server_fn) = server_fn_by_path(path.as_str()) {
let body: &[u8] = &body;
let body_ref: &[u8] = &body;
let runtime = create_runtime();
let (cx, disposer) = raw_scope_and_disposer(runtime);
@@ -198,10 +198,28 @@ pub fn handle_server_fns_with_context(
provide_context(cx, req.clone());
provide_context(cx, res_options.clone());
// we consume the body here (using the web::Bytes extractor), but it is required for things
// like MultipartForm
if req
.headers()
.get("Content-Type")
.and_then(|value| value.to_str().ok())
.and_then(|value| {
Some(
value.starts_with(
"multipart/form-data; boundary=",
),
)
})
== Some(true)
{
provide_context(cx, body.clone());
}
let query = req.query_string().as_bytes();
let data = match &server_fn.encoding {
Encoding::Url | Encoding::Cbor => body,
Encoding::Url | Encoding::Cbor => body_ref,
Encoding::GetJSON | Encoding::GetCBOR => query,
};
let res = match (server_fn.trait_obj)(cx, data).await {
@@ -210,7 +228,7 @@ pub fn handle_server_fns_with_context(
use_context::<ResponseOptions>(cx).unwrap();
let mut res: HttpResponseBuilder;
let mut res_parts = res_options.0.write();
let res_parts = res_options.0.write();
if accept_header == Some("application/json")
|| accept_header
@@ -238,11 +256,10 @@ pub fn handle_server_fns_with_context(
// Use provided ResponseParts headers if they exist
let _count = res_parts
.headers
.drain()
.clone()
.into_iter()
.map(|(k, v)| {
if let Some(k) = k {
res.append_header((k, v));
}
res.append_header((k, v));
})
.count();
@@ -789,8 +806,7 @@ async fn build_stream_response(
let res_options = res_options.0.read();
let (status, mut headers) =
(res_options.status, res_options.headers.clone());
let (status, headers) = (res_options.status, res_options.headers.clone());
let status = status.unwrap_or_default();
let complete_stream =
@@ -799,12 +815,12 @@ async fn build_stream_response(
let mut res = HttpResponse::Ok()
.content_type("text/html")
.streaming(complete_stream);
// Add headers manipulated in the response
for (key, value) in headers.drain() {
if let Some(key) = key {
res.headers_mut().append(key, value);
}
for (key, value) in headers.into_iter() {
res.headers_mut().append(key, value);
}
// Set status to what is returned in the function
let res_status = res.status_mut();
*res_status = status;
@@ -829,18 +845,16 @@ async fn render_app_async_helper(
let res_options = res_options.0.read();
let (status, mut headers) =
(res_options.status, res_options.headers.clone());
let (status, headers) = (res_options.status, res_options.headers.clone());
let status = status.unwrap_or_default();
let mut res = HttpResponse::Ok().content_type("text/html").body(html);
// Add headers manipulated in the response
for (key, value) in headers.drain() {
if let Some(key) = key {
res.headers_mut().append(key, value);
}
for (key, value) in headers.into_iter() {
res.headers_mut().append(key, value);
}
// Set status to what is returned in the function
let res_status = res.status_mut();
*res_status = status;
@@ -1028,7 +1042,7 @@ where
}
}
/// A helper to make it easier to use Axum extractors in server functions. This takes
/// A helper to make it easier to use Actix extractors in server functions. This takes
/// a handler function as its argument. The handler follows similar rules to an Actix
/// [Handler](actix_web::Handler): it is an async function that receives arguments that
/// will be extracted from the request and returns some value.
@@ -1072,9 +1086,17 @@ where
{
let req = use_context::<actix_web::HttpRequest>(cx)
.expect("HttpRequest should have been provided via context");
let input = E::extract(&req)
.await
.map_err(|e| ServerFnError::ServerError(e.to_string()))?;
let input = if let Some(body) = use_context::<Bytes>(cx) {
let (_, mut payload) = actix_http::h1::Payload::create(false);
payload.unread_data(body);
E::from_request(&req, &mut dev::Payload::from(payload))
} else {
E::extract(&req)
}
.await
.map_err(|e| ServerFnError::ServerError(e.to_string()))?;
Ok(f.call(input).await)
}
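
A hypothetical usage sketch of the Actix extractor helper documented above (it is assumed here to be exposed as `leptos_actix::extract`; the public name is not visible in this hunk). The handler's arguments are ordinary Actix extractors, and the helper returns `Result<_, ServerFnError>`, matching the body shown above.
```rust
#[server(GetClientInfo, "/api")]
pub async fn get_client_info(cx: Scope) -> Result<String, ServerFnError> {
    use actix_web::{dev::ConnectionInfo, web::Query};
    use serde::Deserialize;

    #[derive(Deserialize)]
    struct Search {
        q: String,
    }

    // each closure argument is extracted from the underlying HttpRequest
    leptos_actix::extract(cx, |conn: ConnectionInfo, search: Query<Search>| async move {
        format!("{} searched for {:?}", conn.host(), search.q)
    })
    .await
}
```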

View File

@@ -7,7 +7,7 @@
use axum::{
body::{Body, Bytes, Full, StreamBody},
extract::{Path, RawQuery},
extract::{FromRef, FromRequestParts, Path, RawQuery},
http::{
header::{HeaderName, HeaderValue},
HeaderMap, Request, StatusCode,
@@ -150,69 +150,6 @@ pub async fn generate_request_and_parts(
(request, request_parts)
}
/// A struct to hold the [`http::request::Request`] and allow users to take ownership of it
/// Required by `Request` not being `Clone`. See
/// [this issue](https://github.com/hyperium/http/pull/574) for eventual resolution:
#[derive(Debug, Default)]
pub struct LeptosRequest<B>(Arc<RwLock<Option<Request<B>>>>);
impl<B> Clone for LeptosRequest<B> {
fn clone(&self) -> Self {
Self(self.0.clone())
}
}
impl<B> LeptosRequest<B> {
/// Overwrite the contents of a LeptosRequest with a new `Request<B>`
pub fn overwrite(&self, req: Option<Request<B>>) {
let mut writable = self.0.write();
*writable = req
}
/// Consume the inner `Request<B>` inside the LeptosRequest and return it
///```rust, ignore
/// use axum::{
/// RequestPartsExt,
/// headers::Host
/// };
/// #[server(GetHost, "/api")]
/// pub async fn get_host(cx: Scope) -> Result((), ServerFnError){
/// let req = use_context::<leptos_axum::LeptosRequest<axum::body::Body>>(cx);
/// if let Some(req) = req{
/// let owned_req = req.take_request().unwrap();
/// let (mut parts, _body) = owned_req.into_parts();
/// let host: TypedHeader<Host> = parts.extract().await().unwrap();
/// println!("Host: {host:#?}");
/// }
/// }
/// ```
pub fn take_request(&self) -> Option<Request<B>> {
let mut writable = self.0.write();
writable.take()
}
/// Can be used to get immutable access to the interior fields of Request
/// and do something with them
pub fn with(&self, with_fn: impl Fn(Option<&Request<B>>)) {
let readable = self.0.read();
with_fn(readable.as_ref());
}
/// Can be used to mutate the fields of the Request
pub fn update(&self, update_fn: impl Fn(Option<&mut Request<B>>)) {
let mut writable = self.0.write();
update_fn(writable.as_mut());
}
}
/// Generate a wrapper for the http::Request::Request type that allows one to
/// process it, access the body, and use axum Extractors on it.
/// Required by Request not being Clone. See
/// [this issue](https://github.com/hyperium/http/pull/574) for eventual resolution:
pub async fn generate_leptos_request<B>(req: Request<B>) -> LeptosRequest<B>
where
B: Default + std::fmt::Debug,
{
let leptos_request = LeptosRequest::default();
leptos_request.overwrite(Some(req));
leptos_request
}
/// An Axum handlers to listens for a request with Leptos server function arguments in the body,
/// run the server function if found, and return the resulting [Response].
///
@@ -310,9 +247,8 @@ async fn handle_server_fns_inner(
additional_context(cx);
let (req, req_parts) = generate_request_and_parts(req).await;
let leptos_req = generate_leptos_request(req).await; // Add this so we can get details about the Request
provide_context(cx, req_parts.clone());
provide_context(cx, leptos_req);
provide_context(cx, ExtractorHelper::from(req));
// Add this so that we can set headers and status of the response
provide_context(cx, ResponseOptions::default());
@@ -675,9 +611,8 @@ where
let full_path = format!("http://leptos.dev{path}");
let (req, req_parts) = generate_request_and_parts(req).await;
let leptos_req = generate_leptos_request(req).await;
move |cx| {
provide_contexts(cx, full_path, req_parts,leptos_req, default_res_options);
provide_contexts(cx, full_path, req_parts, req.into(), default_res_options);
app_fn(cx).into_view(cx)
}
};
@@ -829,9 +764,8 @@ where
let app = {
let full_path = full_path.clone();
let (req, req_parts) = generate_request_and_parts(req).await;
let leptos_req = generate_leptos_request(req).await;
move |cx| {
provide_contexts(cx, full_path, req_parts,leptos_req, default_res_options);
provide_contexts(cx, full_path, req_parts, req.into(), default_res_options);
app_fn(cx).into_view(cx)
}
};
@@ -851,19 +785,20 @@ where
})
}
}
#[tracing::instrument(level = "trace", fields(error), skip_all)]
fn provide_contexts<B: 'static + std::fmt::Debug + std::default::Default>(
fn provide_contexts(
cx: Scope,
path: String,
req_parts: RequestParts,
leptos_req: LeptosRequest<B>,
extractor: ExtractorHelper,
default_res_options: ResponseOptions,
) {
let integration = ServerIntegration { path };
provide_context(cx, RouterIntegrationContext::new(integration));
provide_context(cx, MetaContext::new());
provide_context(cx, req_parts);
provide_context(cx, leptos_req);
provide_context(cx, extractor);
provide_context(cx, default_res_options);
provide_server_redirect(cx, move |path| redirect(cx, path));
}
@@ -999,9 +934,8 @@ where
let app = {
let full_path = full_path.clone();
let (req, req_parts) = generate_request_and_parts(req).await;
let leptos_req = generate_leptos_request(req).await;
move |cx| {
provide_contexts(cx, full_path, req_parts,leptos_req, default_res_options);
provide_contexts(cx, full_path, req_parts, req.into(), default_res_options);
app_fn(cx).into_view(cx)
}
};
@@ -1128,10 +1062,14 @@ where
/// This trait allows one to pass a list of routes and a render function to Axum's router, letting us avoid
/// having to use wildcards or manually define all routes in multiple places.
pub trait LeptosRoutes {
pub trait LeptosRoutes<S>
where
LeptosOptions: FromRef<S>,
S: Clone + Send + Sync + 'static,
{
fn leptos_routes<IV>(
self,
options: LeptosOptions,
options: &S,
paths: Vec<RouteListing>,
app_fn: impl Fn(leptos::Scope) -> IV + Clone + Send + 'static,
) -> Self
@@ -1140,7 +1078,7 @@ pub trait LeptosRoutes {
fn leptos_routes_with_context<IV>(
self,
options: LeptosOptions,
options: &S,
paths: Vec<RouteListing>,
additional_context: impl Fn(leptos::Scope) + 'static + Clone + Send,
app_fn: impl Fn(leptos::Scope) -> IV + Clone + Send + 'static,
@@ -1154,16 +1092,21 @@ pub trait LeptosRoutes {
handler: H,
) -> Self
where
H: axum::handler::Handler<T, (), axum::body::Body>,
H: axum::handler::Handler<T, S, axum::body::Body>,
T: 'static;
}
/// The default implementation of `LeptosRoutes` which takes in a list of paths, and dispatches GET requests
/// to those paths to Leptos's renderer.
impl LeptosRoutes for axum::Router {
impl<S> LeptosRoutes<S> for axum::Router<S>
where
LeptosOptions: FromRef<S>,
S: Clone + Send + Sync + 'static,
{
#[tracing::instrument(level = "info", fields(error), skip_all)]
fn leptos_routes<IV>(
self,
options: LeptosOptions,
options: &S,
paths: Vec<RouteListing>,
app_fn: impl Fn(leptos::Scope) -> IV + Clone + Send + 'static,
) -> Self
@@ -1176,7 +1119,7 @@ impl LeptosRoutes for axum::Router {
#[tracing::instrument(level = "trace", fields(error), skip_all)]
fn leptos_routes_with_context<IV>(
self,
options: LeptosOptions,
options: &S,
paths: Vec<RouteListing>,
additional_context: impl Fn(leptos::Scope) + 'static + Clone + Send,
app_fn: impl Fn(leptos::Scope) -> IV + Clone + Send + 'static,
@@ -1194,7 +1137,7 @@ impl LeptosRoutes for axum::Router {
match listing.mode() {
SsrMode::OutOfOrder => {
let s = render_app_to_stream_with_context(
options.clone(),
LeptosOptions::from_ref(options),
additional_context.clone(),
app_fn.clone(),
);
@@ -1208,7 +1151,7 @@ impl LeptosRoutes for axum::Router {
}
SsrMode::PartiallyBlocked => {
let s = render_app_to_stream_with_context_and_replace_blocks(
options.clone(),
LeptosOptions::from_ref(options),
additional_context.clone(),
app_fn.clone(),
true
@@ -1223,7 +1166,7 @@ impl LeptosRoutes for axum::Router {
}
SsrMode::InOrder => {
let s = render_app_to_stream_in_order_with_context(
options.clone(),
LeptosOptions::from_ref(options),
additional_context.clone(),
app_fn.clone(),
);
@@ -1237,7 +1180,7 @@ impl LeptosRoutes for axum::Router {
}
SsrMode::Async => {
let s = render_app_async_with_context(
options.clone(),
LeptosOptions::from_ref(options),
additional_context.clone(),
app_fn.clone(),
);
@@ -1263,7 +1206,7 @@ impl LeptosRoutes for axum::Router {
handler: H,
) -> Self
where
H: axum::handler::Handler<T, (), axum::body::Body>,
H: axum::handler::Handler<T, S, axum::body::Body>,
T: 'static,
{
let mut router = self;
@@ -1286,6 +1229,7 @@ impl LeptosRoutes for axum::Router {
router
}
}
#[tracing::instrument(level = "trace", fields(error), skip_all)]
fn get_leptos_pool() -> LocalPoolHandle {
static LOCAL_POOL: OnceCell<LocalPoolHandle> = OnceCell::new();
@@ -1297,3 +1241,111 @@ fn get_leptos_pool() -> LocalPoolHandle {
})
.clone()
}
#[derive(Clone, Debug)]
struct ExtractorHelper {
parts: Arc<tokio::sync::Mutex<Parts>>,
}
impl ExtractorHelper {
pub fn new(parts: Parts) -> Self {
Self {
parts: Arc::new(tokio::sync::Mutex::new(parts)),
}
}
pub async fn extract<F, T, U>(&self, f: F) -> Result<U, T::Rejection>
where
F: Extractor<T, U>,
T: std::fmt::Debug + Send + FromRequestParts<()> + 'static,
T::Rejection: std::fmt::Debug + Send + 'static,
{
let mut parts = self.parts.lock().await;
let data = T::from_request_parts(&mut parts, &()).await?;
Ok(f.call(data).await)
}
}
impl<B> From<Request<B>> for ExtractorHelper {
fn from(req: Request<B>) -> Self {
// TODO provide body for extractors there, too?
let (parts, _) = req.into_parts();
ExtractorHelper::new(parts)
}
}
/// A helper to make it easier to use Axum extractors in server functions. This takes
/// a handler function as its argument. The handler follows rules similar to Axum
/// [handlers](https://docs.rs/axum/latest/axum/extract/index.html#intro): it is an async function
/// whose arguments are “extractors.”
///
/// ```rust,ignore
/// #[server(QueryExtract, "/api")]
/// pub async fn query_extract(cx: Scope) -> Result<String, ServerFnError> {
/// use axum::{extract::Query, http::Method};
/// use leptos_axum::extract;
///
/// extract(cx, |method: Method, res: Query<MyQuery>| async move {
/// format!("{method:?} and {}", res.q)
/// },
/// )
/// .await
/// .map_err(|e| ServerFnError::ServerError("Could not extract method and query...".to_string()))
/// }
/// ```
#[tracing::instrument(level = "trace", fields(error), skip_all)]
pub async fn extract<T, U>(
cx: Scope,
f: impl Extractor<T, U>,
) -> Result<U, T::Rejection>
where
T: std::fmt::Debug + Send + FromRequestParts<()> + 'static,
T::Rejection: std::fmt::Debug + Send + 'static,
{
use_context::<ExtractorHelper>(cx)
.expect(
"should have had ExtractorHelper provided by the leptos_axum \
integration",
)
.extract(f)
.await
}
pub trait Extractor<T, U>
where
T: FromRequestParts<()>,
{
fn call(&self, args: T) -> Pin<Box<dyn Future<Output = U>>>;
}
macro_rules! factory_tuple ({ $($param:ident)* } => {
impl<Func, Fut, U, $($param,)*> Extractor<($($param,)*), U> for Func
where
$($param: FromRequestParts<()> + Send,)*
Func: Fn($($param),*) -> Fut + 'static,
Fut: Future<Output = U> + 'static,
{
#[inline]
#[allow(non_snake_case)]
fn call(&self, ($($param,)*): ($($param,)*)) -> Pin<Box<dyn Future<Output = U>>> {
Box::pin((self)($($param,)*))
}
}
});
factory_tuple! { A }
factory_tuple! { A B }
factory_tuple! { A B C }
factory_tuple! { A B C D }
factory_tuple! { A B C D E }
factory_tuple! { A B C D E F }
factory_tuple! { A B C D E F G }
factory_tuple! { A B C D E F G H }
factory_tuple! { A B C D E F G H I }
factory_tuple! { A B C D E F G H I J }
factory_tuple! { A B C D E F G H I J K }
factory_tuple! { A B C D E F G H I J K L }
factory_tuple! { A B C D E F G H I J K L M }
factory_tuple! { A B C D E F G H I J K L M N }
factory_tuple! { A B C D E F G H I J K L M N O }
factory_tuple! { A B C D E F G H I J K L M N O P }

leptos/src/await_.rs (new file, 66 lines)
View File

@@ -0,0 +1,66 @@
use crate::Suspense;
use leptos_dom::IntoView;
use leptos_macro::{component, view};
use leptos_reactive::{
create_blocking_resource, create_resource, store_value, Scope, Serializable,
};
#[component]
/// Allows you to inline the data loading for an `async` block or
/// server function directly into your view. This is the equivalent of combining a
/// [`create_resource`] that only loads once (i.e., with a source signal `|| ()`) with
/// a [`Suspense`] with no `fallback`.
/// ```
/// # use leptos_reactive::*;
/// # use leptos_macro::*;
/// # use leptos_dom::*; use leptos::*;
/// # if false {
/// # run_scope(create_runtime(), |cx| {
/// async fn fetch_monkeys(monkey: i32) -> i32 {
/// // do some expensive work
/// 3
/// }
///
/// view! { cx,
/// <Await
/// future=|cx| fetch_monkeys(3)
/// view=|cx, data| {
/// view! { cx, <p>{*data} " little monkeys, jumping on the bed."</p> }
/// }
/// />
/// }
/// # });
/// # }
/// ```
pub fn Await<T, Fut, FF, VF, V>(
cx: Scope,
/// A function that takes a [`Scope`] and returns the [`Future`](std::future::Future) that
/// the component will `.await` before rendering.
future: FF,
/// If `true`, the component will use [`create_blocking_resource`], preventing
/// the HTML stream from returning anything before `future` has resolved.
#[prop(optional)]
blocking: bool,
/// A function that takes a [`Scope`] and a reference to the resolved data from the `future`
/// and renders a view.
view: VF,
) -> impl IntoView
where
Fut: std::future::Future<Output = T> + 'static,
FF: Fn(Scope) -> Fut + 'static,
V: IntoView,
VF: Fn(Scope, &T) -> V + 'static,
T: Serializable + 'static,
{
let res = if blocking {
create_blocking_resource(cx, || (), move |_| future(cx))
} else {
create_resource(cx, || (), move |_| future(cx))
};
let view = store_value(cx, view);
view! { cx,
<Suspense fallback=|| ()>
{move || res.with(cx, |data| view.with_value(|view| view(cx, data)))}
</Suspense>
}
}
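
A hypothetical follow-up to the doc example above (editorial sketch, not part of the new file): the same call with the optional `blocking` prop set, so the HTML stream holds back until the future resolves.
```rust
view! { cx,
    <Await
        future=|cx| fetch_monkeys(3)
        blocking=true
        view=|cx, data| view! { cx, <p>{*data} " little monkeys, jumping on the bed."</p> }
    />
}
```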

View File

@@ -12,6 +12,10 @@
//!
//! And you can do all three of these **using the same Leptos code.**
//!
//! Take a look at the [Leptos Book](https://leptos-rs.github.io/leptos/) for a walkthrough of the framework.
//! Join us on our [Discord Channel](https://discord.gg/v38Eef6sWG) to see what the community is building.
//! Explore our [Examples](https://github.com/leptos-rs/leptos/tree/main/examples) to see Leptos in action.
//!
//! # `nightly` Note
//! Most of the examples assume you're using `nightly` Rust. If you're on stable, note the following:
//! 1. You need to enable the `"stable"` flag in `Cargo.toml`: `leptos = { version = "0.0", features = ["stable"] }`
@@ -143,6 +147,8 @@
mod additional_attributes;
pub use additional_attributes::*;
mod await_;
pub use await_::*;
pub use leptos_config::{self, get_configuration, LeptosOptions};
#[cfg(not(all(
target_arch = "wasm32",

View File

@@ -101,13 +101,17 @@ where
use leptos_reactive::signal_prelude::*;
// run the child; we'll probably throw this away, but it will register resource reads
let child = orig_child(cx).into_view(cx);
let _child = orig_child(cx).into_view(cx);
let after_original_child = HydrationCtx::id();
let initial = {
// no resources were read under this, so just return the child
if context.pending_resources.get() == 0 {
child
let orig_child = Rc::clone(&orig_child);
HydrationCtx::continue_from(current_id.clone());
Fragment::lazy(Box::new(move || {
vec![DynChild::new(move || orig_child(cx)).into_view(cx)]
})).into_view(cx)
}
// show the fallback, but also prepare to stream HTML
else {

View File

@@ -17,7 +17,7 @@ leptos_actix = { path = "../../../../integrations/actix", optional = true }
leptos_router = { path = "../../../../router", default-features = false }
log = "0.4"
simple_logger = "4"
wasm-bindgen = "0.2"
wasm-bindgen = "0.2.85"
serde = "1.0.159"
tokio = { version = "1.27.0", features = ["time"], optional = true }

View File

@@ -56,6 +56,7 @@ pub fn App(cx: Scope) -> impl IntoView {
<Route path="single" view=|cx| view! { cx, <Single/> }/>
<Route path="parallel" view=|cx| view! { cx, <Parallel/> }/>
<Route path="inside-component" view=|cx| view! { cx, <InsideComponent/> }/>
<Route path="none" view=|cx| view! { cx, <None/> }/>
</Route>
// in-order
<Route
@@ -71,6 +72,7 @@ pub fn App(cx: Scope) -> impl IntoView {
<Route path="single" view=|cx| view! { cx, <Single/> }/>
<Route path="parallel" view=|cx| view! { cx, <Parallel/> }/>
<Route path="inside-component" view=|cx| view! { cx, <InsideComponent/> }/>
<Route path="none" view=|cx| view! { cx, <None/> }/>
</Route>
// async
<Route
@@ -86,6 +88,7 @@ pub fn App(cx: Scope) -> impl IntoView {
<Route path="single" view=|cx| view! { cx, <Single/> }/>
<Route path="parallel" view=|cx| view! { cx, <Parallel/> }/>
<Route path="inside-component" view=|cx| view! { cx, <InsideComponent/> }/>
<Route path="none" view=|cx| view! { cx, <None/> }/>
</Route>
</Routes>
</main>
@@ -101,6 +104,7 @@ fn SecondaryNav(cx: Scope) -> impl IntoView {
<A href="single">"Single"</A>
<A href="parallel">"Parallel"</A>
<A href="inside-component">"Inside Component"</A>
<A href="none">"No Resources"</A>
</nav>
}
}
@@ -217,3 +221,25 @@ fn InsideComponentChild(cx: Scope) -> impl IntoView {
</Suspense>
}
}
#[component]
fn None(cx: Scope) -> impl IntoView {
let (count, set_count) = create_signal(cx, 0);
view! { cx,
<div>
<Suspense fallback=|| "Loading 1...">
<div>"Children inside Suspense should hydrate properly."</div>
<button on:click=move |_| set_count.update(|n| *n += 1)>
{count}
</button>
</Suspense>
<p>"Children following " <code>"<Suspense/>"</code> " should hydrate properly."</p>
<div>
<button on:click=move |_| set_count.update(|n| *n += 1)>
{count}
</button>
</div>
</div>
}
}

View File

@@ -28,75 +28,66 @@ fn main() {
}
fn view_fn(cx: Scope) -> impl IntoView {
let view = view! { cx,
<For
each=|| vec![0, 1, 2, 3, 4, 5, 6, 7]
key=|i| *i
view=|cx, i| view! { cx, {i} }
/>
}
.into_view(cx);
let (list, set_list) = create_signal(cx, vec![2]);//vec![1, 2, 3, 4, 5]);
let (a, set_a) = create_signal(cx, view.clone());
let (b, set_b) = create_signal(cx, view);
let (is_a, set_is_a) = create_signal(cx, true);
let handle_toggle = move |_| {
trace!("toggling");
if is_a() {
set_b(a());
set_is_a(false);
} else {
set_a(a());
set_is_a(true);
}
};
let a_tag = view! { cx, <svg::a/> };
view! { cx,
<>
<div>
<button on:click=handle_toggle>"Toggle"</button>
</div>
<svg>{a_tag}</svg>
<Example/>
<A child=Signal::from(a) />
<A child=Signal::from(b) />
</>
}
}
#[component]
fn A(cx: Scope, child: Signal<View>) -> impl IntoView {
move || child()
}
#[component]
fn Example(cx: Scope) -> impl IntoView {
trace!("rendering <Example/>");
let (value, set_value) = create_signal(cx, 10);
let memo = create_memo(cx, move |_| value() * 2);
let derived = Signal::derive(cx, move || value() * 3);
create_effect(cx, move |_| {
trace!("logging value of derived..., {}", derived.get());
request_animation_frame(move || {
set_list(vec![1, 2]);//vec![0, 1, 3, 6, 4, 5, 2, 7])
});
set_timeout(
move || set_value.update(|v| *v += 1),
std::time::Duration::from_millis(50),
);
view! { cx,
<h1>"Example"</h1>
<button on:click=move |_| set_value.update(|value| *value += 1)>
"Click me"
</button>
<ul>
/* These work! */
/* <Test from=&[1] to=&[]/>
<Test from=&[1, 2] to=&[]/>
<Test from=&[1, 2, 3] to=&[]/>
<Test from=&[] to=&[1]/>
<Test from=&[1, 2] to=&[1]/>
<Test from=&[2, 1] to=&[1]/>
<Test from=&[1] to=&[1, 2]/>
<Test from=&[2] to=&[1, 2]/>
<Test from=&[1, 2, 3] to=&[1, 2]/>
<Test from=&[] to=&[1, 2, 3]/>
<Test from=&[2] to=&[1, 2, 3]/>
<Test from=&[1] to=&[1, 2, 3]/>
<Test from=&[3] to=&[1, 2, 3]/>
<Test from=&[1, 3, 2] to=&[1, 2, 3]/>
<Test from=&[2, 1, 3] to=&[1, 2, 3]/>*/
// TODO diffing broken
// <Test from=&[3, 2, 1] to=&[1, 2, 3]/>
<Test from=&[3, 1] to=&[1, 2, 3]/>
</ul>
}
}
#[component]
fn Test(cx: Scope, from: &'static [usize], to: &'static [usize]) -> impl IntoView {
let (list, set_list) = create_signal(cx, from.to_vec());
request_animation_frame(move || {
set_list(to.to_vec());
});
view! { cx,
<li>
<For
each=list
key=|i| *i
view=|cx, i| {
view! { cx, <span>{i}</span> }
}
/>
/* <p>
"Pre | "
<For
each=list
key=|i| *i
view=|cx, i| {
view! { cx, <span>{i}</span> }
}
/>
" | Post"
</p> */
</li>
}
}

View File

@@ -278,7 +278,33 @@ where
let start = child.get_opening_node();
let end = &closing;
unmount_child(&start, end);
match child {
View::CoreComponent(
crate::CoreComponent::DynChild(
child,
),
) => {
let start =
child.get_opening_node();
let end = child.closing.node;
prepare_to_move(
&child.document_fragment,
&start,
&end,
);
}
View::Component(child) => {
let start =
child.get_opening_node();
let end = child.closing.node;
prepare_to_move(
&child.document_fragment,
&start,
&end,
);
}
_ => unmount_child(&start, end),
}
}
// Mount the new child

File diff suppressed because it is too large.

View File

@@ -29,7 +29,7 @@ pub trait EventDescriptor: Clone {
}
}
/// Overrides the [`EventDescriptor::bubbles`] method to always return
/// Overrides the [`EventDescriptor::BUBBLES`] value to always return
/// `false`, which forces the event to not be globally delegated.
#[derive(Clone)]
#[allow(non_camel_case_types)]

View File

@@ -807,7 +807,7 @@ impl<El: ElementDescriptor + 'static> HtmlElement<El> {
/// Sets a style on an element.
///
/// **Note**: In the builder syntax, this will be overwritten by the `style`
/// attribute if you use `.attr("class", /* */)`. In the `view` macro, they
/// attribute if you use `.attr("style", /* */)`. In the `view` macro, they
/// are automatically re-ordered so that this over-writing does not happen.
#[track_caller]
pub fn style(

View File

@@ -218,7 +218,9 @@ pub fn render_to_stream_with_prefix_undisposed_with_context_and_block_replacemen
let mut blocking_fragments = FuturesUnordered::new();
let fragments = FuturesUnordered::new();
eprintln!("\n\n");
for (fragment_id, data) in pending_fragments {
eprintln!("pending fragment {fragment_id:?}");
if data.should_block {
blocking_fragments
.push(async move { (fragment_id, data.out_of_order.await) });

View File

@@ -240,13 +240,20 @@ impl View {
dont_escape_text: bool,
) {
match self {
View::Suspense(id, _) => {
View::Suspense(id, view) => {
let id = id.to_string();
if let Some(data) = cx.take_pending_fragment(&id) {
chunks.push_back(StreamChunk::Async {
chunks: data.in_order,
should_block: data.should_block,
});
} else {
// if not registered, it means the fragment was already resolved
View::CoreComponent(view).into_stream_chunks_helper(
cx,
chunks,
dont_escape_text,
);
}
}
View::Text(node) => {

View File

@@ -11,7 +11,7 @@ readme = "../README.md"
[dependencies]
anyhow = "1"
serde = { version = "1", features = ["derive"] }
syn = { version = "1", features = [
syn = { version = "2", features = [
"full",
"parsing",
"extra-traits",
@@ -19,7 +19,7 @@ syn = { version = "1", features = [
"printing",
] }
quote = "1"
syn-rsx = "0.9"
rstml = "0.10.6"
proc-macro2 = { version = "1", features = ["span-locations", "nightly"] }
parking_lot = "0.12"
walkdir = "2"

View File

@@ -76,7 +76,7 @@ impl ViewMacros {
tokens.next(); // ,
// TODO handle class = ...
let rsx =
syn_rsx::parse2(tokens.collect::<proc_macro2::TokenStream>())?;
rstml::parse2(tokens.collect::<proc_macro2::TokenStream>())?;
let template = LNode::parse_view(rsx)?;
views.push(MacroInvocation { id, template })
}

View File

@@ -1,8 +1,8 @@
use crate::parsing::{is_component_node, value_to_string};
use crate::parsing::is_component_node;
use anyhow::Result;
use quote::quote;
use quote::ToTokens;
use rstml::node::{Node, NodeAttribute};
use serde::{Deserialize, Serialize};
use syn_rsx::Node;
// A lightweight virtual DOM structure we can use to hold
// the state of a Leptos view macro template. This is because
@@ -58,36 +58,30 @@ impl LNode {
}
}
Node::Text(text) => {
if let Some(value) = value_to_string(&text.value) {
views.push(LNode::Text(value));
} else {
let value = text.value.as_ref();
let code = quote! { #value };
let code = code.to_string();
views.push(LNode::DynChild(code));
}
views.push(LNode::Text(text.value_string()));
}
Node::Block(block) => {
let value = block.value.as_ref();
let code = quote! { #value };
let code = block.into_token_stream();
let code = code.to_string();
views.push(LNode::DynChild(code));
}
Node::Element(el) => {
if is_component_node(&el) {
let name = el.name().to_string();
let mut children = Vec::new();
for child in el.children {
LNode::parse_node(child, &mut children)?;
}
views.push(LNode::Component {
name: el.name.to_string(),
name: name,
props: el
.open_tag
.attributes
.into_iter()
.filter_map(|attr| match attr {
Node::Attribute(attr) => Some((
NodeAttribute::Attribute(attr) => Some((
attr.key.to_string(),
format!("{:#?}", attr.value),
format!("{:#?}", attr.value()),
)),
_ => None,
})
@@ -95,15 +89,13 @@ impl LNode {
children,
});
} else {
let name = el.name.to_string();
let name = el.name().to_string();
let mut attrs = Vec::new();
for attr in el.attributes {
if let Node::Attribute(attr) = attr {
for attr in el.open_tag.attributes {
if let NodeAttribute::Attribute(attr) = attr {
let name = attr.key.to_string();
if let Some(value) =
attr.value.as_ref().and_then(value_to_string)
{
if let Some(value) = attr.value_literal_string() {
attrs.push((
name,
LAttributeValue::Static(value),

View File

@@ -1,7 +1,37 @@
use syn_rsx::{NodeElement, NodeValueExpr};
use rstml::node::NodeElement;
pub fn value_to_string(value: &NodeValueExpr) -> Option<String> {
match &value.as_ref() {
///
/// Converts a `syn::Block` to a simple expression, if possible.
///
/// For example:
/// ```no_build
/// // "string literal" in
/// {"string literal"}
/// // number literal
/// {0x12}
/// // boolean literal
/// {true}
/// // variable
/// {path::x}
/// ```
pub fn block_to_primitive_expression(block: &syn::Block) -> Option<&syn::Expr> {
// it's an empty block, or a block with multiple statements
if block.stmts.len() != 1 {
return None;
}
match &block.stmts[0] {
syn::Stmt::Expr(e, None) => return Some(&e),
_ => {}
}
None
}
/// Converts simple literals to their string representation.
///
/// This function doesn't convert a literal wrapped inside a block,
/// like `{"string"}`.
pub fn value_to_string(value: &syn::Expr) -> Option<String> {
match &value {
syn::Expr::Lit(lit) => match &lit.lit {
syn::Lit::Str(s) => Some(s.value()),
syn::Lit::Char(c) => Some(c.value().to_string()),
@@ -14,7 +44,7 @@ pub fn value_to_string(value: &NodeValueExpr) -> Option<String> {
}
pub fn is_component_node(node: &NodeElement) -> bool {
node.name
node.name()
.to_string()
.starts_with(|c: char| c.is_ascii_uppercase())
}
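// For context, a hedged sketch of how these two helpers compose in the view
// macro further below (the wrapper name here is hypothetical):
fn static_text_of(block: &syn::Block) -> Option<String> {
    // `{"hi"}` yields Some("hi"); a multi-statement or non-literal block yields
    // None, so callers fall back to treating it as dynamic code.
    block_to_primitive_expression(block).and_then(value_to_string)
}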

View File

@@ -12,16 +12,16 @@ readme = "../README.md"
proc-macro = true
[dependencies]
attribute-derive = { version = "0.5", features = ["syn-full"] }
attribute-derive = { version = "0.6", features = ["syn-full"] }
cfg-if = "1"
html-escape = "0.2"
itertools = "0.10"
prettyplease = "0.1"
prettyplease = "0.2.4"
proc-macro-error = "1"
proc-macro2 = "1"
quote = "1"
syn = { version = "1", features = ["full"] }
syn-rsx = "0.9"
syn = { version = "2", features = ["full"] }
rstml = "0.10.6"
leptos_hot_reload = { workspace = true }
server_fn_macro = { workspace = true }
convert_case = "0.6.0"

View File

@@ -4,15 +4,15 @@ use convert_case::{
Casing,
};
use itertools::Itertools;
use leptos_hot_reload::parsing::value_to_string;
use proc_macro2::{Ident, Span, TokenStream};
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
use syn::{
parse::Parse, parse_quote, spanned::Spanned,
AngleBracketedGenericArguments, Attribute, FnArg, GenericArgument, Item,
ItemFn, Lit, LitStr, Meta, MetaNameValue, Pat, PatIdent, Path,
PathArguments, ReturnType, Stmt, Type, TypePath, Visibility,
ItemFn, LitStr, Meta, Pat, PatIdent, Path, PathArguments, ReturnType, Stmt,
Type, TypePath, Visibility,
};
pub struct Model {
is_transparent: bool,
docs: Docs,
@@ -56,14 +56,17 @@ impl Parse for Model {
// We need to remove the `#[doc = ""]` and `#[builder(_)]`
// attrs from the function signature
drain_filter(&mut item.attrs, |attr| {
attr.path == parse_quote!(doc) || attr.path == parse_quote!(prop)
drain_filter(&mut item.attrs, |attr| match &attr.meta {
Meta::NameValue(attr) => attr.path == parse_quote!(doc),
Meta::List(attr) => attr.path == parse_quote!(prop),
_ => false,
});
item.sig.inputs.iter_mut().for_each(|arg| {
if let FnArg::Typed(ty) = arg {
drain_filter(&mut ty.attrs, |attr| {
attr.path == parse_quote!(doc)
|| attr.path == parse_quote!(prop)
drain_filter(&mut ty.attrs, |attr| match &attr.meta {
Meta::NameValue(attr) => attr.path == parse_quote!(doc),
Meta::List(attr) => attr.path == parse_quote!(prop),
_ => false,
});
}
});
@@ -400,12 +403,20 @@ impl Docs {
let mut attrs = attrs
.iter()
.filter_map(|attr| attr.path.is_ident("doc").then(|| {
let Ok(Meta::NameValue(MetaNameValue { lit: Lit::Str(doc), .. })) = attr.parse_meta() else {
abort!(attr, "expected doc comment to be string literal");
.filter_map(|attr| {
let Meta::NameValue(attr ) = &attr.meta else {
return None
};
(doc.value(), doc.span())
}))
if !attr.path.is_ident("doc") {
return None
}
let Some(val) = value_to_string(&attr.value) else {
abort!(attr, "expected string literal in value of doc comment");
};
Some((val, attr.path.span()))
})
.flat_map(map)
.collect_vec();

View File

@@ -7,9 +7,9 @@ extern crate proc_macro_error;
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenTree};
use quote::ToTokens;
use rstml::{node::KeyedAttribute, parse};
use server_fn_macro::{server_macro_impl, ServerContext};
use syn::parse_macro_input;
use syn_rsx::{parse, NodeAttribute};
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum Mode {
@@ -351,16 +351,22 @@ pub fn view(tokens: TokenStream) -> TokenStream {
.chain(tokens)
.collect()
};
match parse(tokens.into()) {
Ok(nodes) => render_view(
&proc_macro2::Ident::new(&cx.to_string(), cx.span()),
&nodes,
Mode::default(),
global_class.as_ref(),
normalized_call_site(proc_macro::Span::call_site()),
),
Err(error) => error.to_compile_error(),
let config = rstml::ParserConfig::default().recover_block(true);
let parser = rstml::Parser::new(config);
let (nodes, errors) = parser.parse_recoverable(tokens).split_vec();
let errors = errors.into_iter().map(|e| e.emit_as_expr_tokens());
let nodes_output = render_view(
&cx,
&nodes,
Mode::default(),
global_class.as_ref(),
normalized_call_site(proc_macro::Span::call_site()),
);
quote! {
{
#(#errors;)*
#nodes_output
}
}
.into()
}
@@ -672,7 +678,7 @@ pub fn component(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
/// Annotates a struct so that it can be used with your Component as a `slot`.
///
/// The `#[slot]` macro allows you to annotate plain Rust struct as component slots and use them
/// within your Leptos [component](crate::component!) properties. The struct can contain any number
/// within your Leptos [`component`](macro@crate::component) properties. The struct can contain any number
/// of fields. When you use the component somewhere else, the names of the slot fields are the
/// names of the properties you use in the [view](crate::view!) macro.
///
@@ -874,9 +880,9 @@ pub fn params_derive(
}
}
pub(crate) fn attribute_value(attr: &NodeAttribute) -> &syn::Expr {
match &attr.value {
Some(value) => value.as_ref(),
pub(crate) fn attribute_value(attr: &KeyedAttribute) -> &syn::Expr {
match &attr.possible_value {
Some(value) => &value.value,
None => abort!(attr.key, "attribute should have value"),
}
}

View File

@@ -5,7 +5,8 @@ use attribute_derive::Attribute as AttributeDerive;
use proc_macro2::{Ident, TokenStream};
use quote::{ToTokens, TokenStreamExt};
use syn::{
parse::Parse, parse_quote, Field, ItemStruct, LitStr, Type, Visibility,
parse::Parse, parse_quote, Field, ItemStruct, LitStr, Meta, Type,
Visibility,
};
pub struct Model {
@@ -31,13 +32,16 @@ impl Parse for Model {
// We need to remove the `#[doc = ""]` and `#[builder(_)]`
// attrs from the function signature
drain_filter(&mut item.attrs, |attr| {
attr.path == parse_quote!(doc) || attr.path == parse_quote!(prop)
drain_filter(&mut item.attrs, |attr| match &attr.meta {
Meta::NameValue(attr) => attr.path == parse_quote!(doc),
Meta::List(attr) => attr.path == parse_quote!(prop),
_ => false,
});
item.fields.iter_mut().for_each(|arg| {
drain_filter(&mut arg.attrs, |attr| {
attr.path == parse_quote!(doc)
|| attr.path == parse_quote!(prop)
drain_filter(&mut arg.attrs, |attr| match &attr.meta {
Meta::NameValue(attr) => attr.path == parse_quote!(doc),
Meta::List(attr) => attr.path == parse_quote!(prop),
_ => false,
});
});

View File

@@ -1,9 +1,14 @@
use crate::attribute_value;
use leptos_hot_reload::parsing::is_component_node;
use itertools::Either;
use leptos_hot_reload::parsing::{
block_to_primitive_expression, is_component_node, value_to_string,
};
use proc_macro2::{Ident, Span, TokenStream};
use quote::{quote, quote_spanned};
use quote::{quote, quote_spanned, ToTokens};
use rstml::node::{
KeyedAttribute, Node, NodeAttribute, NodeBlock, NodeElement,
};
use syn::spanned::Spanned;
use syn_rsx::{Node, NodeAttribute, NodeElement, NodeValueExpr};
use uuid::Uuid;
pub(crate) fn render_template(cx: &Ident, nodes: &[Node]) -> TokenStream {
@@ -53,7 +58,7 @@ fn root_element_to_tokens(
.unwrap();
};
let span = node.name.span();
let span = node.name().span();
let navigations = if navigations.is_empty() {
quote! {}
@@ -67,7 +72,7 @@ fn root_element_to_tokens(
quote! { #(#expressions;);* }
};
let tag_name = node.name.to_string();
let tag_name = node.name().to_string();
quote_spanned! {
span => {
@@ -104,9 +109,9 @@ enum PrevSibChange {
Skip,
}
fn attributes(node: &NodeElement) -> impl Iterator<Item = &NodeAttribute> {
node.attributes.iter().filter_map(|node| {
if let Node::Attribute(attribute) = node {
fn attributes(node: &NodeElement) -> impl Iterator<Item = &KeyedAttribute> {
node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(attribute) = node {
Some(attribute)
} else {
None
@@ -129,11 +134,11 @@ fn element_to_tokens(
) -> Ident {
// create this element
*next_el_id += 1;
let this_el_ident = child_ident(*next_el_id, node.name.span());
let this_el_ident = child_ident(*next_el_id, node.name().span());
// Open tag
let name_str = node.name.to_string();
let span = node.name.span();
let name_str = node.name().to_string();
let span = node.name().span();
// CSR/hydrate, push to template
template.push('<');
@@ -145,7 +150,7 @@ fn element_to_tokens(
}
// navigation for this el
let debug_name = node.name.to_string();
let debug_name = node.name().to_string();
let this_nav = if is_root_el {
quote_spanned! {
span => let #this_el_ident = #debug_name;
@@ -247,14 +252,17 @@ fn next_sibling_node(
if is_component_node(sibling) {
next_sibling_node(children, idx + 1, next_el_id)
} else {
Ok(Some(child_ident(*next_el_id + 1, sibling.name.span())))
Ok(Some(child_ident(
*next_el_id + 1,
sibling.name().span(),
)))
}
}
Node::Block(sibling) => {
Ok(Some(child_ident(*next_el_id + 1, sibling.value.span())))
Ok(Some(child_ident(*next_el_id + 1, sibling.span())))
}
Node::Text(sibling) => {
Ok(Some(child_ident(*next_el_id + 1, sibling.value.span())))
Ok(Some(child_ident(*next_el_id + 1, sibling.span())))
}
_ => Err("expected either an element or a block".to_string()),
}
@@ -263,7 +271,7 @@ fn next_sibling_node(
fn attr_to_tokens(
cx: &Ident,
node: &NodeAttribute,
node: &KeyedAttribute,
el_id: &Ident,
template: &mut String,
expressions: &mut Vec<TokenStream>,
@@ -272,8 +280,8 @@ fn attr_to_tokens(
let name = name.strip_prefix('_').unwrap_or(&name);
let name = name.strip_prefix("attr:").unwrap_or(name);
let value = match &node.value {
Some(expr) => match expr.as_ref() {
let value = match &node.value() {
Some(expr) => match expr {
syn::Expr::Lit(expr_lit) => {
if let syn::Lit::Str(s) = &expr_lit.lit {
AttributeValue::Static(s.value())
@@ -367,7 +375,7 @@ fn child_to_tokens(
Node::Element(node) => {
if is_component_node(node) {
proc_macro_error::emit_error!(
node.name.span(),
node.name().span(),
"component children not allowed in template!, use view! \
instead"
);
@@ -389,7 +397,7 @@ fn child_to_tokens(
}
Node::Text(node) => block_to_tokens(
cx,
&node.value,
Either::Left(node.value_string()),
node.value.span(),
parent,
prev_sib,
@@ -399,10 +407,42 @@ fn child_to_tokens(
expressions,
navigations,
),
Node::Block(node) => block_to_tokens(
Node::RawText(node) => block_to_tokens(
cx,
&node.value,
node.value.span(),
Either::Left(node.to_string_best()),
node.span(),
parent,
prev_sib,
next_sib,
next_el_id,
template,
expressions,
navigations,
),
Node::Block(NodeBlock::ValidBlock(b)) => {
let value = match block_to_primitive_expression(b)
.and_then(value_to_string)
{
Some(v) => Either::Left(v),
None => Either::Right(b.into_token_stream()),
};
block_to_tokens(
cx,
value,
b.span(),
parent,
prev_sib,
next_sib,
next_el_id,
template,
expressions,
navigations,
)
}
Node::Block(b @ NodeBlock::Invalid { .. }) => block_to_tokens(
cx,
Either::Right(b.into_token_stream()),
b.span(),
parent,
prev_sib,
next_sib,
@@ -418,7 +458,7 @@ fn child_to_tokens(
#[allow(clippy::too_many_arguments)]
fn block_to_tokens(
_cx: &Ident,
value: &NodeValueExpr,
value: Either<String, TokenStream>,
span: Span,
parent: &Ident,
prev_sib: Option<Ident>,
@@ -428,18 +468,6 @@ fn block_to_tokens(
expressions: &mut Vec<TokenStream>,
navigations: &mut Vec<TokenStream>,
) -> PrevSibChange {
let value = value.as_ref();
let str_value = match value {
syn::Expr::Lit(lit) => match &lit.lit {
syn::Lit::Str(s) => Some(s.value()),
syn::Lit::Char(c) => Some(c.value().to_string()),
syn::Lit::Int(i) => Some(i.base10_digits().to_string()),
syn::Lit::Float(f) => Some(f.base10_digits().to_string()),
_ => None,
},
_ => None,
};
// code to navigate to this text node
let (name, location) = /* if is_first_child && mode == Mode::Client {
@@ -473,27 +501,30 @@ fn block_to_tokens(
}
};
if let Some(v) = str_value {
navigations.push(location);
template.push_str(&v);
match value {
Either::Left(v) => {
navigations.push(location);
template.push_str(&v);
if let Some(name) = name {
PrevSibChange::Sib(name)
} else {
PrevSibChange::Parent
if let Some(name) = name {
PrevSibChange::Sib(name)
} else {
PrevSibChange::Parent
}
}
} else {
template.push_str("<!>");
navigations.push(location);
Either::Right(value) => {
template.push_str("<!>");
navigations.push(location);
expressions.push(quote! {
leptos::leptos_dom::mount_child(#mount_kind, &{#value}.into_view(cx));
});
expressions.push(quote! {
leptos::leptos_dom::mount_child(#mount_kind, &{#value}.into_view(cx));
});
if let Some(name) = name {
PrevSibChange::Sib(name)
} else {
PrevSibChange::Parent
if let Some(name) = name {
PrevSibChange::Sib(name)
} else {
PrevSibChange::Parent
}
}
}
}
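// Standalone sketch of the same static-vs-dynamic split (illustrative names,
// not the macro's internals): literal text is appended directly to the HTML
// template string, while any other expression leaves a "<!>" marker plus a
// runtime mount instruction, mirroring the `Either::Left`/`Either::Right`
// branches above.
use proc_macro2::TokenStream;
use quote::quote;
enum StaticOrDyn {
    Static(String),
    Dynamic(TokenStream),
}
fn push_child(
    template: &mut String,
    expressions: &mut Vec<TokenStream>,
    child: StaticOrDyn,
) {
    match child {
        StaticOrDyn::Static(text) => template.push_str(&text),
        StaticOrDyn::Dynamic(expr) => {
            template.push_str("<!>");
            // `mount_child` stands in for the real mounting call emitted above.
            expressions.push(quote! { mount_child(&{ #expr }) });
        }
    }
}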

View File

@@ -1,11 +1,15 @@
use crate::{attribute_value, Mode};
use convert_case::{Case::Snake, Casing};
use leptos_hot_reload::parsing::{is_component_node, value_to_string};
use leptos_hot_reload::parsing::{
block_to_primitive_expression, is_component_node, value_to_string,
};
use proc_macro2::{Ident, Span, TokenStream, TokenTree};
use quote::{format_ident, quote, quote_spanned};
use rstml::node::{
KeyedAttribute, Node, NodeAttribute, NodeBlock, NodeElement, NodeName,
};
use std::collections::HashMap;
use syn::{spanned::Spanned, Expr, ExprLit, ExprPath, Lit};
use syn_rsx::{Node, NodeAttribute, NodeElement, NodeName, NodeValueExpr};
#[derive(Clone, Copy)]
enum TagType {
@@ -213,18 +217,22 @@ fn root_node_to_tokens_ssr(
global_class,
view_marker,
),
Node::Comment(_) | Node::Doctype(_) | Node::Attribute(_) => quote! {},
Node::Comment(_) | Node::Doctype(_) => quote! {},
Node::Text(node) => {
let value = node.value.as_ref();
quote! {
leptos::leptos_dom::html::text(#value)
leptos::leptos_dom::html::text(#node)
}
}
Node::RawText(r) => {
let text = r.to_string_best();
let text = syn::LitStr::new(&text, r.span());
quote! {
leptos::leptos_dom::html::text(#text)
}
}
Node::Block(node) => {
let value = node.value.as_ref();
quote! {
#[allow(unused_braces)]
#value
#node
}
}
Node::Element(node) => {
@@ -254,9 +262,9 @@ fn fragment_to_tokens_ssr(
});
quote! {
{
leptos::Fragment::lazy(|| vec![
leptos::Fragment::lazy(|| [
#(#nodes),*
])
].to_vec())
#view_marker
}
}
@@ -329,15 +337,15 @@ fn root_element_to_tokens_ssr(
},
});
let tag_name = node.name.to_string();
let tag_name = node.name().to_string();
let is_custom_element = is_custom_element(&tag_name);
let typed_element_name = if is_custom_element {
Ident::new("Custom", node.name.span())
Ident::new("Custom", node.name().span())
} else {
let camel_cased = camel_case_tag_name(
&tag_name.replace("svg::", "").replace("math::", ""),
);
Ident::new(&camel_cased, node.name.span())
Ident::new(&camel_cased, node.name().span())
};
let typed_element_name = if is_svg_element(&tag_name) {
quote! { svg::#typed_element_name }
@@ -409,7 +417,7 @@ fn element_to_tokens_ssr(
}));
} else {
let tag_name = node
.name
.name()
.to_string()
.replace("svg::", "")
.replace("math::", "");
@@ -419,8 +427,8 @@ fn element_to_tokens_ssr(
let mut inner_html = None;
for attr in &node.attributes {
if let Node::Attribute(attr) = attr {
for attr in node.attributes() {
if let NodeAttribute::Attribute(attr) = attr {
inner_html = attribute_to_tokens_ssr(
cx,
attr,
@@ -439,9 +447,9 @@ fn element_to_tokens_ssr(
quote! { leptos::leptos_dom::HydrationCtx::id() }
};
match node
.attributes
.attributes()
.iter()
.find(|node| matches!(node, Node::Attribute(attr) if attr.key.to_string() == "id"))
.find(|node| matches!(node, NodeAttribute::Attribute(attr) if attr.key.to_string() == "id"))
{
Some(_) => {
template.push_str(" leptos-hk=\"_{}\"");
@@ -462,7 +470,7 @@ fn element_to_tokens_ssr(
if let Some(inner_html) = inner_html {
template.push_str("{}");
let value = inner_html.as_ref();
let value = inner_html;
holes.push(quote! {
(#value).into_attribute(#cx).as_nameless_value_string().unwrap_or_default()
@@ -484,32 +492,23 @@ fn element_to_tokens_ssr(
);
}
Node::Text(text) => {
if let Some(value) = value_to_string(&text.value) {
let value = if is_script_or_style {
value.into()
} else {
html_escape::encode_safe(&value)
};
template.push_str(
&value
.replace('{', "\\{")
.replace('}', "\\}"),
);
let value = text.value_string();
let value = if is_script_or_style {
value.into()
} else {
template.push_str("{}");
let value = text.value.as_ref();
holes.push(quote! {
#value.into_view(#cx).render_to_string(#cx)
})
}
html_escape::encode_safe(&value)
};
template.push_str(
&value.replace('{', "\\{").replace('}', "\\}"),
);
}
Node::Block(block) => {
if let Some(value) = value_to_string(&block.value) {
Node::Block(NodeBlock::ValidBlock(block)) => {
if let Some(value) =
block_to_primitive_expression(block)
.and_then(value_to_string)
{
template.push_str(&value);
} else {
let value = block.value.as_ref();
if !template.is_empty() {
chunks.push(SsrElementChunks::String {
template: std::mem::take(template),
@@ -517,10 +516,16 @@ fn element_to_tokens_ssr(
})
}
chunks.push(SsrElementChunks::View(quote! {
{#value}.into_view(#cx)
{#block}.into_view(#cx)
}));
}
}
// Keep invalid blocks for faster IDE diffing (as the user types)
Node::Block(block @ NodeBlock::Invalid { .. }) => {
chunks.push(SsrElementChunks::View(quote! {
{#block}.into_view(#cx)
}));
}
Node::Fragment(_) => abort!(
Span::call_site(),
"You can't nest a fragment inside an element."
@@ -531,7 +536,7 @@ fn element_to_tokens_ssr(
}
template.push_str("</");
template.push_str(&node.name.to_string());
template.push_str(&node.name().to_string());
template.push('>');
}
}
@@ -540,17 +545,17 @@ fn element_to_tokens_ssr(
// returns `inner_html`
fn attribute_to_tokens_ssr<'a>(
cx: &Ident,
node: &'a NodeAttribute,
attr: &'a KeyedAttribute,
template: &mut String,
holes: &mut Vec<TokenStream>,
exprs_for_compiler: &mut Vec<TokenStream>,
global_class: Option<&TokenTree>,
) -> Option<&'a NodeValueExpr> {
let name = node.key.to_string();
) -> Option<&'a syn::Expr> {
let name = attr.key.to_string();
if name == "ref" || name == "_ref" || name == "ref_" || name == "node_ref" {
// ignore refs on SSR
} else if let Some(name) = name.strip_prefix("on:") {
let handler = attribute_value(node);
let handler = attribute_value(attr);
let (event_type, _, _) = parse_event_name(name);
exprs_for_compiler.push(quote! {
@@ -563,16 +568,16 @@ fn attribute_to_tokens_ssr<'a>(
// ignore props for SSR
// ignore classes and styles: we'll handle these separately
} else if name == "inner_html" {
return node.value.as_ref();
return attr.value();
} else {
let name = name.replacen("attr:", "", 1);
// special case of global_class and class attribute
if name == "class"
&& global_class.is_some()
&& node.value.as_ref().and_then(value_to_string).is_none()
&& attr.value().and_then(value_to_string).is_none()
{
let span = node.key.span();
let span = attr.key.span();
proc_macro_error::emit_error!(span, "Combining a global class (view! { cx, class = ... }) \
and a dynamic `class=` attribute on an element causes runtime inconsistencies. You can \
toggle individual classes dynamically with the `class:name=value` syntax. \n\nSee this issue \
@@ -582,7 +587,7 @@ fn attribute_to_tokens_ssr<'a>(
if name != "class" && name != "style" {
template.push(' ');
if let Some(value) = node.value.as_ref() {
if let Some(value) = attr.value() {
if let Some(value) = value_to_string(value) {
template.push_str(&name);
template.push_str("=\"");
@@ -590,7 +595,6 @@ fn attribute_to_tokens_ssr<'a>(
template.push('"');
} else {
template.push_str("{}");
let value = value.as_ref();
holes.push(quote! {
&{#value}.into_attribute(#cx)
.as_nameless_value_string()
@@ -630,11 +634,13 @@ fn set_class_attribute_ssr(
Some(val) => (String::new(), Some(val)),
};
let static_class_attr = node
.attributes
.attributes()
.iter()
.filter_map(|a| match a {
Node::Attribute(attr) if attr.key.to_string() == "class" => {
attr.value.as_ref().and_then(value_to_string)
NodeAttribute::Attribute(attr)
if attr.key.to_string() == "class" =>
{
attr.value().and_then(value_to_string)
}
_ => None,
})
@@ -644,17 +650,17 @@ fn set_class_attribute_ssr(
.join(" ");
let dyn_class_attr = node
.attributes
.attributes()
.iter()
.filter_map(|a| {
if let Node::Attribute(a) = a {
if let NodeAttribute::Attribute(a) = a {
if a.key.to_string() == "class" {
if a.value.as_ref().and_then(value_to_string).is_some()
if a.value().and_then(value_to_string).is_some()
|| fancy_class_name(&a.key.to_string(), cx, a).is_some()
{
None
} else {
Some((a.key.span(), &a.value))
Some((a.key.span(), a.value()))
}
} else {
None
@@ -666,10 +672,10 @@ fn set_class_attribute_ssr(
.collect::<Vec<_>>();
let class_attrs = node
.attributes
.attributes()
.iter()
.filter_map(|node| {
if let Node::Attribute(node) = node {
if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string();
if name == "class" {
return if let Some((_, name, value)) =
@@ -713,7 +719,6 @@ fn set_class_attribute_ssr(
for (_span, value) in dyn_class_attr {
if let Some(value) = value {
template.push_str(" {}");
let value = value.as_ref();
holes.push(quote! {
&(#cx, #value).into_attribute(#cx).as_nameless_value_string()
.map(|a| leptos::leptos_dom::ssr::escape_attr(&a).to_string())
@@ -745,11 +750,13 @@ fn set_style_attribute_ssr(
holes: &mut Vec<TokenStream>,
) {
let static_style_attr = node
.attributes
.attributes()
.iter()
.filter_map(|a| match a {
Node::Attribute(attr) if attr.key.to_string() == "style" => {
attr.value.as_ref().and_then(value_to_string)
NodeAttribute::Attribute(attr)
if attr.key.to_string() == "style" =>
{
attr.value().and_then(value_to_string)
}
_ => None,
})
@@ -757,17 +764,17 @@ fn set_style_attribute_ssr(
.map(|style| format!("{style};"));
let dyn_style_attr = node
.attributes
.attributes()
.iter()
.filter_map(|a| {
if let Node::Attribute(a) = a {
if let NodeAttribute::Attribute(a) = a {
if a.key.to_string() == "style" {
if a.value.as_ref().and_then(value_to_string).is_some()
if a.value().and_then(value_to_string).is_some()
|| fancy_style_name(&a.key.to_string(), cx, a).is_some()
{
None
} else {
Some((a.key.span(), &a.value))
Some((a.key.span(), a.value()))
}
} else {
None
@@ -779,10 +786,10 @@ fn set_style_attribute_ssr(
.collect::<Vec<_>>();
let style_attrs = node
.attributes
.attributes()
.iter()
.filter_map(|node| {
if let Node::Attribute(node) = node {
if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string();
if name == "style" {
return if let Some((_, name, value)) =
@@ -825,7 +832,6 @@ fn set_style_attribute_ssr(
for (_span, value) in dyn_style_attr {
if let Some(value) = value {
template.push_str(" {};");
let value = value.as_ref();
holes.push(quote! {
&(#cx, #value).into_attribute(#cx).as_nameless_value_string()
.map(|a| leptos::leptos_dom::ssr::escape_attr(&a).to_string())
@@ -899,18 +905,18 @@ fn fragment_to_tokens(
let tokens = if lazy {
quote! {
{
leptos::Fragment::lazy(|| vec![
leptos::Fragment::lazy(|| [
#(#nodes),*
])
].to_vec())
#view_marker
}
}
} else {
quote! {
{
leptos::Fragment::new(vec![
leptos::Fragment::new([
#(#nodes),*
])
].to_vec())
#view_marker
}
}
@@ -948,18 +954,14 @@ fn node_to_tokens(
view_marker,
),
Node::Comment(_) | Node::Doctype(_) => Some(quote! {}),
Node::Text(node) => {
let value = node.value.as_ref();
Some(quote! {
leptos::leptos_dom::html::text(#value)
})
}
Node::Block(node) => {
let value = node.value.as_ref();
Some(quote! { #value })
}
Node::Attribute(node) => {
Some(attribute_to_tokens(cx, node, global_class))
Node::Text(node) => Some(quote! {
leptos::leptos_dom::html::text(#node)
}),
Node::Block(node) => Some(quote! { #node }),
Node::RawText(r) => {
let text = r.to_string_best();
let text = syn::LitStr::new(&text, r.span());
Some(quote! { #text })
}
Node::Element(node) => element_to_tokens(
cx,
@@ -980,6 +982,7 @@ fn element_to_tokens(
global_class: Option<&TokenTree>,
view_marker: Option<String>,
) -> Option<TokenStream> {
let name = node.name();
if is_component_node(node) {
if let Some(slot) = get_slot(node) {
slot_to_tokens(cx, node, slot, parent_slots, global_class);
@@ -988,20 +991,17 @@ fn element_to_tokens(
Some(component_to_tokens(cx, node, global_class))
}
} else {
let tag = node.name.to_string();
let tag = name.to_string();
let name = if is_custom_element(&tag) {
let name = node.name.to_string();
let name = node.name().to_string();
quote! { leptos::leptos_dom::html::custom(#cx, leptos::leptos_dom::html::Custom::new(#name)) }
} else if is_svg_element(&tag) {
let name = &node.name;
parent_type = TagType::Svg;
quote! { leptos::leptos_dom::svg::#name(#cx) }
} else if is_math_ml_element(&tag) {
let name = &node.name;
parent_type = TagType::Math;
quote! { leptos::leptos_dom::math::#name(#cx) }
} else if is_ambiguous_element(&tag) {
let name = &node.name;
match parent_type {
TagType::Unknown => {
// We decided this warning was too aggressive, but I'll leave it here in case we want it later
@@ -1020,12 +1020,11 @@ fn element_to_tokens(
}
}
} else {
let name = &node.name;
parent_type = TagType::Html;
quote! { leptos::leptos_dom::html::#name(#cx) }
};
let attrs = node.attributes.iter().filter_map(|node| {
if let Node::Attribute(node) = node {
let attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string();
let name = name.trim();
if name.starts_with("class:")
@@ -1041,8 +1040,8 @@ fn element_to_tokens(
None
}
});
let class_attrs = node.attributes.iter().filter_map(|node| {
if let Node::Attribute(node) = node {
let class_attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string();
if let Some((fancy, _, _)) = fancy_class_name(&name, cx, node) {
Some(fancy)
@@ -1055,8 +1054,8 @@ fn element_to_tokens(
None
}
});
let style_attrs = node.attributes.iter().filter_map(|node| {
if let Node::Attribute(node) = node {
let style_attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
let name = node.key.to_string();
if let Some((fancy, _, _)) = fancy_style_name(&name, cx, node) {
Some(fancy)
@@ -1101,32 +1100,18 @@ fn element_to_tokens(
}),
false,
),
Node::Text(node) => {
if let Some(primitive) = value_to_string(&node.value) {
(quote! { #primitive }, true)
} else {
let value = node.value.as_ref();
(
quote! {
#[allow(unused_braces)] #value
},
false,
)
}
}
Node::Block(node) => {
if let Some(primitive) = value_to_string(&node.value) {
(quote! { #primitive }, true)
} else {
let value = node.value.as_ref();
(
quote! {
#[allow(unused_braces)] #value
},
false,
)
}
Node::Text(node) => (quote! { #node }, true),
Node::RawText(node) => {
let text = node.to_string_best();
let text = syn::LitStr::new(&text, node.span());
(quote! { #text }, true)
}
Node::Block(node) => (
quote! {
#node
},
false,
),
Node::Element(node) => (
element_to_tokens(
cx,
@@ -1139,9 +1124,7 @@ fn element_to_tokens(
.unwrap_or_default(),
false,
),
Node::Comment(_) | Node::Doctype(_) | Node::Attribute(_) => {
(quote! {}, false)
}
Node::Comment(_) | Node::Doctype(_) => (quote! {}, false),
};
if is_static {
quote! {
@@ -1172,7 +1155,7 @@ fn element_to_tokens(
fn attribute_to_tokens(
cx: &Ident,
node: &NodeAttribute,
node: &KeyedAttribute,
global_class: Option<&TokenTree>,
) -> TokenStream {
let span = node.key.span();
@@ -1303,7 +1286,7 @@ fn attribute_to_tokens(
// special case of global_class and class attribute
if name == "class"
&& global_class.is_some()
&& node.value.as_ref().and_then(value_to_string).is_none()
&& node.value().and_then(value_to_string).is_none()
{
let span = node.key.span();
proc_macro_error::emit_error!(span, "Combining a global class (view! { cx, class = ... }) \
@@ -1313,10 +1296,8 @@ fn attribute_to_tokens(
};
// all other attributes
let value = match node.value.as_ref() {
let value = match node.value() {
Some(value) => {
let value = value.as_ref();
quote! { #value }
}
None => quote_spanned! { span => "" },
@@ -1367,7 +1348,7 @@ pub(crate) fn parse_event_name(name: &str) -> (TokenStream, bool, bool) {
pub(crate) fn slot_to_tokens(
cx: &Ident,
node: &NodeElement,
slot: &NodeAttribute,
slot: &KeyedAttribute,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
) {
@@ -1376,19 +1357,19 @@ pub(crate) fn slot_to_tokens(
let name = convert_to_snake_case(if name.starts_with("slot:") {
name.replacen("slot:", "", 1)
} else {
node.name.to_string()
node.name().to_string()
});
let component_name = ident_from_tag_name(&node.name);
let span = node.name.span();
let component_name = ident_from_tag_name(node.name());
let span = node.name().span();
let Some(parent_slots) = parent_slots else {
proc_macro_error::emit_error!(span, "slots cannot be used inside HTML elements");
return;
};
let attrs = node.attributes.iter().filter_map(|node| {
if let Node::Attribute(node) = node {
let attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
if is_slot(node) {
None
} else {
@@ -1406,10 +1387,8 @@ pub(crate) fn slot_to_tokens(
let name = &attr.key;
let value = attr
.value
.as_ref()
.value()
.map(|v| {
let v = v.as_ref();
quote! { #v }
})
.unwrap_or_else(|| quote! { #name });
@@ -1474,9 +1453,9 @@ pub(crate) fn slot_to_tokens(
let slot = Ident::new(&slot, span);
if values.len() > 1 {
quote! {
.#slot(vec![
.#slot([
#(#values)*
])
].to_vec())
}
} else {
let value = &values[0];
@@ -1504,12 +1483,12 @@ pub(crate) fn component_to_tokens(
node: &NodeElement,
global_class: Option<&TokenTree>,
) -> TokenStream {
let name = &node.name;
let component_name = ident_from_tag_name(&node.name);
let span = node.name.span();
let name = node.name();
let component_name = ident_from_tag_name(node.name());
let span = node.name().span();
let attrs = node.attributes.iter().filter_map(|node| {
if let Node::Attribute(node) = node {
let attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
Some(node)
} else {
None
@@ -1526,10 +1505,8 @@ pub(crate) fn component_to_tokens(
let name = &attr.key;
let value = attr
.value
.as_ref()
.value()
.map(|v| {
let v = v.as_ref();
quote! { #v }
})
.unwrap_or_else(|| quote! { #name });
@@ -1637,7 +1614,7 @@ pub(crate) fn component_to_tokens(
}
pub(crate) fn event_from_attribute_node(
attr: &NodeAttribute,
attr: &KeyedAttribute,
force_undelegated: bool,
) -> (TokenStream, &Expr) {
let event_name = attr
@@ -1697,7 +1674,7 @@ fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
fn expr_to_ident(expr: &syn::Expr) -> Option<&ExprPath> {
match expr {
syn::Expr::Block(block) => block.block.stmts.last().and_then(|stmt| {
if let syn::Stmt::Expr(expr) = stmt {
if let syn::Stmt::Expr(expr, ..) = stmt {
expr_to_ident(expr)
} else {
None
@@ -1708,15 +1685,15 @@ fn expr_to_ident(expr: &syn::Expr) -> Option<&ExprPath> {
}
}
fn is_slot(node: &NodeAttribute) -> bool {
fn is_slot(node: &KeyedAttribute) -> bool {
let key = node.key.to_string();
let key = key.trim();
key == "slot" || key.starts_with("slot:")
}
fn get_slot(node: &NodeElement) -> Option<&NodeAttribute> {
node.attributes.iter().find_map(|node| {
if let Node::Attribute(node) = node {
fn get_slot(node: &NodeElement) -> Option<&KeyedAttribute> {
node.attributes().iter().find_map(|node| {
if let NodeAttribute::Attribute(node) = node {
if is_slot(node) {
Some(node)
} else {
@@ -1744,7 +1721,7 @@ fn is_self_closing(node: &NodeElement) -> bool {
// self-closing tags
// https://developer.mozilla.org/en-US/docs/Glossary/Empty_element
matches!(
node.name.to_string().as_str(),
node.name().to_string().as_str(),
"area"
| "base"
| "br"
@@ -1899,13 +1876,13 @@ fn parse_event(event_name: &str) -> (&str, bool) {
fn fancy_class_name<'a>(
name: &str,
cx: &Ident,
node: &'a NodeAttribute,
node: &'a KeyedAttribute,
) -> Option<(TokenStream, String, &'a Expr)> {
// special case for complex class names:
// e.g., Tailwind `class=("mt-[calc(100vh_-_3rem)]", true)`
if name == "class" {
if let Some(expr) = node.value.as_ref() {
if let syn::Expr::Tuple(tuple) = expr.as_ref() {
if let Some(expr) = node.value() {
if let syn::Expr::Tuple(tuple) = expr {
if tuple.elems.len() == 2 {
let span = node.key.span();
let class = quote_spanned! {
@@ -1948,12 +1925,12 @@ fn fancy_class_name<'a>(
fn fancy_style_name<'a>(
name: &str,
cx: &Ident,
node: &'a NodeAttribute,
node: &'a KeyedAttribute,
) -> Option<(TokenStream, String, &'a Expr)> {
// special case for complex dynamic style names:
if name == "style" {
if let Some(expr) = node.value.as_ref() {
if let syn::Expr::Tuple(tuple) = expr.as_ref() {
if let Some(expr) = node.value() {
if let syn::Expr::Tuple(tuple) = expr {
if tuple.elems.len() == 2 {
let span = node.key.span();
let style = quote_spanned! {

View File

@@ -44,7 +44,7 @@ error: unexpected end of input, expected assignment `=`
47 | #[prop(default)] default: bool,
| ^
error: unexpected end of input, expected one of: `::`, `<`, `_`, literal, `const`, `ref`, `mut`, `&`, parentheses, square brackets, `..`, `const`
error: unexpected end of input, expected one of: identifier, `::`, `<`, `_`, literal, `const`, `ref`, `mut`, `&`, parentheses, square brackets, `..`, `const`
= help: try `#[prop(default=5 * 10)]`
--> tests/ui/component.rs:56:22

View File

@@ -44,7 +44,7 @@ error: unexpected end of input, expected assignment `=`
45 | #[prop(default)] default: bool,
| ^
error: unexpected end of input, expected one of: `::`, `<`, `_`, literal, `const`, `ref`, `mut`, `&`, parentheses, square brackets, `..`, `const`
error: unexpected end of input, expected one of: identifier, `::`, `<`, `_`, literal, `const`, `ref`, `mut`, `&`, parentheses, square brackets, `..`, `const`
= help: try `#[prop(default=5 * 10)]`
--> tests/ui/component_absolute.rs:54:22

View File

@@ -42,7 +42,7 @@ web-sys = { version = "0.3", optional = true, features = [
] }
cfg-if = "1"
indexmap = "1"
self_cell = "1.0.0"
ouroboros = { version = "0.15.6", default-features = false }
[dev-dependencies]
criterion = { version = "0.4.0", features = ["html_reports"] }

View File

@@ -110,7 +110,7 @@ pub use slice::*;
pub use spawn::*;
pub use spawn_microtask::*;
pub use stored_value::*;
pub use suspense::SuspenseContext;
pub use suspense::{GlobalSuspenseContext, SuspenseContext};
pub use trigger::*;
mod macros {

View File

@@ -5,7 +5,7 @@ use crate::{
SignalDispose, SignalGet, SignalGetUntracked, SignalStream, SignalWith,
SignalWithUntracked,
};
use std::{any::Any, cell::RefCell, fmt::Debug, marker::PhantomData, rc::Rc};
use std::{any::Any, cell::RefCell, fmt, marker::PhantomData, rc::Rc};
/// Creates an efficient derived reactive value based on other reactive values.
///
@@ -151,7 +151,6 @@ where
/// });
/// # }).dispose();
/// ```
#[derive(Debug, PartialEq, Eq)]
pub struct Memo<T>
where
T: 'static,
@@ -180,6 +179,26 @@ where
impl<T> Copy for Memo<T> {}
impl<T> fmt::Debug for Memo<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut s = f.debug_struct("Memo");
s.field("runtime", &self.runtime);
s.field("id", &self.id);
s.field("ty", &self.ty);
#[cfg(any(debug_assertions, feature = "ssr"))]
s.field("defined_at", &self.defined_at);
s.finish()
}
}
impl<T> Eq for Memo<T> {}
impl<T> PartialEq for Memo<T> {
fn eq(&self, other: &Self) -> bool {
self.runtime == other.runtime && self.id == other.id
}
}
impl<T: Clone> SignalGetUntracked<T> for Memo<T> {
#[cfg_attr(
any(debug_assertions, feature = "ssr"),

View File

@@ -5,8 +5,9 @@ use crate::{
runtime::{with_runtime, RuntimeId},
serialization::Serializable,
spawn::spawn_local,
use_context, Memo, ReadSignal, Scope, ScopeProperty, SignalGetUntracked,
SignalSet, SignalUpdate, SignalWith, SuspenseContext, WriteSignal,
use_context, GlobalSuspenseContext, Memo, ReadSignal, Scope, ScopeProperty,
SignalGetUntracked, SignalSet, SignalUpdate, SignalWith, SuspenseContext,
WriteSignal,
};
use std::{
any::Any,
@@ -820,6 +821,7 @@ where
f: impl FnOnce(&T) -> U,
location: &'static Location<'static>,
) -> Option<U> {
let global_suspense_cx = use_context::<GlobalSuspenseContext>(cx);
let suspense_cx = use_context::<SuspenseContext>(cx);
let v = self
@@ -882,6 +884,24 @@ where
}
}
}
if let Some(g) = &global_suspense_cx {
if let Ok(ref mut contexts) = suspense_contexts.try_borrow_mut()
{
g.with_inner(|s| {
if !contexts.contains(s) {
contexts.insert(*s);
if !has_value {
s.increment(
serializable
!= ResourceSerialization::Local,
);
}
}
})
}
}
};
create_isomorphic_effect(cx, increment);
@@ -1005,6 +1025,7 @@ where
}
}
#[derive(Clone)]
pub(crate) enum AnyResource {
Unserializable(Rc<dyn UnserializableResource>),
Serializable(Rc<dyn SerializableResource>),

View File

@@ -188,6 +188,7 @@ impl Runtime {
}
}
#[allow(clippy::await_holding_refcell_ref)] // not using this part of ouroboros
pub(crate) fn mark_dirty(&self, node: NodeId) {
//crate::macros::debug_warn!("marking {node:?} dirty");
let mut nodes = self.nodes.borrow_mut();
@@ -216,24 +217,23 @@ impl Runtime {
* `Check` or `DirtyMarked`.
*
* Because of `RefCell`, borrowing the iterators all at once is difficult,
* so a self-referential struct is used instead. self_cell produces safe
* so a self-referential struct is used instead. ouroboros produces safe
* code, but it would not be recommended to use this outside of this
* algorithm.
*/
type Dependent<'a> = indexmap::set::Iter<'a, NodeId>;
#[ouroboros::self_referencing]
struct RefIter<'a> {
set: std::cell::Ref<'a, FxIndexSet<NodeId>>,
self_cell::self_cell! {
struct RefIter<'a> {
owner: std::cell::Ref<'a, FxIndexSet<NodeId>>,
#[not_covariant] // avoids extra codegen, harmless to mark it as such
dependent: Dependent,
}
// Boxes the iterator internally
#[borrows(set)]
#[covariant]
iter: indexmap::set::Iter<'this, NodeId>,
}
/// Due to the limitations of self-referencing, we cannot borrow the
/// stack and iter simultaneously within the closure or the loop,
/// Due to the limitations of ouroboros, we cannot borrow the
/// stack and iter simultaneously, or directly within the loop,
/// therefore this must be used to command the outside scope
/// of what to do.
enum IterResult<'a> {
@@ -251,7 +251,7 @@ impl Runtime {
}
while let Some(iter) = stack.last_mut() {
let res = iter.with_dependent_mut(|_, iter| {
let res = iter.with_iter_mut(|iter| {
let Some(mut child) = iter.next().copied() else {
return IterResult::Empty;
};
@@ -743,7 +743,7 @@ impl Runtime {
S: 'static,
T: 'static,
{
let resources = self.resources.borrow();
let resources = { self.resources.borrow().clone() };
let res = resources.get(id);
if let Some(res) = res {
let res_state = match res {
@@ -796,7 +796,8 @@ impl Runtime {
cx: Scope,
) -> FuturesUnordered<PinnedFuture<(ResourceId, String)>> {
let f = FuturesUnordered::new();
for (id, resource) in self.resources.borrow().iter() {
let resources = { self.resources.borrow().clone() };
for (id, resource) in resources.iter() {
if let AnyResource::Serializable(resource) = resource {
f.push(resource.to_serialization_resolver(cx, id));
}

View File

@@ -10,7 +10,13 @@ use crate::{
};
use futures::Stream;
use std::{
any::Any, cell::RefCell, fmt::Debug, marker::PhantomData, pin::Pin, rc::Rc,
any::Any,
cell::RefCell,
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
pin::Pin,
rc::Rc,
};
use thiserror::Error;
@@ -463,7 +469,6 @@ pub fn create_signal_from_stream<T>(
/// # }).dispose();
/// #
/// ```
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct ReadSignal<T>
where
T: 'static,
@@ -784,6 +789,33 @@ impl<T> Clone for ReadSignal<T> {
impl<T> Copy for ReadSignal<T> {}
impl<T> fmt::Debug for ReadSignal<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut s = f.debug_struct("ReadSignal");
s.field("runtime", &self.runtime);
s.field("id", &self.id);
s.field("ty", &self.ty);
#[cfg(any(debug_assertions, feature = "ssr"))]
s.field("defined_at", &self.defined_at);
s.finish()
}
}
impl<T> Eq for ReadSignal<T> {}
impl<T> PartialEq for ReadSignal<T> {
fn eq(&self, other: &Self) -> bool {
self.runtime == other.runtime && self.id == other.id
}
}
impl<T> Hash for ReadSignal<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.runtime.hash(state);
self.id.hash(state);
}
}
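// Hedged sketch (not part of the diff): because these impls compare and hash
// the signal's identity (runtime + id) rather than its value, a signal can be
// used as a HashMap key even when `T` itself implements neither `PartialEq`
// nor `Hash`.
fn demo_identity_hashing(cx: Scope) {
    use std::collections::HashMap;
    struct NoTraits(Vec<u8>); // deliberately has no PartialEq/Hash derives
    let (sig, _set) = create_signal(cx, NoTraits(Vec::new()));
    let mut labels: HashMap<ReadSignal<NoTraits>, &str> = HashMap::new();
    labels.insert(sig, "first");
    assert_eq!(labels.get(&sig), Some(&"first"));
}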
/// The setter for a reactive signal.
///
/// A signal is a piece of data that may change over time,
@@ -829,7 +861,6 @@ impl<T> Copy for ReadSignal<T> {}
/// # }).dispose();
/// #
/// ```
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct WriteSignal<T>
where
T: 'static,
@@ -1045,6 +1076,33 @@ impl<T> Clone for WriteSignal<T> {
impl<T> Copy for WriteSignal<T> {}
impl<T> fmt::Debug for WriteSignal<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut s = f.debug_struct("WriteSignal");
s.field("runtime", &self.runtime);
s.field("id", &self.id);
s.field("ty", &self.ty);
#[cfg(any(debug_assertions, feature = "ssr"))]
s.field("defined_at", &self.defined_at);
s.finish()
}
}
impl<T> Eq for WriteSignal<T> {}
impl<T> PartialEq for WriteSignal<T> {
fn eq(&self, other: &Self) -> bool {
self.runtime == other.runtime && self.id == other.id
}
}
impl<T> Hash for WriteSignal<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.runtime.hash(state);
self.id.hash(state);
}
}
/// Creates a reactive signal with the getter and setter unified in one value.
/// You may prefer this style, or it may be easier to pass around in a context
/// or as a function argument.
@@ -1126,7 +1184,6 @@ pub fn create_rw_signal<T>(cx: Scope, value: T) -> RwSignal<T> {
/// # }).dispose();
/// #
/// ```
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct RwSignal<T>
where
T: 'static,
@@ -1146,6 +1203,33 @@ impl<T> Clone for RwSignal<T> {
impl<T> Copy for RwSignal<T> {}
impl<T> fmt::Debug for RwSignal<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut s = f.debug_struct("RwSignal");
s.field("runtime", &self.runtime);
s.field("id", &self.id);
s.field("ty", &self.ty);
#[cfg(any(debug_assertions, feature = "ssr"))]
s.field("defined_at", &self.defined_at);
s.finish()
}
}
impl<T> Eq for RwSignal<T> {}
impl<T> PartialEq for RwSignal<T> {
fn eq(&self, other: &Self) -> bool {
self.runtime == other.runtime && self.id == other.id
}
}
impl<T> Hash for RwSignal<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.runtime.hash(state);
self.id.hash(state);
}
}
impl<T: Clone> SignalGetUntracked<T> for RwSignal<T> {
#[cfg_attr(
any(debug_assertions, feature = "ssr"),

View File

@@ -60,7 +60,6 @@ where
/// assert_eq!(above_3(&memoized_double_count.into()), true);
/// # });
/// ```
#[derive(Debug, PartialEq, Eq)]
pub struct Signal<T>
where
T: 'static,
@@ -82,6 +81,24 @@ impl<T> Clone for Signal<T> {
impl<T> Copy for Signal<T> {}
impl<T> std::fmt::Debug for Signal<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut s = f.debug_struct("Signal");
s.field("inner", &self.inner);
#[cfg(any(debug_assertions, feature = "ssr"))]
s.field("defined_at", &self.defined_at);
s.finish()
}
}
impl<T> Eq for Signal<T> {}
impl<T> PartialEq for Signal<T> {
fn eq(&self, other: &Self) -> bool {
self.inner == other.inner
}
}
/// Please note that using `Signal::with_untracked` still clones the inner value,
/// so there's no benefit to using it as opposed to calling
/// `Signal::get_untracked`.
@@ -431,10 +448,7 @@ impl<T> Clone for SignalTypes<T> {
impl<T> Copy for SignalTypes<T> {}
impl<T> std::fmt::Debug for SignalTypes<T>
where
T: std::fmt::Debug,
{
impl<T> std::fmt::Debug for SignalTypes<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::ReadSignal(arg0) => {
@@ -448,10 +462,7 @@ where
}
}
impl<T> PartialEq for SignalTypes<T>
where
T: PartialEq,
{
impl<T> PartialEq for SignalTypes<T> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(Self::ReadSignal(l0), Self::ReadSignal(r0)) => l0 == r0,

View File

@@ -83,7 +83,7 @@ where
)
}
/// Takes a memoized, read-only slice of a signal. This is equivalent to the
/// Takes a memoized, read-only slice of a signal. This is equivalent to the
/// read-only half of [`create_slice`].
pub fn create_read_slice<T, O>(
cx: Scope,

View File

@@ -1,6 +1,12 @@
#![forbid(unsafe_code)]
use crate::{with_runtime, RuntimeId, Scope, ScopeProperty};
use std::{cell::RefCell, marker::PhantomData, rc::Rc};
use std::{
cell::RefCell,
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
rc::Rc,
};
slotmap::new_key_type! {
/// Unique ID assigned to a [`StoredValue`].
@@ -16,7 +22,6 @@ slotmap::new_key_type! {
/// and [`RwSignal`](crate::RwSignal)), it is `Copy` and `'static`. Unlike the signal
/// types, it is not reactive; accessing it does not cause effects to subscribe, and
/// updating it does not notify anything else.
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct StoredValue<T>
where
T: 'static,
@@ -38,9 +43,33 @@ impl<T> Clone for StoredValue<T> {
impl<T> Copy for StoredValue<T> {}
impl<T> fmt::Debug for StoredValue<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("StoredValue")
.field("runtime", &self.runtime)
.field("id", &self.id)
.field("ty", &self.ty)
.finish()
}
}
impl<T> Eq for StoredValue<T> {}
impl<T> PartialEq for StoredValue<T> {
fn eq(&self, other: &Self) -> bool {
self.runtime == other.runtime && self.id == other.id
}
}
impl<T> Hash for StoredValue<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.runtime.hash(state);
self.id.hash(state);
}
}
impl<T> StoredValue<T> {
/// Returns a clone of the signals current value, subscribing the effect
/// to this signal.
/// Returns a clone of the current stored value.
///
/// # Panics
/// Panics if you try to access a value stored in a [`Scope`] that has been disposed.
@@ -70,7 +99,7 @@ impl<T> StoredValue<T> {
self.try_get_value().expect("could not get stored value")
}
/// Same as [`StoredValue::get`] but will not panic by default.
/// Same as [`StoredValue::get_value`] but will not panic by default.
#[track_caller]
pub fn try_get_value(&self) -> Option<T>
where
@@ -79,7 +108,7 @@ impl<T> StoredValue<T> {
self.try_with_value(T::clone)
}
/// Applies a function to the current stored value.
/// Applies a function to the current stored value and returns the result.
///
/// # Panics
/// Panics if you try to access a value stored in a [`Scope`] that has been disposed.
@@ -105,8 +134,8 @@ impl<T> StoredValue<T> {
self.try_with_value(f).expect("could not get stored value")
}
/// Same as [`StoredValue::with`] but returns [`Some(O)]` only if
/// the signal is still valid. [`None`] otherwise.
/// Same as [`StoredValue::with_value`] but returns [`Some(O)]` only if
/// the stored value has not yet been disposed. [`None`] otherwise.
pub fn try_with_value<O>(&self, f: impl FnOnce(&T) -> O) -> Option<O> {
with_runtime(self.runtime, |runtime| {
let value = {
@@ -161,8 +190,8 @@ impl<T> StoredValue<T> {
.expect("could not set stored value");
}
/// Same as [`Self::update`], but returns [`Some(O)`] if the
/// signal is still valid, [`None`] otherwise.
/// Same as [`Self::update_value`], but returns [`Some(O)`] if the
/// stored value has not yet been disposed, [`None`] otherwise.
pub fn try_update_value<O>(self, f: impl FnOnce(&mut T) -> O) -> Option<O> {
with_runtime(self.runtime, |runtime| {
let values = runtime.stored_values.borrow();
@@ -195,8 +224,8 @@ impl<T> StoredValue<T> {
self.try_set_value(value);
}
/// Same as [`Self::set`], but returns [`None`] if the signal is
/// still valid, [`Some(T)`] otherwise.
/// Same as [`Self::set_value`], but returns [`None`] if the
/// stored value has not yet been disposed, [`Some(T)`] otherwise.
pub fn try_set_value(&self, value: T) -> Option<T> {
with_runtime(self.runtime, |runtime| {
let values = runtime.stored_values.borrow();

View File

@@ -2,11 +2,14 @@
#![forbid(unsafe_code)]
use crate::{
create_rw_signal, create_signal, queue_microtask, store_value, ReadSignal,
RwSignal, Scope, SignalUpdate, StoredValue, WriteSignal,
create_isomorphic_effect, create_rw_signal, create_signal, queue_microtask,
signal::SignalGet, store_value, ReadSignal, RwSignal, Scope, SignalSet,
SignalUpdate, StoredValue, WriteSignal,
};
use futures::Future;
use std::{borrow::Cow, collections::VecDeque, pin::Pin};
use std::{
borrow::Cow, cell::RefCell, collections::VecDeque, pin::Pin, rc::Rc,
};
/// Tracks [`Resource`](crate::Resource)s that are read under a suspense context,
/// i.e., within a [`Suspense`](https://docs.rs/leptos_core/latest/leptos_core/fn.Suspense.html) component.
@@ -20,6 +23,30 @@ pub struct SuspenseContext {
pub(crate) should_block: StoredValue<bool>,
}
/// A single, global suspense context that will be checked when resources
/// are read. This won't be “blocked” by lower suspense components. This is
/// useful, e.g., for holding route transitions.
#[derive(Clone, Debug)]
pub struct GlobalSuspenseContext(Rc<RefCell<SuspenseContext>>);
impl GlobalSuspenseContext {
/// Creates an empty global suspense context.
pub fn new(cx: Scope) -> Self {
Self(Rc::new(RefCell::new(SuspenseContext::new(cx))))
}
/// Runs a function with a reference to the underlying suspense context.
pub fn with_inner<T>(&self, f: impl FnOnce(&SuspenseContext) -> T) -> T {
f(&*self.0.borrow())
}
/// Resets the underlying suspense context, replacing it with a fresh one for the given scope.
pub fn reset(&self, cx: Scope) {
let mut inner = self.0.borrow_mut();
_ = std::mem::replace(&mut *inner, SuspenseContext::new(cx));
}
}
impl SuspenseContext {
/// Whether the suspense contains local resources at this moment,
/// and therefore can't be serialized
@@ -32,6 +59,25 @@ impl SuspenseContext {
pub fn should_block(&self) -> bool {
self.should_block.get_value()
}
/// Returns a `Future` that resolves when this suspense is resolved.
pub fn to_future(&self, cx: Scope) -> impl Future<Output = ()> {
use futures::StreamExt;
let pending_resources = self.pending_resources;
let (tx, mut rx) = futures::channel::mpsc::channel(1);
let tx = RefCell::new(tx);
queue_microtask(move || {
create_isomorphic_effect(cx, move |_| {
if pending_resources.get() == 0 {
_ = tx.borrow_mut().try_send(());
}
})
});
async move {
rx.next().await;
}
}
}
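A rough usage sketch (assumed, not shown in the diff) of the new `to_future`, presuming a `SuspenseContext` has been provided via context and `cx: Scope` is available, e.g. inside a component body:
use leptos::*;
if let Some(suspense) = use_context::<SuspenseContext>(cx) {
    spawn_local(async move {
        // Resolves once the counter of pending resources reaches zero.
        suspense.to_future(cx).await;
        log!("all pending resources under this suspense have resolved");
    });
}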
impl std::hash::Hash for SuspenseContext {
@@ -98,6 +144,12 @@ impl SuspenseContext {
});
}
/// Resets the counter of pending resources.
pub fn clear(&self) {
self.set_pending_resources.set(0);
self.pending_serializable_resources.set(0);
}
/// Tests whether all of the pending resources have resolved.
pub fn ready(&self) -> bool {
self.pending_resources

View File

@@ -11,7 +11,7 @@ use crate::{
/// Reactive Trigger, notifies reactive code to rerun.
///
/// See [`create_trigger`] for more.
#[derive(Clone, Copy)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Trigger {
pub(crate) runtime: RuntimeId,
pub(crate) id: NodeId,

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_meta"
version = "0.3.1"
version = "0.3.0"
edition = "2021"
authors = ["Greg Johnston"]
license = "MIT"

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_router"
version = "0.3.1"
version = "0.3.0"
edition = "2021"
authors = ["Greg Johnston"]
license = "MIT"

View File

@@ -95,8 +95,69 @@ where
let action = use_resolved_path(cx, move || action.clone())
.get()
.unwrap_or_default();
// multipart POST (setting Content-Type breaks the request)
if method == "post" && enctype == "multipart/form-data" {
ev.prevent_default();
ev.stop_propagation();
let on_response = on_response.clone();
spawn_local(async move {
let res = gloo_net::http::Request::post(&action)
.header("Accept", "application/json")
.redirect(RequestRedirect::Follow)
.body(form_data)
.send()
.await;
match res {
Err(e) => {
error!("<Form/> error while POSTing: {e:#?}");
if let Some(error) = error {
error.set(Some(Box::new(e)));
}
}
Ok(resp) => {
if let Some(version) = action_version {
version.update(|n| *n += 1);
}
if let Some(error) = error {
error.set(None);
}
if let Some(on_response) = on_response.clone() {
on_response(resp.as_raw());
}
// Check all the logical 3xx responses that might
// get returned from a server function
if resp.redirected() {
let resp_url = &resp.url();
match Url::try_from(resp_url.as_str()) {
Ok(url) => {
request_animation_frame(move || {
if let Err(e) = navigate(
&format!(
"{}{}{}",
url.pathname,
if url.search.is_empty() {
""
} else {
"?"
},
url.search,
),
Default::default(),
) {
warn!("{}", e);
}
});
}
Err(e) => warn!("{}", e),
}
}
}
}
});
}
// POST
if method == "post" {
else if method == "post" {
ev.prevent_default();
ev.stop_propagation();
@@ -135,8 +196,14 @@ where
request_animation_frame(move || {
if let Err(e) = navigate(
&format!(
"{}{}",
url.pathname, url.search,
"{}{}{}",
url.pathname,
if url.search.is_empty() {
""
} else {
"?"
},
url.search,
),
Default::default(),
) {

View File

@@ -90,10 +90,14 @@ where
children: Children,
) -> HtmlElement<leptos::html::A> {
#[cfg(not(any(feature = "hydrate", feature = "csr")))]
_ = state;
{
_ = state;
}
#[cfg(not(any(feature = "hydrate", feature = "csr")))]
_ = replace;
{
_ = replace;
}
let location = use_location(cx);
let is_active = create_memo(cx, move |_| match href.get() {

View File

@@ -1,6 +1,7 @@
mod form;
mod link;
mod outlet;
mod progress;
mod redirect;
mod route;
mod router;
@@ -9,6 +10,7 @@ mod routes;
pub use form::*;
pub use link::*;
pub use outlet::*;
pub use progress::*;
pub use redirect::*;
pub use route::*;
pub use router::*;

View File

@@ -1,6 +1,6 @@
use crate::{
animation::{Animation, AnimationState},
use_is_back_navigation, use_route,
use_is_back_navigation, use_route, SetIsRouting,
};
use leptos::{leptos_dom::HydrationCtx, *};
use std::{cell::Cell, rc::Rc};
@@ -45,6 +45,43 @@ pub fn Outlet(cx: Scope) -> impl IntoView {
}
});
let outlet: Signal<Option<View>> =
if cfg!(any(feature = "csr", feature = "hydrate"))
&& use_context::<SetIsRouting>(cx).is_some()
{
let global_suspense = expect_context::<GlobalSuspenseContext>(cx);
let (current_view, set_current_view) = create_signal(cx, None);
create_effect(cx, {
let global_suspense = global_suspense.clone();
move |prev| {
let outlet = outlet.get();
let is_fallback =
!global_suspense.with_inner(SuspenseContext::ready);
if prev.is_none() {
set_current_view.set(outlet);
} else if !is_fallback {
queue_microtask({
let global_suspense = global_suspense.clone();
move || {
let is_fallback = cx.untrack(move || {
!global_suspense
.with_inner(SuspenseContext::ready)
});
if !is_fallback {
set_current_view.set(outlet);
}
}
});
}
}
});
current_view.into()
} else {
outlet.into()
};
leptos::leptos_dom::DynChild::new_with_id(id, move || outlet.get())
}

View File

@@ -0,0 +1,69 @@
use leptos::{leptos_dom::helpers::IntervalHandle, *};
/// A visible indicator that the router is in the process of navigating
/// to another route.
///
/// This is used when `<Router set_is_routing>` has been provided, to
/// provide some visual indicator that the page is currently loading
/// async data, so that it does not appear to have frozen. It can be
/// styled independently.
#[component]
pub fn RoutingProgress(
cx: Scope,
/// Whether the router is currently loading the new page.
#[prop(into)]
is_routing: Signal<bool>,
/// The maximum expected time for loading, which is used to
/// calibrate the animation process.
#[prop(optional, into)]
max_time: std::time::Duration,
/// The time to show the full progress bar after the page has loaded, before hiding it. (Defaults to 250ms.)
#[prop(default = std::time::Duration::from_millis(250))]
before_hiding: std::time::Duration,
/// CSS classes to be applied to the `<progress>`.
#[prop(optional, into)]
class: String,
) -> impl IntoView {
const INCREMENT_EVERY_MS: f32 = 5.0;
let expected_increments =
max_time.as_secs_f32() / (INCREMENT_EVERY_MS / 1000.0);
let percent_per_increment = 100.0 / expected_increments;
let (is_showing, set_is_showing) = create_signal(cx, false);
let (progress, set_progress) = create_signal(cx, 0.0);
create_effect(cx, move |prev: Option<Option<IntervalHandle>>| {
if is_routing.get() && !is_showing.get() {
set_is_showing.set(true);
set_interval_with_handle(
move || {
set_progress.update(|n| *n += percent_per_increment);
},
std::time::Duration::from_millis(INCREMENT_EVERY_MS as u64),
)
.ok()
} else if is_routing.get() && is_showing.get() {
set_progress.set(0.0);
prev?
} else {
set_progress.set(100.0);
set_timeout(
move || {
set_progress.set(0.0);
set_is_showing.set(false);
},
before_hiding,
);
if let Some(Some(interval)) = prev {
interval.clear();
}
None
}
});
view! { cx,
<Show when=move || is_showing.get() fallback=|_| ()>
<progress class=class.clone() min="0" max="100" value=move || progress.get()/>
</Show>
}
}
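For context, a hedged end-to-end sketch of how the new `<RoutingProgress/>` pairs with `<Router set_is_routing>`; the `App` component and route names are illustrative, and the explicit conversions assume the 0.3-era prop builder API:
use leptos::*;
use leptos_router::*;
#[component]
fn App(cx: Scope) -> impl IntoView {
    let (is_routing, set_is_routing) = create_signal(cx, false);
    // Convert explicitly so the props receive the Signal / SignalSetter types they expect.
    let is_routing: Signal<bool> = is_routing.into();
    let set_is_routing: SignalSetter<bool> = set_is_routing.into();
    view! { cx,
        <Router set_is_routing=set_is_routing>
            // visible only while async route data is still loading
            <RoutingProgress
                is_routing=is_routing
                max_time=std::time::Duration::from_millis(500)
            />
            <Routes>
                <Route path="" view=|cx| view! { cx, <h1>"Home"</h1> }/>
            </Routes>
        </Router>
    }
}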

View File

@@ -24,6 +24,9 @@ pub fn Router(
/// A fallback that should be shown if no route is matched.
#[prop(optional)]
fallback: Option<fn(Scope) -> View>,
/// A signal that will be set while the navigation process is underway.
#[prop(optional, into)]
set_is_routing: Option<SignalSetter<bool>>,
/// The `<Router/>` should usually wrap your whole page. It can contain
/// any elements, and should include a [Routes](crate::Routes) component somewhere
/// to define and display [Route](crate::Route)s.
@@ -32,10 +35,17 @@ pub fn Router(
// create a new RouterContext and provide it to every component beneath the router
let router = RouterContext::new(cx, base, fallback);
provide_context(cx, router);
provide_context(cx, GlobalSuspenseContext::new(cx));
if let Some(set_is_routing) = set_is_routing {
provide_context(cx, SetIsRouting(set_is_routing));
}
children(cx)
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) struct SetIsRouting(pub SignalSetter<bool>);
/// Context type that contains information about the current router state.
#[derive(Debug, Clone)]
pub struct RouterContext {
@@ -228,6 +238,9 @@ impl RouterContextInner {
resolve_path("", to, None).map(String::from)
};
// reset count of pending resources at global level
expect_context::<GlobalSuspenseContext>(cx).reset(cx);
match resolved_to {
None => Err(NavigationError::NotRoutable(to.to_string())),
Some(resolved_to) => {
@@ -262,18 +275,34 @@ impl RouterContextInner {
move |state| *state = next_state
});
self.path_stack.update_value(|stack| {
let global_suspense =
expect_context::<GlobalSuspenseContext>(cx);
let path_stack = self.path_stack;
path_stack.update_value(|stack| {
stack.push(resolved_to.clone())
});
if referrers.borrow().len() == len {
this.navigate_end(LocationChange {
value: resolved_to,
replace: false,
scroll: true,
state,
})
let set_is_routing = use_context::<SetIsRouting>(cx);
if let Some(set_is_routing) = set_is_routing {
set_is_routing.0.set(true);
}
spawn_local(async move {
if let Some(set_is_routing) = set_is_routing {
global_suspense
.with_inner(|s| s.to_future(cx))
.await;
set_is_routing.0.set(false);
}
if referrers.borrow().len() == len {
this.navigate_end(LocationChange {
value: resolved_to,
replace: false,
scroll: true,
state,
});
}
});
}
Ok(())
@@ -356,7 +385,10 @@ impl RouterContextInner {
return;
}
let to = path_name + &unescape(&url.search) + &unescape(&url.hash);
let to = path_name
+ if url.search.is_empty() { "" } else { "?" }
+ &unescape(&url.search)
+ &unescape(&url.hash);
let state =
leptos_dom::helpers::get_property(a.unchecked_ref(), "state")
.ok()

View File

@@ -4,7 +4,7 @@ use crate::{
expand_optionals, get_route_matches, join_paths, Branch, Matcher,
RouteDefinition, RouteMatch,
},
use_is_back_navigation, RouteContext, RouterContext,
use_is_back_navigation, RouteContext, RouterContext, SetIsRouting,
};
use leptos::{leptos_dom::HydrationCtx, *};
use std::{
@@ -56,6 +56,7 @@ pub fn Routes(
let id = HydrationCtx::id();
let root = root_route(cx, base_route, route_states, root_equal);
leptos::leptos_dom::DynChild::new_with_id(id, move || root.get())
.into_view(cx)
}
@@ -405,40 +406,75 @@ fn root_route(
base_route: RouteContext,
route_states: Memo<RouterState>,
root_equal: Rc<Cell<bool>>,
) -> Memo<Option<View>> {
) -> Signal<Option<View>> {
let root_cx = RefCell::new(None);
create_memo(cx, move |prev| {
provide_context(cx, route_states);
route_states.with(|state| {
if state.routes.borrow().is_empty() {
Some(base_route.outlet(cx).into_view(cx))
} else {
let root = state.routes.borrow();
let root = root.get(0);
if let Some(route) = root {
provide_context(cx, route.clone());
}
if prev.is_none() || !root_equal.get() {
let (root_view, _) = cx.run_child_scope(|cx| {
let prev_cx = std::mem::replace(
&mut *root_cx.borrow_mut(),
Some(cx),
);
if let Some(prev_cx) = prev_cx {
prev_cx.dispose();
}
root.as_ref()
.map(|route| route.outlet(cx).into_view(cx))
});
root_view
let root_view = create_memo(cx, {
let root_equal = Rc::clone(&root_equal);
move |prev| {
provide_context(cx, route_states);
route_states.with(|state| {
if state.routes.borrow().is_empty() {
Some(base_route.outlet(cx).into_view(cx))
} else {
prev.cloned().unwrap()
let root = state.routes.borrow();
let root = root.get(0);
if let Some(route) = root {
provide_context(cx, route.clone());
}
if prev.is_none() || !root_equal.get() {
let (root_view, _) = cx.run_child_scope(|cx| {
let prev_cx = std::mem::replace(
&mut *root_cx.borrow_mut(),
Some(cx),
);
if let Some(prev_cx) = prev_cx {
prev_cx.dispose();
}
root.as_ref()
.map(|route| route.outlet(cx).into_view(cx))
});
root_view
} else {
prev.cloned().unwrap()
}
}
})
}
});
if cfg!(any(feature = "csr", feature = "hydrate"))
&& use_context::<SetIsRouting>(cx).is_some()
{
let global_suspense = expect_context::<GlobalSuspenseContext>(cx);
let (current_view, set_current_view) = create_signal(cx, None);
create_effect(cx, move |prev| {
let root = root_view.get();
let is_fallback =
!global_suspense.with_inner(SuspenseContext::ready);
if prev.is_none() {
set_current_view.set(root);
} else if !is_fallback {
queue_microtask({
let global_suspense = global_suspense.clone();
move || {
let is_fallback = cx.untrack(move || {
!global_suspense.with_inner(SuspenseContext::ready)
});
if !is_fallback {
set_current_view.set(root);
}
}
});
}
})
})
});
current_view.into()
} else {
root_view.into()
}
}
#[derive(Clone, Debug, PartialEq)]

View File

@@ -42,7 +42,11 @@ impl TryFrom<&str> for Url {
Ok(Self {
origin: url.origin(),
pathname: url.pathname(),
search: url.search(),
search: url
.search()
.strip_prefix('?')
.map(String::from)
.unwrap_or_default(),
search_params: ParamsMap(
try_iter(&url.search_params())
.map_js_error()?

View File

@@ -76,6 +76,23 @@ pub fn use_resolved_path(
}
/// Returns a function that can be used to navigate to a new route.
///
/// ## Panics
/// `use_navigate` can sometimes panic due to a `BorrowMut` runtime error
/// if it is called immediately during routing/rendering. In this case, you should
/// wrap it in [`request_animation_frame`](leptos::request_animation_frame)
/// to delay it until that routing process is complete.
/// ```rust
/// # use leptos::{request_animation_frame,create_scope,create_runtime};
/// # create_scope(create_runtime(), |cx| {
/// # if false { // can't actually navigate, no <Router/>
/// let navigate = leptos_router::use_navigate(cx);
/// request_animation_frame(move || {
/// _ = navigate("/", Default::default());
/// });
/// # }
/// # });
/// ```
pub fn use_navigate(
cx: Scope,
) -> impl Fn(&str, NavigateOptions) -> Result<(), NavigationError> {
@@ -84,7 +101,7 @@ pub fn use_navigate(
Rc::clone(&router.inner).navigate_from_route(to, &options)
}
}
///
/// Returns a signal that tells you whether you are currently navigating backwards.
pub(crate) fn use_is_back_navigation(cx: Scope) -> ReadSignal<bool> {
let router = use_router(cx);

View File

@@ -15,7 +15,7 @@ serde_qs = "0.12"
thiserror = "1"
serde_json = "1"
quote = "1"
syn = { version = "1", features = ["full", "parsing", "extra-traits"] }
syn = { version = "2", features = ["full", "parsing", "extra-traits"] }
proc-macro2 = "1"
ciborium = "0.2"
xxhash-rust = { version = "0.8", features = ["const_xxh64"] }

View File

@@ -11,7 +11,7 @@ description = "The default implementation of the server_fn macro without a conte
proc-macro = true
[dependencies]
syn = { version = "1", features = ["full"] }
syn = { version = "2", features = ["full"] }
server_fn_macro = { workspace = true }
[dev-dependencies]

View File

@@ -11,7 +11,7 @@ readme = "../README.md"
[dependencies]
serde = { version = "1", features = ["derive"] }
quote = "1"
syn = { version = "1", features = ["full", "parsing", "extra-traits"] }
syn = { version = "2", features = ["full", "parsing", "extra-traits"] }
proc-macro2 = "1"
proc-macro-error = "1"
xxhash-rust = { version = "0.8.6", features = ["const_xxh64"] }

View File

@@ -75,9 +75,11 @@ pub fn server_macro_impl(
struct_name,
prefix,
encoding,
fn_path,
..
} = syn::parse2::<ServerFnName>(args)?;
let prefix = prefix.unwrap_or_else(|| Literal::string(""));
let fn_path = fn_path.unwrap_or_else(|| Literal::string(""));
let encoding = quote!(#server_fn_path::#encoding);
let body = syn::parse::<ServerFnBody>(body.into())?;
@@ -213,7 +215,11 @@ pub fn server_macro_impl(
}
fn url() -> &'static str {
if !#fn_path.is_empty(){
#fn_path
} else {
#server_fn_path::const_format::concatcp!(#fn_name_as_str, #server_fn_path::xxhash_rust::const_xxh64::xxh64(concat!(env!(#key_env_var), ":", file!(), ":", line!(), ":", column!()).as_bytes(), 0))
}
}
fn encoding() -> #server_fn_path::Encoding {
@@ -260,6 +266,8 @@ struct ServerFnName {
prefix: Option<Literal>,
_comma2: Option<Token![,]>,
encoding: Path,
_comma3: Option<Token![,]>,
fn_path: Option<Literal>,
}
impl Parse for ServerFnName {
@@ -280,6 +288,8 @@ impl Parse for ServerFnName {
}
})
.unwrap_or_else(|_| syn::parse_quote!(Encoding::Url));
let _comma3 = input.parse()?;
let fn_path = input.parse()?;
Ok(Self {
struct_name,
@@ -287,6 +297,8 @@ impl Parse for ServerFnName {
prefix,
_comma2,
encoding,
_comma3,
fn_path,
})
}
}
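As a hedged illustration of the new optional `fn_path` argument (only part of the parser is shown in this diff, so the exact accepted forms are assumed), a caller could pin a server function to a stable, human-readable endpoint instead of the generated hashed one:
use leptos::*;
// Hypothetical example: the fourth literal replaces the default hashed path segment,
// so the function is served under the "/api" prefix at a fixed, readable URL.
#[server(HelloWorld, "/api", "Url", "hello")]
pub async fn hello_world() -> Result<String, ServerFnError> {
    Ok("Hello!".to_string())
}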