Mirror of https://github.com/leptos-rs/leptos.git (synced 2025-12-28 07:52:34 -05:00)

Compare commits: 0.7.0-alph ... v0.6.13 (6 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 7e75801f7c | |
| | 0763a81cf1 | |
| | 3d37f08539 | |
| | b3db094618 | |
| | 0c817d51fe | |
| | fb5d8513ff | |
@@ -27,3 +27,6 @@ tokio = { version = "1", features = ["rt", "fs"] }
 [features]
 nonce = ["leptos/nonce"]
 experimental-islands = ["leptos_integration_utils/experimental-islands"]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -729,10 +729,7 @@ async fn stream_app(
 
     build_stream_response(options, res_options, stream, runtime).await
 }
-#[cfg_attr(
-    any(debug_assertions, feature = "ssr"),
-    instrument(level = "trace", skip_all,)
-)]
+#[cfg_attr(any(debug_assertions), instrument(level = "trace", skip_all,))]
 async fn stream_app_in_order(
     options: &LeptosOptions,
     app: impl FnOnce() -> View + 'static,
@@ -37,3 +37,6 @@ nonce = ["leptos/nonce"]
 wasm = []
 default = ["tokio/fs", "tokio/sync"]
 experimental-islands = ["leptos_integration_utils/experimental-islands"]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -18,3 +18,6 @@ tracing = "0.1.37"
 
 [features]
 experimental-islands = []
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -141,3 +141,6 @@ skip_feature_sets = [
         "rustls",
     ],
 ]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -20,3 +20,6 @@ typed-builder = "0.18"
 tokio = { version = "1", features = ["rt", "macros"] }
 tempfile = "3"
 temp-env = { version = "0.3.6", features = ["async_closure"] }
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -178,3 +178,6 @@ trace-component-props = []
 [package.metadata.cargo-all-features]
 denylist = ["nightly", "trace-component-props"]
 skip_feature_sets = [["web", "ssr"]]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -46,6 +46,16 @@ pub trait IntoStyle {
     fn into_style_boxed(self: Box<Self>) -> Style;
 }
 
+impl IntoStyle for Style {
+    fn into_style(self) -> Style {
+        self
+    }
+
+    fn into_style_boxed(self: Box<Self>) -> Style {
+        *self
+    }
+}
+
 impl IntoStyle for &'static str {
     #[inline(always)]
     fn into_style(self) -> Style {
@@ -176,7 +186,7 @@ impl Style {
     /// Converts the style to its HTML value at that moment so it can be rendered on the server.
     pub fn as_value_string(
         &self,
-        style_name: &'static str,
+        style_name: &str,
     ) -> Option<Oco<'static, str>> {
         match self {
             Style::Value(value) => {
@@ -51,3 +51,6 @@ axum = ["server_fn_macro/axum"]
 [package.metadata.cargo-all-features]
 denylist = ["nightly", "tracing", "trace-component-props"]
 skip_feature_sets = [["csr", "hydrate"], ["hydrate", "csr"], ["hydrate", "ssr"]]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -24,10 +24,7 @@ pub(crate) enum Mode {
 
 impl Default for Mode {
     fn default() -> Self {
-        if cfg!(feature = "hydrate")
-            || cfg!(feature = "csr")
-            || cfg!(feature = "web")
-        {
+        if cfg!(feature = "hydrate") || cfg!(feature = "csr") {
             Mode::Client
         } else {
             Mode::Ssr
@@ -52,12 +52,10 @@ pub(crate) fn fragment_to_tokens(
             None,
         )?;
 
-        let node = quote_spanned! {span=>
-            #[allow(unused_braces)] {#node}
-        };
+        let node = quote_spanned!(span => { #node });
 
         Some(quote! {
-            ::leptos::IntoView::into_view(#node)
+            ::leptos::IntoView::into_view(#[allow(unused_braces)] #node)
         })
     })
     .peekable();
@@ -339,9 +337,7 @@ pub(crate) fn element_to_tokens(
             quote! {}
         };
         let ide_helper_close_tag = ide_helper_close_tag.into_iter();
-        Some(quote_spanned! {node.span()=>
-            #[allow(unused_braces)]
-            {
+        let result = quote_spanned! {node.span()=> {
             #(#ide_helper_close_tag)*
             #name
             #(#attrs)*
@@ -352,7 +348,10 @@ pub(crate) fn element_to_tokens(
             #(#children)*
             #view_marker
         }
-        })
+        };
+
+        // We need to move "allow" out of "quote_spanned" because it breaks hovering in rust-analyzer
+        Some(quote!(#[allow(unused_braces)] #result))
     }
 }
 
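The comment in the hunk above states the motive for the same edit repeated in the component, SSR, and slot hunks below: the `#[allow(unused_braces)]` attribute is moved outside the `quote_spanned!` call so the span attached to the braced expression stays tight around the user's code, which keeps rust-analyzer hover working. A minimal, hypothetical sketch of the pattern (not the actual leptos helper; `proc-macro2` and `quote` are assumed as dependencies):

```rust
use proc_macro2::{Span, TokenStream};
use quote::{quote, quote_spanned};

/// Wrap a user-written expression in braces that carry the expression's own
/// span, then attach the lint attribute *outside* the spanned group so the
/// attribute does not swallow the expression's span.
fn braced_with_allow(value: TokenStream, span: Span) -> TokenStream {
    // Only the braces and the expression are emitted with the user's span...
    let braced = quote_spanned!(span=> { #value });
    // ...while the attribute is emitted with the macro's own call-site span.
    quote!(#[allow(unused_braces)] #braced)
}
```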
@@ -70,12 +70,10 @@ pub(crate) fn component_to_tokens(
             })
             .unwrap_or_else(|| quote! { #name });
 
-            let value = quote_spanned! {value.span()=>
-                #[allow(unused_braces)] {#value}
-            };
+            let value = quote_spanned!(value.span()=> { #value });
 
             quote_spanned! {attr.span()=>
-                .#name(#value)
+                .#name(#[allow(unused_braces)] #value)
             }
         });
 
@@ -83,12 +83,10 @@ pub(crate) fn fragment_to_tokens_ssr(
     let nodes = nodes.iter().map(|node| {
         let span = node.span();
         let node = root_node_to_tokens_ssr(node, global_class, None);
-        let node = quote_spanned! {span=>
-            #[allow(unused_braces)] {#node}
-        };
+        let node = quote_spanned!(span=> { #node });
 
         quote! {
-            ::leptos::IntoView::into_view(#node)
+            ::leptos::IntoView::into_view(#[allow(unused_braces)] #node)
         }
     });
 
@@ -61,12 +61,10 @@ pub(crate) fn slot_to_tokens(
             })
             .unwrap_or_else(|| quote! { #name });
 
-            let value = quote_spanned! {value.span()=>
-                #[allow(unused_braces)] {#value}
-            };
+            let value = quote_spanned!(value.span()=> { #value });
 
             quote_spanned! {attr.span()=>
-                .#name(#value)
+                .#name(#[allow(unused_braces)] #value)
             }
         });
 
@@ -187,7 +185,7 @@ pub(crate) fn slot_to_tokens(
         };
 
         let slot = quote_spanned! {node.span()=>
-            #[allow(unused_braces)] {
+            {
                 let slot = #component_name::builder()
                     #(#props)*
                     #(#slots)*
@@ -200,6 +198,9 @@ pub(crate) fn slot_to_tokens(
             },
         };
 
+        // We need to move "allow" out of "quote_spanned" because it breaks hovering in rust-analyzer
+        let slot = quote!(#[allow(unused_braces)] #slot);
+
         parent_slots
             .entry(name)
             .and_modify(|entry| entry.push(slot.clone()))
@@ -145,27 +145,27 @@ TokenStream [
     Punct {
         char: '#',
         spacing: Alone,
-        span: bytes(51..52),
+        span: bytes(37..52),
     },
     Group {
         delimiter: Bracket,
         stream: TokenStream [
             Ident {
                 sym: allow,
-                span: bytes(51..52),
+                span: bytes(37..52),
             },
             Group {
                 delimiter: Parenthesis,
                 stream: TokenStream [
                     Ident {
                         sym: unused_braces,
-                        span: bytes(51..52),
+                        span: bytes(37..52),
                     },
                 ],
-                span: bytes(51..52),
+                span: bytes(37..52),
             },
         ],
-        span: bytes(51..52),
+        span: bytes(37..52),
     },
     Group {
         delimiter: Brace,
@@ -195,27 +195,27 @@ TokenStream [
     Punct {
         char: '#',
         spacing: Alone,
-        span: bytes(70..71),
+        span: bytes(65..71),
     },
     Group {
         delimiter: Bracket,
         stream: TokenStream [
             Ident {
                 sym: allow,
-                span: bytes(70..71),
+                span: bytes(65..71),
             },
             Group {
                 delimiter: Parenthesis,
                 stream: TokenStream [
                     Ident {
                         sym: unused_braces,
-                        span: bytes(70..71),
+                        span: bytes(65..71),
                     },
                 ],
-                span: bytes(70..71),
+                span: bytes(65..71),
             },
         ],
-        span: bytes(70..71),
+        span: bytes(65..71),
     },
     Group {
         delimiter: Brace,
@@ -145,27 +145,27 @@ TokenStream [
     Punct {
         char: '#',
         spacing: Alone,
-        span: bytes(51..52),
+        span: bytes(37..52),
     },
     Group {
         delimiter: Bracket,
         stream: TokenStream [
             Ident {
                 sym: allow,
-                span: bytes(51..52),
+                span: bytes(37..52),
             },
             Group {
                 delimiter: Parenthesis,
                 stream: TokenStream [
                     Ident {
                         sym: unused_braces,
-                        span: bytes(51..52),
+                        span: bytes(37..52),
                     },
                 ],
-                span: bytes(51..52),
+                span: bytes(37..52),
             },
         ],
-        span: bytes(51..52),
+        span: bytes(37..52),
     },
     Group {
         delimiter: Brace,
@@ -195,27 +195,27 @@ TokenStream [
     Punct {
         char: '#',
         spacing: Alone,
-        span: bytes(70..71),
+        span: bytes(65..71),
     },
     Group {
         delimiter: Bracket,
         stream: TokenStream [
             Ident {
                 sym: allow,
-                span: bytes(70..71),
+                span: bytes(65..71),
             },
             Group {
                 delimiter: Parenthesis,
                 stream: TokenStream [
                     Ident {
                         sym: unused_braces,
-                        span: bytes(70..71),
+                        span: bytes(65..71),
                     },
                 ],
-                span: bytes(70..71),
+                span: bytes(65..71),
             },
         ],
-        span: bytes(70..71),
+        span: bytes(65..71),
     },
     Group {
         delimiter: Brace,
@@ -1,33 +1,27 @@
---
source: leptos_macro/src/view/tests.rs
assertion_line: 101
expression: result
---
TokenStream [
    Punct {
        char: '#',
        spacing: Alone,
        span: bytes(10..331),
    },
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: allow,
                span: bytes(10..331),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [
                    Ident {
                        sym: unused_braces,
                        span: bytes(10..331),
                    },
                ],
                span: bytes(10..331),
            },
        ],
        span: bytes(10..331),
    },
    Group {
        delimiter: Brace,
@@ -153,27 +147,22 @@ TokenStream [
    Punct {
        char: '#',
        spacing: Alone,
        span: bytes(28..83),
    },
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: allow,
                span: bytes(28..83),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [
                    Ident {
                        sym: unused_braces,
                        span: bytes(28..83),
                    },
                ],
                span: bytes(28..83),
            },
        ],
        span: bytes(28..83),
    },
    Group {
        delimiter: Brace,
@@ -404,27 +393,22 @@ TokenStream [
    Punct {
        char: '#',
        spacing: Alone,
        span: bytes(96..176),
    },
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: allow,
                span: bytes(96..176),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [
                    Ident {
                        sym: unused_braces,
                        span: bytes(96..176),
                    },
                ],
                span: bytes(96..176),
            },
        ],
        span: bytes(96..176),
    },
    Group {
        delimiter: Brace,
@@ -697,27 +681,22 @@ TokenStream [
    Punct {
        char: '#',
        spacing: Alone,
        span: bytes(189..223),
    },
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: allow,
                span: bytes(189..223),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [
                    Ident {
                        sym: unused_braces,
                        span: bytes(189..223),
                    },
                ],
                span: bytes(189..223),
            },
        ],
        span: bytes(189..223),
    },
    Group {
        delimiter: Brace,
@@ -902,27 +881,22 @@ TokenStream [
    Punct {
        char: '#',
        spacing: Alone,
        span: bytes(236..316),
    },
    Group {
        delimiter: Bracket,
        stream: TokenStream [
            Ident {
                sym: allow,
                span: bytes(236..316),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [
                    Ident {
                        sym: unused_braces,
                        span: bytes(236..316),
                    },
                ],
                span: bytes(236..316),
            },
        ],
        span: bytes(236..316),
    },
    Group {
        delimiter: Brace,
@@ -145,27 +145,27 @@ TokenStream [
     Punct {
         char: '#',
         spacing: Alone,
-        span: bytes(51..52),
+        span: bytes(37..52),
     },
     Group {
         delimiter: Bracket,
         stream: TokenStream [
             Ident {
                 sym: allow,
-                span: bytes(51..52),
+                span: bytes(37..52),
             },
             Group {
                 delimiter: Parenthesis,
                 stream: TokenStream [
                     Ident {
                         sym: unused_braces,
-                        span: bytes(51..52),
+                        span: bytes(37..52),
                     },
                 ],
-                span: bytes(51..52),
+                span: bytes(37..52),
             },
         ],
-        span: bytes(51..52),
+        span: bytes(37..52),
     },
     Group {
         delimiter: Brace,
@@ -195,27 +195,27 @@ TokenStream [
     Punct {
         char: '#',
         spacing: Alone,
-        span: bytes(70..71),
+        span: bytes(65..71),
     },
     Group {
         delimiter: Bracket,
         stream: TokenStream [
             Ident {
                 sym: allow,
-                span: bytes(70..71),
+                span: bytes(65..71),
             },
             Group {
                 delimiter: Parenthesis,
                 stream: TokenStream [
                     Ident {
                         sym: unused_braces,
-                        span: bytes(70..71),
+                        span: bytes(65..71),
                     },
                 ],
-                span: bytes(70..71),
+                span: bytes(65..71),
             },
         ],
-        span: bytes(70..71),
+        span: bytes(65..71),
     },
     Group {
         delimiter: Brace,
@@ -121,3 +121,6 @@ skip_feature_sets = [
         "rkyv",
     ],
 ]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -31,7 +31,7 @@ pub struct SharedContext {
 impl SharedContext {
     /// Returns IDs for all [`Resource`](crate::Resource)s found on any scope.
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn all_resources() -> Vec<ResourceId> {
@@ -41,7 +41,7 @@ impl SharedContext {
     /// Returns IDs for all [`Resource`](crate::Resource)s found on any scope that are
     /// pending from the server.
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn pending_resources() -> Vec<ResourceId> {
@@ -50,7 +50,7 @@ impl SharedContext {
 
     /// Returns IDs for all [`Resource`](crate::Resource)s found on any scope.
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn serialization_resolvers(
@@ -62,7 +62,7 @@ impl SharedContext {
     /// Registers the given [`SuspenseContext`](crate::SuspenseContext) with the current scope,
     /// calling the `resolver` when its resources are all resolved.
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn register_suspense(
@@ -121,7 +121,7 @@ impl SharedContext {
     /// Returns a tuple of two pinned `Future`s that return content for out-of-order
     /// and in-order streaming, respectively.
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn take_pending_fragment(id: &str) -> Option<FragmentData> {
@@ -135,7 +135,7 @@ impl SharedContext {
 
     /// A future that will resolve when all blocking fragments are ready.
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn blocking_fragments_ready() -> PinnedFuture<()> {
@@ -162,7 +162,7 @@ impl SharedContext {
     /// The keys are hydration IDs. Values are tuples of two pinned
     /// `Future`s that return content for out-of-order and in-order streaming, respectively.
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn pending_fragments() -> HashMap<String, FragmentData> {
@@ -176,7 +176,7 @@ impl SharedContext {
     /// Registers the given element as an island with the current reactive owner.
     #[cfg(all(feature = "hydrate", feature = "experimental-islands"))]
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn register_island(el: &web_sys::HtmlElement) {
@@ -190,7 +190,7 @@ impl SharedContext {
     }
 
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn fragment_has_local_resources(fragment: &str) -> bool {
@@ -204,7 +204,7 @@ impl SharedContext {
     }
 
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn fragments_with_local_resources() -> HashSet<String> {
@@ -216,7 +216,7 @@ impl SharedContext {
     }
 
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     pub fn register_local_fragment(key: String) {
@@ -629,7 +629,7 @@ impl Runtime {
     }
 
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     #[track_caller]
@@ -644,7 +644,7 @@ impl Runtime {
     }
 
     #[cfg_attr(
-        any(debug_assertions, features = "ssr"),
+        any(debug_assertions, feature = "ssr"),
         instrument(level = "trace", skip_all,)
     )]
     #[track_caller]
@@ -1382,7 +1382,7 @@ impl Drop for SetObserverOnDrop {
 ///
 /// To avoid panicking under any circumstances, use [`try_batch`].
 #[cfg_attr(
-    any(debug_assertions, features = "ssr"),
+    any(debug_assertions, feature = "ssr"),
     instrument(level = "trace", skip_all,)
 )]
 #[inline(always)]
@@ -1398,7 +1398,7 @@ pub fn batch<T>(f: impl FnOnce() -> T) -> T {
 ///
 /// Unlike [`batch`], this will not panic if the runtime has been disposed.
 #[cfg_attr(
-    any(debug_assertions, features = "ssr"),
+    any(debug_assertions, feature = "ssr"),
     instrument(level = "trace", skip_all,)
 )]
 #[inline(always)]
@@ -1447,7 +1447,7 @@ pub fn on_cleanup(cleanup_fn: impl FnOnce() + 'static) {
 }
 
 #[cfg_attr(
-    any(debug_assertions, features = "ssr"),
+    any(debug_assertions, feature = "ssr"),
     instrument(level = "trace", skip_all,)
 )]
 fn push_cleanup(cleanup_fn: Box<dyn FnOnce()>) {
@@ -1509,7 +1509,7 @@ impl ScopeProperty {
 /// # runtime.dispose();
 /// ```
 #[cfg_attr(
-    any(debug_assertions, features = "ssr"),
+    any(debug_assertions, feature = "ssr"),
     instrument(level = "trace", skip_all,)
 )]
 #[inline(always)]
@@ -1519,7 +1519,7 @@ pub fn untrack<T>(f: impl FnOnce() -> T) -> T {
 
 #[doc(hidden)]
 #[cfg_attr(
-    any(debug_assertions, features = "ssr"),
+    any(debug_assertions, feature = "ssr"),
     instrument(level = "trace", skip_all,)
 )]
 #[inline(always)]
@@ -332,7 +332,7 @@ pub trait SignalDispose {
 /// #
 /// ```
 #[cfg_attr(
-    any(debug_assertions, features="ssr"),
+    any(debug_assertions, feature="ssr"),
     instrument(
         level = "trace",
         skip_all,
@@ -354,7 +354,7 @@ pub fn create_signal<T>(value: T) -> (ReadSignal<T>, WriteSignal<T>) {
 /// **Note**: If used on the server side during server rendering, this will return `None`
 /// immediately and not begin driving the stream.
 #[cfg_attr(
-    any(debug_assertions, features = "ssr"),
+    any(debug_assertions, feature = "ssr"),
     instrument(level = "trace", skip_all,)
 )]
 pub fn create_signal_from_stream<T>(
@@ -1143,7 +1143,7 @@ impl<T> Hash for WriteSignal<T> {
 /// #
 /// ```
 #[cfg_attr(
-    any(debug_assertions, features="ssr"),
+    any(debug_assertions, feature="ssr"),
     instrument(
         level = "trace",
         skip_all,
@@ -1432,17 +1432,17 @@ impl<T> SignalSetUntracked<T> for RwSignal<T> {
 
 impl<T> SignalUpdateUntracked<T> for RwSignal<T> {
     #[cfg_attr(
-        any(debug_assertions, features="ssr"),
-        instrument(
-            level = "trace",
-            name = "RwSignal::update_untracked()",
-            skip_all,
-            fields(
-                id = ?self.id,
-                defined_at = %self.defined_at,
-                ty = %std::any::type_name::<T>()
-            )
+        any(debug_assertions, feature="ssr"),
+        instrument(
+            level = "trace",
+            name = "RwSignal::update_untracked()",
+            skip_all,
+            fields(
+                id = ?self.id,
+                defined_at = %self.defined_at,
+                ty = %std::any::type_name::<T>()
+            )
         )
     )]
     #[inline(always)]
     fn update_untracked(&self, f: impl FnOnce(&mut T)) {
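Every `cfg_attr` hunk above makes the same one-word fix: `features` is not a cfg key the compiler sets, so `any(debug_assertions, features = "ssr")` only ever held in debug builds; the valid key is `feature = "ssr"`. A hedged, stand-alone illustration of the corrected form (the function name is a placeholder and `tracing` with its `attributes` feature is assumed; this is not code from the repository):

```rust
use tracing::instrument;

// With `feature` (singular) the attribute is active in debug builds *or*
// whenever the crate is compiled with `--features ssr`; with the old
// `features` spelling the second arm could never match.
#[cfg_attr(
    any(debug_assertions, feature = "ssr"),
    instrument(level = "trace", skip_all)
)]
pub fn example_traced_fn() {
    // ...
}
```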
@@ -32,3 +32,6 @@ nightly = ["leptos_reactive/nightly"]
 
 [package.metadata.cargo-all-features]
 denylist = ["nightly"]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -392,7 +392,7 @@ where
         let pending_dispatches = Rc::clone(&self.pending_dispatches);
         let value = self.value;
         pending.set(true);
-        pending_dispatches.set(pending_dispatches.get().saturating_sub(1));
+        pending_dispatches.set(pending_dispatches.get().wrapping_add(1));
         spawn_local(async move {
             let new_value = fut.await;
             let res = try_batch(move || {
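The hunk above flips the dispatch counter: dispatching a new future should increase the number of in-flight dispatches rather than decrease it. A hedged, self-contained sketch of the counting pattern this implies (names and the resolve path are illustrative, not the actual leptos action internals):

```rust
use std::{cell::Cell, rc::Rc};

/// Tracks how many dispatched futures are still running.
#[derive(Clone, Default)]
struct PendingDispatches(Rc<Cell<usize>>);

impl PendingDispatches {
    /// Called when a new future is dispatched.
    fn dispatch(&self) {
        self.0.set(self.0.get().wrapping_add(1));
    }

    /// Called when a dispatched future resolves; saturating so a stray extra
    /// call can never underflow the counter.
    fn resolve(&self) {
        self.0.set(self.0.get().saturating_sub(1));
    }

    /// The action counts as pending while any dispatch is in flight.
    fn is_pending(&self) -> bool {
        self.0.get() > 0
    }
}
```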
@@ -29,3 +29,6 @@ nightly = ["leptos/nightly"]
 [package.metadata.cargo-all-features]
 denylist = ["nightly"]
 skip_feature_sets = [["csr", "ssr"], ["csr", "hydrate"], ["ssr", "hydrate"]]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -117,7 +117,7 @@ impl core::fmt::Debug for MetaTagsContext {
 
 impl MetaTagsContext {
     /// Converts metadata tags into an HTML string.
-    #[cfg(any(feature = "ssr", docs))]
+    #[cfg(any(feature = "ssr", doc))]
     pub fn as_string(&self) -> String {
         self.els
             .borrow()
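`docs` is not a cfg that the compiler or rustdoc ever sets, so the old attribute hid `as_string` from documentation built without the `ssr` feature; `doc` is the predicate rustdoc enables during doc builds. A small, hypothetical illustration of the corrected gating (not the leptos_meta source):

```rust
/// Compiled with the `ssr` feature, but still visible in generated docs
/// because rustdoc sets `doc` while building documentation.
#[cfg(any(feature = "ssr", doc))]
pub fn render_to_string() -> String {
    String::new()
}
```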
@@ -78,3 +78,6 @@ nightly = ["leptos/nightly"]
 # No need to test optional dependencies as they are enabled by the ssr feature
 denylist = ["url", "regex", "nightly"]
 skip_feature_sets = [["csr", "ssr"], ["csr", "hydrate"], ["ssr", "hydrate"]]
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]
@@ -13,8 +13,6 @@ use leptos::{
     },
     *,
 };
-#[cfg(feature = "transition")]
-use leptos_reactive::use_transition;
 use send_wrapper::SendWrapper;
 use std::{cell::RefCell, rc::Rc};
 use thiserror::Error;
@@ -189,10 +187,6 @@ impl RouterContext {
         let (state, set_state) =
             create_signal(source.with_untracked(|s| s.state.clone()));
 
-        // we'll use this transition to wait for async resources to load when navigating to a new route
-        #[cfg(feature = "transition")]
-        let transition = use_transition();
-
         // Each field of `location` reactively represents a different part of the current location
         let location = create_location(reference, state);
         let referrers: Rc<RefCell<Vec<LocationChange>>> =
@@ -111,6 +111,7 @@ ssr = ["inventory"]
 
 [package.metadata.docs.rs]
 all-features = true
+rustdoc-args = ["--generate-link-to-definition"]
 
 # disables some feature combos for testing in CI
 [package.metadata.cargo-all-features]
@@ -22,3 +22,6 @@ ssr = []
 actix = []
 axum = []
 reqwest = []
+
+[package.metadata.docs.rs]
+rustdoc-args = ["--generate-link-to-definition"]