Compare commits


9 Commits

Author  SHA1  Message  Date
benwis  80768eeda1  Clippy why? (Signed-off-by: benwis <ben@celcyon.com>)  2024-01-29 17:37:33 -08:00
benwis  27f7785bef  Fix obvious bad things (Signed-off-by: benwis <ben@celcyon.com>)  2024-01-29 17:31:09 -08:00
benwis  d0329993c9  Added missing doc comment and changed the type for as_query() to give implementers more flexibility (Signed-off-by: benwis <ben@celcyon.com>)  2024-01-29 16:44:45 -08:00
Greg Johnston  8b1bd1ae9e  Merge pull request #2240 from leptos-rs/err-serialization: fix: serialization error during SSR on ServerFnError  2024-01-29 16:32:55 -05:00
Greg Johnston  6ef1531059  example: file upload with streaming progress bar (#2242)  2024-01-29 15:20:19 -05:00
Greg Johnston  9f1406250e  chore: update deprecated .remove() method on IndexSet  2024-01-29 11:32:15 -05:00
Greg Johnston  1f6a892291  fix: serialization error during SSR on ServerFnError  2024-01-29 10:36:08 -05:00
Greg Johnston  0ff1e279a2  fix: correctly track source in create_local_resource (#2238)  2024-01-28 10:09:03 -08:00
Chris  c6096cc2a0  chore: define edtion = "2021" in rustfmt.toml (#2235)  2024-01-27 16:04:25 -08:00
14 changed files with 195 additions and 23 deletions

View File

@@ -31,6 +31,9 @@ web-sys = { version = "0.3.67", features = ["FileList", "File"] }
strum = { version = "0.25.0", features = ["strum_macros", "derive"] }
notify = { version = "6.1.1", optional = true }
pin-project-lite = "0.2.13"
dashmap = { version = "5.5.3", optional = true }
once_cell = { version = "1.19.0", optional = true }
async-broadcast = { version = "0.6.0", optional = true }
[features]
hydrate = ["leptos/hydrate", "leptos_meta/hydrate", "leptos_router/hydrate"]
@@ -43,7 +46,10 @@ ssr = [
"leptos_meta/ssr",
"leptos_router/ssr",
"dep:leptos_axum",
"dep:notify"
"dep:notify",
"dep:dashmap",
"dep:once_cell",
"dep:async-broadcast"
]
[package.metadata.cargo-all-features]

View File

@@ -55,6 +55,7 @@ pub fn HomePage() -> impl IntoView {
<ServerFnArgumentExample/>
<RkyvExample/>
<FileUpload/>
<FileUploadWithProgress/>
<FileWatcher/>
<CustomEncoding/>
}
@@ -331,8 +332,8 @@ pub fn FileUpload() -> impl IntoView {
///
/// On the server, this uses the `multer` crate, which provides a streaming API.
#[server(
input = MultipartFormData,
)]
input = MultipartFormData,
)]
pub async fn file_length(
data: MultipartData,
) -> Result<usize, ServerFnError> {
@@ -390,6 +391,168 @@ pub fn FileUpload() -> impl IntoView {
}
}
/// This component uses server functions to upload a file, while streaming updates on the upload
/// progress.
#[component]
pub fn FileUploadWithProgress() -> impl IntoView {
/// In theory, you could create a single server function which
/// 1) received multipart form data
/// 2) returned a stream that contained updates on the progress
///
/// In reality, browsers do not actually support duplexing requests in this way. In other
/// words, every existing browser actually requires that the request stream be complete before
/// it begins processing the response stream.
///
/// Instead, we can create two separate server functions:
/// 1) one that receives multipart form data and begins processing the upload
/// 2) a second that returns a stream of updates on the progress
///
/// This requires us to store some global state of all the uploads. In a real app, you probably
/// shouldn't do exactly what I'm doing here in the demo. For example, this map just
/// distinguishes between files by filename, not by user.
#[cfg(feature = "ssr")]
mod progress {
use async_broadcast::{broadcast, Receiver, Sender};
use dashmap::DashMap;
use futures::Stream;
use once_cell::sync::Lazy;
struct File {
total: usize,
tx: Sender<usize>,
rx: Receiver<usize>,
}
static FILES: Lazy<DashMap<String, File>> = Lazy::new(DashMap::new);
pub async fn add_chunk(filename: &str, len: usize) {
println!("[{filename}]\tadding {len}");
let mut entry =
FILES.entry(filename.to_string()).or_insert_with(|| {
println!("[{filename}]\tinserting channel");
let (tx, rx) = broadcast(128);
File { total: 0, tx, rx }
});
entry.total += len;
let new_total = entry.total;
// we're about to do an async broadcast, so we don't want to hold a lock across it
let tx = entry.tx.clone();
drop(entry);
// now we send the message and don't have to worry about it
tx.broadcast(new_total)
.await
.expect("couldn't send a message over channel");
}
pub fn for_file(filename: &str) -> impl Stream<Item = usize> {
let entry =
FILES.entry(filename.to_string()).or_insert_with(|| {
println!("[{filename}]\tinserting channel");
let (tx, rx) = broadcast(128);
File { total: 0, tx, rx }
});
entry.rx.clone()
}
}
#[server(
input = MultipartFormData,
)]
pub async fn upload_file(data: MultipartData) -> Result<(), ServerFnError> {
let mut data = data.into_inner().unwrap();
while let Ok(Some(mut field)) = data.next_field().await {
let name =
field.file_name().expect("no filename on field").to_string();
while let Ok(Some(chunk)) = field.chunk().await {
let len = chunk.len();
println!("[{name}]\t{len}");
progress::add_chunk(&name, len).await;
// in a real server function, you'd do something like saving the file here
}
}
Ok(())
}
#[server(output = StreamingText)]
pub async fn file_progress(
filename: String,
) -> Result<TextStream, ServerFnError> {
println!("getting progress on {filename}");
// get the stream of current length for the file
let progress = progress::for_file(&filename);
// separate each number with a newline
// the HTTP response might pack multiple lines of this into a single chunk
// we need some way of dividing them up
let progress = progress.map(|bytes| Ok(format!("{bytes}\n")));
Ok(TextStream::new(progress))
}
let (filename, set_filename) = create_signal(None);
let (max, set_max) = create_signal(None);
let (current, set_current) = create_signal(None);
let on_submit = move |ev: SubmitEvent| {
ev.prevent_default();
let target = ev.target().unwrap().unchecked_into::<HtmlFormElement>();
let form_data = FormData::new_with_form(&target).unwrap();
let file = form_data
.get("file_to_upload")
.unchecked_into::<web_sys::File>();
let filename = file.name();
let size = file.size() as usize;
set_filename(Some(filename.clone()));
set_max(Some(size));
set_current(None::<usize>);
spawn_local(async move {
let mut progress = file_progress(filename)
.await
.expect("couldn't initialize stream")
.into_inner();
while let Some(Ok(len)) = progress.next().await {
// the TextStream from the server function will be a series of `usize` values
// however, the response itself may pack those chunks into a smaller number of
// chunks, each with more text in it
// so we've padded them with newlines, and will split them out here
// each value is the latest total, so we'll just take the last one
let len = len
.split('\n')
.filter(|n| !n.is_empty())
.last()
.expect(
"expected at least one non-empty value from \
newline-delimited rows",
)
.parse::<usize>()
.expect("invalid length");
set_current(Some(len));
}
});
spawn_local(async move {
upload_file(form_data.into())
.await
.expect("couldn't upload file");
});
};
view! {
<h3>File Upload with Progress</h3>
<p>A file upload with progress can be handled with two separate server functions.</p>
<aside>See the doc comment on the component for an explanation.</aside>
<form on:submit=on_submit>
<input type="file" name="file_to_upload"/>
<input type="submit"/>
</form>
{move || filename().map(|filename| view! { <p>Uploading {filename}</p> })}
{move || max().map(|max| view! {
<progress max=max value=move || current().unwrap_or_default()/>
})}
}
}
#[component]
pub fn FileWatcher() -> impl IntoView {
#[server(input = GetUrl, output = StreamingText)]

View File

@@ -385,7 +385,10 @@ where
// client
create_render_effect({
let r = Rc::clone(&r);
move |_| r.load(false, id)
move |_| {
source.track();
r.load(false, id)
}
});
Resource {
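
The source.track() call added in this hunk means a resource created with create_local_resource now re-runs on the client whenever its source changes, matching the commit "fix: correctly track source in create_local_resource (#2238)". Below is a minimal usage sketch of the behavior this enables, assuming the Leptos 0.5/0.6 signal and resource API; fetch_item and ItemViewer are hypothetical names, not part of this diff.

use leptos::*;

// Hypothetical async loader standing in for a real fetch.
async fn fetch_item(id: u32) -> String {
    format!("item #{id}")
}

#[component]
pub fn ItemViewer() -> impl IntoView {
    let (id, set_id) = create_signal(0u32);
    // Before this fix, the client-side render effect did not track `id`,
    // so bumping the signal would not re-run the local resource in the browser.
    let item = create_local_resource(move || id.get(), fetch_item);
    view! {
        <button on:click=move |_| set_id.update(|n| *n += 1)>"Next"</button>
        <p>{move || item.get()}</p>
    }
}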

View File

@@ -281,7 +281,7 @@ impl Runtime {
let source_map = self.node_sources.borrow();
for effect in subs.borrow().iter() {
if let Some(effect_sources) = source_map.get(*effect) {
effect_sources.borrow_mut().remove(&node);
effect_sources.borrow_mut().swap_remove(&node);
}
}
}
@@ -308,7 +308,7 @@ impl Runtime {
let subs = self.node_subscribers.borrow();
for source in sources.borrow().iter() {
if let Some(source) = subs.get(*source) {
source.borrow_mut().remove(&node_id);
source.borrow_mut().swap_remove(&node_id);
}
}
}
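
These hunks adopt the indexmap 2.x rename: IndexSet::remove is deprecated in favor of the explicit swap_remove and shift_remove. The standalone sketch below (assuming indexmap 2.x, not part of the diff) shows the trade-off the explicit names make visible.

use indexmap::IndexSet;

fn main() {
    let mut set: IndexSet<&str> = ["a", "b", "c", "d"].into_iter().collect();

    // swap_remove is what the deprecated remove() already did: O(1), but the
    // last element is swapped into the freed slot, so insertion order changes.
    set.swap_remove("b");
    assert_eq!(set.iter().copied().collect::<Vec<_>>(), ["a", "d", "c"]);

    // shift_remove would preserve insertion order instead, at O(n) cost;
    // the explicit names force callers to choose one deliberately.
}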

View File

@@ -158,7 +158,7 @@ impl MetaTagsContext {
move || {
let head = document().head().unwrap_throw();
_ = head.remove_child(&el);
els.borrow_mut().remove(&id);
els.borrow_mut().swap_remove(&id);
}
});

View File

@@ -28,7 +28,7 @@
//! ## Example
//!
//! ```rust
//!
//!
//! use leptos::*;
//! use leptos_router::*;
//!

View File

@@ -1,3 +1,4 @@
edition = "2021"
imports_granularity = "Crate"
max_width = 80
format_strings = true

View File

@@ -40,7 +40,7 @@ where
{
async fn from_req(req: Request) -> Result<Self, ServerFnError<CustErr>> {
let string_data = req.as_query().unwrap_or_default();
let args = serde_qs::from_str::<Self>(string_data)
let args = serde_qs::from_str::<Self>(&string_data)
.map_err(|e| ServerFnError::Args(e.to_string()))?;
Ok(args)
}

View File

@@ -175,7 +175,6 @@ impl<E> ViaError<E> for WrapError<E> {
/// This means that other error types can easily be converted into it using the
/// `?` operator.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ServerFnError<E = NoCustomError> {
/// A user-defined custom error type, which defaults to [`NoCustomError`].
WrappedServerError(E),
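
Dropping #[serde(tag = "type")] lines up with the "fix: serialization error during SSR on ServerFnError" commit: serde's internally tagged representation cannot serialize newtype variants whose payload is not a map or struct, and ServerFnError's variants carry plain Strings. A minimal reproduction sketch with serde_json follows; TaggedError and DefaultRepr are simplified stand-ins, not the real enum.

use serde::{Deserialize, Serialize};

// Simplified stand-ins for an error enum with string-carrying newtype variants.
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
enum TaggedError {
    ServerError(String),
}

#[derive(Debug, Serialize, Deserialize)]
enum DefaultRepr {
    ServerError(String),
}

fn main() {
    // Internally tagged newtype variants must serialize as maps or structs; a
    // String payload makes serde_json fail at runtime with an error along the
    // lines of "cannot serialize tagged newtype variant ... containing a string".
    assert!(serde_json::to_string(&TaggedError::ServerError("boom".into())).is_err());

    // The default externally tagged representation serializes fine, which is
    // what the enum falls back to once the attribute is removed.
    let json = serde_json::to_string(&DefaultRepr::ServerError("boom".into())).unwrap();
    assert_eq!(json, r#"{"ServerError":"boom"}"#);
}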

View File

@@ -441,6 +441,7 @@ impl<Req, Res> Clone for ServerFnTraitObj<Req, Res> {
}
#[allow(unused)] // used by server integrations
/// A neat little type that stores our trait representations of your server functions
type LazyServerFnMap<Req, Res> =
Lazy<DashMap<&'static str, ServerFnTraitObj<Req, Res>>>;

View File

@@ -37,8 +37,8 @@ impl<CustErr> Req<CustErr> for ActixRequest
where
CustErr: 'static,
{
fn as_query(&self) -> Option<&str> {
self.0 .0.uri().query()
fn as_query(&self) -> Option<Cow<'_, str>> {
self.0 .0.uri().query().map(|q| q.into())
}
fn to_content_type(&self) -> Option<Cow<'_, str>> {

View File

@@ -12,8 +12,8 @@ impl<CustErr> Req<CustErr> for Request<Body>
where
CustErr: 'static,
{
fn as_query(&self) -> Option<&str> {
self.uri().query()
fn as_query(&self) -> Option<Cow<'_, str>> {
self.uri().query().map(|q| q.into())
}
fn to_content_type(&self) -> Option<Cow<'_, str>> {

View File

@@ -78,7 +78,7 @@ where
Self: Sized,
{
/// Returns the query string of the requests URL, starting after the `?`.
fn as_query(&self) -> Option<&str>;
fn as_query(&self) -> Option<Cow<'_, str>>;
/// Returns the `Content-Type` header, if any.
fn to_content_type(&self) -> Option<Cow<'_, str>>;
@@ -116,7 +116,7 @@ impl<CustErr> Req<CustErr> for BrowserMockReq
where
CustErr: 'static,
{
fn as_query(&self) -> Option<&str> {
fn as_query(&self) -> Option<Cow<'_, str>> {
unreachable!()
}
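
Returning Cow<'_, str> instead of &str from as_query (commit d0329993c9, "give implementers more flexibility") lets an integration either borrow the query straight out of the request URI, as the Actix and Axum impls above do, or build an owned string when no borrowed form exists. A hedged sketch of both cases; ParsedRequest and RawRequest are hypothetical types, not part of the server_fn crate.

use std::borrow::Cow;

// A request that stores the full URI: it can hand out a borrowed query slice.
struct ParsedRequest {
    uri: String, // e.g. "/api/foo?a=1&b=2"
}

// A request whose query only exists as decoded pairs: it must build an owned string.
struct RawRequest {
    params: Vec<(String, String)>,
}

impl ParsedRequest {
    // Cheap path: borrow straight out of the stored URI.
    fn as_query(&self) -> Option<Cow<'_, str>> {
        self.uri.split_once('?').map(|(_, q)| Cow::Borrowed(q))
    }
}

impl RawRequest {
    // Flexible path: assemble the query on demand. With the old Option<&str>
    // return type this implementation was not possible, since it cannot
    // return a reference to a locally built String.
    fn as_query(&self) -> Option<Cow<'_, str>> {
        if self.params.is_empty() {
            None
        } else {
            let q = self
                .params
                .iter()
                .map(|(k, v)| format!("{k}={v}"))
                .collect::<Vec<_>>()
                .join("&");
            Some(Cow::Owned(q))
        }
    }
}

On the caller side, serde_qs::from_str::<Self>(&string_data) in the GetUrl decoder keeps working because Cow<str> derefs to &str, which is why that hunk only needed the added &.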

View File

@@ -59,15 +59,14 @@ pub fn server_macro_impl(
.inputs
.iter_mut()
.map(|f| {
let typed_arg = match f {
FnArg::Receiver(_) => {
return Err(syn::Error::new(
let typed_arg =
match f {
FnArg::Receiver(_) => return Err(syn::Error::new(
f.span(),
"cannot use receiver types in server function macro",
))
}
FnArg::Typed(t) => t,
};
)),
FnArg::Typed(t) => t,
};
// strip `mut`, which is allowed in fn args but not in struct fields
if let Pat::Ident(ident) = &mut *typed_arg.pat {