Compare commits

..

5 Commits

Author  SHA1        Message                                      Date
jk      60f5c68ec2  cargo fmt                                    2024-09-08 19:15:42 -04:00
jk      fd19dd38f6  fix: CI - method not found in ArcTrigger     2024-09-08 19:15:25 -04:00
jk      b42d86e600  clippy                                       2024-09-08 19:15:25 -04:00
jk      8845eec553  add copyable Trigger type                    2024-09-08 19:15:25 -04:00
jk      b3c83f7700  rename Trigger -> Notify, trigger -> notify  2024-09-08 19:15:25 -04:00
121 changed files with 1583 additions and 4418 deletions

View File

@@ -1,21 +1,23 @@
name: CI Changed Examples
on:
push:
branches:
- main
- leptos_0.6
pull_request:
branches:
- main
- leptos_0.6
jobs:
get-example-changed:
uses: ./.github/workflows/get-example-changed.yml
get-matrix:
needs: [get-example-changed]
uses: ./.github/workflows/get-changed-examples-matrix.yml
with:
example_changed: ${{ fromJSON(needs.get-example-changed.outputs.example_changed) }}
test:
name: CI
needs: [get-example-changed, get-matrix]

View File

@@ -1,13 +1,13 @@
name: CI Examples
on:
push:
branches:
- main
- leptos_0.6
pull_request:
branches:
- main
- leptos_0.6
jobs:
get-leptos-changed:
uses: ./.github/workflows/get-leptos-changed.yml

View File

@@ -1,24 +1,27 @@
name: CI semver
on:
push:
branches:
- main
- leptos_0.6
pull_request:
branches:
- main
- leptos_0.6
jobs:
get-leptos-changed:
uses: ./.github/workflows/get-leptos-changed.yml
test:
needs: [get-leptos-changed]
if: github.event.pull_request.labels[0].name == 'semver' # needs.get-leptos-changed.outputs.leptos_changed == 'true' && github.event.pull_request.labels[0].name != 'breaking'
name: Run semver check (nightly-2024-08-01)
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Semver Checks
uses: obi1kenobi/cargo-semver-checks-action@v2
with:

View File

@@ -1,25 +1,50 @@
name: CI
on:
push:
branches:
- main
- leptos_0.6
pull_request:
branches:
- main
- leptos_0.6
jobs:
get-leptos-changed:
uses: ./.github/workflows/get-leptos-changed.yml
get-leptos-matrix:
uses: ./.github/workflows/get-leptos-matrix.yml
test:
name: CI
needs: [get-leptos-changed, get-leptos-matrix]
needs: [get-leptos-changed]
if: needs.get-leptos-changed.outputs.leptos_changed == 'true'
strategy:
matrix: ${{ fromJSON(needs.get-leptos-matrix.outputs.matrix) }}
fail-fast: false
matrix:
directory:
[
any_error,
any_spawner,
const_str_slice_concat,
either_of,
hydration_context,
integrations/actix,
integrations/axum,
integrations/utils,
leptos,
leptos_config,
leptos_dom,
leptos_hot_reload,
leptos_macro,
leptos_server,
meta,
next_tuple,
oco,
or_poisoned,
reactive_graph,
router,
router_macro,
server_fn,
server_fn/server_fn_macro_default,
server_fn_macro,
]
uses: ./.github/workflows/run-cargo-make-task.yml
with:
directory: ${{ matrix.directory }}

View File

@@ -1,10 +1,12 @@
name: Examples Changed Call
on:
workflow_call:
outputs:
example_changed:
description: "Example Changed"
value: ${{ jobs.get-example-changed.outputs.example_changed }}
jobs:
get-example-changed:
name: Get Example Changed
@@ -16,6 +18,7 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get example files that changed
id: changed-files
uses: tj-actions/changed-files@v45
@@ -23,10 +26,13 @@ jobs:
files: |
examples/**
!examples/cargo-make/**
!examples/gtk/**
!examples/Makefile.toml
!examples/*.md
- name: List example files that changed
run: echo '${{ steps.changed-files.outputs.all_changed_files }}'
- name: Set example_changed
id: set-example-changed
run: |

View File

@@ -1,34 +1,38 @@
name: Get Examples Matrix Call
on:
workflow_call:
outputs:
matrix:
description: "Matrix"
value: ${{ jobs.create.outputs.matrix }}
jobs:
create:
name: Create Examples Matrix
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
env:
# separate examples using "|" (vertical bar) char like "a|b|c".
# cargo-make should be excluded by default.
EXCLUDED_EXAMPLES: cargo-make
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install jq
run: sudo apt-get install jq
- name: Set Matrix
id: set-matrix
run: |
examples=$(ls -1d examples/*/ |
grep -vE "($EXCLUDED_EXAMPLES)" |
sed 's/\/$//' |
examples=$(ls examples |
awk '{print "examples/" $0}' |
grep -v .md |
grep -v examples/Makefile.toml |
grep -v examples/cargo-make |
grep -v examples/gtk |
jq -R -s -c 'split("\n")[:-1]')
echo "Example Directories: $examples"
echo "matrix={\"directory\":$examples}" >> "$GITHUB_OUTPUT"
- name: Print Location Info
run: |
echo "Workspace: ${{ github.workspace }}"

View File

@@ -1,10 +1,12 @@
name: Get Leptos Changed Call
on:
workflow_call:
outputs:
leptos_changed:
description: "Leptos Changed"
value: ${{ jobs.create.outputs.leptos_changed }}
jobs:
create:
name: Detect Source Change
@@ -16,19 +18,40 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get source files that changed
id: changed-source
uses: tj-actions/changed-files@v45
with:
files_ignore: |
.*/**/*
cargo-make/**/*
examples/**/*
projects/**/*
benchmarks/**/*
docs/**/*
files: |
any_error/**
any_spawner/**
const_str_slice_concat/**
either_of/**
hydration_context/**
integrations/actix/**
integrations/axum/**
integrations/utils/**
leptos/**
leptos_config/**
leptos_dom/**
leptos_hot_reload/**
leptos_macro/**
leptos_server/**
meta/**
next_tuple/**
oco/**
or_poisoned/**
reactive_graph/**
router/**
router_macro/**
server_fn/**
server_fn/server_fn_macro_default/**
server_fn_macro/**
- name: List source files that changed
run: echo '${{ steps.changed-source.outputs.all_changed_files }}'
- name: Set leptos_changed
id: set-source-changed
run: |

View File

@@ -1,32 +0,0 @@
name: Get Leptos Matrix Call
on:
workflow_call:
outputs:
matrix:
description: "Matrix"
value: ${{ jobs.create.outputs.matrix }}
jobs:
create:
name: Create Leptos Matrix
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install jq
run: sudo apt-get install jq
- name: Set Matrix
id: set-matrix
run: |
crates=$(cargo metadata --no-deps --quiet --format-version 1 |
jq -r '.packages[] | select(.name != "workspace") | .manifest_path| rtrimstr("/Cargo.toml")' |
sed "s|$(pwd)/||" |
jq -R -s -c 'split("\n")[:-1]')
echo "Leptos Directories: $crates"
echo "matrix={\"directory\":$crates}" >> "$GITHUB_OUTPUT"
- name: Print Location Info
run: |
echo "Workspace: ${{ github.workspace }}"
pwd
ls | sort -u

View File

@@ -1,4 +1,5 @@
name: Run Task
on:
workflow_call:
inputs:
@@ -11,53 +12,70 @@ on:
toolchain:
required: true
type: string
env:
CARGO_TERM_COLOR: always
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
jobs:
test:
name: Run ${{ inputs.cargo_make_task }} (${{ inputs.toolchain }})
runs-on: ubuntu-latest
steps:
# Setup environment
- uses: actions/checkout@v4
- name: Setup Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ inputs.toolchain }}
- name: Add wasm32-unknown-unknown
run: rustup target add wasm32-unknown-unknown
- name: Setup cargo-make
uses: davidB/rust-cargo-make@v1
- name: Cargo generate-lockfile
run: cargo generate-lockfile
- uses: Swatinem/rust-cache@v2
- name: Install binstall
uses: cargo-bins/cargo-binstall@main
- name: Install wasm-bindgen
run: cargo binstall wasm-bindgen-cli --no-confirm
- name: Install cargo-leptos
run: cargo binstall cargo-leptos --no-confirm
- name: Install Trunk
uses: jetli/trunk-action@v0.5.0
with:
version: "latest"
- name: Print Trunk Version
run: trunk --version
- name: Install Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- uses: pnpm/action-setup@v4
name: Install pnpm
id: pnpm-install
with:
version: 8
run_install: false
- name: Get pnpm store directory
id: pnpm-cache
run: |
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
name: Setup pnpm cache
with:
@@ -65,6 +83,7 @@ jobs:
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Maybe install chromedriver
run: |
project_makefile=${{inputs.directory}}/Makefile.toml
@@ -80,6 +99,7 @@ jobs:
else
echo chromedriver is not required
fi
- name: Maybe install playwright browser dependencies
run: |
for pw_path in $(find ${{inputs.directory}} -name playwright.config.ts)
@@ -93,16 +113,12 @@ jobs:
echo Playwright is not required
fi
done
- name: Install Deno
uses: denoland/setup-deno@v1
with:
deno-version: v1.x
- name: Maybe install gtk-rs dependencies
run: |
if [ ! -z $(echo ${{inputs.directory}} | grep gtk) ]; then
sudo apt-get update
sudo apt-get install -y libglib2.0-dev libgio2.0-cil-dev libgraphene-1.0-dev libcairo2-dev libpango1.0-dev libgtk-4-dev
fi
# Run Cargo Make Task
- name: ${{ inputs.cargo_make_task }}
run: |

View File

@@ -40,36 +40,36 @@ members = [
exclude = ["benchmarks", "examples", "projects"]
[workspace.package]
version = "0.7.0-beta6"
version = "0.7.0-beta4"
edition = "2021"
rust-version = "1.76"
[workspace.dependencies]
throw_error = { path = "./any_error/", version = "0.2.0-beta6" }
throw_error = { path = "./any_error/", version = "0.2.0-beta4" }
any_spawner = { path = "./any_spawner/", version = "0.1.0" }
const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1.0" }
either_of = { path = "./either_of/", version = "0.1.0" }
hydration_context = { path = "./hydration_context", version = "0.2.0-beta6" }
leptos = { path = "./leptos", version = "0.7.0-beta6" }
leptos_config = { path = "./leptos_config", version = "0.7.0-beta6" }
leptos_dom = { path = "./leptos_dom", version = "0.7.0-beta6" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.7.0-beta6" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.7.0-beta6" }
leptos_macro = { path = "./leptos_macro", version = "0.7.0-beta6" }
leptos_router = { path = "./router", version = "0.7.0-beta6" }
leptos_router_macro = { path = "./router_macro", version = "0.7.0-beta6" }
leptos_server = { path = "./leptos_server", version = "0.7.0-beta6" }
leptos_meta = { path = "./meta", version = "0.7.0-beta6" }
next_tuple = { path = "./next_tuple", version = "0.1.0-beta6" }
hydration_context = { path = "./hydration_context", version = "0.2.0-beta4" }
leptos = { path = "./leptos", version = "0.7.0-beta4" }
leptos_config = { path = "./leptos_config", version = "0.7.0-beta4" }
leptos_dom = { path = "./leptos_dom", version = "0.7.0-beta4" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.7.0-beta4" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.7.0-beta4" }
leptos_macro = { path = "./leptos_macro", version = "0.7.0-beta4" }
leptos_router = { path = "./router", version = "0.7.0-beta4" }
leptos_router_macro = { path = "./router_macro", version = "0.7.0-beta4" }
leptos_server = { path = "./leptos_server", version = "0.7.0-beta4" }
leptos_meta = { path = "./meta", version = "0.7.0-beta4" }
next_tuple = { path = "./next_tuple", version = "0.1.0-beta4" }
oco_ref = { path = "./oco", version = "0.2.0" }
or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
reactive_graph = { path = "./reactive_graph", version = "0.1.0-beta6" }
reactive_stores = { path = "./reactive_stores", version = "0.1.0-beta6" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.1.0-beta6" }
server_fn = { path = "./server_fn", version = "0.7.0-beta6" }
server_fn_macro = { path = "./server_fn_macro", version = "0.7.0-beta6" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.7.0-beta6" }
tachys = { path = "./tachys", version = "0.1.0-beta6" }
reactive_graph = { path = "./reactive_graph", version = "0.1.0-beta4" }
reactive_stores = { path = "./reactive_stores", version = "0.1.0-beta4" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.1.0-beta4" }
server_fn = { path = "./server_fn", version = "0.7.0-beta4" }
server_fn_macro = { path = "./server_fn_macro", version = "0.7.0-beta4" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.7.0-beta4" }
tachys = { path = "./tachys", version = "0.1.0-beta4" }
[profile.release]
codegen-units = 1

View File

@@ -1,6 +1,6 @@
[package]
name = "throw_error"
version = "0.2.0-beta6"
version = "0.2.0-beta4"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

View File

@@ -2,8 +2,7 @@
name = "benchmarks"
version = "0.1.0"
edition = "2021"
# std::sync::LazyLock is stabilized in Rust version 1.80.0
rust-version = "1.80.0"
rust-version.workspace = true
[dependencies]
l0410 = { package = "leptos", version = "0.4.10", features = [

View File

@@ -18,7 +18,7 @@ fn leptos_ssr_bench(b: &mut Bencher) {
}
}
let rendered = view! {
let rendered = view! {
<main>
<h1>"Welcome to our benchmark page."</h1>
<p>"Here's some introductory text."</p>
@@ -58,7 +58,7 @@ fn tachys_ssr_bench(b: &mut Bencher) {
}
}
let rendered = view! {
let rendered = view! {
<main>
<h1>"Welcome to our benchmark page."</h1>
<p>"Here's some introductory text."</p>
@@ -92,13 +92,13 @@ fn tera_ssr_bench(b: &mut Bencher) {
{% endfor %}
</main>"#;
static TERA: LazyLock<Tera> = LazyLock::new(|| {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
});
lazy_static::lazy_static! {
static ref TERA: Tera = {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
};
}
#[derive(Serialize, Deserialize)]
struct Counter {

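Several hunks in this compare (the benchmarks above and the counter_isomorphic and ssr_modes examples below) toggle between `std::sync::LazyLock` and the `lazy_static` crate. For reference, a minimal standalone sketch of the two equivalent forms; `GREETING` is an illustrative name, the `LazyLock` form needs Rust 1.80+ (as the Cargo.toml comments in this compare note), and the second form assumes `lazy_static` is listed as a dependency.

```rust
use std::sync::LazyLock;

// Stabilized in Rust 1.80: the initializer runs once, on first access.
static GREETING: LazyLock<String> = LazyLock::new(|| "hello ".repeat(3));

// The equivalent with the lazy_static crate, which works on older toolchains.
lazy_static::lazy_static! {
    static ref GREETING_LEGACY: String = "hello ".repeat(3);
}

fn main() {
    // Both dereference to the lazily initialized value.
    assert_eq!(*GREETING, *GREETING_LEGACY);
}
```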
View File

@@ -55,7 +55,7 @@ static TEMPLATE: &str = r#"<main>
{% else %}
<li><a href="/">All</a></li>
{% endif %}
{% if mode_active %}
<li><a href="/active" class="selected">Active</a></li>
{% else %}
@@ -91,13 +91,13 @@ fn tera_todomvc_ssr(b: &mut Bencher) {
use serde::{Deserialize, Serialize};
use tera::*;
static TERA: LazyLock<Tera> = LazyLock::new(|| {
lazy_static::lazy_static! {
static ref TERA: Tera = {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
});
};
}
#[derive(Serialize, Deserialize)]
struct Todo {
@@ -131,13 +131,13 @@ fn tera_todomvc_ssr_1000(b: &mut Bencher) {
use serde::{Deserialize, Serialize};
use tera::*;
static TERA: LazyLock<Tera> = LazyLock::new(|| {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
});
lazy_static::lazy_static! {
static ref TERA: Tera = {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
};
}
#[derive(Serialize, Deserialize)]
struct Todo {

View File

@@ -133,104 +133,3 @@ tuples!(EitherOf13 + EitherOf13Future + EitherOf13FutureProj => A, B, C, D, E, F
tuples!(EitherOf14 + EitherOf14Future + EitherOf14FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N);
tuples!(EitherOf15 + EitherOf15Future + EitherOf15FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
tuples!(EitherOf16 + EitherOf16Future + EitherOf16FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);
/// Matches over the first expression and returns an either ([`Either`], [`EitherOf3`], ... [`EitherOf6`])
/// composed of the values returned by the match arms.
///
/// The pattern syntax is exactly the same as found in a match arm.
///
/// # Examples
///
/// ```
/// # use either_of::*;
/// let either2 = either!(Some("hello"),
/// Some(s) => s.len(),
/// None => 0.0,
/// );
/// assert!(matches!(either2, Either::<usize, f64>::Left(5)));
///
/// let either3 = either!(Some("admin"),
/// Some("admin") => "hello admin",
/// Some(_) => 'x',
/// _ => 0,
/// );
/// assert!(matches!(either3, EitherOf3::<&str, char, i32>::A("hello admin")));
/// ```
#[macro_export]
macro_rules! either {
($match:expr, $left_pattern:pat => $left_expression:expr, $right_pattern:pat => $right_expression:expr,) => {
match $match {
$left_pattern => $crate::Either::Left($left_expression),
$right_pattern => $crate::Either::Right($right_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr,) => {
match $match {
$a_pattern => $crate::EitherOf3::A($a_expression),
$b_pattern => $crate::EitherOf3::B($b_expression),
$c_pattern => $crate::EitherOf3::C($c_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr,) => {
match $match {
$a_pattern => $crate::EitherOf4::A($a_expression),
$b_pattern => $crate::EitherOf4::B($b_expression),
$c_pattern => $crate::EitherOf4::C($c_expression),
$d_pattern => $crate::EitherOf4::D($d_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr, $e_pattern:pat => $e_expression:expr,) => {
match $match {
$a_pattern => $crate::EitherOf5::A($a_expression),
$b_pattern => $crate::EitherOf5::B($b_expression),
$c_pattern => $crate::EitherOf5::C($c_expression),
$d_pattern => $crate::EitherOf5::D($d_expression),
$e_pattern => $crate::EitherOf5::E($e_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr, $e_pattern:pat => $e_expression:expr, $f_pattern:pat => $f_expression:expr,) => {
match $match {
$a_pattern => $crate::EitherOf6::A($a_expression),
$b_pattern => $crate::EitherOf6::B($b_expression),
$c_pattern => $crate::EitherOf6::C($c_expression),
$d_pattern => $crate::EitherOf6::D($d_expression),
$e_pattern => $crate::EitherOf6::E($e_expression),
$f_pattern => $crate::EitherOf6::F($f_expression),
}
}; // if you need more eithers feel free to open a PR ;-)
}
// compile time test
#[test]
fn either_macro() {
let _: Either<&str, f64> = either!(12,
12 => "12",
_ => 0.0,
);
let _: EitherOf3<&str, f64, i32> = either!(12,
12 => "12",
13 => 0.0,
_ => 12,
);
let _: EitherOf4<&str, f64, char, i32> = either!(12,
12 => "12",
13 => 0.0,
14 => ' ',
_ => 12,
);
let _: EitherOf5<&str, f64, char, f32, i32> = either!(12,
12 => "12",
13 => 0.0,
14 => ' ',
15 => 0.0f32,
_ => 12,
);
let _: EitherOf6<&str, f64, char, f32, u8, i32> = either!(12,
12 => "12",
13 => 0.0,
14 => ' ',
15 => 0.0f32,
16 => 24u8,
_ => 12,
);
}
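For reference, a short sketch of what the two-arm form of `either!` expands to, following the first `macro_rules!` arm above; `classify` is an illustrative function, not part of the crate.

```rust
use either_of::Either;

// Roughly the expansion of `either!(n, 0 => "zero", _ => n as f64,)`.
fn classify(n: i32) -> Either<&'static str, f64> {
    match n {
        0 => Either::Left("zero"),
        _ => Either::Right(n as f64),
    }
}

fn main() {
    assert!(matches!(classify(0), Either::Left("zero")));
    assert!(matches!(classify(7), Either::Right(_)));
}
```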

View File

@@ -32,6 +32,7 @@ pub fn App() -> impl IntoView {
// Provides context that manages stylesheets, titles, meta tags, etc.
provide_meta_context();
let fallback = || view! { "Page not found." }.into_view();
let ssr = SsrMode::Async;
view! {
<Stylesheet id="leptos" href="/pkg/axum_js_ssr.css"/>
@@ -78,19 +79,19 @@ pub fn App() -> impl IntoView {
<h1>"Leptos JavaScript Integration Demo with SSR in Axum"</h1>
<FlatRoutes fallback>
<Route path=path!("") view=HomePage/>
<Route path=path!("naive") view=Naive ssr=SsrMode::Async/>
<Route path=path!("naive-alt") view=|| view! { <NaiveEvent/> } ssr=SsrMode::Async/>
<Route path=path!("naive-hook") view=|| view! { <NaiveEvent hook=true/> } ssr=SsrMode::Async/>
<Route path=path!("naive") view=Naive ssr/>
<Route path=path!("naive-alt") view=|| view! { <NaiveEvent/> } ssr/>
<Route path=path!("naive-hook") view=|| view! { <NaiveEvent hook=true/> } ssr/>
<Route path=path!("naive-fallback") view=|| view! {
<NaiveEvent hook=true fallback=true/>
} ssr=SsrMode::Async/>
<Route path=path!("signal-effect-script") view=CodeDemoSignalEffect ssr=SsrMode::Async/>
<Route path=path!("custom-event") view=CustomEvent ssr=SsrMode::Async/>
<Route path=path!("wasm-bindgen-naive") view=WasmBindgenNaive ssr=SsrMode::Async/>
<Route path=path!("wasm-bindgen-event") view=WasmBindgenJSHookReadyEvent ssr=SsrMode::Async/>
<Route path=path!("wasm-bindgen-effect") view=WasmBindgenEffect ssr=SsrMode::Async/>
<Route path=path!("wasm-bindgen-direct") view=WasmBindgenDirect ssr=SsrMode::Async/>
<Route path=path!("wasm-bindgen-direct-fixed") view=WasmBindgenDirectFixed ssr=SsrMode::Async/>
} ssr/>
<Route path=path!("signal-effect-script") view=CodeDemoSignalEffect ssr/>
<Route path=path!("custom-event") view=CustomEvent ssr/>
<Route path=path!("wasm-bindgen-naive") view=WasmBindgenNaive ssr/>
<Route path=path!("wasm-bindgen-event") view=WasmBindgenJSHookReadyEvent ssr/>
<Route path=path!("wasm-bindgen-effect") view=WasmBindgenEffect ssr/>
<Route path=path!("wasm-bindgen-direct") view=WasmBindgenDirect ssr/>
<Route path=path!("wasm-bindgen-direct-fixed") view=WasmBindgenDirectFixed ssr/>
</FlatRoutes>
</article>
</main>

View File

@@ -2,8 +2,6 @@
name = "counter_isomorphic"
version = "0.1.0"
edition = "2021"
# std::sync::LazyLock is stabilized in Rust version 1.80.0
rust-version = "1.80.0"
[lib]
crate-type = ["cdylib", "rlib"]
@@ -19,6 +17,7 @@ broadcaster = "1.0"
console_log = "1.0"
console_error_panic_hook = "0.1.7"
futures = "0.3.30"
lazy_static = "1.5"
leptos = { path = "../../leptos" }
leptos_actix = { path = "../../integrations/actix", optional = true }
leptos_router = { path = "../../router" }
@@ -47,13 +46,13 @@ denylist = ["actix-files", "actix-web", "leptos_actix"]
skip_feature_sets = [["ssr", "hydrate"]]
[package.metadata.leptos]
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
output-name = "counter_isomorphic"
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
# When NOT using cargo-leptos this must be updated to "." or the counters will not work. The above warning still applies if you do switch to cargo-leptos later.
site-root = "target/site"
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
# Defaults to pkg
# Defaults to pkg
site-pkg-dir = "pkg"
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
# style-file = "src/styles/tailwind.css"

View File

@@ -10,12 +10,12 @@ use tracing::instrument;
pub mod ssr_imports {
pub use broadcaster::BroadcastChannel;
pub use std::sync::atomic::{AtomicI32, Ordering};
use std::sync::LazyLock;
pub static COUNT: AtomicI32 = AtomicI32::new(0);
pub static COUNT_CHANNEL: LazyLock<BroadcastChannel<i32>> =
LazyLock::new(BroadcastChannel::<i32>::new);
lazy_static::lazy_static! {
pub static ref COUNT_CHANNEL: BroadcastChannel<i32> = BroadcastChannel::new();
}
}
#[server]

View File

@@ -1 +0,0 @@
extend = [{ path = "../cargo-make/main.toml" }]

View File

@@ -1,5 +1,6 @@
use self::properties::Connect;
use gtk::{
ffi::GtkWidget,
glib::{
object::{IsA, IsClass, ObjectExt},
Object, Value,
@@ -15,7 +16,7 @@ use leptos::{
},
};
use next_tuple::NextTuple;
use std::marker::PhantomData;
use std::{borrow::Cow, marker::PhantomData};
#[derive(Debug)]
pub struct LeptosGtk;
@@ -156,13 +157,13 @@ impl Renderer for LeptosGtk {
}
fn remove_node(
_parent: &Self::Element,
_child: &Self::Node,
parent: &Self::Element,
child: &Self::Node,
) -> Option<Self::Node> {
todo!()
}
fn remove(_node: &Self::Node) {
fn remove(node: &Self::Node) {
todo!()
}
@@ -170,19 +171,19 @@ impl Renderer for LeptosGtk {
node.0.parent().map(Element::from)
}
fn first_child(_node: &Self::Node) -> Option<Self::Node> {
fn first_child(node: &Self::Node) -> Option<Self::Node> {
todo!()
}
fn next_sibling(_node: &Self::Node) -> Option<Self::Node> {
fn next_sibling(node: &Self::Node) -> Option<Self::Node> {
todo!()
}
fn log_node(node: &Self::Node) {
println!("{node:?}");
todo!()
}
fn clear_children(_parent: &Self::Element) {
fn clear_children(parent: &Self::Element) {
todo!()
}
}
@@ -367,22 +368,7 @@ where
})
}
fn rebuild(self, widget: &Element, state: &mut Self::State) {
let prev_value = state.take_value();
let widget = widget.to_owned();
*state = RenderEffect::new_with_value(
move |prev| {
let value = self();
if let Some(mut state) = prev {
value.rebuild(&widget, &mut state);
state
} else {
unreachable!()
}
},
prev_value,
);
}
fn rebuild(self, widget: &Element, state: &mut Self::State) {}
}
pub fn button() -> LGtkWidget<gtk::Button, (), ()> {
@@ -414,9 +400,9 @@ mod widgets {
}
pub mod properties {
#![allow(dead_code)]
use super::{Element, LGtkWidget, LeptosGtk, Property, WidgetClass};
use super::{
Element, LGtkWidget, LGtkWidgetState, LeptosGtk, Property, WidgetClass,
};
use gtk::glib::{object::ObjectExt, Value};
use leptos::tachys::{renderer::Renderer, view::Render};
use next_tuple::NextTuple;
@@ -439,9 +425,7 @@ pub mod properties {
element.0.connect(self.signal_name, false, self.callback);
}
fn rebuild(self, _element: &Element, _state: &mut Self::State) {
// TODO we want to *remove* the previous listener, and reconnect with this new one
}
fn rebuild(self, element: &Element, state: &mut Self::State) {}
}
/* examples for macro */
@@ -544,7 +528,7 @@ pub mod properties {
}
/* end examples for properties macro */
#[derive(Debug)]
pub struct Label {
value: String,
}
@@ -570,9 +554,7 @@ pub mod properties {
}
fn rebuild(self, element: &Element, state: &mut Self::State) {
if self.value != state.value {
LeptosGtk::set_attribute(element, "label", &self.value);
}
todo!()
}
}

View File

@@ -31,6 +31,7 @@ tokio = { version = "1.39", features = ["full"], optional = true }
http = { version = "1.1", optional = true }
web-sys = { version = "0.3.70", features = ["AbortController", "AbortSignal"] }
wasm-bindgen = "0.2.93"
lazy_static = "1.5"
rust-embed = { version = "8.5", features = [
"axum",
"mime_guess",

View File

@@ -162,24 +162,22 @@ pub fn App() -> impl IntoView {
<table class="table table-hover table-striped test-data">
<tbody>
<For
each=move || data.get()
key=|row| row.id
each={move || data.get()}
key={|row| row.id}
children=move |row: RowData| {
let row_id = row.id;
let label = row.label;
let is_selected = is_selected.clone();
template! {
< tr class : danger = { move || is_selected.selected(Some(row_id)) }
> < td class = "col-md-1" > { row_id.to_string() } </ td > < td
class = "col-md-4" >< a on : click = move | _ | set_selected
.set(Some(row_id)) > { move || label.get() } </ a ></ td > < td
class = "col-md-1" >< a on : click = move | _ | remove(row_id) ><
span class = "glyphicon glyphicon-remove" aria - hidden = "true" ></
span ></ a ></ td > < td class = "col-md-6" /> </ tr >
}
ViewTemplate::new(view! {
<tr class:danger={move || is_selected.selected(Some(row_id))}>
<td class="col-md-1">{row_id.to_string()}</td>
<td class="col-md-4"><a on:click=move |_| set_selected.set(Some(row_id))>{move || label.get()}</a></td>
<td class="col-md-1"><a on:click=move |_| remove(row_id)><span class="glyphicon glyphicon-remove" aria-hidden="true"></span></a></td>
<td class="col-md-6"/>
</tr>
})
}
/>
</tbody>
</table>
<span class="preloadicon glyphicon glyphicon-remove" aria-hidden="true"></span>

View File

@@ -2,8 +2,6 @@
name = "ssr_modes"
version = "0.1.0"
edition = "2021"
# std::sync::LazyLock is stabilized in Rust version 1.80.0
rust-version = "1.80.0"
[lib]
crate-type = ["cdylib", "rlib"]
@@ -13,6 +11,7 @@ actix-files = { version = "0.6.6", optional = true }
actix-web = { version = "4.8", optional = true, features = ["macros"] }
console_error_panic_hook = "0.1.7"
console_log = "1.0"
lazy_static = "1.5"
leptos = { path = "../../leptos" }
leptos_meta = { path = "../../meta" }
leptos_actix = { path = "../../integrations/actix", optional = true }
@@ -39,12 +38,12 @@ denylist = ["actix-files", "actix-web", "leptos_actix"]
skip_feature_sets = [["ssr", "hydrate"]]
[package.metadata.leptos]
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
output-name = "ssr_modes"
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
site-root = "target/site"
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
# Defaults to pkg
# Defaults to pkg
site-pkg-dir = "pkg"
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
style-file = "style/main.scss"

View File

@@ -1,5 +1,4 @@
use std::sync::LazyLock;
use lazy_static::lazy_static;
use leptos::prelude::*;
use leptos_meta::*;
use leptos_router::{
@@ -147,9 +146,8 @@ fn Post() -> impl IntoView {
}
// Dummy API
static POSTS: LazyLock<[Post; 3]> = LazyLock::new(|| {
[
lazy_static! {
static ref POSTS: Vec<Post> = vec![
Post {
id: 0,
title: "My first post".to_string(),
@@ -165,8 +163,8 @@ static POSTS: LazyLock<[Post; 3]> = LazyLock::new(|| {
title: "My third post".to_string(),
content: "This is my third post".to_string(),
},
]
});
];
}
#[derive(Error, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum PostError {

View File

@@ -2,8 +2,6 @@
name = "ssr_modes_axum"
version = "0.1.0"
edition = "2021"
# std::sync::LazyLock is stabilized in Rust version 1.80.0
rust-version = "1.80.0"
[lib]
crate-type = ["cdylib", "rlib"]
@@ -11,6 +9,7 @@ crate-type = ["cdylib", "rlib"]
[dependencies]
console_error_panic_hook = "0.1.7"
console_log = "1.0"
lazy_static = "1.5"
leptos = { path = "../../leptos", features = [
"hydration",
] } #"nightly", "hydration"] }

View File

@@ -1,5 +1,4 @@
use std::sync::LazyLock;
use lazy_static::lazy_static;
use leptos::prelude::*;
use leptos_meta::MetaTags;
use leptos_meta::*;
@@ -262,9 +261,8 @@ pub fn Admin() -> impl IntoView {
}
// Dummy API
static POSTS: LazyLock<[Post; 3]> = LazyLock::new(|| {
[
lazy_static! {
static ref POSTS: Vec<Post> = vec![
Post {
id: 0,
title: "My first post".to_string(),
@@ -280,8 +278,8 @@ static POSTS: LazyLock<[Post; 3]> = LazyLock::new(|| {
title: "My third post".to_string(),
content: "This is my third post".to_string(),
},
]
});
];
}
#[derive(Error, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub enum PostError {

View File

@@ -13,9 +13,6 @@ leptos = { path = "../../leptos", features = ["csr"] }
reactive_stores = { path = "../../reactive_stores" }
reactive_stores_macro = { path = "../../reactive_stores_macro" }
console_error_panic_hook = "0.1.7"
chrono = { version = "0.4.38", features = ["serde"] }
serde = { version = "1.0.210", features = ["derive"] }
serde_json = "1.0.128"
[dev-dependencies]
wasm-bindgen = "0.2.93"

View File

@@ -3,11 +3,6 @@
<head>
<link data-trunk rel="rust" data-wasm-opt="z"/>
<link data-trunk rel="icon" type="image/ico" href="/public/favicon.ico"/>
<style>
.hidden {
display: none;
}
</style>
</head>
<body></body>
</html>
</html>

View File

@@ -1,88 +1,43 @@
use std::sync::atomic::{AtomicUsize, Ordering};
use chrono::{Local, NaiveDate};
use leptos::prelude::*;
use reactive_stores::{Field, Patch, Store};
use reactive_stores_macro::{Patch, Store};
use serde::{Deserialize, Serialize};
use reactive_stores::{Field, Store, StoreFieldIterator};
use reactive_stores_macro::Store;
// ID starts higher than 0 because we have a few starting todos by default
static NEXT_ID: AtomicUsize = AtomicUsize::new(3);
#[derive(Debug, Store, Serialize, Deserialize)]
#[derive(Debug, Store)]
struct Todos {
user: User,
#[store(key: usize = |todo| todo.id)]
user: String,
todos: Vec<Todo>,
}
#[derive(Debug, Store, Patch, Serialize, Deserialize)]
struct User {
name: String,
email: String,
}
#[derive(Debug, Store, Serialize, Deserialize)]
#[derive(Debug, Store)]
struct Todo {
id: usize,
label: String,
status: Status,
}
#[derive(Debug, Default, Clone, Store, Serialize, Deserialize)]
enum Status {
#[default]
Pending,
Scheduled,
ScheduledFor {
date: NaiveDate,
},
Done,
}
impl Status {
pub fn next_step(&mut self) {
*self = match self {
Status::Pending => Status::ScheduledFor {
date: Local::now().naive_local().into(),
},
Status::Scheduled | Status::ScheduledFor { .. } => Status::Done,
Status::Done => Status::Done,
};
}
completed: bool,
}
impl Todo {
pub fn new(label: impl ToString) -> Self {
Self {
id: NEXT_ID.fetch_add(1, Ordering::Relaxed),
label: label.to_string(),
status: Status::Pending,
completed: false,
}
}
}
fn data() -> Todos {
Todos {
user: User {
name: "Bob".to_string(),
email: "lawblog@bobloblaw.com".into(),
},
user: "Bob".to_string(),
todos: vec![
Todo {
id: 0,
label: "Create reactive store".to_string(),
status: Status::Pending,
completed: true,
},
Todo {
id: 1,
label: "???".to_string(),
status: Status::Pending,
completed: false,
},
Todo {
id: 2,
label: "Profit".to_string(),
status: Status::Pending,
completed: false,
},
],
}
@@ -94,10 +49,17 @@ pub fn App() -> impl IntoView {
let input_ref = NodeRef::new();
let rows = move || {
store
.todos()
.iter()
.enumerate()
.map(|(idx, todo)| view! { <TodoRow store idx todo/> })
.collect_view()
};
view! {
<p>"Hello, " {move || store.user().name().get()}</p>
<UserForm user=store.user()/>
<hr/>
<p>"Hello, " {move || store.user().get()}</p>
<form on:submit=move |ev| {
ev.prevent_default();
store.todos().write().push(Todo::new(input_ref.get().unwrap().value()));
@@ -105,69 +67,30 @@ pub fn App() -> impl IntoView {
<label>"Add a Todo" <input type="text" node_ref=input_ref/></label>
<input type="submit"/>
</form>
<ol>
// because `todos` is a keyed field, `store.todos()` returns a struct that
// directly implements IntoIterator, so we can use it in <For/> and
// it will manage reactivity for the store fields correctly
<For
each=move || {
leptos::logging::log!("RERUNNING FOR CALCULATION");
store.todos()
}
key=|row| row.id().get()
let:todo
>
<TodoRow store todo/>
</For>
</ol>
<pre>{move || serde_json::to_string_pretty(&*store.read())}</pre>
}
}
#[component]
fn UserForm(#[prop(into)] user: Field<User>) -> impl IntoView {
let error = RwSignal::new(None);
view! {
{move || error.get().map(|n| view! { <p>{n}</p> })}
<form on:submit:target=move |ev| {
ev.prevent_default();
match User::from_event(&ev) {
Ok(new_user) => {
error.set(None);
user.patch(new_user);
}
Err(e) => error.set(Some(e.to_string())),
}
}>
<label>
"Name" <input type="text" name="name" prop:value=move || user.name().get()/>
</label>
<label>
"Email" <input type="email" name="email" prop:value=move || user.email().get()/>
</label>
<input type="submit"/>
</form>
<ol>{rows}</ol>
<div style="display: flex"></div>
}
}
#[component]
fn TodoRow(
store: Store<Todos>,
idx: usize,
#[prop(into)] todo: Field<Todo>,
) -> impl IntoView {
let status = todo.status();
let completed = todo.completed();
let title = todo.label();
let editing = RwSignal::new(true);
let editing = RwSignal::new(false);
view! {
<li style:text-decoration=move || {
status.done().then_some("line-through").unwrap_or_default()
}>
<li
style:text-decoration=move || {
completed.get().then_some("line-through").unwrap_or_default()
}
class:foo=move || completed.get()
>
<p
class:hidden=move || editing.get()
on:click=move |_| {
@@ -183,48 +106,25 @@ fn TodoRow(
prop:value=move || title.get()
on:change=move |ev| {
title.set(event_target_value(&ev));
editing.set(false);
}
on:blur=move |_| editing.set(false)
autofocus
/>
<button on:click=move |_| {
status.write().next_step()
}>
{move || {
if todo.status().done() {
"Done"
} else if status.scheduled() || status.scheduled_for() {
"Scheduled"
} else {
"Pending"
}
}}
</button>
<button on:click=move |_| {
let id = todo.id().get();
store.todos().write().retain(|todo| todo.id != id);
}>"X"</button>
<input
type="date"
prop:value=move || {
todo.status().scheduled_for_date().map(|n| n.get().to_string())
}
class:hidden=move || !todo.status().scheduled_for()
on:change:target=move |ev| {
if let Some(date) = todo.status().scheduled_for_date() {
let value = ev.target().value();
match NaiveDate::parse_from_str(&value, "%Y-%m-%d") {
Ok(new_date) => {
date.set(new_date);
}
Err(e) => warn!("{e}"),
}
}
}
type="checkbox"
prop:checked=move || completed.get()
on:click=move |_| { completed.update(|n| *n = !*n) }
/>
<button on:click=move |_| {
store
.todos()
.update(|todos| {
todos.remove(idx);
});
}>"X"</button>
</li>
}
}
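The hunk above swaps a plain `Vec<Todo>` todos field for a keyed one. A condensed sketch of that keyed-field pattern, using a subset of the derives and attributes shown in the hunk; it assumes the workspace's own `reactive_stores` / `reactive_stores_macro` path dependencies, and everything else is trimmed for brevity.

```rust
use reactive_stores_macro::Store;
use std::sync::atomic::{AtomicUsize, Ordering};

// ID counter starts above the seeded todos, as in the example.
static NEXT_ID: AtomicUsize = AtomicUsize::new(3);

#[derive(Debug, Store)]
struct Todos {
    user: User,
    // Keying the field by `id` lets `store.todos()` be iterated reactively
    // (e.g. inside <For/>) with per-row change tracking, as the comment in
    // the hunk above explains.
    #[store(key: usize = |todo| todo.id)]
    todos: Vec<Todo>,
}

#[derive(Debug, Store)]
struct User {
    name: String,
    email: String,
}

#[derive(Debug, Store)]
struct Todo {
    id: usize,
    label: String,
}

impl Todo {
    fn new(label: impl ToString) -> Self {
        Self {
            id: NEXT_ID.fetch_add(1, Ordering::Relaxed),
            label: label.to_string(),
        }
    }
}

fn main() {
    let _ = Todo::new("Profit");
}
```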

View File

@@ -1,6 +1,6 @@
[package]
name = "hydration_context"
version = "0.2.0-beta6"
version = "0.2.0-beta4"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

View File

@@ -38,7 +38,7 @@ tokio = { version = "1.39", features = ["net", "rt-multi-thread"] }
[features]
wasm = []
default = ["tokio/fs", "tokio/sync", "tower-http/fs", "tower/util"]
default = ["tokio/fs", "tokio/sync", "tower-http/fs"]
islands-router = []
tracing = ["dep:tracing"]

View File

@@ -4,7 +4,7 @@ use leptos_macro::component;
use reactive_graph::{
computed::ArcMemo,
effect::RenderEffect,
owner::{provide_context, Owner},
owner::Owner,
signal::ArcRwSignal,
traits::{Get, Update, With, WithUntracked},
};
@@ -13,7 +13,6 @@ use std::{fmt::Debug, marker::PhantomData, sync::Arc};
use tachys::{
html::attribute::Attribute,
hydration::Cursor,
reactive_graph::OwnedView,
renderer::Renderer,
ssr::StreamBuilder,
view::{
@@ -97,25 +96,17 @@ where
let hook = hook as Arc<dyn ErrorHook>;
let _guard = throw_error::set_error_hook(Arc::clone(&hook));
let children = children.into_inner()();
let owner = Owner::new();
let children = owner.with(|| {
provide_context(Arc::clone(&hook));
children.into_inner()()
});
OwnedView::new_with_owner(
ErrorBoundaryView {
hook,
boundary_id,
errors_empty,
children,
errors,
fallback,
rndr: PhantomData,
},
owner,
)
ErrorBoundaryView {
hook,
boundary_id,
errors_empty,
children,
errors,
fallback,
rndr: PhantomData,
}
}
struct ErrorBoundaryView<Chil, FalFn, Rndr> {

View File

@@ -168,7 +168,7 @@ pub mod prelude {
pub use leptos_server::*;
pub use oco_ref::*;
pub use reactive_graph::{
actions::*, computed::*, effect::*, owner::*, signal::*, untrack,
actions::*, computed::*, effect::*, owner::*, signal::*,
wrappers::read::*,
};
pub use server_fn::{self, ServerFnError};

View File

@@ -13,7 +13,7 @@ use reactive_graph::{
effect::RenderEffect,
owner::{provide_context, use_context, Owner},
signal::ArcRwSignal,
traits::{Dispose, Get, Read, Track, With},
traits::{Get, Read, Track, With},
};
use slotmap::{DefaultKey, SlotMap};
use tachys::{
@@ -286,7 +286,7 @@ where
self.children.dry_resolve();
// check the set of tasks to see if it is empty, now or later
let eff = reactive_graph::effect::Effect::new_isomorphic({
let eff = reactive_graph::effect::RenderEffect::new_isomorphic({
move |_| {
tasks.track();
if tasks.read().is_empty() {
@@ -338,7 +338,7 @@ where
}
children = children => {
// clean up the (now useless) effect
eff.dispose();
drop(eff);
Some(OwnedView::new_with_owner(children, owner))
}

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_macro"
version = "0.7.0-beta6"
version = "0.7.0-beta4"
authors = ["Greg Johnston"]
license = "MIT"
repository = "https://github.com/leptos-rs/leptos"

View File

@@ -18,7 +18,7 @@ use syn::{
};
pub struct Model {
island: Option<String>,
is_island: bool,
docs: Docs,
unknown_attrs: UnknownAttrs,
vis: Visibility,
@@ -62,7 +62,7 @@ impl Parse for Model {
});
Ok(Self {
island: None,
is_island: false,
docs,
unknown_attrs,
vis: item.vis.clone(),
@@ -102,7 +102,7 @@ pub fn convert_from_snake_case(name: &Ident) -> Ident {
impl ToTokens for Model {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
island,
is_island,
docs,
unknown_attrs,
vis,
@@ -111,7 +111,6 @@ impl ToTokens for Model {
body,
ret,
} = self;
let is_island = island.is_some();
let no_props = props.is_empty();
@@ -147,9 +146,9 @@ impl ToTokens for Model {
#[cfg(feature = "tracing")]
let trace_name = format!("<{name} />");
let is_island_with_children =
is_island && props.iter().any(|prop| prop.name.ident == "children");
let is_island_with_other_props = is_island
let is_island_with_children = *is_island
&& props.iter().any(|prop| prop.name.ident == "children");
let is_island_with_other_props = *is_island
&& ((is_island_with_children && props.len() > 1)
|| (!is_island_with_children && !props.is_empty()));
@@ -205,11 +204,11 @@ impl ToTokens for Model {
)]
},
quote! {
let __span = ::leptos::tracing::Span::current();
let span = ::leptos::tracing::Span::current();
},
quote! {
#[cfg(debug_assertions)]
let _guard = __span.entered();
let _guard = span.entered();
},
if no_props || !cfg!(feature = "trace-component-props") {
quote!()
@@ -231,8 +230,9 @@ impl ToTokens for Model {
let hydrate_fn_name = is_island.then(|| {
use std::hash::{Hash, Hasher};
let span = format!("{:?}", name.span());
let mut hasher = DefaultHasher::new();
island.hash(&mut hasher);
span.hash(&mut hasher);
let caller = hasher.finish() as usize;
Ident::new(&format!("{component_id}_{caller:?}"), name.span())
});
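A self-contained sketch of the hashing step in the hunk just above: the component macro derives a deterministic name for an island's hydrate function by hashing the island's source text and span. `stable_island_id` and its parameters are illustrative names, not part of the macro's API.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn stable_island_id(island_src: &str, span_debug: &str) -> usize {
    let mut hasher = DefaultHasher::new();
    island_src.hash(&mut hasher);
    span_debug.hash(&mut hasher);
    // Same inputs always yield the same value, which keeps the generated
    // hydrate function name stable across builds.
    hasher.finish() as usize
}

fn main() {
    println!("{}", stable_island_id("fn Counter() {}", "Span(1..15)"));
}
```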
@@ -253,7 +253,7 @@ impl ToTokens for Model {
};
let body_name = unmodified_fn_name_from_fn_name(&body_name);
let body_expr = if is_island {
let body_expr = if *is_island {
quote! {
::leptos::reactive_graph::owner::Owner::with_hydration(move || {
#body_name(#prop_names)
@@ -276,7 +276,7 @@ impl ToTokens for Model {
};
// add island wrapper if island
let component = if is_island {
let component = if *is_island {
let hydrate_fn_name = hydrate_fn_name.as_ref().unwrap();
quote! {
{
@@ -343,64 +343,45 @@ impl ToTokens for Model {
#component
};
let binding = if is_island {
let binding = if *is_island {
let island_props = if is_island_with_children
|| is_island_with_other_props
{
let (destructure, prop_builders, optional_props) =
if is_island_with_other_props {
let prop_names = props
.iter()
.filter_map(|prop| {
if prop.name.ident == "children" {
None
} else {
let name = &prop.name.ident;
Some(quote! { #name, })
}
})
.collect::<TokenStream>();
let destructure = quote! {
let #props_serialized_name {
#prop_names
} = props;
};
let prop_builders = props
.iter()
.filter_map(|prop| {
if prop.name.ident == "children"
|| prop.prop_opts.optional
{
None
} else {
let name = &prop.name.ident;
Some(quote! {
.#name(#name)
})
}
})
.collect::<TokenStream>();
let optional_props = props
.iter()
.filter_map(|prop| {
if prop.name.ident == "children"
|| !prop.prop_opts.optional
{
None
} else {
let name = &prop.name.ident;
Some(quote! {
if let Some(#name) = #name {
props.#name = Some(#name)
}
})
}
})
.collect::<TokenStream>();
(destructure, prop_builders, optional_props)
} else {
(quote! {}, quote! {}, quote! {})
let (destructure, prop_builders) = if is_island_with_other_props
{
let prop_names = props
.iter()
.filter_map(|prop| {
if prop.name.ident == "children" {
None
} else {
let name = &prop.name.ident;
Some(quote! { #name, })
}
})
.collect::<TokenStream>();
let destructure = quote! {
let #props_serialized_name {
#prop_names
} = props;
};
let prop_builders = props
.iter()
.filter_map(|prop| {
if prop.name.ident == "children" {
None
} else {
let name = &prop.name.ident;
Some(quote! {
.#name(#name)
})
}
})
.collect::<TokenStream>();
(destructure, prop_builders)
} else {
(quote! {}, quote! {})
};
let children = if is_island_with_children {
quote! {
.children({Box::new(|| {
@@ -424,14 +405,10 @@ impl ToTokens for Model {
quote! {{
#destructure
let mut props = #props_name::builder()
#props_name::builder()
#prop_builders
#children
.build();
#optional_props
props
.build()
}}
} else {
quote! {}
@@ -521,8 +498,8 @@ impl ToTokens for Model {
impl Model {
#[allow(clippy::wrong_self_convention)]
pub fn with_island(mut self, island: Option<String>) -> Self {
self.island = island;
pub fn is_island(mut self, is_island: bool) -> Self {
self.is_island = is_island;
self
}

View File

@@ -266,21 +266,6 @@ mod slot;
#[proc_macro]
#[cfg_attr(feature = "tracing", tracing::instrument(level = "trace", skip_all))]
pub fn view(tokens: TokenStream) -> TokenStream {
view_macro_impl(tokens, false)
}
/// The `template` macro behaves like [`view`], except that it wraps the entire tree in a
/// [`ViewTemplate`](leptos::prelude::ViewTemplate). This optimizes creation speed by rendering
/// most of the view into a `<template>` tag with HTML rendered at compile time, then hydrating it.
/// In exchange, there is a small binary size overhead.
#[proc_macro_error2::proc_macro_error]
#[proc_macro]
#[cfg_attr(feature = "tracing", tracing::instrument(level = "trace", skip_all))]
pub fn template(tokens: TokenStream) -> TokenStream {
view_macro_impl(tokens, true)
}
fn view_macro_impl(tokens: TokenStream, template: bool) -> TokenStream {
let tokens: proc_macro2::TokenStream = tokens.into();
let mut tokens = tokens.into_iter();
@@ -317,19 +302,18 @@ fn view_macro_impl(tokens: TokenStream, template: bool) -> TokenStream {
};
let config = rstml::ParserConfig::default().recover_block(true);
let parser = rstml::Parser::new(config);
let (mut nodes, errors) = parser.parse_recoverable(tokens).split_vec();
let (nodes, errors) = parser.parse_recoverable(tokens).split_vec();
let errors = errors.into_iter().map(|e| e.emit_as_expr_tokens());
let nodes_output = view::render_view(
&mut nodes,
&nodes,
global_class.as_ref(),
normalized_call_site(proc_macro::Span::call_site()),
template,
);
// The allow lint needs to be put here instead of at the expansion of
// view::attribute_value(). Adding this next to the expanded expression
// seems to break rust-analyzer, but it works when the allow is put here.
let output = quote! {
quote! {
{
#[allow(unused_braces)]
{
@@ -337,14 +321,6 @@ fn view_macro_impl(tokens: TokenStream, template: bool) -> TokenStream {
#nodes_output
}
}
};
if template {
quote! {
::leptos::prelude::ViewTemplate::new(#output)
}
} else {
output
}
.into()
}
@@ -539,7 +515,7 @@ pub fn component(
_args: proc_macro::TokenStream,
s: TokenStream,
) -> TokenStream {
component_macro(s, None)
component_macro(s, false)
}
/// Defines a component as an interactive island when you are using the
@@ -616,16 +592,15 @@ pub fn component(
#[proc_macro_error2::proc_macro_error]
#[proc_macro_attribute]
pub fn island(_args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
let island_src = s.to_string();
component_macro(s, Some(island_src))
component_macro(s, true)
}
fn component_macro(s: TokenStream, island: Option<String>) -> TokenStream {
fn component_macro(s: TokenStream, island: bool) -> TokenStream {
let mut dummy = syn::parse::<DummyModel>(s.clone());
let parse_result = syn::parse::<component::Model>(s);
if let (Ok(ref mut unexpanded), Ok(model)) = (&mut dummy, parse_result) {
let expanded = model.with_island(island).into_token_stream();
let expanded = model.is_island(island).into_token_stream();
if !matches!(unexpanded.vis, Visibility::Public(_)) {
unexpanded.vis = Visibility::Public(Pub {
span: unexpanded.vis.span(),

View File

@@ -10,10 +10,11 @@ use std::collections::HashMap;
use syn::{spanned::Spanned, Expr, ExprPath, ExprRange, RangeLimits, Stmt};
pub(crate) fn component_to_tokens(
node: &mut NodeElement<impl CustomNode>,
node: &NodeElement<impl CustomNode>,
global_class: Option<&TokenTree>,
disable_inert_html: bool,
) -> TokenStream {
let name = node.name();
#[allow(unused)] // TODO this is used by hot-reloading
#[cfg(debug_assertions)]
let component_name = super::ident_from_tag_name(node.name());
@@ -44,21 +45,16 @@ pub(crate) fn component_to_tokens(
})
.unwrap_or_else(|| node.attributes().len());
let attrs = node
.attributes()
.iter()
.filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
Some(node)
} else {
None
}
})
.cloned()
.collect::<Vec<_>>();
let attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
Some(node)
} else {
None
}
});
let props = attrs
.iter()
.clone()
.enumerate()
.filter(|(idx, attr)| {
idx < &spread_marker && {
@@ -89,7 +85,7 @@ pub(crate) fn component_to_tokens(
});
let items_to_bind = attrs
.iter()
.clone()
.filter_map(|attr| {
if !is_attr_let(&attr.key) {
return None;
@@ -111,7 +107,7 @@ pub(crate) fn component_to_tokens(
.collect::<Vec<_>>();
let items_to_clone = attrs
.iter()
.clone()
.filter_map(|attr| {
attr.key
.to_string()
@@ -187,12 +183,11 @@ pub(crate) fn component_to_tokens(
quote! {}
} else {
let children = fragment_to_tokens(
&mut node.children,
&node.children,
TagType::Unknown,
Some(&mut slots),
global_class,
None,
disable_inert_html,
);
// TODO view marker for hot-reloading
@@ -266,7 +261,6 @@ pub(crate) fn component_to_tokens(
quote! {}
};
let name = node.name();
#[allow(unused_mut)] // used in debug
let mut component = quote! {
{

View File

@@ -13,10 +13,7 @@ use rstml::node::{
CustomNode, KVAttributeValue, KeyedAttribute, Node, NodeAttribute,
NodeBlock, NodeElement, NodeName, NodeNameFragment,
};
use std::{
cmp::Ordering,
collections::{HashMap, HashSet, VecDeque},
};
use std::collections::{HashMap, HashSet};
use syn::{
spanned::Spanned, Expr, Expr::Tuple, ExprLit, ExprRange, Lit, LitStr,
RangeLimits, Stmt,
@@ -31,10 +28,9 @@ pub(crate) enum TagType {
}
pub fn render_view(
nodes: &mut [Node],
nodes: &[Node],
global_class: Option<&TokenTree>,
view_marker: Option<String>,
disable_inert_html: bool,
) -> Option<TokenStream> {
let (base, should_add_view) = match nodes.len() {
0 => {
@@ -48,13 +44,11 @@ pub fn render_view(
}
1 => (
node_to_tokens(
&mut nodes[0],
&nodes[0],
TagType::Unknown,
None,
global_class,
view_marker.as_deref(),
true,
disable_inert_html,
),
// only add View wrapper and view marker to a regular HTML
// element or component, not to a <{..} /> attribute list
@@ -70,7 +64,6 @@ pub fn render_view(
None,
global_class,
view_marker.as_deref(),
disable_inert_html,
),
true,
),
@@ -95,287 +88,12 @@ pub fn render_view(
})
}
fn is_inert_element(orig_node: &Node<impl CustomNode>) -> bool {
// do not use this if the top-level node is not an Element,
// or if it's an element with no children and no attrs
match orig_node {
Node::Element(el) => {
if el.attributes().is_empty() && el.children.is_empty() {
return false;
}
// also doesn't work if the top-level element is an SVG/MathML element
let el_name = el.name().to_string();
if is_svg_element(&el_name) || is_math_ml_element(&el_name) {
return false;
}
}
_ => return false,
}
// otherwise, walk over all the nodes to make sure everything is inert
let mut nodes = VecDeque::from([orig_node]);
while let Some(current_element) = nodes.pop_front() {
match current_element {
Node::Text(_) | Node::RawText(_) => {}
Node::Element(node) => {
if is_component_node(node) {
return false;
}
if is_spread_marker(node) {
return false;
}
match node.name() {
NodeName::Block(_) => return false,
_ => {
// check all attributes
for attr in node.attributes() {
match attr {
NodeAttribute::Block(_) => return false,
NodeAttribute::Attribute(attr) => {
let static_key =
!matches!(attr.key, NodeName::Block(_));
let static_value = match attr
.possible_value
.to_value()
{
None => true,
Some(value) => {
matches!(&value.value, KVAttributeValue::Expr(expr) if {
if let Expr::Lit(lit) = expr {
matches!(&lit.lit, Lit::Str(_))
} else {
false
}
})
}
};
if !static_key || !static_value {
return false;
}
}
}
}
// check all children
nodes.extend(&node.children);
}
}
}
_ => return false,
}
}
true
}
enum Item<'a, T> {
Node(&'a Node<T>),
ClosingTag(String),
}
enum InertElementBuilder<'a> {
GlobalClass {
global_class: &'a TokenTree,
strs: Vec<GlobalClassItem<'a>>,
buffer: String,
},
NoGlobalClass {
buffer: String,
},
}
impl<'a> ToTokens for InertElementBuilder<'a> {
fn to_tokens(&self, tokens: &mut TokenStream) {
match self {
InertElementBuilder::GlobalClass { strs, .. } => {
tokens.extend(quote! {
[#(#strs),*].join("")
});
}
InertElementBuilder::NoGlobalClass { buffer } => {
tokens.extend(quote! {
#buffer
})
}
}
}
}
enum GlobalClassItem<'a> {
Global(&'a TokenTree),
String(String),
}
impl<'a> ToTokens for GlobalClassItem<'a> {
fn to_tokens(&self, tokens: &mut TokenStream) {
let addl_tokens = match self {
GlobalClassItem::Global(v) => v.to_token_stream(),
GlobalClassItem::String(v) => v.to_token_stream(),
};
tokens.extend(addl_tokens);
}
}
impl<'a> InertElementBuilder<'a> {
fn new(global_class: Option<&'a TokenTree>) -> Self {
match global_class {
None => Self::NoGlobalClass {
buffer: String::new(),
},
Some(global_class) => Self::GlobalClass {
global_class,
strs: Vec::new(),
buffer: String::new(),
},
}
}
fn push(&mut self, c: char) {
match self {
InertElementBuilder::GlobalClass { buffer, .. } => buffer.push(c),
InertElementBuilder::NoGlobalClass { buffer } => buffer.push(c),
}
}
fn push_str(&mut self, s: &str) {
match self {
InertElementBuilder::GlobalClass { buffer, .. } => {
buffer.push_str(s)
}
InertElementBuilder::NoGlobalClass { buffer } => buffer.push_str(s),
}
}
fn push_class(&mut self, class: &str) {
match self {
InertElementBuilder::GlobalClass {
global_class,
strs,
buffer,
} => {
buffer.push_str(" class=\"");
strs.push(GlobalClassItem::String(std::mem::take(buffer)));
strs.push(GlobalClassItem::Global(global_class));
buffer.push(' ');
buffer.push_str(class);
buffer.push('"');
}
InertElementBuilder::NoGlobalClass { buffer } => {
buffer.push_str(" class=\"");
buffer.push_str(class);
buffer.push('"');
}
}
}
fn finish(&mut self) {
match self {
InertElementBuilder::GlobalClass { strs, buffer, .. } => {
strs.push(GlobalClassItem::String(std::mem::take(buffer)));
}
InertElementBuilder::NoGlobalClass { .. } => {}
}
}
}
fn inert_element_to_tokens(
node: &Node<impl CustomNode>,
global_class: Option<&TokenTree>,
) -> Option<TokenStream> {
let mut html = InertElementBuilder::new(global_class);
let mut nodes = VecDeque::from([Item::Node(node)]);
while let Some(current) = nodes.pop_front() {
match current {
Item::ClosingTag(tag) => {
// closing tag
html.push_str("</");
html.push_str(&tag);
html.push('>');
}
Item::Node(current) => {
match current {
Node::RawText(raw) => {
let text = raw.to_string_best();
html.push_str(&text);
}
Node::Text(text) => {
let text = text.value_string();
html.push_str(&text);
}
Node::Element(node) => {
let self_closing = is_self_closing(node);
let el_name = node.name().to_string();
// opening tag
html.push('<');
html.push_str(&el_name);
for attr in node.attributes() {
if let NodeAttribute::Attribute(attr) = attr {
let attr_name = attr.key.to_string();
if attr_name != "class" {
html.push(' ');
html.push_str(&attr_name);
}
if let Some(value) =
attr.possible_value.to_value()
{
if let KVAttributeValue::Expr(Expr::Lit(
lit,
)) = &value.value
{
if let Lit::Str(txt) = &lit.lit {
if attr_name == "class" {
html.push_class(&txt.value());
} else {
html.push_str("=\"");
html.push_str(&txt.value());
html.push('"');
}
}
}
};
}
}
html.push('>');
// render all children
if !self_closing {
nodes.push_front(Item::ClosingTag(el_name));
let children = node.children.iter().rev();
for child in children {
nodes.push_front(Item::Node(child));
}
}
}
_ => {}
}
}
}
}
html.finish();
Some(quote! {
::leptos::tachys::html::InertElement::new(#html)
})
}
fn element_children_to_tokens(
nodes: &mut [Node<impl CustomNode>],
nodes: &[Node<impl CustomNode>],
parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
view_marker: Option<&str>,
disable_inert_html: bool,
) -> Option<TokenStream> {
let children = children_to_tokens(
nodes,
@@ -383,8 +101,6 @@ fn element_children_to_tokens(
parent_slots,
global_class,
view_marker,
false,
disable_inert_html,
);
if children.is_empty() {
None
@@ -421,12 +137,11 @@ fn element_children_to_tokens(
}
fn fragment_to_tokens(
nodes: &mut [Node<impl CustomNode>],
nodes: &[Node<impl CustomNode>],
parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
view_marker: Option<&str>,
disable_inert_html: bool,
) -> Option<TokenStream> {
let children = children_to_tokens(
nodes,
@@ -434,8 +149,6 @@ fn fragment_to_tokens(
parent_slots,
global_class,
view_marker,
true,
disable_inert_html,
);
if children.is_empty() {
None
@@ -462,23 +175,19 @@ fn fragment_to_tokens(
}
fn children_to_tokens(
nodes: &mut [Node<impl CustomNode>],
nodes: &[Node<impl CustomNode>],
parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
view_marker: Option<&str>,
top_level: bool,
disable_inert_html: bool,
) -> Vec<TokenStream> {
if nodes.len() == 1 {
match node_to_tokens(
&mut nodes[0],
&nodes[0],
parent_type,
parent_slots,
global_class,
view_marker,
top_level,
disable_inert_html,
) {
Some(tokens) => vec![tokens],
None => vec![],
@@ -486,7 +195,7 @@ fn children_to_tokens(
} else {
let mut slots = HashMap::new();
let nodes = nodes
.iter_mut()
.iter()
.filter_map(|node| {
node_to_tokens(
node,
@@ -494,8 +203,6 @@ fn children_to_tokens(
Some(&mut slots),
global_class,
view_marker,
top_level,
disable_inert_html,
)
})
.collect();
@@ -512,16 +219,12 @@ fn children_to_tokens(
}
fn node_to_tokens(
node: &mut Node<impl CustomNode>,
node: &Node<impl CustomNode>,
parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
view_marker: Option<&str>,
top_level: bool,
disable_inert_html: bool,
) -> Option<TokenStream> {
let is_inert = !disable_inert_html && is_inert_element(node);
match node {
Node::Comment(_) => None,
Node::Doctype(node) => {
@@ -529,12 +232,11 @@ fn node_to_tokens(
Some(quote! { ::leptos::tachys::html::doctype(#value) })
}
Node::Fragment(fragment) => fragment_to_tokens(
&mut fragment.children,
&fragment.children,
parent_type,
parent_slots,
global_class,
view_marker,
disable_inert_html,
),
Node::Block(block) => Some(quote! { #block }),
Node::Text(text) => Some(text_to_tokens(&text.value)),
@@ -543,20 +245,13 @@ fn node_to_tokens(
let text = syn::LitStr::new(&text, raw.span());
Some(text_to_tokens(&text))
}
Node::Element(el_node) => {
if !top_level && is_inert {
inert_element_to_tokens(node, global_class)
} else {
element_to_tokens(
el_node,
parent_type,
parent_slots,
global_class,
view_marker,
disable_inert_html,
)
}
}
Node::Element(node) => element_to_tokens(
node,
parent_type,
parent_slots,
global_class,
view_marker,
),
Node::Custom(node) => Some(node.to_token_stream()),
}
}
@@ -575,57 +270,12 @@ fn text_to_tokens(text: &LitStr) -> TokenStream {
}
pub(crate) fn element_to_tokens(
node: &mut NodeElement<impl CustomNode>,
node: &NodeElement<impl CustomNode>,
mut parent_type: TagType,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
view_marker: Option<&str>,
disable_inert_html: bool,
) -> Option<TokenStream> {
// attribute sorting:
//
// the `class` and `style` attributes overwrite individual `class:` and `style:` attributes
// when they are set. as a result, we're going to sort the attributes so that `class` and
// `style` always come before all other attributes.
// if there's a spread marker, we don't want to move `class` or `style` before it
// so let's only sort attributes that come *before* a spread marker
let spread_position = node
.attributes()
.iter()
.position(|n| match n {
NodeAttribute::Block(node) => as_spread_attr(node).is_some(),
_ => false,
})
.unwrap_or_else(|| node.attributes().len());
// now, sort the attributes
node.attributes_mut()[0..spread_position].sort_by(|a, b| {
let key_a = match a {
NodeAttribute::Attribute(attr) => match &attr.key {
NodeName::Path(attr) => {
attr.path.segments.first().map(|n| n.ident.to_string())
}
_ => None,
},
_ => None,
};
let key_b = match b {
NodeAttribute::Attribute(attr) => match &attr.key {
NodeName::Path(attr) => {
attr.path.segments.first().map(|n| n.ident.to_string())
}
_ => None,
},
_ => None,
};
match (key_a.as_deref(), key_b.as_deref()) {
(Some("class"), _) | (Some("style"), _) => Ordering::Less,
(_, Some("class")) | (_, Some("style")) => Ordering::Greater,
_ => Ordering::Equal,
}
});
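As the comment above explains, the sort exists so that a plain `class`/`style` attribute is applied before any `class:`/`style:` shorthands, and only attributes written before a spread marker are reordered. A hedged sketch of the same ordering rule on bare keys, using a total-order sort key so the stable sort keeps every other attribute in its written order:

```rust
// Hedged sketch of the ordering rule: "class" and "style" sort first;
// everything else keeps its original relative order (the sort is stable).
fn sort_class_style_first(keys: &mut Vec<&str>) {
    keys.sort_by_key(|key| match *key {
        "class" | "style" => 0u8,
        _ => 1,
    });
}

fn main() {
    let mut keys = vec!["id", "style", "data-x", "class"];
    sort_class_style_first(&mut keys);
    assert_eq!(keys, ["style", "class", "id", "data-x"]);
}
```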
// check for duplicate attribute names and emit an error for all subsequent ones
let mut names = HashSet::new();
for attr in node.attributes() {
@@ -649,17 +299,10 @@ pub(crate) fn element_to_tokens(
let name = node.name();
if is_component_node(node) {
if let Some(slot) = get_slot(node) {
let slot = slot.clone();
slot_to_tokens(
node,
&slot,
parent_slots,
global_class,
disable_inert_html,
);
slot_to_tokens(node, slot, parent_slots, global_class);
None
} else {
Some(component_to_tokens(node, global_class, disable_inert_html))
Some(component_to_tokens(node, global_class))
}
} else if is_spread_marker(node) {
let mut attributes = Vec::new();
@@ -771,12 +414,11 @@ pub(crate) fn element_to_tokens(
let self_closing = is_self_closing(node);
let children = if !self_closing {
element_children_to_tokens(
&mut node.children,
&node.children,
parent_type,
parent_slots,
global_class,
view_marker,
disable_inert_html,
)
} else {
if !node.children.is_empty() {
@@ -821,25 +463,6 @@ fn is_spread_marker(node: &NodeElement<impl CustomNode>) -> bool {
}
}
fn as_spread_attr(node: &NodeBlock) -> Option<Option<&Expr>> {
if let NodeBlock::ValidBlock(block) = node {
match block.stmts.first() {
Some(Stmt::Expr(
Expr::Range(ExprRange {
start: None,
limits: RangeLimits::HalfOpen(_),
end,
..
}),
_,
)) => Some(end.as_deref()),
_ => None,
}
} else {
None
}
}
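The `as_spread_attr` helper above and the inline match in `attribute_to_tokens` below both recognize a spread attribute by looking for a block whose first statement is a half-open range expression with no start, i.e. `{..some_attrs}`. A hedged sketch of that check in isolation, assuming `syn` with the `"full"` feature:

```rust
// Hedged sketch: detect a `{..expr}` spread block the same way the macro code
// does, returning the expression after the `..` if the shape matches.
use syn::{Expr, ExprRange, RangeLimits, Stmt};

fn spread_target(block: &syn::Block) -> Option<&Expr> {
    match block.stmts.first() {
        Some(Stmt::Expr(
            Expr::Range(ExprRange {
                start: None,
                limits: RangeLimits::HalfOpen(_),
                end: Some(end),
                ..
            }),
            _,
        )) => Some(&**end),
        _ => None,
    }
}

fn main() {
    let block: syn::Block = syn::parse_quote!({ ..extra_attrs });
    assert!(spread_target(&block).is_some());
}
```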
fn attribute_to_tokens(
tag_type: TagType,
node: &NodeAttribute,
@@ -847,18 +470,29 @@ fn attribute_to_tokens(
is_custom: bool,
) -> TokenStream {
match node {
NodeAttribute::Block(node) => as_spread_attr(node)
.flatten()
.map(|end| {
quote! {
.add_any_attr(#end)
NodeAttribute::Block(node) => {
let dotted = if let NodeBlock::ValidBlock(block) = node {
match block.stmts.first() {
Some(Stmt::Expr(
Expr::Range(ExprRange {
start: None,
limits: RangeLimits::HalfOpen(_),
end: Some(end),
..
}),
_,
)) => Some(quote! { .add_any_attr(#end) }),
_ => None,
}
})
.unwrap_or_else(|| {
} else {
None
};
dotted.unwrap_or_else(|| {
quote! {
.add_any_attr(#[allow(unused_braces)] { #node })
}
}),
})
}
NodeAttribute::Attribute(node) => {
let name = node.key.to_string();
if name == "node_ref" {

View File

@@ -7,11 +7,10 @@ use std::collections::HashMap;
use syn::spanned::Spanned;
pub(crate) fn slot_to_tokens(
node: &mut NodeElement<impl CustomNode>,
node: &NodeElement<impl CustomNode>,
slot: &KeyedAttribute,
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
global_class: Option<&TokenTree>,
disable_inert_html: bool,
) {
let name = slot.key.to_string();
let name = name.trim();
@@ -31,25 +30,20 @@ pub(crate) fn slot_to_tokens(
return;
};
let attrs = node
.attributes()
.iter()
.filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
if is_slot(node) {
None
} else {
Some(node)
}
} else {
let attrs = node.attributes().iter().filter_map(|node| {
if let NodeAttribute::Attribute(node) = node {
if is_slot(node) {
None
} else {
Some(node)
}
})
.cloned()
.collect::<Vec<_>>();
} else {
None
}
});
let props = attrs
.iter()
.clone()
.filter(|attr| {
!attr.key.to_string().starts_with("let:")
&& !attr.key.to_string().starts_with("clone:")
@@ -71,7 +65,7 @@ pub(crate) fn slot_to_tokens(
});
let items_to_bind = attrs
.iter()
.clone()
.filter_map(|attr| {
attr.key
.to_string()
@@ -81,7 +75,7 @@ pub(crate) fn slot_to_tokens(
.collect::<Vec<_>>();
let items_to_clone = attrs
.iter()
.clone()
.filter_map(|attr| {
attr.key
.to_string()
@@ -91,7 +85,6 @@ pub(crate) fn slot_to_tokens(
.collect::<Vec<_>>();
let dyn_attrs = attrs
.iter()
.filter(|attr| attr.key.to_string().starts_with("attr:"))
.filter_map(|attr| {
let name = &attr.key.to_string();
@@ -114,12 +107,11 @@ pub(crate) fn slot_to_tokens(
quote! {}
} else {
let children = fragment_to_tokens(
&mut node.children,
&node.children,
TagType::Unknown,
Some(&mut slots),
global_class,
None,
disable_inert_html,
);
// TODO view markers for hot-reloading

View File

@@ -9,7 +9,7 @@ use reactive_graph::{
},
owner::use_context,
signal::guards::{AsyncPlain, ReadGuard},
traits::{DefinedAt, IsDisposed, ReadUntracked},
traits::{DefinedAt, ReadUntracked},
};
use send_wrapper::SendWrapper;
use std::{
@@ -121,13 +121,6 @@ where
}
}
impl<T: 'static> IsDisposed for ArcLocalResource<T> {
#[inline(always)]
fn is_disposed(&self) -> bool {
false
}
}
impl<T: 'static> ToAnySource for ArcLocalResource<T> {
fn to_any_source(&self) -> AnySource {
self.data.to_any_source()
@@ -299,12 +292,6 @@ where
}
}
impl<T: 'static> IsDisposed for LocalResource<T> {
fn is_disposed(&self) -> bool {
self.data.is_disposed()
}
}
impl<T: 'static> ToAnySource for LocalResource<T>
where
T: Send + Sync + 'static,

View File

@@ -24,37 +24,12 @@ use reactive_graph::{
prelude::*,
signal::{ArcRwSignal, RwSignal},
};
use std::{future::IntoFuture, ops::Deref, panic::Location};
use std::{future::IntoFuture, ops::Deref};
pub struct ArcResource<T, Ser = JsonSerdeCodec> {
ser: PhantomData<Ser>,
refetch: ArcRwSignal<usize>,
data: ArcAsyncDerived<T>,
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
}
impl<T, Ser> Debug for ArcResource<T, Ser> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut d = f.debug_struct("ArcResource");
d.field("ser", &self.ser).field("data", &self.data);
#[cfg(debug_assertions)]
d.field("defined_at", self.defined_at);
d.finish_non_exhaustive()
}
}
impl<T, Ser> DefinedAt for ArcResource<T, Ser> {
fn defined_at(&self) -> Option<&'static Location<'static>> {
#[cfg(debug_assertions)]
{
Some(self.defined_at)
}
#[cfg(not(debug_assertions))]
{
None
}
}
}
impl<T, Ser> Clone for ArcResource<T, Ser> {
@@ -63,8 +38,6 @@ impl<T, Ser> Clone for ArcResource<T, Ser> {
ser: self.ser,
refetch: self.refetch.clone(),
data: self.data.clone(),
#[cfg(debug_assertions)]
defined_at: self.defined_at,
}
}
}
@@ -108,15 +81,13 @@ where
let is_ready = initial.is_some();
let refetch = ArcRwSignal::new(0);
let source = ArcMemo::new({
let refetch = refetch.clone();
move |_| (refetch.get(), source())
});
let source = ArcMemo::new(move |_| source());
let fun = {
let source = source.clone();
let refetch = refetch.clone();
move || {
let (_, source) = source.get();
fetcher(source)
refetch.track();
fetcher(source.get())
}
};
@@ -158,8 +129,6 @@ where
ser: PhantomData,
data,
refetch,
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
}
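Both sides of this hunk wire `refetch` the same basic way: a counter signal is tied to the fetcher, so bumping the counter makes the derived async value stale and the fetch runs again. A hedged, synchronous sketch of that wiring with `reactive_graph` primitives used elsewhere in this diff (the closure below is a stand-in for the real `async` fetcher):

```rust
// Hedged sketch of the refetch pattern: bundle a counter into the memoized
// source so that bumping the counter invalidates the memo and re-runs the
// fetch closure with the latest source value.
use reactive_graph::{computed::ArcMemo, prelude::*, signal::ArcRwSignal};

fn main() {
    let refetch = ArcRwSignal::new(0usize);
    let query = ArcRwSignal::new(String::from("leptos"));

    let source = ArcMemo::new({
        let refetch = refetch.clone();
        let query = query.clone();
        move |_| (refetch.get(), query.get())
    });

    let fetch = {
        let source = source.clone();
        // stand-in for the real async fetcher
        move || format!("fetching `{}`", source.get().1)
    };

    assert_eq!(fetch(), "fetching `leptos`");
    query.set(String::from("reactive_graph"));
    assert_eq!(fetch(), "fetching `reactive_graph`");
    refetch.update(|n| *n += 1); // same query, but the source is now stale again
}
```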
@@ -477,37 +446,6 @@ where
ser: PhantomData<Ser>,
data: AsyncDerived<T>,
refetch: RwSignal<usize>,
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
}
impl<T, Ser> Debug for Resource<T, Ser>
where
T: Send + Sync + 'static,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut d = f.debug_struct("Resource");

d.field("ser", &self.ser).field("data", &self.data);
#[cfg(debug_assertions)]
d.field("defined_at", self.defined_at);
d.finish_non_exhaustive()
}
}
impl<T, Ser> DefinedAt for Resource<T, Ser>
where
T: Send + Sync + 'static,
{
fn defined_at(&self) -> Option<&'static Location<'static>> {
#[cfg(debug_assertions)]
{
Some(self.defined_at)
}
#[cfg(not(debug_assertions))]
{
None
}
}
}
impl<T: Send + Sync + 'static, Ser> Copy for Resource<T, Ser> {}
@@ -763,8 +701,6 @@ where
ser: PhantomData,
data: data.into(),
refetch: refetch.into(),
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_meta"
version = "0.7.0-beta6"
version = "0.7.0-beta4"
authors = ["Greg Johnston"]
license = "MIT"
repository = "https://github.com/leptos-rs/leptos"

View File

@@ -29,7 +29,7 @@ use leptos::{
/// #[component]
/// fn MyApp() -> impl IntoView {
/// provide_meta_context();
/// let (prefers_dark, set_prefers_dark) = signal(false);
/// let (prefers_dark, set_prefers_dark) = create_signal(false);
/// let body_class = move || {
/// if prefers_dark.get() {
/// "dark".to_string()

View File

@@ -1,6 +1,6 @@
[package]
name = "next_tuple"
version = "0.1.0-beta6"
version = "0.1.0-beta4"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

View File

@@ -1,6 +1,6 @@
[package]
name = "reactive_graph"
version = "0.1.0-beta6"
version = "0.1.0-beta4"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"

View File

@@ -1,7 +1,7 @@
use crate::{
computed::{ArcMemo, Memo},
diagnostics::is_suppressing_resource_load,
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
signal::{ArcRwSignal, RwSignal},
traits::{DefinedAt, Dispose, Get, GetUntracked, Update},
unwrap_signal,
@@ -235,7 +235,7 @@ where
self.input.try_update(|inp| *inp = Some(input));
// Spawn the task
crate::spawn({
Executor::spawn({
let input = self.input.clone();
let version = self.version.clone();
let value = self.value.clone();
@@ -575,7 +575,7 @@ where
/// let action3 = Action::new(|input: &(usize, String)| async { todo!() });
/// ```
pub struct Action<I, O, S = SyncStorage> {
inner: ArenaItem<ArcAction<I, O>, S>,
inner: StoredValue<ArcAction<I, O>, S>,
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
}
@@ -639,7 +639,7 @@ where
Fu: Future<Output = O> + Send + 'static,
{
Self {
inner: ArenaItem::new(ArcAction::new(action_fn)),
inner: StoredValue::new(ArcAction::new(action_fn)),
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
@@ -664,7 +664,9 @@ where
Fu: Future<Output = O> + Send + 'static,
{
Self {
inner: ArenaItem::new(ArcAction::new_with_value(value, action_fn)),
inner: StoredValue::new(ArcAction::new_with_value(
value, action_fn,
)),
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
@@ -686,7 +688,7 @@ where
Fu: Future<Output = O> + Send + 'static,
{
Self {
inner: ArenaItem::new_local(ArcAction::new_unsync(action_fn)),
inner: StoredValue::new_local(ArcAction::new_unsync(action_fn)),
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
@@ -702,7 +704,7 @@ where
Fu: Future<Output = O> + Send + 'static,
{
Self {
inner: ArenaItem::new_local(ArcAction::new_unsync_with_value(
inner: StoredValue::new_local(ArcAction::new_unsync_with_value(
value, action_fn,
)),
#[cfg(debug_assertions)]
@@ -906,9 +908,7 @@ where
/// Calls the `async` function with a reference to the input type as its argument.
#[track_caller]
pub fn dispatch(&self, input: I) -> ActionAbortHandle {
self.inner
.try_with_value(|inner| inner.dispatch(input))
.unwrap_or_else(unwrap_signal!(self))
self.inner.with_value(|inner| inner.dispatch(input))
}
}
@@ -921,9 +921,7 @@ where
/// Calls the `async` function with a reference to the input type as its argument.
#[track_caller]
pub fn dispatch_local(&self, input: I) -> ActionAbortHandle {
self.inner
.try_with_value(|inner| inner.dispatch_local(input))
.unwrap_or_else(unwrap_signal!(self))
self.inner.with_value(|inner| inner.dispatch_local(input))
}
}
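For context on what `dispatch` does in both versions above, a hedged usage sketch modeled on the `Action::new(|input: &(usize, String)| async { ... })` constructor shown in this file's doc comments. It assumes an `any_spawner` executor has already been initialized, as in the crate's own doc examples; the saving logic is a placeholder:

```rust
// Hedged usage sketch: dispatching an Action clones the input, spawns the
// async work on the initialized executor, and exposes the result reactively.
use reactive_graph::actions::Action;

async fn save_to_server(text: String) -> usize {
    text.len() // stand-in for a real server round trip
}

fn save_button_handler() {
    let save = Action::new(|text: &String| {
        let text = text.clone();
        async move { save_to_server(text).await }
    });
    // Fire-and-forget: the returned handle could be kept to abort the task.
    let _handle = save.dispatch(String::from("hello"));
}
```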
@@ -944,7 +942,7 @@ where
Fu: Future<Output = O> + 'static,
{
Self {
inner: ArenaItem::new_with_storage(ArcAction::new_unsync(
inner: StoredValue::new_with_storage(ArcAction::new_unsync(
action_fn,
)),
#[cfg(debug_assertions)]
@@ -963,7 +961,7 @@ where
Fu: Future<Output = O> + 'static,
{
Self {
inner: ArenaItem::new_with_storage(
inner: StoredValue::new_with_storage(
ArcAction::new_unsync_with_value(value, action_fn),
),
#[cfg(debug_assertions)]

View File

@@ -1,10 +1,11 @@
use crate::{
diagnostics::is_suppressing_resource_load,
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
signal::{ArcReadSignal, ArcRwSignal, ReadSignal, RwSignal},
traits::{DefinedAt, Dispose, GetUntracked, Set, Update},
unwrap_signal,
};
use any_spawner::Executor;
use std::{fmt::Debug, future::Future, panic::Location, pin::Pin, sync::Arc};
/// An action that synchronizes multiple imperative `async` calls to the reactive system,
@@ -45,7 +46,7 @@ use std::{fmt::Debug, future::Future, panic::Location, pin::Pin, sync::Arc};
/// # });
/// ```
pub struct MultiAction<I, O, S = SyncStorage> {
inner: ArenaItem<ArcMultiAction<I, O>, S>,
inner: StoredValue<ArcMultiAction<I, O>, S>,
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
}
@@ -129,7 +130,9 @@ where
Fut: Future<Output = O> + Send + 'static,
{
Self {
inner: ArenaItem::new_with_storage(ArcMultiAction::new(action_fn)),
inner: StoredValue::new_with_storage(ArcMultiAction::new(
action_fn,
)),
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
@@ -187,7 +190,7 @@ where
/// ```
pub fn dispatch(&self, input: I) {
if !is_suppressing_resource_load() {
self.inner.try_with_value(|inner| inner.dispatch(input));
self.inner.with_value(|inner| inner.dispatch(input));
}
}
@@ -230,8 +233,7 @@ where
/// # });
/// ```
pub fn dispatch_sync(&self, value: O) {
self.inner
.try_with_value(|inner| inner.dispatch_sync(value));
self.inner.with_value(|inner| inner.dispatch_sync(value));
}
}
@@ -505,7 +507,7 @@ where
let version = self.version.clone();
crate::spawn(async move {
Executor::spawn(async move {
let new_value = fut.await;
let canceled = submission.canceled.get_untracked();
if !canceled {

View File

@@ -163,10 +163,10 @@ where
#[deprecated = "This function is being removed to conform to Rust idioms. \
Please use `Selector::new()` instead."]
pub fn create_selector<T>(
source: impl Fn() -> T + Clone + Send + Sync + 'static,
source: impl Fn() -> T + Clone + 'static,
) -> Selector<T>
where
T: PartialEq + Eq + Send + Sync + Clone + std::hash::Hash + 'static,
T: PartialEq + Eq + Clone + std::hash::Hash + 'static,
{
Selector::new(source)
}
@@ -178,11 +178,11 @@ where
#[deprecated = "This function is being removed to conform to Rust idioms. \
Please use `Selector::new_with_fn()` instead."]
pub fn create_selector_with_fn<T>(
source: impl Fn() -> T + Clone + Send + Sync + 'static,
source: impl Fn() -> T + Clone + 'static,
f: impl Fn(&T, &T) -> bool + Send + Sync + Clone + 'static,
) -> Selector<T>
where
T: PartialEq + Eq + Send + Sync + Clone + std::hash::Hash + 'static,
T: PartialEq + Eq + Clone + std::hash::Hash + 'static,
{
Selector::new_with_fn(source, f)
}

View File

@@ -9,7 +9,7 @@ use crate::{
guards::{Mapped, Plain, ReadGuard},
ArcReadSignal, ArcRwSignal,
},
traits::{DefinedAt, Get, IsDisposed, ReadUntracked},
traits::{DefinedAt, Get, ReadUntracked},
};
use core::fmt::Debug;
use or_poisoned::OrPoisoned;
@@ -260,16 +260,6 @@ where
}
}
impl<T: 'static, S> IsDisposed for ArcMemo<T, S>
where
S: Storage<T>,
{
#[inline(always)]
fn is_disposed(&self) -> bool {
false
}
}
impl<T: 'static, S> ToAnySource for ArcMemo<T, S>
where
S: Storage<T>,

View File

@@ -18,8 +18,7 @@ use crate::{
ArcTrigger,
},
traits::{
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
Writeable,
DefinedAt, Notify, ReadUntracked, Track, UntrackableGuard, Writeable,
},
transition::AsyncTransition,
};
@@ -233,8 +232,7 @@ macro_rules! spawn_derived {
sources: SourceSet::new(),
subscribers: SubscriberSet::new(),
state: AsyncDerivedState::Clean,
version: 0,
suspenses: Vec::new()
version: 0
}));
let value = Arc::new(AsyncRwLock::new($initial));
let wakers = Arc::new(RwLock::new(Vec::new()));
@@ -346,21 +344,14 @@ macro_rules! spawn_derived {
// generate and assign new value
loading.store(true, Ordering::Relaxed);
let (this_version, suspense_ids) = {
let this_version = {
let mut guard = inner.write().or_poisoned();
guard.version += 1;
let version = guard.version;
let suspense_ids = mem::take(&mut guard.suspenses)
.into_iter()
.map(|sc| sc.task_id())
.collect::<Vec<_>>();
(version, suspense_ids)
guard.version
};
let new_value = fut.await;
drop(suspense_ids);
let latest_version = inner.read().or_poisoned().version;
if latest_version == this_version {
@@ -579,15 +570,10 @@ impl<T: 'static> ReadUntracked for ArcAsyncDerived<T> {
if self.value.blocking_read().is_none() {
let handle = suspense_context.task_id();
let ready = SpecialNonReactiveFuture::new(self.ready());
crate::spawn(async move {
Executor::spawn(async move {
ready.await;
drop(handle);
});
self.inner
.write()
.or_poisoned()
.suspenses
.push(suspense_context);
}
}
AsyncPlain::try_new(&self.value).map(ReadGuard::new)
@@ -614,13 +600,6 @@ impl<T: 'static> Writeable for ArcAsyncDerived<T> {
}
}
impl<T: 'static> IsDisposed for ArcAsyncDerived<T> {
#[inline(always)]
fn is_disposed(&self) -> bool {
false
}
}
impl<T: 'static> ToAnySource for ArcAsyncDerived<T> {
fn to_any_source(&self) -> AnySource {
AnySource(

View File

@@ -4,11 +4,10 @@ use crate::{
AnySource, AnySubscriber, ReactiveNode, Source, Subscriber,
ToAnySource, ToAnySubscriber,
},
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
signal::guards::{AsyncPlain, ReadGuard, WriteGuard},
traits::{
DefinedAt, Dispose, IsDisposed, Notify, ReadUntracked,
UntrackableGuard, Writeable,
DefinedAt, Dispose, Notify, ReadUntracked, UntrackableGuard, Writeable,
},
unwrap_signal,
};
@@ -85,7 +84,7 @@ use std::{future::Future, ops::DerefMut, panic::Location};
pub struct AsyncDerived<T, S = SyncStorage> {
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
pub(crate) inner: ArenaItem<ArcAsyncDerived<T>, S>,
pub(crate) inner: StoredValue<ArcAsyncDerived<T>, S>,
}
impl<T, S> Dispose for AsyncDerived<T, S> {
@@ -104,7 +103,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at,
inner: ArenaItem::new_with_storage(value),
inner: StoredValue::new_with_storage(value),
}
}
}
@@ -119,7 +118,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at,
inner: ArenaItem::new_with_storage(value),
inner: StoredValue::new_with_storage(value),
}
}
}
@@ -141,7 +140,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcAsyncDerived::new(fun)),
inner: StoredValue::new_with_storage(ArcAsyncDerived::new(fun)),
}
}
@@ -159,7 +158,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(
inner: StoredValue::new_with_storage(
ArcAsyncDerived::new_with_initial(initial_value, fun),
),
}
@@ -176,7 +175,9 @@ impl<T> AsyncDerived<SendWrapper<T>> {
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcAsyncDerived::new_mock(fun)),
inner: StoredValue::new_with_storage(ArcAsyncDerived::new_mock(
fun,
)),
}
}
}
@@ -198,7 +199,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcAsyncDerived::new_unsync(
inner: StoredValue::new_with_storage(ArcAsyncDerived::new_unsync(
fun,
)),
}
@@ -219,7 +220,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(
inner: StoredValue::new_with_storage(
ArcAsyncDerived::new_unsync_with_initial(initial_value, fun),
),
}
@@ -321,16 +322,6 @@ where
}
}
impl<T, S> IsDisposed for AsyncDerived<T, S>
where
T: 'static,
S: Storage<ArcAsyncDerived<T>>,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
}
}
impl<T, S> ToAnySource for AsyncDerived<T, S>
where
T: 'static,

View File

@@ -1,9 +1,8 @@
use super::{inner::ArcAsyncDerivedInner, ArcAsyncDerived, AsyncDerived};
use super::{ArcAsyncDerived, AsyncDerived};
use crate::{
computed::suspense::SuspenseContext,
diagnostics::SpecialNonReactiveZone,
graph::{AnySource, ToAnySource},
owner::{use_context, Storage},
owner::Storage,
signal::guards::{AsyncPlain, Mapped, ReadGuard},
traits::{DefinedAt, Track},
unwrap_signal,
@@ -64,7 +63,6 @@ where
value: Arc::clone(&self.value),
loading: Arc::clone(&self.loading),
wakers: Arc::clone(&self.wakers),
inner: Arc::clone(&self.inner),
}
}
}
@@ -94,7 +92,6 @@ pub struct AsyncDerivedFuture<T> {
value: Arc<async_lock::RwLock<Option<T>>>,
loading: Arc<AtomicBool>,
wakers: Arc<RwLock<Vec<Waker>>>,
inner: Arc<RwLock<ArcAsyncDerivedInner>>,
}
impl<T> Future for AsyncDerivedFuture<T>
@@ -110,15 +107,6 @@ where
let waker = cx.waker();
self.source.track();
let value = self.value.read_arc();
if let Some(suspense_context) = use_context::<SuspenseContext>() {
self.inner
.write()
.or_poisoned()
.suspenses
.push(suspense_context);
}
pin_mut!(value);
match (self.loading.load(Ordering::Relaxed), value.poll(cx)) {
(true, _) => {

View File

@@ -1,6 +1,5 @@
use crate::{
channel::Sender,
computed::suspense::SuspenseContext,
graph::{
AnySource, AnySubscriber, ReactiveNode, Source, SourceSet, Subscriber,
SubscriberSet,
@@ -21,7 +20,6 @@ pub(crate) struct ArcAsyncDerivedInner {
pub notifier: Sender,
pub state: AsyncDerivedState,
pub version: usize,
pub suspenses: Vec<SuspenseContext>,
}
#[derive(Debug, PartialEq, Eq)]

View File

@@ -1,6 +1,6 @@
use super::{inner::MemoInner, ArcMemo};
use crate::{
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
signal::{
guards::{Mapped, Plain, ReadGuard},
ArcReadSignal,
@@ -102,7 +102,7 @@ where
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: ArenaItem<ArcMemo<T, S>, S>,
inner: StoredValue<ArcMemo<T, S>, S>,
}
impl<T, S> Dispose for Memo<T, S>
@@ -123,7 +123,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value),
inner: StoredValue::new_with_storage(value),
}
}
}
@@ -137,7 +137,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value),
inner: StoredValue::new_with_storage(value),
}
}
}
@@ -177,7 +177,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcMemo::new(fun)),
inner: StoredValue::new_with_storage(ArcMemo::new(fun)),
}
}
@@ -202,7 +202,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcMemo::new_with_compare(
inner: StoredValue::new_with_storage(ArcMemo::new_with_compare(
fun, changed,
)),
}
@@ -229,7 +229,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcMemo::new_owning(fun)),
inner: StoredValue::new_with_storage(ArcMemo::new_owning(fun)),
}
}
}

View File

@@ -30,7 +30,7 @@ use std::{
/// let a = RwSignal::new(0);
/// let is_selected = Selector::new(move || a.get());
/// let total_notifications = StoredValue::new(0);
/// Effect::new_isomorphic({
/// Effect::new({
/// let is_selected = is_selected.clone();
/// move |_| {
/// if is_selected.selected(5) {
@@ -55,7 +55,7 @@ use std::{
///
/// # any_spawner::Executor::tick().await;
/// assert_eq!(is_selected.selected(5), false);
/// # }).await;
/// # });
/// # });
/// ```
#[derive(Clone)]
@@ -74,17 +74,17 @@ where
impl<T> Selector<T>
where
T: PartialEq + Send + Sync + Eq + Clone + Hash + 'static,
T: PartialEq + Eq + Clone + Hash + 'static,
{
/// Creates a new selector that compares values using [`PartialEq`].
pub fn new(source: impl Fn() -> T + Send + Sync + Clone + 'static) -> Self {
pub fn new(source: impl Fn() -> T + Clone + 'static) -> Self {
Self::new_with_fn(source, PartialEq::eq)
}
/// Creates a new selector that compares values by returning `true` from a comparator function
/// if the values are the same.
pub fn new_with_fn(
source: impl Fn() -> T + Clone + Send + Sync + 'static,
source: impl Fn() -> T + Clone + 'static,
f: impl Fn(&T, &T) -> bool + Send + Sync + Clone + 'static,
) -> Self {
let subs: Arc<RwLock<FxHashMap<T, ArcRwSignal<bool>>>> =
@@ -92,7 +92,7 @@ where
let v: Arc<RwLock<Option<T>>> = Default::default();
let f = Arc::new(f) as Arc<dyn Fn(&T, &T) -> bool + Send + Sync>;
let effect = Arc::new(RenderEffect::new_isomorphic({
let effect = Arc::new(RenderEffect::new({
let subs = Arc::clone(&subs);
let f = Arc::clone(&f);
let v = Arc::clone(&v);

View File

@@ -5,7 +5,7 @@ use crate::{
AnySubscriber, ReactiveNode, SourceSet, Subscriber, ToAnySubscriber,
WithObserver,
},
owner::{ArenaItem, LocalStorage, Owner, Storage, SyncStorage},
owner::{LocalStorage, Owner, Storage, StoredValue, SyncStorage},
traits::Dispose,
};
use any_spawner::Executor;
@@ -40,10 +40,9 @@ use std::{
/// # use reactive_graph::signal::*;
/// # use reactive_graph::prelude::*;
/// # use reactive_graph::effect::Effect;
/// # use reactive_graph::owner::ArenaItem;
/// # use reactive_graph::owner::StoredValue;
/// # tokio_test::block_on(async move {
/// # tokio::task::LocalSet::new().run_until(async move {
/// # any_spawner::Executor::init_tokio();
/// let a = RwSignal::new(0);
/// let b = RwSignal::new(0);
///
@@ -53,9 +52,7 @@ use std::{
/// println!("Value: {}", a.get());
/// });
///
/// # assert_eq!(a.get(), 0);
/// a.set(1);
/// # assert_eq!(a.get(), 1);
/// // ✅ because it's subscribed to `a`, the effect reruns and prints "Value: 1"
///
/// // ❌ don't use effects to synchronize state within the reactive system
@@ -64,7 +61,7 @@ use std::{
/// // and easily lead to problems like infinite loops
/// b.set(a.get() + 1);
/// });
/// # }).await;
/// # });
/// # });
/// ```
/// ## Web-Specific Notes
@@ -78,7 +75,7 @@ use std::{
/// If you need an effect to run on the server, use [`Effect::new_isomorphic`].
#[derive(Debug, Clone, Copy)]
pub struct Effect<S> {
inner: Option<ArenaItem<StoredEffect, S>>,
inner: Option<StoredValue<StoredEffect, S>>,
}
type StoredEffect = Option<Arc<RwLock<EffectInner>>>;
@@ -165,7 +162,7 @@ impl Effect<LocalStorage> {
}
});
ArenaItem::new_with_storage(Some(inner))
StoredValue::new_with_storage(Some(inner))
});
Self { inner }
@@ -185,7 +182,6 @@ impl Effect<LocalStorage> {
/// # use reactive_graph::signal::signal;
/// # tokio_test::block_on(async move {
/// # tokio::task::LocalSet::new().run_until(async move {
/// # any_spawner::Executor::init_tokio();
/// #
/// let (num, set_num) = signal(0);
///
@@ -196,16 +192,13 @@ impl Effect<LocalStorage> {
/// },
/// false,
/// );
/// # assert_eq!(num.get(), 0);
///
/// set_num.set(1); // > "Number: 1; Prev: Some(0)"
/// # assert_eq!(num.get(), 1);
///
/// effect.stop(); // stop watching
///
/// set_num.set(2); // (nothing happens)
/// # assert_eq!(num.get(), 2);
/// # }).await;
/// # });
/// # });
/// ```
///
@@ -217,7 +210,6 @@ impl Effect<LocalStorage> {
/// # use reactive_graph::signal::signal;
/// # tokio_test::block_on(async move {
/// # tokio::task::LocalSet::new().run_until(async move {
/// # any_spawner::Executor::init_tokio();
/// #
/// let (num, set_num) = signal(0);
/// let (cb_num, set_cb_num) = signal(0);
@@ -230,17 +222,12 @@ impl Effect<LocalStorage> {
/// false,
/// );
///
/// # assert_eq!(num.get(), 0);
/// set_num.set(1); // > "Number: 1; Cb: 0"
/// # assert_eq!(num.get(), 1);
///
/// # assert_eq!(cb_num.get(), 0);
/// set_cb_num.set(1); // (nothing happens)
/// # assert_eq!(cb_num.get(), 1);
///
/// set_num.set(2); // > "Number: 2; Cb: 1"
/// # assert_eq!(num.get(), 2);
/// # }).await;
/// # });
/// # });
/// ```
///
@@ -256,7 +243,6 @@ impl Effect<LocalStorage> {
/// # use reactive_graph::signal::signal;
/// # tokio_test::block_on(async move {
/// # tokio::task::LocalSet::new().run_until(async move {
/// # any_spawner::Executor::init_tokio();
/// #
/// let (num, set_num) = signal(0);
///
@@ -268,10 +254,8 @@ impl Effect<LocalStorage> {
/// true,
/// ); // > "Number: 0; Prev: None"
///
/// # assert_eq!(num.get(), 0);
/// set_num.set(1); // > "Number: 1; Prev: Some(0)"
/// # assert_eq!(num.get(), 1);
/// # }).await;
/// # });
/// # });
/// ```
pub fn watch<D, T>(
@@ -334,7 +318,7 @@ impl Effect<LocalStorage> {
}
});
ArenaItem::new_with_storage(Some(inner))
StoredValue::new_with_storage(Some(inner))
});
Self { inner }
@@ -358,7 +342,7 @@ impl Effect<SyncStorage> {
let mut first_run = true;
let value = Arc::new(RwLock::new(None::<T>));
crate::spawn({
Executor::spawn({
let value = Arc::clone(&value);
let subscriber = inner.to_any_subscriber();
@@ -383,7 +367,7 @@ impl Effect<SyncStorage> {
}
});
ArenaItem::new_with_storage(Some(inner))
StoredValue::new_with_storage(Some(inner))
});
Self { inner }
@@ -403,7 +387,7 @@ impl Effect<SyncStorage> {
let mut first_run = true;
let value = Arc::new(RwLock::new(None::<T>));
let task = {
Executor::spawn({
let value = Arc::clone(&value);
let subscriber = inner.to_any_subscriber();
@@ -425,12 +409,10 @@ impl Effect<SyncStorage> {
}
}
}
};
crate::spawn(task);
});
Self {
inner: Some(ArenaItem::new_with_storage(Some(inner))),
inner: Some(StoredValue::new_with_storage(Some(inner))),
}
}
@@ -453,7 +435,7 @@ impl Effect<SyncStorage> {
let watch_value = Arc::new(RwLock::new(None::<T>));
let inner = cfg!(feature = "effects").then(|| {
crate::spawn({
Executor::spawn({
let dep_value = Arc::clone(&dep_value);
let watch_value = Arc::clone(&watch_value);
let subscriber = inner.to_any_subscriber();
@@ -498,7 +480,7 @@ impl Effect<SyncStorage> {
}
});
ArenaItem::new_with_storage(Some(inner))
StoredValue::new_with_storage(Some(inner))
});
Self { inner }

View File

@@ -135,50 +135,44 @@ where
{
/// Creates a render effect that will run whether the `effects` feature is enabled or not.
pub fn new_isomorphic(
fun: impl FnMut(Option<T>) -> T + Send + Sync + 'static,
mut fun: impl FnMut(Option<T>) -> T + Send + 'static,
) -> Self {
fn erased<T: Send + Sync + 'static>(
mut fun: Box<dyn FnMut(Option<T>) -> T + Send + Sync + 'static>,
) -> RenderEffect<T> {
let (observer, mut rx) = channel();
let value = Arc::new(RwLock::new(None::<T>));
let owner = Owner::new();
let inner = Arc::new(RwLock::new(EffectInner {
dirty: false,
observer,
sources: SourceSet::new(),
}));
let (mut observer, mut rx) = channel();
observer.notify();
let initial_value = owner
.with(|| inner.to_any_subscriber().with_observer(|| fun(None)));
*value.write().or_poisoned() = Some(initial_value);
let value = Arc::new(RwLock::new(None::<T>));
let owner = Owner::new();
let inner = Arc::new(RwLock::new(EffectInner {
dirty: false,
observer,
sources: SourceSet::new(),
}));
let mut first_run = true;
crate::spawn({
let value = Arc::clone(&value);
let subscriber = inner.to_any_subscriber();
Executor::spawn({
let value = Arc::clone(&value);
let subscriber = inner.to_any_subscriber();
async move {
while rx.next().await.is_some() {
if subscriber
async move {
while rx.next().await.is_some() {
if first_run
|| subscriber
.with_observer(|| subscriber.update_if_necessary())
{
subscriber.clear_sources(&subscriber);
{
first_run = false;
subscriber.clear_sources(&subscriber);
let old_value =
mem::take(&mut *value.write().or_poisoned());
let new_value = owner.with_cleanup(|| {
subscriber.with_observer(|| fun(old_value))
});
*value.write().or_poisoned() = Some(new_value);
}
let old_value =
mem::take(&mut *value.write().or_poisoned());
let new_value = owner.with_cleanup(|| {
subscriber.with_observer(|| fun(old_value))
});
*value.write().or_poisoned() = Some(new_value);
}
}
});
RenderEffect { value, inner }
}
erased(Box::new(fun))
}
});
RenderEffect { value, inner }
}
}
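One side of this hunk funnels the generic closure through a boxed, non-generic-over-the-closure inner `erased` function. A hedged sketch of that type-erasure pattern on its own, outside the render-effect machinery:

```rust
// Hedged sketch of the erasure pattern: the public constructor is generic over
// the closure type, but immediately boxes it and hands it to an inner function
// that is generic only over T, so the long body is monomorphized once per
// value type instead of once per closure.
struct Runner<T> {
    value: Option<T>,
}

impl<T: 'static> Runner<T> {
    pub fn new(fun: impl FnMut(Option<T>) -> T + 'static) -> Self {
        fn erased<T: 'static>(
            mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>,
        ) -> Runner<T> {
            // run once immediately, as the render effect does for its first value
            let first = fun(None);
            Runner { value: Some(first) }
        }
        erased(Box::new(fun))
    }
}

fn main() {
    let runner = Runner::new(|prev: Option<u32>| prev.unwrap_or(0) + 1);
    assert_eq!(runner.value, Some(1));
}
```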

View File

@@ -1,10 +1,10 @@
use super::{node::ReactiveNode, AnySubscriber};
use crate::traits::{DefinedAt, IsDisposed};
use crate::traits::DefinedAt;
use core::{fmt::Debug, hash::Hash};
use std::{panic::Location, sync::Weak};
/// Abstracts over the type of any reactive source.
pub trait ToAnySource: IsDisposed {
pub trait ToAnySource {
/// Converts this type to its type-erased equivalent.
fn to_any_source(&self) -> AnySource;
}
@@ -62,13 +62,6 @@ impl PartialEq for AnySource {
impl Eq for AnySource {}
impl IsDisposed for AnySource {
#[inline(always)]
fn is_disposed(&self) -> bool {
false
}
}
impl ToAnySource for AnySource {
fn to_any_source(&self) -> AnySource {
self.clone()

View File

@@ -71,7 +71,7 @@
#![cfg_attr(feature = "nightly", feature(fn_traits))]
#![deny(missing_docs)]
use std::{fmt::Arguments, future::Future};
use std::fmt::Arguments;
pub mod actions;
pub(crate) mod channel;
@@ -99,8 +99,7 @@ pub mod prelude {
// TODO remove this, it's just useful while developing
#[allow(unused)]
#[doc(hidden)]
pub fn log_warning(text: Arguments) {
fn log_warning(text: Arguments) {
#[cfg(feature = "tracing")]
{
tracing::warn!(text);
@@ -121,12 +120,3 @@ pub fn log_warning(text: Arguments) {
eprintln!("{}", text);
}
}
/// Calls [`Executor::spawn`], but ensures that the task also runs in the current arena, if
/// multithreaded arena sandboxing is enabled.
pub(crate) fn spawn(task: impl Future<Output = ()> + Send + 'static) {
#[cfg(feature = "sandboxed-arenas")]
let task = owner::Sandboxed::new(task);
any_spawner::Executor::spawn(task);
}

View File

@@ -13,25 +13,24 @@ use std::{
};
mod arena;
mod arena_item;
mod context;
mod storage;
mod stored_value;
use self::arena::Arena;
#[cfg(feature = "sandboxed-arenas")]
pub use arena::sandboxed::Sandboxed;
use arena::NodeId;
pub use arena_item::*;
pub use context::*;
pub use storage::*;
#[allow(deprecated)] // allow exporting deprecated fn
pub use stored_value::{store_value, FromLocal, StoredValue};
pub use stored_value::{
store_value, FromLocal, LocalStorage, Storage, StorageAccess, StoredValue,
SyncStorage,
};
/// A reactive owner, which manages
/// 1) the cancelation of [`Effect`](crate::effect::Effect)s,
/// 2) providing and accessing environment data via [`provide_context`] and [`use_context`],
/// 3) running cleanup functions defined via [`Owner::on_cleanup`], and
/// 4) an arena storage system to provide `Copy` handles via [`ArenaItem`], which is what allows
/// 4) an arena storage system to provide `Copy` handles via [`StoredValue`], which is what allows
/// types like [`RwSignal`](crate::signal::RwSignal), [`Memo`](crate::computed::Memo), and so on to be `Copy`.
///
/// Every effect and computed reactive value has an associated `Owner`. While it is running, this
@@ -210,7 +209,7 @@ impl Owner {
/// Cleans up this owner in the following order:
/// 1) Runs `cleanup` on all children,
/// 2) Runs all cleanup functions registered with [`Owner::on_cleanup`],
/// 3) Drops the values of any arena-allocated [`ArenaItem`]s.
/// 3) Drops the values of any arena-allocated [`StoredValue`]s.
pub fn cleanup(&self) {
self.inner.cleanup();
}
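A hedged sketch of the ownership lifecycle documented above, using only calls that appear elsewhere in this diff (`Owner::new`, `owner.with`, `Owner::on_cleanup`, `owner.cleanup`); the exact bounds on the cleanup closure are an assumption here:

```rust
// Hedged sketch: cleanup callbacks registered while an Owner is active run,
// in the documented order, when that owner is cleaned up.
use reactive_graph::owner::Owner;

fn main() {
    let owner = Owner::new();
    owner.with(|| {
        // registered with the currently active owner
        Owner::on_cleanup(|| println!("children first, then this, then arena values"));
    });
    owner.cleanup(); // runs the cleanup registered above
}
```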

View File

@@ -124,7 +124,7 @@ pub mod sandboxed {
}
impl<T> Sandboxed<T> {
/// Wraps the given [`Future`], ensuring that any [`ArenaItem`] created while it is being
/// Wraps the given [`Future`], ensuring that any [`StoredValue`] created while it is being
/// polled will be associated with the same arena that was active when this was called.
pub fn new(inner: T) -> Self {
let arena = MAP.with_borrow(|current| {

View File

@@ -1,136 +0,0 @@
use super::{
arena::{Arena, NodeId},
LocalStorage, Storage, SyncStorage, OWNER,
};
use crate::traits::{Dispose, IsDisposed};
use send_wrapper::SendWrapper;
use std::{any::Any, hash::Hash, marker::PhantomData};
/// A copyable, stable reference for any value, stored on the arena whose ownership is managed by the
/// reactive ownership tree.
#[derive(Debug)]
pub struct ArenaItem<T, S = SyncStorage> {
node: NodeId,
#[allow(clippy::type_complexity)]
ty: PhantomData<fn() -> (SendWrapper<T>, S)>,
}
impl<T, S> Copy for ArenaItem<T, S> {}
impl<T, S> Clone for ArenaItem<T, S> {
fn clone(&self) -> Self {
*self
}
}
impl<T, S> PartialEq for ArenaItem<T, S> {
fn eq(&self, other: &Self) -> bool {
self.node == other.node
}
}
impl<T, S> Eq for ArenaItem<T, S> {}
impl<T, S> Hash for ArenaItem<T, S> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.node.hash(state);
}
}
impl<T, S> ArenaItem<T, S>
where
T: 'static,
S: Storage<T>,
{
/// Stores the given value in the arena allocator.
#[track_caller]
pub fn new_with_storage(value: T) -> Self {
let node = {
Arena::with_mut(|arena| {
arena.insert(
Box::new(S::wrap(value)) as Box<dyn Any + Send + Sync>
)
})
};
OWNER.with(|o| {
if let Some(owner) = &*o.borrow() {
owner.register(node);
}
});
Self {
node,
ty: PhantomData,
}
}
}
impl<T, S> Default for ArenaItem<T, S>
where
T: Default + 'static,
S: Storage<T>,
{
#[track_caller] // Default trait is not annotated with #[track_caller]
fn default() -> Self {
Self::new_with_storage(Default::default())
}
}
impl<T> ArenaItem<T>
where
T: Send + Sync + 'static,
{
/// Stores the given value in the arena allocator.
#[track_caller]
pub fn new(value: T) -> Self {
ArenaItem::new_with_storage(value)
}
}
impl<T> ArenaItem<T, LocalStorage>
where
T: 'static,
{
/// Stores the given value in the arena allocator.
#[track_caller]
pub fn new_local(value: T) -> Self {
ArenaItem::new_with_storage(value)
}
}
impl<T, S: Storage<T>> ArenaItem<T, S> {
/// Applies a function to a reference to the stored value and returns the result, or `None` if it has already been disposed.
#[track_caller]
pub fn try_with_value<U>(&self, fun: impl FnOnce(&T) -> U) -> Option<U> {
S::try_with(self.node, fun)
}
/// Applies a function to a mutable reference to the stored value and returns the result, or `None` if it has already been disposed.
#[track_caller]
pub fn try_update_value<U>(
&self,
fun: impl FnOnce(&mut T) -> U,
) -> Option<U> {
S::try_with_mut(self.node, fun)
}
}
impl<T: Clone, S: Storage<T>> ArenaItem<T, S> {
/// Returns a clone of the stored value, or `None` if it has already been disposed.
#[track_caller]
pub fn try_get_value(&self) -> Option<T> {
S::try_with(self.node, Clone::clone)
}
}
impl<T, S> IsDisposed for ArenaItem<T, S> {
fn is_disposed(&self) -> bool {
Arena::with(|arena| !arena.contains_key(self.node))
}
}
impl<T, S> Dispose for ArenaItem<T, S> {
fn dispose(self) {
Arena::with_mut(|arena| arena.remove(self.node));
}
}

View File

@@ -1,151 +0,0 @@
use super::arena::{Arena, NodeId};
use send_wrapper::SendWrapper;
/// A trait for borrowing and taking data.
pub trait StorageAccess<T> {
/// Borrows the value.
fn as_borrowed(&self) -> &T;
/// Takes the value.
fn into_taken(self) -> T;
}
impl<T> StorageAccess<T> for T {
fn as_borrowed(&self) -> &T {
self
}
fn into_taken(self) -> T {
self
}
}
impl<T> StorageAccess<T> for SendWrapper<T> {
fn as_borrowed(&self) -> &T {
self
}
fn into_taken(self) -> T {
self.take()
}
}
/// A way of storing an [`ArenaItem`], either as itself or with a wrapper to make it threadsafe.

///
/// This exists because all items stored in the arena must be `Send + Sync`, but in single-threaded
/// environments you might want or need to use thread-unsafe types.
pub trait Storage<T>: Send + Sync + 'static {
/// The type being stored, once it has been wrapped.
type Wrapped: StorageAccess<T> + Send + Sync + 'static;
/// Adds any needed wrapper to the type.
fn wrap(value: T) -> Self::Wrapped;
/// Applies the given function to the stored value, if it exists and can be accessed from this
/// thread.
fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U>;
/// Applies the given function to a mutable reference to the stored value, if it exists and can be accessed from this
/// thread.
fn try_with_mut<U>(
node: NodeId,
fun: impl FnOnce(&mut T) -> U,
) -> Option<U>;
/// Sets a new value for the stored value. If it has been disposed, returns `Some(T)`.
fn try_set(node: NodeId, value: T) -> Option<T>;
}
/// A form of [`Storage`] that stores the type as itself, with no wrapper.
#[derive(Debug, Copy, Clone)]
pub struct SyncStorage;
impl<T> Storage<T> for SyncStorage
where
T: Send + Sync + 'static,
{
type Wrapped = T;
#[inline(always)]
fn wrap(value: T) -> Self::Wrapped {
value
}
fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U> {
Arena::with(|arena| {
let m = arena.get(node);
m.and_then(|n| n.downcast_ref::<T>()).map(fun)
})
}
fn try_with_mut<U>(
node: NodeId,
fun: impl FnOnce(&mut T) -> U,
) -> Option<U> {
Arena::with_mut(|arena| {
let m = arena.get_mut(node);
m.and_then(|n| n.downcast_mut::<T>()).map(fun)
})
}
fn try_set(node: NodeId, value: T) -> Option<T> {
Arena::with_mut(|arena| {
let m = arena.get_mut(node);
match m.and_then(|n| n.downcast_mut::<T>()) {
Some(inner) => {
*inner = value;
None
}
None => Some(value),
}
})
}
}
/// A form of [`Storage`] that stores the type with a wrapper that makes it `Send + Sync`, but only
/// allows it to be accessed from the thread on which it was created.
#[derive(Debug, Copy, Clone)]
pub struct LocalStorage;
impl<T> Storage<T> for LocalStorage
where
T: 'static,
{
type Wrapped = SendWrapper<T>;
fn wrap(value: T) -> Self::Wrapped {
SendWrapper::new(value)
}
fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U> {
Arena::with(|arena| {
let m = arena.get(node);
m.and_then(|n| n.downcast_ref::<SendWrapper<T>>())
.map(|inner| fun(inner))
})
}
fn try_with_mut<U>(
node: NodeId,
fun: impl FnOnce(&mut T) -> U,
) -> Option<U> {
Arena::with_mut(|arena| {
let m = arena.get_mut(node);
m.and_then(|n| n.downcast_mut::<SendWrapper<T>>())
.map(|inner| fun(&mut *inner))
})
}
fn try_set(node: NodeId, value: T) -> Option<T> {
Arena::with_mut(|arena| {
let m = arena.get_mut(node);
match m.and_then(|n| n.downcast_mut::<SendWrapper<T>>()) {
Some(inner) => {
*inner = SendWrapper::new(value);
None
}
None => Some(value),
}
})
}
}

View File

@@ -1,14 +1,162 @@
use super::{ArenaItem, LocalStorage, Storage, SyncStorage};
use super::{
arena::{Arena, NodeId},
OWNER,
};
use crate::{
traits::{DefinedAt, Dispose, IsDisposed},
unwrap_signal,
};
use or_poisoned::OrPoisoned;
use std::{
hash::Hash,
panic::Location,
sync::{Arc, RwLock},
};
use send_wrapper::SendWrapper;
use std::{any::Any, hash::Hash, marker::PhantomData, panic::Location};
/// A trait for borrowing and taking data.
pub trait StorageAccess<T> {
/// Borrows the value.
fn as_borrowed(&self) -> &T;
/// Takes the value.
fn into_taken(self) -> T;
}
impl<T> StorageAccess<T> for T {
fn as_borrowed(&self) -> &T {
self
}
fn into_taken(self) -> T {
self
}
}
impl<T> StorageAccess<T> for SendWrapper<T> {
fn as_borrowed(&self) -> &T {
self
}
fn into_taken(self) -> T {
self.take()
}
}
/// A way of storing a [`StoredValue`], either as itself or with a wrapper to make it threadsafe.
///
/// This exists because all items stored in the arena must be `Send + Sync`, but in single-threaded
/// environments you might want or need to use thread-unsafe types.
pub trait Storage<T>: Send + Sync + 'static {
/// The type being stored, once it has been wrapped.
type Wrapped: StorageAccess<T> + Send + Sync + 'static;
/// Adds any needed wrapper to the type.
fn wrap(value: T) -> Self::Wrapped;
/// Applies the given function to the stored value, if it exists and can be accessed from this
/// thread.
fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U>;
/// Applies the given function to a mutable reference to the stored value, if it exists and can be accessed from this
/// thread.
fn try_with_mut<U>(
node: NodeId,
fun: impl FnOnce(&mut T) -> U,
) -> Option<U>;
/// Sets a new value for the stored value. If it has been disposed, returns `Some(T)`.
fn try_set(node: NodeId, value: T) -> Option<T>;
}
/// A form of [`Storage`] that stores the type as itself, with no wrapper.
#[derive(Debug, Copy, Clone)]
pub struct SyncStorage;
impl<T> Storage<T> for SyncStorage
where
T: Send + Sync + 'static,
{
type Wrapped = T;
#[inline(always)]
fn wrap(value: T) -> Self::Wrapped {
value
}
fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U> {
Arena::with(|arena| {
let m = arena.get(node);
m.and_then(|n| n.downcast_ref::<T>()).map(fun)
})
}
fn try_with_mut<U>(
node: NodeId,
fun: impl FnOnce(&mut T) -> U,
) -> Option<U> {
Arena::with_mut(|arena| {
let m = arena.get_mut(node);
m.and_then(|n| n.downcast_mut::<T>()).map(fun)
})
}
fn try_set(node: NodeId, value: T) -> Option<T> {
Arena::with_mut(|arena| {
let m = arena.get_mut(node);
match m.and_then(|n| n.downcast_mut::<T>()) {
Some(inner) => {
*inner = value;
None
}
None => Some(value),
}
})
}
}
/// A form of [`Storage`] that stores the type with a wrapper that makes it `Send + Sync`, but only
/// allows it to be accessed from the thread on which it was created.
#[derive(Debug, Copy, Clone)]
pub struct LocalStorage;
impl<T> Storage<T> for LocalStorage
where
T: 'static,
{
type Wrapped = SendWrapper<T>;
fn wrap(value: T) -> Self::Wrapped {
SendWrapper::new(value)
}
fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U> {
Arena::with(|arena| {
let m = arena.get(node);
m.and_then(|n| n.downcast_ref::<SendWrapper<T>>())
.map(|inner| fun(inner))
})
}
fn try_with_mut<U>(
node: NodeId,
fun: impl FnOnce(&mut T) -> U,
) -> Option<U> {
Arena::with_mut(|arena| {
let m = arena.get_mut(node);
m.and_then(|n| n.downcast_mut::<SendWrapper<T>>())
.map(|inner| fun(&mut *inner))
})
}
fn try_set(node: NodeId, value: T) -> Option<T> {
Arena::with_mut(|arena| {
let m = arena.get_mut(node);
match m.and_then(|n| n.downcast_mut::<SendWrapper<T>>()) {
Some(inner) => {
*inner = SendWrapper::new(value);
None
}
None => Some(value),
}
})
}
}
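To make the difference between the two `Storage` implementations above concrete, a hedged usage sketch: `SyncStorage` (the default) keeps the value as-is and therefore needs `Send + Sync`, while `LocalStorage` wraps it in `SendWrapper` so a thread-local type can still live in the shared arena, at the cost of only being usable on its creating thread:

```rust
// Hedged sketch: the same handle type, two storage strategies.
use reactive_graph::owner::StoredValue;
use std::rc::Rc;

fn main() {
    // Send + Sync value: SyncStorage, the default type parameter.
    let shared = StoredValue::new(String::from("threadsafe"));
    shared.with_value(|s| assert_eq!(s, "threadsafe"));

    // `Rc` is !Send, so it needs LocalStorage: the value is wrapped in
    // SendWrapper and may only be touched from the thread that created it.
    let local = StoredValue::new_local(Rc::new(5));
    local.with_value(|n| assert_eq!(**n, 5));
}
```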
/// A **non-reactive**, `Copy` handle for any value.
///
@@ -19,7 +167,8 @@ use std::{
/// updating it does not notify anything else.
#[derive(Debug)]
pub struct StoredValue<T, S = SyncStorage> {
value: ArenaItem<Arc<RwLock<T>>, S>,
node: NodeId,
ty: PhantomData<(SendWrapper<T>, S)>,
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
}
@@ -34,7 +183,7 @@ impl<T, S> Clone for StoredValue<T, S> {
impl<T, S> PartialEq for StoredValue<T, S> {
fn eq(&self, other: &Self) -> bool {
self.value == other.value
self.node == other.node
}
}
@@ -42,7 +191,7 @@ impl<T, S> Eq for StoredValue<T, S> {}
impl<T, S> Hash for StoredValue<T, S> {
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
self.value.hash(state);
self.node.hash(state);
}
}
@@ -62,13 +211,27 @@ impl<T, S> DefinedAt for StoredValue<T, S> {
impl<T, S> StoredValue<T, S>
where
T: 'static,
S: Storage<Arc<RwLock<T>>>,
S: Storage<T>,
{
/// Stores the given value in the arena allocator.
#[track_caller]
pub fn new_with_storage(value: T) -> Self {
let node = {
Arena::with_mut(|arena| {
arena.insert(
Box::new(S::wrap(value)) as Box<dyn Any + Send + Sync>
)
})
};
OWNER.with(|o| {
if let Some(owner) = &*o.borrow() {
owner.register(node);
}
});
Self {
value: ArenaItem::new_with_storage(Arc::new(RwLock::new(value))),
node,
ty: PhantomData,
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
@@ -78,7 +241,7 @@ where
impl<T, S> Default for StoredValue<T, S>
where
T: Default + 'static,
S: Storage<Arc<RwLock<T>>>,
S: Storage<T>,
{
#[track_caller] // Default trait is not annotated with #[track_caller]
fn default() -> Self {
@@ -108,7 +271,7 @@ where
}
}
impl<T, S: Storage<Arc<RwLock<T>>>> StoredValue<T, S> {
impl<T, S: Storage<T>> StoredValue<T, S> {
/// Returns an [`Option`] of applying a function to the value within the [`StoredValue`].
///
/// If the owner of the reactive node has not been disposed [`Some`] is returned. Calling this
@@ -152,9 +315,7 @@ impl<T, S: Storage<Arc<RwLock<T>>>> StoredValue<T, S> {
/// ```
#[track_caller]
pub fn try_with_value<U>(&self, fun: impl FnOnce(&T) -> U) -> Option<U> {
self.value
.try_get_value()
.map(|inner| fun(&*inner.read().or_poisoned()))
S::try_with(self.node, fun)
}
/// Returns the output of applying a function to the value within the [`StoredValue`].
@@ -199,9 +360,7 @@ impl<T, S: Storage<Arc<RwLock<T>>>> StoredValue<T, S> {
&self,
fun: impl FnOnce(&mut T) -> U,
) -> Option<U> {
self.value
.try_get_value()
.map(|inner| fun(&mut *inner.write().or_poisoned()))
S::try_with_mut(self.node, fun)
}
/// Updates the value within [`StoredValue`] by applying a function to it.
@@ -294,13 +453,7 @@ impl<T, S: Storage<Arc<RwLock<T>>>> StoredValue<T, S> {
/// assert_eq!(reset().as_deref(), Some(""));
/// ```
pub fn try_set_value(&self, value: T) -> Option<T> {
match self.value.try_get_value() {
Some(inner) => {
*inner.write().or_poisoned() = value;
None
}
None => Some(value),
}
S::try_set(self.node, value)
}
/// Sets the value within [`StoredValue`].
@@ -337,11 +490,11 @@ impl<T, S: Storage<Arc<RwLock<T>>>> StoredValue<T, S> {
impl<T, S> IsDisposed for StoredValue<T, S> {
fn is_disposed(&self) -> bool {
self.value.is_disposed()
Arena::with(|arena| !arena.contains_key(self.node))
}
}
impl<T, S: Storage<Arc<RwLock<T>>>> StoredValue<T, S>
impl<T, S: Storage<T>> StoredValue<T, S>
where
T: Clone + 'static,
{
@@ -421,7 +574,7 @@ where
impl<T, S> Dispose for StoredValue<T, S> {
fn dispose(self) {
self.value.dispose();
Arena::with_mut(|arena| arena.remove(self.node));
}
}
@@ -429,7 +582,7 @@ impl<T, S> Dispose for StoredValue<T, S> {
#[inline(always)]
#[track_caller]
#[deprecated(
since = "0.7.0-beta5",
since = "0.7.0-beta4",
note = "This function is being removed to conform to Rust idioms. Please \
use `StoredValue::new()` or `StoredValue::new_local()` instead."
)]

View File

@@ -56,7 +56,7 @@ use std::{
/// > Each of these has a related `_untracked()` method, which updates the signal
/// > without notifying subscribers. Untracked updates are not desirable in most
/// > cases, as they cause “tearing” between the signal’s value and its observed
/// > value. If you want a non-reactive container, use [`ArenaItem`](crate::owner::ArenaItem)
/// > value. If you want a non-reactive container, use [`StoredValue`](crate::owner::StoredValue)
/// > instead.
///
/// ## Examples

View File

@@ -29,7 +29,7 @@ use std::{
/// > Each of these has a related `_untracked()` method, which updates the signal
/// > without notifying subscribers. Untracked updates are not desirable in most
/// > cases, as they cause “tearing” between the signal’s value and its observed
/// > value. If you want a non-reactive container, use [`ArenaItem`](crate::owner::ArenaItem)
/// > value. If you want a non-reactive container, use [`StoredValue`](crate::owner::StoredValue)
/// > instead.
///
/// ## Examples

View File

@@ -5,7 +5,7 @@ use super::{
};
use crate::{
graph::SubscriberSet,
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
traits::{DefinedAt, Dispose, IsDisposed, ReadUntracked},
unwrap_signal,
};
@@ -60,7 +60,7 @@ use std::{
pub struct ReadSignal<T, S = SyncStorage> {
#[cfg(debug_assertions)]
pub(crate) defined_at: &'static Location<'static>,
pub(crate) inner: ArenaItem<ArcReadSignal<T>, S>,
pub(crate) inner: StoredValue<ArcReadSignal<T>, S>,
}
impl<T, S> Dispose for ReadSignal<T, S> {
@@ -158,7 +158,7 @@ where
ReadSignal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value),
inner: StoredValue::new_with_storage(value),
}
}
}
@@ -172,7 +172,7 @@ where
ReadSignal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value),
inner: StoredValue::new_with_storage(value),
}
}
}

View File

@@ -5,7 +5,7 @@ use super::{
};
use crate::{
graph::{ReactiveNode, SubscriberSet},
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
signal::guards::{UntrackedWriteGuard, WriteGuard},
traits::{
DefinedAt, Dispose, IsDisposed, Notify, ReadUntracked,
@@ -63,7 +63,7 @@ use std::{
/// > Each of these has a related `_untracked()` method, which updates the signal
/// > without notifying subscribers. Untracked updates are not desirable in most
/// > cases, as they cause “tearing” between the signal’s value and its observed
/// > value. If you want a non-reactive container, use [`ArenaItem`] instead.
/// > value. If you want a non-reactive container, use [`StoredValue`] instead.
///
/// ## Examples
///
@@ -102,7 +102,7 @@ use std::{
pub struct RwSignal<T, S = SyncStorage> {
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: ArenaItem<ArcRwSignal<T>, S>,
inner: StoredValue<ArcRwSignal<T>, S>,
}
impl<T, S> Dispose for RwSignal<T, S> {
@@ -141,7 +141,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcRwSignal::new(value)),
inner: StoredValue::new_with_storage(ArcRwSignal::new(value)),
}
}
}
@@ -174,7 +174,7 @@ where
ReadSignal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(
inner: StoredValue::new_with_storage(
self.inner
.try_get_value()
.map(|inner| inner.read_only())
@@ -196,7 +196,7 @@ where
WriteSignal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(
inner: StoredValue::new_with_storage(
self.inner
.try_get_value()
.map(|inner| inner.write_only())
@@ -233,7 +233,7 @@ where
Some(Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcRwSignal {
inner: StoredValue::new_with_storage(ArcRwSignal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
value: Arc::clone(&read.value),
@@ -365,9 +365,7 @@ where
#[allow(refining_impl_trait)]
fn try_write_untracked(&self) -> Option<UntrackedWriteGuard<Self::Value>> {
self.inner
.try_with_value(|n| n.try_write_untracked())
.flatten()
self.inner.with_value(|n| n.try_write_untracked())
}
}
@@ -380,7 +378,7 @@ where
RwSignal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value),
inner: StoredValue::new_with_storage(value),
}
}
}
@@ -404,7 +402,7 @@ where
RwSignal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value),
inner: StoredValue::new_with_storage(value),
}
}
}

View File

@@ -13,7 +13,7 @@ use crate::{
AnySource, AnySubscriber, ReactiveNode, Source, SubscriberSet,
ToAnySource,
},
traits::{DefinedAt, IsDisposed},
traits::DefinedAt,
unwrap_signal,
};
use or_poisoned::OrPoisoned;
@@ -93,11 +93,10 @@ impl<T: AsSubscriberSet + DefinedAt> Source for T {
}
}
impl<T: AsSubscriberSet + DefinedAt + IsDisposed> ToAnySource for T
impl<T: AsSubscriberSet + DefinedAt> ToAnySource for T
where
T::Output: Borrow<Arc<RwLock<SubscriberSet>>>,
{
#[track_caller]
fn to_any_source(&self) -> AnySource {
self.as_subscriber_set()
.map(|subs| {

View File

@@ -1,7 +1,7 @@
use super::{subscriber_traits::AsSubscriberSet, ArcTrigger};
use crate::{
graph::{ReactiveNode, SubscriberSet},
owner::ArenaItem,
owner::StoredValue,
traits::{DefinedAt, Dispose, IsDisposed, Notify},
};
use std::{
@@ -20,7 +20,7 @@ use std::{
pub struct Trigger {
#[cfg(debug_assertions)]
pub(crate) defined_at: &'static Location<'static>,
pub(crate) inner: ArenaItem<ArcTrigger>,
pub(crate) inner: StoredValue<ArcTrigger>,
}
impl Trigger {
@@ -30,7 +30,7 @@ impl Trigger {
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new(ArcTrigger::new()),
inner: StoredValue::new(ArcTrigger::new()),
}
}
}

View File

@@ -1,6 +1,6 @@
use super::{guards::WriteGuard, ArcWriteSignal};
use crate::{
owner::{ArenaItem, Storage, SyncStorage},
owner::{Storage, StoredValue, SyncStorage},
traits::{
DefinedAt, Dispose, IsDisposed, Notify, UntrackableGuard, Writeable,
},
@@ -28,7 +28,7 @@ use std::{hash::Hash, ops::DerefMut, panic::Location, sync::Arc};
/// > Each of these has a related `_untracked()` method, which updates the signal
/// > without notifying subscribers. Untracked updates are not desirable in most
/// > cases, as they cause “tearing” between the signal’s value and its observed
/// > value. If you want a non-reactive container, use [`ArenaItem`] instead.
/// > value. If you want a non-reactive container, use [`StoredValue`] instead.
///
/// ## Examples
/// ```
@@ -54,7 +54,7 @@ use std::{hash::Hash, ops::DerefMut, panic::Location, sync::Arc};
pub struct WriteSignal<T, S = SyncStorage> {
#[cfg(debug_assertions)]
pub(crate) defined_at: &'static Location<'static>,
pub(crate) inner: ArenaItem<ArcWriteSignal<T>, S>,
pub(crate) inner: StoredValue<ArcWriteSignal<T>, S>,
}
impl<T, S> Dispose for WriteSignal<T, S> {
@@ -145,8 +145,6 @@ where
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
self.inner
.try_with_value(|n| n.try_write_untracked())
.flatten()
self.inner.with_value(|n| n.try_write_untracked())
}
}

View File

@@ -107,10 +107,6 @@ pub trait Track {
impl<T: Source + ToAnySource + DefinedAt> Track for T {
#[track_caller]
fn track(&self) {
if self.is_disposed() {
return;
}
if let Some(subscriber) = Observer::get() {
subscriber.add_source(self.to_any_source());
self.add_subscriber(subscriber);
@@ -587,7 +583,7 @@ where
fn from_stream(stream: impl Stream<Item = T> + Send + 'static) -> Self {
let (read, write) = arc_signal(None);
let mut stream = Box::pin(stream);
crate::spawn(async move {
Executor::spawn(async move {
while let Some(value) = stream.next().await {
write.set(Some(value));
}
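The change above only swaps how the background task is spawned (crate::spawn vs. Executor::spawn); the pattern itself is worth restating. A self-contained sketch of the same mechanism, assuming arc_signal and ArcReadSignal are exported from reactive_graph::signal and that an any_spawner Executor has been initialized (e.g. with Executor::init_tokio):

use any_spawner::Executor;
use futures::{Stream, StreamExt};
use reactive_graph::signal::{arc_signal, ArcReadSignal};
use reactive_graph::traits::Set;

fn signal_from_stream<T: Send + Sync + 'static>(
    stream: impl Stream<Item = T> + Send + 'static,
) -> ArcReadSignal<Option<T>> {
    // starts as None; each stream item overwrites it with Some(item)
    let (read, write) = arc_signal(None);
    let mut stream = Box::pin(stream);
    Executor::spawn(async move {
        while let Some(value) = stream.next().await {
            write.set(Some(value));
        }
    });
    read
}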

View File

@@ -4,7 +4,7 @@
pub mod read {
use crate::{
computed::{ArcMemo, Memo},
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
signal::{ArcReadSignal, ArcRwSignal, ReadSignal, RwSignal},
traits::{DefinedAt, Dispose, Get, With, WithUntracked},
untrack, unwrap_signal,
@@ -279,7 +279,7 @@ pub mod read {
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: ArenaItem<SignalTypes<T, S>, S>,
inner: StoredValue<SignalTypes<T, S>, S>,
}
impl<T, S> Dispose for Signal<T, S>
@@ -425,9 +425,9 @@ pub mod read {
};
Self {
inner: ArenaItem::new_with_storage(SignalTypes::DerivedSignal(
Arc::new(derived_signal),
)),
inner: StoredValue::new_with_storage(
SignalTypes::DerivedSignal(Arc::new(derived_signal)),
),
#[cfg(debug_assertions)]
defined_at: std::panic::Location::caller(),
}
@@ -452,7 +452,7 @@ pub mod read {
};
Self {
inner: ArenaItem::new_local(SignalTypes::DerivedSignal(
inner: StoredValue::new_local(SignalTypes::DerivedSignal(
Arc::new(derived_signal),
)),
#[cfg(debug_assertions)]
@@ -515,7 +515,7 @@ pub mod read {
Signal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new(value.inner),
inner: StoredValue::new(value.inner),
}
}
}
@@ -529,7 +529,7 @@ pub mod read {
Signal {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_local(value.inner),
inner: StoredValue::new_local(value.inner),
}
}
}
@@ -558,7 +558,7 @@ pub mod read {
#[track_caller]
fn from(value: ReadSignal<T>) -> Self {
Self {
inner: ArenaItem::new(SignalTypes::ReadSignal(value.into())),
inner: StoredValue::new(SignalTypes::ReadSignal(value.into())),
#[cfg(debug_assertions)]
defined_at: std::panic::Location::caller(),
}
@@ -572,7 +572,7 @@ pub mod read {
#[track_caller]
fn from(value: ReadSignal<T, LocalStorage>) -> Self {
Self {
inner: ArenaItem::new_local(SignalTypes::ReadSignal(
inner: StoredValue::new_local(SignalTypes::ReadSignal(
value.into(),
)),
#[cfg(debug_assertions)]
@@ -588,7 +588,7 @@ pub mod read {
#[track_caller]
fn from(value: RwSignal<T>) -> Self {
Self {
inner: ArenaItem::new(SignalTypes::ReadSignal(
inner: StoredValue::new(SignalTypes::ReadSignal(
value.read_only().into(),
)),
#[cfg(debug_assertions)]
@@ -604,7 +604,7 @@ pub mod read {
#[track_caller]
fn from(value: RwSignal<T, LocalStorage>) -> Self {
Self {
inner: ArenaItem::new_local(SignalTypes::ReadSignal(
inner: StoredValue::new_local(SignalTypes::ReadSignal(
value.read_only().into(),
)),
#[cfg(debug_assertions)]
@@ -620,7 +620,7 @@ pub mod read {
#[track_caller]
fn from(value: Memo<T>) -> Self {
Self {
inner: ArenaItem::new(SignalTypes::Memo(value.into())),
inner: StoredValue::new(SignalTypes::Memo(value.into())),
#[cfg(debug_assertions)]
defined_at: std::panic::Location::caller(),
}
@@ -634,7 +634,7 @@ pub mod read {
#[track_caller]
fn from(value: Memo<T, LocalStorage>) -> Self {
Self {
inner: ArenaItem::new_local(SignalTypes::Memo(value.into())),
inner: StoredValue::new_local(SignalTypes::Memo(value.into())),
#[cfg(debug_assertions)]
defined_at: std::panic::Location::caller(),
}
@@ -1246,7 +1246,7 @@ pub mod read {
/// Types that abstract over the ability to update a signal.
pub mod write {
use crate::{
owner::{ArenaItem, Storage, SyncStorage},
owner::{Storage, StoredValue, SyncStorage},
signal::{ArcRwSignal, ArcWriteSignal, RwSignal, WriteSignal},
traits::Set,
};
@@ -1341,7 +1341,7 @@ pub mod write {
SignalSetterTypes::Default => {}
SignalSetterTypes::Write(w) => w.set(new_value),
SignalSetterTypes::Mapped(s) => {
s.try_with_value(|setter| setter(new_value));
s.with_value(|setter| setter(new_value))
}
}
}
@@ -1371,9 +1371,9 @@ pub mod write {
#[track_caller]
pub fn map(mapped_setter: impl Fn(T) + Send + Sync + 'static) -> Self {
Self {
inner: SignalSetterTypes::Mapped(ArenaItem::new_with_storage(
Box::new(mapped_setter),
)),
inner: SignalSetterTypes::Mapped(
StoredValue::new_with_storage(Box::new(mapped_setter)),
),
#[cfg(debug_assertions)]
defined_at: std::panic::Location::caller(),
}
@@ -1411,7 +1411,7 @@ pub mod write {
T: 'static,
{
Write(WriteSignal<T, S>),
Mapped(ArenaItem<Box<dyn Fn(T) + Send + Sync>, S>),
Mapped(StoredValue<Box<dyn Fn(T) + Send + Sync>, S>),
Default,
}
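SignalSetter abstracts over "something you can set": either a real WriteSignal or, as in the Mapped arm above, a boxed closure that receives the new value. A short sketch of the mapped form, assuming SignalSetter lives at reactive_graph::wrappers::write and implements the Set trait as the dispatch code above suggests:

use reactive_graph::signal::RwSignal;
use reactive_graph::traits::{Get, Set};
use reactive_graph::wrappers::write::SignalSetter;

fn signal_setter_sketch() {
    let doubled = RwSignal::new(0);
    // setting this setter just runs the closure with the new value
    let set_doubled = SignalSetter::map(move |n: i32| doubled.set(n * 2));
    set_doubled.set(4);
    assert_eq!(doubled.get(), 8);
}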

View File

@@ -1,6 +1,6 @@
[package]
name = "reactive_stores"
version = "0.1.0-beta6"
version = "0.1.0-beta4"
rust-version.workspace = true
edition.workspace = true

View File

@@ -1 +0,0 @@
extend = { path = "../cargo-make/main.toml" }

View File

@@ -1,14 +1,14 @@
use crate::{
path::{StorePath, StorePathSegment},
AtIndex, AtKeyed, KeyMap, KeyedSubfield, StoreField, StoreFieldTrigger,
Subfield,
AtIndex, StoreField, Subfield,
};
use reactive_graph::traits::{
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
use reactive_graph::{
signal::ArcTrigger,
traits::{
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
},
};
use std::{
fmt::Debug,
hash::Hash,
ops::{Deref, DerefMut, IndexMut},
panic::Location,
sync::Arc,
@@ -21,11 +21,10 @@ where
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
path: StorePath,
trigger: StoreFieldTrigger,
get_trigger: Arc<dyn Fn(StorePath) -> StoreFieldTrigger + Send + Sync>,
trigger: ArcTrigger,
get_trigger: Arc<dyn Fn(StorePath) -> ArcTrigger + Send + Sync>,
read: Arc<dyn Fn() -> Option<StoreFieldReader<T>> + Send + Sync>,
write: Arc<dyn Fn() -> Option<StoreFieldWriter<T>> + Send + Sync>,
keys: Arc<dyn Fn() -> Option<KeyMap> + Send + Sync>,
}
pub struct StoreFieldReader<T>(Box<dyn Deref<Target = T>>);
@@ -77,7 +76,7 @@ impl<T> StoreField for ArcField<T> {
type Reader = StoreFieldReader<T>;
type Writer = StoreFieldWriter<T>;
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
(self.get_trigger)(path)
}
@@ -92,10 +91,6 @@ impl<T> StoreField for ArcField<T> {
fn writer(&self) -> Option<Self::Writer> {
(self.write)().map(StoreFieldWriter::new)
}
fn keys(&self) -> Option<KeyMap> {
(self.keys)()
}
}
impl<Inner, Prev, T> From<Subfield<Inner, Prev, T>> for ArcField<T>
@@ -124,10 +119,6 @@ where
let value = value.clone();
move || value.writer().map(StoreFieldWriter::new)
}),
keys: Arc::new({
let value = value.clone();
move || value.keys()
}),
}
}
}
@@ -158,48 +149,6 @@ where
let value = value.clone();
move || value.writer().map(StoreFieldWriter::new)
}),
keys: Arc::new({
let value = value.clone();
move || value.keys()
}),
}
}
}
impl<Inner, Prev, K, T> From<AtKeyed<Inner, Prev, K, T>> for ArcField<T::Output>
where
AtKeyed<Inner, Prev, K, T>: Clone,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev> + Send + Sync + 'static,
Prev: 'static,
T: IndexMut<usize> + 'static,
T::Output: Sized,
{
#[track_caller]
fn from(value: AtKeyed<Inner, Prev, K, T>) -> Self {
ArcField {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
path: value.path().into_iter().collect(),
trigger: value.get_trigger(value.path().into_iter().collect()),
get_trigger: Arc::new({
let value = value.clone();
move |path| value.get_trigger(path)
}),
read: Arc::new({
let value = value.clone();
move || value.reader().map(StoreFieldReader::new)
}),
write: Arc::new({
let value = value.clone();
move || value.writer().map(StoreFieldWriter::new)
}),
keys: Arc::new({
let value = value.clone();
move || value.keys()
}),
}
}
}
@@ -214,7 +163,6 @@ impl<T> Clone for ArcField<T> {
get_trigger: Arc::clone(&self.get_trigger),
read: Arc::clone(&self.read),
write: Arc::clone(&self.write),
keys: Arc::clone(&self.keys),
}
}
}
@@ -234,14 +182,13 @@ impl<T> DefinedAt for ArcField<T> {
impl<T> Notify for ArcField<T> {
fn notify(&self) {
self.trigger.this.notify();
self.trigger.notify();
}
}
impl<T> Track for ArcField<T> {
fn track(&self) {
self.trigger.this.track();
self.trigger.children.track();
self.trigger.track();
}
}

View File

@@ -1,15 +1,15 @@
use crate::{
arc_field::{StoreFieldReader, StoreFieldWriter},
path::{StorePath, StorePathSegment},
ArcField, AtIndex, AtKeyed, KeyMap, KeyedSubfield, StoreField,
StoreFieldTrigger, Subfield,
ArcField, AtIndex, StoreField, Subfield,
};
use reactive_graph::{
owner::{ArenaItem, Storage, SyncStorage},
owner::{Storage, StoredValue, SyncStorage},
signal::ArcTrigger,
traits::{DefinedAt, IsDisposed, Notify, ReadUntracked, Track},
unwrap_signal,
};
use std::{fmt::Debug, hash::Hash, ops::IndexMut, panic::Location};
use std::{ops::IndexMut, panic::Location};
pub struct Field<T, S = SyncStorage>
where
@@ -17,7 +17,7 @@ where
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: ArenaItem<ArcField<T>, S>,
inner: StoredValue<ArcField<T>, S>,
}
impl<T, S> StoreField for Field<T, S>
@@ -28,7 +28,7 @@ where
type Reader = StoreFieldReader<T>;
type Writer = StoreFieldWriter<T>;
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner
.try_get_value()
.map(|inner| inner.get_trigger(path))
@@ -49,10 +49,6 @@ where
fn writer(&self) -> Option<Self::Writer> {
self.inner.try_get_value().and_then(|inner| inner.writer())
}
fn keys(&self) -> Option<KeyMap> {
self.inner.try_get_value().and_then(|n| n.keys())
}
}
impl<Inner, Prev, T, S> From<Subfield<Inner, Prev, T>> for Field<T, S>
@@ -68,7 +64,7 @@ where
Field {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value.into()),
inner: StoredValue::new_with_storage(value.into()),
}
}
}
@@ -86,30 +82,7 @@ where
Field {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value.into()),
}
}
}
impl<Inner, Prev, K, T, S> From<AtKeyed<Inner, Prev, K, T>>
for Field<T::Output, S>
where
S: Storage<ArcField<T::Output>>,
AtKeyed<Inner, Prev, K, T>: Clone,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev> + Send + Sync + 'static,
Prev: 'static,
T: IndexMut<usize> + 'static,
T::Output: Sized,
{
#[track_caller]
fn from(value: AtKeyed<Inner, Prev, K, T>) -> Self {
Field {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(value.into()),
inner: StoredValue::new_with_storage(value.into()),
}
}
}
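ArcField/Field type-erase a concrete lens (Subfield, AtIndex, and on one side of this compare AtKeyed) so fields of the same value type can be stored or passed uniformly, e.g. as a component prop. A sketch under the assumption that the From impls above accept a hand-rolled Subfield; the constructor arguments mirror the ones used by OptionStoreExt::unwrap further down in this diff:

use reactive_stores::{Field, Store, Subfield};

struct User {
    name: String,
}

fn name_field(store: Store<User>) -> Field<String> {
    // a hand-written lens onto `name` (the derive macro normally generates this);
    // `.into()` then erases its concrete type via the From impls in this hunk
    Subfield::new(store, 0.into(), |u: &User| &u.name, |u: &mut User| &mut u.name).into()
}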

View File

@@ -1,7 +1,6 @@
use crate::{
path::{StorePath, StorePathSegment},
store_field::StoreField,
KeyMap, StoreFieldTrigger,
};
use reactive_graph::{
signal::{
@@ -21,7 +20,10 @@ use std::{
};
#[derive(Debug)]
pub struct AtIndex<Inner, Prev> {
pub struct AtIndex<Inner, Prev>
where
Inner: StoreField<Value = Prev>,
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: Inner,
@@ -31,7 +33,7 @@ pub struct AtIndex<Inner, Prev> {
impl<Inner, Prev> Clone for AtIndex<Inner, Prev>
where
Inner: Clone,
Inner: StoreField<Value = Prev> + Clone,
{
fn clone(&self) -> Self {
Self {
@@ -44,9 +46,15 @@ where
}
}
impl<Inner, Prev> Copy for AtIndex<Inner, Prev> where Inner: Copy {}
impl<Inner, Prev> Copy for AtIndex<Inner, Prev> where
Inner: StoreField<Value = Prev> + Copy
{
}
impl<Inner, Prev> AtIndex<Inner, Prev> {
impl<Inner, Prev> AtIndex<Inner, Prev>
where
Inner: StoreField<Value = Prev>,
{
#[track_caller]
pub fn new(inner: Inner, index: usize) -> Self {
Self {
@@ -77,7 +85,7 @@ where
.chain(iter::once(self.index.into()))
}
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner.get_trigger(path)
}
@@ -93,7 +101,7 @@ where
fn writer(&self) -> Option<Self::Writer> {
let trigger = self.get_trigger(self.path().into_iter().collect());
let inner = WriteGuard::new(trigger.children, self.inner.writer()?);
let inner = WriteGuard::new(trigger, self.inner.writer()?);
let index = self.index;
Some(MappedMutArc::new(
inner,
@@ -101,11 +109,6 @@ where
move |n| &mut n[index],
))
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<Inner, Prev> DefinedAt for AtIndex<Inner, Prev>
@@ -141,7 +144,7 @@ where
{
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.notify();
trigger.notify();
}
}
@@ -153,8 +156,7 @@ where
{
fn track(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.track();
trigger.children.track();
trigger.track();
}
}
@@ -193,32 +195,20 @@ where
}
}
pub trait StoreFieldIterator<Prev>
where
Self: StoreField<Value = Prev>,
{
fn at(self, index: usize) -> AtIndex<Self, Prev>;
pub trait StoreFieldIterator<Prev>: Sized {
fn iter(self) -> StoreFieldIter<Self, Prev>;
}
impl<Inner, Prev> StoreFieldIterator<Prev> for Inner
where
Inner: StoreField<Value = Prev> + Clone,
Inner: StoreField<Value = Prev>,
Prev::Output: Sized,
Prev: IndexMut<usize> + AsRef<[Prev::Output]>,
{
#[track_caller]
fn at(self, index: usize) -> AtIndex<Inner, Prev> {
AtIndex::new(self.clone(), index)
}
#[track_caller]
fn iter(self) -> StoreFieldIter<Inner, Prev> {
// reactively track changes to this field
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.track();
trigger.children.track();
trigger.track();
// get the current length of the field by accessing slice
let len = self.reader().map(|n| n.as_ref().len()).unwrap_or(0);
@@ -250,7 +240,13 @@ where
fn next(&mut self) -> Option<Self::Item> {
if self.idx < self.len {
let field = AtIndex::new(self.inner.clone(), self.idx);
let field = AtIndex {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
index: self.idx,
inner: self.inner.clone(),
ty: PhantomData,
};
self.idx += 1;
Some(field)
} else {
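StoreFieldIterator turns a Vec-valued field into a sequence of per-element fields: iter() tracks the field's trigger once and then yields one AtIndex per slot, and at(index), present on the side of this compare that adds it, addresses a single slot. A usage sketch, assuming the derive macro is imported the way the test modules in this diff do and that Todos/Todo look like the test types:

use reactive_graph::traits::Read;
use reactive_stores::{Store, StoreFieldIterator};
use reactive_stores_macro::Store; // derive macro

#[derive(Store)]
struct Todos {
    todos: Vec<Todo>,
}

#[derive(Store)]
struct Todo {
    label: String,
}

fn iterate_todos(store: Store<Todos>) {
    for todo in store.todos().iter() {
        // each `todo` is an AtIndex lens; reading it tracks only that slot
        println!("{}", &*todo.label().read());
    }
    let second = store.todos().at(1);
    println!("{}", &*second.label().read());
}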

View File

@@ -1,684 +0,0 @@
use crate::{
path::{StorePath, StorePathSegment},
store_field::StoreField,
KeyMap, StoreFieldTrigger,
};
use reactive_graph::{
signal::{
guards::{Mapped, MappedMut, MappedMutArc, WriteGuard},
ArcTrigger,
},
traits::{
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
Writeable,
},
};
use std::{
collections::VecDeque,
fmt::Debug,
hash::Hash,
iter,
ops::{Deref, DerefMut, IndexMut},
panic::Location,
};
#[derive(Debug)]
pub struct KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
path_segment: StorePathSegment,
inner: Inner,
read: fn(&Prev) -> &T,
write: fn(&mut Prev) -> &mut T,
key_fn: fn(<&T as IntoIterator>::Item) -> K,
}
impl<Inner, Prev, K, T> Clone for KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: Clone,
{
fn clone(&self) -> Self {
Self {
#[cfg(debug_assertions)]
defined_at: self.defined_at,
path_segment: self.path_segment,
inner: self.inner.clone(),
read: self.read,
write: self.write,
key_fn: self.key_fn,
}
}
}
impl<Inner, Prev, K, T> Copy for KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: Copy,
{
}
impl<Inner, Prev, K, T> KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
#[track_caller]
pub fn new(
inner: Inner,
path_segment: StorePathSegment,
key_fn: fn(<&T as IntoIterator>::Item) -> K,
read: fn(&Prev) -> &T,
write: fn(&mut Prev) -> &mut T,
) -> Self {
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner,
path_segment,
read,
write,
key_fn,
}
}
}
impl<Inner, Prev, K, T> StoreField for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
type Value = T;
type Reader = Mapped<Inner::Reader, T>;
type Writer = MappedMut<WriteGuard<ArcTrigger, Inner::Writer>, T>;
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner
.path()
.into_iter()
.chain(iter::once(self.path_segment))
}
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger(path)
}
fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
Some(Mapped::new_with_guard(inner, self.read))
}
fn writer(&self) -> Option<Self::Writer> {
let path = self.path().into_iter().collect::<StorePath>();
let trigger = self.get_trigger(path.clone());
let guard = WriteGuard::new(trigger.children, self.inner.writer()?);
Some(MappedMut::new(guard, self.read, self.write))
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<Inner, Prev, K, T> KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn latest_keys(&self) -> Vec<K> {
self.reader()
.expect("trying to update keys")
.deref()
.into_iter()
.map(|n| (self.key_fn)(n))
.collect()
}
}
pub struct KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
inner: KeyedSubfield<Inner, Prev, K, T>,
guard: Option<Guard>,
}
impl<Inner, Prev, K, T, Guard> Deref
for KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
Guard: Deref,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
type Target = Guard::Target;
fn deref(&self) -> &Self::Target {
self.guard
.as_ref()
.expect("should be Some(_) until dropped")
.deref()
}
}
impl<Inner, Prev, K, T, Guard> DerefMut
for KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
Guard: DerefMut,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn deref_mut(&mut self) -> &mut Self::Target {
self.guard
.as_mut()
.expect("should be Some(_) until dropped")
.deref_mut()
}
}
impl<Inner, Prev, K, T, Guard> UntrackableGuard
for KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
Guard: UntrackableGuard,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn untrack(&mut self) {
if let Some(inner) = self.guard.as_mut() {
inner.untrack();
}
}
}
impl<Inner, Prev, K, T, Guard> Drop
for KeyedSubfieldWriteGuard<Inner, Prev, K, T, Guard>
where
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn drop(&mut self) {
// dropping the inner guard will
// 1) synchronously release its write lock on the store's value
// 2) trigger an (asynchronous) reactive update
drop(self.guard.take());
// now that the write lock is released, we can get a read lock to refresh this keyed field
// based on the new value
self.inner.update_keys();
self.inner.notify();
// reactive updates happen on the next tick
}
}
impl<Inner, Prev, K, T> DefinedAt for KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
{
fn defined_at(&self) -> Option<&'static Location<'static>> {
#[cfg(debug_assertions)]
{
Some(self.defined_at)
}
#[cfg(not(debug_assertions))]
{
None
}
}
}
impl<Inner, Prev, K, T> IsDisposed for KeyedSubfield<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: IsDisposed,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
}
}
impl<Inner, Prev, K, T> Notify for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.notify();
trigger.children.notify();
}
}
impl<Inner, Prev, K, T> Track for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev> + Track + 'static,
Prev: 'static,
T: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
fn track(&self) {
let inner = self
.inner
.get_trigger(self.inner.path().into_iter().collect());
inner.this.track();
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.track();
trigger.children.track();
}
}
impl<Inner, Prev, K, T> ReadUntracked for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
type Value = <Self as StoreField>::Reader;
fn try_read_untracked(&self) -> Option<Self::Value> {
self.reader()
}
}
impl<Inner, Prev, K, T> Writeable for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
T: 'static,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
type Value = T;
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
let guard = self.writer()?;
Some(KeyedSubfieldWriteGuard {
inner: self.clone(),
guard: Some(guard),
})
}
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
let mut guard = self.writer()?;
guard.untrack();
Some(KeyedSubfieldWriteGuard {
inner: self.clone(),
guard: Some(guard),
})
}
}
#[derive(Debug)]
pub struct AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: KeyedSubfield<Inner, Prev, K, T>,
key: K,
}
impl<Inner, Prev, K, T> Clone for AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
KeyedSubfield<Inner, Prev, K, T>: Clone,
K: Debug + Clone,
{
fn clone(&self) -> Self {
Self {
#[cfg(debug_assertions)]
defined_at: self.defined_at,
inner: self.inner.clone(),
key: self.key.clone(),
}
}
}
impl<Inner, Prev, K, T> Copy for AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
KeyedSubfield<Inner, Prev, K, T>: Copy,
K: Debug + Copy,
{
}
impl<Inner, Prev, K, T> AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
#[track_caller]
pub fn new(inner: KeyedSubfield<Inner, Prev, K, T>, key: K) -> Self {
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner,
key,
}
}
}
impl<Inner, Prev, K, T> StoreField for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
type Value = T::Output;
type Reader = MappedMutArc<
<KeyedSubfield<Inner, Prev, K, T> as StoreField>::Reader,
T::Output,
>;
type Writer = WriteGuard<
ArcTrigger,
MappedMutArc<
<KeyedSubfield<Inner, Prev, K, T> as StoreField>::Writer,
T::Output,
>,
>;
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
let inner = self.inner.path().into_iter().collect::<StorePath>();
let keys = self
.inner
.keys()
.expect("using keys on a store with no keys");
let this = keys
.with_field_keys(
inner.clone(),
|keys| keys.get(&self.key),
|| self.inner.latest_keys(),
)
.flatten()
.map(|(path, _)| path);
inner.into_iter().chain(this)
}
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger(path)
}
fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
let inner_path = self.inner.path().into_iter().collect();
let keys = self
.inner
.keys()
.expect("using keys on a store with no keys");
let index = keys
.with_field_keys(
inner_path,
|keys| keys.get(&self.key),
|| self.inner.latest_keys(),
)
.flatten()
.map(|(_, idx)| idx)
.expect("reading from a keyed field that has not yet been created");
Some(MappedMutArc::new(
inner,
move |n| &n[index],
move |n| &mut n[index],
))
}
fn writer(&self) -> Option<Self::Writer> {
let inner = self.inner.writer()?;
let trigger = self.get_trigger(self.path().into_iter().collect());
let inner_path = self.inner.path().into_iter().collect::<StorePath>();
let keys = self
.inner
.keys()
.expect("using keys on a store with no keys");
let index = keys
.with_field_keys(
inner_path.clone(),
|keys| keys.get(&self.key),
|| self.inner.latest_keys(),
)
.flatten()
.map(|(_, idx)| idx)
.expect("reading from a keyed field that has not yet been created");
Some(WriteGuard::new(
trigger.children,
MappedMutArc::new(
inner,
move |n| &n[index],
move |n| &mut n[index],
),
))
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<Inner, Prev, K, T> DefinedAt for AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
{
fn defined_at(&self) -> Option<&'static Location<'static>> {
#[cfg(debug_assertions)]
{
Some(self.defined_at)
}
#[cfg(not(debug_assertions))]
{
None
}
}
}
impl<Inner, Prev, K, T> IsDisposed for AtKeyed<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
Inner: IsDisposed,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
}
}
impl<Inner, Prev, K, T> Notify for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.notify();
trigger.children.notify();
}
}
impl<Inner, Prev, K, T> Track for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
fn track(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.track();
trigger.children.track();
}
}
impl<Inner, Prev, K, T> ReadUntracked for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized,
{
type Value = <Self as StoreField>::Reader;
fn try_read_untracked(&self) -> Option<Self::Value> {
self.reader()
}
}
impl<Inner, Prev, K, T> Writeable for AtKeyed<Inner, Prev, K, T>
where
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
KeyedSubfield<Inner, Prev, K, T>: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
T: IndexMut<usize>,
T::Output: Sized + 'static,
{
type Value = T::Output;
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
self.writer()
}
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
self.writer().map(|mut writer| {
writer.untrack();
writer
})
}
}
impl<Inner, Prev, K, T> KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: StoreField<Value = Prev>,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
{
pub fn update_keys(&self) {
let inner_path = self.path().into_iter().collect();
let keys = self
.inner
.keys()
.expect("updating keys on a store with no keys");
keys.with_field_keys(
inner_path,
|keys| {
keys.update(self.latest_keys());
},
|| self.latest_keys(),
);
}
}
impl<Inner, Prev, K, T> IntoIterator for KeyedSubfield<Inner, Prev, K, T>
where
Self: Clone,
for<'a> &'a T: IntoIterator,
Inner: Clone + StoreField<Value = Prev> + 'static,
Prev: 'static,
K: Debug + Send + Sync + PartialEq + Eq + Hash + 'static,
T: IndexMut<usize> + 'static,
T::Output: Sized,
{
type Item = AtKeyed<Inner, Prev, K, T>;
type IntoIter = StoreFieldKeyedIter<Inner, Prev, K, T>;
#[track_caller]
fn into_iter(self) -> StoreFieldKeyedIter<Inner, Prev, K, T> {
// reactively track changes to this field
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.track();
// get the current length of the field by accessing slice
let reader = self
.reader()
.expect("creating iterator from unavailable store field");
let keys = reader
.into_iter()
.map(|item| (self.key_fn)(item))
.collect::<VecDeque<_>>();
// return the iterator
StoreFieldKeyedIter { inner: self, keys }
}
}
pub struct StoreFieldKeyedIter<Inner, Prev, K, T>
where
for<'a> &'a T: IntoIterator,
T: IndexMut<usize>,
{
inner: KeyedSubfield<Inner, Prev, K, T>,
keys: VecDeque<K>,
}
impl<Inner, Prev, K, T> Iterator for StoreFieldKeyedIter<Inner, Prev, K, T>
where
Inner: StoreField<Value = Prev> + Clone + 'static,
T: IndexMut<usize> + 'static,
T::Output: Sized + 'static,
for<'a> &'a T: IntoIterator,
{
type Item = AtKeyed<Inner, Prev, K, T>;
fn next(&mut self) -> Option<Self::Item> {
self.keys
.pop_front()
.map(|key| AtKeyed::new(self.inner.clone(), key))
}
}
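This keyed.rs (removed on one side of this compare, present on the other) is what backs keyed collection fields: a KeyedSubfield identifies rows by a key function rather than by index, so an AtKeyed lens keeps pointing at the same logical row when the collection is reordered, and its write guard refreshes the key map on drop. A sketch of how it is meant to be used on the side that has it; the attribute syntax is inferred from the macro changes further down (key: <Type> = <closure>) and should be treated as an assumption:

use reactive_graph::traits::Read;
use reactive_stores::Store;
use reactive_stores_macro::Store; // derive macro

#[derive(Store)]
struct Todos {
    // rows are identified by `id`, so lenses follow their row across reorders
    #[store(key: usize = |todo| todo.id)]
    todos: Vec<Todo>,
}

#[derive(Store)]
struct Todo {
    id: usize,
    label: String,
}

fn keyed_sketch(store: Store<Todos>) {
    // iterating a keyed field yields AtKeyed lenses, one per current key
    for todo in store.todos() {
        println!("{}: {}", &*todo.id().read(), &*todo.label().read());
    }
}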

View File

@@ -1,22 +1,14 @@
use or_poisoned::OrPoisoned;
use reactive_graph::{
owner::{ArenaItem, LocalStorage, Storage, SyncStorage},
owner::{LocalStorage, Storage, StoredValue, SyncStorage},
signal::{
guards::{Plain, ReadGuard, WriteGuard},
guards::{Plain, ReadGuard},
ArcTrigger,
},
traits::{
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
Writeable,
},
traits::{DefinedAt, IsDisposed, Notify, ReadUntracked, Track},
};
use rustc_hash::FxHashMap;
use std::{
any::Any,
collections::HashMap,
fmt::Debug,
hash::Hash,
ops::DerefMut,
panic::Location,
sync::{Arc, RwLock},
};
@@ -24,8 +16,6 @@ use std::{
mod arc_field;
mod field;
mod iter;
mod keyed;
mod option;
mod patch;
mod path;
mod store_field;
@@ -34,158 +24,36 @@ mod subfield;
pub use arc_field::ArcField;
pub use field::Field;
pub use iter::*;
pub use keyed::*;
pub use option::*;
pub use patch::*;
pub use path::{StorePath, StorePathSegment};
pub use store_field::{StoreField, Then};
use path::StorePath;
pub use store_field::StoreField;
pub use subfield::Subfield;
#[derive(Debug, Default)]
struct TriggerMap(FxHashMap<StorePath, StoreFieldTrigger>);
#[derive(Debug, Clone, Default)]
pub struct StoreFieldTrigger {
pub this: ArcTrigger,
pub children: ArcTrigger,
}
impl StoreFieldTrigger {
pub fn new() -> Self {
Self::default()
}
}
struct TriggerMap(FxHashMap<StorePath, ArcTrigger>);
impl TriggerMap {
fn get_or_insert(&mut self, key: StorePath) -> StoreFieldTrigger {
fn get_or_insert(&mut self, key: StorePath) -> ArcTrigger {
if let Some(trigger) = self.0.get(&key) {
trigger.clone()
} else {
let new = StoreFieldTrigger::new();
let new = ArcTrigger::new();
self.0.insert(key, new.clone());
new
}
}
#[allow(unused)]
fn remove(&mut self, key: &StorePath) -> Option<StoreFieldTrigger> {
fn remove(&mut self, key: &StorePath) -> Option<ArcTrigger> {
self.0.remove(key)
}
}
pub struct FieldKeys<K> {
spare_keys: Vec<StorePathSegment>,
current_key: usize,
keys: FxHashMap<K, (StorePathSegment, usize)>,
}
impl<K> FieldKeys<K>
where
K: Debug + Hash + PartialEq + Eq,
{
pub fn new(from_keys: Vec<K>) -> Self {
let mut keys = FxHashMap::with_capacity_and_hasher(
from_keys.len(),
Default::default(),
);
for (idx, key) in from_keys.into_iter().enumerate() {
let segment = idx.into();
keys.insert(key, (segment, idx));
}
Self {
spare_keys: Vec::new(),
current_key: 0,
keys,
}
}
}
impl<K> FieldKeys<K>
where
K: Hash + PartialEq + Eq,
{
pub fn get(&self, key: &K) -> Option<(StorePathSegment, usize)> {
self.keys.get(key).copied()
}
fn next_key(&mut self) -> StorePathSegment {
self.spare_keys.pop().unwrap_or_else(|| {
self.current_key += 1;
self.current_key.into()
})
}
pub fn update(&mut self, iter: impl IntoIterator<Item = K>) {
let new_keys = iter
.into_iter()
.enumerate()
.map(|(idx, key)| (key, idx))
.collect::<FxHashMap<K, usize>>();
// remove old keys and recycle the slots
self.keys.retain(|key, old_entry| match new_keys.get(key) {
Some(idx) => {
old_entry.1 = *idx;
true
}
None => {
self.spare_keys.push(old_entry.0);
false
}
});
// add new keys
for (key, idx) in new_keys {
// the suggestion doesn't compile because we need &mut for self.next_key(),
// and we don't want to call that until after the check
#[allow(clippy::map_entry)]
if !self.keys.contains_key(&key) {
let path = self.next_key();
self.keys.insert(key, (path, idx));
}
}
}
}
impl<K> Default for FieldKeys<K> {
fn default() -> Self {
Self {
spare_keys: Default::default(),
current_key: Default::default(),
keys: Default::default(),
}
}
}
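FieldKeys is the bookkeeping behind keyed fields: it maps each user key to a stable StorePathSegment plus the row's current index, and recycles the segments of removed rows through spare_keys so a new row can reuse a freed slot. A standalone sketch of that behaviour, on the side of this compare that has key support (FieldKeys is declared pub at the crate root, so this assumes it stays publicly reachable):

use reactive_stores::FieldKeys;

fn field_keys_sketch() {
    // rows keyed 10, 20, 30 start at indices 0, 1, 2
    let mut keys = FieldKeys::new(vec![10, 20, 30]);
    assert_eq!(keys.get(&20).map(|(_, idx)| idx), Some(1));

    // row 20 disappears and row 40 appears: 20's path segment goes into spare_keys
    // for reuse, and the surviving rows get their indices refreshed
    keys.update(vec![10, 30, 40]);
    assert_eq!(keys.get(&30).map(|(_, idx)| idx), Some(1));
    assert!(keys.get(&20).is_none());
}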
#[derive(Default, Clone)]
pub struct KeyMap(Arc<RwLock<HashMap<StorePath, Box<dyn Any + Send + Sync>>>>);
impl KeyMap {
pub fn with_field_keys<K, T>(
&self,
path: StorePath,
fun: impl FnOnce(&mut FieldKeys<K>) -> T,
initialize: impl FnOnce() -> Vec<K>,
) -> Option<T>
where
K: Debug + Hash + PartialEq + Eq + Send + Sync + 'static,
{
let mut guard = self.0.write().or_poisoned();
let entry = guard
.entry(path)
.or_insert_with(|| Box::new(FieldKeys::new(initialize())));
let entry = entry.downcast_mut::<FieldKeys<K>>()?;
Some(fun(entry))
}
}
pub struct ArcStore<T> {
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
pub(crate) value: Arc<RwLock<T>>,
signals: Arc<RwLock<TriggerMap>>,
keys: KeyMap,
}
impl<T> ArcStore<T> {
@@ -195,7 +63,7 @@ impl<T> ArcStore<T> {
defined_at: Location::caller(),
value: Arc::new(RwLock::new(value)),
signals: Default::default(),
keys: Default::default(),
/* inner: Arc::new(RwLock::new(SubscriberSet::new())), */
}
}
}
@@ -218,7 +86,6 @@ impl<T> Clone for ArcStore<T> {
defined_at: self.defined_at,
value: Arc::clone(&self.value),
signals: Arc::clone(&self.signals),
keys: self.keys.clone(),
}
}
}
@@ -254,46 +121,22 @@ where
}
}
impl<T> Writeable for ArcStore<T>
where
T: 'static,
{
type Value = T;
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
self.writer()
.map(|writer| WriteGuard::new(self.clone(), writer))
}
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
let mut writer = self.writer()?;
writer.untrack();
Some(writer)
}
}
impl<T: 'static> Track for ArcStore<T> {
fn track(&self) {
let trigger = self.get_trigger(Default::default());
trigger.this.track();
trigger.children.track();
self.get_trigger(Default::default()).notify();
}
}
impl<T: 'static> Notify for ArcStore<T> {
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.notify();
trigger.children.notify();
self.get_trigger(self.path().into_iter().collect()).notify();
}
}
pub struct Store<T, S = SyncStorage> {
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
inner: ArenaItem<ArcStore<T>, S>,
inner: StoredValue<ArcStore<T>, S>,
}
impl<T> Store<T>
@@ -304,7 +147,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcStore::new(value)),
inner: StoredValue::new_with_storage(ArcStore::new(value)),
}
}
}
@@ -317,7 +160,7 @@ where
Self {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: ArenaItem::new_with_storage(ArcStore::new(value)),
inner: StoredValue::new_with_storage(ArcStore::new(value)),
}
}
}
@@ -375,27 +218,7 @@ where
fn try_read_untracked(&self) -> Option<Self::Value> {
self.inner
.try_get_value()
.and_then(|inner| inner.try_read_untracked())
}
}
impl<T, S> Writeable for Store<T, S>
where
T: 'static,
S: Storage<ArcStore<T>>,
{
type Value = T;
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
self.writer().map(|writer| WriteGuard::new(*self, writer))
}
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
let mut writer = self.writer()?;
writer.untrack();
Some(writer)
.map(|inner| inner.read_untracked())
}
}
@@ -440,13 +263,13 @@ mod tests {
tokio::time::sleep(std::time::Duration::from_micros(1)).await;
}
#[derive(Debug, Store, Patch, Default)]
#[derive(Debug, Store, Patch)]
struct Todos {
user: String,
todos: Vec<Todo>,
}
#[derive(Debug, Store, Patch, Default)]
#[derive(Debug, Store, Patch)]
struct Todo {
label: String,
completed: bool,
@@ -512,6 +335,18 @@ mod tests {
tick().await;
// the effect reads from `user`, so it should trigger every time
assert_eq!(combined_count.load(Ordering::Relaxed), 4);
store
.todos()
.write()
.push(Todo::new("Create reactive stores"));
tick().await;
store.todos().write().push(Todo::new("???"));
tick().await;
store.todos().write().push(Todo::new("Profit!"));
tick().await;
// the effect doesn't read from `todos`, so the count should not have changed
assert_eq!(combined_count.load(Ordering::Relaxed), 4);
}
#[tokio::test]
@@ -542,72 +377,10 @@ mod tests {
tick().await;
store.user().update(|name| name.push_str("!!!"));
tick().await;
// the effect reads from `todos`, so it shouldn't trigger every time
// the effect reads from `user`, so it should trigger every time
assert_eq!(combined_count.load(Ordering::Relaxed), 1);
}
#[tokio::test]
async fn parent_does_notify() {
_ = any_spawner::Executor::init_tokio();
let combined_count = Arc::new(AtomicUsize::new(0));
let store = Store::new(data());
Effect::new_sync({
let combined_count = Arc::clone(&combined_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("first run");
} else {
println!("next run");
}
println!("{:?}", *store.todos().read());
combined_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
tick().await;
store.set(Todos::default());
tick().await;
store.set(data());
tick().await;
assert_eq!(combined_count.load(Ordering::Relaxed), 3);
}
#[tokio::test]
async fn changes_do_notify_parent() {
_ = any_spawner::Executor::init_tokio();
let combined_count = Arc::new(AtomicUsize::new(0));
let store = Store::new(data());
Effect::new_sync({
let combined_count = Arc::clone(&combined_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("first run");
} else {
println!("next run");
}
println!("{:?}", *store.read());
combined_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
tick().await;
store.user().set("Greg".into());
tick().await;
store.user().set("Carol".into());
tick().await;
store.user().update(|name| name.push_str("!!!"));
tick().await;
store.todos().write().clear();
tick().await;
assert_eq!(combined_count.load(Ordering::Relaxed), 5);
}
#[tokio::test]
async fn iterator_tracks_the_field() {
_ = any_spawner::Executor::init_tokio();
@@ -689,9 +462,4 @@ mod tests {
tick().await;
assert_eq!(combined_count.load(Ordering::Relaxed), 2);
}
#[derive(Debug, Store)]
pub struct StructWithOption {
opt_field: Option<Todo>,
}
}

View File

@@ -1,210 +0,0 @@
use crate::{StoreField, Subfield};
use reactive_graph::traits::Read;
use std::ops::Deref;
pub trait OptionStoreExt
where
Self: StoreField<Value = Option<Self::Output>>,
{
type Output;
fn unwrap(self) -> Subfield<Self, Option<Self::Output>, Self::Output>;
fn map<U>(
self,
map_fn: impl FnOnce(Subfield<Self, Option<Self::Output>, Self::Output>) -> U,
) -> Option<U>;
}
impl<T, S> OptionStoreExt for S
where
S: StoreField<Value = Option<T>> + Read,
<S as Read>::Value: Deref<Target = Option<T>>,
{
type Output = T;
fn unwrap(self) -> Subfield<Self, Option<Self::Output>, Self::Output> {
Subfield::new(
self,
0.into(),
|t| t.as_ref().unwrap(),
|t| t.as_mut().unwrap(),
)
}
fn map<U>(
self,
map_fn: impl FnOnce(Subfield<S, Option<T>, T>) -> U,
) -> Option<U> {
if self.read().is_some() {
Some(map_fn(self.unwrap()))
} else {
None
}
}
}
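OptionStoreExt makes Option-valued fields ergonomic: unwrap() builds a lens that assumes the field is currently Some (and will panic on access otherwise), while map() first checks is_some() and only then hands the unwrapped lens to the closure, so it is the non-panicking path. A brief sketch, assuming the derive macro import used by the test modules in this diff:

use reactive_graph::traits::Read;
use reactive_stores::{OptionStoreExt, Store};
use reactive_stores_macro::Store; // derive macro

#[derive(Store)]
struct User {
    name: Option<String>,
}

fn option_sketch(store: Store<User>) {
    // Some(len) when `name` is currently Some, None otherwise; no panic either way
    let len = store.name().map(|name| name.read().len());
    println!("{len:?}");
}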
#[cfg(test)]
mod tests {
use crate::{self as reactive_stores, Store};
use reactive_graph::{
effect::Effect,
traits::{Get, Read, ReadUntracked, Set, Writeable},
};
use reactive_stores_macro::Store;
use std::sync::{
atomic::{AtomicUsize, Ordering},
Arc,
};
pub async fn tick() {
tokio::time::sleep(std::time::Duration::from_micros(1)).await;
}
#[derive(Debug, Clone, Store)]
pub struct User {
pub name: Option<Name>,
}
#[derive(Debug, Clone, Store)]
pub struct Name {
pub first_name: Option<String>,
}
#[tokio::test]
async fn substores_reachable_through_option() {
use crate::OptionStoreExt;
_ = any_spawner::Executor::init_tokio();
let combined_count = Arc::new(AtomicUsize::new(0));
let store = Store::new(User { name: None });
Effect::new_sync({
let combined_count = Arc::clone(&combined_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("first run");
} else {
println!("next run");
}
if store.name().read().is_some() {
println!(
"inner value = {:?}",
*store.name().unwrap().first_name().read()
);
} else {
println!("no inner value");
}
combined_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
store.name().set(Some(Name {
first_name: Some("Greg".into()),
}));
tick().await;
store.name().set(None);
tick().await;
store.name().set(Some(Name {
first_name: Some("Bob".into()),
}));
tick().await;
store
.name()
.unwrap()
.first_name()
.write()
.as_mut()
.unwrap()
.push_str("!!!");
tick().await;
assert_eq!(combined_count.load(Ordering::Relaxed), 5);
assert_eq!(
store
.name()
.read_untracked()
.as_ref()
.unwrap()
.first_name
.as_ref()
.unwrap(),
"Bob!!!"
);
}
#[tokio::test]
async fn mapping_over_optional_store_field() {
use crate::OptionStoreExt;
_ = any_spawner::Executor::init_tokio();
let parent_count = Arc::new(AtomicUsize::new(0));
let inner_count = Arc::new(AtomicUsize::new(0));
let store = Store::new(User { name: None });
Effect::new_sync({
let parent_count = Arc::clone(&parent_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("parent: first run");
} else {
println!("parent: next run");
}
println!(" is_some = {}", store.name().read().is_some());
parent_count.fetch_add(1, Ordering::Relaxed);
}
});
Effect::new_sync({
let inner_count = Arc::clone(&inner_count);
move |prev: Option<()>| {
if prev.is_none() {
println!("inner: first run");
} else {
println!("inner: next run");
}
println!(
"store inner value length = {:?}",
store.name().map(|inner| inner
.first_name()
.get()
.unwrap_or_default()
.len())
);
inner_count.fetch_add(1, Ordering::Relaxed);
}
});
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 1);
assert_eq!(inner_count.load(Ordering::Relaxed), 1);
store.name().set(Some(Name {
first_name: Some("Greg".into()),
}));
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 2);
assert_eq!(inner_count.load(Ordering::Relaxed), 2);
println!("\nUpdating first name only");
store
.name()
.unwrap()
.first_name()
.write()
.as_mut()
.unwrap()
.push_str("!!!");
tick().await;
assert_eq!(parent_count.load(Ordering::Relaxed), 3);
assert_eq!(inner_count.load(Ordering::Relaxed), 3);
}
}

View File

@@ -27,13 +27,13 @@ where
type Value = T::Value;
fn patch(&self, new: Self::Value) {
let path = self.path().into_iter().collect::<StorePath>();
let path = StorePath::default();
if let Some(mut writer) = self.writer() {
// don't track the writer for the whole store
writer.untrack();
let mut notify = |path: &StorePath| {
self.get_trigger(path.to_owned()).this.notify();
self.get_trigger(path.to_owned()).children.notify();
println!("notifying on {path:?}");
self.get_trigger(path.to_owned()).notify();
};
writer.patch_field(new, &path, &mut notify);
}
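Patching applies a plain new value to the store and, via patch_field, notifies only the paths whose values actually changed, instead of invalidating the whole tree. A sketch of the intended call site; the trait and method names here (reactive_stores::Patch, store.patch) are assumptions based on the Patch derive used in the tests, since this compare does not show the trait definition itself:

use reactive_stores::{Patch, Store};
use reactive_stores_macro::{Patch, Store}; // derive macros

#[derive(Store, Patch, Debug)]
struct Todos {
    user: String,
}

fn patch_sketch(store: Store<Todos>) {
    // only the triggers for fields that differ from the current value should fire
    store.patch(Todos { user: "Alice".into() });
}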

View File

@@ -1,63 +1,32 @@
use crate::{
path::{StorePath, StorePathSegment},
ArcStore, KeyMap, Store, StoreFieldTrigger,
ArcStore, Store,
};
use guardian::ArcRwLockWriteGuardian;
use or_poisoned::OrPoisoned;
use reactive_graph::{
owner::Storage,
signal::{
guards::{Mapped, MappedMut, Plain, UntrackedWriteGuard, WriteGuard},
guards::{Plain, WriteGuard},
ArcTrigger,
},
traits::{
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
Writeable,
},
traits::{DefinedAt, UntrackableGuard},
unwrap_signal,
};
use std::{
iter,
ops::{Deref, DerefMut},
panic::Location,
sync::Arc,
};
use std::{iter, ops::Deref, sync::Arc};
pub trait StoreField: Sized {
type Value;
type Reader: Deref<Target = Self::Value>;
type Writer: UntrackableGuard<Target = Self::Value>;
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger;
fn get_trigger(&self, path: StorePath) -> ArcTrigger;
fn path(&self) -> impl IntoIterator<Item = StorePathSegment>;
fn track_field(&self) {
let path = self.path().into_iter().collect();
let trigger = self.get_trigger(path);
trigger.this.track();
trigger.children.track();
}
fn reader(&self) -> Option<Self::Reader>;
fn writer(&self) -> Option<Self::Writer>;
fn keys(&self) -> Option<KeyMap>;
#[track_caller]
fn then<T>(
self,
map_fn: fn(&Self::Value) -> &T,
map_fn_mut: fn(&mut Self::Value) -> &mut T,
) -> Then<T, Self> {
Then {
#[cfg(debug_assertions)]
defined_at: Location::caller(),
inner: self,
map_fn,
map_fn_mut,
}
}
}
impl<T> StoreField for ArcStore<T>
@@ -66,9 +35,9 @@ where
{
type Value = T;
type Reader = Plain<T>;
type Writer = WriteGuard<ArcTrigger, UntrackedWriteGuard<T>>;
type Writer = WriteGuard<ArcTrigger, ArcRwLockWriteGuardian<T>>;
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
let triggers = &self.signals;
let trigger = triggers.write().or_poisoned().get_or_insert(path);
trigger
@@ -84,12 +53,9 @@ where
fn writer(&self) -> Option<Self::Writer> {
let trigger = self.get_trigger(Default::default());
let guard = UntrackedWriteGuard::try_new(Arc::clone(&self.value))?;
Some(WriteGuard::new(trigger.children, guard))
}
fn keys(&self) -> Option<KeyMap> {
Some(self.keys.clone())
let guard =
ArcRwLockWriteGuardian::take(Arc::clone(&self.value)).ok()?;
Some(WriteGuard::new(trigger, guard))
}
}
@@ -100,9 +66,9 @@ where
{
type Value = T;
type Reader = Plain<T>;
type Writer = WriteGuard<ArcTrigger, UntrackedWriteGuard<T>>;
type Writer = WriteGuard<ArcTrigger, ArcRwLockWriteGuardian<T>>;
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner
.try_get_value()
.map(|n| n.get_trigger(path))
@@ -123,151 +89,4 @@ where
fn writer(&self) -> Option<Self::Writer> {
self.inner.try_get_value().and_then(|n| n.writer())
}
fn keys(&self) -> Option<KeyMap> {
self.inner.try_get_value().and_then(|inner| inner.keys())
}
}
#[derive(Debug, Copy, Clone)]
pub struct Then<T, S>
where
S: StoreField,
{
inner: S,
map_fn: fn(&S::Value) -> &T,
map_fn_mut: fn(&mut S::Value) -> &mut T,
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
}
impl<T, S> Then<T, S>
where
S: StoreField,
{
#[track_caller]
pub fn new(
inner: S,
map_fn: fn(&S::Value) -> &T,
map_fn_mut: fn(&mut S::Value) -> &mut T,
) -> Self {
Self {
inner,
map_fn,
map_fn_mut,
#[cfg(debug_assertions)]
defined_at: Location::caller(),
}
}
}
impl<T, S> StoreField for Then<T, S>
where
S: StoreField,
{
type Value = T;
type Reader = Mapped<S::Reader, T>;
type Writer = MappedMut<S::Writer, T>;
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
self.inner.get_trigger(path)
}
fn path(&self) -> impl IntoIterator<Item = StorePathSegment> {
self.inner.path()
}
fn reader(&self) -> Option<Self::Reader> {
let inner = self.inner.reader()?;
Some(Mapped::new_with_guard(inner, self.map_fn))
}
fn writer(&self) -> Option<Self::Writer> {
let inner = self.inner.writer()?;
Some(MappedMut::new(inner, self.map_fn, self.map_fn_mut))
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<T, S> DefinedAt for Then<T, S>
where
S: StoreField,
{
fn defined_at(&self) -> Option<&'static Location<'static>> {
#[cfg(debug_assertions)]
{
Some(self.defined_at)
}
#[cfg(not(debug_assertions))]
{
None
}
}
}
impl<T, S> IsDisposed for Then<T, S>
where
S: StoreField + IsDisposed,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
}
}
impl<T, S> Notify for Then<T, S>
where
S: StoreField,
{
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.notify();
trigger.children.notify();
}
}
impl<T, S> Track for Then<T, S>
where
S: StoreField,
{
fn track(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.track();
trigger.children.track();
}
}
impl<T, S> ReadUntracked for Then<T, S>
where
S: StoreField,
{
type Value = <Self as StoreField>::Reader;
fn try_read_untracked(&self) -> Option<Self::Value> {
self.reader()
}
}
impl<T, S> Writeable for Then<T, S>
where
T: 'static,
S: StoreField,
{
type Value = T;
fn try_write(&self) -> Option<impl UntrackableGuard<Target = Self::Value>> {
self.writer()
}
fn try_write_untracked(
&self,
) -> Option<impl DerefMut<Target = Self::Value>> {
self.writer().map(|mut writer| {
writer.untrack();
writer
})
}
}
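Then (present on the side of this compare that exports it) is the ad-hoc counterpart to the derive macro: then() builds a lens from any store field out of two projection functions, without generating an extension trait. A usage sketch on that side:

use reactive_graph::traits::Read;
use reactive_stores::{Store, StoreField};

struct Settings {
    theme: String,
}

fn theme_len(store: Store<Settings>) -> usize {
    // same path and triggers as the parent field, but reads/writes project into `theme`
    let theme = store.then(|s: &Settings| &s.theme, |s: &mut Settings| &mut s.theme);
    theme.read().len()
}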

View File

@@ -1,7 +1,6 @@
use crate::{
path::{StorePath, StorePathSegment},
store_field::StoreField,
KeyMap, StoreFieldTrigger,
};
use reactive_graph::{
signal::{
@@ -16,7 +15,10 @@ use reactive_graph::{
use std::{iter, marker::PhantomData, ops::DerefMut, panic::Location};
#[derive(Debug)]
pub struct Subfield<Inner, Prev, T> {
pub struct Subfield<Inner, Prev, T>
where
Inner: StoreField<Value = Prev>,
{
#[cfg(debug_assertions)]
defined_at: &'static Location<'static>,
path_segment: StorePathSegment,
@@ -28,7 +30,7 @@ pub struct Subfield<Inner, Prev, T> {
impl<Inner, Prev, T> Clone for Subfield<Inner, Prev, T>
where
Inner: Clone,
Inner: StoreField<Value = Prev> + Clone,
{
fn clone(&self) -> Self {
Self {
@@ -43,9 +45,15 @@ where
}
}
impl<Inner, Prev, T> Copy for Subfield<Inner, Prev, T> where Inner: Copy {}
impl<Inner, Prev, T> Copy for Subfield<Inner, Prev, T> where
Inner: StoreField<Value = Prev> + Copy
{
}
impl<Inner, Prev, T> Subfield<Inner, Prev, T> {
impl<Inner, Prev, T> Subfield<Inner, Prev, T>
where
Inner: StoreField<Value = Prev>,
{
#[track_caller]
pub fn new(
inner: Inner,
@@ -81,7 +89,7 @@ where
.chain(iter::once(self.path_segment))
}
fn get_trigger(&self, path: StorePath) -> StoreFieldTrigger {
fn get_trigger(&self, path: StorePath) -> ArcTrigger {
self.inner.get_trigger(path)
}
@@ -92,14 +100,9 @@ where
fn writer(&self) -> Option<Self::Writer> {
let trigger = self.get_trigger(self.path().into_iter().collect());
let inner = WriteGuard::new(trigger.children, self.inner.writer()?);
let inner = WriteGuard::new(trigger, self.inner.writer()?);
Some(MappedMut::new(inner, self.read, self.write))
}
#[inline(always)]
fn keys(&self) -> Option<KeyMap> {
self.inner.keys()
}
}
impl<Inner, Prev, T> DefinedAt for Subfield<Inner, Prev, T>
@@ -120,7 +123,7 @@ where
impl<Inner, Prev, T> IsDisposed for Subfield<Inner, Prev, T>
where
Inner: IsDisposed,
Inner: StoreField<Value = Prev> + IsDisposed,
{
fn is_disposed(&self) -> bool {
self.inner.is_disposed()
@@ -134,25 +137,19 @@ where
{
fn notify(&self) {
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.notify();
trigger.children.notify();
trigger.notify();
}
}
impl<Inner, Prev, T> Track for Subfield<Inner, Prev, T>
where
Inner: StoreField<Value = Prev> + Track + 'static,
Inner: StoreField<Value = Prev> + 'static,
Prev: 'static,
T: 'static,
{
fn track(&self) {
let inner = self
.inner
.get_trigger(self.inner.path().into_iter().collect());
inner.this.track();
let trigger = self.get_trigger(self.path().into_iter().collect());
trigger.this.track();
trigger.children.track();
trigger.track();
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "reactive_stores_macro"
version = "0.1.0-beta6"
version = "0.1.0-beta4"
rust-version.workspace = true
edition.workspace = true
@@ -8,8 +8,7 @@ edition.workspace = true
proc-macro = true
[dependencies]
convert_case = "0.6"
proc-macro-error = "1.0"
proc-macro2 = "1.0"
quote = "1.0"
syn = { version = "2.0", features = ["full"] }
syn = "2.0"

View File

@@ -1 +0,0 @@
extend = { path = "../cargo-make/main.toml" }

View File

@@ -1,13 +1,12 @@
use convert_case::{Case, Casing};
use proc_macro2::{Span, TokenStream};
use proc_macro2::Span;
use proc_macro_error::{abort, abort_call_site, proc_macro_error};
use quote::{quote, ToTokens};
use syn::{
parse::{Parse, ParseStream, Parser},
punctuated::Punctuated,
token::Comma,
ExprClosure, Field, Fields, Generics, Ident, Index, Meta, Result, Token,
Type, Variant, Visibility, WhereClause,
Field, Generics, Ident, Index, Meta, Result, Token, Type, Visibility,
WhereClause,
};
#[proc_macro_error]
@@ -27,234 +26,61 @@ pub fn derive_patch(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
}
struct Model {
vis: Visibility,
name: Ident,
generics: Generics,
ty: ModelTy,
}
enum ModelTy {
Struct { fields: Vec<Field> },
Enum { variants: Vec<Variant> },
pub vis: Visibility,
pub struct_name: Ident,
pub generics: Generics,
pub fields: Vec<Field>,
}
impl Parse for Model {
fn parse(input: ParseStream) -> Result<Self> {
let input = syn::DeriveInput::parse(input)?;
let ty = match input.data {
syn::Data::Struct(s) => {
let fields = match s.fields {
syn::Fields::Unit => {
abort!(s.semi_token, "unit structs are not supported");
}
syn::Fields::Named(fields) => {
fields.named.into_iter().collect::<Vec<_>>()
}
syn::Fields::Unnamed(fields) => {
fields.unnamed.into_iter().collect::<Vec<_>>()
}
};
let syn::Data::Struct(s) = input.data else {
abort_call_site!("only structs can be used with `Store`");
};
ModelTy::Struct { fields }
let fields = match s.fields {
syn::Fields::Unit => {
abort!(s.semi_token, "unit structs are not supported");
}
syn::Data::Enum(e) => ModelTy::Enum {
variants: e.variants.into_iter().collect(),
},
_ => {
abort_call_site!(
"only structs and enums can be used with `Store`"
);
syn::Fields::Named(fields) => {
fields.named.into_iter().collect::<Vec<_>>()
}
syn::Fields::Unnamed(fields) => {
fields.unnamed.into_iter().collect::<Vec<_>>()
}
};
Ok(Self {
vis: input.vis,
struct_name: input.ident,
generics: input.generics,
name: input.ident,
ty,
fields,
})
}
}
#[derive(Clone)]
enum SubfieldMode {
Keyed(ExprClosure, Type),
Skip,
Keyed(Ident, Type),
}
impl Parse for SubfieldMode {
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
let mode: Ident = input.parse()?;
if mode == "key" {
let _eq: Token!(=) = input.parse()?;
let ident: Ident = input.parse()?;
let _col: Token!(:) = input.parse()?;
let ty: Type = input.parse()?;
let _eq: Token!(=) = input.parse()?;
let ident: ExprClosure = input.parse()?;
Ok(SubfieldMode::Keyed(ident, ty))
} else if mode == "skip" {
Ok(SubfieldMode::Skip)
} else {
Err(input.error("expected `key = <ident>: <Type>`"))
}
}
}
impl ToTokens for Model {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let library_path = quote! { reactive_stores };
let Model {
vis,
name,
generics,
ty,
} = &self;
let any_store_field = Ident::new("AnyStoreField", Span::call_site());
let trait_name = Ident::new(&format!("{name}StoreFields"), name.span());
let generics_with_orig = {
let params = &generics.params;
quote! { <#any_store_field, #params> }
};
let where_with_orig = {
generics
.where_clause
.as_ref()
.map(|w| {
let WhereClause {
where_token,
predicates,
} = &w;
quote! {
#where_token
#any_store_field: #library_path::StoreField<Value = #name #generics>,
#predicates
}
})
.unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #name #generics> })
};
// define an extension trait that matches this struct
// and implement that trait for all StoreFields
let (trait_fields, read_fields): (Vec<_>, Vec<_>) =
ty.to_field_data(&library_path, generics, &any_store_field, name);
// read access
tokens.extend(quote! {
#vis trait #trait_name <AnyStoreField>
#where_with_orig
{
#(#trait_fields)*
}
impl #generics_with_orig #trait_name <AnyStoreField> for AnyStoreField
#where_with_orig
{
#(#read_fields)*
}
});
}
}
impl ModelTy {
fn to_field_data(
&self,
library_path: &TokenStream,
generics: &Generics,
any_store_field: &Ident,
name: &Ident,
) -> (Vec<TokenStream>, Vec<TokenStream>) {
match self {
ModelTy::Struct { fields } => fields
.iter()
.enumerate()
.map(|(idx, field)| {
let Field {
ident, ty, attrs, ..
} = &field;
let modes = attrs
.iter()
.find_map(|attr| {
attr.meta.path().is_ident("store").then(|| {
match &attr.meta {
Meta::List(list) => {
match Punctuated::<
SubfieldMode,
Comma,
>::parse_terminated
.parse2(list.tokens.clone())
{
Ok(modes) => Some(
modes
.iter()
.cloned()
.collect::<Vec<_>>(),
),
Err(e) => abort!(list, e),
}
}
_ => None,
}
})
})
.flatten();
(
field_to_tokens(
idx,
false,
modes.as_deref(),
library_path,
ident.as_ref(),
generics,
any_store_field,
name,
ty,
),
field_to_tokens(
idx,
true,
modes.as_deref(),
library_path,
ident.as_ref(),
generics,
any_store_field,
name,
ty,
),
)
})
.unzip(),
ModelTy::Enum { variants } => variants
.iter()
.map(|variant| {
let Variant { ident, fields, .. } = variant;
(
variant_to_tokens(
false,
library_path,
ident,
generics,
any_store_field,
name,
fields,
),
variant_to_tokens(
true,
library_path,
ident,
generics,
any_store_field,
name,
fields,
),
)
})
.unzip(),
}
}
}
#[allow(clippy::too_many_arguments)]
fn field_to_tokens(
idx: usize,
@@ -264,7 +90,7 @@ fn field_to_tokens(
orig_ident: Option<&Ident>,
generics: &Generics,
any_store_field: &Ident,
name: &Ident,
struct_name: &Ident,
ty: &Type,
) -> proc_macro2::TokenStream {
let ident = if orig_ident.is_none() {
@@ -283,29 +109,21 @@ fn field_to_tokens(
if let Some(modes) = modes {
if modes.len() == 1 {
let mode = &modes[0];
match mode {
SubfieldMode::Keyed(keyed_by, key_ty) => {
let signature = quote! {
fn #ident(self) -> #library_path::KeyedSubfield<#any_store_field, #name #generics, #key_ty, #ty>
};
return if include_body {
quote! {
#signature {
#library_path::KeyedSubfield::new(
self,
#idx.into(),
#keyed_by,
|prev| &prev.#locator,
|prev| &mut prev.#locator,
)
}
}
} else {
quote! { #signature; }
};
// Can be replaced with a `match` if additional modes are added
// TODO: handle `keyed_by`
let SubfieldMode::Keyed(_keyed_by, key_ty) = mode;
let signature = quote! {
fn #ident(self) -> #library_path::KeyedField<#any_store_field, #struct_name #generics, #ty, #key_ty>
};
return if include_body {
quote! {
#signature {
todo!()
}
}
SubfieldMode::Skip => return quote! {},
}
} else {
quote! { #signature; }
};
} else {
abort!(
orig_ident
@@ -319,7 +137,7 @@ fn field_to_tokens(
// default subfield
if include_body {
quote! {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty> {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #struct_name #generics, #ty> {
#library_path::Subfield::new(
self,
#idx.into(),
@@ -330,212 +148,93 @@ fn field_to_tokens(
}
} else {
quote! {
fn #ident(self) -> #library_path::Subfield<#any_store_field, #name #generics, #ty>;
fn #ident(self) -> #library_path::Subfield<#any_store_field, #struct_name #generics, #ty>;
}
}
}
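// Hand-written sketch (illustrative, not macro output from this diff) of the
// extension trait that `field_to_tokens` produces for a plain two-field
// struct. The struct and field names are invented; the reader/writer closures
// follow the `|prev| &prev.<field>` / `|prev| &mut prev.<field>` pattern used above.
use reactive_stores::{StoreField, Subfield};
struct Todo {
    title: String,
    done: bool,
}
trait TodoStoreFields<AnyStoreField>
where
    AnyStoreField: StoreField<Value = Todo>,
{
    fn title(self) -> Subfield<AnyStoreField, Todo, String>;
    fn done(self) -> Subfield<AnyStoreField, Todo, bool>;
}
impl<AnyStoreField> TodoStoreFields<AnyStoreField> for AnyStoreField
where
    AnyStoreField: StoreField<Value = Todo>,
{
    fn title(self) -> Subfield<AnyStoreField, Todo, String> {
        Subfield::new(self, 0.into(), |prev| &prev.title, |prev| &mut prev.title)
    }
    fn done(self) -> Subfield<AnyStoreField, Todo, bool> {
        Subfield::new(self, 1.into(), |prev| &prev.done, |prev| &mut prev.done)
    }
}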
#[allow(clippy::too_many_arguments)]
fn variant_to_tokens(
include_body: bool,
library_path: &proc_macro2::TokenStream,
ident: &Ident,
generics: &Generics,
any_store_field: &Ident,
name: &Ident,
fields: &Fields,
) -> proc_macro2::TokenStream {
// the method name will always be the snake_case version of the variant ident
let orig_ident = &ident;
let ident =
Ident::new(&ident.to_string().to_case(Case::Snake), ident.span());
match fields {
// For unit enum variants, we just return a `bool` subfield, which is
// true when this variant matches (a hand-written sketch of the generated
// method surface follows after this impl)
Fields::Unit => {
// default subfield
if include_body {
quote! {
fn #ident(self) -> bool {
match #library_path::StoreField::reader(&self) {
Some(reader) => {
#library_path::StoreField::track_field(&self);
matches!(&*reader, #name::#orig_ident)
},
None => false
}
impl ToTokens for Model {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let library_path = quote! { reactive_stores };
let Model {
vis,
struct_name,
generics,
fields,
} = &self;
let any_store_field = Ident::new("AnyStoreField", Span::call_site());
let trait_name = Ident::new(
&format!("{struct_name}StoreFields"),
struct_name.span(),
);
let generics_with_orig = {
let params = &generics.params;
quote! { <#any_store_field, #params> }
};
let where_with_orig = {
generics
.where_clause
.as_ref()
.map(|w| {
let WhereClause {
where_token,
predicates,
} = &w;
quote! {
#where_token
#any_store_field: #library_path::StoreField<Value = #struct_name #generics>,
#predicates
}
}
} else {
quote! {
fn #ident(self) -> bool;
}
})
.unwrap_or_else(|| quote! { where #any_store_field: #library_path::StoreField<Value = #struct_name #generics> })
};
// define an extension trait that matches this struct
let all_field_data = fields.iter().enumerate().map(|(idx, field)| {
let Field { ident, ty, attrs, .. } = &field;
let modes = attrs.iter().find_map(|attr| {
attr.meta.path().is_ident("store").then(|| {
match &attr.meta {
Meta::List(list) => {
match Punctuated::<SubfieldMode, Comma>::parse_terminated.parse2(list.tokens.clone()) {
Ok(modes) => Some(modes.iter().cloned().collect::<Vec<_>>()),
Err(e) => abort!(list, e)
}
},
_ => None
}
})
}).flatten();
(
field_to_tokens(idx, false, modes.as_deref(), &library_path, ident.as_ref(), generics, &any_store_field, struct_name, ty),
field_to_tokens(idx, true, modes.as_deref(), &library_path, ident.as_ref(), generics, &any_store_field, struct_name, ty),
)
});
// implement that trait for all StoreFields
let (trait_fields, read_fields): (Vec<_>, Vec<_>) =
all_field_data.unzip();
// read access
tokens.extend(quote! {
#vis trait #trait_name <AnyStoreField>
#where_with_orig
{
#(#trait_fields)*
}
}
// If an enum variant has named fields, we create N + 1 methods:
// 1 `bool` subfield, which is true when this variant matches
// N `Option<T>` subfields, one for each of the named fields
Fields::Named(fields) => {
let mut tokens = if include_body {
quote! {
fn #ident(self) -> bool {
match #library_path::StoreField::reader(&self) {
Some(reader) => {
#library_path::StoreField::track_field(&self);
matches!(&*reader, #name::#orig_ident { .. })
},
None => false
}
}
}
} else {
quote! {
fn #ident(self) -> bool;
}
};
tokens.extend(fields
.named
.iter()
.map(|field| {
let field_ident = field.ident.as_ref().unwrap();
let field_ty = &field.ty;
let combined_ident = Ident::new(
&format!("{}_{}", ident, field_ident),
field_ident.span(),
);
// default subfield
if include_body {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> {
#library_path::StoreField::track_field(&self);
let reader = #library_path::StoreField::reader(&self);
let matches = reader
.map(|reader| matches!(&*reader, #name::#orig_ident { .. }))
.unwrap_or(false);
if matches {
Some(#library_path::Subfield::new(
self,
0.into(),
|prev| {
match prev {
#name::#orig_ident { #field_ident, .. } => Some(#field_ident),
_ => None,
}
.expect("accessed an enum field that is no longer matched")
},
|prev| {
match prev {
#name::#orig_ident { #field_ident, .. } => Some(#field_ident),
_ => None,
}
.expect("accessed an enum field that is no longer matched")
},
))
} else {
None
}
}
}
} else {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>;
}
}
}));
tokens
}
// If an enum variant has unnamed fields, we create N + 1 methods:
// 1 `bool` subfield, which is true when this variant matches
// N `Option<T>` subfields, one for each of the unnamed fields
Fields::Unnamed(fields) => {
let mut tokens = if include_body {
quote! {
fn #ident(self) -> bool {
match #library_path::StoreField::reader(&self) {
Some(reader) => {
#library_path::StoreField::track_field(&self);
matches!(&*reader, #name::#orig_ident { .. })
},
None => false
}
}
}
} else {
quote! {
fn #ident(self) -> bool;
}
};
let number_of_fields = fields.unnamed.len();
tokens.extend(fields
.unnamed
.iter()
.enumerate()
.map(|(idx, field)| {
let field_ident = idx;
let field_ty = &field.ty;
let combined_ident = Ident::new(
&format!("{}_{}", ident, field_ident),
ident.span(),
);
let ignore_before = (0..idx).map(|_| quote! { _, });
let ignore_before2 = ignore_before.clone();
let ignore_after = (idx..number_of_fields.saturating_sub(1)).map(|_| quote! { _, });
let ignore_after2 = ignore_after.clone();
// default subfield
if include_body {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>> {
#library_path::StoreField::track_field(&self);
let reader = #library_path::StoreField::reader(&self);
let matches = reader
.map(|reader| matches!(&*reader, #name::#orig_ident(..)))
.unwrap_or(false);
if matches {
Some(#library_path::Subfield::new(
self,
0.into(),
|prev| {
match prev {
#name::#orig_ident(#(#ignore_before)* this, #(#ignore_after)*) => Some(this),
_ => None,
}
.expect("accessed an enum field that is no longer matched")
},
|prev| {
match prev {
#name::#orig_ident(#(#ignore_before2)* this, #(#ignore_after2)*) => Some(this),
_ => None,
}
.expect("accessed an enum field that is no longer matched")
},
))
} else {
None
}
}
}
} else {
quote! {
fn #combined_ident(self) -> Option<#library_path::Subfield<#any_store_field, #name #generics, #field_ty>>;
}
}
}));
tokens
}
impl #generics_with_orig #trait_name <AnyStoreField> for AnyStoreField
#where_with_orig
{
#(#read_fields)*
}
});
}
}
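// Hand-written sketch (illustrative, not macro output from this diff) of the
// method surface the enum-aware side generates via `variant_to_tokens`. The
// enum and its fields are invented; each variant gets a `bool` method, and
// each variant field gets an `Option<Subfield<..>>` method named
// `{snake_case_variant}_{field}` (or `_{index}` for tuple variants).
use reactive_stores::{StoreField, Subfield};
enum Status {
    Pending,
    Scheduled { at: u64 },
    Done(String),
}
trait StatusStoreFields<AnyStoreField>
where
    AnyStoreField: StoreField<Value = Status>,
{
    fn pending(self) -> bool;
    fn scheduled(self) -> bool;
    fn scheduled_at(self) -> Option<Subfield<AnyStoreField, Status, u64>>;
    fn done(self) -> bool;
    fn done_0(self) -> Option<Subfield<AnyStoreField, Status, String>>;
}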
struct PatchModel {
pub name: Ident,
pub struct_name: Ident,
pub generics: Generics,
pub fields: Vec<Field>,
}
@@ -561,7 +260,7 @@ impl Parse for PatchModel {
};
Ok(Self {
name: input.ident,
struct_name: input.ident,
generics: input.generics,
fields,
})
@@ -572,7 +271,7 @@ impl ToTokens for PatchModel {
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let library_path = quote! { reactive_stores };
let PatchModel {
name,
struct_name,
generics,
fields,
} = &self;
@@ -595,7 +294,7 @@ impl ToTokens for PatchModel {
// read access
tokens.extend(quote! {
impl #library_path::PatchField for #name #generics
impl #library_path::PatchField for #struct_name #generics
{
fn patch_field(
&mut self,

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_router"
version = "0.7.0-beta6"
version = "0.7.0-beta4"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"

View File

@@ -120,9 +120,6 @@ where
);
let params_memo = ArcMemo::from(params.clone());
// release URL lock
drop(current_url);
match new_match {
None => Rc::new(RefCell::new(FlatRoutesViewState {
view: EitherOf3::B(fallback()).build(),
@@ -384,10 +381,6 @@ where
.unwrap_or_default(),
);
let params_memo = ArcMemo::from(params.clone());
// release URL lock
drop(current_url);
let view = match new_match {
None => Either::Left((self.fallback)()),
Some(new_match) => {
@@ -550,9 +543,6 @@ where
);
let params_memo = ArcMemo::from(params.clone());
// release URL lock
drop(current_url);
match new_match {
None => Rc::new(RefCell::new(FlatRoutesViewState {
view: EitherOf3::B(fallback())

View File

@@ -2,33 +2,6 @@ use super::{PartialPathMatch, PathSegment, PossibleRouteMatch};
use core::iter;
use std::borrow::Cow;
/// A segment that captures a value from the URL and maps it to a key.
///
/// # Examples
/// ```rust
/// # (|| -> Option<()> { // Option does not impl Terminate, so no main
/// use leptos::prelude::*;
/// use leptos_router::{path, ParamSegment, PossibleRouteMatch};
///
/// let path = &"/hello";
///
/// // Manual definition
/// let manual = (ParamSegment("message"),);
/// let (key, value) = manual.test(path)?.params().last()?;
///
/// assert_eq!(key, "message");
/// assert_eq!(value, "hello");
///
/// // Macro definition
/// let using_macro = path!("/:message");
/// let (key, value) = using_macro.test(path)?.params().last()?;
///
/// assert_eq!(key, "message");
/// assert_eq!(value, "hello");
///
/// # Some(())
/// # })().unwrap();
/// ```
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct ParamSegment(pub &'static str);
@@ -78,46 +51,6 @@ impl PossibleRouteMatch for ParamSegment {
}
}
/// A segment that captures all remaining segments of the URL and maps them to a key.
///
/// A [`WildcardSegment`] __must__ be the last segment of your path definition.
///
/// ```rust
/// # (|| -> Option<()> { // Option does not impl Terminate, so no main
/// use leptos::prelude::*;
/// use leptos_router::{
/// path, ParamSegment, PossibleRouteMatch, StaticSegment, WildcardSegment,
/// };
///
/// let path = &"/echo/send/sync/and/static";
///
/// // Manual definition
/// let manual = (StaticSegment("echo"), WildcardSegment("kitchen_sink"));
/// let (key, value) = manual.test(path)?.params().last()?;
///
/// assert_eq!(key, "kitchen_sink");
/// assert_eq!(value, "send/sync/and/static");
///
/// // Macro definition
/// let using_macro = path!("/echo/*else");
/// let (key, value) = using_macro.test(path)?.params().last()?;
///
/// assert_eq!(key, "else");
/// assert_eq!(value, "send/sync/and/static");
///
/// // This fails to compile because the macro will catch the bad ordering
/// // let bad = path!("/echo/*foo/bar/:baz");
///
/// // This compiles but may not work as you expect at runtime.
/// (
/// StaticSegment("echo"),
/// WildcardSegment("foo"),
/// ParamSegment("baz"),
/// );
///
/// # Some(())
/// # })().unwrap();
/// ```
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct WildcardSegment(pub &'static str);

View File

@@ -25,37 +25,6 @@ impl AsPath for &'static str {
}
}
/// A segment that is expected to be static. It does not require mapping into params.
///
/// Should work exactly as you would expect.
///
/// # Examples
/// ```rust
/// # (|| -> Option<()> { // Option does not impl Terminate, so no main
/// use leptos::prelude::*;
/// use leptos_router::{path, PossibleRouteMatch, StaticSegment};
///
/// let path = &"/users";
///
/// // Manual definition
/// let manual = (StaticSegment("users"),);
/// let matched = manual.test(path)?;
/// assert_eq!(matched.matched(), "/users");
///
/// // Params are empty as we had no `ParamSegment`s or `WildcardSegment`s
/// // If you did have additional dynamic segments, this would not be empty.
/// assert_eq!(matched.params().count(), 0);
///
/// // Macro definition
/// let using_macro = path!("/users");
/// let matched = using_macro.test(path)?;
/// assert_eq!(matched.matched(), "/users");
///
/// assert_eq!(matched.params().count(), 0);
///
/// # Some(())
/// # })().unwrap();
/// ```
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct StaticSegment<T: AsPath>(pub T);
@@ -104,11 +73,6 @@ impl<T: AsPath> PossibleRouteMatch for StaticSegment<T> {
}
}
// if we still have remaining, unmatched characters in this segment, it was not a match
if this.next().is_some() {
return None;
}
// build the match object
// the remaining is built from the input path, with the slice advanced
// by the length of this match
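// Illustrative sketch (not part of the diff), using the public API shown in
// the doc examples above: a static segment must be consumed in full, and any
// unconsumed tail is reported via `remaining()`. The "/use" behaviour below
// assumes the extra-characters check above is in place.
use leptos_router::{PossibleRouteMatch, StaticSegment};
fn static_segment_sketch() {
    // full segment matched; the tail is left for the next segment to consume
    let m = (StaticSegment("users"),).test("/users/1").unwrap();
    assert_eq!(m.matched(), "/users");
    assert!(!m.remaining().is_empty());
    // "/use" only covers a prefix of "users"; with the check above, the
    // leftover characters in the segment mean this is not a match
    assert!((StaticSegment("users"),).test("/use").is_none());
}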

View File

@@ -156,7 +156,6 @@ mod tests {
matching::MatchParams, MatchInterface, PathSegment, StaticSegment,
WildcardSegment,
};
use either_of::Either;
use tachys::renderer::dom::Dom;
#[test]
@@ -165,11 +164,8 @@ mod tests {
Routes::<_, Dom>::new(NestedRoute::new(StaticSegment("/"), || ()));
let matched = routes.match_route("/");
assert!(matched.is_some());
// this case seems like it should match, but implementing it interferes with
// handling trailing slash requirements accurately -- paths for the root are "/",
// not "", in any case
let matched = routes.match_route("");
assert!(matched.is_none());
assert!(matched.is_some());
let (base, paths) = routes.generate_routes();
assert_eq!(base, None);
let paths = paths.into_iter().map(|g| g.segments).collect::<Vec<_>>();
@@ -208,16 +204,6 @@ mod tests {
);
}
#[test]
pub fn does_not_match_route_unless_full_param_matches() {
let routes = Routes::<_, Dom>::new((
NestedRoute::new(StaticSegment("/property-api"), || ()),
NestedRoute::new(StaticSegment("/property"), || ()),
));
let matched = routes.match_route("/property").unwrap();
assert!(matches!(matched, Either::Right(_)));
}
#[test]
pub fn does_not_match_incomplete_route() {
let routes: Routes<_, Dom> =
@@ -358,7 +344,7 @@ impl<'a, ParamsIter> PartialPathMatch<'a, ParamsIter> {
self.remaining.is_empty() || self.remaining == "/"
}
pub fn remaining(&self) -> &'a str {
pub fn remaining(&self) -> &str {
self.remaining
}
@@ -366,7 +352,7 @@ impl<'a, ParamsIter> PartialPathMatch<'a, ParamsIter> {
self.params
}
pub fn matched(&self) -> &'a str {
pub fn matched(&self) -> &str {
self.matched
}
}
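// Illustrative, self-contained sketch (invented types, not from the diff) of
// what the `&'a str` vs `&str` return type changes on `PartialPathMatch`:
// with the elided lifetime the borrow is tied to the match value itself, while
// `&'a str` ties it to the original path, so it can outlive the match.
struct DemoMatch<'a> {
    remaining: &'a str,
}
impl<'a> DemoMatch<'a> {
    fn remaining_elided(&self) -> &str {
        // shorthand for fn remaining_elided<'s>(&'s self) -> &'s str
        self.remaining
    }
    fn remaining_tied_to_path(&self) -> &'a str {
        // borrows from the path the match was built over, not from `self`
        self.remaining
    }
}
fn demo(path: &str) -> &str {
    let m = DemoMatch { remaining: path };
    // returning `m.remaining_elided()` here would fail to compile, because the
    // returned borrow would reference the local `m`; the `'a` version is fine
    m.remaining_tied_to_path()
}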

View File

@@ -93,7 +93,6 @@ where
let mut loaders = Vec::new();
let mut outlets = Vec::new();
let url = current_url.read_untracked();
let path = url.path().to_string();
// match the route
let new_match = routes.match_route(url.path());
@@ -111,7 +110,6 @@ where
&mut outlets,
&outer_owner,
);
drop(url);
outer_owner.with(|| {
EitherOf3::C(
Outlet(OutletProps::builder().build()).into_any(),
@@ -133,7 +131,7 @@ where
});
NestedRouteViewState {
path,
path: url.path().to_string(),
current_url,
outlets,
view,
@@ -413,7 +411,6 @@ where
let mut loaders = Vec::new();
let mut outlets = Vec::new();
let url = current_url.read_untracked();
let path = url.path().to_string();
// match the route
let new_match = routes.match_route(url.path());
@@ -430,8 +427,6 @@ where
&mut outlets,
&outer_owner,
);
drop(url);
// TODO support for lazy hydration
join_all(mem::take(&mut loaders))
.now_or_never()
@@ -447,7 +442,7 @@ where
));
NestedRouteViewState {
path,
path: url.path().to_string(),
current_url,
outlets,
view,

View File

@@ -313,6 +313,7 @@ impl ResolvedStaticPath {
// awaiting the Future
_ = tx.send((owner.clone(), Some(html)));
} else {
_ = tx.send((owner.clone(), None));
if let Err(e) = writer(&self, &owner, html).await {
#[cfg(feature = "tracing")]
tracing::warn!("{e}");
@@ -320,7 +321,6 @@ impl ResolvedStaticPath {
#[cfg(not(feature = "tracing"))]
eprintln!("{e}");
}
_ = tx.send((owner.clone(), None));
}
// if there's a regeneration function, keep looping

View File

@@ -1,6 +1,6 @@
[package]
name = "leptos_router_macro"
version = "0.7.0-beta6"
version = "0.7.0-beta4"
authors = ["Greg Johnston", "Ben Wishovich"]
license = "MIT"
readme = "../README.md"

View File

@@ -1,6 +1,6 @@
[package]
name = "tachys"
version = "0.1.0-beta6"
version = "0.1.0-beta4"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
@@ -179,7 +179,4 @@ sledgehammer = ["dep:sledgehammer_bindgen", "dep:sledgehammer_utils"]
tracing = ["dep:tracing"]
[package.metadata.cargo-all-features]
denylist = ["tracing", "sledgehammer"]
skip_feature_sets = [
["ssr", "hydrate"],
]
denylist = ["tracing"]

View File

@@ -207,7 +207,7 @@ impl<T, D, P, R> ToTemplate for Directive<T, D, P, R> {
///
/// You can use directives like the following.
///
/// ```ignore
/// ```
/// # use leptos::{*, html::AnyElement};
///
/// // This doesn't take an attribute value

View File

@@ -17,6 +17,8 @@ where
rndr: PhantomData,
attributes: (),
children: (),
#[cfg(debug_assertions)]
defined_at: std::panic::Location::caller(),
}
}

View File

@@ -13,8 +13,7 @@ use web_sys::Element;
/// Extends the [`Element`](Renderer::Element) type of a [`Renderer`], allowing you to add
/// attributes and children to the element's built state at runtime, with a similar API to how they
/// can be added to the static view tree at compile time.
///
/// ```rust,ignore
/// ```rust
/// use tachys::html::element::ElementExt;
///
/// let view: HtmlElement<_, _, _, MockDom> = button();

Some files were not shown because too many files have changed in this diff.