Mirror of https://github.com/leptos-rs/leptos.git (synced 2025-12-27 16:54:41 -05:00)

Compare commits
130 commits in this comparison (SHA1s as listed):

b97cf3353a, 68c849073c, 3d2cdc21a1, 93d939aef8, fb04750607, a080496e7e, 9fc1002167, bc5c766530, 17821f863a, 1ca4f34ef3,
8f0a1554b1, 38d4f26d03, 2b04c2710d, a4937a1236, f6f2c39686, d7eacf1ab5, d1a4bbe28e, 412ecd6b1b, 9bc0152121, 4b05cada8f,
a818862704, 173487debc, 449d96cc9a, f9bf6a95ed, 5bf6c94bb2, e1ce94a28d, 2a62dcf27e, 3094766c5c, a52804595d, e72f12d32b,
e70083708a, cbc4caef19, fbeee4dbf5, d13f7e5438, 7b543bd31c, 1743724420, 73e0add670, 4f5eb444bc, 7de98823fb, 6d930573fc,
3317002ff5, 99403d0167, 23ce022c60, 96e1fd0fb8, f28dac1093, ff28544fb2, 27765b417c, b0d8d4ee26, c4b1176a6a, fd133dd79a,
9c2477a4cf, f3b6d1f351, 5af7b54c9c, ba9604101d, e136c1fc44, c581b3293e, cc7f861637, 642d6fc72b, e69c7f4ae0, 9ca36d4763,
8dc600ca02, b621ead607, 66cf21f650, f3dcdc057d, 2bdacf636e, fc06980c60, 550a3a4e6d, 3310e7766b, 5ab865e89d, f0c60f6ef6,
f3f685c923, 3646bf31b0, b39895fa2d, 1fce8931ab, 6166f6edbd, dc9fbb0585, d7b2f9d05b, 69c4090d32, fff5fa3459, e92b80c71e,
8bb04ef248, d7881ccfb5, 96a1f80daf, a083b57260, 4fa6660a3f, 43f2ad7043, 2bf04072ea, efc6fc017d, 6cb10401df, 346efd66f5,
7c0889e873, bb40576bd5, 6baf20275f, 5a57d48913, 73f0207a7d, 4e4fb8ab10, b9cccc6b91, d42163d888, 2db3e4f4d8, 45380a258a,
40292d0896, e8be9e31ff, 3d0fdb1ab0, 4dea1195e2, 92ea39ddac, 05e08166c4, 827cc0bdfa, 57bd343f4a, 4a76aead68, 48c2148589,
32bea69c28, f3c57f8bce, 000896b2f7, 88004e5042, 6001a93475, 4784b2ddab, 32b4cd008f, 823f8b51be, 209743d6bc, b93a88accc,
dc2314d5e2, 33aa676854, 4a3b3ffb8a, ee5cbf1891, 8fcf3544a8, 2b8e987cb8, 998165148b, c80eff1098, cd8f2c2153, cb0abff2d5
.github/workflows/ci-changed-examples.yml  (vendored, 6)
@@ -1,23 +1,21 @@
name: CI Changed Examples

on:
  push:
    branches:
      - main
      - leptos_0.6
  pull_request:
    branches:
      - main
      - leptos_0.6

jobs:
  get-example-changed:
    uses: ./.github/workflows/get-example-changed.yml

  get-matrix:
    needs: [get-example-changed]
    uses: ./.github/workflows/get-changed-examples-matrix.yml
    with:
      example_changed: ${{ fromJSON(needs.get-example-changed.outputs.example_changed) }}

  test:
    name: CI
    needs: [get-example-changed, get-matrix]
.github/workflows/ci-examples.yml  (vendored, 4)
@@ -1,13 +1,13 @@
name: CI Examples

on:
  push:
    branches:
      - main
      - leptos_0.6
  pull_request:
    branches:
      - main
      - leptos_0.6

jobs:
  get-leptos-changed:
    uses: ./.github/workflows/get-leptos-changed.yml
.github/workflows/ci-semver.yml  (vendored, 7)
@@ -1,27 +1,24 @@
name: CI semver

on:
  push:
    branches:
      - main
      - leptos_0.6
  pull_request:
    branches:
      - main
      - leptos_0.6

jobs:
  get-leptos-changed:
    uses: ./.github/workflows/get-leptos-changed.yml

  test:
    needs: [get-leptos-changed]
    if: github.event.pull_request.labels[0].name == 'semver' # needs.get-leptos-changed.outputs.leptos_changed == 'true' && github.event.pull_request.labels[0].name != 'breaking'
    name: Run semver check (nightly-2024-08-01)
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Semver Checks
        uses: obi1kenobi/cargo-semver-checks-action@v2
        with:
.github/workflows/ci.yml  (vendored, 39)
@@ -1,50 +1,25 @@
name: CI

on:
  push:
    branches:
      - main
      - leptos_0.6
  pull_request:
    branches:
      - main
      - leptos_0.6

jobs:
  get-leptos-changed:
    uses: ./.github/workflows/get-leptos-changed.yml

  get-leptos-matrix:
    uses: ./.github/workflows/get-leptos-matrix.yml
  test:
    name: CI
    needs: [get-leptos-changed]
    needs: [get-leptos-changed, get-leptos-matrix]
    if: needs.get-leptos-changed.outputs.leptos_changed == 'true'
    strategy:
      matrix:
        directory:
          [
            any_error,
            any_spawner,
            const_str_slice_concat,
            either_of,
            hydration_context,
            integrations/actix,
            integrations/axum,
            integrations/utils,
            leptos,
            leptos_config,
            leptos_dom,
            leptos_hot_reload,
            leptos_macro,
            leptos_server,
            meta,
            next_tuple,
            oco,
            or_poisoned,
            reactive_graph,
            router,
            router_macro,
            server_fn,
            server_fn/server_fn_macro_default,
            server_fn_macro,
          ]
      matrix: ${{ fromJSON(needs.get-leptos-matrix.outputs.matrix) }}
      fail-fast: false
    uses: ./.github/workflows/run-cargo-make-task.yml
    with:
      directory: ${{ matrix.directory }}
.github/workflows/get-example-changed.yml  (vendored, 6)
@@ -1,12 +1,10 @@
name: Examples Changed Call

on:
  workflow_call:
    outputs:
      example_changed:
        description: "Example Changed"
        value: ${{ jobs.get-example-changed.outputs.example_changed }}

jobs:
  get-example-changed:
    name: Get Example Changed
@@ -18,7 +16,6 @@ jobs:
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get example files that changed
        id: changed-files
        uses: tj-actions/changed-files@v45
@@ -26,13 +23,10 @@
          files: |
            examples/**
            !examples/cargo-make/**
            !examples/gtk/**
            !examples/Makefile.toml
            !examples/*.md

      - name: List example files that changed
        run: echo '${{ steps.changed-files.outputs.all_changed_files }}'

      - name: Set example_changed
        id: set-example-changed
        run: |
.github/workflows/get-examples-matrix.yml  (vendored, 18)
@@ -1,38 +1,34 @@
name: Get Examples Matrix Call

on:
  workflow_call:
    outputs:
      matrix:
        description: "Matrix"
        value: ${{ jobs.create.outputs.matrix }}

jobs:
  create:
    name: Create Examples Matrix
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    env:
      # separate examples using "|" (vertical bar) char like "a|b|c".
      # cargo-make should be excluded by default.
      EXCLUDED_EXAMPLES: cargo-make
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Install jq
        run: sudo apt-get install jq

      - name: Set Matrix
        id: set-matrix
        run: |
          examples=$(ls examples |
            awk '{print "examples/" $0}' |
            grep -v .md |
            grep -v examples/Makefile.toml |
            grep -v examples/cargo-make |
            grep -v examples/gtk |
          examples=$(ls -1d examples/*/ |
            grep -vE "($EXCLUDED_EXAMPLES)" |
            sed 's/\/$//' |
            jq -R -s -c 'split("\n")[:-1]')
          echo "Example Directories: $examples"
          echo "matrix={\"directory\":$examples}" >> "$GITHUB_OUTPUT"

      - name: Print Location Info
        run: |
          echo "Workspace: ${{ github.workspace }}"
.github/workflows/get-leptos-changed.yml  (vendored, 37)
@@ -1,12 +1,10 @@
name: Get Leptos Changed Call

on:
  workflow_call:
    outputs:
      leptos_changed:
        description: "Leptos Changed"
        value: ${{ jobs.create.outputs.leptos_changed }}

jobs:
  create:
    name: Detect Source Change
@@ -18,40 +16,19 @@ jobs:
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Get source files that changed
        id: changed-source
        uses: tj-actions/changed-files@v45
        with:
          files: |
            any_error/**
            any_spawner/**
            const_str_slice_concat/**
            either_of/**
            hydration_context/**
            integrations/actix/**
            integrations/axum/**
            integrations/utils/**
            leptos/**
            leptos_config/**
            leptos_dom/**
            leptos_hot_reload/**
            leptos_macro/**
            leptos_server/**
            meta/**
            next_tuple/**
            oco/**
            or_poisoned/**
            reactive_graph/**
            router/**
            router_macro/**
            server_fn/**
            server_fn/server_fn_macro_default/**
            server_fn_macro/**

          files_ignore: |
            .*/**/*
            cargo-make/**/*
            examples/**/*
            projects/**/*
            benchmarks/**/*
            docs/**/*
      - name: List source files that changed
        run: echo '${{ steps.changed-source.outputs.all_changed_files }}'

      - name: Set leptos_changed
        id: set-source-changed
        run: |
.github/workflows/get-leptos-matrix.yml  (vendored, new file, 32)
@@ -0,0 +1,32 @@
name: Get Leptos Matrix Call
on:
  workflow_call:
    outputs:
      matrix:
        description: "Matrix"
        value: ${{ jobs.create.outputs.matrix }}
jobs:
  create:
    name: Create Leptos Matrix
    runs-on: ubuntu-latest
    outputs:
      matrix: ${{ steps.set-matrix.outputs.matrix }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install jq
        run: sudo apt-get install jq
      - name: Set Matrix
        id: set-matrix
        run: |
          crates=$(cargo metadata --no-deps --quiet --format-version 1 |
            jq -r '.packages[] | select(.name != "workspace") | .manifest_path| rtrimstr("/Cargo.toml")' |
            sed "s|$(pwd)/||" |
            jq -R -s -c 'split("\n")[:-1]')
          echo "Leptos Directories: $crates"
          echo "matrix={\"directory\":$crates}" >> "$GITHUB_OUTPUT"
      - name: Print Location Info
        run: |
          echo "Workspace: ${{ github.workspace }}"
          pwd
          ls | sort -u
.github/workflows/run-cargo-make-task.yml  (vendored, 28)
@@ -1,5 +1,4 @@
name: Run Task

on:
  workflow_call:
    inputs:
@@ -12,70 +11,53 @@ on:
      toolchain:
        required: true
        type: string

env:
  CARGO_TERM_COLOR: always
  CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse

jobs:
  test:
    name: Run ${{ inputs.cargo_make_task }} (${{ inputs.toolchain }})
    runs-on: ubuntu-latest

    steps:
      # Setup environment
      - uses: actions/checkout@v4

      - name: Setup Rust
        uses: dtolnay/rust-toolchain@master
        with:
          toolchain: ${{ inputs.toolchain }}

      - name: Add wasm32-unknown-unknown
        run: rustup target add wasm32-unknown-unknown

      - name: Setup cargo-make
        uses: davidB/rust-cargo-make@v1

      - name: Cargo generate-lockfile
        run: cargo generate-lockfile

      - uses: Swatinem/rust-cache@v2

      - name: Install binstall
        uses: cargo-bins/cargo-binstall@main

      - name: Install wasm-bindgen
        run: cargo binstall wasm-bindgen-cli --no-confirm

      - name: Install cargo-leptos
        run: cargo binstall cargo-leptos --no-confirm

      - name: Install Trunk
        uses: jetli/trunk-action@v0.5.0
        with:
          version: "latest"

      - name: Print Trunk Version
        run: trunk --version

      - name: Install Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - uses: pnpm/action-setup@v4
        name: Install pnpm
        id: pnpm-install
        with:
          version: 8
          run_install: false

      - name: Get pnpm store directory
        id: pnpm-cache
        run: |
          echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT

      - uses: actions/cache@v4
        name: Setup pnpm cache
        with:
@@ -83,7 +65,6 @@ jobs:
          key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pnpm-store-

      - name: Maybe install chromedriver
        run: |
          project_makefile=${{inputs.directory}}/Makefile.toml
@@ -99,7 +80,6 @@ jobs:
          else
            echo chromedriver is not required
          fi

      - name: Maybe install playwright browser dependencies
        run: |
          for pw_path in $(find ${{inputs.directory}} -name playwright.config.ts)
@@ -113,12 +93,16 @@
            echo Playwright is not required
          fi
          done

      - name: Install Deno
        uses: denoland/setup-deno@v1
        with:
          deno-version: v1.x

      - name: Maybe install gtk-rs dependencies
        run: |
          if [ ! -z $(echo ${{inputs.directory}} | grep gtk) ]; then
            sudo apt-get update
            sudo apt-get install -y libglib2.0-dev libgio2.0-cil-dev libgraphene-1.0-dev libcairo2-dev libpango1.0-dev libgtk-4-dev
          fi
      # Run Cargo Make Task
      - name: ${{ inputs.cargo_make_task }}
        run: |
Cargo.toml  (42)
@@ -40,36 +40,36 @@ members = [
exclude = ["benchmarks", "examples", "projects"]

[workspace.package]
version = "0.7.0-beta4"
version = "0.7.0-beta6"
edition = "2021"
rust-version = "1.76"

[workspace.dependencies]
throw_error = { path = "./any_error/", version = "0.2.0-beta4" }
throw_error = { path = "./any_error/", version = "0.2.0-beta6" }
any_spawner = { path = "./any_spawner/", version = "0.1.0" }
const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1.0" }
either_of = { path = "./either_of/", version = "0.1.0" }
hydration_context = { path = "./hydration_context", version = "0.2.0-beta4" }
leptos = { path = "./leptos", version = "0.7.0-beta4" }
leptos_config = { path = "./leptos_config", version = "0.7.0-beta4" }
leptos_dom = { path = "./leptos_dom", version = "0.7.0-beta4" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.7.0-beta4" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.7.0-beta4" }
leptos_macro = { path = "./leptos_macro", version = "0.7.0-beta4" }
leptos_router = { path = "./router", version = "0.7.0-beta4" }
leptos_router_macro = { path = "./router_macro", version = "0.7.0-beta4" }
leptos_server = { path = "./leptos_server", version = "0.7.0-beta4" }
leptos_meta = { path = "./meta", version = "0.7.0-beta4" }
next_tuple = { path = "./next_tuple", version = "0.1.0-beta4" }
hydration_context = { path = "./hydration_context", version = "0.2.0-beta6" }
leptos = { path = "./leptos", version = "0.7.0-beta6" }
leptos_config = { path = "./leptos_config", version = "0.7.0-beta6" }
leptos_dom = { path = "./leptos_dom", version = "0.7.0-beta6" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.7.0-beta6" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.7.0-beta6" }
leptos_macro = { path = "./leptos_macro", version = "0.7.0-beta6" }
leptos_router = { path = "./router", version = "0.7.0-beta6" }
leptos_router_macro = { path = "./router_macro", version = "0.7.0-beta6" }
leptos_server = { path = "./leptos_server", version = "0.7.0-beta6" }
leptos_meta = { path = "./meta", version = "0.7.0-beta6" }
next_tuple = { path = "./next_tuple", version = "0.1.0-beta6" }
oco_ref = { path = "./oco", version = "0.2.0" }
or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
reactive_graph = { path = "./reactive_graph", version = "0.1.0-beta4" }
reactive_stores = { path = "./reactive_stores", version = "0.1.0-beta4" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.1.0-beta4" }
server_fn = { path = "./server_fn", version = "0.7.0-beta4" }
server_fn_macro = { path = "./server_fn_macro", version = "0.7.0-beta4" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.7.0-beta4" }
tachys = { path = "./tachys", version = "0.1.0-beta4" }
reactive_graph = { path = "./reactive_graph", version = "0.1.0-beta6" }
reactive_stores = { path = "./reactive_stores", version = "0.1.0-beta6" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.1.0-beta6" }
server_fn = { path = "./server_fn", version = "0.7.0-beta6" }
server_fn_macro = { path = "./server_fn_macro", version = "0.7.0-beta6" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.7.0-beta6" }
tachys = { path = "./tachys", version = "0.1.0-beta6" }

[profile.release]
codegen-units = 1
@@ -1,6 +1,6 @@
[package]
name = "throw_error"
version = "0.2.0-beta4"
version = "0.2.0-beta6"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
@@ -2,7 +2,8 @@
name = "benchmarks"
version = "0.1.0"
edition = "2021"
rust-version.workspace = true
# std::sync::LazyLock is stabilized in Rust version 1.80.0
rust-version = "1.80.0"

[dependencies]
l0410 = { package = "leptos", version = "0.4.10", features = [
@@ -18,7 +18,7 @@ fn leptos_ssr_bench(b: &mut Bencher) {
        }
    }

    let rendered = view! {
    let rendered = view! {
        <main>
            <h1>"Welcome to our benchmark page."</h1>
            <p>"Here's some introductory text."</p>
@@ -58,7 +58,7 @@ fn tachys_ssr_bench(b: &mut Bencher) {
        }
    }

    let rendered = view! {
    let rendered = view! {
        <main>
            <h1>"Welcome to our benchmark page."</h1>
            <p>"Here's some introductory text."</p>
@@ -92,13 +92,13 @@ fn tera_ssr_bench(b: &mut Bencher) {
    {% endfor %}
</main>"#;

    lazy_static::lazy_static! {
        static ref TERA: Tera = {
            let mut tera = Tera::default();
            tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
            tera
        };
    }

    static TERA: LazyLock<Tera> = LazyLock::new(|| {
        let mut tera = Tera::default();
        tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
        tera
    });

    #[derive(Serialize, Deserialize)]
    struct Counter {
@@ -55,7 +55,7 @@ static TEMPLATE: &str = r#"<main>
    {% else %}
    <li><a href="/">All</a></li>
    {% endif %}

    {% if mode_active %}
    <li><a href="/active" class="selected">Active</a></li>
    {% else %}
@@ -91,13 +91,13 @@ fn tera_todomvc_ssr(b: &mut Bencher) {
    use serde::{Deserialize, Serialize};
    use tera::*;

    lazy_static::lazy_static! {
        static ref TERA: Tera = {
    static TERA: LazyLock<Tera> = LazyLock::new(|| {
        let mut tera = Tera::default();
        tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
        tera
        };
    }
    });

    #[derive(Serialize, Deserialize)]
    struct Todo {
@@ -131,13 +131,13 @@ fn tera_todomvc_ssr_1000(b: &mut Bencher) {
    use serde::{Deserialize, Serialize};
    use tera::*;

    lazy_static::lazy_static! {
        static ref TERA: Tera = {
            let mut tera = Tera::default();
            tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
            tera
        };
    }

    static TERA: LazyLock<Tera> = LazyLock::new(|| {
        let mut tera = Tera::default();
        tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
        tera
    });

    #[derive(Serialize, Deserialize)]
    struct Todo {
@@ -133,3 +133,104 @@ tuples!(EitherOf13 + EitherOf13Future + EitherOf13FutureProj => A, B, C, D, E, F
tuples!(EitherOf14 + EitherOf14Future + EitherOf14FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N);
tuples!(EitherOf15 + EitherOf15Future + EitherOf15FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
tuples!(EitherOf16 + EitherOf16Future + EitherOf16FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);

/// Matches over the first expression and returns an either ([`Either`], [`EitherOf3`], ... [`EitherOf6`])
/// composed of the values returned by the match arms.
///
/// The pattern syntax is exactly the same as found in a match arm.
///
/// # Examples
///
/// ```
/// # use either_of::*;
/// let either2 = either!(Some("hello"),
///     Some(s) => s.len(),
///     None => 0.0,
/// );
/// assert!(matches!(either2, Either::<usize, f64>::Left(5)));
///
/// let either3 = either!(Some("admin"),
///     Some("admin") => "hello admin",
///     Some(_) => 'x',
///     _ => 0,
/// );
/// assert!(matches!(either3, EitherOf3::<&str, char, i32>::A("hello admin")));
/// ```
#[macro_export]
macro_rules! either {
    ($match:expr, $left_pattern:pat => $left_expression:expr, $right_pattern:pat => $right_expression:expr,) => {
        match $match {
            $left_pattern => $crate::Either::Left($left_expression),
            $right_pattern => $crate::Either::Right($right_expression),
        }
    };
    ($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr,) => {
        match $match {
            $a_pattern => $crate::EitherOf3::A($a_expression),
            $b_pattern => $crate::EitherOf3::B($b_expression),
            $c_pattern => $crate::EitherOf3::C($c_expression),
        }
    };
    ($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr,) => {
        match $match {
            $a_pattern => $crate::EitherOf4::A($a_expression),
            $b_pattern => $crate::EitherOf4::B($b_expression),
            $c_pattern => $crate::EitherOf4::C($c_expression),
            $d_pattern => $crate::EitherOf4::D($d_expression),
        }
    };
    ($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr, $e_pattern:pat => $e_expression:expr,) => {
        match $match {
            $a_pattern => $crate::EitherOf5::A($a_expression),
            $b_pattern => $crate::EitherOf5::B($b_expression),
            $c_pattern => $crate::EitherOf5::C($c_expression),
            $d_pattern => $crate::EitherOf5::D($d_expression),
            $e_pattern => $crate::EitherOf5::E($e_expression),
        }
    };
    ($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr, $e_pattern:pat => $e_expression:expr, $f_pattern:pat => $f_expression:expr,) => {
        match $match {
            $a_pattern => $crate::EitherOf6::A($a_expression),
            $b_pattern => $crate::EitherOf6::B($b_expression),
            $c_pattern => $crate::EitherOf6::C($c_expression),
            $d_pattern => $crate::EitherOf6::D($d_expression),
            $e_pattern => $crate::EitherOf6::E($e_expression),
            $f_pattern => $crate::EitherOf6::F($f_expression),
        }
    }; // if you need more eithers feel free to open a PR ;-)
}

// compile time test
#[test]
fn either_macro() {
    let _: Either<&str, f64> = either!(12,
        12 => "12",
        _ => 0.0,
    );
    let _: EitherOf3<&str, f64, i32> = either!(12,
        12 => "12",
        13 => 0.0,
        _ => 12,
    );
    let _: EitherOf4<&str, f64, char, i32> = either!(12,
        12 => "12",
        13 => 0.0,
        14 => ' ',
        _ => 12,
    );
    let _: EitherOf5<&str, f64, char, f32, i32> = either!(12,
        12 => "12",
        13 => 0.0,
        14 => ' ',
        15 => 0.0f32,
        _ => 12,
    );
    let _: EitherOf6<&str, f64, char, f32, u8, i32> = either!(12,
        12 => "12",
        13 => 0.0,
        14 => ' ',
        15 => 0.0f32,
        16 => 24u8,
        _ => 12,
    );
}
examples/axum_js_ssr/Cargo.toml  (new file, 111)
@@ -0,0 +1,111 @@
[package]
name = "axum_js_ssr"
version = "0.1.0"
edition = "2021"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
axum = { version = "0.7.5", optional = true }
console_error_panic_hook = "0.1.7"
console_log = "1.0"
gloo-utils = "0.2.0"
html-escape = "0.2.13"
http-body-util = { version = "0.1.0", optional = true }
js-sys = { version = "0.3.69", optional = true }
leptos = { path = "../../leptos", features = ["tracing"] }
leptos_meta = { path = "../../meta" }
leptos_axum = { path = "../../integrations/axum", optional = true }
leptos_router = { path = "../../router" }
serde = { version = "1.0", features = ["derive"] }
thiserror = "1.0"
tokio = { version = "1.39", features = [ "rt-multi-thread", "macros", "time" ], optional = true }
tower = { version = "0.4.13", optional = true }
tower-http = { version = "0.5.2", features = ["fs"], optional = true }
wasm-bindgen = "0.2.92"
web-sys = { version = "0.3.69", features = [ "AddEventListenerOptions", "Document", "Element", "Event", "EventListener", "EventTarget", "Performance", "Window" ], optional = true }

[features]
hydrate = [
  "leptos/hydrate",
  "dep:js-sys",
  "dep:web-sys",
]
ssr = [
  "dep:axum",
  "dep:http-body-util",
  "dep:tower",
  "dep:tower-http",
  "dep:tokio",
  "leptos/ssr",
  "leptos_meta/ssr",
  "dep:leptos_axum",
  "leptos_router/ssr",
]

[profile.release]
panic = "abort"

[profile.wasm-release]
inherits = "release"
opt-level = 'z'
lto = true
codegen-units = 1
panic = "abort"

[package.metadata.cargo-all-features]
denylist = ["axum", "tower", "tower-http", "tokio", "sqlx", "leptos_axum"]
skip_feature_sets = [["ssr", "hydrate"]]

[package.metadata.leptos]
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
output-name = "axum_js_ssr"
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
site-root = "target/site"
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
# Defaults to pkg
site-pkg-dir = "pkg"
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
style-file = "style/main.scss"
# Assets source dir. All files found here will be copied and synchronized to site-root.
# The assets-dir cannot have a sub directory with the same name/path as site-pkg-dir.
#
# Optional. Env: LEPTOS_ASSETS_DIR.
assets-dir = "assets"
# The IP and port (ex: 127.0.0.1:3000) where the server serves the content. Use it in your server setup.
site-addr = "127.0.0.1:3000"
# The port to use for automatic reload monitoring
reload-port = 3001
# [Optional] Command to use when running end2end tests. It will run in the end2end dir.
# [Windows] for non-WSL use "npx.cmd playwright test"
# This binary name can be checked in Powershell with Get-Command npx
end2end-cmd = "npx playwright test"
end2end-dir = "end2end"
# The browserlist query used for optimizing the CSS.
browserquery = "defaults"
# Set by cargo-leptos watch when building with that tool. Controls whether autoreload JS will be included in the head
watch = false
# The environment Leptos will run in, usually either "DEV" or "PROD"
env = "DEV"
# The features to use when compiling the bin target
#
# Optional. Can be over-ridden with the command line parameter --bin-features
bin-features = ["ssr"]

# If the --no-default-features flag should be used when compiling the bin target
#
# Optional. Defaults to false.
bin-default-features = false

# The features to use when compiling the lib target
#
# Optional. Can be over-ridden with the command line parameter --lib-features
lib-features = ["hydrate"]

# If the --no-default-features flag should be used when compiling the lib target
#
# Optional. Defaults to false.
lib-default-features = false

lib-profile-release = "wasm-release"
examples/axum_js_ssr/LICENSE  (new file, 21)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 Tommy Yu

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
examples/axum_js_ssr/Makefile.toml  (new file, 8)
@@ -0,0 +1,8 @@
extend = [
  { path = "../cargo-make/main.toml" },
  { path = "../cargo-make/cargo-leptos.toml" },
]

[env]

CLIENT_PROCESS_NAME = "axum_js_ssr"
examples/axum_js_ssr/README.md  (new file, 10)
@@ -0,0 +1,10 @@
# Leptos Axum JS SSR Example

This example shows the various ways that JavaScript may be included in
a Leptos application. The intent is to demonstrate how this may be done,
and how it may cause the application to fail in unexpected ways if done
incorrectly.

## Quick Start

Run `cargo leptos watch` to run this example.
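One of the approaches demonstrated later in this diff (`src/hljs.rs`) is binding to the bundled highlight.js ES module through `wasm-bindgen`. The sketch below is only a condensed restatement of that pattern: the module path and `js_namespace = default` attribute mirror the example's own code, while the `highlight_block` helper name is purely illustrative and not part of the diff.

```rust
use js_sys::{Object, Reflect};
use wasm_bindgen::prelude::*;

// Bind the ES build shipped in node_modules; `default` is the module's default export.
#[wasm_bindgen(module = "/node_modules/@highlightjs/cdn-assets/es/highlight.min.js")]
extern "C" {
    #[wasm_bindgen(catch, js_namespace = default, js_name = highlight)]
    fn highlight(code: String, options: Object) -> Result<Object, JsValue>;
}

// Illustrative helper (hypothetical name): highlight a snippet as a given
// language and pull the rendered HTML out of the HighlightResult object.
pub fn highlight_block(code: &str, lang: &str) -> Option<String> {
    let options = Object::new();
    Reflect::set(&options, &"language".into(), &lang.into()).ok()?;
    let result = highlight(code.to_string(), options).ok()?;
    Reflect::get(&result, &"value".into()).ok()?.as_string()
}
```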
BIN  examples/axum_js_ssr/assets/favicon.ico  (new binary file, not shown; 15 KiB)
examples/axum_js_ssr/node_modules/@highlightjs/cdn-assets/LICENSE  (generated, vendored, new file, 29)
@@ -0,0 +1,29 @@
BSD 3-Clause License

Copyright (c) 2006, Ivan Sagalaev.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
examples/axum_js_ssr/node_modules/@highlightjs/cdn-assets/README.md  (generated, vendored, new file, 47)
@@ -0,0 +1,47 @@
# Highlight.js CDN Assets

**Note: this contains only a subset of files from the full package from NPM.**

[](https://packagephobia.now.sh/result?p=highlight.js)

**This package contains only the CDN build assets of highlight.js.**

This may be what you want if you'd like to install the pre-built distributable highlight.js client-side assets via NPM. If you're wanting to use highlight.js mainly on the server-side you likely want the [highlight.js][1] package instead.

To access these files via CDN:<br>
https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@latest/build/

**If you just want a single .js file with the common languages built-in:
<https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@latest/build/highlight.min.js>**

---

## Highlight.js

Highlight.js is a syntax highlighter written in JavaScript. It works in
the browser as well as on the server. It works with pretty much any
markup, doesn’t depend on any framework, and has automatic language
detection.

If you'd like to read the full README:<br>
<https://github.com/highlightjs/highlight.js/blob/main/README.md>

## License

Highlight.js is released under the BSD License. See [LICENSE][7] file
for details.

## Links

The official site for the library is at <https://highlightjs.org/>.

The Github project may be found at: <https://github.com/highlightjs/highlight.js>

Further in-depth documentation for the API and other topics is at
<http://highlightjs.readthedocs.io/>.

A list of the Core Team and contributors can be found in the [CONTRIBUTORS.md][8] file.

[1]: https://www.npmjs.com/package/highlight.js
[7]: https://github.com/highlightjs/highlight.js/blob/main/LICENSE
[8]: https://github.com/highlightjs/highlight.js/blob/main/CONTRIBUTORS.md
1230  examples/axum_js_ssr/node_modules/@highlightjs/cdn-assets/es/highlight.min.js  (generated, vendored, new file): file diff suppressed because one or more lines are too long
1232  examples/axum_js_ssr/node_modules/@highlightjs/cdn-assets/highlight.min.js  (generated, vendored, new file): file diff suppressed because one or more lines are too long
examples/axum_js_ssr/node_modules/@highlightjs/cdn-assets/package.json  (generated, vendored, new file, 93)
@@ -0,0 +1,93 @@
{
  "name": "@highlightjs/cdn-assets",
  "description": "Syntax highlighting with language autodetection. (pre-compiled CDN assets)",
  "keywords": [
    "highlight",
    "syntax"
  ],
  "homepage": "https://highlightjs.org/",
  "version": "11.10.0",
  "author": "Josh Goebel <hello@joshgoebel.com>",
  "contributors": [
    "Josh Goebel <hello@joshgoebel.com>",
    "Egor Rogov <e.rogov@postgrespro.ru>",
    "Vladimir Jimenez <me@allejo.io>",
    "Ivan Sagalaev <maniac@softwaremaniacs.org>",
    "Jeremy Hull <sourdrums@gmail.com>",
    "Oleg Efimov <efimovov@gmail.com>",
    "Gidi Meir Morris <gidi@gidi.io>",
    "Jan T. Sott <git@idleberg.com>",
    "Li Xuanji <xuanji@gmail.com>",
    "Marcos Cáceres <marcos@marcosc.com>",
    "Sang Dang <sang.dang@polku.io>"
  ],
  "bugs": {
    "url": "https://github.com/highlightjs/highlight.js/issues"
  },
  "license": "BSD-3-Clause",
  "repository": {
    "type": "git",
    "url": "git://github.com/highlightjs/highlight.js.git"
  },
  "sideEffects": [
    "./es/common.js",
    "./lib/common.js",
    "*.css",
    "*.scss"
  ],
  "scripts": {
    "mocha": "mocha",
    "lint": "eslint src/*.js src/lib/*.js demo/*.js tools/**/*.js --ignore-pattern vendor",
    "lint-languages": "eslint --no-eslintrc -c .eslintrc.lang.js src/languages/**/*.js",
    "build_and_test": "npm run build && npm run test",
    "build_and_test_browser": "npm run build-browser && npm run test-browser",
    "build": "node ./tools/build.js -t node",
    "build-cdn": "node ./tools/build.js -t cdn",
    "build-browser": "node ./tools/build.js -t browser :common",
    "devtool": "npx http-server",
    "test": "mocha test",
    "test-markup": "mocha test/markup",
    "test-detect": "mocha test/detect",
    "test-browser": "mocha test/browser",
    "test-parser": "mocha test/parser"
  },
  "engines": {
    "node": ">=12.0.0"
  },
  "devDependencies": {
    "@colors/colors": "^1.6.0",
    "@rollup/plugin-commonjs": "^26.0.1",
    "@rollup/plugin-json": "^6.0.1",
    "@rollup/plugin-node-resolve": "^15.2.3",
    "@types/mocha": "^10.0.2",
    "@typescript-eslint/eslint-plugin": "^7.15.0",
    "@typescript-eslint/parser": "^7.15.0",
    "clean-css": "^5.3.2",
    "cli-table": "^0.3.1",
    "commander": "^12.1.0",
    "css": "^3.0.0",
    "css-color-names": "^1.0.1",
    "deep-freeze-es6": "^3.0.2",
    "del": "^7.1.0",
    "dependency-resolver": "^2.0.1",
    "eslint": "^8.57.0",
    "eslint-config-standard": "^17.1.0",
    "eslint-plugin-import": "^2.28.1",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-promise": "^6.1.1",
    "glob": "^8.1.0",
    "glob-promise": "^6.0.5",
    "handlebars": "^4.7.8",
    "http-server": "^14.1.1",
    "jsdom": "^24.1.0",
    "lodash": "^4.17.20",
    "mocha": "^10.2.0",
    "refa": "^0.4.1",
    "rollup": "^4.0.2",
    "should": "^13.2.3",
    "terser": "^5.21.0",
    "tiny-worker": "^2.3.0",
    "typescript": "^5.2.2",
    "wcag-contrast": "^3.0.0"
  }
}
examples/axum_js_ssr/node_modules/@highlightjs/cdn-assets/styles/github-dark.min.css  (generated, vendored, new file, 10)
@@ -0,0 +1,10 @@
pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}/*!
Theme: GitHub Dark
Description: Dark theme as seen on github.com
Author: github.com
Maintainer: @Hirse
Updated: 2021-05-15

Outdated base version: https://github.com/primer/github-syntax-dark
Current colors taken from GitHub's CSS
*/.hljs{color:#c9d1d9;background:#0d1117}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#ff7b72}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#d2a8ff}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#79c0ff}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#a5d6ff}.hljs-built_in,.hljs-symbol{color:#ffa657}.hljs-code,.hljs-comment,.hljs-formula{color:#8b949e}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#7ee787}.hljs-subst{color:#c9d1d9}.hljs-section{color:#1f6feb;font-weight:700}.hljs-bullet{color:#f2cc60}.hljs-emphasis{color:#c9d1d9;font-style:italic}.hljs-strong{color:#c9d1d9;font-weight:700}.hljs-addition{color:#aff5b4;background-color:#033a16}.hljs-deletion{color:#ffdcd7;background-color:#67060c}
examples/axum_js_ssr/node_modules/@highlightjs/cdn-assets/styles/github.min.css  (generated, vendored, new file, 10)
@@ -0,0 +1,10 @@
pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}/*!
Theme: GitHub
Description: Light theme as seen on github.com
Author: github.com
Maintainer: @Hirse
Updated: 2021-05-15

Outdated base version: https://github.com/primer/github-syntax-light
Current colors taken from GitHub's CSS
*/.hljs{color:#24292e;background:#fff}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#d73a49}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#6f42c1}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#005cc5}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#032f62}.hljs-built_in,.hljs-symbol{color:#e36209}.hljs-code,.hljs-comment,.hljs-formula{color:#6a737d}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#22863a}.hljs-subst{color:#24292e}.hljs-section{color:#005cc5;font-weight:700}.hljs-bullet{color:#735c0f}.hljs-emphasis{color:#24292e;font-style:italic}.hljs-strong{color:#24292e;font-weight:700}.hljs-addition{color:#22863a;background-color:#f0fff4}.hljs-deletion{color:#b31d28;background-color:#ffeef0}
examples/axum_js_ssr/package.json  (new file, 6)
@@ -0,0 +1,6 @@
{
  "name": "axum_js_ssr",
  "dependencies": {
    "@highlightjs/cdn-assets": "^11.10.0"
  }
}
examples/axum_js_ssr/rust-toolchain.toml  (new file, 2)
@@ -0,0 +1,2 @@
[toolchain]
channel = "stable" # test change
examples/axum_js_ssr/src/api.rs  (new file, 8)
@@ -0,0 +1,8 @@
use leptos::{prelude::ServerFnError, server};

#[server]
pub async fn fetch_code() -> Result<String, ServerFnError> {
    // emulate loading of code from a database/version control/etc
    tokio::time::sleep(std::time::Duration::from_millis(50)).await;
    Ok(crate::consts::CH05_02A.to_string())
}
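The `fetch_code` server function above is what the application calls to load the snippet it later highlights. The example's real usage lives in `src/app.rs`, whose diff is suppressed below as too large, so the following is only a rough usage sketch under stated assumptions: it assumes the leptos 0.7 `Resource`/`Suspense` API, and the `CodeDemo` component name is illustrative rather than taken from the diff.

```rust
use leptos::prelude::*;
use crate::api::fetch_code;

// Illustrative component (not part of the diff): fetch the snippet through a
// Resource so it resolves on the server during SSR and on the client after
// hydration, and render it inside a Suspense boundary.
#[component]
pub fn CodeDemo() -> impl IntoView {
    let code = Resource::new(|| (), |_| fetch_code());
    view! {
        <Suspense fallback=|| view! { <p>"Loading code example..."</p> }>
            {move || code.get().map(|result| match result {
                Ok(code) => view! { <pre><code>{code}</code></pre> }.into_any(),
                Err(_) => view! { <p>"Failed to load the code example."</p> }.into_any(),
            })}
        </Suspense>
    }
}
```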
1133  examples/axum_js_ssr/src/app.rs  (new file): file diff suppressed because it is too large
examples/axum_js_ssr/src/consts.rs  (new file, 39)
@@ -0,0 +1,39 @@
// Example programs from the Rust Programming Language Book

pub const CH03_05A: &str = r#"fn main() {
    let number = 3;

    if number < 5 {
        println!("condition was true");
    } else {
        println!("condition was false");
    }
}




"#;

// For some reason, swapping the code examples "fixes" example 6. It
// might have something to do with the lower complexity of highlighting
// a shorter example. Anyway, extra newlines are included so that the
// shorter example matches the longer one, to avoid reflowing the table
// during the async resource loading for CSR.

pub const CH05_02A: &str = r#"fn main() {
    let width1 = 30;
    let height1 = 50;

    println!(
        "The area of the rectangle is {} square pixels.",
        area(width1, height1)
    );
}

fn area(width: u32, height: u32) -> u32 {
    width * height
}
"#;

pub const LEPTOS_HYDRATED: &str = "_leptos_hydrated";
examples/axum_js_ssr/src/hljs.rs  (new file, 59)
@@ -0,0 +1,59 @@
#[cfg(not(feature = "ssr"))]
mod csr {
    use gloo_utils::format::JsValueSerdeExt;
    use js_sys::{
        Object,
        Reflect::{get, set},
    };
    use wasm_bindgen::{prelude::wasm_bindgen, JsValue};

    #[wasm_bindgen(
        module = "/node_modules/@highlightjs/cdn-assets/es/highlight.min.js"
    )]
    extern "C" {
        type HighlightOptions;

        #[wasm_bindgen(catch, js_namespace = default, js_name = highlight)]
        fn highlight_lang(
            code: String,
            options: Object,
        ) -> Result<Object, JsValue>;

        #[wasm_bindgen(js_namespace = default, js_name = highlightAll)]
        pub fn highlight_all();
    }

    // The `ignoreIllegals` argument is kept out of the default case, and since there are no optional
    // arguments in Rust, it would have to be provided through a separate function (e.g. a
    // `highlight_ignore_illegals`), much like how `web_sys` does it for the browser APIs. For
    // simplicity, only the highlighted HTML code is returned on success, and None on error.
    pub fn highlight(code: String, lang: String) -> Option<String> {
        let options = js_sys::Object::new();
        set(&options, &"language".into(), &lang.into())
            .expect("failed to assign lang to options");
        highlight_lang(code, options)
            .map(|result| {
                let value = get(&result, &"value".into())
                    .expect("HighlightResult failed to contain the value key");
                value.into_serde().expect("Value should have been a string")
            })
            .ok()
    }
}

#[cfg(feature = "ssr")]
mod ssr {
    // noop under ssr
    pub fn highlight_all() {}

    // TODO see if there is a Rust-based solution that will enable isomorphic rendering for this feature.
    // the current (disabled) implementation simply calls html_escape.
    // pub fn highlight(code: String, _lang: String) -> Option<String> {
    //     Some(html_escape::encode_text(&code).into_owned())
    // }
}

#[cfg(not(feature = "ssr"))]
pub use csr::*;
#[cfg(feature = "ssr")]
pub use ssr::*;
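The comment in `hljs.rs` names a possible `highlight_ignore_illegals` companion rather than adding an optional argument. A hedged sketch of what such a helper could look like, reusing the same `highlight_lang` binding and the `Reflect::{get, set}` imports from the `csr` module above (the helper itself is not part of the diff):

```rust
// Hypothetical companion to `highlight`: same call, but with
// `ignoreIllegals: true` added to the options object so highlight.js does not
// throw on tokens that are illegal for the requested language.
pub fn highlight_ignore_illegals(code: String, lang: String) -> Option<String> {
    let options = js_sys::Object::new();
    set(&options, &"language".into(), &lang.into()).ok()?;
    set(&options, &"ignoreIllegals".into(), &true.into()).ok()?;
    highlight_lang(code, options)
        .ok()
        .and_then(|result| get(&result, &"value".into()).ok())
        .and_then(|value| value.as_string())
}
```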
examples/axum_js_ssr/src/lib.rs  (new file, 51)
@@ -0,0 +1,51 @@
pub mod api;
pub mod app;
pub mod consts;
pub mod hljs;

#[cfg(feature = "hydrate")]
#[wasm_bindgen::prelude::wasm_bindgen]
pub fn hydrate() {
    use app::*;
    use consts::LEPTOS_HYDRATED;
    use std::panic;
    panic::set_hook(Box::new(|info| {
        // this custom hook will call out to show the usual error log at
        // the console while also attempting to update the UI to indicate
        // that a restart of the application is required to continue.
        console_error_panic_hook::hook(info);
        let window = leptos::prelude::window();
        if !matches!(
            js_sys::Reflect::get(&window, &wasm_bindgen::JsValue::from_str(LEPTOS_HYDRATED)),
            Ok(t) if t == true
        ) {
            let document = leptos::prelude::document();
            let _ = document.query_selector("#reset").map(|el| {
                el.map(|el| {
                    el.set_class_name("panicked");
                })
            });
            let _ = document.query_selector("#notice").map(|el| {
                el.map(|el| {
                    el.set_class_name("panicked");
                })
            });
        }
    }));
    leptos::mount::hydrate_body(App);

    let window = leptos::prelude::window();
    js_sys::Reflect::set(
        &window,
        &wasm_bindgen::JsValue::from_str(LEPTOS_HYDRATED),
        &wasm_bindgen::JsValue::TRUE,
    )
    .expect("error setting hydrated status");
    let event = web_sys::Event::new(LEPTOS_HYDRATED)
        .expect("error creating hydrated event");
    let document = leptos::prelude::document();
    document
        .dispatch_event(&event)
        .expect("error dispatching hydrated event");
    leptos::logging::log!("dispatched hydrated event");
}
examples/axum_js_ssr/src/main.rs  (new file, 152)
@@ -0,0 +1,152 @@
#[cfg(feature = "ssr")]
mod latency {
    use std::sync::{Mutex, OnceLock};
    pub static LATENCY: OnceLock<
        Mutex<std::iter::Cycle<std::slice::Iter<'_, u64>>>,
    > = OnceLock::new();
    pub static ES_LATENCY: OnceLock<
        Mutex<std::iter::Cycle<std::slice::Iter<'_, u64>>>,
    > = OnceLock::new();
}

#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
    use axum::{
        body::Body,
        extract::Request,
        http::{
            header::{self, HeaderValue},
            StatusCode,
        },
        middleware::{self, Next},
        response::{IntoResponse, Response},
        routing::get,
        Router,
    };
    use axum_js_ssr::app::*;
    use http_body_util::BodyExt;
    use leptos::prelude::*;
    use leptos_axum::{generate_route_list, LeptosRoutes};

    latency::LATENCY.get_or_init(|| [0, 4, 40, 400].iter().cycle().into());
    latency::ES_LATENCY.get_or_init(|| [0].iter().cycle().into());
    // Having the ES_LATENCY (a cycle of latency for the loading of the es
    // module) in an identical cycle as LATENCY (for the standard version)
    // adversely influences the intended demo, as this ultimately delays
    // hydration when set too high, which can cause a panic in every case.
    // If you want to test the effects of the delay, just modify the list of
    // values for the desired cycle of delays.

    let conf = get_configuration(None).unwrap();
    let addr = conf.leptos_options.site_addr;
    let leptos_options = conf.leptos_options;
    // Generate the list of routes in your Leptos App
    let routes = generate_route_list(App);

    async fn highlight_js() -> impl IntoResponse {
        (
            [(header::CONTENT_TYPE, "text/javascript")],
            include_str!(
                "../node_modules/@highlightjs/cdn-assets/highlight.min.js"
            ),
        )
    }

    async fn latency_for_highlight_js(
        req: Request,
        next: Next,
    ) -> Result<impl IntoResponse, (StatusCode, String)> {
        let uri_parts = &mut req.uri().path().rsplit('/');

        let is_highlightjs = uri_parts.next() == Some("highlight.min.js");
        let es = uri_parts.next() == Some("es");
        let module_type = if es { "es module " } else { "standard " };
        let res = next.run(req).await;
        if is_highlightjs {
            // additional processing if the filename is the test subject
            let (mut parts, body) = res.into_parts();
            let bytes = body
                .collect()
                .await
                .map_err(|err| {
                    (
                        StatusCode::BAD_REQUEST,
                        format!("error reading body: {err}"),
                    )
                })?
                .to_bytes();
            let latency = if es {
                &latency::ES_LATENCY
            } else {
                &latency::LATENCY
            };

            let delay = match latency
                .get()
                .expect("latency cycle wasn't set up")
                .try_lock()
            {
                Ok(ref mut mutex) => {
                    *mutex.next().expect("cycle always has next")
                }
                Err(_) => 0,
            };

            // inject the logging of the delay used into the target script
            log!(
                "loading {module_type}highlight.min.js with latency of \
                 {delay} ms"
            );
            let js_log = format!(
                "\nconsole.log('loaded {module_type}highlight.js with a \
                 minimum latency of {delay} ms');"
            );
            tokio::time::sleep(std::time::Duration::from_millis(delay)).await;

            let bytes = [bytes, js_log.into()].concat();
            let length = bytes.len();
            let body = Body::from(bytes);

            // Provide the bare minimum set of headers to avoid browser cache.
            parts.headers = header::HeaderMap::from_iter(
                [
                    (
                        header::CONTENT_TYPE,
                        HeaderValue::from_static("text/javascript"),
                    ),
                    (header::CONTENT_LENGTH, HeaderValue::from(length)),
                ]
                .into_iter(),
            );
            Ok(Response::from_parts(parts, body))
        } else {
            Ok(res)
        }
    }

    let app = Router::new()
        .route("/highlight.min.js", get(highlight_js))
        .leptos_routes(&leptos_options, routes, {
            let leptos_options = leptos_options.clone();
            move || shell(leptos_options.clone())
        })
        .fallback(leptos_axum::file_and_error_handler(shell))
        .layer(middleware::from_fn(latency_for_highlight_js))
        .with_state(leptos_options);

    // run our app with hyper
    // `axum::Server` is a re-export of `hyper::Server`
    log!("listening on http://{}", &addr);
    let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
    axum::serve(listener, app.into_make_service())
        .await
        .unwrap();
}

#[cfg(not(feature = "ssr"))]
pub fn main() {
    // no client-side main function
    // unless we want this to work with e.g., Trunk for pure client-side testing
    // see lib.rs for hydration function instead
}
171
examples/axum_js_ssr/style/main.scss
Normal file
171
examples/axum_js_ssr/style/main.scss
Normal file
@@ -0,0 +1,171 @@
html, body {
    margin: 0;
    padding: 0;
    font-family: sans-serif;
    height: 100vh;
    overflow: hidden;
}

body {
    display: flex;
    flex-flow: row nowrap;
}

nav {
    min-width: 17em;
    height: 100vh;
    counter-reset: example-counter 0;
    list-style-type: none;
    list-style-position: outside;
    overflow: auto;
}

nav a {
    display: block;
    padding: 0.5em 2em;
    text-decoration: none;
}

nav a small {
    display: block;
}

nav a.example::before {
    counter-reset: subexample-counter 0;
    counter-increment: example-counter 1;
    content: counter(example-counter) ". ";
}

nav a.subexample::before {
    counter-increment: subexample-counter 1;
    content: counter(example-counter) "." counter(subexample-counter) " ";
}

div#notice {
    display: none;
}

main div#notice.panicked {
    position: sticky;
    top: 0;
    padding: 0.5em 2em;
    display: block;
}

main {
    width: 100%;
    overflow: auto;
}

main article {
    max-width: 60em;
    margin: 0 1em;
    padding: 0 1em;
}

main p, main li {
    line-height: 1.3em;
}

main li pre code, main div pre code {
    display: block;
    line-height: normal;
}

main ol, main ul {
    padding-left: 2em;
}

h2>code, p>code, li>code {
    border-radius: 3px;
    padding: 2px;
}

li pre code, div pre code {
    margin: 0 !important;
    padding: 0 !important;
}

#code-demo {
    overflow-x: auto;
}

#code-demo table {
    width: 50em;
    margin: auto;
}

#code-demo table td {
    vertical-align: top;
}

#code-demo table code {
    display: block;
    padding: 1em;
}

@media (prefers-color-scheme: light) {
    nav {
        background: #f7f7f7;
    }

    nav a {
        color: #000;
    }

    nav a[aria-current="page"] {
        background-color: #e0e0e0;
    }

    nav a:hover, h2>code, p>code, li>code {
        background-color: #e7e7e7;
    }

    nav a.panicked, main div#notice.panicked {
        background: #fdd;
    }

    main div#notice.panicked a {
        color: #000;
    }

    nav a.section {
        border-bottom: 1px solid #777;
    }
}

@media (prefers-color-scheme: dark) {
    nav {
        background: #080808;
    }

    nav a {
        color: #fff;
    }

    nav a[aria-current="page"] {
        background-color: #3f3f3f;
    }

    nav a:hover, h2>code, p>code, li>code {
        background-color: #383838;
    }

    nav a.panicked, main div#notice.panicked {
        background: #733;
    }

    main div#notice.panicked a {
        color: #fff;
    }

    nav a.section {
        border-bottom: 1px solid #888;
    }
}

// Just include the raw style as-is because I can't find a quick and easy way to import them just for the
// appropriate media type...
pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}
@media (prefers-color-scheme: light){.hljs{color:#24292e;background:#fff}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#d73a49}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#6f42c1}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#005cc5}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#032f62}.hljs-built_in,.hljs-symbol{color:#e36209}.hljs-code,.hljs-comment,.hljs-formula{color:#6a737d}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#22863a}.hljs-subst{color:#24292e}.hljs-section{color:#005cc5;font-weight:700}.hljs-bullet{color:#735c0f}.hljs-emphasis{color:#24292e;font-style:italic}.hljs-strong{color:#24292e;font-weight:700}.hljs-addition{color:#22863a;background-color:#f0fff4}.hljs-deletion{color:#b31d28;background-color:#ffeef0}}
@media (prefers-color-scheme: dark){.hljs{color:#c9d1d9;background:#0d1117}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#ff7b72}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#d2a8ff}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#79c0ff}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#a5d6ff}.hljs-built_in,.hljs-symbol{color:#ffa657}.hljs-code,.hljs-comment,.hljs-formula{color:#8b949e}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#7ee787}.hljs-subst{color:#c9d1d9}.hljs-section{color:#1f6feb;font-weight:700}.hljs-bullet{color:#f2cc60}.hljs-emphasis{color:#c9d1d9;font-style:italic}.hljs-strong{color:#c9d1d9;font-weight:700}.hljs-addition{color:#aff5b4;background-color:#033a16}.hljs-deletion{color:#ffdcd7;background-color:#67060c}}
@@ -2,6 +2,8 @@
name = "counter_isomorphic"
version = "0.1.0"
edition = "2021"
# std::sync::LazyLock is stabilized in Rust version 1.80.0
rust-version = "1.80.0"

[lib]
crate-type = ["cdylib", "rlib"]
@@ -17,7 +19,6 @@ broadcaster = "1.0"
console_log = "1.0"
console_error_panic_hook = "0.1.7"
futures = "0.3.30"
lazy_static = "1.5"
leptos = { path = "../../leptos" }
leptos_actix = { path = "../../integrations/actix", optional = true }
leptos_router = { path = "../../router" }
@@ -46,13 +47,13 @@ denylist = ["actix-files", "actix-web", "leptos_actix"]
skip_feature_sets = [["ssr", "hydrate"]]

[package.metadata.leptos]
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
output-name = "counter_isomorphic"
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
# When NOT using cargo-leptos this must be updated to "." or the counters will not work. The above warning still applies if you do switch to cargo-leptos later.
site-root = "target/site"
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
# Defaults to pkg
site-pkg-dir = "pkg"
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
# style-file = "src/styles/tailwind.css"

@@ -10,12 +10,12 @@ use tracing::instrument;
pub mod ssr_imports {
    pub use broadcaster::BroadcastChannel;
    pub use std::sync::atomic::{AtomicI32, Ordering};
    use std::sync::LazyLock;

    pub static COUNT: AtomicI32 = AtomicI32::new(0);

    lazy_static::lazy_static! {
        pub static ref COUNT_CHANNEL: BroadcastChannel<i32> = BroadcastChannel::new();
    }
    pub static COUNT_CHANNEL: LazyLock<BroadcastChannel<i32>> =
        LazyLock::new(BroadcastChannel::<i32>::new);
}

#[server]
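Several crates in this comparison make the same dependency change shown above: dropping `lazy_static` in favor of `std::sync::LazyLock`, which is available from Rust 1.80 (hence the new `rust-version` field). As a general sketch of that migration, independent of the repository's code and using an illustrative map rather than the example's `BroadcastChannel`:

use std::collections::HashMap;
use std::sync::LazyLock;

// Before, with the lazy_static crate:
//
//     lazy_static::lazy_static! {
//         static ref TABLE: HashMap<&'static str, i32> = HashMap::from([("a", 1)]);
//     }
//
// After, with std only (Rust 1.80+); the static dereferences to the inner value:
static TABLE: LazyLock<HashMap<&'static str, i32>> =
    LazyLock::new(|| HashMap::from([("a", 1)]));

fn main() {
    assert_eq!(TABLE.get("a"), Some(&1));
}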
1
examples/gtk/Makefile.toml
Normal file
@@ -0,0 +1 @@
|
||||
extend = [{ path = "../cargo-make/main.toml" }]
|
||||
@@ -1,6 +1,5 @@
|
||||
use self::properties::Connect;
|
||||
use gtk::{
|
||||
ffi::GtkWidget,
|
||||
glib::{
|
||||
object::{IsA, IsClass, ObjectExt},
|
||||
Object, Value,
|
||||
@@ -16,7 +15,7 @@ use leptos::{
|
||||
},
|
||||
};
|
||||
use next_tuple::NextTuple;
|
||||
use std::{borrow::Cow, marker::PhantomData};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LeptosGtk;
|
||||
@@ -157,13 +156,13 @@ impl Renderer for LeptosGtk {
|
||||
}
|
||||
|
||||
fn remove_node(
|
||||
parent: &Self::Element,
|
||||
child: &Self::Node,
|
||||
_parent: &Self::Element,
|
||||
_child: &Self::Node,
|
||||
) -> Option<Self::Node> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn remove(node: &Self::Node) {
|
||||
fn remove(_node: &Self::Node) {
|
||||
todo!()
|
||||
}
|
||||
|
||||
@@ -171,19 +170,19 @@ impl Renderer for LeptosGtk {
|
||||
node.0.parent().map(Element::from)
|
||||
}
|
||||
|
||||
fn first_child(node: &Self::Node) -> Option<Self::Node> {
|
||||
fn first_child(_node: &Self::Node) -> Option<Self::Node> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn next_sibling(node: &Self::Node) -> Option<Self::Node> {
|
||||
fn next_sibling(_node: &Self::Node) -> Option<Self::Node> {
|
||||
todo!()
|
||||
}
|
||||
|
||||
fn log_node(node: &Self::Node) {
|
||||
todo!()
|
||||
println!("{node:?}");
|
||||
}
|
||||
|
||||
fn clear_children(parent: &Self::Element) {
|
||||
fn clear_children(_parent: &Self::Element) {
|
||||
todo!()
|
||||
}
|
||||
}
|
||||
@@ -368,7 +367,22 @@ where
|
||||
})
|
||||
}
|
||||
|
||||
fn rebuild(self, widget: &Element, state: &mut Self::State) {}
|
||||
fn rebuild(self, widget: &Element, state: &mut Self::State) {
|
||||
let prev_value = state.take_value();
|
||||
let widget = widget.to_owned();
|
||||
*state = RenderEffect::new_with_value(
|
||||
move |prev| {
|
||||
let value = self();
|
||||
if let Some(mut state) = prev {
|
||||
value.rebuild(&widget, &mut state);
|
||||
state
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
},
|
||||
prev_value,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn button() -> LGtkWidget<gtk::Button, (), ()> {
|
||||
@@ -400,9 +414,9 @@ mod widgets {
|
||||
}
|
||||
|
||||
pub mod properties {
|
||||
use super::{
|
||||
Element, LGtkWidget, LGtkWidgetState, LeptosGtk, Property, WidgetClass,
|
||||
};
|
||||
#![allow(dead_code)]
|
||||
|
||||
use super::{Element, LGtkWidget, LeptosGtk, Property, WidgetClass};
|
||||
use gtk::glib::{object::ObjectExt, Value};
|
||||
use leptos::tachys::{renderer::Renderer, view::Render};
|
||||
use next_tuple::NextTuple;
|
||||
@@ -425,7 +439,9 @@ pub mod properties {
|
||||
element.0.connect(self.signal_name, false, self.callback);
|
||||
}
|
||||
|
||||
fn rebuild(self, element: &Element, state: &mut Self::State) {}
|
||||
fn rebuild(self, _element: &Element, _state: &mut Self::State) {
|
||||
// TODO we want to *remove* the previous listener, and reconnect with this new one
|
||||
}
|
||||
}
|
||||
|
||||
/* examples for macro */
|
||||
@@ -528,7 +544,7 @@ pub mod properties {
|
||||
}
|
||||
|
||||
/* end examples for properties macro */
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Label {
|
||||
value: String,
|
||||
}
|
||||
@@ -554,7 +570,9 @@ pub mod properties {
|
||||
}
|
||||
|
||||
fn rebuild(self, element: &Element, state: &mut Self::State) {
|
||||
todo!()
|
||||
if self.value != state.value {
|
||||
LeptosGtk::set_attribute(element, "label", &self.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -31,7 +31,6 @@ tokio = { version = "1.39", features = ["full"], optional = true }
|
||||
http = { version = "1.1", optional = true }
|
||||
web-sys = { version = "0.3.70", features = ["AbortController", "AbortSignal"] }
|
||||
wasm-bindgen = "0.2.93"
|
||||
lazy_static = "1.5"
|
||||
rust-embed = { version = "8.5", features = [
|
||||
"axum",
|
||||
"mime_guess",
|
||||
|
||||
@@ -162,22 +162,24 @@ pub fn App() -> impl IntoView {
|
||||
<table class="table table-hover table-striped test-data">
|
||||
<tbody>
|
||||
<For
|
||||
each={move || data.get()}
|
||||
key={|row| row.id}
|
||||
each=move || data.get()
|
||||
key=|row| row.id
|
||||
children=move |row: RowData| {
|
||||
let row_id = row.id;
|
||||
let label = row.label;
|
||||
let is_selected = is_selected.clone();
|
||||
ViewTemplate::new(view! {
|
||||
<tr class:danger={move || is_selected.selected(Some(row_id))}>
|
||||
<td class="col-md-1">{row_id.to_string()}</td>
|
||||
<td class="col-md-4"><a on:click=move |_| set_selected.set(Some(row_id))>{move || label.get()}</a></td>
|
||||
<td class="col-md-1"><a on:click=move |_| remove(row_id)><span class="glyphicon glyphicon-remove" aria-hidden="true"></span></a></td>
|
||||
<td class="col-md-6"/>
|
||||
</tr>
|
||||
})
|
||||
template! {
|
||||
< tr class : danger = { move || is_selected.selected(Some(row_id)) }
|
||||
> < td class = "col-md-1" > { row_id.to_string() } </ td > < td
|
||||
class = "col-md-4" >< a on : click = move | _ | set_selected
|
||||
.set(Some(row_id)) > { move || label.get() } </ a ></ td > < td
|
||||
class = "col-md-1" >< a on : click = move | _ | remove(row_id) ><
|
||||
span class = "glyphicon glyphicon-remove" aria - hidden = "true" ></
|
||||
span ></ a ></ td > < td class = "col-md-6" /> </ tr >
|
||||
}
|
||||
}
|
||||
/>
|
||||
|
||||
</tbody>
|
||||
</table>
|
||||
<span class="preloadicon glyphicon glyphicon-remove" aria-hidden="true"></span>
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
name = "ssr_modes"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
# std::sync::LazyLock is stabilized in Rust version 1.80.0
|
||||
rust-version = "1.80.0"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
@@ -11,7 +13,6 @@ actix-files = { version = "0.6.6", optional = true }
|
||||
actix-web = { version = "4.8", optional = true, features = ["macros"] }
|
||||
console_error_panic_hook = "0.1.7"
|
||||
console_log = "1.0"
|
||||
lazy_static = "1.5"
|
||||
leptos = { path = "../../leptos" }
|
||||
leptos_meta = { path = "../../meta" }
|
||||
leptos_actix = { path = "../../integrations/actix", optional = true }
|
||||
@@ -38,12 +39,12 @@ denylist = ["actix-files", "actix-web", "leptos_actix"]
|
||||
skip_feature_sets = [["ssr", "hydrate"]]
|
||||
|
||||
[package.metadata.leptos]
|
||||
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
|
||||
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
|
||||
output-name = "ssr_modes"
|
||||
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
|
||||
site-root = "target/site"
|
||||
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
|
||||
# Defaults to pkg
|
||||
# Defaults to pkg
|
||||
site-pkg-dir = "pkg"
|
||||
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
|
||||
style-file = "style/main.scss"
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use lazy_static::lazy_static;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use leptos::prelude::*;
|
||||
use leptos_meta::*;
|
||||
use leptos_router::{
|
||||
@@ -146,8 +147,9 @@ fn Post() -> impl IntoView {
|
||||
}
|
||||
|
||||
// Dummy API
|
||||
lazy_static! {
|
||||
static ref POSTS: Vec<Post> = vec![
|
||||
|
||||
static POSTS: LazyLock<[Post; 3]> = LazyLock::new(|| {
|
||||
[
|
||||
Post {
|
||||
id: 0,
|
||||
title: "My first post".to_string(),
|
||||
@@ -163,8 +165,8 @@ lazy_static! {
|
||||
title: "My third post".to_string(),
|
||||
content: "This is my third post".to_string(),
|
||||
},
|
||||
];
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
#[derive(Error, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum PostError {
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
name = "ssr_modes_axum"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
# std::sync::LazyLock is stabilized in Rust version 1.80.0
|
||||
rust-version = "1.80.0"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
@@ -9,7 +11,6 @@ crate-type = ["cdylib", "rlib"]
|
||||
[dependencies]
|
||||
console_error_panic_hook = "0.1.7"
|
||||
console_log = "1.0"
|
||||
lazy_static = "1.5"
|
||||
leptos = { path = "../../leptos", features = [
|
||||
"hydration",
|
||||
] } #"nightly", "hydration"] }
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use lazy_static::lazy_static;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use leptos::prelude::*;
|
||||
use leptos_meta::MetaTags;
|
||||
use leptos_meta::*;
|
||||
@@ -261,8 +262,9 @@ pub fn Admin() -> impl IntoView {
|
||||
}
|
||||
|
||||
// Dummy API
|
||||
lazy_static! {
|
||||
static ref POSTS: Vec<Post> = vec![
|
||||
|
||||
static POSTS: LazyLock<[Post; 3]> = LazyLock::new(|| {
|
||||
[
|
||||
Post {
|
||||
id: 0,
|
||||
title: "My first post".to_string(),
|
||||
@@ -278,8 +280,8 @@ lazy_static! {
|
||||
title: "My third post".to_string(),
|
||||
content: "This is my third post".to_string(),
|
||||
},
|
||||
];
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
#[derive(Error, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub enum PostError {
|
||||
|
||||
@@ -13,6 +13,9 @@ leptos = { path = "../../leptos", features = ["csr"] }
|
||||
reactive_stores = { path = "../../reactive_stores" }
|
||||
reactive_stores_macro = { path = "../../reactive_stores_macro" }
|
||||
console_error_panic_hook = "0.1.7"
|
||||
chrono = { version = "0.4.38", features = ["serde"] }
|
||||
serde = { version = "1.0.210", features = ["derive"] }
|
||||
serde_json = "1.0.128"
|
||||
|
||||
[dev-dependencies]
|
||||
wasm-bindgen = "0.2.93"
|
||||
|
||||
@@ -3,6 +3,11 @@
|
||||
<head>
|
||||
<link data-trunk rel="rust" data-wasm-opt="z"/>
|
||||
<link data-trunk rel="icon" type="image/ico" href="/public/favicon.ico"/>
|
||||
<style>
|
||||
.hidden {
|
||||
display: none;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body></body>
|
||||
</html>
|
||||
</html>
|
||||
|
||||
@@ -1,43 +1,88 @@
|
||||
use leptos::prelude::*;
|
||||
use reactive_stores::{Field, Store, StoreFieldIterator};
|
||||
use reactive_stores_macro::Store;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering};
|
||||
|
||||
#[derive(Debug, Store)]
|
||||
use chrono::{Local, NaiveDate};
|
||||
use leptos::prelude::*;
|
||||
use reactive_stores::{Field, Patch, Store};
|
||||
use reactive_stores_macro::{Patch, Store};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// ID starts higher than 0 because we have a few starting todos by default
|
||||
static NEXT_ID: AtomicUsize = AtomicUsize::new(3);
|
||||
|
||||
#[derive(Debug, Store, Serialize, Deserialize)]
|
||||
struct Todos {
|
||||
user: String,
|
||||
user: User,
|
||||
#[store(key: usize = |todo| todo.id)]
|
||||
todos: Vec<Todo>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Store)]
|
||||
#[derive(Debug, Store, Patch, Serialize, Deserialize)]
|
||||
struct User {
|
||||
name: String,
|
||||
email: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Store, Serialize, Deserialize)]
|
||||
struct Todo {
|
||||
id: usize,
|
||||
label: String,
|
||||
completed: bool,
|
||||
status: Status,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone, Store, Serialize, Deserialize)]
|
||||
enum Status {
|
||||
#[default]
|
||||
Pending,
|
||||
Scheduled,
|
||||
ScheduledFor {
|
||||
date: NaiveDate,
|
||||
},
|
||||
Done,
|
||||
}
|
||||
|
||||
impl Status {
|
||||
pub fn next_step(&mut self) {
|
||||
*self = match self {
|
||||
Status::Pending => Status::ScheduledFor {
|
||||
date: Local::now().naive_local().into(),
|
||||
},
|
||||
Status::Scheduled | Status::ScheduledFor { .. } => Status::Done,
|
||||
Status::Done => Status::Done,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
impl Todo {
|
||||
pub fn new(label: impl ToString) -> Self {
|
||||
Self {
|
||||
id: NEXT_ID.fetch_add(1, Ordering::Relaxed),
|
||||
label: label.to_string(),
|
||||
completed: false,
|
||||
status: Status::Pending,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn data() -> Todos {
|
||||
Todos {
|
||||
user: "Bob".to_string(),
|
||||
user: User {
|
||||
name: "Bob".to_string(),
|
||||
email: "lawblog@bobloblaw.com".into(),
|
||||
},
|
||||
todos: vec![
|
||||
Todo {
|
||||
id: 0,
|
||||
label: "Create reactive store".to_string(),
|
||||
completed: true,
|
||||
status: Status::Pending,
|
||||
},
|
||||
Todo {
|
||||
id: 1,
|
||||
label: "???".to_string(),
|
||||
completed: false,
|
||||
status: Status::Pending,
|
||||
},
|
||||
Todo {
|
||||
id: 2,
|
||||
label: "Profit".to_string(),
|
||||
completed: false,
|
||||
status: Status::Pending,
|
||||
},
|
||||
],
|
||||
}
|
||||
@@ -49,17 +94,10 @@ pub fn App() -> impl IntoView {
|
||||
|
||||
let input_ref = NodeRef::new();
|
||||
|
||||
let rows = move || {
|
||||
store
|
||||
.todos()
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, todo)| view! { <TodoRow store idx todo/> })
|
||||
.collect_view()
|
||||
};
|
||||
|
||||
view! {
|
||||
<p>"Hello, " {move || store.user().get()}</p>
|
||||
<p>"Hello, " {move || store.user().name().get()}</p>
|
||||
<UserForm user=store.user()/>
|
||||
<hr/>
|
||||
<form on:submit=move |ev| {
|
||||
ev.prevent_default();
|
||||
store.todos().write().push(Todo::new(input_ref.get().unwrap().value()));
|
||||
@@ -67,30 +105,69 @@ pub fn App() -> impl IntoView {
|
||||
<label>"Add a Todo" <input type="text" node_ref=input_ref/></label>
|
||||
<input type="submit"/>
|
||||
</form>
|
||||
<ol>{rows}</ol>
|
||||
<div style="display: flex"></div>
|
||||
<ol>
|
||||
// because `todos` is a keyed field, `store.todos()` returns a struct that
|
||||
// directly implements IntoIterator, so we can use it in <For/> and
|
||||
// it will manage reactivity for the store fields correctly
|
||||
<For
|
||||
each=move || {
|
||||
leptos::logging::log!("RERUNNING FOR CALCULATION");
|
||||
store.todos()
|
||||
}
|
||||
|
||||
key=|row| row.id().get()
|
||||
let:todo
|
||||
>
|
||||
<TodoRow store todo/>
|
||||
</For>
|
||||
|
||||
</ol>
|
||||
<pre>{move || serde_json::to_string_pretty(&*store.read())}</pre>
|
||||
}
|
||||
}
|
||||
|
||||
#[component]
|
||||
fn UserForm(#[prop(into)] user: Field<User>) -> impl IntoView {
|
||||
let error = RwSignal::new(None);
|
||||
|
||||
view! {
|
||||
{move || error.get().map(|n| view! { <p>{n}</p> })}
|
||||
<form on:submit:target=move |ev| {
|
||||
ev.prevent_default();
|
||||
match User::from_event(&ev) {
|
||||
Ok(new_user) => {
|
||||
error.set(None);
|
||||
user.patch(new_user);
|
||||
}
|
||||
Err(e) => error.set(Some(e.to_string())),
|
||||
}
|
||||
}>
|
||||
<label>
|
||||
"Name" <input type="text" name="name" prop:value=move || user.name().get()/>
|
||||
</label>
|
||||
<label>
|
||||
"Email" <input type="email" name="email" prop:value=move || user.email().get()/>
|
||||
</label>
|
||||
<input type="submit"/>
|
||||
</form>
|
||||
}
|
||||
}
|
||||
|
||||
#[component]
|
||||
fn TodoRow(
|
||||
store: Store<Todos>,
|
||||
idx: usize,
|
||||
#[prop(into)] todo: Field<Todo>,
|
||||
) -> impl IntoView {
|
||||
let completed = todo.completed();
|
||||
let status = todo.status();
|
||||
let title = todo.label();
|
||||
|
||||
let editing = RwSignal::new(false);
|
||||
let editing = RwSignal::new(true);
|
||||
|
||||
view! {
|
||||
<li
|
||||
style:text-decoration=move || {
|
||||
completed.get().then_some("line-through").unwrap_or_default()
|
||||
}
|
||||
<li style:text-decoration=move || {
|
||||
status.done().then_some("line-through").unwrap_or_default()
|
||||
}>
|
||||
|
||||
class:foo=move || completed.get()
|
||||
>
|
||||
<p
|
||||
class:hidden=move || editing.get()
|
||||
on:click=move |_| {
|
||||
@@ -106,25 +183,48 @@ fn TodoRow(
|
||||
prop:value=move || title.get()
|
||||
on:change=move |ev| {
|
||||
title.set(event_target_value(&ev));
|
||||
editing.set(false);
|
||||
}
|
||||
|
||||
on:blur=move |_| editing.set(false)
|
||||
autofocus
|
||||
/>
|
||||
<input
|
||||
type="checkbox"
|
||||
prop:checked=move || completed.get()
|
||||
on:click=move |_| { completed.update(|n| *n = !*n) }
|
||||
/>
|
||||
|
||||
<button on:click=move |_| {
|
||||
store
|
||||
.todos()
|
||||
.update(|todos| {
|
||||
todos.remove(idx);
|
||||
});
|
||||
status.write().next_step()
|
||||
}>
|
||||
{move || {
|
||||
if todo.status().done() {
|
||||
"Done"
|
||||
} else if status.scheduled() || status.scheduled_for() {
|
||||
"Scheduled"
|
||||
} else {
|
||||
"Pending"
|
||||
}
|
||||
}}
|
||||
|
||||
</button>
|
||||
|
||||
<button on:click=move |_| {
|
||||
let id = todo.id().get();
|
||||
store.todos().write().retain(|todo| todo.id != id);
|
||||
}>"X"</button>
|
||||
<input
|
||||
type="date"
|
||||
prop:value=move || {
|
||||
todo.status().scheduled_for_date().map(|n| n.get().to_string())
|
||||
}
|
||||
|
||||
class:hidden=move || !todo.status().scheduled_for()
|
||||
on:change:target=move |ev| {
|
||||
if let Some(date) = todo.status().scheduled_for_date() {
|
||||
let value = ev.target().value();
|
||||
match NaiveDate::parse_from_str(&value, "%Y-%m-%d") {
|
||||
Ok(new_date) => {
|
||||
date.set(new_date);
|
||||
}
|
||||
Err(e) => warn!("{e}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
/>
|
||||
|
||||
</li>
|
||||
}
|
||||
}
|
||||
|
||||
@@ -147,14 +147,13 @@ fn Nested() -> impl IntoView {
|
||||
"Loading 1..."
|
||||
}>
|
||||
{move || {
|
||||
one_second.get().map(|_| view! { <p id="loaded-1">"One Second: Loaded 1!"</p> })
|
||||
one_second.map(|_| view! { <p id="loaded-1">"One Second: Loaded 1!"</p> })
|
||||
}}
|
||||
<Suspense fallback=|| {
|
||||
"Loading 2..."
|
||||
}>
|
||||
{move || {
|
||||
two_second
|
||||
.get()
|
||||
.map(|_| {
|
||||
view! {
|
||||
<p id="loaded-2">"Two Second: Loaded 2!"</p>
|
||||
@@ -217,7 +216,6 @@ fn Parallel() -> impl IntoView {
|
||||
}>
|
||||
{move || {
|
||||
one_second
|
||||
.get()
|
||||
.map(move |_| {
|
||||
view! {
|
||||
<p id="loaded-1">"One Second: Loaded 1!"</p>
|
||||
@@ -234,7 +232,6 @@ fn Parallel() -> impl IntoView {
|
||||
}>
|
||||
{move || {
|
||||
two_second
|
||||
.get()
|
||||
.map(move |_| {
|
||||
view! {
|
||||
<p id="loaded-2">"Two Second: Loaded 2!"</p>
|
||||
@@ -264,7 +261,7 @@ fn Single() -> impl IntoView {
|
||||
"Loading 1..."
|
||||
}>
|
||||
{move || {
|
||||
one_second.get().map(|_| view! { <p id="loaded-1">"One Second: Loaded 1!"</p> })
|
||||
one_second.map(|_| view! { <p id="loaded-1">"One Second: Loaded 1!"</p> })
|
||||
}}
|
||||
|
||||
</Suspense>
|
||||
@@ -300,7 +297,7 @@ fn InsideComponentChild() -> impl IntoView {
|
||||
"Loading 1..."
|
||||
}>
|
||||
{move || {
|
||||
one_second.get().map(|_| view! { <p id="loaded-1">"One Second: Loaded 1!"</p> })
|
||||
one_second.map(|_| view! { <p id="loaded-1">"One Second: Loaded 1!"</p> })
|
||||
}}
|
||||
|
||||
</Suspense>
|
||||
@@ -319,7 +316,7 @@ fn LocalResource() -> impl IntoView {
|
||||
"Loading 1..."
|
||||
}>
|
||||
{move || {
|
||||
one_second.get().map(|_| view! { <p id="loaded-1">"One Second: Loaded 1!"</p> })
|
||||
one_second.map(|_| view! { <p id="loaded-1">"One Second: Loaded 1!"</p> })
|
||||
}}
|
||||
{move || {
|
||||
Suspend::new(async move {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "hydration_context"
|
||||
version = "0.2.0-beta4"
|
||||
version = "0.2.0-beta6"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -27,6 +27,7 @@ type SealedErrors = Arc<RwLock<HashSet<SerializedDataId>>>;
|
||||
/// The shared context that should be used on the server side.
|
||||
pub struct SsrSharedContext {
|
||||
id: AtomicUsize,
|
||||
non_hydration_id: AtomicUsize,
|
||||
is_hydrating: AtomicBool,
|
||||
sync_buf: RwLock<Vec<ResolvedData>>,
|
||||
async_buf: AsyncDataBuf,
|
||||
@@ -41,6 +42,7 @@ impl SsrSharedContext {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
is_hydrating: AtomicBool::new(true),
|
||||
non_hydration_id: AtomicUsize::new(usize::MAX),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
@@ -52,6 +54,7 @@ impl SsrSharedContext {
|
||||
pub fn new_islands() -> Self {
|
||||
Self {
|
||||
is_hydrating: AtomicBool::new(false),
|
||||
non_hydration_id: AtomicUsize::new(usize::MAX),
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
@@ -73,8 +76,13 @@ impl SharedContext for SsrSharedContext {
|
||||
false
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn next_id(&self) -> SerializedDataId {
|
||||
let id = self.id.fetch_add(1, Ordering::Relaxed);
|
||||
let id = if self.get_is_hydrating() {
|
||||
self.id.fetch_add(1, Ordering::Relaxed)
|
||||
} else {
|
||||
self.non_hydration_id.fetch_sub(1, Ordering::Relaxed)
|
||||
};
|
||||
SerializedDataId(id)
|
||||
}
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ tokio = { version = "1.39", features = ["net", "rt-multi-thread"] }
|
||||
|
||||
[features]
|
||||
wasm = []
|
||||
default = ["tokio/fs", "tokio/sync", "tower-http/fs"]
|
||||
default = ["tokio/fs", "tokio/sync", "tower-http/fs", "tower/util"]
|
||||
islands-router = []
|
||||
tracing = ["dep:tracing"]
|
||||
|
||||
|
||||
@@ -59,7 +59,7 @@ pub trait ExtendResponse: Sized {
|
||||
// drop the owner, cleaning up the reactive runtime,
|
||||
// once the stream is over
|
||||
.chain(once(async move {
|
||||
drop(owner);
|
||||
owner.unset();
|
||||
Default::default()
|
||||
})),
|
||||
));
|
||||
|
||||
@@ -56,6 +56,7 @@ hydration = [
|
||||
"reactive_graph/hydration",
|
||||
"leptos_server/hydration",
|
||||
"hydration_context/browser",
|
||||
"leptos_dom/hydration"
|
||||
]
|
||||
csr = ["leptos_macro/csr", "reactive_graph/effects"]
|
||||
hydrate = [
|
||||
|
||||
@@ -4,7 +4,7 @@ use leptos_macro::component;
|
||||
use reactive_graph::{
|
||||
computed::ArcMemo,
|
||||
effect::RenderEffect,
|
||||
owner::Owner,
|
||||
owner::{provide_context, Owner},
|
||||
signal::ArcRwSignal,
|
||||
traits::{Get, Update, With, WithUntracked},
|
||||
};
|
||||
@@ -13,6 +13,7 @@ use std::{fmt::Debug, marker::PhantomData, sync::Arc};
|
||||
use tachys::{
|
||||
html::attribute::Attribute,
|
||||
hydration::Cursor,
|
||||
reactive_graph::OwnedView,
|
||||
renderer::Renderer,
|
||||
ssr::StreamBuilder,
|
||||
view::{
|
||||
@@ -96,17 +97,25 @@ where
|
||||
let hook = hook as Arc<dyn ErrorHook>;
|
||||
|
||||
let _guard = throw_error::set_error_hook(Arc::clone(&hook));
|
||||
let children = children.into_inner()();
|
||||
|
||||
ErrorBoundaryView {
|
||||
hook,
|
||||
boundary_id,
|
||||
errors_empty,
|
||||
children,
|
||||
errors,
|
||||
fallback,
|
||||
rndr: PhantomData,
|
||||
}
|
||||
let owner = Owner::new();
|
||||
let children = owner.with(|| {
|
||||
provide_context(Arc::clone(&hook));
|
||||
children.into_inner()()
|
||||
});
|
||||
|
||||
OwnedView::new_with_owner(
|
||||
ErrorBoundaryView {
|
||||
hook,
|
||||
boundary_id,
|
||||
errors_empty,
|
||||
children,
|
||||
errors,
|
||||
fallback,
|
||||
rndr: PhantomData,
|
||||
},
|
||||
owner,
|
||||
)
|
||||
}
|
||||
|
||||
struct ErrorBoundaryView<Chil, FalFn, Rndr> {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
(function (pkg_path, output_name, wasm_output_name) {
|
||||
import(`/${pkg_path}/${output_name}.js`)
|
||||
(function (root, pkg_path, output_name, wasm_output_name) {
|
||||
import(`${root}/${pkg_path}/${output_name}.js`)
|
||||
.then(mod => {
|
||||
mod.default(`/${pkg_path}/${wasm_output_name}.wasm`).then(() => {
|
||||
mod.hydrate();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
((pkg_path, output_name, wasm_output_name) => {
|
||||
((root, pkg_path, output_name, wasm_output_name) => {
|
||||
function idle(c) {
|
||||
if ("requestIdleCallback" in window) {
|
||||
window.requestIdleCallback(c);
|
||||
@@ -34,7 +34,7 @@
|
||||
return { el: null, id: null, children: tree };
|
||||
}
|
||||
function hydrateIsland(el, id, mod) {
|
||||
const islandFn = mod[`_island_${id}`];
|
||||
const islandFn = mod[id];
|
||||
if (islandFn) {
|
||||
islandFn(el);
|
||||
} else {
|
||||
@@ -50,9 +50,9 @@
|
||||
}
|
||||
}
|
||||
idle(() => {
|
||||
import(`/${pkg_path}/${output_name}.js`)
|
||||
import(`${root}/${pkg_path}/${output_name}.js`)
|
||||
.then(mod => {
|
||||
mod.default(`/${pkg_path}/${wasm_output_name}.wasm`).then(() => {
|
||||
mod.default(`${root}/${pkg_path}/${wasm_output_name}.wasm`).then(() => {
|
||||
mod.hydrate();
|
||||
hydrateIslands(islandTree(document.body, null), mod);
|
||||
});
|
||||
|
||||
@@ -38,6 +38,9 @@ pub fn AutoReload(
pub fn HydrationScripts(
    options: LeptosOptions,
    #[prop(optional)] islands: bool,
    /// A base url, not including a trailing slash
    #[prop(optional, into)]
    root: Option<String>,
) -> impl IntoView {
    let mut js_file_name = options.output_name.to_string();
    let mut wasm_file_name = options.output_name.to_string();
@@ -56,9 +59,10 @@ pub fn HydrationScripts(
        if !line.is_empty() {
            if let Some((file, hash)) = line.split_once(':') {
                if file == "js" {
                    js_file_name.push_str(&format!(".{}", hash));
                    js_file_name.push_str(&format!(".{}", hash.trim()));
                } else if file == "wasm" {
                    wasm_file_name.push_str(&format!(".{}", hash));
                    wasm_file_name
                        .push_str(&format!(".{}", hash.trim()));
                }
            }
        }
@@ -82,17 +86,18 @@ pub fn HydrationScripts(
        include_str!("./hydration_script.js")
    };

    let root = root.unwrap_or_default();
    view! {
        <link rel="modulepreload" href=format!("/{pkg_path}/{js_file_name}.js") nonce=nonce.clone()/>
        <link rel="modulepreload" href=format!("{root}/{pkg_path}/{js_file_name}.js") nonce=nonce.clone()/>
        <link
            rel="preload"
            href=format!("/{pkg_path}/{wasm_file_name}.wasm")
            href=format!("{root}/{pkg_path}/{wasm_file_name}.wasm")
            r#as="fetch"
            r#type="application/wasm"
            crossorigin=nonce.clone().unwrap_or_default()
        />
        <script type="module" nonce=nonce>
            {format!("{script}({pkg_path:?}, {js_file_name:?}, {wasm_file_name:?})")}
            {format!("{script}({root:?}, {pkg_path:?}, {js_file_name:?}, {wasm_file_name:?})")}
        </script>
    }
}

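With the new optional `root` prop above, the hydration scripts and preload links can point at a bundle served under a base path. A hedged usage sketch follows; the `/my-app` base path, the `App` component, and the `shell` function are illustrative assumptions, not code from this diff.

use leptos::{config::LeptosOptions, hydration::HydrationScripts, prelude::*};

#[component]
fn App() -> impl IntoView {
    view! { <p>"Hello from a sub-path deployment."</p> }
}

// Hypothetical shell that loads the JS/WASM bundle from under "/my-app";
// per the doc comment above, `root` is a base url without a trailing slash.
pub fn shell(options: LeptosOptions) -> impl IntoView {
    view! {
        <!DOCTYPE html>
        <html>
            <head>
                <HydrationScripts options root="/my-app"/>
            </head>
            <body>
                <App/>
            </body>
        </html>
    }
}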
@@ -168,7 +168,7 @@ pub mod prelude {
|
||||
pub use leptos_server::*;
|
||||
pub use oco_ref::*;
|
||||
pub use reactive_graph::{
|
||||
actions::*, computed::*, effect::*, owner::*, signal::*,
|
||||
actions::*, computed::*, effect::*, owner::*, signal::*, untrack,
|
||||
wrappers::read::*,
|
||||
};
|
||||
pub use server_fn::{self, ServerFnError};
|
||||
|
||||
@@ -13,7 +13,7 @@ use reactive_graph::{
|
||||
effect::RenderEffect,
|
||||
owner::{provide_context, use_context, Owner},
|
||||
signal::ArcRwSignal,
|
||||
traits::{Get, Read, Track, With},
|
||||
traits::{Dispose, Get, Read, Track, With},
|
||||
};
|
||||
use slotmap::{DefaultKey, SlotMap};
|
||||
use tachys::{
|
||||
@@ -286,7 +286,7 @@ where
|
||||
self.children.dry_resolve();
|
||||
|
||||
// check the set of tasks to see if it is empty, now or later
|
||||
let eff = reactive_graph::effect::RenderEffect::new_isomorphic({
|
||||
let eff = reactive_graph::effect::Effect::new_isomorphic({
|
||||
move |_| {
|
||||
tasks.track();
|
||||
if tasks.read().is_empty() {
|
||||
@@ -338,7 +338,7 @@ where
|
||||
}
|
||||
children = children => {
|
||||
// clean up the (now useless) effect
|
||||
drop(eff);
|
||||
eff.dispose();
|
||||
|
||||
Some(OwnedView::new_with_owner(children, owner))
|
||||
}
|
||||
|
||||
@@ -30,6 +30,7 @@ features = ["Location"]
|
||||
default = []
|
||||
tracing = ["dep:tracing"]
|
||||
trace-component-props = ["dep:serde", "dep:serde_json"]
|
||||
hydration = ["reactive_graph/hydration"]
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustdoc-args = ["--generate-link-to-definition"]
|
||||
|
||||
@@ -64,18 +64,17 @@ pub fn location() -> web_sys::Location {
|
||||
/// Current [`window.location.hash`](https://developer.mozilla.org/en-US/docs/Web/API/Window/location)
|
||||
/// without the beginning #.
|
||||
pub fn location_hash() -> Option<String> {
|
||||
// TODO use shared context for is_server
|
||||
/*if is_server() {
|
||||
if is_server() {
|
||||
None
|
||||
} else {*/
|
||||
location()
|
||||
.hash()
|
||||
.ok()
|
||||
.map(|hash| match hash.chars().next() {
|
||||
Some('#') => hash[1..].to_string(),
|
||||
_ => hash,
|
||||
})
|
||||
//}
|
||||
} else {
|
||||
location()
|
||||
.hash()
|
||||
.ok()
|
||||
.map(|hash| match hash.chars().next() {
|
||||
Some('#') => hash[1..].to_string(),
|
||||
_ => hash,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Current [`window.location.pathname`](https://developer.mozilla.org/en-US/docs/Web/API/Window/location).
|
||||
@@ -475,9 +474,7 @@ pub fn window_event_listener_untyped(
|
||||
cb(e);
|
||||
};
|
||||
|
||||
// TODO use shared context for is_server
|
||||
if true {
|
||||
// !is_server() {
|
||||
if !is_server() {
|
||||
#[inline(never)]
|
||||
fn wel(
|
||||
cb: Box<dyn FnMut(web_sys::Event)>,
|
||||
@@ -550,3 +547,16 @@ impl WindowListenerHandle {
|
||||
(self.0)()
|
||||
}
|
||||
}
|
||||
|
||||
fn is_server() -> bool {
|
||||
#[cfg(feature = "hydration")]
|
||||
{
|
||||
Owner::current_shared_context()
|
||||
.map(|sc| !sc.is_browser())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
#[cfg(not(feature = "hydration"))]
|
||||
{
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_macro"
|
||||
version = "0.7.0-beta4"
|
||||
version = "0.7.0-beta6"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
@@ -18,7 +18,7 @@ cfg-if = "1.0"
|
||||
html-escape = "0.2.13"
|
||||
itertools = "0.13.0"
|
||||
prettyplease = "0.2.20"
|
||||
proc-macro-error = { version = "1.0", default-features = false }
|
||||
proc-macro-error2 = { version = "2.0", default-features = false }
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
||||
syn = { version = "2.0", features = ["full"] }
|
||||
|
||||
@@ -6,8 +6,9 @@ use convert_case::{
|
||||
use itertools::Itertools;
|
||||
use leptos_hot_reload::parsing::value_to_string;
|
||||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use proc_macro_error::abort;
|
||||
use proc_macro_error2::abort;
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens, TokenStreamExt};
|
||||
use std::hash::DefaultHasher;
|
||||
use syn::{
|
||||
parse::Parse, parse_quote, spanned::Spanned, token::Colon,
|
||||
visit_mut::VisitMut, AngleBracketedGenericArguments, Attribute, FnArg,
|
||||
@@ -17,7 +18,7 @@ use syn::{
|
||||
};
|
||||
|
||||
pub struct Model {
|
||||
is_island: bool,
|
||||
island: Option<String>,
|
||||
docs: Docs,
|
||||
unknown_attrs: UnknownAttrs,
|
||||
vis: Visibility,
|
||||
@@ -61,7 +62,7 @@ impl Parse for Model {
|
||||
});
|
||||
|
||||
Ok(Self {
|
||||
is_island: false,
|
||||
island: None,
|
||||
docs,
|
||||
unknown_attrs,
|
||||
vis: item.vis.clone(),
|
||||
@@ -101,7 +102,7 @@ pub fn convert_from_snake_case(name: &Ident) -> Ident {
|
||||
impl ToTokens for Model {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
is_island,
|
||||
island,
|
||||
docs,
|
||||
unknown_attrs,
|
||||
vis,
|
||||
@@ -110,6 +111,7 @@ impl ToTokens for Model {
|
||||
body,
|
||||
ret,
|
||||
} = self;
|
||||
let is_island = island.is_some();
|
||||
|
||||
let no_props = props.is_empty();
|
||||
|
||||
@@ -120,7 +122,7 @@ impl ToTokens for Model {
|
||||
_ => None,
|
||||
});
|
||||
if let Some(semi) = ends_semi {
|
||||
proc_macro_error::emit_error!(
|
||||
proc_macro_error2::emit_error!(
|
||||
semi.span(),
|
||||
"A component that ends with a `view!` macro followed by a \
|
||||
semicolon will return (), an empty view. This is usually an \
|
||||
@@ -145,9 +147,9 @@ impl ToTokens for Model {
|
||||
#[cfg(feature = "tracing")]
|
||||
let trace_name = format!("<{name} />");
|
||||
|
||||
let is_island_with_children = *is_island
|
||||
&& props.iter().any(|prop| prop.name.ident == "children");
|
||||
let is_island_with_other_props = *is_island
|
||||
let is_island_with_children =
|
||||
is_island && props.iter().any(|prop| prop.name.ident == "children");
|
||||
let is_island_with_other_props = is_island
|
||||
&& ((is_island_with_children && props.len() > 1)
|
||||
|| (!is_island_with_children && !props.is_empty()));
|
||||
|
||||
@@ -203,11 +205,11 @@ impl ToTokens for Model {
|
||||
)]
|
||||
},
|
||||
quote! {
|
||||
let span = ::leptos::tracing::Span::current();
|
||||
let __span = ::leptos::tracing::Span::current();
|
||||
},
|
||||
quote! {
|
||||
#[cfg(debug_assertions)]
|
||||
let _guard = span.entered();
|
||||
let _guard = __span.entered();
|
||||
},
|
||||
if no_props || !cfg!(feature = "trace-component-props") {
|
||||
quote!()
|
||||
@@ -226,8 +228,14 @@ impl ToTokens for Model {
|
||||
};
|
||||
|
||||
let component_id = name.to_string();
|
||||
let hydrate_fn_name =
|
||||
Ident::new(&format!("_island_{component_id}"), name.span());
|
||||
let hydrate_fn_name = is_island.then(|| {
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
let mut hasher = DefaultHasher::new();
|
||||
island.hash(&mut hasher);
|
||||
let caller = hasher.finish() as usize;
|
||||
Ident::new(&format!("{component_id}_{caller:?}"), name.span())
|
||||
});
|
||||
|
||||
let island_serialize_props = if is_island_with_other_props {
|
||||
quote! {
|
||||
@@ -245,7 +253,7 @@ impl ToTokens for Model {
|
||||
};
|
||||
|
||||
let body_name = unmodified_fn_name_from_fn_name(&body_name);
|
||||
let body_expr = if *is_island {
|
||||
let body_expr = if is_island {
|
||||
quote! {
|
||||
::leptos::reactive_graph::owner::Owner::with_hydration(move || {
|
||||
#body_name(#prop_names)
|
||||
@@ -268,7 +276,8 @@ impl ToTokens for Model {
|
||||
};
|
||||
|
||||
// add island wrapper if island
|
||||
let component = if *is_island {
|
||||
let component = if is_island {
|
||||
let hydrate_fn_name = hydrate_fn_name.as_ref().unwrap();
|
||||
quote! {
|
||||
{
|
||||
if ::leptos::reactive_graph::owner::Owner::current_shared_context()
|
||||
@@ -280,7 +289,7 @@ impl ToTokens for Model {
|
||||
} else {
|
||||
::leptos::either::Either::Right(
|
||||
::leptos::tachys::html::islands::Island::new(
|
||||
#component_id,
|
||||
stringify!(#hydrate_fn_name),
|
||||
#component
|
||||
)
|
||||
#island_serialized_props
|
||||
@@ -334,45 +343,64 @@ impl ToTokens for Model {
|
||||
#component
|
||||
};
|
||||
|
||||
let binding = if *is_island {
|
||||
let binding = if is_island {
|
||||
let island_props = if is_island_with_children
|
||||
|| is_island_with_other_props
|
||||
{
|
||||
let (destructure, prop_builders) = if is_island_with_other_props
|
||||
{
|
||||
let prop_names = props
|
||||
.iter()
|
||||
.filter_map(|prop| {
|
||||
if prop.name.ident == "children" {
|
||||
None
|
||||
} else {
|
||||
let name = &prop.name.ident;
|
||||
Some(quote! { #name, })
|
||||
}
|
||||
})
|
||||
.collect::<TokenStream>();
|
||||
let destructure = quote! {
|
||||
let #props_serialized_name {
|
||||
#prop_names
|
||||
} = props;
|
||||
let (destructure, prop_builders, optional_props) =
|
||||
if is_island_with_other_props {
|
||||
let prop_names = props
|
||||
.iter()
|
||||
.filter_map(|prop| {
|
||||
if prop.name.ident == "children" {
|
||||
None
|
||||
} else {
|
||||
let name = &prop.name.ident;
|
||||
Some(quote! { #name, })
|
||||
}
|
||||
})
|
||||
.collect::<TokenStream>();
|
||||
let destructure = quote! {
|
||||
let #props_serialized_name {
|
||||
#prop_names
|
||||
} = props;
|
||||
};
|
||||
let prop_builders = props
|
||||
.iter()
|
||||
.filter_map(|prop| {
|
||||
if prop.name.ident == "children"
|
||||
|| prop.prop_opts.optional
|
||||
{
|
||||
None
|
||||
} else {
|
||||
let name = &prop.name.ident;
|
||||
Some(quote! {
|
||||
.#name(#name)
|
||||
})
|
||||
}
|
||||
})
|
||||
.collect::<TokenStream>();
|
||||
let optional_props = props
|
||||
.iter()
|
||||
.filter_map(|prop| {
|
||||
if prop.name.ident == "children"
|
||||
|| !prop.prop_opts.optional
|
||||
{
|
||||
None
|
||||
} else {
|
||||
let name = &prop.name.ident;
|
||||
Some(quote! {
|
||||
if let Some(#name) = #name {
|
||||
props.#name = Some(#name)
|
||||
}
|
||||
})
|
||||
}
|
||||
})
|
||||
.collect::<TokenStream>();
|
||||
(destructure, prop_builders, optional_props)
|
||||
} else {
|
||||
(quote! {}, quote! {}, quote! {})
|
||||
};
|
||||
let prop_builders = props
|
||||
.iter()
|
||||
.filter_map(|prop| {
|
||||
if prop.name.ident == "children" {
|
||||
None
|
||||
} else {
|
||||
let name = &prop.name.ident;
|
||||
Some(quote! {
|
||||
.#name(#name)
|
||||
})
|
||||
}
|
||||
})
|
||||
.collect::<TokenStream>();
|
||||
(destructure, prop_builders)
|
||||
} else {
|
||||
(quote! {}, quote! {})
|
||||
};
|
||||
let children = if is_island_with_children {
|
||||
quote! {
|
||||
.children({Box::new(|| {
|
||||
@@ -396,10 +424,14 @@ impl ToTokens for Model {
|
||||
|
||||
quote! {{
|
||||
#destructure
|
||||
#props_name::builder()
|
||||
let mut props = #props_name::builder()
|
||||
#prop_builders
|
||||
#children
|
||||
.build()
|
||||
.build();
|
||||
|
||||
#optional_props
|
||||
|
||||
props
|
||||
}}
|
||||
} else {
|
||||
quote! {}
|
||||
@@ -414,6 +446,7 @@ impl ToTokens for Model {
|
||||
quote! {}
|
||||
};
|
||||
|
||||
let hydrate_fn_name = hydrate_fn_name.as_ref().unwrap();
|
||||
quote! {
|
||||
#[::leptos::wasm_bindgen::prelude::wasm_bindgen(wasm_bindgen = ::leptos::wasm_bindgen)]
|
||||
#[allow(non_snake_case)]
|
||||
@@ -488,8 +521,8 @@ impl ToTokens for Model {
|
||||
|
||||
impl Model {
|
||||
#[allow(clippy::wrong_self_convention)]
|
||||
pub fn is_island(mut self, is_island: bool) -> Self {
|
||||
self.is_island = is_island;
|
||||
pub fn with_island(mut self, island: Option<String>) -> Self {
|
||||
self.island = island;
|
||||
|
||||
self
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
#![allow(private_macro_use)]
|
||||
|
||||
#[macro_use]
|
||||
extern crate proc_macro_error;
|
||||
extern crate proc_macro_error2;
|
||||
|
||||
use component::DummyModel;
|
||||
use proc_macro::TokenStream;
|
||||
@@ -262,10 +262,25 @@ mod slot;
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_error::proc_macro_error]
|
||||
#[proc_macro_error2::proc_macro_error]
|
||||
#[proc_macro]
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(level = "trace", skip_all))]
|
||||
pub fn view(tokens: TokenStream) -> TokenStream {
|
||||
view_macro_impl(tokens, false)
|
||||
}
|
||||
|
||||
/// The `template` macro behaves like [`view`], except that it wraps the entire tree in a
|
||||
/// [`ViewTemplate`](leptos::prelude::ViewTemplate). This optimizes creation speed by rendering
|
||||
/// most of the view into a `<template>` tag with HTML rendered at compile time, then hydrating it.
|
||||
/// In exchange, there is a small binary size overhead.
|
||||
#[proc_macro_error2::proc_macro_error]
|
||||
#[proc_macro]
|
||||
#[cfg_attr(feature = "tracing", tracing::instrument(level = "trace", skip_all))]
|
||||
pub fn template(tokens: TokenStream) -> TokenStream {
|
||||
view_macro_impl(tokens, true)
|
||||
}
|
||||
|
||||
fn view_macro_impl(tokens: TokenStream, template: bool) -> TokenStream {
|
||||
let tokens: proc_macro2::TokenStream = tokens.into();
|
||||
let mut tokens = tokens.into_iter();
|
||||
|
||||
@@ -302,18 +317,19 @@ pub fn view(tokens: TokenStream) -> TokenStream {
|
||||
};
|
||||
let config = rstml::ParserConfig::default().recover_block(true);
|
||||
let parser = rstml::Parser::new(config);
|
||||
let (nodes, errors) = parser.parse_recoverable(tokens).split_vec();
|
||||
let (mut nodes, errors) = parser.parse_recoverable(tokens).split_vec();
|
||||
let errors = errors.into_iter().map(|e| e.emit_as_expr_tokens());
|
||||
let nodes_output = view::render_view(
|
||||
&nodes,
|
||||
&mut nodes,
|
||||
global_class.as_ref(),
|
||||
normalized_call_site(proc_macro::Span::call_site()),
|
||||
template,
|
||||
);
|
||||
|
||||
// The allow lint needs to be put here instead of at the expansion of
|
||||
// view::attribute_value(). Adding this next to the expanded expression
|
||||
// seems to break rust-analyzer, but it works when the allow is put here.
|
||||
quote! {
|
||||
let output = quote! {
|
||||
{
|
||||
#[allow(unused_braces)]
|
||||
{
|
||||
@@ -321,6 +337,14 @@ pub fn view(tokens: TokenStream) -> TokenStream {
|
||||
#nodes_output
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if template {
|
||||
quote! {
|
||||
::leptos::prelude::ViewTemplate::new(#output)
|
||||
}
|
||||
} else {
|
||||
output
|
||||
}
|
||||
.into()
|
||||
}
|
||||
@@ -346,7 +370,7 @@ fn normalized_call_site(site: proc_macro::Span) -> Option<String> {
|
||||
///
|
||||
/// The file is loaded and parsed during proc-macro execution, and its path is resolved relative to
|
||||
/// the crate root rather than relative to the file from which it is called.
|
||||
#[proc_macro_error::proc_macro_error]
|
||||
#[proc_macro_error2::proc_macro_error]
|
||||
#[proc_macro]
|
||||
pub fn include_view(tokens: TokenStream) -> TokenStream {
|
||||
let file_name = syn::parse::<syn::LitStr>(tokens).unwrap_or_else(|_| {
|
||||
@@ -509,13 +533,13 @@ pub fn include_view(tokens: TokenStream) -> TokenStream {
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_error::proc_macro_error]
|
||||
#[proc_macro_error2::proc_macro_error]
|
||||
#[proc_macro_attribute]
|
||||
pub fn component(
|
||||
_args: proc_macro::TokenStream,
|
||||
s: TokenStream,
|
||||
) -> TokenStream {
|
||||
component_macro(s, false)
|
||||
component_macro(s, None)
|
||||
}
|
||||
|
||||
/// Defines a component as an interactive island when you are using the
|
||||
@@ -589,18 +613,19 @@ pub fn component(
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_error::proc_macro_error]
|
||||
#[proc_macro_error2::proc_macro_error]
|
||||
#[proc_macro_attribute]
|
||||
pub fn island(_args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
component_macro(s, true)
|
||||
let island_src = s.to_string();
|
||||
component_macro(s, Some(island_src))
|
||||
}
|
||||
|
||||
fn component_macro(s: TokenStream, island: bool) -> TokenStream {
|
||||
fn component_macro(s: TokenStream, island: Option<String>) -> TokenStream {
|
||||
let mut dummy = syn::parse::<DummyModel>(s.clone());
|
||||
let parse_result = syn::parse::<component::Model>(s);
|
||||
|
||||
if let (Ok(ref mut unexpanded), Ok(model)) = (&mut dummy, parse_result) {
|
||||
let expanded = model.is_island(island).into_token_stream();
|
||||
let expanded = model.with_island(island).into_token_stream();
|
||||
if !matches!(unexpanded.vis, Visibility::Public(_)) {
|
||||
unexpanded.vis = Visibility::Public(Pub {
|
||||
span: unexpanded.vis.span(),
|
||||
@@ -728,7 +753,7 @@ fn component_macro(s: TokenStream, island: bool) -> TokenStream {
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[proc_macro_error::proc_macro_error]
|
||||
#[proc_macro_error2::proc_macro_error]
|
||||
#[proc_macro_attribute]
|
||||
pub fn slot(args: proc_macro::TokenStream, s: TokenStream) -> TokenStream {
|
||||
if !args.is_empty() {
|
||||
|
||||
@@ -10,11 +10,10 @@ use std::collections::HashMap;
|
||||
use syn::{spanned::Spanned, Expr, ExprPath, ExprRange, RangeLimits, Stmt};
|
||||
|
||||
pub(crate) fn component_to_tokens(
|
||||
node: &NodeElement<impl CustomNode>,
|
||||
node: &mut NodeElement<impl CustomNode>,
|
||||
global_class: Option<&TokenTree>,
|
||||
disable_inert_html: bool,
|
||||
) -> TokenStream {
|
||||
let name = node.name();
|
||||
|
||||
#[allow(unused)] // TODO this is used by hot-reloading
|
||||
#[cfg(debug_assertions)]
|
||||
let component_name = super::ident_from_tag_name(node.name());
|
||||
@@ -45,16 +44,21 @@ pub(crate) fn component_to_tokens(
|
||||
})
|
||||
.unwrap_or_else(|| node.attributes().len());
|
||||
|
||||
let attrs = node.attributes().iter().filter_map(|node| {
|
||||
if let NodeAttribute::Attribute(node) = node {
|
||||
Some(node)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
let attrs = node
|
||||
.attributes()
|
||||
.iter()
|
||||
.filter_map(|node| {
|
||||
if let NodeAttribute::Attribute(node) = node {
|
||||
Some(node)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let props = attrs
|
||||
.clone()
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(idx, attr)| {
|
||||
idx < &spread_marker && {
|
||||
@@ -85,7 +89,7 @@ pub(crate) fn component_to_tokens(
|
||||
});
|
||||
|
||||
let items_to_bind = attrs
|
||||
.clone()
|
||||
.iter()
|
||||
.filter_map(|attr| {
|
||||
if !is_attr_let(&attr.key) {
|
||||
return None;
|
||||
@@ -107,7 +111,7 @@ pub(crate) fn component_to_tokens(
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let items_to_clone = attrs
|
||||
.clone()
|
||||
.iter()
|
||||
.filter_map(|attr| {
|
||||
attr.key
|
||||
.to_string()
|
||||
@@ -183,11 +187,12 @@ pub(crate) fn component_to_tokens(
|
||||
quote! {}
|
||||
} else {
|
||||
let children = fragment_to_tokens(
|
||||
&node.children,
|
||||
&mut node.children,
|
||||
TagType::Unknown,
|
||||
Some(&mut slots),
|
||||
global_class,
|
||||
None,
|
||||
disable_inert_html,
|
||||
);
|
||||
|
||||
// TODO view marker for hot-reloading
|
||||
@@ -261,6 +266,7 @@ pub(crate) fn component_to_tokens(
|
||||
quote! {}
|
||||
};
|
||||
|
||||
let name = node.name();
|
||||
#[allow(unused_mut)] // used in debug
|
||||
let mut component = quote! {
|
||||
{
|
||||
|
||||
@@ -7,13 +7,16 @@ use self::{
|
||||
use convert_case::{Case::Snake, Casing};
|
||||
use leptos_hot_reload::parsing::{is_component_node, value_to_string};
|
||||
use proc_macro2::{Ident, Span, TokenStream, TokenTree};
|
||||
use proc_macro_error::abort;
|
||||
use proc_macro_error2::abort;
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use rstml::node::{
|
||||
CustomNode, KVAttributeValue, KeyedAttribute, Node, NodeAttribute,
|
||||
NodeBlock, NodeElement, NodeName, NodeNameFragment,
|
||||
};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::{HashMap, HashSet, VecDeque},
|
||||
};
|
||||
use syn::{
|
||||
spanned::Spanned, Expr, Expr::Tuple, ExprLit, ExprRange, Lit, LitStr,
|
||||
RangeLimits, Stmt,
|
||||
@@ -28,9 +31,10 @@ pub(crate) enum TagType {
|
||||
}
|
||||
|
||||
pub fn render_view(
|
||||
nodes: &[Node],
|
||||
nodes: &mut [Node],
|
||||
global_class: Option<&TokenTree>,
|
||||
view_marker: Option<String>,
|
||||
disable_inert_html: bool,
|
||||
) -> Option<TokenStream> {
|
||||
let (base, should_add_view) = match nodes.len() {
|
||||
0 => {
|
||||
@@ -44,11 +48,13 @@ pub fn render_view(
|
||||
}
|
||||
1 => (
|
||||
node_to_tokens(
|
||||
&nodes[0],
|
||||
&mut nodes[0],
|
||||
TagType::Unknown,
|
||||
None,
|
||||
global_class,
|
||||
view_marker.as_deref(),
|
||||
true,
|
||||
disable_inert_html,
|
||||
),
|
||||
// only add View wrapper and view marker to a regular HTML
|
||||
// element or component, not to a <{..} /> attribute list
|
||||
@@ -64,6 +70,7 @@ pub fn render_view(
|
||||
None,
|
||||
global_class,
|
||||
view_marker.as_deref(),
|
||||
disable_inert_html,
|
||||
),
|
||||
true,
|
||||
),
|
||||
@@ -88,12 +95,287 @@ pub fn render_view(
|
||||
})
|
||||
}
|
||||
|
||||
fn is_inert_element(orig_node: &Node<impl CustomNode>) -> bool {
|
||||
// do not use this if the top-level node is not an Element,
|
||||
// or if it's an element with no children and no attrs
|
||||
match orig_node {
|
||||
Node::Element(el) => {
|
||||
if el.attributes().is_empty() && el.children.is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
// also doesn't work if the top-level element is an SVG/MathML element
|
||||
let el_name = el.name().to_string();
|
||||
if is_svg_element(&el_name) || is_math_ml_element(&el_name) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
_ => return false,
|
||||
}
|
||||
|
||||
// otherwise, walk over all the nodes to make sure everything is inert
|
||||
let mut nodes = VecDeque::from([orig_node]);
|
||||
|
||||
while let Some(current_element) = nodes.pop_front() {
|
||||
match current_element {
|
||||
Node::Text(_) | Node::RawText(_) => {}
|
||||
Node::Element(node) => {
|
||||
if is_component_node(node) {
|
||||
return false;
|
||||
}
|
||||
if is_spread_marker(node) {
|
||||
return false;
|
||||
}
|
||||
|
||||
match node.name() {
|
||||
NodeName::Block(_) => return false,
|
||||
_ => {
|
||||
// check all attributes
|
||||
for attr in node.attributes() {
|
||||
match attr {
|
||||
NodeAttribute::Block(_) => return false,
|
||||
NodeAttribute::Attribute(attr) => {
|
||||
let static_key =
|
||||
!matches!(attr.key, NodeName::Block(_));
|
||||
|
||||
let static_value = match attr
|
||||
.possible_value
|
||||
.to_value()
|
||||
{
|
||||
None => true,
|
||||
Some(value) => {
|
||||
matches!(&value.value, KVAttributeValue::Expr(expr) if {
|
||||
if let Expr::Lit(lit) = expr {
|
||||
matches!(&lit.lit, Lit::Str(_))
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
if !static_key || !static_value {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// check all children
|
||||
nodes.extend(&node.children);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => return false,
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
enum Item<'a, T> {
    Node(&'a Node<T>),
    ClosingTag(String),
}

enum InertElementBuilder<'a> {
    GlobalClass {
        global_class: &'a TokenTree,
        strs: Vec<GlobalClassItem<'a>>,
        buffer: String,
    },
    NoGlobalClass {
        buffer: String,
    },
}
|
||||
|
||||
impl<'a> ToTokens for InertElementBuilder<'a> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
InertElementBuilder::GlobalClass { strs, .. } => {
|
||||
tokens.extend(quote! {
|
||||
[#(#strs),*].join("")
|
||||
});
|
||||
}
|
||||
InertElementBuilder::NoGlobalClass { buffer } => {
|
||||
tokens.extend(quote! {
|
||||
#buffer
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum GlobalClassItem<'a> {
    Global(&'a TokenTree),
    String(String),
}
|
||||
|
||||
impl<'a> ToTokens for GlobalClassItem<'a> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let addl_tokens = match self {
|
||||
GlobalClassItem::Global(v) => v.to_token_stream(),
|
||||
GlobalClassItem::String(v) => v.to_token_stream(),
|
||||
};
|
||||
tokens.extend(addl_tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> InertElementBuilder<'a> {
|
||||
fn new(global_class: Option<&'a TokenTree>) -> Self {
|
||||
match global_class {
|
||||
None => Self::NoGlobalClass {
|
||||
buffer: String::new(),
|
||||
},
|
||||
Some(global_class) => Self::GlobalClass {
|
||||
global_class,
|
||||
strs: Vec::new(),
|
||||
buffer: String::new(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn push(&mut self, c: char) {
|
||||
match self {
|
||||
InertElementBuilder::GlobalClass { buffer, .. } => buffer.push(c),
|
||||
InertElementBuilder::NoGlobalClass { buffer } => buffer.push(c),
|
||||
}
|
||||
}
|
||||
|
||||
fn push_str(&mut self, s: &str) {
|
||||
match self {
|
||||
InertElementBuilder::GlobalClass { buffer, .. } => {
|
||||
buffer.push_str(s)
|
||||
}
|
||||
InertElementBuilder::NoGlobalClass { buffer } => buffer.push_str(s),
|
||||
}
|
||||
}
|
||||
|
||||
fn push_class(&mut self, class: &str) {
|
||||
match self {
|
||||
InertElementBuilder::GlobalClass {
|
||||
global_class,
|
||||
strs,
|
||||
buffer,
|
||||
} => {
|
||||
buffer.push_str(" class=\"");
|
||||
strs.push(GlobalClassItem::String(std::mem::take(buffer)));
|
||||
strs.push(GlobalClassItem::Global(global_class));
|
||||
buffer.push(' ');
|
||||
buffer.push_str(class);
|
||||
buffer.push('"');
|
||||
}
|
||||
InertElementBuilder::NoGlobalClass { buffer } => {
|
||||
buffer.push_str(" class=\"");
|
||||
buffer.push_str(class);
|
||||
buffer.push('"');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn finish(&mut self) {
|
||||
match self {
|
||||
InertElementBuilder::GlobalClass { strs, buffer, .. } => {
|
||||
strs.push(GlobalClassItem::String(std::mem::take(buffer)));
|
||||
}
|
||||
InertElementBuilder::NoGlobalClass { .. } => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn inert_element_to_tokens(
|
||||
node: &Node<impl CustomNode>,
|
||||
global_class: Option<&TokenTree>,
|
||||
) -> Option<TokenStream> {
|
||||
let mut html = InertElementBuilder::new(global_class);
|
||||
let mut nodes = VecDeque::from([Item::Node(node)]);
|
||||
|
||||
while let Some(current) = nodes.pop_front() {
|
||||
match current {
|
||||
Item::ClosingTag(tag) => {
|
||||
// closing tag
|
||||
html.push_str("</");
|
||||
html.push_str(&tag);
|
||||
html.push('>');
|
||||
}
|
||||
Item::Node(current) => {
|
||||
match current {
|
||||
Node::RawText(raw) => {
|
||||
let text = raw.to_string_best();
|
||||
html.push_str(&text);
|
||||
}
|
||||
Node::Text(text) => {
|
||||
let text = text.value_string();
|
||||
html.push_str(&text);
|
||||
}
|
||||
Node::Element(node) => {
|
||||
let self_closing = is_self_closing(node);
|
||||
let el_name = node.name().to_string();
|
||||
|
||||
// opening tag
|
||||
html.push('<');
|
||||
html.push_str(&el_name);
|
||||
|
||||
for attr in node.attributes() {
|
||||
if let NodeAttribute::Attribute(attr) = attr {
|
||||
let attr_name = attr.key.to_string();
|
||||
if attr_name != "class" {
|
||||
html.push(' ');
|
||||
html.push_str(&attr_name);
|
||||
}
|
||||
|
||||
if let Some(value) =
|
||||
attr.possible_value.to_value()
|
||||
{
|
||||
if let KVAttributeValue::Expr(Expr::Lit(
|
||||
lit,
|
||||
)) = &value.value
|
||||
{
|
||||
if let Lit::Str(txt) = &lit.lit {
|
||||
if attr_name == "class" {
|
||||
html.push_class(&txt.value());
|
||||
} else {
|
||||
html.push_str("=\"");
|
||||
html.push_str(&txt.value());
|
||||
html.push('"');
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
html.push('>');
|
||||
|
||||
// render all children
|
||||
if !self_closing {
|
||||
nodes.push_front(Item::ClosingTag(el_name));
|
||||
let children = node.children.iter().rev();
|
||||
for child in children {
|
||||
nodes.push_front(Item::Node(child));
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
html.finish();
|
||||
|
||||
Some(quote! {
|
||||
::leptos::tachys::html::InertElement::new(#html)
|
||||
})
|
||||
}
|
||||
|
||||
fn element_children_to_tokens(
|
||||
nodes: &[Node<impl CustomNode>],
|
||||
nodes: &mut [Node<impl CustomNode>],
|
||||
parent_type: TagType,
|
||||
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
|
||||
global_class: Option<&TokenTree>,
|
||||
view_marker: Option<&str>,
|
||||
disable_inert_html: bool,
|
||||
) -> Option<TokenStream> {
|
||||
let children = children_to_tokens(
|
||||
nodes,
|
||||
@@ -101,27 +383,50 @@ fn element_children_to_tokens(
|
||||
parent_slots,
|
||||
global_class,
|
||||
view_marker,
|
||||
)
|
||||
.into_iter()
|
||||
.map(|child| {
|
||||
quote! {
|
||||
false,
|
||||
disable_inert_html,
|
||||
);
|
||||
if children.is_empty() {
|
||||
None
|
||||
} else if children.len() == 1 {
|
||||
let child = &children[0];
|
||||
Some(quote! {
|
||||
.child(
|
||||
#[allow(unused_braces)]
|
||||
{ #child }
|
||||
)
|
||||
}
|
||||
});
|
||||
Some(quote! {
|
||||
#(#children)*
|
||||
})
|
||||
})
|
||||
    } else if children.len() > 16 {
        // implementations of various traits used in routing and rendering are implemented for
        // tuples of sizes 0, 1, 2, 3, ... N. N varies but is > 16. The traits are also implemented
        // for tuples of tuples, so if we have more than 16 items, we can split them out into
        // multiple tuples.
        let chunks = children.chunks(16).map(|children| {
            quote! {
                (#(#children),*)
            }
        });
        Some(quote! {
            .child(
                (#(#chunks),*)
            )
        })
    } else {
        Some(quote! {
            .child(
                (#(#children),*)
            )
        })
    }
}
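// Editorial sketch, not part of this diff: the `chunks(16)` above exists
// because the rendering/routing traits are implemented for tuples up to a
// fixed arity and for tuples of tuples, so more than 16 children are split
// into nested groups (e.g. 20 children become a 16-tuple plus a 4-tuple in
// an outer 2-tuple). The same grouping on plain data:
fn main() {
    let children: Vec<usize> = (0..20).collect();
    let grouped: Vec<Vec<usize>> =
        children.chunks(16).map(|chunk| chunk.to_vec()).collect();
    // the macro performs this split at compile time, emitting tokens of the
    // shape `((c0, ..., c15), (c16, ..., c19))` instead of nested Vecs
    assert_eq!(grouped.len(), 2);
    assert_eq!(grouped[0].len(), 16);
    assert_eq!(grouped[1].len(), 4);
}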
|
||||
|
||||
fn fragment_to_tokens(
|
||||
nodes: &[Node<impl CustomNode>],
|
||||
nodes: &mut [Node<impl CustomNode>],
|
||||
parent_type: TagType,
|
||||
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
|
||||
global_class: Option<&TokenTree>,
|
||||
view_marker: Option<&str>,
|
||||
disable_inert_html: bool,
|
||||
) -> Option<TokenStream> {
|
||||
let children = children_to_tokens(
|
||||
nodes,
|
||||
@@ -129,11 +434,26 @@ fn fragment_to_tokens(
|
||||
parent_slots,
|
||||
global_class,
|
||||
view_marker,
|
||||
true,
|
||||
disable_inert_html,
|
||||
);
|
||||
if children.is_empty() {
|
||||
None
|
||||
} else if children.len() == 1 {
|
||||
children.into_iter().next()
|
||||
} else if children.len() > 16 {
|
||||
// implementations of various traits used in routing and rendering are implemented for
|
||||
// tuples of sizes 0, 1, 2, 3, ... N. N varies but is > 16. The traits are also implemented
|
||||
// for tuples of tuples, so if we have more than 16 items, we can split them out into
|
||||
// multiple tuples.
|
||||
let chunks = children.chunks(16).map(|children| {
|
||||
quote! {
|
||||
(#(#children),*)
|
||||
}
|
||||
});
|
||||
Some(quote! {
|
||||
(#(#chunks),*)
|
||||
})
|
||||
} else {
|
||||
Some(quote! {
|
||||
(#(#children),*)
|
||||
@@ -142,19 +462,23 @@ fn fragment_to_tokens(
|
||||
}
|
||||
|
||||
fn children_to_tokens(
|
||||
nodes: &[Node<impl CustomNode>],
|
||||
nodes: &mut [Node<impl CustomNode>],
|
||||
parent_type: TagType,
|
||||
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
|
||||
global_class: Option<&TokenTree>,
|
||||
view_marker: Option<&str>,
|
||||
top_level: bool,
|
||||
disable_inert_html: bool,
|
||||
) -> Vec<TokenStream> {
|
||||
if nodes.len() == 1 {
|
||||
match node_to_tokens(
|
||||
&nodes[0],
|
||||
&mut nodes[0],
|
||||
parent_type,
|
||||
parent_slots,
|
||||
global_class,
|
||||
view_marker,
|
||||
top_level,
|
||||
disable_inert_html,
|
||||
) {
|
||||
Some(tokens) => vec![tokens],
|
||||
None => vec![],
|
||||
@@ -162,7 +486,7 @@ fn children_to_tokens(
|
||||
} else {
|
||||
let mut slots = HashMap::new();
|
||||
let nodes = nodes
|
||||
.iter()
|
||||
.iter_mut()
|
||||
.filter_map(|node| {
|
||||
node_to_tokens(
|
||||
node,
|
||||
@@ -170,6 +494,8 @@ fn children_to_tokens(
|
||||
Some(&mut slots),
|
||||
global_class,
|
||||
view_marker,
|
||||
top_level,
|
||||
disable_inert_html,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
@@ -186,12 +512,16 @@ fn children_to_tokens(
|
||||
}
|
||||
|
||||
fn node_to_tokens(
|
||||
node: &Node<impl CustomNode>,
|
||||
node: &mut Node<impl CustomNode>,
|
||||
parent_type: TagType,
|
||||
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
|
||||
global_class: Option<&TokenTree>,
|
||||
view_marker: Option<&str>,
|
||||
top_level: bool,
|
||||
disable_inert_html: bool,
|
||||
) -> Option<TokenStream> {
|
||||
let is_inert = !disable_inert_html && is_inert_element(node);
|
||||
|
||||
match node {
|
||||
Node::Comment(_) => None,
|
||||
Node::Doctype(node) => {
|
||||
@@ -199,11 +529,12 @@ fn node_to_tokens(
|
||||
Some(quote! { ::leptos::tachys::html::doctype(#value) })
|
||||
}
|
||||
Node::Fragment(fragment) => fragment_to_tokens(
|
||||
&fragment.children,
|
||||
&mut fragment.children,
|
||||
parent_type,
|
||||
parent_slots,
|
||||
global_class,
|
||||
view_marker,
|
||||
disable_inert_html,
|
||||
),
|
||||
Node::Block(block) => Some(quote! { #block }),
|
||||
Node::Text(text) => Some(text_to_tokens(&text.value)),
|
||||
@@ -212,13 +543,20 @@ fn node_to_tokens(
|
||||
let text = syn::LitStr::new(&text, raw.span());
|
||||
Some(text_to_tokens(&text))
|
||||
}
|
||||
Node::Element(node) => element_to_tokens(
|
||||
node,
|
||||
parent_type,
|
||||
parent_slots,
|
||||
global_class,
|
||||
view_marker,
|
||||
),
|
||||
Node::Element(el_node) => {
|
||||
if !top_level && is_inert {
|
||||
inert_element_to_tokens(node, global_class)
|
||||
} else {
|
||||
element_to_tokens(
|
||||
el_node,
|
||||
parent_type,
|
||||
parent_slots,
|
||||
global_class,
|
||||
view_marker,
|
||||
disable_inert_html,
|
||||
)
|
||||
}
|
||||
}
|
||||
Node::Custom(node) => Some(node.to_token_stream()),
|
||||
}
|
||||
}
|
||||
@@ -237,12 +575,57 @@ fn text_to_tokens(text: &LitStr) -> TokenStream {
|
||||
}
|
||||
|
||||
pub(crate) fn element_to_tokens(
|
||||
node: &NodeElement<impl CustomNode>,
|
||||
node: &mut NodeElement<impl CustomNode>,
|
||||
mut parent_type: TagType,
|
||||
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
|
||||
global_class: Option<&TokenTree>,
|
||||
view_marker: Option<&str>,
|
||||
disable_inert_html: bool,
|
||||
) -> Option<TokenStream> {
|
||||
    // attribute sorting:
    //
    // the `class` and `style` attributes overwrite individual `class:` and `style:` attributes
    // when they are set. as a result, we're going to sort the attributes so that `class` and
    // `style` always come before all other attributes.

    // if there's a spread marker, we don't want to move `class` or `style` before it
    // so let's only sort attributes that come *before* a spread marker
|
||||
let spread_position = node
|
||||
.attributes()
|
||||
.iter()
|
||||
.position(|n| match n {
|
||||
NodeAttribute::Block(node) => as_spread_attr(node).is_some(),
|
||||
_ => false,
|
||||
})
|
||||
.unwrap_or_else(|| node.attributes().len());
|
||||
|
||||
// now, sort the attributes
|
||||
node.attributes_mut()[0..spread_position].sort_by(|a, b| {
|
||||
let key_a = match a {
|
||||
NodeAttribute::Attribute(attr) => match &attr.key {
|
||||
NodeName::Path(attr) => {
|
||||
attr.path.segments.first().map(|n| n.ident.to_string())
|
||||
}
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
let key_b = match b {
|
||||
NodeAttribute::Attribute(attr) => match &attr.key {
|
||||
NodeName::Path(attr) => {
|
||||
attr.path.segments.first().map(|n| n.ident.to_string())
|
||||
}
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
match (key_a.as_deref(), key_b.as_deref()) {
|
||||
(Some("class"), _) | (Some("style"), _) => Ordering::Less,
|
||||
(_, Some("class")) | (_, Some("style")) => Ordering::Greater,
|
||||
_ => Ordering::Equal,
|
||||
}
|
||||
});
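// Editorial sketch, not part of this diff: the comparator above only has to
// float `class`/`style` to the front; because `sort_by` is a stable sort,
// every other attribute keeps its relative order. The attribute names below
// are made up for illustration.
fn main() {
    use std::cmp::Ordering;
    let mut attrs = vec!["id", "style:color", "class", "on:click", "style"];
    let rank = |key: &str| usize::from(key != "class" && key != "style");
    attrs.sort_by(|a, b| match (rank(a), rank(b)) {
        (0, 1) => Ordering::Less,
        (1, 0) => Ordering::Greater,
        _ => Ordering::Equal,
    });
    assert_eq!(attrs, ["class", "style", "id", "style:color", "on:click"]);
}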
|
||||
|
||||
// check for duplicate attribute names and emit an error for all subsequent ones
|
||||
let mut names = HashSet::new();
|
||||
for attr in node.attributes() {
|
||||
@@ -253,7 +636,7 @@ pub(crate) fn element_to_tokens(
|
||||
name.push_str(&tuple_name);
|
||||
}
|
||||
if names.contains(&name) {
|
||||
proc_macro_error::emit_error!(
|
||||
proc_macro_error2::emit_error!(
|
||||
attr.span(),
|
||||
format!("This element already has a `{name}` attribute.")
|
||||
);
|
||||
@@ -266,10 +649,17 @@ pub(crate) fn element_to_tokens(
|
||||
let name = node.name();
|
||||
if is_component_node(node) {
|
||||
if let Some(slot) = get_slot(node) {
|
||||
slot_to_tokens(node, slot, parent_slots, global_class);
|
||||
let slot = slot.clone();
|
||||
slot_to_tokens(
|
||||
node,
|
||||
&slot,
|
||||
parent_slots,
|
||||
global_class,
|
||||
disable_inert_html,
|
||||
);
|
||||
None
|
||||
} else {
|
||||
Some(component_to_tokens(node, global_class))
|
||||
Some(component_to_tokens(node, global_class, disable_inert_html))
|
||||
}
|
||||
} else if is_spread_marker(node) {
|
||||
let mut attributes = Vec::new();
|
||||
@@ -331,7 +721,7 @@ pub(crate) fn element_to_tokens(
|
||||
match parent_type {
|
||||
TagType::Unknown => {
|
||||
// We decided this warning was too aggressive, but I'll leave it here in case we want it later
|
||||
/* proc_macro_error::emit_warning!(name.span(), "The view macro is assuming this is an HTML element, \
|
||||
/* proc_macro_error2::emit_warning!(name.span(), "The view macro is assuming this is an HTML element, \
|
||||
but it is ambiguous; if it is an SVG or MathML element, prefix with svg:: or math::"); */
|
||||
quote! {
|
||||
::leptos::tachys::html::element::#name()
|
||||
@@ -381,16 +771,17 @@ pub(crate) fn element_to_tokens(
|
||||
let self_closing = is_self_closing(node);
|
||||
let children = if !self_closing {
|
||||
element_children_to_tokens(
|
||||
&node.children,
|
||||
&mut node.children,
|
||||
parent_type,
|
||||
parent_slots,
|
||||
global_class,
|
||||
view_marker,
|
||||
disable_inert_html,
|
||||
)
|
||||
} else {
|
||||
if !node.children.is_empty() {
|
||||
let name = node.name();
|
||||
proc_macro_error::emit_error!(
|
||||
proc_macro_error2::emit_error!(
|
||||
name.span(),
|
||||
format!(
|
||||
"Self-closing elements like <{name}> cannot have \
|
||||
@@ -430,6 +821,25 @@ fn is_spread_marker(node: &NodeElement<impl CustomNode>) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
fn as_spread_attr(node: &NodeBlock) -> Option<Option<&Expr>> {
|
||||
if let NodeBlock::ValidBlock(block) = node {
|
||||
match block.stmts.first() {
|
||||
Some(Stmt::Expr(
|
||||
Expr::Range(ExprRange {
|
||||
start: None,
|
||||
limits: RangeLimits::HalfOpen(_),
|
||||
end,
|
||||
..
|
||||
}),
|
||||
_,
|
||||
)) => Some(end.as_deref()),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn attribute_to_tokens(
|
||||
tag_type: TagType,
|
||||
node: &NodeAttribute,
|
||||
@@ -437,29 +847,18 @@ fn attribute_to_tokens(
|
||||
is_custom: bool,
|
||||
) -> TokenStream {
|
||||
match node {
|
||||
NodeAttribute::Block(node) => {
|
||||
let dotted = if let NodeBlock::ValidBlock(block) = node {
|
||||
match block.stmts.first() {
|
||||
Some(Stmt::Expr(
|
||||
Expr::Range(ExprRange {
|
||||
start: None,
|
||||
limits: RangeLimits::HalfOpen(_),
|
||||
end: Some(end),
|
||||
..
|
||||
}),
|
||||
_,
|
||||
)) => Some(quote! { .add_any_attr(#end) }),
|
||||
_ => None,
|
||||
NodeAttribute::Block(node) => as_spread_attr(node)
|
||||
.flatten()
|
||||
.map(|end| {
|
||||
quote! {
|
||||
.add_any_attr(#end)
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
dotted.unwrap_or_else(|| {
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
quote! {
|
||||
.add_any_attr(#[allow(unused_braces)] { #node })
|
||||
}
|
||||
})
|
||||
}
|
||||
}),
|
||||
NodeAttribute::Attribute(node) => {
|
||||
let name = node.key.to_string();
|
||||
if name == "node_ref" {
|
||||
@@ -529,7 +928,7 @@ fn attribute_to_tokens(
|
||||
&& node.value().and_then(value_to_string).is_none()
|
||||
{
|
||||
let span = node.key.span();
|
||||
proc_macro_error::emit_error!(span, "Combining a global class (view! { class = ... }) \
|
||||
proc_macro_error2::emit_error!(span, "Combining a global class (view! { class = ... }) \
|
||||
and a dynamic `class=` attribute on an element causes runtime inconsistencies. You can \
|
||||
toggle individual classes dynamically with the `class:name=value` syntax. \n\nSee this issue \
|
||||
for more information and an example: https://github.com/leptos-rs/leptos/issues/773")
|
||||
@@ -559,8 +958,8 @@ pub(crate) fn attribute_absolute(
|
||||
match id {
|
||||
NodeNameFragment::Ident(id) => {
|
||||
let value = attribute_value(node);
|
||||
// ignore `let:`
|
||||
if id == "let" {
|
||||
// ignore `let:` and `clone:`
|
||||
if id == "let" || id == "clone" {
|
||||
None
|
||||
} else if id == "attr" {
|
||||
let key = &parts[1];
|
||||
@@ -620,7 +1019,7 @@ pub(crate) fn attribute_absolute(
|
||||
quote! { ::leptos::tachys::html::event::#on(#ty, #handler) },
|
||||
)
|
||||
} else {
|
||||
proc_macro_error::abort!(
|
||||
proc_macro_error2::abort!(
|
||||
id.span(),
|
||||
&format!(
|
||||
"`{id}:` syntax is not supported on \
|
||||
@@ -1121,7 +1520,7 @@ pub(crate) fn ident_from_tag_name(tag_name: &NodeName) -> Ident {
|
||||
.expect("element needs to have a name"),
|
||||
NodeName::Block(_) => {
|
||||
let span = tag_name.span();
|
||||
proc_macro_error::emit_error!(
|
||||
proc_macro_error2::emit_error!(
|
||||
span,
|
||||
"blocks not allowed in tag-name position"
|
||||
);
|
||||
|
||||
@@ -7,10 +7,11 @@ use std::collections::HashMap;
|
||||
use syn::spanned::Spanned;
|
||||
|
||||
pub(crate) fn slot_to_tokens(
|
||||
node: &NodeElement<impl CustomNode>,
|
||||
node: &mut NodeElement<impl CustomNode>,
|
||||
slot: &KeyedAttribute,
|
||||
parent_slots: Option<&mut HashMap<String, Vec<TokenStream>>>,
|
||||
global_class: Option<&TokenTree>,
|
||||
disable_inert_html: bool,
|
||||
) {
|
||||
let name = slot.key.to_string();
|
||||
let name = name.trim();
|
||||
@@ -23,27 +24,32 @@ pub(crate) fn slot_to_tokens(
|
||||
let component_name = ident_from_tag_name(node.name());
|
||||
|
||||
let Some(parent_slots) = parent_slots else {
|
||||
proc_macro_error::emit_error!(
|
||||
proc_macro_error2::emit_error!(
|
||||
node.name().span(),
|
||||
"slots cannot be used inside HTML elements"
|
||||
);
|
||||
return;
|
||||
};
|
||||
|
||||
let attrs = node.attributes().iter().filter_map(|node| {
|
||||
if let NodeAttribute::Attribute(node) = node {
|
||||
if is_slot(node) {
|
||||
None
|
||||
let attrs = node
|
||||
.attributes()
|
||||
.iter()
|
||||
.filter_map(|node| {
|
||||
if let NodeAttribute::Attribute(node) = node {
|
||||
if is_slot(node) {
|
||||
None
|
||||
} else {
|
||||
Some(node)
|
||||
}
|
||||
} else {
|
||||
Some(node)
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let props = attrs
|
||||
.clone()
|
||||
.iter()
|
||||
.filter(|attr| {
|
||||
!attr.key.to_string().starts_with("let:")
|
||||
&& !attr.key.to_string().starts_with("clone:")
|
||||
@@ -65,7 +71,7 @@ pub(crate) fn slot_to_tokens(
|
||||
});
|
||||
|
||||
let items_to_bind = attrs
|
||||
.clone()
|
||||
.iter()
|
||||
.filter_map(|attr| {
|
||||
attr.key
|
||||
.to_string()
|
||||
@@ -75,7 +81,7 @@ pub(crate) fn slot_to_tokens(
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let items_to_clone = attrs
|
||||
.clone()
|
||||
.iter()
|
||||
.filter_map(|attr| {
|
||||
attr.key
|
||||
.to_string()
|
||||
@@ -85,6 +91,7 @@ pub(crate) fn slot_to_tokens(
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let dyn_attrs = attrs
|
||||
.iter()
|
||||
.filter(|attr| attr.key.to_string().starts_with("attr:"))
|
||||
.filter_map(|attr| {
|
||||
let name = &attr.key.to_string();
|
||||
@@ -107,11 +114,12 @@ pub(crate) fn slot_to_tokens(
|
||||
quote! {}
|
||||
} else {
|
||||
let children = fragment_to_tokens(
|
||||
&node.children,
|
||||
&mut node.children,
|
||||
TagType::Unknown,
|
||||
Some(&mut slots),
|
||||
global_class,
|
||||
None,
|
||||
disable_inert_html,
|
||||
);
|
||||
|
||||
// TODO view markers for hot-reloading
|
||||
|
||||
@@ -20,6 +20,7 @@ futures = "0.3.30"
|
||||
|
||||
any_spawner = { workspace = true }
|
||||
tachys = { workspace = true, optional = true, features = ["reactive_graph"] }
|
||||
send_wrapper = "0.6"
|
||||
|
||||
# serialization formats
|
||||
serde = { version = "1.0" }
|
||||
|
||||
@@ -7,10 +7,11 @@ use reactive_graph::{
|
||||
AnySource, AnySubscriber, ReactiveNode, Source, Subscriber,
|
||||
ToAnySource, ToAnySubscriber,
|
||||
},
|
||||
owner::{use_context, LocalStorage},
|
||||
owner::use_context,
|
||||
signal::guards::{AsyncPlain, ReadGuard},
|
||||
traits::{DefinedAt, ReadUntracked},
|
||||
traits::{DefinedAt, IsDisposed, ReadUntracked},
|
||||
};
|
||||
use send_wrapper::SendWrapper;
|
||||
use std::{
|
||||
future::{pending, Future, IntoFuture},
|
||||
panic::Location,
|
||||
@@ -120,6 +121,13 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> IsDisposed for ArcLocalResource<T> {
|
||||
#[inline(always)]
|
||||
fn is_disposed(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> ToAnySource for ArcLocalResource<T> {
|
||||
fn to_any_source(&self) -> AnySource {
|
||||
self.data.to_any_source()
|
||||
@@ -175,7 +183,7 @@ impl<T> Subscriber for ArcLocalResource<T> {
|
||||
}
|
||||
|
||||
pub struct LocalResource<T> {
    data: AsyncDerived<T, LocalStorage>,
    data: AsyncDerived<SendWrapper<T>>,
    #[cfg(debug_assertions)]
    defined_at: &'static Location<'static>,
}
@@ -217,9 +225,13 @@ impl<T> LocalResource<T> {

        Self {
            data: if cfg!(feature = "ssr") {
                AsyncDerived::new_mock_unsync(fetcher)
                AsyncDerived::new_mock(fetcher)
            } else {
                AsyncDerived::new_unsync(fetcher)
                let fetcher = SendWrapper::new(fetcher);
                AsyncDerived::new(move || {
                    let fut = fetcher();
                    async move { SendWrapper::new(fut.await) }
                })
            },
            #[cfg(debug_assertions)]
            defined_at: Location::caller(),
|
||||
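// Editorial sketch, not part of this diff: the change above stores the
// fetcher's output as `SendWrapper<T>` so a thread-local (!Send) value can
// sit behind a `Send` handle, on the promise that it is only accessed from
// the thread that created it. Minimal shape of that pattern (requires the
// `send_wrapper` crate; `Rc` just stands in for a browser-only value):
use send_wrapper::SendWrapper;
use std::rc::Rc;

fn main() {
    let local_only = Rc::new(42);
    let wrapped: SendWrapper<Rc<i32>> = SendWrapper::new(local_only);
    // `wrapped` is Send + Sync even though `Rc<i32>` is not, but
    // dereferencing it from another thread would panic at runtime.
    assert_eq!(**wrapped, 42);
}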
@@ -232,9 +244,14 @@ where
|
||||
T: Clone + 'static,
|
||||
{
|
||||
type Output = T;
|
||||
type IntoFuture = AsyncDerivedFuture<T>;
|
||||
type IntoFuture = futures::future::Map<
|
||||
AsyncDerivedFuture<SendWrapper<T>>,
|
||||
fn(SendWrapper<T>) -> T,
|
||||
>;
|
||||
|
||||
fn into_future(self) -> Self::IntoFuture {
|
||||
use futures::FutureExt;
|
||||
|
||||
if let Some(mut notifier) = use_context::<LocalResourceNotifier>() {
|
||||
notifier.notify();
|
||||
} else if cfg!(feature = "ssr") {
|
||||
@@ -244,7 +261,7 @@ where
|
||||
always pending on the server."
|
||||
);
|
||||
}
|
||||
self.data.into_future()
|
||||
self.data.into_future().map(|value| (*value).clone())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -265,7 +282,8 @@ impl<T> ReadUntracked for LocalResource<T>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
{
|
||||
type Value = ReadGuard<Option<T>, AsyncPlain<Option<T>>>;
|
||||
type Value =
|
||||
ReadGuard<Option<SendWrapper<T>>, AsyncPlain<Option<SendWrapper<T>>>>;
|
||||
|
||||
fn try_read_untracked(&self) -> Option<Self::Value> {
|
||||
if let Some(mut notifier) = use_context::<LocalResourceNotifier>() {
|
||||
@@ -281,6 +299,12 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> IsDisposed for LocalResource<T> {
|
||||
fn is_disposed(&self) -> bool {
|
||||
self.data.is_disposed()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> ToAnySource for LocalResource<T>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
|
||||
@@ -24,12 +24,37 @@ use reactive_graph::{
|
||||
prelude::*,
|
||||
signal::{ArcRwSignal, RwSignal},
|
||||
};
|
||||
use std::{future::IntoFuture, ops::Deref};
|
||||
use std::{future::IntoFuture, ops::Deref, panic::Location};
|
||||
|
||||
pub struct ArcResource<T, Ser = JsonSerdeCodec> {
|
||||
ser: PhantomData<Ser>,
|
||||
refetch: ArcRwSignal<usize>,
|
||||
data: ArcAsyncDerived<T>,
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: &'static Location<'static>,
|
||||
}
|
||||
|
||||
impl<T, Ser> Debug for ArcResource<T, Ser> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut d = f.debug_struct("ArcResource");
|
||||
d.field("ser", &self.ser).field("data", &self.data);
|
||||
#[cfg(debug_assertions)]
|
||||
d.field("defined_at", self.defined_at);
|
||||
d.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, Ser> DefinedAt for ArcResource<T, Ser> {
|
||||
fn defined_at(&self) -> Option<&'static Location<'static>> {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
Some(self.defined_at)
|
||||
}
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, Ser> Clone for ArcResource<T, Ser> {
|
||||
@@ -38,6 +63,8 @@ impl<T, Ser> Clone for ArcResource<T, Ser> {
|
||||
ser: self.ser,
|
||||
refetch: self.refetch.clone(),
|
||||
data: self.data.clone(),
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: self.defined_at,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -81,20 +108,23 @@ where
|
||||
let is_ready = initial.is_some();
|
||||
|
||||
let refetch = ArcRwSignal::new(0);
|
||||
let source = ArcMemo::new(move |_| source());
|
||||
let source = ArcMemo::new({
|
||||
let refetch = refetch.clone();
|
||||
move |_| (refetch.get(), source())
|
||||
});
|
||||
let fun = {
|
||||
let source = source.clone();
|
||||
let refetch = refetch.clone();
|
||||
move || {
|
||||
refetch.track();
|
||||
fetcher(source.get())
|
||||
let (_, source) = source.get();
|
||||
fetcher(source)
|
||||
}
|
||||
};
|
||||
|
||||
let data =
|
||||
ArcAsyncDerived::new_with_initial_without_spawning(initial, fun);
|
||||
let data = ArcAsyncDerived::new_with_manual_dependencies(
|
||||
initial, fun, &source,
|
||||
);
|
||||
if is_ready {
|
||||
source.with(|_| ());
|
||||
source.with_untracked(|_| ());
|
||||
source.add_subscriber(data.to_any_subscriber());
|
||||
}
|
||||
|
||||
@@ -107,25 +137,29 @@ where
|
||||
shared_context.defer_stream(Box::pin(data.ready()));
|
||||
}
|
||||
|
||||
shared_context.write_async(
|
||||
id,
|
||||
Box::pin(async move {
|
||||
ready_fut.await;
|
||||
value.with_untracked(|data| match &data {
|
||||
// TODO handle serialization errors
|
||||
Some(val) => {
|
||||
Ser::encode(val).unwrap().into_encoded_string()
|
||||
}
|
||||
_ => unreachable!(),
|
||||
})
|
||||
}),
|
||||
);
|
||||
if shared_context.get_is_hydrating() {
|
||||
shared_context.write_async(
|
||||
id,
|
||||
Box::pin(async move {
|
||||
ready_fut.await;
|
||||
value.with_untracked(|data| match &data {
|
||||
// TODO handle serialization errors
|
||||
Some(val) => {
|
||||
Ser::encode(val).unwrap().into_encoded_string()
|
||||
}
|
||||
_ => unreachable!(),
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
ArcResource {
|
||||
ser: PhantomData,
|
||||
data,
|
||||
refetch,
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -443,6 +477,37 @@ where
|
||||
ser: PhantomData<Ser>,
|
||||
data: AsyncDerived<T>,
|
||||
refetch: RwSignal<usize>,
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: &'static Location<'static>,
|
||||
}
|
||||
|
||||
impl<T, Ser> Debug for Resource<T, Ser>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut d = f.debug_struct("ArcResource");
|
||||
d.field("ser", &self.ser).field("data", &self.data);
|
||||
#[cfg(debug_assertions)]
|
||||
d.field("defined_at", self.defined_at);
|
||||
d.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, Ser> DefinedAt for Resource<T, Ser>
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
{
|
||||
fn defined_at(&self) -> Option<&'static Location<'static>> {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
Some(self.defined_at)
|
||||
}
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Send + Sync + 'static, Ser> Copy for Resource<T, Ser> {}
|
||||
@@ -698,6 +763,8 @@ where
|
||||
ser: PhantomData,
|
||||
data: data.into(),
|
||||
refetch: refetch.into(),
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -191,16 +191,19 @@ where
|
||||
let init = initial();
|
||||
#[cfg(feature = "ssr")]
|
||||
if let Some(sc) = sc {
|
||||
match Ser::encode(&init)
|
||||
.map(IntoEncodedString::into_encoded_string)
|
||||
{
|
||||
Ok(value) => {
|
||||
sc.write_async(id, Box::pin(async move { value }))
|
||||
}
|
||||
#[allow(unused_variables)] // used in tracing
|
||||
Err(e) => {
|
||||
#[cfg(feature = "tracing")]
|
||||
tracing::error!("couldn't serialize: {e:?}");
|
||||
if sc.get_is_hydrating() {
|
||||
match Ser::encode(&init)
|
||||
.map(IntoEncodedString::into_encoded_string)
|
||||
{
|
||||
Ok(value) => sc.write_async(
|
||||
id,
|
||||
Box::pin(async move { value }),
|
||||
),
|
||||
#[allow(unused_variables)] // used in tracing
|
||||
Err(e) => {
|
||||
#[cfg(feature = "tracing")]
|
||||
tracing::error!("couldn't serialize: {e:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "leptos_meta"
|
||||
version = "0.7.0-beta4"
|
||||
version = "0.7.0-beta6"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
repository = "https://github.com/leptos-rs/leptos"
|
||||
|
||||
@@ -29,7 +29,7 @@ use leptos::{
|
||||
/// #[component]
|
||||
/// fn MyApp() -> impl IntoView {
|
||||
/// provide_meta_context();
|
||||
/// let (prefers_dark, set_prefers_dark) = create_signal(false);
|
||||
/// let (prefers_dark, set_prefers_dark) = signal(false);
|
||||
/// let body_class = move || {
|
||||
/// if prefers_dark.get() {
|
||||
/// "dark".to_string()
|
||||
|
||||
@@ -43,6 +43,9 @@ pub fn HashedStylesheet(
    /// An ID for the stylesheet.
    #[prop(optional, into)]
    id: Option<String>,
    /// A base url, not including a trailing slash
    #[prop(optional, into)]
    root: Option<String>,
) -> impl IntoView {
    let mut css_file_name = options.output_name.to_string();
    if options.hash_files {
|
||||
@@ -60,7 +63,8 @@ pub fn HashedStylesheet(
|
||||
if !line.is_empty() {
|
||||
if let Some((file, hash)) = line.split_once(':') {
|
||||
if file == "css" {
|
||||
css_file_name.push_str(&format!(".{}", hash));
|
||||
css_file_name
|
||||
.push_str(&format!(".{}", hash.trim()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -69,11 +73,12 @@ pub fn HashedStylesheet(
|
||||
}
|
||||
css_file_name.push_str(".css");
|
||||
let pkg_path = &options.site_pkg_dir;
|
||||
let root = root.unwrap_or_default();
|
||||
// TODO additional attributes
|
||||
register(
|
||||
link()
|
||||
.id(id)
|
||||
.rel("stylesheet")
|
||||
.href(format!("/{pkg_path}/{css_file_name}")),
|
||||
.href(format!("{root}/{pkg_path}/{css_file_name}")),
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "next_tuple"
|
||||
version = "0.1.0-beta4"
|
||||
version = "0.1.0-beta6"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "reactive_graph"
|
||||
version = "0.1.0-beta4"
|
||||
version = "0.1.0-beta6"
|
||||
authors = ["Greg Johnston"]
|
||||
license = "MIT"
|
||||
readme = "../README.md"
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use crate::{
|
||||
computed::{ArcMemo, Memo},
|
||||
diagnostics::is_suppressing_resource_load,
|
||||
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
|
||||
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
|
||||
signal::{ArcRwSignal, RwSignal},
|
||||
traits::{DefinedAt, Dispose, Get, GetUntracked, Update},
|
||||
unwrap_signal,
|
||||
@@ -235,7 +235,7 @@ where
|
||||
self.input.try_update(|inp| *inp = Some(input));
|
||||
|
||||
// Spawn the task
|
||||
Executor::spawn({
|
||||
crate::spawn({
|
||||
let input = self.input.clone();
|
||||
let version = self.version.clone();
|
||||
let value = self.value.clone();
|
||||
@@ -575,7 +575,7 @@ where
|
||||
/// let action3 = Action::new(|input: &(usize, String)| async { todo!() });
|
||||
/// ```
|
||||
pub struct Action<I, O, S = SyncStorage> {
|
||||
inner: StoredValue<ArcAction<I, O>, S>,
|
||||
inner: ArenaItem<ArcAction<I, O>, S>,
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: &'static Location<'static>,
|
||||
}
|
||||
@@ -639,7 +639,7 @@ where
|
||||
Fu: Future<Output = O> + Send + 'static,
|
||||
{
|
||||
Self {
|
||||
inner: StoredValue::new(ArcAction::new(action_fn)),
|
||||
inner: ArenaItem::new(ArcAction::new(action_fn)),
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
@@ -664,9 +664,7 @@ where
|
||||
Fu: Future<Output = O> + Send + 'static,
|
||||
{
|
||||
Self {
|
||||
inner: StoredValue::new(ArcAction::new_with_value(
|
||||
value, action_fn,
|
||||
)),
|
||||
inner: ArenaItem::new(ArcAction::new_with_value(value, action_fn)),
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
@@ -688,7 +686,7 @@ where
|
||||
Fu: Future<Output = O> + Send + 'static,
|
||||
{
|
||||
Self {
|
||||
inner: StoredValue::new_local(ArcAction::new_unsync(action_fn)),
|
||||
inner: ArenaItem::new_local(ArcAction::new_unsync(action_fn)),
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
@@ -704,7 +702,7 @@ where
|
||||
Fu: Future<Output = O> + Send + 'static,
|
||||
{
|
||||
Self {
|
||||
inner: StoredValue::new_local(ArcAction::new_unsync_with_value(
|
||||
inner: ArenaItem::new_local(ArcAction::new_unsync_with_value(
|
||||
value, action_fn,
|
||||
)),
|
||||
#[cfg(debug_assertions)]
|
||||
@@ -908,7 +906,9 @@ where
|
||||
/// Calls the `async` function with a reference to the input type as its argument.
|
||||
#[track_caller]
|
||||
pub fn dispatch(&self, input: I) -> ActionAbortHandle {
|
||||
self.inner.with_value(|inner| inner.dispatch(input))
|
||||
self.inner
|
||||
.try_with_value(|inner| inner.dispatch(input))
|
||||
.unwrap_or_else(unwrap_signal!(self))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -921,7 +921,9 @@ where
|
||||
/// Calls the `async` function with a reference to the input type as its argument.
|
||||
#[track_caller]
|
||||
pub fn dispatch_local(&self, input: I) -> ActionAbortHandle {
|
||||
self.inner.with_value(|inner| inner.dispatch_local(input))
|
||||
self.inner
|
||||
.try_with_value(|inner| inner.dispatch_local(input))
|
||||
.unwrap_or_else(unwrap_signal!(self))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -942,7 +944,7 @@ where
|
||||
Fu: Future<Output = O> + 'static,
|
||||
{
|
||||
Self {
|
||||
inner: StoredValue::new_with_storage(ArcAction::new_unsync(
|
||||
inner: ArenaItem::new_with_storage(ArcAction::new_unsync(
|
||||
action_fn,
|
||||
)),
|
||||
#[cfg(debug_assertions)]
|
||||
@@ -961,7 +963,7 @@ where
|
||||
Fu: Future<Output = O> + 'static,
|
||||
{
|
||||
Self {
|
||||
inner: StoredValue::new_with_storage(
|
||||
inner: ArenaItem::new_with_storage(
|
||||
ArcAction::new_unsync_with_value(value, action_fn),
|
||||
),
|
||||
#[cfg(debug_assertions)]
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
use crate::{
|
||||
diagnostics::is_suppressing_resource_load,
|
||||
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
|
||||
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
|
||||
signal::{ArcReadSignal, ArcRwSignal, ReadSignal, RwSignal},
|
||||
traits::{DefinedAt, Dispose, GetUntracked, Set, Update},
|
||||
unwrap_signal,
|
||||
};
|
||||
use any_spawner::Executor;
|
||||
use std::{fmt::Debug, future::Future, panic::Location, pin::Pin, sync::Arc};
|
||||
|
||||
/// An action that synchronizes multiple imperative `async` calls to the reactive system,
|
||||
@@ -46,7 +45,7 @@ use std::{fmt::Debug, future::Future, panic::Location, pin::Pin, sync::Arc};
|
||||
/// # });
|
||||
/// ```
|
||||
pub struct MultiAction<I, O, S = SyncStorage> {
|
||||
inner: StoredValue<ArcMultiAction<I, O>, S>,
|
||||
inner: ArenaItem<ArcMultiAction<I, O>, S>,
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: &'static Location<'static>,
|
||||
}
|
||||
@@ -130,9 +129,7 @@ where
|
||||
Fut: Future<Output = O> + Send + 'static,
|
||||
{
|
||||
Self {
|
||||
inner: StoredValue::new_with_storage(ArcMultiAction::new(
|
||||
action_fn,
|
||||
)),
|
||||
inner: ArenaItem::new_with_storage(ArcMultiAction::new(action_fn)),
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
}
|
||||
@@ -190,7 +187,7 @@ where
|
||||
/// ```
|
||||
pub fn dispatch(&self, input: I) {
|
||||
if !is_suppressing_resource_load() {
|
||||
self.inner.with_value(|inner| inner.dispatch(input));
|
||||
self.inner.try_with_value(|inner| inner.dispatch(input));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -233,7 +230,8 @@ where
|
||||
/// # });
|
||||
/// ```
|
||||
pub fn dispatch_sync(&self, value: O) {
|
||||
self.inner.with_value(|inner| inner.dispatch_sync(value));
|
||||
self.inner
|
||||
.try_with_value(|inner| inner.dispatch_sync(value));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -507,7 +505,7 @@ where
|
||||
|
||||
let version = self.version.clone();
|
||||
|
||||
Executor::spawn(async move {
|
||||
crate::spawn(async move {
|
||||
let new_value = fut.await;
|
||||
let canceled = submission.canceled.get_untracked();
|
||||
if !canceled {
|
||||
|
||||
@@ -163,10 +163,10 @@ where
|
||||
#[deprecated = "This function is being removed to conform to Rust idioms. \
|
||||
Please use `Selector::new()` instead."]
|
||||
pub fn create_selector<T>(
|
||||
source: impl Fn() -> T + Clone + 'static,
|
||||
source: impl Fn() -> T + Clone + Send + Sync + 'static,
|
||||
) -> Selector<T>
|
||||
where
|
||||
T: PartialEq + Eq + Clone + std::hash::Hash + 'static,
|
||||
T: PartialEq + Eq + Send + Sync + Clone + std::hash::Hash + 'static,
|
||||
{
|
||||
Selector::new(source)
|
||||
}
|
||||
@@ -178,11 +178,11 @@ where
|
||||
#[deprecated = "This function is being removed to conform to Rust idioms. \
|
||||
Please use `Selector::new_with_fn()` instead."]
|
||||
pub fn create_selector_with_fn<T>(
|
||||
source: impl Fn() -> T + Clone + 'static,
|
||||
source: impl Fn() -> T + Clone + Send + Sync + 'static,
|
||||
f: impl Fn(&T, &T) -> bool + Send + Sync + Clone + 'static,
|
||||
) -> Selector<T>
|
||||
where
|
||||
T: PartialEq + Eq + Clone + std::hash::Hash + 'static,
|
||||
T: PartialEq + Eq + Send + Sync + Clone + std::hash::Hash + 'static,
|
||||
{
|
||||
Selector::new_with_fn(source, f)
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ use crate::{
|
||||
guards::{Mapped, Plain, ReadGuard},
|
||||
ArcReadSignal, ArcRwSignal,
|
||||
},
|
||||
traits::{DefinedAt, Get, ReadUntracked},
|
||||
traits::{DefinedAt, Get, IsDisposed, ReadUntracked},
|
||||
};
|
||||
use core::fmt::Debug;
|
||||
use or_poisoned::OrPoisoned;
|
||||
@@ -260,6 +260,16 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static, S> IsDisposed for ArcMemo<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
{
|
||||
#[inline(always)]
|
||||
fn is_disposed(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static, S> ToAnySource for ArcMemo<T, S>
|
||||
where
|
||||
S: Storage<T>,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use super::{
|
||||
inner::ArcAsyncDerivedInner, AsyncDerivedReadyFuture, ScopedFuture,
|
||||
inner::{ArcAsyncDerivedInner, AsyncDerivedState},
|
||||
AsyncDerivedReadyFuture, ScopedFuture,
|
||||
};
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
use crate::owner::Sandboxed;
|
||||
@@ -12,8 +13,14 @@ use crate::{
|
||||
SubscriberSet, ToAnySource, ToAnySubscriber, WithObserver,
|
||||
},
|
||||
owner::{use_context, Owner},
|
||||
signal::guards::{AsyncPlain, ReadGuard, WriteGuard},
|
||||
traits::{DefinedAt, ReadUntracked, Trigger, UntrackableGuard, Writeable},
|
||||
signal::{
|
||||
guards::{AsyncPlain, ReadGuard, WriteGuard},
|
||||
ArcTrigger,
|
||||
},
|
||||
traits::{
|
||||
DefinedAt, IsDisposed, Notify, ReadUntracked, Track, UntrackableGuard,
|
||||
Writeable,
|
||||
},
|
||||
transition::AsyncTransition,
|
||||
};
|
||||
use any_spawner::Executor;
|
||||
@@ -21,6 +28,7 @@ use async_lock::RwLock as AsyncRwLock;
|
||||
use core::fmt::Debug;
|
||||
use futures::{channel::oneshot, FutureExt, StreamExt};
|
||||
use or_poisoned::OrPoisoned;
|
||||
use send_wrapper::SendWrapper;
|
||||
use std::{
|
||||
future::Future,
|
||||
mem,
|
||||
@@ -213,7 +221,7 @@ impl<T> DefinedAt for ArcAsyncDerived<T> {
|
||||
// whether `fun` returns a `Future` that is `Send`. Doing it as a function would,
|
||||
// as far as I can tell, require repeating most of the function body.
|
||||
macro_rules! spawn_derived {
|
||||
($spawner:expr, $initial:ident, $fun:ident, $should_spawn:literal, $force_spawn:literal) => {{
|
||||
($spawner:expr, $initial:ident, $fun:ident, $should_spawn:literal, $force_spawn:literal, $should_track:literal, $source:expr) => {{
|
||||
let (notifier, mut rx) = channel();
|
||||
|
||||
let is_ready = $initial.is_some() && !$force_spawn;
|
||||
@@ -224,7 +232,9 @@ macro_rules! spawn_derived {
|
||||
notifier,
|
||||
sources: SourceSet::new(),
|
||||
subscribers: SubscriberSet::new(),
|
||||
dirty: false
|
||||
state: AsyncDerivedState::Clean,
|
||||
version: 0,
|
||||
suspenses: Vec::new()
|
||||
}));
|
||||
let value = Arc::new(AsyncRwLock::new($initial));
|
||||
let wakers = Arc::new(RwLock::new(Vec::new()));
|
||||
@@ -238,10 +248,17 @@ macro_rules! spawn_derived {
|
||||
loading: Arc::new(AtomicBool::new(!is_ready)),
|
||||
};
|
||||
let any_subscriber = this.to_any_subscriber();
|
||||
let initial_fut = owner.with_cleanup(|| {
|
||||
any_subscriber
|
||||
.with_observer(|| ScopedFuture::new($fun()))
|
||||
});
|
||||
let initial_fut = if $should_track {
|
||||
owner.with_cleanup(|| {
|
||||
any_subscriber
|
||||
.with_observer(|| ScopedFuture::new($fun()))
|
||||
})
|
||||
} else {
|
||||
owner.with_cleanup(|| {
|
||||
any_subscriber
|
||||
.with_observer_untracked(|| ScopedFuture::new($fun()))
|
||||
})
|
||||
};
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
let initial_fut = Sandboxed::new(initial_fut);
|
||||
let mut initial_fut = Box::pin(initial_fut);
|
||||
@@ -258,7 +275,7 @@ macro_rules! spawn_derived {
|
||||
Some(orig_value) => {
|
||||
let mut guard = this.inner.write().or_poisoned();
|
||||
|
||||
guard.dirty = false;
|
||||
guard.state = AsyncDerivedState::Clean;
|
||||
*value.blocking_write() = Some(orig_value);
|
||||
this.loading.store(false, Ordering::Relaxed);
|
||||
(true, None)
|
||||
@@ -278,6 +295,10 @@ macro_rules! spawn_derived {
|
||||
any_subscriber.mark_dirty();
|
||||
}
|
||||
|
||||
if let Some(source) = $source {
|
||||
any_subscriber.with_observer(|| source.track());
|
||||
}
|
||||
|
||||
if $should_spawn {
|
||||
$spawner({
|
||||
let value = Arc::downgrade(&this.value);
|
||||
@@ -286,16 +307,30 @@ macro_rules! spawn_derived {
|
||||
let loading = Arc::downgrade(&this.loading);
|
||||
let fut = async move {
|
||||
while rx.next().await.is_some() {
|
||||
if any_subscriber.with_observer(|| any_subscriber.update_if_necessary()) || first_run.is_some() {
|
||||
let update_if_necessary = if $should_track {
|
||||
any_subscriber
|
||||
.with_observer(|| any_subscriber.update_if_necessary())
|
||||
} else {
|
||||
any_subscriber
|
||||
.with_observer_untracked(|| any_subscriber.update_if_necessary())
|
||||
};
|
||||
if update_if_necessary || first_run.is_some() {
|
||||
match (value.upgrade(), inner.upgrade(), wakers.upgrade(), loading.upgrade()) {
|
||||
(Some(value), Some(inner), Some(wakers), Some(loading)) => {
|
||||
// generate new Future
|
||||
let owner = inner.read().or_poisoned().owner.clone();
|
||||
let fut = initial_fut.take().unwrap_or_else(|| {
|
||||
let fut = owner.with_cleanup(|| {
|
||||
any_subscriber
|
||||
.with_observer(|| ScopedFuture::new($fun()))
|
||||
});
|
||||
let fut = if $should_track {
|
||||
owner.with_cleanup(|| {
|
||||
any_subscriber
|
||||
.with_observer(|| ScopedFuture::new($fun()))
|
||||
})
|
||||
} else {
|
||||
owner.with_cleanup(|| {
|
||||
any_subscriber
|
||||
.with_observer_untracked(|| ScopedFuture::new($fun()))
|
||||
})
|
||||
};
|
||||
#[cfg(feature = "sandboxed-arenas")]
|
||||
let fut = Sandboxed::new(fut);
|
||||
Box::pin(fut)
|
||||
@@ -310,8 +345,27 @@ macro_rules! spawn_derived {
|
||||
|
||||
// generate and assign new value
|
||||
loading.store(true, Ordering::Relaxed);
|
||||
|
||||
let (this_version, suspense_ids) = {
|
||||
let mut guard = inner.write().or_poisoned();
|
||||
guard.version += 1;
|
||||
let version = guard.version;
|
||||
let suspense_ids = mem::take(&mut guard.suspenses)
|
||||
.into_iter()
|
||||
.map(|sc| sc.task_id())
|
||||
.collect::<Vec<_>>();
|
||||
(version, suspense_ids)
|
||||
};
|
||||
|
||||
let new_value = fut.await;
|
||||
Self::set_inner_value(new_value, value, wakers, inner, loading, Some(ready_tx)).await;
|
||||
|
||||
drop(suspense_ids);
|
||||
|
||||
let latest_version = inner.read().or_poisoned().version;
|
||||
|
||||
if latest_version == this_version {
|
||||
Self::set_inner_value(new_value, value, wakers, inner, loading, Some(ready_tx)).await;
|
||||
}
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
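// Editorial sketch, not part of this diff: the `version` bookkeeping above
// is the usual guard against stale async results; each run records the
// version it started with and only writes its value back if no newer run
// has begun since. A standalone, synchronous illustration (all names here
// are made up):
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Mutex;

struct Latest {
    version: AtomicUsize,
    value: Mutex<Option<String>>,
}

impl Latest {
    fn begin(&self) -> usize {
        // bump the version and remember which run this is
        self.version.fetch_add(1, Ordering::SeqCst) + 1
    }

    fn commit(&self, started_as: usize, value: String) {
        // only the most recent run may store its result
        if self.version.load(Ordering::SeqCst) == started_as {
            *self.value.lock().unwrap() = Some(value);
        }
    }
}

fn main() {
    let latest = Latest {
        version: AtomicUsize::new(0),
        value: Mutex::new(None),
    };
    let first = latest.begin();
    let second = latest.begin(); // a newer run supersedes the first
    latest.commit(first, "stale".into());
    latest.commit(second, "fresh".into());
    assert_eq!(latest.value.lock().unwrap().as_deref(), Some("fresh"));
}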
@@ -351,7 +405,7 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
) {
|
||||
loading.store(false, Ordering::Relaxed);
|
||||
|
||||
inner.write().or_poisoned().dirty = true;
|
||||
inner.write().or_poisoned().state = AsyncDerivedState::Notifying;
|
||||
|
||||
if let Some(ready_tx) = ready_tx {
|
||||
// if it's an Err, that just means the Receiver was dropped
|
||||
@@ -370,6 +424,8 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
for waker in mem::take(&mut *wakers.write().or_poisoned()) {
|
||||
waker.wake();
|
||||
}
|
||||
|
||||
inner.write().or_poisoned().state = AsyncDerivedState::Clean;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -398,8 +454,15 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
T: Send + Sync + 'static,
|
||||
Fut: Future<Output = T> + Send + 'static,
|
||||
{
|
||||
let (this, _) =
|
||||
spawn_derived!(Executor::spawn, initial_value, fun, true, true);
|
||||
let (this, _) = spawn_derived!(
|
||||
Executor::spawn,
|
||||
initial_value,
|
||||
fun,
|
||||
true,
|
||||
true,
|
||||
true,
|
||||
None::<ArcTrigger>
|
||||
);
|
||||
this
|
||||
}
|
||||
|
||||
@@ -410,16 +473,25 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
/// where you do not want to run the `Future` unnecessarily.
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn new_with_initial_without_spawning<Fut>(
|
||||
pub fn new_with_manual_dependencies<Fut, S>(
|
||||
initial_value: Option<T>,
|
||||
fun: impl Fn() -> Fut + Send + Sync + 'static,
|
||||
source: &S,
|
||||
) -> Self
|
||||
where
|
||||
T: Send + Sync + 'static,
|
||||
Fut: Future<Output = T> + Send + 'static,
|
||||
S: Track,
|
||||
{
|
||||
let (this, _) =
|
||||
spawn_derived!(Executor::spawn, initial_value, fun, true, false);
|
||||
let (this, _) = spawn_derived!(
|
||||
Executor::spawn,
|
||||
initial_value,
|
||||
fun,
|
||||
true,
|
||||
false,
|
||||
false,
|
||||
Some(source)
|
||||
);
|
||||
this
|
||||
}
|
||||
|
||||
@@ -453,24 +525,13 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
initial_value,
|
||||
fun,
|
||||
true,
|
||||
true
|
||||
true,
|
||||
true,
|
||||
None::<ArcTrigger>
|
||||
);
|
||||
this
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn new_mock_unsync<Fut>(fun: impl Fn() -> Fut + 'static) -> Self
|
||||
where
|
||||
T: 'static,
|
||||
Fut: Future<Output = T> + 'static,
|
||||
{
|
||||
let initial = None::<T>;
|
||||
let (this, _) =
|
||||
spawn_derived!(Executor::spawn_local, initial, fun, false, false);
|
||||
this
|
||||
}
|
||||
|
||||
/// Returns a `Future` that is ready when this resource has next finished loading.
|
||||
pub fn ready(&self) -> AsyncDerivedReadyFuture {
|
||||
AsyncDerivedReadyFuture {
|
||||
@@ -481,6 +542,35 @@ impl<T: 'static> ArcAsyncDerived<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> ArcAsyncDerived<SendWrapper<T>> {
|
||||
#[doc(hidden)]
|
||||
#[track_caller]
|
||||
pub fn new_mock<Fut>(fun: impl Fn() -> Fut + 'static) -> Self
|
||||
where
|
||||
T: 'static,
|
||||
Fut: Future<Output = T> + 'static,
|
||||
{
|
||||
let initial = None::<SendWrapper<T>>;
|
||||
let fun = move || {
|
||||
let fut = fun();
|
||||
async move {
|
||||
let value = fut.await;
|
||||
SendWrapper::new(value)
|
||||
}
|
||||
};
|
||||
let (this, _) = spawn_derived!(
|
||||
Executor::spawn_local,
|
||||
initial,
|
||||
fun,
|
||||
false,
|
||||
false,
|
||||
true,
|
||||
None::<ArcTrigger>
|
||||
);
|
||||
this
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> ReadUntracked for ArcAsyncDerived<T> {
|
||||
type Value = ReadGuard<Option<T>, AsyncPlain<Option<T>>>;
|
||||
|
||||
@@ -489,18 +579,23 @@ impl<T: 'static> ReadUntracked for ArcAsyncDerived<T> {
|
||||
if self.value.blocking_read().is_none() {
|
||||
let handle = suspense_context.task_id();
|
||||
let ready = SpecialNonReactiveFuture::new(self.ready());
|
||||
Executor::spawn(async move {
|
||||
crate::spawn(async move {
|
||||
ready.await;
|
||||
drop(handle);
|
||||
});
|
||||
self.inner
|
||||
.write()
|
||||
.or_poisoned()
|
||||
.suspenses
|
||||
.push(suspense_context);
|
||||
}
|
||||
}
|
||||
AsyncPlain::try_new(&self.value).map(ReadGuard::new)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> Trigger for ArcAsyncDerived<T> {
|
||||
fn trigger(&self) {
|
||||
impl<T: 'static> Notify for ArcAsyncDerived<T> {
|
||||
fn notify(&self) {
|
||||
Self::notify_subs(&self.wakers, &self.inner, &self.loading, None);
|
||||
}
|
||||
}
|
||||
@@ -519,6 +614,13 @@ impl<T: 'static> Writeable for ArcAsyncDerived<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> IsDisposed for ArcAsyncDerived<T> {
|
||||
#[inline(always)]
|
||||
fn is_disposed(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static> ToAnySource for ArcAsyncDerived<T> {
|
||||
fn to_any_source(&self) -> AnySource {
|
||||
AnySource(
|
||||
|
||||
@@ -4,14 +4,16 @@ use crate::{
|
||||
AnySource, AnySubscriber, ReactiveNode, Source, Subscriber,
|
||||
ToAnySource, ToAnySubscriber,
|
||||
},
|
||||
owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
|
||||
owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
|
||||
signal::guards::{AsyncPlain, ReadGuard, WriteGuard},
|
||||
traits::{
|
||||
DefinedAt, Dispose, ReadUntracked, Trigger, UntrackableGuard, Writeable,
|
||||
DefinedAt, Dispose, IsDisposed, Notify, ReadUntracked,
|
||||
UntrackableGuard, Writeable,
|
||||
},
|
||||
unwrap_signal,
|
||||
};
|
||||
use core::fmt::Debug;
|
||||
use send_wrapper::SendWrapper;
|
||||
use std::{future::Future, ops::DerefMut, panic::Location};
|
||||
|
||||
/// A reactive value that is derived by running an asynchronous computation in response to changes
|
||||
@@ -83,7 +85,7 @@ use std::{future::Future, ops::DerefMut, panic::Location};
|
||||
pub struct AsyncDerived<T, S = SyncStorage> {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: &'static Location<'static>,
|
||||
pub(crate) inner: StoredValue<ArcAsyncDerived<T>, S>,
|
||||
pub(crate) inner: ArenaItem<ArcAsyncDerived<T>, S>,
|
||||
}
|
||||
|
||||
impl<T, S> Dispose for AsyncDerived<T, S> {
|
||||
@@ -102,7 +104,7 @@ where
|
||||
Self {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at,
|
||||
inner: StoredValue::new_with_storage(value),
|
||||
inner: ArenaItem::new_with_storage(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -117,7 +119,7 @@ where
|
||||
Self {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at,
|
||||
inner: StoredValue::new_with_storage(value),
|
||||
inner: ArenaItem::new_with_storage(value),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -139,7 +141,7 @@ where
|
||||
Self {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
inner: StoredValue::new_with_storage(ArcAsyncDerived::new(fun)),
|
||||
inner: ArenaItem::new_with_storage(ArcAsyncDerived::new(fun)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -157,13 +159,28 @@ where
|
||||
Self {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
inner: StoredValue::new_with_storage(
|
||||
inner: ArenaItem::new_with_storage(
|
||||
ArcAsyncDerived::new_with_initial(initial_value, fun),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> AsyncDerived<SendWrapper<T>> {
|
||||
#[doc(hidden)]
|
||||
pub fn new_mock<Fut>(fun: impl Fn() -> Fut + 'static) -> Self
|
||||
where
|
||||
T: 'static,
|
||||
Fut: Future<Output = T> + 'static,
|
||||
{
|
||||
Self {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
inner: ArenaItem::new_with_storage(ArcAsyncDerived::new_mock(fun)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> AsyncDerived<T, LocalStorage>
|
||||
where
|
||||
T: 'static,
|
||||
@@ -181,7 +198,7 @@ where
|
||||
Self {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
inner: StoredValue::new_with_storage(ArcAsyncDerived::new_unsync(
|
||||
inner: ArenaItem::new_with_storage(ArcAsyncDerived::new_unsync(
|
||||
fun,
|
||||
)),
|
||||
}
|
||||
@@ -202,26 +219,11 @@ where
|
||||
Self {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
inner: StoredValue::new_with_storage(
|
||||
inner: ArenaItem::new_with_storage(
|
||||
ArcAsyncDerived::new_unsync_with_initial(initial_value, fun),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn new_mock_unsync<Fut>(fun: impl Fn() -> Fut + 'static) -> Self
|
||||
where
|
||||
T: 'static,
|
||||
Fut: Future<Output = T> + 'static,
|
||||
{
|
||||
Self {
|
||||
#[cfg(debug_assertions)]
|
||||
defined_at: Location::caller(),
|
||||
inner: StoredValue::new_with_storage(
|
||||
ArcAsyncDerived::new_mock_unsync(fun),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> AsyncDerived<T, S>
|
||||
@@ -288,13 +290,13 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> Trigger for AsyncDerived<T, S>
|
||||
impl<T, S> Notify for AsyncDerived<T, S>
|
||||
where
|
||||
T: 'static,
|
||||
S: Storage<ArcAsyncDerived<T>>,
|
||||
{
|
||||
fn trigger(&self) {
|
||||
self.inner.try_with_value(|inner| inner.trigger());
|
||||
fn notify(&self) {
|
||||
self.inner.try_with_value(|inner| inner.notify());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -319,6 +321,16 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> IsDisposed for AsyncDerived<T, S>
|
||||
where
|
||||
T: 'static,
|
||||
S: Storage<ArcAsyncDerived<T>>,
|
||||
{
|
||||
fn is_disposed(&self) -> bool {
|
||||
self.inner.is_disposed()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, S> ToAnySource for AsyncDerived<T, S>
|
||||
where
|
||||
T: 'static,
|
||||
|
||||
@@ -1,7 +1,9 @@
use super::{ArcAsyncDerived, AsyncDerived};
use super::{inner::ArcAsyncDerivedInner, ArcAsyncDerived, AsyncDerived};
use crate::{
    computed::suspense::SuspenseContext,
    diagnostics::SpecialNonReactiveZone,
    graph::{AnySource, ToAnySource},
    owner::Storage,
    owner::{use_context, Storage},
    signal::guards::{AsyncPlain, Mapped, ReadGuard},
    traits::{DefinedAt, Track},
    unwrap_signal,
@@ -36,6 +38,8 @@ impl Future for AsyncDerivedReadyFuture {
    type Output = ();

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        #[cfg(debug_assertions)]
        let _guard = SpecialNonReactiveZone::enter();
        let waker = cx.waker();
        self.source.track();
        if self.loading.load(Ordering::Relaxed) {
@@ -60,6 +64,7 @@ where
            value: Arc::clone(&self.value),
            loading: Arc::clone(&self.loading),
            wakers: Arc::clone(&self.wakers),
            inner: Arc::clone(&self.inner),
        }
    }
}
@@ -89,6 +94,7 @@ pub struct AsyncDerivedFuture<T> {
    value: Arc<async_lock::RwLock<Option<T>>>,
    loading: Arc<AtomicBool>,
    wakers: Arc<RwLock<Vec<Waker>>>,
    inner: Arc<RwLock<ArcAsyncDerivedInner>>,
}

impl<T> Future for AsyncDerivedFuture<T>
@@ -99,9 +105,20 @@ where

    #[track_caller]
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        #[cfg(debug_assertions)]
        let _guard = SpecialNonReactiveZone::enter();
        let waker = cx.waker();
        self.source.track();
        let value = self.value.read_arc();

        if let Some(suspense_context) = use_context::<SuspenseContext>() {
            self.inner
                .write()
                .or_poisoned()
                .suspenses
                .push(suspense_context);
        }

        pin_mut!(value);
        match (self.loading.load(Ordering::Relaxed), value.poll(cx)) {
            (true, _) => {
@@ -163,6 +180,8 @@ where
    type Output = AsyncDerivedGuard<T>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        #[cfg(debug_assertions)]
        let _guard = SpecialNonReactiveZone::enter();
        let waker = cx.waker();
        self.source.track();
        let value = self.value.read_arc();
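The `poll` implementations above all follow the same pattern: re-track the source, then consult a shared `loading` flag and park the waker until the async work finishes. Below is a minimal standalone sketch of that flag-plus-wakers pattern with hypothetical types (not the crate's own); the demo uses the futures crate's `block_on`.

use std::{
    future::Future,
    pin::Pin,
    sync::{
        atomic::{AtomicBool, Ordering},
        Arc, Mutex,
    },
    task::{Context, Poll, Waker},
};

// Resolves once `loading` is set back to `false` by whoever runs the work.
struct ReadyWhenLoaded {
    loading: Arc<AtomicBool>,
    wakers: Arc<Mutex<Vec<Waker>>>,
}

impl Future for ReadyWhenLoaded {
    type Output = ();

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
        if self.loading.load(Ordering::Relaxed) {
            // Still loading: remember the waker so the worker can wake us
            // when it clears the flag, then yield.
            self.wakers.lock().unwrap().push(cx.waker().clone());
            Poll::Pending
        } else {
            Poll::Ready(())
        }
    }
}

fn main() {
    let fut = ReadyWhenLoaded {
        loading: Arc::new(AtomicBool::new(false)),
        wakers: Arc::new(Mutex::new(Vec::new())),
    };
    // Nothing is loading, so this resolves immediately.
    futures::executor::block_on(fut);
}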
@@ -1,5 +1,6 @@
use crate::{
    channel::Sender,
    computed::suspense::SuspenseContext,
    graph::{
        AnySource, AnySubscriber, ReactiveNode, Source, SourceSet, Subscriber,
        SubscriberSet,
@@ -18,18 +19,32 @@ pub(crate) struct ArcAsyncDerivedInner {
    pub subscribers: SubscriberSet,
    // when a source changes, notifying this will cause the async work to rerun
    pub notifier: Sender,
    pub dirty: bool,
    pub state: AsyncDerivedState,
    pub version: usize,
    pub suspenses: Vec<SuspenseContext>,
}

#[derive(Debug, PartialEq, Eq)]
pub(crate) enum AsyncDerivedState {
    Clean,
    Dirty,
    Notifying,
}

impl ReactiveNode for RwLock<ArcAsyncDerivedInner> {
    fn mark_dirty(&self) {
        let mut lock = self.write().or_poisoned();
        lock.dirty = true;
        lock.notifier.notify();
        if lock.state != AsyncDerivedState::Notifying {
            lock.state = AsyncDerivedState::Dirty;
            lock.notifier.notify();
        }
    }

    fn mark_check(&self) {
        self.write().or_poisoned().notifier.notify();
        let mut lock = self.write().or_poisoned();
        if lock.state != AsyncDerivedState::Notifying {
            lock.notifier.notify();
        }
    }

    fn mark_subscribers_check(&self) {
@@ -41,11 +56,14 @@ impl ReactiveNode for RwLock<ArcAsyncDerivedInner> {

    fn update_if_necessary(&self) -> bool {
        let mut guard = self.write().or_poisoned();
        let (is_dirty, sources) =
            (guard.dirty, (!guard.dirty).then(|| guard.sources.clone()));
        let (is_dirty, sources) = (
            guard.state == AsyncDerivedState::Dirty,
            (guard.state != AsyncDerivedState::Notifying)
                .then(|| guard.sources.clone()),
        );

        if is_dirty {
            guard.dirty = false;
            guard.state = AsyncDerivedState::Clean;
            return true;
        }
        drop(guard);
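Replacing the single `dirty: bool` with the three-state `AsyncDerivedState` means a node that is currently notifying is not re-marked dirty and does not re-wake its worker. A rough standalone sketch of that guard, with simplified stand-in types rather than the crate's own:

#[derive(Debug, PartialEq, Eq)]
enum State {
    Clean,
    Dirty,
    Notifying,
}

struct Node {
    state: State,
    wakeups: u32, // stands in for calls to `notifier.notify()`
}

impl Node {
    fn mark_dirty(&mut self) {
        // Mirrors `mark_dirty` above: while the node is mid-notification,
        // further dirty marks are ignored instead of re-triggering the work.
        if self.state != State::Notifying {
            self.state = State::Dirty;
            self.wakeups += 1;
        }
    }

    fn update_if_necessary(&mut self) -> bool {
        if self.state == State::Dirty {
            self.state = State::Clean;
            return true;
        }
        false
    }
}

fn main() {
    let mut node = Node { state: State::Clean, wakeups: 0 };
    node.mark_dirty();
    assert!(node.update_if_necessary());
    assert_eq!(node.state, State::Clean);
}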
@@ -1,6 +1,6 @@
use super::{inner::MemoInner, ArcMemo};
use crate::{
    owner::{FromLocal, LocalStorage, Storage, StoredValue, SyncStorage},
    owner::{ArenaItem, FromLocal, LocalStorage, Storage, SyncStorage},
    signal::{
        guards::{Mapped, Plain, ReadGuard},
        ArcReadSignal,
@@ -102,7 +102,7 @@ where
{
    #[cfg(debug_assertions)]
    defined_at: &'static Location<'static>,
    inner: StoredValue<ArcMemo<T, S>, S>,
    inner: ArenaItem<ArcMemo<T, S>, S>,
}

impl<T, S> Dispose for Memo<T, S>
@@ -123,7 +123,7 @@ where
        Self {
            #[cfg(debug_assertions)]
            defined_at: Location::caller(),
            inner: StoredValue::new_with_storage(value),
            inner: ArenaItem::new_with_storage(value),
        }
    }
}
@@ -137,7 +137,7 @@ where
        Self {
            #[cfg(debug_assertions)]
            defined_at: Location::caller(),
            inner: StoredValue::new_with_storage(value),
            inner: ArenaItem::new_with_storage(value),
        }
    }
}
@@ -177,7 +177,7 @@ where
        Self {
            #[cfg(debug_assertions)]
            defined_at: Location::caller(),
            inner: StoredValue::new_with_storage(ArcMemo::new(fun)),
            inner: ArenaItem::new_with_storage(ArcMemo::new(fun)),
        }
    }

@@ -202,7 +202,7 @@ where
        Self {
            #[cfg(debug_assertions)]
            defined_at: Location::caller(),
            inner: StoredValue::new_with_storage(ArcMemo::new_with_compare(
            inner: ArenaItem::new_with_storage(ArcMemo::new_with_compare(
                fun, changed,
            )),
        }
@@ -229,7 +229,7 @@ where
        Self {
            #[cfg(debug_assertions)]
            defined_at: Location::caller(),
            inner: StoredValue::new_with_storage(ArcMemo::new_owning(fun)),
            inner: ArenaItem::new_with_storage(ArcMemo::new_owning(fun)),
        }
    }
}
@@ -30,7 +30,7 @@ use std::{
/// let a = RwSignal::new(0);
/// let is_selected = Selector::new(move || a.get());
/// let total_notifications = StoredValue::new(0);
/// Effect::new({
/// Effect::new_isomorphic({
///     let is_selected = is_selected.clone();
///     move |_| {
///         if is_selected.selected(5) {
@@ -55,7 +55,7 @@ use std::{
///
/// # any_spawner::Executor::tick().await;
/// assert_eq!(is_selected.selected(5), false);
/// # });
/// # }).await;
/// # });
/// ```
#[derive(Clone)]
@@ -74,17 +74,17 @@ where

impl<T> Selector<T>
where
    T: PartialEq + Eq + Clone + Hash + 'static,
    T: PartialEq + Send + Sync + Eq + Clone + Hash + 'static,
{
    /// Creates a new selector that compares values using [`PartialEq`].
    pub fn new(source: impl Fn() -> T + Clone + 'static) -> Self {
    pub fn new(source: impl Fn() -> T + Send + Sync + Clone + 'static) -> Self {
        Self::new_with_fn(source, PartialEq::eq)
    }

    /// Creates a new selector that compares values by returning `true` from a comparator function
    /// if the values are the same.
    pub fn new_with_fn(
        source: impl Fn() -> T + Clone + 'static,
        source: impl Fn() -> T + Clone + Send + Sync + 'static,
        f: impl Fn(&T, &T) -> bool + Send + Sync + Clone + 'static,
    ) -> Self {
        let subs: Arc<RwLock<FxHashMap<T, ArcRwSignal<bool>>>> =
@@ -92,7 +92,7 @@ where
        let v: Arc<RwLock<Option<T>>> = Default::default();
        let f = Arc::new(f) as Arc<dyn Fn(&T, &T) -> bool + Send + Sync>;

        let effect = Arc::new(RenderEffect::new({
        let effect = Arc::new(RenderEffect::new_isomorphic({
            let subs = Arc::clone(&subs);
            let f = Arc::clone(&f);
            let v = Arc::clone(&v);
@@ -5,7 +5,7 @@ use crate::{
        AnySubscriber, ReactiveNode, SourceSet, Subscriber, ToAnySubscriber,
        WithObserver,
    },
    owner::{LocalStorage, Owner, Storage, StoredValue, SyncStorage},
    owner::{ArenaItem, LocalStorage, Owner, Storage, SyncStorage},
    traits::Dispose,
};
use any_spawner::Executor;
@@ -40,9 +40,10 @@ use std::{
/// # use reactive_graph::signal::*;
/// # use reactive_graph::prelude::*;
/// # use reactive_graph::effect::Effect;
/// # use reactive_graph::owner::StoredValue;
/// # use reactive_graph::owner::ArenaItem;
/// # tokio_test::block_on(async move {
/// # tokio::task::LocalSet::new().run_until(async move {
/// # any_spawner::Executor::init_tokio();
/// let a = RwSignal::new(0);
/// let b = RwSignal::new(0);
///
@@ -52,7 +53,9 @@ use std::{
///     println!("Value: {}", a.get());
/// });
///
/// # assert_eq!(a.get(), 0);
/// a.set(1);
/// # assert_eq!(a.get(), 1);
/// // ✅ because it's subscribed to `a`, the effect reruns and prints "Value: 1"
///
/// // ❌ don't use effects to synchronize state within the reactive system
@@ -61,7 +64,7 @@ use std::{
///     // and easily lead to problems like infinite loops
///     b.set(a.get() + 1);
/// });
/// # });
/// # }).await;
/// # });
/// ```
/// ## Web-Specific Notes
@@ -75,7 +78,7 @@ use std::{
/// If you need an effect to run on the server, use [`Effect::new_isomorphic`].
#[derive(Debug, Clone, Copy)]
pub struct Effect<S> {
    inner: Option<StoredValue<StoredEffect, S>>,
    inner: Option<ArenaItem<StoredEffect, S>>,
}

type StoredEffect = Option<Arc<RwLock<EffectInner>>>;
@@ -162,7 +165,7 @@ impl Effect<LocalStorage> {
                }
            });

            StoredValue::new_with_storage(Some(inner))
            ArenaItem::new_with_storage(Some(inner))
        });

        Self { inner }
@@ -182,6 +185,7 @@ impl Effect<LocalStorage> {
    /// # use reactive_graph::signal::signal;
    /// # tokio_test::block_on(async move {
    /// # tokio::task::LocalSet::new().run_until(async move {
    /// # any_spawner::Executor::init_tokio();
    /// #
    /// let (num, set_num) = signal(0);
    ///
@@ -192,13 +196,16 @@ impl Effect<LocalStorage> {
    ///     },
    ///     false,
    /// );
    /// # assert_eq!(num.get(), 0);
    ///
    /// set_num.set(1); // > "Number: 1; Prev: Some(0)"
    /// # assert_eq!(num.get(), 1);
    ///
    /// effect.stop(); // stop watching
    ///
    /// set_num.set(2); // (nothing happens)
    /// # });
    /// # assert_eq!(num.get(), 2);
    /// # }).await;
    /// # });
    /// ```
    ///
@@ -210,6 +217,7 @@ impl Effect<LocalStorage> {
    /// # use reactive_graph::signal::signal;
    /// # tokio_test::block_on(async move {
    /// # tokio::task::LocalSet::new().run_until(async move {
    /// # any_spawner::Executor::init_tokio();
    /// #
    /// let (num, set_num) = signal(0);
    /// let (cb_num, set_cb_num) = signal(0);
@@ -222,12 +230,17 @@ impl Effect<LocalStorage> {
    ///     false,
    /// );
    ///
    /// # assert_eq!(num.get(), 0);
    /// set_num.set(1); // > "Number: 1; Cb: 0"
    /// # assert_eq!(num.get(), 1);
    ///
    /// # assert_eq!(cb_num.get(), 0);
    /// set_cb_num.set(1); // (nothing happens)
    /// # assert_eq!(cb_num.get(), 1);
    ///
    /// set_num.set(2); // > "Number: 2; Cb: 1"
    /// # });
    /// # assert_eq!(num.get(), 2);
    /// # }).await;
    /// # });
    /// ```
    ///
@@ -243,6 +256,7 @@ impl Effect<LocalStorage> {
    /// # use reactive_graph::signal::signal;
    /// # tokio_test::block_on(async move {
    /// # tokio::task::LocalSet::new().run_until(async move {
    /// # any_spawner::Executor::init_tokio();
    /// #
    /// let (num, set_num) = signal(0);
    ///
@@ -254,8 +268,10 @@ impl Effect<LocalStorage> {
    ///     true,
    /// ); // > "Number: 0; Prev: None"
    ///
    /// # assert_eq!(num.get(), 0);
    /// set_num.set(1); // > "Number: 1; Prev: Some(0)"
    /// # });
    /// # assert_eq!(num.get(), 1);
    /// # }).await;
    /// # });
    /// ```
    pub fn watch<D, T>(
@@ -318,7 +334,7 @@ impl Effect<LocalStorage> {
                }
            });

            StoredValue::new_with_storage(Some(inner))
            ArenaItem::new_with_storage(Some(inner))
        });

        Self { inner }
@@ -342,7 +358,7 @@ impl Effect<SyncStorage> {
        let mut first_run = true;
        let value = Arc::new(RwLock::new(None::<T>));

        Executor::spawn({
        crate::spawn({
            let value = Arc::clone(&value);
            let subscriber = inner.to_any_subscriber();

@@ -367,7 +383,7 @@ impl Effect<SyncStorage> {
                }
            });

            StoredValue::new_with_storage(Some(inner))
            ArenaItem::new_with_storage(Some(inner))
        });

        Self { inner }
@@ -387,7 +403,7 @@ impl Effect<SyncStorage> {
        let mut first_run = true;
        let value = Arc::new(RwLock::new(None::<T>));

        Executor::spawn({
        let task = {
            let value = Arc::clone(&value);
            let subscriber = inner.to_any_subscriber();

@@ -409,10 +425,12 @@ impl Effect<SyncStorage> {
                    }
                }
            }
        });
        };

        crate::spawn(task);

        Self {
            inner: Some(StoredValue::new_with_storage(Some(inner))),
            inner: Some(ArenaItem::new_with_storage(Some(inner))),
        }
    }

@@ -435,7 +453,7 @@ impl Effect<SyncStorage> {
        let watch_value = Arc::new(RwLock::new(None::<T>));

        let inner = cfg!(feature = "effects").then(|| {
            Executor::spawn({
            crate::spawn({
                let dep_value = Arc::clone(&dep_value);
                let watch_value = Arc::clone(&watch_value);
                let subscriber = inner.to_any_subscriber();
@@ -480,7 +498,7 @@ impl Effect<SyncStorage> {
                }
            });

            StoredValue::new_with_storage(Some(inner))
            ArenaItem::new_with_storage(Some(inner))
        });

        Self { inner }
@@ -135,44 +135,50 @@ where
{
    /// Creates a render effect that will run whether the `effects` feature is enabled or not.
    pub fn new_isomorphic(
        mut fun: impl FnMut(Option<T>) -> T + Send + 'static,
        fun: impl FnMut(Option<T>) -> T + Send + Sync + 'static,
    ) -> Self {
        let (mut observer, mut rx) = channel();
        observer.notify();
        fn erased<T: Send + Sync + 'static>(
            mut fun: Box<dyn FnMut(Option<T>) -> T + Send + Sync + 'static>,
        ) -> RenderEffect<T> {
            let (observer, mut rx) = channel();
            let value = Arc::new(RwLock::new(None::<T>));
            let owner = Owner::new();
            let inner = Arc::new(RwLock::new(EffectInner {
                dirty: false,
                observer,
                sources: SourceSet::new(),
            }));

        let value = Arc::new(RwLock::new(None::<T>));
        let owner = Owner::new();
        let inner = Arc::new(RwLock::new(EffectInner {
            dirty: false,
            observer,
            sources: SourceSet::new(),
        }));
        let mut first_run = true;
            let initial_value = owner
                .with(|| inner.to_any_subscriber().with_observer(|| fun(None)));
            *value.write().or_poisoned() = Some(initial_value);

        Executor::spawn({
            let value = Arc::clone(&value);
            let subscriber = inner.to_any_subscriber();
            crate::spawn({
                let value = Arc::clone(&value);
                let subscriber = inner.to_any_subscriber();

            async move {
                while rx.next().await.is_some() {
                    if first_run
                        || subscriber
                async move {
                    while rx.next().await.is_some() {
                        if subscriber
                            .with_observer(|| subscriber.update_if_necessary())
                        {
                            first_run = false;
                            subscriber.clear_sources(&subscriber);
                        {
                            subscriber.clear_sources(&subscriber);

                            let old_value =
                                mem::take(&mut *value.write().or_poisoned());
                            let new_value = owner.with_cleanup(|| {
                                subscriber.with_observer(|| fun(old_value))
                            });
                            *value.write().or_poisoned() = Some(new_value);
                            let old_value =
                                mem::take(&mut *value.write().or_poisoned());
                            let new_value = owner.with_cleanup(|| {
                                subscriber.with_observer(|| fun(old_value))
                            });
                            *value.write().or_poisoned() = Some(new_value);
                        }
                    }
                }
            }
        });
            RenderEffect { value, inner }
            });

            RenderEffect { value, inner }
        }

        erased(Box::new(fun))
    }
}
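The refactor above moves the body of `new_isomorphic` into a non-generic inner `erased` function that takes a boxed closure, so the large async body is compiled once per value type rather than once per closure type. The same pattern in isolation, as a generic sketch rather than the crate's code:

// Public generic entry point: a thin wrapper that boxes the closure.
pub fn run_twice<T: 'static>(fun: impl FnMut(Option<T>) -> T + 'static) {
    fn erased<T: 'static>(mut fun: Box<dyn FnMut(Option<T>) -> T + 'static>) {
        // All the heavy work lives here; this function is monomorphized
        // once per `T`, not once per concrete closure type.
        let first = fun(None);
        let _second = fun(Some(first));
    }

    erased(Box::new(fun))
}

fn main() {
    run_twice(|prev: Option<u32>| prev.map(|n| n + 1).unwrap_or(0));
}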
@@ -1,10 +1,10 @@
use super::{node::ReactiveNode, AnySubscriber};
use crate::traits::DefinedAt;
use crate::traits::{DefinedAt, IsDisposed};
use core::{fmt::Debug, hash::Hash};
use std::{panic::Location, sync::Weak};

/// Abstracts over the type of any reactive source.
pub trait ToAnySource {
pub trait ToAnySource: IsDisposed {
    /// Converts this type to its type-erased equivalent.
    fn to_any_source(&self) -> AnySource;
}
@@ -62,6 +62,13 @@ impl PartialEq for AnySource {

impl Eq for AnySource {}

impl IsDisposed for AnySource {
    #[inline(always)]
    fn is_disposed(&self) -> bool {
        false
    }
}

impl ToAnySource for AnySource {
    fn to_any_source(&self) -> AnySource {
        self.clone()
@@ -1,9 +1,17 @@
use super::{node::ReactiveNode, AnySource};
#[cfg(debug_assertions)]
use crate::diagnostics::SpecialNonReactiveZone;
use core::{fmt::Debug, hash::Hash};
use std::{cell::RefCell, mem, sync::Weak};

thread_local! {
    static OBSERVER: RefCell<Option<AnySubscriber>> = const { RefCell::new(None) };
    static OBSERVER: RefCell<Option<ObserverState>> = const { RefCell::new(None) };
}

#[derive(Debug)]
struct ObserverState {
    subscriber: AnySubscriber,
    untracked: bool,
}

/// The current reactive observer.
@@ -25,24 +33,67 @@ impl Drop for SetObserverOnDrop {
impl Observer {
    /// Returns the current observer, if any.
    pub fn get() -> Option<AnySubscriber> {
        OBSERVER.with_borrow(Clone::clone)
        OBSERVER.with_borrow(|obs| {
            obs.as_ref().and_then(|obs| {
                if obs.untracked {
                    None
                } else {
                    Some(obs.subscriber.clone())
                }
            })
        })
    }

    pub(crate) fn is(observer: &AnySubscriber) -> bool {
        OBSERVER.with_borrow(|o| o.as_ref() == Some(observer))
        OBSERVER.with_borrow(|o| {
            o.as_ref().map(|o| &o.subscriber) == Some(observer)
        })
    }

    fn take() -> SetObserverOnDrop {
        SetObserverOnDrop(OBSERVER.with_borrow_mut(Option::take))
        SetObserverOnDrop(
            OBSERVER.with_borrow_mut(Option::take).map(|o| o.subscriber),
        )
    }

    fn set(observer: Option<AnySubscriber>) {
        OBSERVER.with_borrow_mut(|o| *o = observer);
        OBSERVER.with_borrow_mut(|o| {
            *o = observer.map(|subscriber| ObserverState {
                subscriber,
                untracked: false,
            })
        });
    }

    fn replace(observer: Option<AnySubscriber>) -> SetObserverOnDrop {
        SetObserverOnDrop(
            OBSERVER.with(|o| mem::replace(&mut *o.borrow_mut(), observer)),
            OBSERVER
                .with(|o| {
                    mem::replace(
                        &mut *o.borrow_mut(),
                        observer.map(|subscriber| ObserverState {
                            subscriber,
                            untracked: false,
                        }),
                    )
                })
                .map(|o| o.subscriber),
        )
    }

    fn replace_untracked(observer: Option<AnySubscriber>) -> SetObserverOnDrop {
        SetObserverOnDrop(
            OBSERVER
                .with(|o| {
                    mem::replace(
                        &mut *o.borrow_mut(),
                        observer.map(|subscriber| ObserverState {
                            subscriber,
                            untracked: true,
                        }),
                    )
                })
                .map(|o| o.subscriber),
        )
    }
}
@@ -155,22 +206,38 @@ impl ReactiveNode for AnySubscriber {
pub trait WithObserver {
    /// Runs the given function with this subscriber as the thread-local [`Observer`].
    fn with_observer<T>(&self, fun: impl FnOnce() -> T) -> T;

    /// Runs the given function with this subscriber as the thread-local [`Observer`],
    /// but without tracking dependencies.
    fn with_observer_untracked<T>(&self, fun: impl FnOnce() -> T) -> T;
}

impl WithObserver for AnySubscriber {
    /// Runs the given function with this subscriber as the thread-local [`Observer`].
    fn with_observer<T>(&self, fun: impl FnOnce() -> T) -> T {
        let _prev = Observer::replace(Some(self.clone()));
        fun()
    }

    fn with_observer_untracked<T>(&self, fun: impl FnOnce() -> T) -> T {
        #[cfg(debug_assertions)]
        let _guard = SpecialNonReactiveZone::enter();
        let _prev = Observer::replace_untracked(Some(self.clone()));
        fun()
    }
}

impl WithObserver for Option<AnySubscriber> {
    /// Runs the given function with this subscriber as the thread-local [`Observer`].
    fn with_observer<T>(&self, fun: impl FnOnce() -> T) -> T {
        let _prev = Observer::replace(self.clone());
        fun()
    }

    fn with_observer_untracked<T>(&self, fun: impl FnOnce() -> T) -> T {
        #[cfg(debug_assertions)]
        let _guard = SpecialNonReactiveZone::enter();
        let _prev = Observer::replace_untracked(self.clone());
        fun()
    }
}

impl Debug for AnySubscriber {
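The new `ObserverState` keeps the current subscriber alongside an `untracked` flag, so `Observer::get` can hide the observer during untracked runs without actually removing it. A small standalone sketch of that thread-local pattern, with hypothetical names:

use std::cell::RefCell;

#[derive(Clone)]
struct Subscriber(&'static str);

struct ObserverState {
    subscriber: Subscriber,
    untracked: bool,
}

thread_local! {
    static OBSERVER: RefCell<Option<ObserverState>> = const { RefCell::new(None) };
}

// Returns the observer only if tracking is currently enabled.
fn current_tracked_observer() -> Option<Subscriber> {
    OBSERVER.with_borrow(|o| {
        o.as_ref().and_then(|o| {
            if o.untracked {
                None
            } else {
                Some(o.subscriber.clone())
            }
        })
    })
}

fn main() {
    OBSERVER.set(Some(ObserverState {
        subscriber: Subscriber("effect-1"),
        untracked: true,
    }));
    // Untracked: callers see no observer, so no dependencies get registered.
    assert!(current_tracked_observer().is_none());
}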
@@ -71,7 +71,7 @@
#![cfg_attr(feature = "nightly", feature(fn_traits))]
#![deny(missing_docs)]

use std::fmt::Arguments;
use std::{fmt::Arguments, future::Future};

pub mod actions;
pub(crate) mod channel;
@@ -99,7 +99,8 @@ pub mod prelude {

// TODO remove this, it's just useful while developing
#[allow(unused)]
fn log_warning(text: Arguments) {
#[doc(hidden)]
pub fn log_warning(text: Arguments) {
    #[cfg(feature = "tracing")]
    {
        tracing::warn!(text);
@@ -120,3 +121,12 @@ fn log_warning(text: Arguments) {
        eprintln!("{}", text);
    }
}

/// Calls [`Executor::spawn`], but ensures that the task also runs in the current arena, if
/// multithreaded arena sandboxing is enabled.
pub(crate) fn spawn(task: impl Future<Output = ()> + Send + 'static) {
    #[cfg(feature = "sandboxed-arenas")]
    let task = owner::Sandboxed::new(task);

    any_spawner::Executor::spawn(task);
}
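`spawn` wraps the task in `Sandboxed` before handing it to the executor, so that polls on other executor threads still see the arena that was current when the task was created. A simplified sketch of that "restore a thread-local around every poll" wrapper, using a hypothetical type rather than the crate's `Sandboxed`:

use std::{
    cell::Cell,
    future::Future,
    pin::Pin,
    task::{Context, Poll},
};

thread_local! {
    // Stand-in for the "current arena" that the real wrapper restores.
    static CURRENT_ARENA: Cell<u64> = const { Cell::new(0) };
}

struct WithArena<F> {
    arena: u64,
    inner: F,
}

impl<F: Future + Unpin> Future for WithArena<F> {
    type Output = F::Output;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Switch to this task's arena for the duration of the poll, then
        // restore whatever the executor thread was using before.
        let arena = self.arena;
        let prev = CURRENT_ARENA.with(|c| c.replace(arena));
        let result = Pin::new(&mut self.inner).poll(cx);
        CURRENT_ARENA.with(|c| c.set(prev));
        result
    }
}

fn main() {
    let fut = WithArena { arena: 42, inner: std::future::ready(()) };
    futures::executor::block_on(fut);
}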
@@ -13,24 +13,25 @@ use std::{
};

mod arena;
mod arena_item;
mod context;
mod storage;
mod stored_value;
use self::arena::Arena;
#[cfg(feature = "sandboxed-arenas")]
pub use arena::sandboxed::Sandboxed;
use arena::NodeId;
pub use arena_item::*;
pub use context::*;
pub use storage::*;
#[allow(deprecated)] // allow exporting deprecated fn
pub use stored_value::{
    store_value, FromLocal, LocalStorage, Storage, StorageAccess, StoredValue,
    SyncStorage,
};
pub use stored_value::{store_value, FromLocal, StoredValue};

/// A reactive owner, which manages
/// 1) the cancelation of [`Effect`](crate::effect::Effect)s,
/// 2) providing and accessing environment data via [`provide_context`] and [`use_context`],
/// 3) running cleanup functions defined via [`Owner::on_cleanup`], and
/// 4) an arena storage system to provide `Copy` handles via [`StoredValue`], which is what allows
/// 4) an arena storage system to provide `Copy` handles via [`ArenaItem`], which is what allows
///    types like [`RwSignal`](crate::signal::RwSignal), [`Memo`](crate::computed::Memo), and so on to be `Copy`.
///
/// Every effect and computed reactive value has an associated `Owner`. While it is running, this
@@ -55,6 +56,12 @@ pub struct Owner {
    pub(crate) shared_context: Option<Arc<dyn SharedContext + Send + Sync>>,
}

impl PartialEq for Owner {
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.inner, &other.inner)
    }
}

thread_local! {
    static OWNER: RefCell<Option<Owner>> = Default::default();
}
@@ -203,7 +210,7 @@ impl Owner {
    /// Cleans up this owner in the following order:
    /// 1) Runs `cleanup` on all children,
    /// 2) Runs all cleanup functions registered with [`Owner::on_cleanup`],
    /// 3) Drops the values of any arena-allocated [`StoredValue`]s.
    /// 3) Drops the values of any arena-allocated [`ArenaItem`]s.
    pub fn cleanup(&self) {
        self.inner.cleanup();
    }
@@ -241,6 +248,15 @@ impl Owner {
        self.shared_context.clone()
    }

    /// Removes this from its state as the thread-local owner and drops it.
    pub fn unset(self) {
        OWNER.with_borrow_mut(|owner| {
            if owner.as_ref() == Some(&self) {
                mem::take(owner);
            }
        })
    }

    /// Returns the current [`SharedContext`], if any.
    #[cfg(feature = "hydration")]
    pub fn current_shared_context(

@@ -124,7 +124,7 @@ pub mod sandboxed {
    }

    impl<T> Sandboxed<T> {
        /// Wraps the given [`Future`], ensuring that any [`StoredValue`] created while it is being
        /// Wraps the given [`Future`], ensuring that any [`ArenaItem`] created while it is being
        /// polled will be associated with the same arena that was active when this was called.
        pub fn new(inner: T) -> Self {
            let arena = MAP.with_borrow(|current| {
reactive_graph/src/owner/arena_item.rs (new file, 136 lines)
@@ -0,0 +1,136 @@
use super::{
    arena::{Arena, NodeId},
    LocalStorage, Storage, SyncStorage, OWNER,
};
use crate::traits::{Dispose, IsDisposed};
use send_wrapper::SendWrapper;
use std::{any::Any, hash::Hash, marker::PhantomData};

/// A copyable, stable reference for any value, stored on the arena whose ownership is managed by the
/// reactive ownership tree.
#[derive(Debug)]
pub struct ArenaItem<T, S = SyncStorage> {
    node: NodeId,
    #[allow(clippy::type_complexity)]
    ty: PhantomData<fn() -> (SendWrapper<T>, S)>,
}

impl<T, S> Copy for ArenaItem<T, S> {}

impl<T, S> Clone for ArenaItem<T, S> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T, S> PartialEq for ArenaItem<T, S> {
    fn eq(&self, other: &Self) -> bool {
        self.node == other.node
    }
}

impl<T, S> Eq for ArenaItem<T, S> {}

impl<T, S> Hash for ArenaItem<T, S> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.node.hash(state);
    }
}

impl<T, S> ArenaItem<T, S>
where
    T: 'static,
    S: Storage<T>,
{
    /// Stores the given value in the arena allocator.
    #[track_caller]
    pub fn new_with_storage(value: T) -> Self {
        let node = {
            Arena::with_mut(|arena| {
                arena.insert(
                    Box::new(S::wrap(value)) as Box<dyn Any + Send + Sync>
                )
            })
        };
        OWNER.with(|o| {
            if let Some(owner) = &*o.borrow() {
                owner.register(node);
            }
        });

        Self {
            node,
            ty: PhantomData,
        }
    }
}

impl<T, S> Default for ArenaItem<T, S>
where
    T: Default + 'static,
    S: Storage<T>,
{
    #[track_caller] // Default trait is not annotated with #[track_caller]
    fn default() -> Self {
        Self::new_with_storage(Default::default())
    }
}

impl<T> ArenaItem<T>
where
    T: Send + Sync + 'static,
{
    /// Stores the given value in the arena allocator.
    #[track_caller]
    pub fn new(value: T) -> Self {
        ArenaItem::new_with_storage(value)
    }
}

impl<T> ArenaItem<T, LocalStorage>
where
    T: 'static,
{
    /// Stores the given value in the arena allocator.
    #[track_caller]
    pub fn new_local(value: T) -> Self {
        ArenaItem::new_with_storage(value)
    }
}

impl<T, S: Storage<T>> ArenaItem<T, S> {
    /// Applies a function to a reference to the stored value and returns the result, or `None` if it has already been disposed.
    #[track_caller]
    pub fn try_with_value<U>(&self, fun: impl FnOnce(&T) -> U) -> Option<U> {
        S::try_with(self.node, fun)
    }

    /// Applies a function to a mutable reference to the stored value and returns the result, or `None` if it has already been disposed.
    #[track_caller]
    pub fn try_update_value<U>(
        &self,
        fun: impl FnOnce(&mut T) -> U,
    ) -> Option<U> {
        S::try_with_mut(self.node, fun)
    }
}

impl<T: Clone, S: Storage<T>> ArenaItem<T, S> {
    /// Returns a clone of the stored value, or `None` if it has already been disposed.
    #[track_caller]
    pub fn try_get_value(&self) -> Option<T> {
        S::try_with(self.node, Clone::clone)
    }
}

impl<T, S> IsDisposed for ArenaItem<T, S> {
    fn is_disposed(&self) -> bool {
        Arena::with(|arena| !arena.contains_key(self.node))
    }
}

impl<T, S> Dispose for ArenaItem<T, S> {
    fn dispose(self) {
        Arena::with_mut(|arena| arena.remove(self.node));
    }
}
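For readers skimming the new API, a short usage sketch of `ArenaItem` based on the methods shown above. The import paths follow the module layout in this diff, but the surrounding setup is an assumption: it presumes an owner and arena are already active in the default (non-sandboxed) configuration.

use reactive_graph::owner::{ArenaItem, Owner};
use reactive_graph::traits::Dispose; // path assumed from the imports above

fn demo() {
    // Values registered while an owner is active are dropped with it.
    let owner = Owner::new();
    owner.with(|| {
        // A `Copy` handle to an arena-allocated String (SyncStorage default).
        let item: ArenaItem<String> = ArenaItem::new("hello".to_string());

        assert_eq!(item.try_with_value(|s| s.len()), Some(5));
        item.try_update_value(|s| s.push('!'));
        assert_eq!(item.try_get_value().as_deref(), Some("hello!"));

        // Disposing removes it from the arena; later reads return None.
        item.dispose();
        assert_eq!(item.try_get_value(), None);
    });
}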
reactive_graph/src/owner/storage.rs (new file, 151 lines)
@@ -0,0 +1,151 @@
use super::arena::{Arena, NodeId};
use send_wrapper::SendWrapper;

/// A trait for borrowing and taking data.
pub trait StorageAccess<T> {
    /// Borrows the value.
    fn as_borrowed(&self) -> &T;

    /// Takes the value.
    fn into_taken(self) -> T;
}

impl<T> StorageAccess<T> for T {
    fn as_borrowed(&self) -> &T {
        self
    }

    fn into_taken(self) -> T {
        self
    }
}

impl<T> StorageAccess<T> for SendWrapper<T> {
    fn as_borrowed(&self) -> &T {
        self
    }

    fn into_taken(self) -> T {
        self.take()
    }
}

/// A way of storing a [`ArenaItem`], either as itself or with a wrapper to make it threadsafe.
///
/// This exists because all items stored in the arena must be `Send + Sync`, but in single-threaded
/// environments you might want or need to use thread-unsafe types.
pub trait Storage<T>: Send + Sync + 'static {
    /// The type being stored, once it has been wrapped.
    type Wrapped: StorageAccess<T> + Send + Sync + 'static;

    /// Adds any needed wrapper to the type.
    fn wrap(value: T) -> Self::Wrapped;

    /// Applies the given function to the stored value, if it exists and can be accessed from this
    /// thread.
    fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U>;

    /// Applies the given function to a mutable reference to the stored value, if it exists and can be accessed from this
    /// thread.
    fn try_with_mut<U>(
        node: NodeId,
        fun: impl FnOnce(&mut T) -> U,
    ) -> Option<U>;

    /// Sets a new value for the stored value. If it has been disposed, returns `Some(T)`.
    fn try_set(node: NodeId, value: T) -> Option<T>;
}

/// A form of [`Storage`] that stores the type as itself, with no wrapper.
#[derive(Debug, Copy, Clone)]
pub struct SyncStorage;

impl<T> Storage<T> for SyncStorage
where
    T: Send + Sync + 'static,
{
    type Wrapped = T;

    #[inline(always)]
    fn wrap(value: T) -> Self::Wrapped {
        value
    }

    fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U> {
        Arena::with(|arena| {
            let m = arena.get(node);
            m.and_then(|n| n.downcast_ref::<T>()).map(fun)
        })
    }

    fn try_with_mut<U>(
        node: NodeId,
        fun: impl FnOnce(&mut T) -> U,
    ) -> Option<U> {
        Arena::with_mut(|arena| {
            let m = arena.get_mut(node);
            m.and_then(|n| n.downcast_mut::<T>()).map(fun)
        })
    }

    fn try_set(node: NodeId, value: T) -> Option<T> {
        Arena::with_mut(|arena| {
            let m = arena.get_mut(node);
            match m.and_then(|n| n.downcast_mut::<T>()) {
                Some(inner) => {
                    *inner = value;
                    None
                }
                None => Some(value),
            }
        })
    }
}

/// A form of [`Storage`] that stores the type with a wrapper that makes it `Send + Sync`, but only
/// allows it to be accessed from the thread on which it was created.
#[derive(Debug, Copy, Clone)]
pub struct LocalStorage;

impl<T> Storage<T> for LocalStorage
where
    T: 'static,
{
    type Wrapped = SendWrapper<T>;

    fn wrap(value: T) -> Self::Wrapped {
        SendWrapper::new(value)
    }

    fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U> {
        Arena::with(|arena| {
            let m = arena.get(node);
            m.and_then(|n| n.downcast_ref::<SendWrapper<T>>())
                .map(|inner| fun(inner))
        })
    }

    fn try_with_mut<U>(
        node: NodeId,
        fun: impl FnOnce(&mut T) -> U,
    ) -> Option<U> {
        Arena::with_mut(|arena| {
            let m = arena.get_mut(node);
            m.and_then(|n| n.downcast_mut::<SendWrapper<T>>())
                .map(|inner| fun(&mut *inner))
        })
    }

    fn try_set(node: NodeId, value: T) -> Option<T> {
        Arena::with_mut(|arena| {
            let m = arena.get_mut(node);
            match m.and_then(|n| n.downcast_mut::<SendWrapper<T>>()) {
                Some(inner) => {
                    *inner = SendWrapper::new(value);
                    None
                }
                None => Some(value),
            }
        })
    }
}
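The doc comment above explains the split: everything in the arena must be `Send + Sync`, so `LocalStorage` wraps thread-unsafe values in `send_wrapper::SendWrapper`, which is movable across threads but only usable on the thread that created it. A minimal standalone sketch of that idea (not the crate's code):

use send_wrapper::SendWrapper;
use std::rc::Rc;

fn main() {
    let local_only = Rc::new(42); // Rc is !Send + !Sync
    let wrapped: SendWrapper<Rc<i32>> = SendWrapper::new(local_only);

    // On the original thread, access works as usual.
    assert_eq!(**wrapped, 42);

    // Moving `wrapped` into another thread compiles, because SendWrapper is
    // Send; dereferencing it there would panic at runtime instead.
    let handle = std::thread::spawn(move || {
        let _moved_here = wrapped; // fine: never dereferenced off-thread
    });
    handle.join().unwrap();
}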
@@ -1,162 +1,14 @@
use super::{
    arena::{Arena, NodeId},
    OWNER,
};
use super::{ArenaItem, LocalStorage, Storage, SyncStorage};
use crate::{
    traits::{DefinedAt, Dispose, IsDisposed},
    unwrap_signal,
};
use send_wrapper::SendWrapper;
use std::{any::Any, hash::Hash, marker::PhantomData, panic::Location};

/// A trait for borrowing and taking data.
pub trait StorageAccess<T> {
    /// Borrows the value.
    fn as_borrowed(&self) -> &T;

    /// Takes the value.
    fn into_taken(self) -> T;
}

impl<T> StorageAccess<T> for T {
    fn as_borrowed(&self) -> &T {
        self
    }

    fn into_taken(self) -> T {
        self
    }
}

impl<T> StorageAccess<T> for SendWrapper<T> {
    fn as_borrowed(&self) -> &T {
        self
    }

    fn into_taken(self) -> T {
        self.take()
    }
}

/// A way of storing a [`StoredValue`], either as itself or with a wrapper to make it threadsafe.
///
/// This exists because all items stored in the arena must be `Send + Sync`, but in single-threaded
/// environments you might want or need to use thread-unsafe types.
pub trait Storage<T>: Send + Sync + 'static {
    /// The type being stored, once it has been wrapped.
    type Wrapped: StorageAccess<T> + Send + Sync + 'static;

    /// Adds any needed wrapper to the type.
    fn wrap(value: T) -> Self::Wrapped;

    /// Applies the given function to the stored value, if it exists and can be accessed from this
    /// thread.
    fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U>;

    /// Applies the given function to a mutable reference to the stored value, if it exists and can be accessed from this
    /// thread.
    fn try_with_mut<U>(
        node: NodeId,
        fun: impl FnOnce(&mut T) -> U,
    ) -> Option<U>;

    /// Sets a new value for the stored value. If it has been disposed, returns `Some(T)`.
    fn try_set(node: NodeId, value: T) -> Option<T>;
}

/// A form of [`Storage`] that stores the type as itself, with no wrapper.
#[derive(Debug, Copy, Clone)]
pub struct SyncStorage;

impl<T> Storage<T> for SyncStorage
where
    T: Send + Sync + 'static,
{
    type Wrapped = T;

    #[inline(always)]
    fn wrap(value: T) -> Self::Wrapped {
        value
    }

    fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U> {
        Arena::with(|arena| {
            let m = arena.get(node);
            m.and_then(|n| n.downcast_ref::<T>()).map(fun)
        })
    }

    fn try_with_mut<U>(
        node: NodeId,
        fun: impl FnOnce(&mut T) -> U,
    ) -> Option<U> {
        Arena::with_mut(|arena| {
            let m = arena.get_mut(node);
            m.and_then(|n| n.downcast_mut::<T>()).map(fun)
        })
    }

    fn try_set(node: NodeId, value: T) -> Option<T> {
        Arena::with_mut(|arena| {
            let m = arena.get_mut(node);
            match m.and_then(|n| n.downcast_mut::<T>()) {
                Some(inner) => {
                    *inner = value;
                    None
                }
                None => Some(value),
            }
        })
    }
}

/// A form of [`Storage`] that stores the type with a wrapper that makes it `Send + Sync`, but only
/// allows it to be accessed from the thread on which it was created.
#[derive(Debug, Copy, Clone)]
pub struct LocalStorage;

impl<T> Storage<T> for LocalStorage
where
    T: 'static,
{
    type Wrapped = SendWrapper<T>;

    fn wrap(value: T) -> Self::Wrapped {
        SendWrapper::new(value)
    }

    fn try_with<U>(node: NodeId, fun: impl FnOnce(&T) -> U) -> Option<U> {
        Arena::with(|arena| {
            let m = arena.get(node);
            m.and_then(|n| n.downcast_ref::<SendWrapper<T>>())
                .map(|inner| fun(inner))
        })
    }

    fn try_with_mut<U>(
        node: NodeId,
        fun: impl FnOnce(&mut T) -> U,
    ) -> Option<U> {
        Arena::with_mut(|arena| {
            let m = arena.get_mut(node);
            m.and_then(|n| n.downcast_mut::<SendWrapper<T>>())
                .map(|inner| fun(&mut *inner))
        })
    }

    fn try_set(node: NodeId, value: T) -> Option<T> {
        Arena::with_mut(|arena| {
            let m = arena.get_mut(node);
            match m.and_then(|n| n.downcast_mut::<SendWrapper<T>>()) {
                Some(inner) => {
                    *inner = SendWrapper::new(value);
                    None
                }
                None => Some(value),
            }
        })
    }
}
use or_poisoned::OrPoisoned;
use std::{
    hash::Hash,
    panic::Location,
    sync::{Arc, RwLock},
};

/// A **non-reactive**, `Copy` handle for any value.
///
@@ -167,8 +19,7 @@ where
/// updating it does not notify anything else.
#[derive(Debug)]
pub struct StoredValue<T, S = SyncStorage> {
    node: NodeId,
    ty: PhantomData<(SendWrapper<T>, S)>,
    value: ArenaItem<Arc<RwLock<T>>, S>,
    #[cfg(debug_assertions)]
    defined_at: &'static Location<'static>,
}
@@ -183,7 +34,7 @@ impl<T, S> Clone for StoredValue<T, S> {

impl<T, S> PartialEq for StoredValue<T, S> {
    fn eq(&self, other: &Self) -> bool {
        self.node == other.node
        self.value == other.value
    }
}

@@ -191,7 +42,7 @@ impl<T, S> Eq for StoredValue<T, S> {}

impl<T, S> Hash for StoredValue<T, S> {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.node.hash(state);
        self.value.hash(state);
    }
}

@@ -211,27 +62,13 @@ impl<T, S> DefinedAt for StoredValue<T, S> {
impl<T, S> StoredValue<T, S>
where
    T: 'static,
    S: Storage<T>,
    S: Storage<Arc<RwLock<T>>>,
{
    /// Stores the given value in the arena allocator.
    #[track_caller]
    pub fn new_with_storage(value: T) -> Self {
        let node = {
            Arena::with_mut(|arena| {
                arena.insert(
                    Box::new(S::wrap(value)) as Box<dyn Any + Send + Sync>
                )
            })
        };
        OWNER.with(|o| {
            if let Some(owner) = &*o.borrow() {
                owner.register(node);
            }
        });

        Self {
            node,
            ty: PhantomData,
            value: ArenaItem::new_with_storage(Arc::new(RwLock::new(value))),
            #[cfg(debug_assertions)]
            defined_at: Location::caller(),
        }
@@ -241,7 +78,7 @@ where
impl<T, S> Default for StoredValue<T, S>
where
    T: Default + 'static,
    S: Storage<T>,
    S: Storage<Arc<RwLock<T>>>,
{
    #[track_caller] // Default trait is not annotated with #[track_caller]
    fn default() -> Self {
@@ -271,7 +108,7 @@ where
    }
}

impl<T, S: Storage<T>> StoredValue<T, S> {
impl<T, S: Storage<Arc<RwLock<T>>>> StoredValue<T, S> {
    /// Returns an [`Option`] of applying a function to the value within the [`StoredValue`].
    ///
    /// If the owner of the reactive node has not been disposed [`Some`] is returned. Calling this
@@ -313,8 +150,11 @@ impl<T, S: Storage<T>> StoredValue<T, S> {
    /// assert_eq!(last, None);
    /// assert_eq!(length_fn(), None);
    /// ```
    #[track_caller]
    pub fn try_with_value<U>(&self, fun: impl FnOnce(&T) -> U) -> Option<U> {
        S::try_with(self.node, fun)
        self.value
            .try_get_value()
            .map(|inner| fun(&*inner.read().or_poisoned()))
    }

    /// Returns the output of applying a function to the value within the [`StoredValue`].
@@ -347,6 +187,7 @@ impl<T, S: Storage<T>> StoredValue<T, S> {
    /// assert_eq!(sum, 6);
    /// assert_eq!(length_fn(), 3);
    /// ```
    #[track_caller]
    pub fn with_value<U>(&self, fun: impl FnOnce(&T) -> U) -> U {
        self.try_with_value(fun)
            .unwrap_or_else(unwrap_signal!(self))
@@ -358,7 +199,9 @@ impl<T, S: Storage<T>> StoredValue<T, S> {
        &self,
        fun: impl FnOnce(&mut T) -> U,
    ) -> Option<U> {
        S::try_with_mut(self.node, fun)
        self.value
            .try_get_value()
            .map(|inner| fun(&mut *inner.write().or_poisoned()))
    }

    /// Updates the value within [`StoredValue`] by applying a function to it.
@@ -451,7 +294,13 @@ impl<T, S: Storage<T>> StoredValue<T, S> {
    /// assert_eq!(reset().as_deref(), Some(""));
    /// ```
    pub fn try_set_value(&self, value: T) -> Option<T> {
        S::try_set(self.node, value)
        match self.value.try_get_value() {
            Some(inner) => {
                *inner.write().or_poisoned() = value;
                None
            }
            None => Some(value),
        }
    }

    /// Sets the value within [`StoredValue`].
@@ -488,11 +337,11 @@ impl<T, S: Storage<T>> StoredValue<T, S> {

impl<T, S> IsDisposed for StoredValue<T, S> {
    fn is_disposed(&self) -> bool {
        Arena::with(|arena| !arena.contains_key(self.node))
        self.value.is_disposed()
    }
}

impl<T, S: Storage<T>> StoredValue<T, S>
impl<T, S: Storage<Arc<RwLock<T>>>> StoredValue<T, S>
where
    T: Clone + 'static,
{
@@ -572,7 +421,7 @@ where

impl<T, S> Dispose for StoredValue<T, S> {
    fn dispose(self) {
        Arena::with_mut(|arena| arena.remove(self.node));
        self.value.dispose();
    }
}

@@ -580,7 +429,7 @@ impl<T, S> Dispose for StoredValue<T, S> {
#[inline(always)]
#[track_caller]
#[deprecated(
    since = "0.7.0-beta4",
    since = "0.7.0-beta5",
    note = "This function is being removed to conform to Rust idioms. Please \
            use `StoredValue::new()` or `StoredValue::new_local()` instead."
)]
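The hunks above turn `StoredValue<T, S>` into a thin layer over `ArenaItem<Arc<RwLock<T>>, S>`: the arena only hands out a clone of the `Arc`, and reads and writes go through the inner lock. A standalone sketch of that layering, using hypothetical stand-in types rather than the crate's:

use std::{
    collections::HashMap,
    sync::{Arc, RwLock},
};

#[derive(Default)]
struct Arena(HashMap<u64, Arc<RwLock<String>>>);

#[derive(Clone, Copy)]
struct Handle(u64);

impl Arena {
    fn insert(&mut self, key: u64, value: String) -> Handle {
        self.0.insert(key, Arc::new(RwLock::new(value)));
        Handle(key)
    }

    fn try_with<U>(&self, h: Handle, f: impl FnOnce(&String) -> U) -> Option<U> {
        // Clone the Arc out of the arena, then read under the lock, mirroring
        // `try_get_value().map(|inner| fun(&*inner.read().or_poisoned()))` above.
        self.0.get(&h.0).cloned().map(|cell| f(&*cell.read().unwrap()))
    }
}

fn main() {
    let mut arena = Arena::default();
    let h = arena.insert(1, "hello".into());
    assert_eq!(arena.try_with(h, |s| s.len()), Some(5));
}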
@@ -9,6 +9,7 @@ pub mod guards;
mod read;
mod rw;
mod subscriber_traits;
mod trigger;
mod write;

use crate::owner::LocalStorage;
@@ -18,6 +19,7 @@ pub use arc_trigger::*;
pub use arc_write::*;
pub use read::*;
pub use rw::*;
pub use trigger::*;
pub use write::*;

/// Creates a reference-counted signal.
@@ -5,7 +5,7 @@ use super::{
};
use crate::{
    graph::{ReactiveNode, SubscriberSet},
    prelude::{IsDisposed, Trigger},
    prelude::{IsDisposed, Notify},
    traits::{DefinedAt, ReadUntracked, UntrackableGuard, Writeable},
};
use core::fmt::{Debug, Formatter, Result};
@@ -56,7 +56,7 @@ use std::{
/// > Each of these has a related `_untracked()` method, which updates the signal
/// > without notifying subscribers. Untracked updates are not desirable in most
/// > cases, as they cause “tearing” between the signal’s value and its observed
/// > value. If you want a non-reactive container, used [`StoredValue`](crate::owner::StoredValue)
/// > value. If you want a non-reactive container, used [`ArenaItem`](crate::owner::ArenaItem)
/// > instead.
///
/// ## Examples
@@ -247,8 +247,8 @@ impl<T: 'static> ReadUntracked for ArcRwSignal<T> {
    }
}

impl<T> Trigger for ArcRwSignal<T> {
    fn trigger(&self) {
impl<T> Notify for ArcRwSignal<T> {
    fn notify(&self) {
        self.mark_dirty();
    }
}
Some files were not shown because too many files have changed in this diff.