Compare commits

1 commit

Author: Greg Johnston
SHA1: 2cc6ccaacb
Message: feat: custom_view macro to allow implementing view types on arbitrary data
Date: 2024-08-04 21:05:04 -04:00

794 changed files with 19904 additions and 74629 deletions


@@ -29,15 +29,11 @@ Steps to reproduce the behavior:
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Next Steps**
- [ ] I will make a PR
- [ ] I would like to make a PR, but need help getting started
- [ ] I want someone else to take the time to fix this
- [ ] This is a low priority for me and is just shared for your information
**Additional context**
Add any other context about the problem here.


@@ -1,19 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
# Grouping all dependencies in one PR weekly
- package-ecosystem: cargo
directory: "/"
schedule:
interval: weekly
day: monday
open-pull-requests-limit: 1
allow:
- dependency-type: "all"
groups:
rust-dependencies:
patterns:
- "*"


@@ -1,39 +0,0 @@
name: autofix.ci
on:
pull_request:
# Running this workflow on main branch pushes requires write permission to apply changes.
# Leave it commented out for future use.
# push:
# branches: ["main"]
permissions:
contents: read
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: 1
DEBIAN_FRONTEND: noninteractive
jobs:
autofix:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions-rust-lang/setup-rust-toolchain@v1
with: {toolchain: "nightly-2025-07-16", components: "rustfmt, clippy", target: "wasm32-unknown-unknown", rustflags: ""}
- name: Install Glib
run: |
sudo apt-get update
sudo apt-get install -y libglib2.0-dev
- name: Install cargo-all-features
run: cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
- name: Install jq
run: sudo apt-get install jq
- name: Format the workspace
run: cargo fmt --all
- name: Clippy the workspace
run: cargo all-features clippy --allow-dirty --fix --lib --no-deps
- uses: autofix-ci/action@v1.3.2
if: ${{ always() }}
with:
fail-fast: false


@@ -0,0 +1,32 @@
name: CI Changed Examples
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
get-example-changed:
uses: ./.github/workflows/get-example-changed.yml
get-matrix:
needs: [get-example-changed]
uses: ./.github/workflows/get-changed-examples-matrix.yml
with:
example_changed: ${{ fromJSON(needs.get-example-changed.outputs.example_changed) }}
test:
name: CI
needs: [get-example-changed, get-matrix]
if: needs.get-example-changed.outputs.example_changed == 'true'
strategy:
matrix: ${{ fromJSON(needs.get-matrix.outputs.matrix) }}
fail-fast: false
uses: ./.github/workflows/run-cargo-make-task.yml
with:
directory: ${{ matrix.directory }}
cargo_make_task: "ci"
toolchain: stable

.github/workflows/ci-examples.yml (new file)

@@ -0,0 +1,27 @@
name: CI Examples
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
get-leptos-changed:
uses: ./.github/workflows/get-leptos-changed.yml
get-examples-matrix:
uses: ./.github/workflows/get-examples-matrix.yml
test:
name: CI
needs: [get-leptos-changed, get-examples-matrix]
if: needs.get-leptos-changed.outputs.leptos_changed == 'true'
strategy:
matrix: ${{ fromJSON(needs.get-examples-matrix.outputs.matrix) }}
fail-fast: false
uses: ./.github/workflows/run-cargo-make-task.yml
with:
directory: ${{ matrix.directory }}
cargo_make_task: "ci"
toolchain: stable

.github/workflows/ci-semver.yml (new file)

@@ -0,0 +1,28 @@
name: CI semver
on:
push:
branches:
- main
pull_request:
branches:
- main
jobs:
get-leptos-changed:
uses: ./.github/workflows/get-leptos-changed.yml
test:
needs: [get-leptos-changed]
if: needs.get-leptos-changed.outputs.leptos_changed == 'true' && github.event.pull_request.labels[0].name != 'breaking'
name: Run semver check (nightly-2024-08-01)
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Semver Checks
uses: obi1kenobi/cargo-semver-checks-action@v2
with:
rust-toolchain: nightly-2024-08-01


@@ -1,68 +1,52 @@
name: CI
on:
push:
branches:
- main
- leptos_0.6
- leptos_0.8
pull_request:
branches:
- main
- leptos_0.6
- leptos_0.8
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
get-leptos-changed:
uses: ./.github/workflows/get-leptos-changed.yml
get-leptos-matrix:
uses: ./.github/workflows/get-leptos-matrix.yml
get-example-changed:
uses: ./.github/workflows/get-example-changed.yml
get-examples-matrix:
uses: ./.github/workflows/get-examples-matrix.yml
test-members:
name: CI (members)
needs: [get-leptos-changed, get-leptos-matrix]
test:
name: CI
needs: [get-leptos-changed]
if: needs.get-leptos-changed.outputs.leptos_changed == 'true'
strategy:
matrix: ${{ fromJSON(needs.get-leptos-matrix.outputs.matrix) }}
fail-fast: false
matrix:
directory:
[
any_error,
any_spawner,
const_str_slice_concat,
either_of,
hydration_context,
integrations/actix,
integrations/axum,
integrations/utils,
leptos,
leptos_config,
leptos_dom,
leptos_hot_reload,
leptos_macro,
leptos_server,
meta,
next_tuple,
oco,
or_poisoned,
reactive_graph,
router,
router_macro,
server_fn,
server_fn/server_fn_macro_default,
server_fn_macro,
]
uses: ./.github/workflows/run-cargo-make-task.yml
with:
directory: ${{ matrix.directory }}
test-examples:
name: CI (examples)
needs: [test-members, get-examples-matrix]
if: ${{ success() }}
strategy:
matrix: ${{ fromJSON(needs.get-examples-matrix.outputs.matrix) }}
fail-fast: false
uses: ./.github/workflows/run-cargo-make-task.yml
with:
directory: ${{ matrix.directory }}
test-only-examples:
name: CI (examples)
needs: [get-leptos-changed, get-example-changed]
if: needs.get-leptos-changed.outputs.leptos_changed != 'true' && needs.get-example-changed.outputs.example_changed == 'true'
strategy:
matrix: ${{ fromJSON(needs.get-example-changed.outputs.matrix) }}
fail-fast: false
uses: ./.github/workflows/run-cargo-make-task.yml
with:
directory: ${{ matrix.directory }}
semver-check:
name: SemVer check (stable)
needs: [get-leptos-changed, test-members, test-examples]
if: ${{ success() && needs.get-leptos-changed.outputs.leptos_changed == 'true' && !contains(github.event.pull_request.labels.*.name, 'breaking') }}
runs-on: ubuntu-latest
steps:
- name: Install Glib
run: |
sudo apt-get update
sudo apt-get install -y libglib2.0-dev
- name: Checkout
uses: actions/checkout@v4
- name: Semver Checks
uses: obi1kenobi/cargo-semver-checks-action@v2
cargo_make_task: "ci"
toolchain: nightly-2024-08-01


@@ -0,0 +1,54 @@
name: Changed Examples Matrix Call
on:
workflow_call:
inputs:
example_changed:
description: "Example Changed"
required: true
type: boolean
outputs:
matrix:
description: "Matrix"
value: ${{ jobs.get-example-changed.outputs.matrix }}
jobs:
get-example-changed:
name: Get Changed Example Matrix
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get example project directories that changed
id: changed-dirs
uses: tj-actions/changed-files@v41
with:
dir_names: true
dir_names_max_depth: "2"
files: |
examples/**
!examples/cargo-make/**
!examples/gtk/**
!examples/Makefile.toml
!examples/*.md
json: true
quotepath: false
- name: List example project directories that changed
run: echo '${{ steps.changed-dirs.outputs.all_changed_files }}'
- name: Set Matrix
id: set-matrix
run: |
if [ ${{ inputs.example_changed }} == 'true' ]; then
# Create matrix with changed directories
echo "matrix={\"directory\":${{ steps.changed-dirs.outputs.all_changed_files }}}" >> "$GITHUB_OUTPUT"
else
# Create matrix with one item to prevent an empty vector error
echo "matrix={\"directory\":[\"NO_CHANGE\"]}" >> "$GITHUB_OUTPUT"
fi


@@ -1,43 +1,39 @@
name: Examples Changed Call
on:
workflow_call:
outputs:
example_changed:
description: "Example Changed"
value: ${{ jobs.get-example-changed.outputs.example_changed }}
# This is for test-only-examples workflow in ci.yml
matrix:
description: "Example Changed Directories"
value: ${{ jobs.get-example-changed.outputs.matrix }}
jobs:
get-example-changed:
name: Get Example Changed
runs-on: ubuntu-latest
outputs:
example_changed: ${{ steps.set-example-changed.outputs.example_changed }}
# This is for test-only-examples workflow in ci.yml
matrix: ${{ steps.set-example-changed.outputs.matrix }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get example files that changed
id: changed-files
uses: tj-actions/changed-files@v46
uses: tj-actions/changed-files@v43
with:
files: |
examples/**
!examples/cargo-make/**
!examples/gtk/**
!examples/Makefile.toml
!examples/*.md
- name: List example files that changed
run: echo '${{ steps.changed-files.outputs.all_changed_files }}'
- name: Install jq
run: sudo apt-get install jq
- name: Set example_changed
id: set-example-changed
run: |
echo "example_changed=${{ steps.changed-files.outputs.any_changed }}" >> "$GITHUB_OUTPUT"
# This is for test-only-examples workflow in ci.yml
echo "matrix={\"directory\": $(echo '${{ steps.changed-files.outputs.all_changed_files }}' | tr ' ' '\n' | awk -F'/' '{print $1 "/" $2}'| sort -u | jq -R -s -c 'split("\n") | .[:-1]')}" >> "$GITHUB_OUTPUT"


@@ -1,34 +1,38 @@
name: Get Examples Matrix Call
on:
workflow_call:
outputs:
matrix:
description: "Matrix"
value: ${{ jobs.create.outputs.matrix }}
jobs:
create:
name: Create Examples Matrix
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
env:
# separate examples using "|" (vertical bar) char like "a|b|c".
# cargo-make should be excluded by default.
EXCLUDED_EXAMPLES: cargo-make
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install jq
run: sudo apt-get install jq
- name: Set Matrix
id: set-matrix
run: |
examples=$(ls -1d examples/*/ |
grep -vE "($EXCLUDED_EXAMPLES)" |
sed 's/\/$//' |
examples=$(ls examples |
awk '{print "examples/" $0}' |
grep -v .md |
grep -v examples/Makefile.toml |
grep -v examples/cargo-make |
grep -v examples/gtk |
jq -R -s -c 'split("\n")[:-1]')
echo "Example Directories: $examples"
echo "matrix={\"directory\":$examples}" >> "$GITHUB_OUTPUT"
- name: Print Location Info
run: |
echo "Workspace: ${{ github.workspace }}"


@@ -1,10 +1,12 @@
name: Get Leptos Changed Call
on:
workflow_call:
outputs:
leptos_changed:
description: "Leptos Changed"
value: ${{ jobs.create.outputs.leptos_changed }}
jobs:
create:
name: Detect Source Change
@@ -16,19 +18,40 @@ jobs:
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Get source files that changed
id: changed-source
uses: tj-actions/changed-files@v46
uses: tj-actions/changed-files@v43
with:
files_ignore: |
.*/**/*
cargo-make/**/*
examples/**/*
projects/**/*
benchmarks/**/*
docs/**/*
files: |
any_error/**
any_spawner/**
const_str_slice_concat/**
either_of/**
hydration_context/**
integrations/actix/**
integrations/axum/**
integrations/utils/**
leptos/**
leptos_config/**
leptos_dom/**
leptos_hot_reload/**
leptos_macro/**
leptos_server/**
meta/**
next_tuple/**
oco/**
or_poisoned/**
reactive_graph/**
router/**
router_macro/**
server_fn/**
server_fn/server_fn_macro_default/**
server_fn_macro/**
- name: List source files that changed
run: echo '${{ steps.changed-source.outputs.all_changed_files }}'
- name: Set leptos_changed
id: set-source-changed
run: |


@@ -1,32 +0,0 @@
name: Get Leptos Matrix Call
on:
workflow_call:
outputs:
matrix:
description: "Matrix"
value: ${{ jobs.create.outputs.matrix }}
jobs:
create:
name: Create Leptos Matrix
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Install jq
run: sudo apt-get install jq
- name: Set Matrix
id: set-matrix
run: |
crates=$(cargo metadata --no-deps --quiet --format-version 1 |
jq -r '.packages[] | select(.name != "workspace") | .manifest_path| rtrimstr("/Cargo.toml")' |
sed "s|$(pwd)/||" |
jq -R -s -c 'split("\n")[:-1]')
echo "Leptos Directories: $crates"
echo "matrix={\"directory\":$crates}" >> "$GITHUB_OUTPUT"
- name: Print Location Info
run: |
echo "Workspace: ${{ github.workspace }}"
pwd
ls | sort -u


@@ -1,120 +1,92 @@
name: Run Task
on:
workflow_call:
inputs:
directory:
required: true
type: string
cargo_make_task:
required: true
type: string
toolchain:
required: true
type: string
env:
CARGO_TERM_COLOR: always
CARGO_REGISTRIES_CRATES_IO_PROTOCOL: sparse
DEBIAN_FRONTEND: noninteractive
RUSTFLAGS: ${{ inputs.erased_mode && '--cfg erase_components' || '' }}
LEPTOS_TAILWIND_VERSION: v4.0.14
LEPTOS_SASS_VERSION: 1.86.0
jobs:
test:
name: "Run (${{ matrix.toolchain }}) (erased_mode: ${{ matrix.erased_mode && 'enabled' || 'disabled' }})"
name: Run ${{ inputs.cargo_make_task }} (${{ inputs.toolchain }})
runs-on: ubuntu-latest
strategy:
matrix:
toolchain: [stable, nightly-2025-07-16]
erased_mode: [true, false]
steps:
- name: Free Disk Space
run: |
echo "Disk space before cleanup:"
df -h
sudo rm -rf /usr/local/.ghcup
sudo rm -rf /opt/hostedtoolcache/CodeQL
sudo rm -rf /usr/local/lib/android
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf /usr/local/share/boost
sudo rm -rf /usr/local/lib/node_modules
# the following lines are currently not needed, as they take too much time
# the new isolated CI doesn't need much space to test libraries
#
# uncomment only if needed
#
# sudo apt-get clean
# sudo apt-get purge -y '^ghc-.*' '^dotnet-.*' '^llvm-.*' '^mono-.*' '^php.*' '^ruby.*'
# sudo apt-get autoremove -y
# sudo apt-get clean
# sudo rm -rf "$AGENT_TOOLSDIRECTORY"
# docker system prune -af
# docker image prune -af
# docker volume prune -f
echo "Disk space after cleanup:"
df -h
steps:
# Setup environment
- name: Install Glib
run: |
sudo apt-get update
sudo apt-get install -y libglib2.0-dev
- uses: actions/checkout@v4
- name: Setup Rust
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ matrix.toolchain }}
targets: wasm32-unknown-unknown
components: clippy,rustfmt
toolchain: ${{ inputs.toolchain }}
- name: Add wasm32-unknown-unknown
run: rustup target add wasm32-unknown-unknown
- name: Setup cargo-make
uses: davidB/rust-cargo-make@v1
- name: Cargo generate-lockfile
run: cargo generate-lockfile
- uses: Swatinem/rust-cache@v2
- name: Install binstall
uses: cargo-bins/cargo-binstall@main
- name: Install wasm-bindgen
run: cargo binstall wasm-bindgen-cli --no-confirm
- name: Install cargo-leptos
run: cargo binstall cargo-leptos --locked --no-confirm
- name: Install cargo-make
run: cargo binstall cargo-make --no-confirm
- name: Install nextest
run: cargo binstall cargo-nextest --no-confirm
- name: Install cargo-all-features
run: cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
# Part of direct-minimal-versions check
- name: Install cargo-hack
if: contains(matrix.toolchain, 'nightly')
uses: taiki-e/install-action@cargo-hack
# Part of direct-minimal-versions check
- name: Install cargo-minimal-versions
if: contains(matrix.toolchain, 'nightly')
uses: taiki-e/install-action@cargo-minimal-versions
run: cargo binstall cargo-leptos --no-confirm
- name: Install Trunk
if: contains(inputs.directory, 'examples')
run: cargo binstall trunk --no-confirm
uses: jetli/trunk-action@v0.5.0
with:
version: "latest"
- name: Print Trunk Version
if: contains(inputs.directory, 'examples')
run: trunk --version
- name: Install Node.js
if: contains(inputs.directory, 'examples')
uses: actions/setup-node@v4
with:
node-version: 20
- uses: pnpm/action-setup@v4
- uses: pnpm/action-setup@v3
name: Install pnpm
if: contains(inputs.directory, 'examples')
id: pnpm-install
with:
version: 8
run_install: false
- name: Get pnpm store directory
if: contains(inputs.directory, 'examples')
id: pnpm-cache
run: |
echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
- uses: actions/cache@v4
if: contains(inputs.directory, 'examples')
name: Setup pnpm cache
with:
path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Maybe install chromedriver
if: contains(inputs.directory, 'examples')
run: |
project_makefile='${{inputs.directory}}/Makefile.toml'
project_makefile=${{inputs.directory}}/Makefile.toml
webdriver_count=$(cat $project_makefile | grep "cargo-make/webdriver.toml" | wc -l)
if [ $webdriver_count -eq 1 ]; then
if ! command -v chromedriver &>/dev/null; then
@@ -127,10 +99,10 @@ jobs:
else
echo chromedriver is not required
fi
- name: Maybe install playwright browser dependencies
if: contains(inputs.directory, 'examples')
run: |
for pw_path in $(find '${{inputs.directory}}' -name playwright.config.ts)
for pw_path in $(find ${{inputs.directory}} -name playwright.config.ts)
do
pw_dir=$(dirname $pw_path)
if [ ! -v $pw_dir ]; then
@@ -141,48 +113,14 @@ jobs:
echo Playwright is not required
fi
done
- name: Install Deno
if: contains(inputs.directory, 'examples')
uses: denoland/setup-deno@v2
uses: denoland/setup-deno@v1
with:
deno-version: v1.x
- name: Maybe install gtk-rs dependencies
if: contains(inputs.directory, 'gtk')
run: |
sudo apt-get install -y libglib2.0-dev libgio2.0-cil-dev libgraphene-1.0-dev libcairo2-dev libpango1.0-dev libgtk-4-dev
- name: Install Tailwind and Sass dependencies
if: contains(inputs.directory, 'examples')
run: |
cd '${{ inputs.directory }}'
tailwindcss_version=$(echo "$LEPTOS_TAILWIND_VERSION" | sed 's/^v//')
sass_version="$LEPTOS_SASS_VERSION"
pnpm add "tailwindcss@$tailwindcss_version" "@tailwindcss/cli@$tailwindcss_version" "sass@$sass_version"
echo "Tailwind CSS version:"
./node_modules/.bin/tailwindcss --version
echo "Sass version:"
./node_modules/.bin/sass --version
# Run Cargo Make Task
- name: ${{ inputs.cargo_make_task }}
run: |
cd '${{ inputs.directory }}'
cargo make --no-workspace --profile=github-actions ci
# check the direct-minimal-versions on release
COMMIT_MSG=$(git log -1 --pretty=format:'%s')
# Supports: v1.2.3, v1.2.3-alpha, v1.2.3-beta1, v1.2.3-rc.1, etc.
if [[ "$COMMIT_MSG" =~ ^v[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9]+(\.?[0-9]+)?)?$ ]]; then
cargo make --no-workspace --profile=github-actions check-minimal-versions
fi
# Check if the counter_isomorphic can be built with leptos_debuginfo cfg flag in release mode
- name: ${{ inputs.cargo_make_task }} with --cfg=leptos_debuginfo
if: contains(inputs.directory, 'counter_isomorphic')
run: |
cd '${{ inputs.directory }}'
RUSTFLAGS="$RUSTFLAGS --cfg leptos_debuginfo" cargo leptos build --release
- name: Clean up ${{ inputs.directory }}
if: always()
run: |
cd '${{ inputs.directory }}'
cargo clean || true
rm -rf node_modules || true
cd ${{ inputs.directory }}
cargo make --profile=github-actions ${{ inputs.cargo_make_task }}

.gitignore

@@ -3,9 +3,7 @@ dist
pkg
comparisons
blob.rs
**/projects/**/Cargo.lock
**/examples/**/Cargo.lock
**/benchmarks/**/Cargo.lock
Cargo.lock
**/*.rs.bk
.DS_Store
.idea
@@ -13,5 +11,4 @@ blob.rs
.envrc
.vscode
vendor
hash.txt
vendor

Cargo.lock (generated)

File diff suppressed because it is too large.


@@ -37,139 +37,38 @@ members = [
"router_macro",
"any_error",
]
exclude = ["benchmarks", "examples", "projects"]
exclude = ["benchmarks", "examples"]
[workspace.package]
edition = "2021"
rust-version = "1.88"
version = "0.7.0-beta"
rust-version = "1.75"
[workspace.dependencies]
# members
throw_error = { path = "./any_error/", version = "0.3.0" }
any_spawner = { path = "./any_spawner/", version = "0.3.0" }
throw_error = { path = "./any_error/", version = "0.2.0-beta" }
any_spawner = { path = "./any_spawner/", version = "0.1" }
const_str_slice_concat = { path = "./const_str_slice_concat", version = "0.1" }
either_of = { path = "./either_of/", version = "0.1.6" }
hydration_context = { path = "./hydration_context", version = "0.3.0" }
leptos = { path = "./leptos", version = "0.8.6" }
leptos_config = { path = "./leptos_config", version = "0.8.5" }
leptos_dom = { path = "./leptos_dom", version = "0.8.5" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.8.5" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.8.5" }
leptos_macro = { path = "./leptos_macro", version = "0.8.6" }
leptos_router = { path = "./router", version = "0.8.5" }
leptos_router_macro = { path = "./router_macro", version = "0.8.5" }
leptos_server = { path = "./leptos_server", version = "0.8.5" }
leptos_meta = { path = "./meta", version = "0.8.5" }
next_tuple = { path = "./next_tuple", version = "0.1.0" }
oco_ref = { path = "./oco", version = "0.2.1" }
or_poisoned = { path = "./or_poisoned", version = "0.1.0" }
reactive_graph = { path = "./reactive_graph", version = "0.2.5" }
reactive_stores = { path = "./reactive_stores", version = "0.2.5" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.2.5" }
server_fn = { path = "./server_fn", version = "0.8.5" }
server_fn_macro = { path = "./server_fn_macro", version = "0.8.6" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.8.5" }
tachys = { path = "./tachys", version = "0.2.6" }
wasm_split_helpers = { path = "./wasm_split", version = "0.1.2" }
wasm_split_macros = { path = "./wasm_split_macros", version = "0.1.2" }
# members deps
async-once-cell = { default-features = false, version = "0.5.3" }
itertools = { default-features = false, version = "0.14.0" }
convert_case = { default-features = false, version = "0.8.0" }
serde_json = { default-features = false, version = "1.0.140" }
trybuild = { default-features = false, version = "1.0.106" }
typed-builder = { default-features = false, version = "0.21.0" }
thiserror = { default-features = false, version = "2.0.12" }
wasm-bindgen = { default-features = false, version = "0.2.100" }
indexmap = { default-features = false, version = "2.9.0" }
rstml = { default-features = false, version = "0.12.1" }
rustc_version = { default-features = false, version = "0.4.1" }
guardian = { default-features = false, version = "1.3.0" }
rustc-hash = { default-features = false, version = "2.1.1" }
actix-web = { default-features = false, version = "4.11.0" }
tracing = { default-features = false, version = "0.1.41" }
slotmap = { default-features = false, version = "1.0.7" }
futures = { default-features = false, version = "0.3.31" }
dashmap = { default-features = false, version = "6.1.0" }
pin-project-lite = { default-features = false, version = "0.2.16" }
send_wrapper = { default-features = false, version = "0.6.0" }
tokio-test = { default-features = false, version = "0.4.4" }
html-escape = { default-features = false, version = "0.2.13" }
proc-macro-error2 = { default-features = false, version = "2.0.1" }
const_format = { default-features = false, version = "0.2.34" }
gloo-net = { default-features = false, version = "0.6.0" }
url = { default-features = false, version = "2.5.4" }
tokio = { default-features = false, version = "1.46.1" }
base64 = { default-features = false, version = "0.22.1" }
cfg-if = { default-features = false, version = "1.0.0" }
wasm-bindgen-futures = { default-features = false, version = "0.4.50" }
tower = { default-features = false, version = "0.5.2" }
proc-macro2 = { default-features = false, version = "1.0.95" }
serde = { default-features = false, version = "1.0.219" }
parking_lot = { default-features = false, version = "0.12.4" }
axum = { default-features = false, version = "0.8.4" }
serde_qs = { default-features = false, version = "0.15.0" }
syn = { default-features = false, version = "2.0.104" }
xxhash-rust = { default-features = false, version = "0.8.15" }
paste = { default-features = false, version = "1.0.15" }
quote = { default-features = false, version = "1.0.40" }
web-sys = { default-features = false, version = "0.3.77" }
js-sys = { default-features = false, version = "0.3.77" }
rand = { default-features = false, version = "0.9.1" }
serde-lite = { default-features = false, version = "0.5.0" }
tokio-tungstenite = { default-features = false, version = "0.27.0" }
serial_test = { default-features = false, version = "3.2.0" }
erased = { default-features = false, version = "0.1.2" }
glib = { default-features = false, version = "0.20.12" }
async-trait = { default-features = false, version = "0.1.88" }
typed-builder-macro = { default-features = false, version = "0.21.0" }
linear-map = { default-features = false, version = "1.2.0" }
anyhow = { default-features = false, version = "1.0.98" }
walkdir = { default-features = false, version = "2.5.0" }
actix-ws = { default-features = false, version = "0.3.0" }
tower-http = { default-features = false, version = "0.6.4" }
prettyplease = { default-features = false, version = "0.2.35" }
inventory = { default-features = false, version = "0.3.20" }
config = { default-features = false, version = "0.15.13" }
camino = { default-features = false, version = "1.1.9" }
ciborium = { default-features = false, version = "0.2.2" }
multer = { default-features = false, version = "3.1.0" }
leptos-spin-macro = { default-features = false, version = "0.2.0" }
sledgehammer_utils = { default-features = false, version = "0.3.1" }
sledgehammer_bindgen = { default-features = false, version = "0.6.0" }
wasm-streams = { default-features = false, version = "0.4.2" }
rkyv = { default-features = false, version = "0.8.10" }
temp-env = { default-features = false, version = "0.3.6" }
uuid = { default-features = false, version = "1.17.0" }
bytes = { default-features = false, version = "1.10.1" }
http = { default-features = false, version = "1.3.1" }
regex = { default-features = false, version = "1.11.1" }
drain_filter_polyfill = { default-features = false, version = "0.1.3" }
tempfile = { default-features = false, version = "3.20.0" }
futures-lite = { default-features = false, version = "2.6.0" }
log = { default-features = false, version = "0.4.27" }
percent-encoding = { default-features = false, version = "2.3.1" }
async-executor = { default-features = false, version = "1.13.2" }
const-str = { default-features = false, version = "0.6.3" }
http-body-util = { default-features = false, version = "0.1.3" }
hyper = { default-features = false, version = "1.6.0" }
postcard = { default-features = false, version = "1.1.1" }
rmp-serde = { default-features = false, version = "1.3.0" }
reqwest = { default-features = false, version = "0.12.22" }
tower-layer = { default-features = false, version = "0.3.3" }
attribute-derive = { default-features = false, version = "0.10.3" }
insta = { default-features = false, version = "1.43.1" }
codee = { default-features = false, version = "0.3.0" }
actix-http = { default-features = false, version = "3.11.0" }
wasm-bindgen-test = { default-features = false, version = "0.3.50" }
rustversion = { default-features = false, version = "1.0.21" }
getrandom = { default-features = false, version = "0.3.3" }
actix-files = { default-features = false, version = "0.6.6" }
async-lock = { default-features = false, version = "3.4.0" }
base16 = { default-features = false, version = "0.2.1" }
digest = { default-features = false, version = "0.10.7" }
sha2 = { default-features = false, version = "0.10.8" }
either_of = { path = "./either_of/", version = "0.1" }
hydration_context = { path = "./hydration_context", version = "0.2.0-beta" }
leptos = { path = "./leptos", version = "0.7.0-beta" }
leptos_config = { path = "./leptos_config", version = "0.7.0-beta" }
leptos_dom = { path = "./leptos_dom", version = "0.7.0-beta" }
leptos_hot_reload = { path = "./leptos_hot_reload", version = "0.7.0-beta" }
leptos_integration_utils = { path = "./integrations/utils", version = "0.7.0-beta" }
leptos_macro = { path = "./leptos_macro", version = "0.7.0-beta" }
leptos_router = { path = "./router", version = "0.7.0-beta" }
leptos_router_macro = { path = "./router_macro", version = "0.7.0-beta" }
leptos_server = { path = "./leptos_server", version = "0.7.0-beta" }
leptos_meta = { path = "./meta", version = "0.7.0-beta" }
next_tuple = { path = "./next_tuple", version = "0.1.0-beta" }
oco_ref = { path = "./oco", version = "0.2" }
or_poisoned = { path = "./or_poisoned", version = "0.1" }
reactive_graph = { path = "./reactive_graph", version = "0.1.0-beta" }
reactive_stores = { path = "./reactive_stores", version = "0.1.0-beta" }
reactive_stores_macro = { path = "./reactive_stores_macro", version = "0.1.0-beta" }
server_fn = { path = "./server_fn", version = "0.7.0-beta" }
server_fn_macro = { path = "./server_fn_macro", version = "0.7.0-beta" }
server_fn_macro_default = { path = "./server_fn/server_fn_macro_default", version = "0.7.0-beta" }
tachys = { path = "./tachys", version = "0.1.0-beta" }
[profile.release]
codegen-units = 1
@@ -178,10 +77,3 @@ opt-level = 'z'
[workspace.metadata.cargo-all-features]
skip_feature_sets = [["csr", "ssr"], ["csr", "hydrate"], ["ssr", "hydrate"]]
max_combination_size = 2
[workspace.lints.rust]
unexpected_cfgs = { level = "warn", check-cfg = [
'cfg(leptos_debuginfo)',
'cfg(erase_components)',
] }


@@ -10,8 +10,8 @@ CARGO_MAKE_EXTEND_WORKSPACE_MAKEFILE = true
workspace = false
clear = true
dependencies = [
{ name = "lint", path = "examples/counter_without_macros" },
{ name = "lint", path = "examples/counters_stable" },
{ name = "check", path = "examples/counter_without_macros" },
{ name = "check", path = "examples/counters_stable" },
]
[tasks.ci-examples]


@@ -5,7 +5,6 @@
[![crates.io](https://img.shields.io/crates/v/leptos.svg)](https://crates.io/crates/leptos)
[![docs.rs](https://docs.rs/leptos/badge.svg)](https://docs.rs/leptos)
![Crates.io MSRV](https://img.shields.io/crates/msrv/leptos)
[![Discord](https://img.shields.io/discord/1031524867910148188?color=%237289DA&label=discord)](https://discord.gg/YdRAhS7eQB)
[![Matrix](https://img.shields.io/badge/Matrix-leptos-grey?logo=matrix&labelColor=white&logoColor=black)](https://matrix.to/#/#leptos:matrix.org)
@@ -21,7 +20,7 @@ use leptos::*;
#[component]
pub fn SimpleCounter(initial_value: i32) -> impl IntoView {
// create a reactive signal with the initial value
let (value, set_value) = signal(initial_value);
let (value, set_value) = create_signal(initial_value);
// create event handlers for our buttons
// note that `value` and `set_value` are `Copy`, so it's super easy to move them into closures
@@ -46,7 +45,7 @@ pub fn SimpleCounter(initial_value: i32) -> impl IntoView {
pub fn SimpleCounterWithBuilder(initial_value: i32) -> impl IntoView {
use leptos::html::*;
let (value, set_value) = signal(initial_value);
let (value, set_value) = create_signal(initial_value);
let clear = move |_| set_value(0);
let decrement = move |_| set_value.update(|value| *value -= 1);
let increment = move |_| set_value.update(|value| *value += 1);
@@ -90,13 +89,35 @@ Here are some resources for learning more about Leptos:
- [API Documentation](https://docs.rs/leptos/latest/leptos/)
- [Common Bugs](https://github.com/leptos-rs/leptos/tree/main/docs/COMMON_BUGS.md) (and how to fix them!)
## `nightly` Note
Most of the examples assume you're using the `nightly` version of Rust and the `nightly` feature of Leptos. To use `nightly` Rust, you can either set your toolchain globally or on a per-project basis.
To set `nightly` as the default toolchain for all projects (and add the ability to compile Rust to WebAssembly, if you haven't already):
```
rustup toolchain install nightly
rustup default nightly
rustup target add wasm32-unknown-unknown
```
If you'd like to use `nightly` only in your Leptos project, however, add a [`rust-toolchain.toml`](https://rust-lang.github.io/rustup/overrides.html#the-toolchain-file) file with the following content:
```toml
[toolchain]
channel = "nightly"
targets = ["wasm32-unknown-unknown"]
```
The `nightly` feature enables the function call syntax for accessing and setting signals, as opposed to `.get()` and `.set()`. This leads to a consistent mental model in which accessing a reactive value of any kind (a signal, memo, or derived signal) is always represented as a function call. This is only possible with nightly Rust and the `nightly` feature.
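For a minimal sketch of the difference (the component name here is illustrative, and the function-call form assumes the `nightly` feature is enabled):

```rust
use leptos::*;

#[component]
pub fn DoubledCounter() -> impl IntoView {
    let (value, set_value) = create_signal(1);

    // `nightly` feature: read and write by calling the signals directly
    let doubled = move || value() * 2;
    let increment = move |_| set_value(value() + 1);

    // stable equivalent of the two lines above:
    // let doubled = move || value.get() * 2;
    // let increment = move |_| set_value.set(value.get() + 1);

    view! {
        <button on:click=increment>"+1"</button>
        <p>"Doubled: " {doubled}</p>
    }
}
```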
## `cargo-leptos`
[`cargo-leptos`](https://github.com/leptos-rs/cargo-leptos) is a build tool that's designed to make it easy to build apps that run on both the client and the server, with seamless integration. The best way to get started with a real Leptos project right now is to use `cargo-leptos` and our starter templates for [Actix](https://github.com/leptos-rs/start) or [Axum](https://github.com/leptos-rs/start-axum).
```bash
cargo install cargo-leptos
cargo leptos new --git https://github.com/leptos-rs/start-axum
cargo leptos new --git https://github.com/leptos-rs/start
cd [your project name]
cargo leptos watch
```
@@ -125,7 +146,7 @@ Yes, I'm sure there are. You can see from the state of our issue tracker over
This may be the big one: "production ready" implies a certain orientation to a library: that you can basically use it, without any special knowledge of its internals or ability to contribute. Everyone has this at some level in their stack: for example I (@gbj) don't have the capacity or knowledge to contribute to something like `wasm-bindgen` at this point: I simply rely on it to work.
There are several people in the community using Leptos right now for many websites at work, who have also become significant contributors. There may be missing features that you need, and you may end up building them! But, if you're willing to contribute a few missing pieces along the way, the framework is most definitely usable for production applications, especially given the ecosystem of libraries that have sprung up around it.
There are several people in the community using Leptos right now for internal apps at work, who have also become significant contributors. I think this is the right level of production use for now. There may be missing features that you need, and you may end up building them! But for internal apps, if you're willing to build and contribute missing pieces along the way, the framework is definitely usable right now.
### Can I use this for native GUI?
@@ -136,7 +157,9 @@ Sure! Obviously the `view` macro is for generating DOM nodes but you can use the
- Use event listeners to update signals
- Create effects to update the UI
The 0.7 update originally set out to create a "generic rendering" approach that would allow us to reuse most of the same view logic to do all of the above. Unfortunately, this has had to be shelved for now due to difficulties encountered by the Rust compiler when building larger-scale applications with the number of generics spread throughout the codebase that this required. It's an approach I'm looking forward to exploring again in the future; feel free to reach out if you're interested in this kind of work.
I've put together a [very simple GTK example](https://github.com/leptos-rs/leptos/blob/main/examples/gtk/src/main.rs) so you can see what I mean.
The new rendering approach being developed for 0.7 supports “universal rendering,” i.e., it can use any rendering library that supports a small set of 6-8 functions. (This is intended as a layer over typical retained-mode, OOP-style GUI toolkits like the DOM, GTK, etc.) That future rendering work will allow creating native UI in a way that is much more similar to the declarative approach used by the web framework.
### How is this different from Yew?
@@ -146,14 +169,14 @@ Yew is the most-used library for Rust web UI development, but there are several
- **Performance:** This has huge performance implications: Leptos is simply much faster at both creating and updating the UI than Yew is.
- **Server integration:** Yew was created in an era in which browser-rendered single-page apps (SPAs) were the dominant paradigm. While Leptos supports client-side rendering, it also focuses on integrating with the server side of your application via server functions and multiple modes of serving HTML, including out-of-order streaming.
### How is this different from Dioxus?
- ### How is this different from Dioxus?
Like Leptos, Dioxus is a framework for building UIs using web technologies. However, there are significant differences in approach and features.
- **VDOM vs. fine-grained:** While Dioxus has a performant virtual DOM (VDOM), it still uses coarse-grained/component-scoped reactivity: changing a stateful value reruns the component function and diffs the old UI against the new one. Leptos components use a different mental model, creating (and returning) actual DOM nodes and setting up a reactive system to update those DOM nodes.
- **Web vs. desktop priorities:** Dioxus uses Leptos server functions in its fullstack mode, but does not have the same `<Suspense>`-based support for things like streaming HTML rendering, or share the same focus on holistic web performance. Leptos tends to prioritize holistic web performance (streaming HTML rendering, smaller WASM binary sizes, etc.), whereas Dioxus has an unparalleled experience when building desktop apps, because your application logic runs as a native Rust binary.
### How is this different from Sycamore?
- ### How is this different from Sycamore?
Sycamore and Leptos are both heavily influenced by SolidJS. At this point, Leptos has a larger community and ecosystem and is more actively developed. Other differences:


@@ -1,13 +1,13 @@
[package]
name = "throw_error"
version = "0.3.0"
edition = "2021"
version = "0.2.0-beta"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
repository = "https://github.com/leptos-rs/leptos"
description = "Utilities for wrapping, throwing, and catching errors."
rust-version.workspace = true
edition.workspace = true
[dependencies]
pin-project-lite = { workspace = true, default-features = true }
pin-project-lite = "0.2"


@@ -9,7 +9,7 @@ use std::{
error,
fmt::{self, Display},
future::Future,
ops,
mem, ops,
pin::Pin,
sync::Arc,
task::{Context, Poll},
@@ -17,6 +17,11 @@ use std::{
/* Wrapper Types */
/// This is a result type into which any error can be converted.
///
/// Results are stored as [`Error`].
pub type Result<T, E = Error> = core::result::Result<T, E>;
/// A generic wrapper for any error.
#[derive(Debug, Clone)]
#[repr(transparent)]
@@ -104,7 +109,7 @@ pub fn get_error_hook() -> Option<Arc<dyn ErrorHook>> {
/// Sets the current thread-local error hook, which will be invoked when [`throw`] is called.
pub fn set_error_hook(hook: Arc<dyn ErrorHook>) -> ResetErrorHookOnDrop {
ResetErrorHookOnDrop(
ERROR_HOOK.with_borrow_mut(|this| Option::replace(this, hook)),
ERROR_HOOK.with_borrow_mut(|this| mem::replace(this, Some(hook))),
)
}


@@ -1,47 +1,30 @@
[package]
name = "any_spawner"
version = "0.3.0"
edition = "2021"
version = "0.1.1"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
repository = "https://github.com/leptos-rs/leptos"
description = "Spawn asynchronous tasks in an executor-independent way."
edition.workspace = true
[dependencies]
async-executor = { optional = true , workspace = true, default-features = true }
futures = { workspace = true, default-features = true }
glib = { optional = true , workspace = true, default-features = true }
thiserror = { workspace = true , default-features = true }
tokio = { optional = true, default-features = false, features = [
"rt",
] , workspace = true }
tracing = { optional = true , workspace = true, default-features = true }
wasm-bindgen-futures = { optional = true , workspace = true, default-features = true }
[dev-dependencies]
futures-lite = { default-features = false , workspace = true }
tokio = { default-features = false, features = [
"rt",
"macros",
"time",
] , workspace = true }
wasm-bindgen-test = { workspace = true, default-features = true }
serial_test = { workspace = true, default-features = true }
futures = "0.3"
glib = { version = "0.19", optional = true }
thiserror = "1"
tokio = { version = "1", optional = true, default-features = false, features = [
"rt",
] }
tracing = { version = "0.1", optional = true }
wasm-bindgen-futures = { version = "0.4", optional = true }
[features]
async-executor = ["dep:async-executor"]
tracing = ["dep:tracing"]
tokio = ["dep:tokio"]
glib = ["dep:glib"]
wasm-bindgen = ["dep:wasm-bindgen-futures"]
futures-executor = ["futures/thread-pool", "futures/executor"]
[package.metadata.docs.rs]
all-features = true
rustdoc-args = ["--cfg", "docsrs"]
[package.metadata.cargo-all-features]
denylist = ["tracing"]
max_combination_size = 2


@@ -1,4 +1 @@
extend = [
{ path = "../cargo-make/main.toml" },
{ path = "../cargo-make/wasm-test.toml" },
]
extend = { path = "../cargo-make/main.toml" }


@@ -11,16 +11,18 @@
//! - no "join handle" or other result is returned from the spawn
//! - the `Future` must output `()`
//!
//! ```no_run
//! ```rust
//! use any_spawner::Executor;
//!
//! // make sure an Executor has been initialized with one of the init_ functions
//!
//! # if false {
//! // spawn a thread-safe Future
//! Executor::spawn(async { /* ... */ });
//!
//! // spawn a Future that is !Send
//! Executor::spawn_local(async { /* ... */ });
//! # }
//! ```
#![forbid(unsafe_code)]
@@ -30,72 +32,17 @@
use std::{future::Future, pin::Pin, sync::OnceLock};
use thiserror::Error;
/// A future that has been pinned.
pub type PinnedFuture<T> = Pin<Box<dyn Future<Output = T> + Send>>;
/// A future that has been pinned.
pub type PinnedLocalFuture<T> = Pin<Box<dyn Future<Output = T>>>;
pub(crate) type PinnedFuture<T> = Pin<Box<dyn Future<Output = T> + Send>>;
pub(crate) type PinnedLocalFuture<T> = Pin<Box<dyn Future<Output = T>>>;
// Type alias for the spawn function pointer.
type SpawnFn = fn(PinnedFuture<()>);
// Type alias for the spawn_local function pointer.
type SpawnLocalFn = fn(PinnedLocalFuture<()>);
// Type alias for the poll_local function pointer.
type PollLocalFn = fn();
/// Holds the function pointers for the current global executor.
#[derive(Clone, Copy)]
struct ExecutorFns {
spawn: SpawnFn,
spawn_local: SpawnLocalFn,
poll_local: PollLocalFn,
}
// Use a single OnceLock to ensure atomic initialization of all functions.
static EXECUTOR_FNS: OnceLock<ExecutorFns> = OnceLock::new();
// No-op functions to use when an executor doesn't support a specific operation.
#[cfg(any(feature = "tokio", feature = "wasm-bindgen", feature = "glib"))]
#[cold]
#[inline(never)]
fn no_op_poll() {}
#[cfg(all(not(feature = "wasm-bindgen"), not(debug_assertions)))]
#[cold]
#[inline(never)]
fn no_op_spawn(_: PinnedFuture<()>) {
#[cfg(debug_assertions)]
eprintln!(
"Warning: Executor::spawn called, but no global 'spawn' function is \
configured (perhaps only spawn_local is supported, e.g., on wasm \
without threading?)."
);
}
// Wasm panics if you spawn without an executor
#[cfg(feature = "wasm-bindgen")]
#[cold]
#[inline(never)]
fn no_op_spawn(_: PinnedFuture<()>) {
panic!(
"Executor::spawn called, but no global 'spawn' function is configured."
);
}
#[cfg(not(debug_assertions))]
#[cold]
#[inline(never)]
fn no_op_spawn_local(_: PinnedLocalFuture<()>) {
panic!(
"Executor::spawn_local called, but no global 'spawn_local' function \
is configured."
);
}
static SPAWN: OnceLock<fn(PinnedFuture<()>)> = OnceLock::new();
static SPAWN_LOCAL: OnceLock<fn(PinnedLocalFuture<()>)> = OnceLock::new();
/// Errors that can occur when using the executor.
#[derive(Error, Debug)]
pub enum ExecutorError {
/// The executor has already been set.
#[error("Global executor has already been set.")]
#[error("Executor has already been set.")]
AlreadySet,
}
@@ -104,160 +51,155 @@ pub struct Executor;
impl Executor {
/// Spawns a thread-safe [`Future`].
///
/// Uses the globally configured executor.
/// Panics if no global executor has been initialized.
#[inline(always)]
/// ```rust
/// use any_spawner::Executor;
/// # if false {
/// // spawn a thread-safe Future
/// Executor::spawn(async { /* ... */ });
/// # }
/// ```
#[track_caller]
pub fn spawn(fut: impl Future<Output = ()> + Send + 'static) {
let pinned_fut = Box::pin(fut);
if let Some(fns) = EXECUTOR_FNS.get() {
(fns.spawn)(pinned_fut)
if let Some(spawner) = SPAWN.get() {
spawner(Box::pin(fut))
} else {
// No global executor set.
handle_uninitialized_spawn(pinned_fut);
#[cfg(all(debug_assertions, feature = "tracing"))]
tracing::error!(
"At {}, tried to spawn a Future with Executor::spawn() before \
the Executor had been set.",
std::panic::Location::caller()
);
#[cfg(all(debug_assertions, not(feature = "tracing")))]
panic!(
"At {}, tried to spawn a Future with Executor::spawn() before \
the Executor had been set.",
std::panic::Location::caller()
);
}
}
/// Spawns a [`Future`] that cannot be sent across threads.
/// ```rust
/// use any_spawner::Executor;
///
/// Uses the globally configured executor.
/// Panics if no global executor has been initialized.
#[inline(always)]
/// # if false {
/// // spawn a thread-safe Future
/// Executor::spawn_local(async { /* ... */ });
/// # }
/// ```
#[track_caller]
pub fn spawn_local(fut: impl Future<Output = ()> + 'static) {
let pinned_fut = Box::pin(fut);
if let Some(fns) = EXECUTOR_FNS.get() {
(fns.spawn_local)(pinned_fut)
if let Some(spawner) = SPAWN_LOCAL.get() {
spawner(Box::pin(fut))
} else {
// No global executor set.
handle_uninitialized_spawn_local(pinned_fut);
#[cfg(all(debug_assertions, feature = "tracing"))]
tracing::error!(
"At {}, tried to spawn a Future with Executor::spawn_local() \
before the Executor had been set.",
std::panic::Location::caller()
);
#[cfg(all(debug_assertions, not(feature = "tracing")))]
panic!(
"At {}, tried to spawn a Future with Executor::spawn_local() \
before the Executor had been set.",
std::panic::Location::caller()
);
}
}
/// Waits until the next "tick" of the current async executor.
/// Respects the global executor.
#[inline(always)]
pub async fn tick() {
let (tx, rx) = futures::channel::oneshot::channel();
#[cfg(not(all(feature = "wasm-bindgen", target_family = "wasm")))]
Executor::spawn(async move {
_ = tx.send(());
});
#[cfg(all(feature = "wasm-bindgen", target_family = "wasm"))]
Executor::spawn_local(async move {
_ = tx.send(());
});
_ = rx.await;
}
/// Polls the global async executor.
///
/// Uses the globally configured executor.
/// Does nothing if the global executor does not support polling.
#[inline(always)]
pub fn poll_local() {
if let Some(fns) = EXECUTOR_FNS.get() {
(fns.poll_local)()
}
// If not initialized or doesn't support polling, do nothing gracefully.
}
}
impl Executor {
/// Globally sets the [`tokio`] runtime as the executor used to spawn tasks.
///
/// Returns `Err(_)` if a global executor has already been set.
/// Returns `Err(_)` if an executor has already been set.
///
/// Requires the `tokio` feature to be activated on this crate.
#[cfg(feature = "tokio")]
#[cfg_attr(docsrs, doc(cfg(feature = "tokio")))]
pub fn init_tokio() -> Result<(), ExecutorError> {
let executor_impl = ExecutorFns {
spawn: |fut| {
SPAWN
.set(|fut| {
tokio::spawn(fut);
},
spawn_local: |fut| {
})
.map_err(|_| ExecutorError::AlreadySet)?;
SPAWN_LOCAL
.set(|fut| {
tokio::task::spawn_local(fut);
},
// Tokio doesn't have an explicit global poll function like LocalPool::run_until_stalled
poll_local: no_op_poll,
};
EXECUTOR_FNS
.set(executor_impl)
.map_err(|_| ExecutorError::AlreadySet)
})
.map_err(|_| ExecutorError::AlreadySet)?;
Ok(())
}
/// Globally sets the [`wasm-bindgen-futures`] runtime as the executor used to spawn tasks.
///
/// Returns `Err(_)` if a global executor has already been set.
/// Returns `Err(_)` if an executor has already been set.
///
/// Requires the `wasm-bindgen` feature to be activated on this crate.
#[cfg(feature = "wasm-bindgen")]
#[cfg_attr(docsrs, doc(cfg(feature = "wasm-bindgen")))]
pub fn init_wasm_bindgen() -> Result<(), ExecutorError> {
let executor_impl = ExecutorFns {
// wasm-bindgen-futures only supports spawn_local
spawn: no_op_spawn,
spawn_local: |fut| {
SPAWN
.set(|fut| {
wasm_bindgen_futures::spawn_local(fut);
},
poll_local: no_op_poll,
};
EXECUTOR_FNS
.set(executor_impl)
.map_err(|_| ExecutorError::AlreadySet)
})
.map_err(|_| ExecutorError::AlreadySet)?;
SPAWN_LOCAL
.set(|fut| {
wasm_bindgen_futures::spawn_local(fut);
})
.map_err(|_| ExecutorError::AlreadySet)?;
Ok(())
}
/// Globally sets the [`glib`] runtime as the executor used to spawn tasks.
///
/// Returns `Err(_)` if a global executor has already been set.
/// Returns `Err(_)` if an executor has already been set.
///
/// Requires the `glib` feature to be activated on this crate.
#[cfg(feature = "glib")]
#[cfg_attr(docsrs, doc(cfg(feature = "glib")))]
pub fn init_glib() -> Result<(), ExecutorError> {
let executor_impl = ExecutorFns {
spawn: |fut| {
SPAWN
.set(|fut| {
let main_context = glib::MainContext::default();
main_context.spawn(fut);
},
spawn_local: |fut| {
})
.map_err(|_| ExecutorError::AlreadySet)?;
SPAWN_LOCAL
.set(|fut| {
let main_context = glib::MainContext::default();
main_context.spawn_local(fut);
},
// Glib needs event loop integration, explicit polling isn't the standard model here.
poll_local: no_op_poll,
};
EXECUTOR_FNS
.set(executor_impl)
.map_err(|_| ExecutorError::AlreadySet)
})
.map_err(|_| ExecutorError::AlreadySet)?;
Ok(())
}
/// Globally sets the [`futures`] executor as the executor used to spawn tasks,
/// lazily creating a thread pool to spawn tasks into.
///
/// Returns `Err(_)` if a global executor has already been set.
/// Returns `Err(_)` if an executor has already been set.
///
/// Requires the `futures-executor` feature to be activated on this crate.
#[cfg(feature = "futures-executor")]
#[cfg_attr(docsrs, doc(cfg(feature = "futures-executor")))]
pub fn init_futures_executor() -> Result<(), ExecutorError> {
use futures::{
executor::{LocalPool, LocalSpawner, ThreadPool},
executor::{LocalPool, ThreadPool},
task::{LocalSpawnExt, SpawnExt},
};
use std::cell::RefCell;
// Keep the lazy-init ThreadPool and thread-local LocalPool for spawn_local impl
static THREAD_POOL: OnceLock<ThreadPool> = OnceLock::new();
thread_local! {
static LOCAL_POOL: RefCell<LocalPool> = RefCell::new(LocalPool::new());
// SPAWNER is derived from LOCAL_POOL, keep it for efficiency inside the closure
static SPAWNER: LocalSpawner = LOCAL_POOL.with(|pool| pool.borrow().spawner());
static LOCAL_POOL: LocalPool = LocalPool::new();
}
fn get_thread_pool() -> &'static ThreadPool {
@@ -267,248 +209,37 @@ impl Executor {
})
}
let executor_impl = ExecutorFns {
spawn: |fut| {
SPAWN
.set(|fut| {
get_thread_pool()
.spawn(fut)
.expect("failed to spawn future on ThreadPool");
},
spawn_local: |fut| {
// Use the thread_local SPAWNER derived from LOCAL_POOL
SPAWNER.with(|spawner| {
spawner
.spawn_local(fut)
.expect("failed to spawn local future");
});
},
poll_local: || {
// Use the thread_local LOCAL_POOL
LOCAL_POOL.with(|pool| {
// Use try_borrow_mut to prevent panic during re-entrant calls
if let Ok(mut pool) = pool.try_borrow_mut() {
pool.run_until_stalled();
}
// If already borrowed, we're likely in a nested poll, so do nothing.
});
},
};
EXECUTOR_FNS
.set(executor_impl)
.map_err(|_| ExecutorError::AlreadySet)
}
/// Globally sets the [`async_executor`] executor as the executor used to spawn tasks,
/// lazily creating a thread pool to spawn tasks into.
///
/// Returns `Err(_)` if a global executor has already been set.
///
/// Requires the `async-executor` feature to be activated on this crate.
#[cfg(feature = "async-executor")]
#[cfg_attr(docsrs, doc(cfg(feature = "async-executor")))]
pub fn init_async_executor() -> Result<(), ExecutorError> {
use async_executor::{Executor as AsyncExecutor, LocalExecutor};
// Keep the lazy-init global Executor and thread-local LocalExecutor for spawn_local impl
static ASYNC_EXECUTOR: OnceLock<AsyncExecutor<'static>> =
OnceLock::new();
thread_local! {
static LOCAL_EXECUTOR_POOL: LocalExecutor<'static> = const { LocalExecutor::new() };
}
fn get_async_executor() -> &'static AsyncExecutor<'static> {
ASYNC_EXECUTOR.get_or_init(AsyncExecutor::new)
}
let executor_impl = ExecutorFns {
spawn: |fut| {
get_async_executor().spawn(fut).detach();
},
spawn_local: |fut| {
LOCAL_EXECUTOR_POOL.with(|pool| pool.spawn(fut).detach());
},
poll_local: || {
LOCAL_EXECUTOR_POOL.with(|pool| {
// try_tick polls the local executor without blocking
// This prevents issues if called recursively or from within a task.
pool.try_tick();
});
},
};
EXECUTOR_FNS
.set(executor_impl)
.map_err(|_| ExecutorError::AlreadySet)
}
/// Globally sets a custom executor as the executor used to spawn tasks.
///
/// Requires the custom executor to be `Send + Sync` as it will be stored statically.
///
/// Returns `Err(_)` if a global executor has already been set.
pub fn init_custom_executor(
custom_executor: impl CustomExecutor + Send + Sync + 'static,
) -> Result<(), ExecutorError> {
// Store the custom executor instance itself to call its methods.
// Use Box for dynamic dispatch.
static CUSTOM_EXECUTOR_INSTANCE: OnceLock<
Box<dyn CustomExecutor + Send + Sync>,
> = OnceLock::new();
CUSTOM_EXECUTOR_INSTANCE
.set(Box::new(custom_executor))
.expect("failed to spawn future");
})
.map_err(|_| ExecutorError::AlreadySet)?;
// Now set the ExecutorFns using the stored instance
let executor_impl = ExecutorFns {
spawn: |fut| {
// Unwrap is safe because we just set it successfully or returned Err.
CUSTOM_EXECUTOR_INSTANCE.get().unwrap().spawn(fut);
},
spawn_local: |fut| {
CUSTOM_EXECUTOR_INSTANCE.get().unwrap().spawn_local(fut);
},
poll_local: || {
CUSTOM_EXECUTOR_INSTANCE.get().unwrap().poll_local();
},
};
EXECUTOR_FNS
.set(executor_impl)
.map_err(|_| ExecutorError::AlreadySet)
// If setting EXECUTOR_FNS fails (extremely unlikely race if called *concurrently*
// with another init_* after CUSTOM_EXECUTOR_INSTANCE was set), we technically
// leave CUSTOM_EXECUTOR_INSTANCE set but EXECUTOR_FNS not. This is an edge case,
// but the primary race condition is solved.
}
/// Sets a custom executor *for the current thread only*.
///
/// This overrides the global executor for calls to `spawn`, `spawn_local`, and `poll_local`
/// made *from the current thread*. It does not affect other threads or the global state.
///
/// The provided `custom_executor` must implement [`CustomExecutor`] and `'static`, but does
/// **not** need to be `Send` or `Sync`.
///
/// Returns `Err(ExecutorError::AlreadySet)` if a *local* executor has already been set
/// *for this thread*.
pub fn init_local_custom_executor(
custom_executor: impl CustomExecutor + 'static,
) -> Result<(), ExecutorError> {
// Store the custom executor instance itself to call its methods.
// Use Box for dynamic dispatch.
thread_local! {
static CUSTOM_EXECUTOR_INSTANCE: OnceLock<
Box<dyn CustomExecutor>,
> = OnceLock::new();
};
CUSTOM_EXECUTOR_INSTANCE.with(|this| {
this.set(Box::new(custom_executor))
.map_err(|_| ExecutorError::AlreadySet)
})?;
// Now set the ExecutorFns using the stored instance
let executor_impl = ExecutorFns {
spawn: |fut| {
// Unwrap is safe because we just set it successfully or returned Err.
CUSTOM_EXECUTOR_INSTANCE
.with(|this| this.get().unwrap().spawn(fut));
},
spawn_local: |fut| {
CUSTOM_EXECUTOR_INSTANCE
.with(|this| this.get().unwrap().spawn_local(fut));
},
poll_local: || {
CUSTOM_EXECUTOR_INSTANCE
.with(|this| this.get().unwrap().poll_local());
},
};
EXECUTOR_FNS
.set(executor_impl)
.map_err(|_| ExecutorError::AlreadySet)
SPAWN_LOCAL
.set(|fut| {
LOCAL_POOL.with(|pool| {
let spawner = pool.spawner();
spawner.spawn_local(fut).expect("failed to spawn future");
});
})
.map_err(|_| ExecutorError::AlreadySet)?;
Ok(())
}
}
/// A trait for custom executors.
/// Custom executors can be used to integrate with any executor that supports spawning futures.
///
/// If used with `init_custom_executor`, the implementation must be `Send + Sync + 'static`.
///
/// All methods can be called recursively. Implementors should be mindful of potential
/// deadlocks or excessive resource consumption if recursive calls are not handled carefully
/// (e.g., using `try_borrow_mut` or non-blocking polls within implementations).
pub trait CustomExecutor {
/// Spawns a future, usually on a thread pool.
fn spawn(&self, fut: PinnedFuture<()>);
/// Spawns a local future. May require calling `poll_local` to make progress.
fn spawn_local(&self, fut: PinnedLocalFuture<()>);
/// Polls the executor, if it supports polling. Implementations should ideally be
/// non-blocking or use mechanisms like `try_tick` or `try_borrow_mut` to handle
/// re-entrant calls safely.
fn poll_local(&self);
}
// Ensure CustomExecutor is object-safe
#[allow(dead_code)]
fn test_object_safety(_: Box<dyn CustomExecutor + Send + Sync>) {} // Added Send + Sync constraint here for global usage
/// Handles the case where `Executor::spawn` is called without an initialized executor.
#[cold] // Less likely path
#[inline(never)]
#[track_caller]
fn handle_uninitialized_spawn(_fut: PinnedFuture<()>) {
let caller = std::panic::Location::caller();
#[cfg(all(debug_assertions, feature = "tracing"))]
{
tracing::error!(
target: "any_spawner",
spawn_caller=%caller,
"Executor::spawn called before a global executor was initialized. Task dropped."
);
// Drop the future implicitly after logging
drop(_fut);
}
#[cfg(all(debug_assertions, not(feature = "tracing")))]
{
panic!(
"At {caller}, tried to spawn a Future with Executor::spawn() \
before a global executor was initialized."
);
}
// In release builds (without tracing), call the specific no-op function.
#[cfg(not(debug_assertions))]
{
no_op_spawn(_fut);
}
}
/// Handles the case where `Executor::spawn_local` is called without an initialized executor.
#[cold] // Less likely path
#[inline(never)]
#[track_caller]
fn handle_uninitialized_spawn_local(_fut: PinnedLocalFuture<()>) {
let caller = std::panic::Location::caller();
#[cfg(all(debug_assertions, feature = "tracing"))]
{
tracing::error!(
target: "any_spawner",
spawn_caller=%caller,
"Executor::spawn_local called before a global executor was initialized. \
Task likely dropped or panicked."
);
// Fall through to panic or no-op depending on build/target
}
#[cfg(all(debug_assertions, not(feature = "tracing")))]
{
panic!(
"At {caller}, tried to spawn a Future with \
Executor::spawn_local() before a global executor was initialized."
);
}
// In release builds (without tracing), call the specific no-op function (which usually panics).
#[cfg(not(debug_assertions))]
{
no_op_spawn_local(_fut);
}
}
#[cfg(test)]
mod tests {
#[cfg(feature = "futures-executor")]
#[test]
fn can_spawn_local_future() {
use crate::Executor;
use std::rc::Rc;
Executor::init_futures_executor().expect("couldn't set executor");
let rc = Rc::new(());
Executor::spawn_local(async {
_ = rc;
});
Executor::spawn(async {});
}
}

View File

@@ -1,24 +0,0 @@
use any_spawner::{Executor, ExecutorError};
#[test]
fn test_already_set_error() {
struct SimpleExecutor;
impl any_spawner::CustomExecutor for SimpleExecutor {
fn spawn(&self, _fut: any_spawner::PinnedFuture<()>) {}
fn spawn_local(&self, _fut: any_spawner::PinnedLocalFuture<()>) {}
fn poll_local(&self) {}
}
// First initialization should succeed
Executor::init_custom_executor(SimpleExecutor)
.expect("First initialization failed");
// Second initialization should fail with AlreadySet error
let result = Executor::init_custom_executor(SimpleExecutor);
assert!(matches!(result, Err(ExecutorError::AlreadySet)));
// Even the first local initialization should fail, because an executor is already set
let result = Executor::init_local_custom_executor(SimpleExecutor);
assert!(matches!(result, Err(ExecutorError::AlreadySet)));
}

View File

@@ -1,74 +0,0 @@
#![cfg(feature = "async-executor")]
use std::{
future::Future,
pin::Pin,
sync::{Arc, Mutex},
};
// A simple async executor for testing
struct TestExecutor {
tasks: Mutex<Vec<Pin<Box<dyn Future<Output = ()> + Send + 'static>>>>,
}
impl TestExecutor {
fn new() -> Self {
TestExecutor {
tasks: Mutex::new(Vec::new()),
}
}
fn spawn<F>(&self, future: F)
where
F: Future<Output = ()> + Send + 'static,
{
self.tasks.lock().unwrap().push(Box::pin(future));
}
fn run_all(&self) {
// Take all tasks out to process them
let tasks = self.tasks.lock().unwrap().drain(..).collect::<Vec<_>>();
// Poll each task once with a noop waker; the simple futures in this test
// complete on their first poll.
for mut task in tasks {
futures::executor::block_on(async {
unsafe {
let task_mut = Pin::new_unchecked(&mut task);
let _ = std::future::Future::poll(
task_mut,
&mut std::task::Context::from_waker(
futures::task::noop_waker_ref(),
),
);
}
});
}
}
}
#[test]
fn test_async_executor() {
let executor = Arc::new(TestExecutor::new());
let executor_clone = executor.clone();
// Create a spawner function that will use our test executor
let spawner = move |future| {
executor_clone.spawn(future);
};
// Prepare test data
let counter = Arc::new(Mutex::new(0));
let counter_clone = counter.clone();
// Use the spawner to spawn a task
spawner(async move {
*counter_clone.lock().unwrap() += 1;
});
// Run all tasks
executor.run_all();
// Check if the task completed correctly
assert_eq!(*counter.lock().unwrap(), 1);
}

View File

@@ -1,63 +0,0 @@
use any_spawner::Executor;
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
#[test]
fn test_custom_executor() {
// Define a simple custom executor
struct TestExecutor {
spawn_called: Arc<AtomicBool>,
spawn_local_called: Arc<AtomicBool>,
poll_local_called: Arc<AtomicBool>,
}
impl any_spawner::CustomExecutor for TestExecutor {
fn spawn(&self, fut: any_spawner::PinnedFuture<()>) {
self.spawn_called.store(true, Ordering::SeqCst);
// Execute the future immediately (this works for simple test futures)
futures::executor::block_on(fut);
}
fn spawn_local(&self, fut: any_spawner::PinnedLocalFuture<()>) {
self.spawn_local_called.store(true, Ordering::SeqCst);
// Execute the future immediately
futures::executor::block_on(fut);
}
fn poll_local(&self) {
self.poll_local_called.store(true, Ordering::SeqCst);
}
}
let spawn_called = Arc::new(AtomicBool::new(false));
let spawn_local_called = Arc::new(AtomicBool::new(false));
let poll_local_called = Arc::new(AtomicBool::new(false));
let executor = TestExecutor {
spawn_called: spawn_called.clone(),
spawn_local_called: spawn_local_called.clone(),
poll_local_called: poll_local_called.clone(),
};
// Initialize with our custom executor
Executor::init_custom_executor(executor)
.expect("Failed to initialize custom executor");
// Test spawn
Executor::spawn(async {
// Simple task
});
assert!(spawn_called.load(Ordering::SeqCst));
// Test spawn_local
Executor::spawn_local(async {
// Simple local task
});
assert!(spawn_local_called.load(Ordering::SeqCst));
// Test poll_local
Executor::poll_local();
assert!(poll_local_called.load(Ordering::SeqCst));
}

View File

@@ -1,56 +0,0 @@
#![cfg(feature = "futures-executor")]
use any_spawner::{CustomExecutor, Executor, PinnedFuture, PinnedLocalFuture};
#[test]
fn can_create_custom_executor() {
use futures::{
executor::{LocalPool, LocalSpawner},
task::LocalSpawnExt,
};
use std::{
cell::RefCell,
sync::{
atomic::{AtomicUsize, Ordering},
Arc,
},
};
thread_local! {
static LOCAL_POOL: RefCell<LocalPool> = RefCell::new(LocalPool::new());
static SPAWNER: LocalSpawner = LOCAL_POOL.with(|pool| pool.borrow().spawner());
}
struct CustomFutureExecutor;
impl CustomExecutor for CustomFutureExecutor {
fn spawn(&self, _fut: PinnedFuture<()>) {
panic!("not supported in this test");
}
fn spawn_local(&self, fut: PinnedLocalFuture<()>) {
SPAWNER.with(|spawner| {
spawner.spawn_local(fut).expect("failed to spawn future");
});
}
fn poll_local(&self) {
LOCAL_POOL.with(|pool| {
if let Ok(mut pool) = pool.try_borrow_mut() {
pool.run_until_stalled();
}
// If we couldn't borrow_mut, we're in a nested call to poll, so we don't need to do anything.
});
}
}
Executor::init_custom_executor(CustomFutureExecutor)
.expect("couldn't set executor");
let counter = Arc::new(AtomicUsize::new(0));
let counter_clone = Arc::clone(&counter);
Executor::spawn_local(async move {
counter_clone.store(1, Ordering::Release);
});
Executor::poll_local();
assert_eq!(counter.load(Ordering::Acquire), 1);
}

View File

@@ -1,28 +0,0 @@
#![cfg(feature = "tokio")]
use any_spawner::Executor;
use std::{
sync::{Arc, Mutex},
time::Duration,
};
#[tokio::test]
async fn test_executor_tick() {
// Initialize the tokio executor
Executor::init_tokio().expect("Failed to initialize tokio executor");
let value = Arc::new(Mutex::new(false));
let value_clone = value.clone();
// Spawn a task that sets the value after a tick
Executor::spawn(async move {
Executor::tick().await;
*value_clone.lock().unwrap() = true;
});
// Allow some time for the task to complete
tokio::time::sleep(Duration::from_millis(50)).await;
// Check that the value was set
assert!(*value.lock().unwrap());
}

View File

@@ -1,44 +0,0 @@
#![cfg(feature = "futures-executor")]
use any_spawner::Executor;
use futures::channel::oneshot;
use std::{
sync::{Arc, Mutex},
time::Duration,
};
#[test]
fn test_futures_executor() {
// Initialize the futures executor
Executor::init_futures_executor()
.expect("Failed to initialize futures executor");
let (tx, rx) = oneshot::channel();
let result = Arc::new(Mutex::new(None));
let result_clone = result.clone();
// Spawn a task
Executor::spawn(async move {
tx.send(84).expect("Failed to send value");
});
// Spawn a task that waits for the result
Executor::spawn(async move {
match rx.await {
Ok(val) => *result_clone.lock().unwrap() = Some(val),
Err(_) => panic!("Failed to receive value"),
}
});
// Poll a few times to ensure the task completes
for _ in 0..10 {
Executor::poll_local();
std::thread::sleep(Duration::from_millis(10));
if result.lock().unwrap().is_some() {
break;
}
}
assert_eq!(*result.lock().unwrap(), Some(84));
}

View File

@@ -1,37 +0,0 @@
#![cfg(feature = "futures-executor")]
use any_spawner::Executor;
// All tests in this file use the same executor.
#[test]
fn can_spawn_local_future() {
use std::rc::Rc;
let _ = Executor::init_futures_executor();
let rc = Rc::new(());
Executor::spawn_local(async {
_ = rc;
});
Executor::spawn(async {});
}
#[test]
fn can_make_local_progress() {
use std::sync::{
atomic::{AtomicUsize, Ordering},
Arc,
};
let _ = Executor::init_futures_executor();
let counter = Arc::new(AtomicUsize::new(0));
Executor::spawn_local({
let counter = Arc::clone(&counter);
async move {
assert_eq!(counter.fetch_add(1, Ordering::AcqRel), 0);
Executor::spawn_local(async {
// Should not crash
});
}
});
Executor::poll_local();
assert_eq!(counter.load(Ordering::Acquire), 1);
}

View File

@@ -1,151 +0,0 @@
#![cfg(feature = "glib")]
use any_spawner::Executor;
use glib::{MainContext, MainLoop};
use serial_test::serial;
use std::{
cell::Cell,
future::Future,
rc::Rc,
sync::{
atomic::{AtomicBool, Ordering},
Arc, Mutex,
},
time::Duration,
};
// Helper to run a future to completion on the default glib MainContext.
// Blocks until the future has finished and the main loop has quit.
fn run_on_glib_context<F>(fut: F)
where
F: Future<Output = ()> + Send + 'static,
{
let _ = Executor::init_glib();
let context = MainContext::default();
let main_loop = MainLoop::new(Some(&context), false);
let main_loop_clone = main_loop.clone();
Executor::spawn(async move {
fut.await;
main_loop_clone.quit();
});
main_loop.run();
}
// Helper to run a local (!Send) future on the glib context.
fn run_local_on_glib_context<F>(fut: F)
where
F: Future<Output = ()> + 'static,
{
let _ = Executor::init_glib();
let context = MainContext::default();
let main_loop = MainLoop::new(Some(&context), false);
let main_loop_clone = main_loop.clone();
Executor::spawn_local(async move {
fut.await;
main_loop_clone.quit();
});
main_loop.run();
}
// This test must run after a test that successfully initializes glib,
// or within its own process.
#[test]
#[serial]
fn test_glib_spawn() {
let success_flag = Arc::new(AtomicBool::new(false));
let flag_clone = success_flag.clone();
run_on_glib_context(async move {
// Simulate async work
futures_lite::future::yield_now().await;
flag_clone.store(true, Ordering::SeqCst);
// run_on_glib_context drives the main loop until this future completes,
// so wait briefly here to let the main context process any other pending work.
glib::timeout_future(Duration::from_millis(10)).await;
});
assert!(
success_flag.load(Ordering::SeqCst),
"Spawned future did not complete successfully"
);
}
// Similar conditions as test_glib_spawn regarding initialization state.
#[test]
#[serial]
fn test_glib_spawn_local() {
let success_flag = Rc::new(Cell::new(false));
let flag_clone = success_flag.clone();
run_local_on_glib_context(async move {
// Use Rc to make the future !Send
let non_send_data = Rc::new(Cell::new(10));
let data = non_send_data.get();
assert_eq!(data, 10, "Rc data should be accessible");
non_send_data.set(20); // Modify non-Send data
// Simulate async work
futures_lite::future::yield_now().await;
assert_eq!(
non_send_data.get(),
20,
"Rc data should persist modification"
);
flag_clone.set(true);
// Wait a tiny bit
glib::timeout_future(Duration::from_millis(10)).await;
});
assert!(
success_flag.get(),
"Spawned local future did not complete successfully"
);
}
// Test Executor::tick with glib backend
#[test]
#[serial]
fn test_glib_tick() {
run_on_glib_context(async {
let value = Arc::new(Mutex::new(false));
let value_clone = value.clone();
// Spawn a task that sets the value after a tick
Executor::spawn(async move {
Executor::tick().await;
*value_clone.lock().unwrap() = true;
});
// Allow some time for the task to complete
glib::timeout_future(Duration::from_millis(10)).await;
// Check that the value was set
assert!(*value.lock().unwrap());
});
}
// Test Executor::poll_local with glib backend (should be a no-op)
#[test]
#[serial]
fn test_glib_poll_local_is_no_op() {
// Ensure glib executor is initialized
let _ = Executor::init_glib();
// poll_local for glib is configured as a no-op
// Calling it should not panic or cause issues.
Executor::poll_local();
Executor::poll_local();
println!("Executor::poll_local called successfully (expected no-op).");
}

View File

@@ -1,54 +0,0 @@
use any_spawner::Executor;
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
#[test]
fn test_local_custom_executor() {
// Define a thread-local custom executor
struct LocalTestExecutor {
spawn_called: Arc<AtomicBool>,
spawn_local_called: Arc<AtomicBool>,
}
impl any_spawner::CustomExecutor for LocalTestExecutor {
fn spawn(&self, fut: any_spawner::PinnedFuture<()>) {
self.spawn_called.store(true, Ordering::SeqCst);
futures::executor::block_on(fut);
}
fn spawn_local(&self, fut: any_spawner::PinnedLocalFuture<()>) {
self.spawn_local_called.store(true, Ordering::SeqCst);
futures::executor::block_on(fut);
}
fn poll_local(&self) {
// No-op for this test
}
}
let local_spawn_called = Arc::new(AtomicBool::new(false));
let local_spawn_local_called = Arc::new(AtomicBool::new(false));
let local_executor = LocalTestExecutor {
spawn_called: local_spawn_called.clone(),
spawn_local_called: local_spawn_local_called.clone(),
};
// Initialize a thread-local executor
Executor::init_local_custom_executor(local_executor)
.expect("Failed to initialize local custom executor");
// Test spawn - should use the thread-local executor
Executor::spawn(async {
// Simple task
});
assert!(local_spawn_called.load(Ordering::SeqCst));
// Test spawn_local - should use the thread-local executor
Executor::spawn_local(async {
// Simple local task
});
assert!(local_spawn_local_called.load(Ordering::SeqCst));
}

View File

@@ -1,35 +0,0 @@
#![cfg(feature = "tokio")]
use any_spawner::Executor;
use futures::channel::oneshot;
use std::sync::{Arc, Mutex};
#[tokio::test]
async fn test_multiple_tasks() {
Executor::init_tokio().expect("Failed to initialize tokio executor");
let counter = Arc::new(Mutex::new(0));
let tasks = 10;
let mut handles = Vec::new();
// Spawn multiple tasks that increment the counter
for _ in 0..tasks {
let counter_clone = counter.clone();
let (tx, rx) = oneshot::channel();
Executor::spawn(async move {
*counter_clone.lock().unwrap() += 1;
tx.send(()).expect("Failed to send completion signal");
});
handles.push(rx);
}
// Wait for all tasks to complete
for handle in handles {
handle.await.expect("Task failed");
}
// Verify that all tasks incremented the counter
assert_eq!(*counter.lock().unwrap(), tasks);
}

View File

@@ -1,20 +0,0 @@
#![cfg(feature = "tokio")]
use any_spawner::Executor;
use futures::channel::oneshot;
#[tokio::test]
async fn test_tokio_executor() {
// Initialize the tokio executor
Executor::init_tokio().expect("Failed to initialize tokio executor");
let (tx, rx) = oneshot::channel();
// Spawn a task that sends a value
Executor::spawn(async move {
tx.send(42).expect("Failed to send value");
});
// Wait for the spawned task to complete
assert_eq!(rx.await.unwrap(), 42);
}

View File

@@ -1,88 +0,0 @@
#![cfg(all(feature = "wasm-bindgen", target_family = "wasm"))]
use any_spawner::Executor;
use futures::channel::oneshot;
use std::sync::{
atomic::{AtomicBool, Ordering},
Arc,
};
use wasm_bindgen_test::*;
wasm_bindgen_test_configure!(run_in_browser);
#[wasm_bindgen_test]
async fn test_wasm_bindgen_spawn_local() {
// Initialize the wasm-bindgen executor
let _ = Executor::init_wasm_bindgen();
// Create a channel to verify the task completes
let (tx, rx) = oneshot::channel();
// Spawn a local task (wasm doesn't support sending futures between threads)
Executor::spawn_local(async move {
// Simulate some async work
Executor::tick().await;
tx.send(42).expect("Failed to send result");
});
// Wait for the task to complete
let result = rx.await.expect("Failed to receive result");
assert_eq!(result, 42);
}
#[wasm_bindgen_test]
async fn test_wasm_bindgen_tick() {
// Initialize the wasm-bindgen executor if not already initialized
let _ = Executor::init_wasm_bindgen();
let flag = Arc::new(AtomicBool::new(false));
let flag_clone = flag.clone();
// Spawn a task that will set the flag
Executor::spawn_local(async move {
flag_clone.store(true, Ordering::SeqCst);
});
// Wait for a tick, which should allow the spawned task to run
Executor::tick().await;
// Verify the flag was set
assert!(flag.load(Ordering::SeqCst));
}
#[wasm_bindgen_test]
async fn test_multiple_wasm_bindgen_tasks() {
// Initialize once for all tests
let _ = Executor::init_wasm_bindgen();
// Create channels for multiple tasks
let (tx1, rx1) = oneshot::channel();
let (tx2, rx2) = oneshot::channel();
// Spawn multiple tasks
Executor::spawn_local(async move {
tx1.send("task1").expect("Failed to send from task1");
});
Executor::spawn_local(async move {
tx2.send("task2").expect("Failed to send from task2");
});
// Wait for both tasks to complete
let (result1, result2) = futures::join!(rx1, rx2);
assert_eq!(result1.unwrap(), "task1");
assert_eq!(result2.unwrap(), "task2");
}
// This test verifies that spawn (not local) fails on wasm as expected
#[wasm_bindgen_test]
#[should_panic]
fn test_wasm_bindgen_spawn_errors() {
let _ = Executor::init_wasm_bindgen();
// Using should_panic to test that Executor::spawn panics in wasm
Executor::spawn(async {
// This should panic since wasm-bindgen doesn't support Send futures
});
}

View File

@@ -2,34 +2,35 @@
name = "benchmarks"
version = "0.1.0"
edition = "2021"
rust-version.workspace = true
[dependencies]
l0410 = { package = "leptos", version = "0.4.10", features = [
"nightly",
"ssr",
"nightly",
"ssr",
] }
leptos = { path = "../leptos", features = ["ssr", "nightly"] }
leptos_reactive = { path = "../leptos_reactive", features = ["ssr", "nightly"] }
tachydom = { git = "https://github.com/gbj/tachys", features = [
"nightly",
"leptos",
"nightly",
"leptos",
] }
tachy_maccy = { git = "https://github.com/gbj/tachys", features = ["nightly"] }
sycamore = { version = "0.8.0", features = ["ssr"] }
yew = { version = "0.20.0", features = ["ssr"] }
tokio-test = "0.4.0"
miniserde = "0.1.0"
gloo = "0.8.0"
uuid = { version = "1.0", features = ["serde", "v4", "wasm-bindgen"] }
wasm-bindgen = "0.2.100"
lazy_static = "1.0"
log = "0.4.0"
strum = "0.24.0"
strum_macros = "0.24.0"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
tera = "1.0"
sycamore = { version = "0.8", features = ["ssr"] }
yew = { version = "0.20", features = ["ssr"] }
tokio-test = "0.4"
miniserde = "0.1"
gloo = "0.8"
uuid = { version = "1", features = ["serde", "v4", "wasm-bindgen"] }
wasm-bindgen = "0.2"
lazy_static = "1"
log = "0.4"
strum = "0.24"
strum_macros = "0.24"
serde = { version = "1", features = ["derive", "rc"] }
serde_json = "1"
tera = "1"
[dependencies.web-sys]
version = "0.3.0"
version = "0.3"
features = ["Window", "Document", "HtmlElement", "HtmlInputElement"]

View File

@@ -18,7 +18,7 @@ fn leptos_ssr_bench(b: &mut Bencher) {
}
}
let rendered = view! {
let rendered = view! {
<main>
<h1>"Welcome to our benchmark page."</h1>
<p>"Here's some introductory text."</p>
@@ -58,7 +58,7 @@ fn tachys_ssr_bench(b: &mut Bencher) {
}
}
let rendered = view! {
let rendered = view! {
<main>
<h1>"Welcome to our benchmark page."</h1>
<p>"Here's some introductory text."</p>
@@ -92,13 +92,13 @@ fn tera_ssr_bench(b: &mut Bencher) {
{% endfor %}
</main>"#;
static TERA: LazyLock<Tera> = LazyLock::new(|| {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
});
lazy_static::lazy_static! {
static ref TERA: Tera = {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
};
}
#[derive(Serialize, Deserialize)]
struct Counter {

View File

@@ -55,7 +55,7 @@ static TEMPLATE: &str = r#"<main>
{% else %}
<li><a href="/">All</a></li>
{% endif %}
{% if mode_active %}
<li><a href="/active" class="selected">Active</a></li>
{% else %}
@@ -91,13 +91,13 @@ fn tera_todomvc_ssr(b: &mut Bencher) {
use serde::{Deserialize, Serialize};
use tera::*;
static TERA: LazyLock<Tera> = LazyLock::new(|| {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
});
lazy_static::lazy_static! {
static ref TERA: Tera = {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
};
}
#[derive(Serialize, Deserialize)]
struct Todo {
@@ -131,13 +131,13 @@ fn tera_todomvc_ssr_1000(b: &mut Bencher) {
use serde::{Deserialize, Serialize};
use tera::*;
static TERA: LazyLock<Tera> = LazyLock::new(|| {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
});
lazy_static::lazy_static! {
static ref TERA: Tera = {
let mut tera = Tera::default();
tera.add_raw_templates(vec![("template.html", TEMPLATE)]).unwrap();
tera
};
}
#[derive(Serialize, Deserialize)]
struct Todo {

View File

@@ -1,14 +0,0 @@
[tasks.check-minimal-versions]
condition = { channels = ["nightly"] }
command = "cargo"
args = [
"all-features",
"minimal-versions",
"check",
"--ignore-private",
"--detach-path-deps",
"--direct",
]
install_script = '''
cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
'''

cargo-make/check.toml Normal file
View File

@@ -0,0 +1,7 @@
[tasks.check]
alias = "check-all"
[tasks.check-all]
command = "cargo"
args = ["check-all-features"]
install_crate = "cargo-all-features"

View File

@@ -6,15 +6,13 @@ env = { LEPTOS_PROJECT_DIRECTORY = "../" }
args = ["fmt", "--", "--check", "--config-path", "${LEPTOS_PROJECT_DIRECTORY}"]
[tasks.clippy-each-feature]
dependencies = ["install-clippy"]
command = "cargo"
args = [
"all-features",
"clippy",
"--no-deps",
"--",
"-D",
"clippy::print_stdout",
"clippy",
"--all-features",
"--no-deps",
"--",
"-D",
"clippy::print_stdout",
]
install_script = '''
cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
'''

View File

@@ -1,7 +1,7 @@
extend = [
{ path = "./lint.toml" },
{ path = "./test.toml" },
{ path = "./check-minimal-versions.toml" },
{ path = "./check.toml" },
{ path = "./lint.toml" },
{ path = "./test.toml" },
]
[env]
@@ -12,4 +12,4 @@ LEPTOS_OUTPUT_NAME = "ci" # allows examples to check/build without cargo-leptos
RUSTFLAGS = "-D warnings"
[tasks.ci]
dependencies = ["lint", "test-each-feature", "doctests"]
dependencies = ["lint", "test"]

View File

@@ -1,16 +1,7 @@
[tasks.test-each-feature]
env = { "NEXTEST_NO_TESTS" = "warn" }
command = "cargo"
args = ["all-features", "nextest", "run", "--all-targets"]
install_script = '''
cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
'''
[tasks.test]
alias = "test-all"
# This can be removed once doctests is supported in nextest
# https://github.com/nextest-rs/nextest/issues/16
[tasks.doctests]
[tasks.test-all]
command = "cargo"
args = ["all-features", "test", "--doc"]
install_script = '''
cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
'''
args = ["test-all-features"]
install_crate = "cargo-all-features"

View File

@@ -1,7 +0,0 @@
[tasks.post-test]
dependencies = ["test-wasm"]
[tasks.test-wasm]
env = { CARGO_MAKE_WASM_TEST_ARGS = "--headless --chrome --features=wasm-bindgen" }
command = "cargo"
args = ["make", "wasm-pack-test"]

View File

@@ -1,5 +1,6 @@
[package]
name = "const_str_slice_concat"
edition = "2021"
version = "0.1.0"
authors = ["Greg Johnston"]
license = "MIT"
@@ -7,6 +8,5 @@ readme = "../README.md"
repository = "https://github.com/leptos-rs/leptos"
description = "Utilities for const concatenation of string slices."
rust-version.workspace = true
edition.workspace = true
[dependencies]

View File

@@ -31,7 +31,7 @@ pub const fn const_concat(
let mut i = 0;
// have it iterate over bytes manually, because, again,
// no mutable references in const fns
// no mutable refernces in const fns
while i < x.len() {
buffer[position] = x[i];
position += 1;
@@ -59,7 +59,7 @@ pub const fn const_concat_with_prefix(
let mut i = 0;
// have it iterate over bytes manually, because, again,
// no mutable references in const fns
// no mutable refernces in const fns
while i < x.len() {
buffer[position] = x[i];
position += 1;
@@ -116,7 +116,7 @@ pub const fn const_concat_with_separator(
let mut i = 0;
// have it iterate over bytes manually, because, again,
// no mutable references in const fns
// no mutable refernces in const fns
while i < x.len() {
buffer[position] = x[i];
position += 1;

View File

@@ -1,18 +1,13 @@
[package]
name = "either_of"
version = "0.1.6"
edition = "2021"
version = "0.1.0"
authors = ["Greg Johnston"]
license = "MIT"
readme = "../README.md"
repository = "https://github.com/leptos-rs/leptos"
description = "Utilities for working with enumerated types that contain one of 2..n other types."
rust-version.workspace = true
edition.workspace = true
[dependencies]
pin-project-lite = { workspace = true, default-features = true }
paste = { workspace = true, default-features = true }
[features]
default = ["no_std"]
no_std = []
pin-project-lite = "0.2"

View File

@@ -1,907 +1,135 @@
#![cfg_attr(feature = "no_std", no_std)]
#![no_std]
#![forbid(unsafe_code)]
//! Utilities for working with enumerated types that contain one of `2..n` other types.
use core::{
cmp::Ordering,
fmt::Display,
future::Future,
iter::{Product, Sum},
pin::Pin,
task::{Context, Poll},
};
use paste::paste;
use pin_project_lite::pin_project;
#[cfg(not(feature = "no_std"))]
use std::error::Error; // TODO: replace with core::error::Error once MSRV is >= 1.81.0
#[derive(Debug, Clone, Copy)]
pub enum Either<A, B> {
Left(A),
Right(B),
}
impl<Item, A, B> Iterator for Either<A, B>
where
A: Iterator<Item = Item>,
B: Iterator<Item = Item>,
{
type Item = Item;
fn next(&mut self) -> Option<Self::Item> {
match self {
Either::Left(i) => i.next(),
Either::Right(i) => i.next(),
}
}
}
pin_project! {
#[project = EitherFutureProj]
pub enum EitherFuture<A, B> {
Left { #[pin] inner: A },
Right { #[pin] inner: B },
}
}
impl<A, B> Future for EitherFuture<A, B>
where
A: Future,
B: Future,
{
type Output = Either<A::Output, B::Output>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = self.project();
match this {
EitherFutureProj::Left { inner } => match inner.poll(cx) {
Poll::Pending => Poll::Pending,
Poll::Ready(inner) => Poll::Ready(Either::Left(inner)),
},
EitherFutureProj::Right { inner } => match inner.poll(cx) {
Poll::Pending => Poll::Pending,
Poll::Ready(inner) => Poll::Ready(Either::Right(inner)),
},
}
}
}
macro_rules! tuples {
($name:ident + $fut_name:ident + $fut_proj:ident {
$($ty:ident => ($($rest_variant:ident),*) + <$($mapped_ty:ident),+>),+$(,)?
}) => {
tuples!($name + $fut_name + $fut_proj {
$($ty($ty) => ($($rest_variant),*) + <$($mapped_ty),+>),+
});
};
($name:ident + $fut_name:ident + $fut_proj:ident {
$($variant:ident($ty:ident) => ($($rest_variant:ident),*) + <$($mapped_ty:ident),+>),+$(,)?
}) => {
($name:ident + $fut_name:ident + $fut_proj:ident => $($ty:ident),*) => {
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum $name<$($ty),+> {
$($variant ($ty),)+
pub enum $name<$($ty,)*> {
$($ty ($ty),)*
}
impl<$($ty),+> $name<$($ty),+> {
paste! {
#[allow(clippy::too_many_arguments)]
pub fn map<$([<F $ty>]),+, $([<$ty 1>]),+>(self, $([<$variant:lower>]: [<F $ty>]),+) -> $name<$([<$ty 1>]),+>
where
$([<F $ty>]: FnOnce($ty) -> [<$ty 1>],)+
{
match self {
$($name::$variant(inner) => $name::$variant([<$variant:lower>](inner)),)+
}
}
$(
pub fn [<map_ $variant:lower>]<Fun, [<$ty 1>]>(self, f: Fun) -> $name<$($mapped_ty),+>
where
Fun: FnOnce($ty) -> [<$ty 1>],
{
match self {
$name::$variant(inner) => $name::$variant(f(inner)),
$($name::$rest_variant(inner) => $name::$rest_variant(inner),)*
}
}
pub fn [<inspect_ $variant:lower>]<Fun, [<$ty 1>]>(self, f: Fun) -> Self
where
Fun: FnOnce(&$ty),
{
if let $name::$variant(inner) = &self {
f(inner);
}
self
}
pub fn [<is_ $variant:lower>](&self) -> bool {
matches!(self, $name::$variant(_))
}
pub fn [<as_ $variant:lower>](&self) -> Option<&$ty> {
match self {
$name::$variant(inner) => Some(inner),
_ => None,
}
}
pub fn [<as_ $variant:lower _mut>](&mut self) -> Option<&mut $ty> {
match self {
$name::$variant(inner) => Some(inner),
_ => None,
}
}
pub fn [<unwrap_ $variant:lower>](self) -> $ty {
match self {
$name::$variant(inner) => inner,
_ => panic!(concat!(
"called `unwrap_", stringify!([<$variant:lower>]), "()` on a non-`", stringify!($variant), "` variant of `", stringify!($name), "`"
)),
}
}
pub fn [<into_ $variant:lower>](self) -> Result<$ty, Self> {
match self {
$name::$variant(inner) => Ok(inner),
_ => Err(self),
}
}
)+
}
}
impl<$($ty),+> Display for $name<$($ty),+>
impl<$($ty,)*> Display for $name<$($ty,)*>
where
$($ty: Display,)+
$($ty: Display,)*
{
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
match self {
$($name::$variant(this) => this.fmt(f),)+
$($name::$ty(this) => this.fmt(f),)*
}
}
}
#[cfg(not(feature = "no_std"))]
impl<$($ty),+> Error for $name<$($ty),+>
impl<Item, $($ty,)*> Iterator for $name<$($ty,)*>
where
$($ty: Error,)+
{
fn source(&self) -> Option<&(dyn Error + 'static)> {
match self {
$($name::$variant(this) => this.source(),)+
}
}
}
impl<Item, $($ty),+> Iterator for $name<$($ty),+>
where
$($ty: Iterator<Item = Item>,)+
$($ty: Iterator<Item = Item>,)*
{
type Item = Item;
fn next(&mut self) -> Option<Self::Item> {
match self {
$($name::$variant(i) => i.next(),)+
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
match self {
$($name::$variant(i) => i.size_hint(),)+
}
}
fn count(self) -> usize
where
Self: Sized,
{
match self {
$($name::$variant(i) => i.count(),)+
}
}
fn last(self) -> Option<Self::Item>
where
Self: Sized,
{
match self {
$($name::$variant(i) => i.last(),)+
}
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
match self {
$($name::$variant(i) => i.nth(n),)+
}
}
fn for_each<Fun>(self, f: Fun)
where
Self: Sized,
Fun: FnMut(Self::Item),
{
match self {
$($name::$variant(i) => i.for_each(f),)+
}
}
fn collect<Col: FromIterator<Self::Item>>(self) -> Col
where
Self: Sized,
{
match self {
$($name::$variant(i) => i.collect(),)+
}
}
fn partition<Col, Fun>(self, f: Fun) -> (Col, Col)
where
Self: Sized,
Col: Default + Extend<Self::Item>,
Fun: FnMut(&Self::Item) -> bool,
{
match self {
$($name::$variant(i) => i.partition(f),)+
}
}
fn fold<Acc, Fun>(self, init: Acc, f: Fun) -> Acc
where
Self: Sized,
Fun: FnMut(Acc, Self::Item) -> Acc,
{
match self {
$($name::$variant(i) => i.fold(init, f),)+
}
}
fn reduce<Fun>(self, f: Fun) -> Option<Self::Item>
where
Self: Sized,
Fun: FnMut(Self::Item, Self::Item) -> Self::Item,
{
match self {
$($name::$variant(i) => i.reduce(f),)+
}
}
fn all<Fun>(&mut self, f: Fun) -> bool
where
Self: Sized,
Fun: FnMut(Self::Item) -> bool,
{
match self {
$($name::$variant(i) => i.all(f),)+
}
}
fn any<Fun>(&mut self, f: Fun) -> bool
where
Self: Sized,
Fun: FnMut(Self::Item) -> bool,
{
match self {
$($name::$variant(i) => i.any(f),)+
}
}
fn find<Pre>(&mut self, predicate: Pre) -> Option<Self::Item>
where
Self: Sized,
Pre: FnMut(&Self::Item) -> bool,
{
match self {
$($name::$variant(i) => i.find(predicate),)+
}
}
fn find_map<Out, Fun>(&mut self, f: Fun) -> Option<Out>
where
Self: Sized,
Fun: FnMut(Self::Item) -> Option<Out>,
{
match self {
$($name::$variant(i) => i.find_map(f),)+
}
}
fn position<Pre>(&mut self, predicate: Pre) -> Option<usize>
where
Self: Sized,
Pre: FnMut(Self::Item) -> bool,
{
match self {
$($name::$variant(i) => i.position(predicate),)+
}
}
fn max(self) -> Option<Self::Item>
where
Self: Sized,
Self::Item: Ord,
{
match self {
$($name::$variant(i) => i.max(),)+
}
}
fn min(self) -> Option<Self::Item>
where
Self: Sized,
Self::Item: Ord,
{
match self {
$($name::$variant(i) => i.min(),)+
}
}
fn max_by_key<Key: Ord, Fun>(self, f: Fun) -> Option<Self::Item>
where
Self: Sized,
Fun: FnMut(&Self::Item) -> Key,
{
match self {
$($name::$variant(i) => i.max_by_key(f),)+
}
}
fn max_by<Cmp>(self, compare: Cmp) -> Option<Self::Item>
where
Self: Sized,
Cmp: FnMut(&Self::Item, &Self::Item) -> Ordering,
{
match self {
$($name::$variant(i) => i.max_by(compare),)+
}
}
fn min_by_key<Key: Ord, Fun>(self, f: Fun) -> Option<Self::Item>
where
Self: Sized,
Fun: FnMut(&Self::Item) -> Key,
{
match self {
$($name::$variant(i) => i.min_by_key(f),)+
}
}
fn min_by<Cmp>(self, compare: Cmp) -> Option<Self::Item>
where
Self: Sized,
Cmp: FnMut(&Self::Item, &Self::Item) -> Ordering,
{
match self {
$($name::$variant(i) => i.min_by(compare),)+
}
}
fn sum<Out>(self) -> Out
where
Self: Sized,
Out: Sum<Self::Item>,
{
match self {
$($name::$variant(i) => i.sum(),)+
}
}
fn product<Out>(self) -> Out
where
Self: Sized,
Out: Product<Self::Item>,
{
match self {
$($name::$variant(i) => i.product(),)+
}
}
fn cmp<Other>(self, other: Other) -> Ordering
where
Other: IntoIterator<Item = Self::Item>,
Self::Item: Ord,
Self: Sized,
{
match self {
$($name::$variant(i) => i.cmp(other),)+
}
}
fn partial_cmp<Other>(self, other: Other) -> Option<Ordering>
where
Other: IntoIterator,
Self::Item: PartialOrd<Other::Item>,
Self: Sized,
{
match self {
$($name::$variant(i) => i.partial_cmp(other),)+
}
}
// TODO: uncomment once MSRV is >= 1.82.0
// fn is_sorted(self) -> bool
// where
// Self: Sized,
// Self::Item: PartialOrd,
// {
// match self {
// $($name::$variant(i) => i.is_sorted(),)+
// }
// }
//
// fn is_sorted_by<Cmp>(self, compare: Cmp) -> bool
// where
// Self: Sized,
// Cmp: FnMut(&Self::Item, &Self::Item) -> bool,
// {
// match self {
// $($name::$variant(i) => i.is_sorted_by(compare),)+
// }
// }
//
// fn is_sorted_by_key<Fun, Key>(self, f: Fun) -> bool
// where
// Self: Sized,
// Fun: FnMut(Self::Item) -> Key,
// Key: PartialOrd,
// {
// match self {
// $($name::$variant(i) => i.is_sorted_by_key(f),)+
// }
// }
}
impl<Item, $($ty),+> ExactSizeIterator for $name<$($ty),+>
where
$($ty: ExactSizeIterator<Item = Item>,)+
{
fn len(&self) -> usize {
match self {
$($name::$variant(i) => i.len(),)+
}
}
}
impl<Item, $($ty),+> DoubleEndedIterator for $name<$($ty),+>
where
$($ty: DoubleEndedIterator<Item = Item>,)+
{
fn next_back(&mut self) -> Option<Self::Item> {
match self {
$($name::$variant(i) => i.next_back(),)+
}
}
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
match self {
$($name::$variant(i) => i.nth_back(n),)+
}
}
fn rfind<Pre>(&mut self, predicate: Pre) -> Option<Self::Item>
where
Pre: FnMut(&Self::Item) -> bool,
{
match self {
$($name::$variant(i) => i.rfind(predicate),)+
$($name::$ty(i) => i.next(),)*
}
}
}
pin_project! {
#[project = $fut_proj]
pub enum $fut_name<$($ty),+> {
$($variant { #[pin] inner: $ty },)+
pub enum $fut_name<$($ty,)*> {
$($ty { #[pin] inner: $ty },)*
}
}
impl<$($ty),+> Future for $fut_name<$($ty),+>
impl<$($ty,)*> Future for $fut_name<$($ty,)*>
where
$($ty: Future,)+
$($ty: Future,)*
{
type Output = $name<$($ty::Output),+>;
type Output = $name<$($ty::Output,)*>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = self.project();
match this {
$($fut_proj::$variant { inner } => match inner.poll(cx) {
$($fut_proj::$ty { inner } => match inner.poll(cx) {
Poll::Pending => Poll::Pending,
Poll::Ready(inner) => Poll::Ready($name::$variant(inner)),
},)+
Poll::Ready(inner) => Poll::Ready($name::$ty(inner)),
},)*
}
}
}
}
}
tuples!(Either + EitherFuture + EitherFutureProj {
Left(A) => (Right) + <A1, B>,
Right(B) => (Left) + <A, B1>,
});
impl<A, B> Either<A, B> {
pub fn swap(self) -> Either<B, A> {
match self {
Either::Left(a) => Either::Right(a),
Either::Right(b) => Either::Left(b),
}
}
}
impl<A, B> From<Result<A, B>> for Either<A, B> {
fn from(value: Result<A, B>) -> Self {
match value {
Ok(left) => Either::Left(left),
Err(right) => Either::Right(right),
}
}
}
pub trait EitherOr {
type Left;
type Right;
fn either_or<FA, A, FB, B>(self, a: FA, b: FB) -> Either<A, B>
where
FA: FnOnce(Self::Left) -> A,
FB: FnOnce(Self::Right) -> B;
}
impl EitherOr for bool {
type Left = ();
type Right = ();
fn either_or<FA, A, FB, B>(self, a: FA, b: FB) -> Either<A, B>
where
FA: FnOnce(Self::Left) -> A,
FB: FnOnce(Self::Right) -> B,
{
if self {
Either::Left(a(()))
} else {
Either::Right(b(()))
}
}
}
impl<T> EitherOr for Option<T> {
type Left = T;
type Right = ();
fn either_or<FA, A, FB, B>(self, a: FA, b: FB) -> Either<A, B>
where
FA: FnOnce(Self::Left) -> A,
FB: FnOnce(Self::Right) -> B,
{
match self {
Some(t) => Either::Left(a(t)),
None => Either::Right(b(())),
}
}
}
impl<T, E> EitherOr for Result<T, E> {
type Left = T;
type Right = E;
fn either_or<FA, A, FB, B>(self, a: FA, b: FB) -> Either<A, B>
where
FA: FnOnce(Self::Left) -> A,
FB: FnOnce(Self::Right) -> B,
{
match self {
Ok(t) => Either::Left(a(t)),
Err(err) => Either::Right(b(err)),
}
}
}
impl<A, B> EitherOr for Either<A, B> {
type Left = A;
type Right = B;
#[inline]
fn either_or<FA, A1, FB, B1>(self, a: FA, b: FB) -> Either<A1, B1>
where
FA: FnOnce(<Self as EitherOr>::Left) -> A1,
FB: FnOnce(<Self as EitherOr>::Right) -> B1,
{
self.map(a, b)
}
}
#[test]
fn test_either_or() {
let right = false.either_or(|_| 'a', |_| 12);
assert!(matches!(right, Either::Right(12)));
let left = true.either_or(|_| 'a', |_| 12);
assert!(matches!(left, Either::Left('a')));
let left = Some(12).either_or(|a| a, |_| 'a');
assert!(matches!(left, Either::Left(12)));
let right = None.either_or(|a: i32| a, |_| 'a');
assert!(matches!(right, Either::Right('a')));
let result: Result<_, ()> = Ok(1.2f32);
let left = result.either_or(|a| a * 2f32, |b| b);
assert!(matches!(left, Either::Left(2.4f32)));
let result: Result<i32, _> = Err("12");
let right = result.either_or(|a| a, |b| b.chars().next());
assert!(matches!(right, Either::Right(Some('1'))));
let either = Either::<i32, char>::Left(12);
let left = either.either_or(|a| a, |b| b);
assert!(matches!(left, Either::Left(12)));
let either = Either::<i32, char>::Right('a');
let right = either.either_or(|a| a, |b| b);
assert!(matches!(right, Either::Right('a')));
}
tuples!(EitherOf3 + EitherOf3Future + EitherOf3FutureProj {
A => (B, C) + <A1, B, C>,
B => (A, C) + <A, B1, C>,
C => (A, B) + <A, B, C1>,
});
tuples!(EitherOf4 + EitherOf4Future + EitherOf4FutureProj {
A => (B, C, D) + <A1, B, C, D>,
B => (A, C, D) + <A, B1, C, D>,
C => (A, B, D) + <A, B, C1, D>,
D => (A, B, C) + <A, B, C, D1>,
});
tuples!(EitherOf5 + EitherOf5Future + EitherOf5FutureProj {
A => (B, C, D, E) + <A1, B, C, D, E>,
B => (A, C, D, E) + <A, B1, C, D, E>,
C => (A, B, D, E) + <A, B, C1, D, E>,
D => (A, B, C, E) + <A, B, C, D1, E>,
E => (A, B, C, D) + <A, B, C, D, E1>,
});
tuples!(EitherOf6 + EitherOf6Future + EitherOf6FutureProj {
A => (B, C, D, E, F) + <A1, B, C, D, E, F>,
B => (A, C, D, E, F) + <A, B1, C, D, E, F>,
C => (A, B, D, E, F) + <A, B, C1, D, E, F>,
D => (A, B, C, E, F) + <A, B, C, D1, E, F>,
E => (A, B, C, D, F) + <A, B, C, D, E1, F>,
F => (A, B, C, D, E) + <A, B, C, D, E, F1>,
});
tuples!(EitherOf7 + EitherOf7Future + EitherOf7FutureProj {
A => (B, C, D, E, F, G) + <A1, B, C, D, E, F, G>,
B => (A, C, D, E, F, G) + <A, B1, C, D, E, F, G>,
C => (A, B, D, E, F, G) + <A, B, C1, D, E, F, G>,
D => (A, B, C, E, F, G) + <A, B, C, D1, E, F, G>,
E => (A, B, C, D, F, G) + <A, B, C, D, E1, F, G>,
F => (A, B, C, D, E, G) + <A, B, C, D, E, F1, G>,
G => (A, B, C, D, E, F) + <A, B, C, D, E, F, G1>,
});
tuples!(EitherOf8 + EitherOf8Future + EitherOf8FutureProj {
A => (B, C, D, E, F, G, H) + <A1, B, C, D, E, F, G, H>,
B => (A, C, D, E, F, G, H) + <A, B1, C, D, E, F, G, H>,
C => (A, B, D, E, F, G, H) + <A, B, C1, D, E, F, G, H>,
D => (A, B, C, E, F, G, H) + <A, B, C, D1, E, F, G, H>,
E => (A, B, C, D, F, G, H) + <A, B, C, D, E1, F, G, H>,
F => (A, B, C, D, E, G, H) + <A, B, C, D, E, F1, G, H>,
G => (A, B, C, D, E, F, H) + <A, B, C, D, E, F, G1, H>,
H => (A, B, C, D, E, F, G) + <A, B, C, D, E, F, G, H1>,
});
tuples!(EitherOf9 + EitherOf9Future + EitherOf9FutureProj {
A => (B, C, D, E, F, G, H, I) + <A1, B, C, D, E, F, G, H, I>,
B => (A, C, D, E, F, G, H, I) + <A, B1, C, D, E, F, G, H, I>,
C => (A, B, D, E, F, G, H, I) + <A, B, C1, D, E, F, G, H, I>,
D => (A, B, C, E, F, G, H, I) + <A, B, C, D1, E, F, G, H, I>,
E => (A, B, C, D, F, G, H, I) + <A, B, C, D, E1, F, G, H, I>,
F => (A, B, C, D, E, G, H, I) + <A, B, C, D, E, F1, G, H, I>,
G => (A, B, C, D, E, F, H, I) + <A, B, C, D, E, F, G1, H, I>,
H => (A, B, C, D, E, F, G, I) + <A, B, C, D, E, F, G, H1, I>,
I => (A, B, C, D, E, F, G, H) + <A, B, C, D, E, F, G, H, I1>,
});
tuples!(EitherOf10 + EitherOf10Future + EitherOf10FutureProj {
A => (B, C, D, E, F, G, H, I, J) + <A1, B, C, D, E, F, G, H, I, J>,
B => (A, C, D, E, F, G, H, I, J) + <A, B1, C, D, E, F, G, H, I, J>,
C => (A, B, D, E, F, G, H, I, J) + <A, B, C1, D, E, F, G, H, I, J>,
D => (A, B, C, E, F, G, H, I, J) + <A, B, C, D1, E, F, G, H, I, J>,
E => (A, B, C, D, F, G, H, I, J) + <A, B, C, D, E1, F, G, H, I, J>,
F => (A, B, C, D, E, G, H, I, J) + <A, B, C, D, E, F1, G, H, I, J>,
G => (A, B, C, D, E, F, H, I, J) + <A, B, C, D, E, F, G1, H, I, J>,
H => (A, B, C, D, E, F, G, I, J) + <A, B, C, D, E, F, G, H1, I, J>,
I => (A, B, C, D, E, F, G, H, J) + <A, B, C, D, E, F, G, H, I1, J>,
J => (A, B, C, D, E, F, G, H, I) + <A, B, C, D, E, F, G, H, I, J1>,
});
tuples!(EitherOf11 + EitherOf11Future + EitherOf11FutureProj {
A => (B, C, D, E, F, G, H, I, J, K) + <A1, B, C, D, E, F, G, H, I, J, K>,
B => (A, C, D, E, F, G, H, I, J, K) + <A, B1, C, D, E, F, G, H, I, J, K>,
C => (A, B, D, E, F, G, H, I, J, K) + <A, B, C1, D, E, F, G, H, I, J, K>,
D => (A, B, C, E, F, G, H, I, J, K) + <A, B, C, D1, E, F, G, H, I, J, K>,
E => (A, B, C, D, F, G, H, I, J, K) + <A, B, C, D, E1, F, G, H, I, J, K>,
F => (A, B, C, D, E, G, H, I, J, K) + <A, B, C, D, E, F1, G, H, I, J, K>,
G => (A, B, C, D, E, F, H, I, J, K) + <A, B, C, D, E, F, G1, H, I, J, K>,
H => (A, B, C, D, E, F, G, I, J, K) + <A, B, C, D, E, F, G, H1, I, J, K>,
I => (A, B, C, D, E, F, G, H, J, K) + <A, B, C, D, E, F, G, H, I1, J, K>,
J => (A, B, C, D, E, F, G, H, I, K) + <A, B, C, D, E, F, G, H, I, J1, K>,
K => (A, B, C, D, E, F, G, H, I, J) + <A, B, C, D, E, F, G, H, I, J, K1>,
});
tuples!(EitherOf12 + EitherOf12Future + EitherOf12FutureProj {
A => (B, C, D, E, F, G, H, I, J, K, L) + <A1, B, C, D, E, F, G, H, I, J, K, L>,
B => (A, C, D, E, F, G, H, I, J, K, L) + <A, B1, C, D, E, F, G, H, I, J, K, L>,
C => (A, B, D, E, F, G, H, I, J, K, L) + <A, B, C1, D, E, F, G, H, I, J, K, L>,
D => (A, B, C, E, F, G, H, I, J, K, L) + <A, B, C, D1, E, F, G, H, I, J, K, L>,
E => (A, B, C, D, F, G, H, I, J, K, L) + <A, B, C, D, E1, F, G, H, I, J, K, L>,
F => (A, B, C, D, E, G, H, I, J, K, L) + <A, B, C, D, E, F1, G, H, I, J, K, L>,
G => (A, B, C, D, E, F, H, I, J, K, L) + <A, B, C, D, E, F, G1, H, I, J, K, L>,
H => (A, B, C, D, E, F, G, I, J, K, L) + <A, B, C, D, E, F, G, H1, I, J, K, L>,
I => (A, B, C, D, E, F, G, H, J, K, L) + <A, B, C, D, E, F, G, H, I1, J, K, L>,
J => (A, B, C, D, E, F, G, H, I, K, L) + <A, B, C, D, E, F, G, H, I, J1, K, L>,
K => (A, B, C, D, E, F, G, H, I, J, L) + <A, B, C, D, E, F, G, H, I, J, K1, L>,
L => (A, B, C, D, E, F, G, H, I, J, K) + <A, B, C, D, E, F, G, H, I, J, K, L1>,
});
tuples!(EitherOf13 + EitherOf13Future + EitherOf13FutureProj {
A => (B, C, D, E, F, G, H, I, J, K, L, M) + <A1, B, C, D, E, F, G, H, I, J, K, L, M>,
B => (A, C, D, E, F, G, H, I, J, K, L, M) + <A, B1, C, D, E, F, G, H, I, J, K, L, M>,
C => (A, B, D, E, F, G, H, I, J, K, L, M) + <A, B, C1, D, E, F, G, H, I, J, K, L, M>,
D => (A, B, C, E, F, G, H, I, J, K, L, M) + <A, B, C, D1, E, F, G, H, I, J, K, L, M>,
E => (A, B, C, D, F, G, H, I, J, K, L, M) + <A, B, C, D, E1, F, G, H, I, J, K, L, M>,
F => (A, B, C, D, E, G, H, I, J, K, L, M) + <A, B, C, D, E, F1, G, H, I, J, K, L, M>,
G => (A, B, C, D, E, F, H, I, J, K, L, M) + <A, B, C, D, E, F, G1, H, I, J, K, L, M>,
H => (A, B, C, D, E, F, G, I, J, K, L, M) + <A, B, C, D, E, F, G, H1, I, J, K, L, M>,
I => (A, B, C, D, E, F, G, H, J, K, L, M) + <A, B, C, D, E, F, G, H, I1, J, K, L, M>,
J => (A, B, C, D, E, F, G, H, I, K, L, M) + <A, B, C, D, E, F, G, H, I, J1, K, L, M>,
K => (A, B, C, D, E, F, G, H, I, J, L, M) + <A, B, C, D, E, F, G, H, I, J, K1, L, M>,
L => (A, B, C, D, E, F, G, H, I, J, K, M) + <A, B, C, D, E, F, G, H, I, J, K, L1, M>,
M => (A, B, C, D, E, F, G, H, I, J, K, L) + <A, B, C, D, E, F, G, H, I, J, K, L, M1>,
});
tuples!(EitherOf14 + EitherOf14Future + EitherOf14FutureProj {
A => (B, C, D, E, F, G, H, I, J, K, L, M, N) + <A1, B, C, D, E, F, G, H, I, J, K, L, M, N>,
B => (A, C, D, E, F, G, H, I, J, K, L, M, N) + <A, B1, C, D, E, F, G, H, I, J, K, L, M, N>,
C => (A, B, D, E, F, G, H, I, J, K, L, M, N) + <A, B, C1, D, E, F, G, H, I, J, K, L, M, N>,
D => (A, B, C, E, F, G, H, I, J, K, L, M, N) + <A, B, C, D1, E, F, G, H, I, J, K, L, M, N>,
E => (A, B, C, D, F, G, H, I, J, K, L, M, N) + <A, B, C, D, E1, F, G, H, I, J, K, L, M, N>,
F => (A, B, C, D, E, G, H, I, J, K, L, M, N) + <A, B, C, D, E, F1, G, H, I, J, K, L, M, N>,
G => (A, B, C, D, E, F, H, I, J, K, L, M, N) + <A, B, C, D, E, F, G1, H, I, J, K, L, M, N>,
H => (A, B, C, D, E, F, G, I, J, K, L, M, N) + <A, B, C, D, E, F, G, H1, I, J, K, L, M, N>,
I => (A, B, C, D, E, F, G, H, J, K, L, M, N) + <A, B, C, D, E, F, G, H, I1, J, K, L, M, N>,
J => (A, B, C, D, E, F, G, H, I, K, L, M, N) + <A, B, C, D, E, F, G, H, I, J1, K, L, M, N>,
K => (A, B, C, D, E, F, G, H, I, J, L, M, N) + <A, B, C, D, E, F, G, H, I, J, K1, L, M, N>,
L => (A, B, C, D, E, F, G, H, I, J, K, M, N) + <A, B, C, D, E, F, G, H, I, J, K, L1, M, N>,
M => (A, B, C, D, E, F, G, H, I, J, K, L, N) + <A, B, C, D, E, F, G, H, I, J, K, L, M1, N>,
N => (A, B, C, D, E, F, G, H, I, J, K, L, M) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N1>,
});
tuples!(EitherOf15 + EitherOf15Future + EitherOf15FutureProj {
A => (B, C, D, E, F, G, H, I, J, K, L, M, N, O) + <A1, B, C, D, E, F, G, H, I, J, K, L, M, N, O>,
B => (A, C, D, E, F, G, H, I, J, K, L, M, N, O) + <A, B1, C, D, E, F, G, H, I, J, K, L, M, N, O>,
C => (A, B, D, E, F, G, H, I, J, K, L, M, N, O) + <A, B, C1, D, E, F, G, H, I, J, K, L, M, N, O>,
D => (A, B, C, E, F, G, H, I, J, K, L, M, N, O) + <A, B, C, D1, E, F, G, H, I, J, K, L, M, N, O>,
E => (A, B, C, D, F, G, H, I, J, K, L, M, N, O) + <A, B, C, D, E1, F, G, H, I, J, K, L, M, N, O>,
F => (A, B, C, D, E, G, H, I, J, K, L, M, N, O) + <A, B, C, D, E, F1, G, H, I, J, K, L, M, N, O>,
G => (A, B, C, D, E, F, H, I, J, K, L, M, N, O) + <A, B, C, D, E, F, G1, H, I, J, K, L, M, N, O>,
H => (A, B, C, D, E, F, G, I, J, K, L, M, N, O) + <A, B, C, D, E, F, G, H1, I, J, K, L, M, N, O>,
I => (A, B, C, D, E, F, G, H, J, K, L, M, N, O) + <A, B, C, D, E, F, G, H, I1, J, K, L, M, N, O>,
J => (A, B, C, D, E, F, G, H, I, K, L, M, N, O) + <A, B, C, D, E, F, G, H, I, J1, K, L, M, N, O>,
K => (A, B, C, D, E, F, G, H, I, J, L, M, N, O) + <A, B, C, D, E, F, G, H, I, J, K1, L, M, N, O>,
L => (A, B, C, D, E, F, G, H, I, J, K, M, N, O) + <A, B, C, D, E, F, G, H, I, J, K, L1, M, N, O>,
M => (A, B, C, D, E, F, G, H, I, J, K, L, N, O) + <A, B, C, D, E, F, G, H, I, J, K, L, M1, N, O>,
N => (A, B, C, D, E, F, G, H, I, J, K, L, M, O) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N1, O>,
O => (A, B, C, D, E, F, G, H, I, J, K, L, M, N) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N, O1>,
});
tuples!(EitherOf16 + EitherOf16Future + EitherOf16FutureProj {
A => (B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) + <A1, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P>,
B => (A, C, D, E, F, G, H, I, J, K, L, M, N, O, P) + <A, B1, C, D, E, F, G, H, I, J, K, L, M, N, O, P>,
C => (A, B, D, E, F, G, H, I, J, K, L, M, N, O, P) + <A, B, C1, D, E, F, G, H, I, J, K, L, M, N, O, P>,
D => (A, B, C, E, F, G, H, I, J, K, L, M, N, O, P) + <A, B, C, D1, E, F, G, H, I, J, K, L, M, N, O, P>,
E => (A, B, C, D, F, G, H, I, J, K, L, M, N, O, P) + <A, B, C, D, E1, F, G, H, I, J, K, L, M, N, O, P>,
F => (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P) + <A, B, C, D, E, F1, G, H, I, J, K, L, M, N, O, P>,
G => (A, B, C, D, E, F, H, I, J, K, L, M, N, O, P) + <A, B, C, D, E, F, G1, H, I, J, K, L, M, N, O, P>,
H => (A, B, C, D, E, F, G, I, J, K, L, M, N, O, P) + <A, B, C, D, E, F, G, H1, I, J, K, L, M, N, O, P>,
I => (A, B, C, D, E, F, G, H, J, K, L, M, N, O, P) + <A, B, C, D, E, F, G, H, I1, J, K, L, M, N, O, P>,
J => (A, B, C, D, E, F, G, H, I, K, L, M, N, O, P) + <A, B, C, D, E, F, G, H, I, J1, K, L, M, N, O, P>,
K => (A, B, C, D, E, F, G, H, I, J, L, M, N, O, P) + <A, B, C, D, E, F, G, H, I, J, K1, L, M, N, O, P>,
L => (A, B, C, D, E, F, G, H, I, J, K, M, N, O, P) + <A, B, C, D, E, F, G, H, I, J, K, L1, M, N, O, P>,
M => (A, B, C, D, E, F, G, H, I, J, K, L, N, O, P) + <A, B, C, D, E, F, G, H, I, J, K, L, M1, N, O, P>,
N => (A, B, C, D, E, F, G, H, I, J, K, L, M, O, P) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N1, O, P>,
O => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, P) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N, O1, P>,
P => (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) + <A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P1>,
});
/// Matches over the first expression and returns an either ([`Either`], [`EitherOf3`], ... [`EitherOf8`])
/// composed of the values returned by the match arms.
///
/// The pattern syntax is exactly the same as found in a match arm.
///
/// # Examples
///
/// ```
/// # use either_of::*;
/// let either2 = either!(Some("hello"),
/// Some(s) => s.len(),
/// None => 0.0,
/// );
/// assert!(matches!(either2, Either::<usize, f64>::Left(5)));
///
/// let either3 = either!(Some("admin"),
/// Some("admin") => "hello admin",
/// Some(_) => 'x',
/// _ => 0,
/// );
/// assert!(matches!(either3, EitherOf3::<&str, char, i32>::A("hello admin")));
/// ```
#[macro_export]
macro_rules! either {
($match:expr, $left_pattern:pat => $left_expression:expr, $right_pattern:pat => $right_expression:expr$(,)?) => {
match $match {
$left_pattern => $crate::Either::Left($left_expression),
$right_pattern => $crate::Either::Right($right_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr$(,)?) => {
match $match {
$a_pattern => $crate::EitherOf3::A($a_expression),
$b_pattern => $crate::EitherOf3::B($b_expression),
$c_pattern => $crate::EitherOf3::C($c_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr$(,)?) => {
match $match {
$a_pattern => $crate::EitherOf4::A($a_expression),
$b_pattern => $crate::EitherOf4::B($b_expression),
$c_pattern => $crate::EitherOf4::C($c_expression),
$d_pattern => $crate::EitherOf4::D($d_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr, $e_pattern:pat => $e_expression:expr$(,)?) => {
match $match {
$a_pattern => $crate::EitherOf5::A($a_expression),
$b_pattern => $crate::EitherOf5::B($b_expression),
$c_pattern => $crate::EitherOf5::C($c_expression),
$d_pattern => $crate::EitherOf5::D($d_expression),
$e_pattern => $crate::EitherOf5::E($e_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr, $e_pattern:pat => $e_expression:expr, $f_pattern:pat => $f_expression:expr$(,)?) => {
match $match {
$a_pattern => $crate::EitherOf6::A($a_expression),
$b_pattern => $crate::EitherOf6::B($b_expression),
$c_pattern => $crate::EitherOf6::C($c_expression),
$d_pattern => $crate::EitherOf6::D($d_expression),
$e_pattern => $crate::EitherOf6::E($e_expression),
$f_pattern => $crate::EitherOf6::F($f_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr, $e_pattern:pat => $e_expression:expr, $f_pattern:pat => $f_expression:expr, $g_pattern:pat => $g_expression:expr$(,)?) => {
match $match {
$a_pattern => $crate::EitherOf7::A($a_expression),
$b_pattern => $crate::EitherOf7::B($b_expression),
$c_pattern => $crate::EitherOf7::C($c_expression),
$d_pattern => $crate::EitherOf7::D($d_expression),
$e_pattern => $crate::EitherOf7::E($e_expression),
$f_pattern => $crate::EitherOf7::F($f_expression),
$g_pattern => $crate::EitherOf7::G($g_expression),
}
};
($match:expr, $a_pattern:pat => $a_expression:expr, $b_pattern:pat => $b_expression:expr, $c_pattern:pat => $c_expression:expr, $d_pattern:pat => $d_expression:expr, $e_pattern:pat => $e_expression:expr, $f_pattern:pat => $f_expression:expr, $g_pattern:pat => $g_expression:expr, $h_pattern:pat => $h_expression:expr$(,)?) => {
match $match {
$a_pattern => $crate::EitherOf8::A($a_expression),
$b_pattern => $crate::EitherOf8::B($b_expression),
$c_pattern => $crate::EitherOf8::C($c_expression),
$d_pattern => $crate::EitherOf8::D($d_expression),
$e_pattern => $crate::EitherOf8::E($e_expression),
$f_pattern => $crate::EitherOf8::F($f_expression),
$g_pattern => $crate::EitherOf8::G($g_expression),
$h_pattern => $crate::EitherOf8::H($h_expression),
}
}; // if you need more eithers feel free to open a PR ;-)
}
#[cfg(test)]
mod tests {
use super::*;
// compile time test
#[test]
fn either_macro() {
let _: Either<&str, f64> = either!(12,
12 => "12",
_ => 0.0,
);
let _: EitherOf3<&str, f64, i32> = either!(12,
12 => "12",
13 => 0.0,
_ => 12,
);
let _: EitherOf4<&str, f64, char, i32> = either!(12,
12 => "12",
13 => 0.0,
14 => ' ',
_ => 12,
);
let _: EitherOf5<&str, f64, char, f32, i32> = either!(12,
12 => "12",
13 => 0.0,
14 => ' ',
15 => 0.0f32,
_ => 12,
);
let _: EitherOf6<&str, f64, char, f32, u8, i32> = either!(12,
12 => "12",
13 => 0.0,
14 => ' ',
15 => 0.0f32,
16 => 24u8,
_ => 12,
);
let _: EitherOf7<&str, f64, char, f32, u8, i8, i32> = either!(12,
12 => "12",
13 => 0.0,
14 => ' ',
15 => 0.0f32,
16 => 24u8,
17 => 2i8,
_ => 12,
);
let _: EitherOf8<&str, f64, char, f32, u8, i8, u32, i32> = either!(12,
12 => "12",
13 => 0.0,
14 => ' ',
15 => 0.0f32,
16 => 24u8,
17 => 2i8,
18 => 42u32,
_ => 12,
);
}
#[test]
#[should_panic]
fn unwrap_wrong_either() {
Either::<i32, &str>::Left(0).unwrap_right();
}
}
tuples!(EitherOf3 + EitherOf3Future + EitherOf3FutureProj => A, B, C);
tuples!(EitherOf4 + EitherOf4Future + EitherOf4FutureProj => A, B, C, D);
tuples!(EitherOf5 + EitherOf5Future + EitherOf5FutureProj => A, B, C, D, E);
tuples!(EitherOf6 + EitherOf6Future + EitherOf6FutureProj => A, B, C, D, E, F);
tuples!(EitherOf7 + EitherOf7Future + EitherOf7FutureProj => A, B, C, D, E, F, G);
tuples!(EitherOf8 + EitherOf8Future + EitherOf8FutureProj => A, B, C, D, E, F, G, H);
tuples!(EitherOf9 + EitherOf9Future + EitherOf9FutureProj => A, B, C, D, E, F, G, H, I);
tuples!(EitherOf10 + EitherOf10Future + EitherOf10FutureProj => A, B, C, D, E, F, G, H, I, J);
tuples!(EitherOf11 + EitherOf11Future + EitherOf11FutureProj => A, B, C, D, E, F, G, H, I, J, K);
tuples!(EitherOf12 + EitherOf12Future + EitherOf12FutureProj => A, B, C, D, E, F, G, H, I, J, K, L);
tuples!(EitherOf13 + EitherOf13Future + EitherOf13FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M);
tuples!(EitherOf14 + EitherOf14Future + EitherOf14FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N);
tuples!(EitherOf15 + EitherOf15Future + EitherOf15FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N, O);
tuples!(EitherOf16 + EitherOf16Future + EitherOf16FutureProj => A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P);

View File

@@ -7,18 +7,19 @@ edition = "2021"
crate-type = ["cdylib", "rlib"]
[dependencies]
actix-files = { version = "0.6.6", optional = true }
actix-web = { version = "4.8", optional = true, features = ["macros"] }
console_error_panic_hook = "0.1.7"
cfg-if = "1.0"
actix-files = { version = "0.6", optional = true }
actix-web = { version = "4", optional = true, features = ["macros"] }
console_error_panic_hook = "0.1"
cfg-if = "1"
leptos = { path = "../../leptos" }
leptos_meta = { path = "../../meta" }
leptos_actix = { path = "../../integrations/actix", optional = true }
leptos_router = { path = "../../router" }
wasm-bindgen = "0.2.93"
serde = { version = "1.0", features = ["derive"] }
wasm-bindgen = "0.2"
serde = { version = "1", features = ["derive"] }
[features]
csr = ["leptos/csr"]
hydrate = ["leptos/hydrate"]
ssr = [
"dep:actix-files",

View File

@@ -1,9 +1,68 @@
# Action Form Error Handling Example
<picture>
<source srcset="https://raw.githubusercontent.com/leptos-rs/leptos/main/docs/logos/Leptos_logo_Solid_White.svg" media="(prefers-color-scheme: dark)">
<img src="https://raw.githubusercontent.com/leptos-rs/leptos/main/docs/logos/Leptos_logo_RGB.svg" alt="Leptos Logo">
</picture>
## Getting Started
# Leptos Starter Template
See the [Examples README](../README.md) for setup and run instructions.
This is a template for use with the [Leptos](https://github.com/leptos-rs/leptos) web framework and the [cargo-leptos](https://github.com/akesson/cargo-leptos) tool.
## Quick Start
## Creating your template repo
Execute `cargo leptos watch` to run this example.
If you don't have `cargo-leptos` installed you can install it with
`cargo install cargo-leptos`
Then run
`cargo leptos new --git leptos-rs/start`
to generate a new project template (you will be prompted to enter a project name).
`cd {projectname}`
to go to your newly created project.
Of course, you should explore around the project structure, but the best place to start with your application code is in `src/app.rs`.
## Running your project
`cargo leptos watch`
By default, you can access your local project at `http://localhost:3000`
## Installing Additional Tools
By default, `cargo-leptos` uses `nightly` Rust, `cargo-generate`, and `sass`. If you run into any trouble, you may need to install one or more of these tools.
1. `rustup toolchain install nightly --allow-downgrade` - make sure you have Rust nightly
2. `rustup target add wasm32-unknown-unknown` - add the ability to compile Rust to WebAssembly
3. `cargo install cargo-generate` - install `cargo-generate` binary (should be installed automatically in future)
4. `npm install -g sass` - install `dart-sass` (should be optional in future)
## Executing a Server on a Remote Machine Without the Toolchain
After running `cargo leptos build --release` the minimum files needed are:
1. The server binary located in `target/server/release`
2. The `site` directory and all files within located in `target/site`
Copy these files to your remote server. The directory structure should be:
```text
leptos_start
site/
```
Set the following environment variables (updating for your project as needed):
```sh
export LEPTOS_OUTPUT_NAME="leptos_start"
export LEPTOS_SITE_ROOT="site"
export LEPTOS_SITE_PKG_DIR="pkg"
export LEPTOS_SITE_ADDR="127.0.0.1:3000"
export LEPTOS_RELOAD_PORT="3001"
```
Finally, run the server binary.
## Notes about CSR and Trunk:
Although it is not recommended, you can also run your project without server integration using the feature `csr` and `trunk serve`:
`trunk serve --open --features csr`
This may be useful for integrating external tools which require a static site, e.g. `tauri`.

View File

@@ -52,10 +52,23 @@ async fn main() -> std::io::Result<()> {
.await
}
#[cfg(not(feature = "ssr"))]
#[cfg(not(any(feature = "ssr", feature = "csr")))]
pub fn main() {
// no client-side main function
// unless we want this to work with e.g., Trunk for pure client-side testing
// see lib.rs for hydration function instead
// see optional feature `csr` instead
}
#[cfg(all(not(feature = "ssr"), feature = "csr"))]
pub fn main() {
// a client-side main function is required for using `trunk serve`
// prefer using `cargo leptos serve` instead
// to run: `trunk serve --open --features csr`
use action_form_error_handling::app::*;
use leptos::prelude::*;
console_error_panic_hook::set_once();
mount_to_body(App);
}

View File

@@ -1,120 +0,0 @@
[package]
name = "axum_js_ssr"
version = "0.1.0"
edition = "2021"
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
axum = { version = "0.8.1", optional = true }
console_error_panic_hook = "0.1.7"
console_log = "1.0"
gloo-utils = "0.2.0"
html-escape = "0.2.13"
http-body-util = { version = "0.1.0", optional = true }
js-sys = { version = "0.3.69", optional = true }
leptos = { path = "../../leptos", features = ["tracing"] }
leptos_meta = { path = "../../meta" }
leptos_axum = { path = "../../integrations/axum", optional = true }
leptos_router = { path = "../../router" }
serde = { version = "1.0", features = ["derive"] }
thiserror = "2.0.12"
tokio = { version = "1.39", features = [
"rt-multi-thread",
"macros",
"time",
], optional = true }
tower = { version = "0.4.13", optional = true }
tower-http = { version = "0.5.2", features = ["fs"], optional = true }
wasm-bindgen = "0.2.92"
web-sys = { version = "0.3.69", features = [
"AddEventListenerOptions",
"Document",
"Element",
"Event",
"EventListener",
"EventTarget",
"Performance",
"Window",
], optional = true }
[features]
hydrate = ["leptos/hydrate", "dep:js-sys", "dep:web-sys"]
ssr = [
"dep:axum",
"dep:http-body-util",
"dep:tower",
"dep:tower-http",
"dep:tokio",
"leptos/ssr",
"leptos_meta/ssr",
"dep:leptos_axum",
"leptos_router/ssr",
]
[profile.release]
panic = "abort"
[profile.wasm-release]
inherits = "release"
opt-level = 'z'
lto = true
codegen-units = 1
panic = "abort"
[package.metadata.cargo-all-features]
denylist = ["axum", "tower", "tower-http", "tokio", "sqlx", "leptos_axum"]
skip_feature_sets = [["ssr", "hydrate"], []]
[package.metadata.leptos]
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
output-name = "axum_js_ssr"
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
site-root = "target/site"
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
# Defaults to pkg
site-pkg-dir = "pkg"
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
style-file = "style/main.scss"
# Assets source dir. All files found here will be copied and synchronized to site-root.
# The assets-dir cannot have a sub directory with the same name/path as site-pkg-dir.
#
# Optional. Env: LEPTOS_ASSETS_DIR.
assets-dir = "assets"
# The IP and port (ex: 127.0.0.1:3000) where the server serves the content. Use it in your server setup.
site-addr = "127.0.0.1:3000"
# The port to use for automatic reload monitoring
reload-port = 3001
# [Optional] Command to use when running end2end tests. It will run in the end2end dir.
# [Windows] for non-WSL use "npx.cmd playwright test"
# This binary name can be checked in Powershell with Get-Command npx
end2end-cmd = "npx playwright test"
end2end-dir = "end2end"
# The browserlist query used for optimizing the CSS.
browserquery = "defaults"
# Set by cargo-leptos watch when building with that tool. Controls whether autoreload JS will be included in the head
watch = false
# The environment Leptos will run in, usually either "DEV" or "PROD"
env = "DEV"
# The features to use when compiling the bin target
#
# Optional. Can be over-ridden with the command line parameter --bin-features
bin-features = ["ssr"]
# If the --no-default-features flag should be used when compiling the bin target
#
# Optional. Defaults to false.
bin-default-features = false
# The features to use when compiling the lib target
#
# Optional. Can be over-ridden with the command line parameter --lib-features
lib-features = ["hydrate"]
# If the --no-default-features flag should be used when compiling the lib target
#
# Optional. Defaults to false.
lib-default-features = false
lib-profile-release = "wasm-release"

View File

@@ -1,21 +0,0 @@
MIT License
Copyright (c) 2024 Tommy Yu
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,8 +0,0 @@
extend = [
{ path = "../cargo-make/main.toml" },
{ path = "../cargo-make/cargo-leptos.toml" },
]
[env]
CLIENT_PROCESS_NAME = "axum_js_ssr"

View File

@@ -1,10 +0,0 @@
# Leptos Axum JS SSR Example
This example shows the various ways that JavaScript may be included in
a Leptos application. The intent is to demonstrate how this may be done
and how, if done incorrectly, it may cause the application to fail in
unexpected ways.
## Quick Start
Run `cargo leptos watch` to run this example.

Binary file not shown.


View File

@@ -1,29 +0,0 @@
BSD 3-Clause License
Copyright (c) 2006, Ivan Sagalaev.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@@ -1,47 +0,0 @@
# Highlight.js CDN Assets
**Note: this contains only a subset of files from the full package from NPM.**
[![install size](https://packagephobia.now.sh/badge?p=highlight.js)](https://packagephobia.now.sh/result?p=highlight.js)
**This package contains only the CDN build assets of highlight.js.**
This may be what you want if you'd like to install the pre-built distributable highlight.js client-side assets via NPM. If you're wanting to use highlight.js mainly on the server-side you likely want the [highlight.js][1] package instead.
To access these files via CDN:<br>
https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@latest/build/
**If you just want a single .js file with the common languages built-in:
<https://cdn.jsdelivr.net/gh/highlightjs/cdn-release@latest/build/highlight.min.js>**
---
## Highlight.js
Highlight.js is a syntax highlighter written in JavaScript. It works in
the browser as well as on the server. It works with pretty much any
markup, doesn't depend on any framework, and has automatic language
detection.
If you'd like to read the full README:<br>
<https://github.com/highlightjs/highlight.js/blob/main/README.md>
## License
Highlight.js is released under the BSD License. See [LICENSE][7] file
for details.
## Links
The official site for the library is at <https://highlightjs.org/>.
The Github project may be found at: <https://github.com/highlightjs/highlight.js>
Further in-depth documentation for the API and other topics is at
<http://highlightjs.readthedocs.io/>.
A list of the Core Team and contributors can be found in the [CONTRIBUTORS.md][8] file.
[1]: https://www.npmjs.com/package/highlight.js
[7]: https://github.com/highlightjs/highlight.js/blob/main/LICENSE
[8]: https://github.com/highlightjs/highlight.js/blob/main/CONTRIBUTORS.md

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,93 +0,0 @@
{
"name": "@highlightjs/cdn-assets",
"description": "Syntax highlighting with language autodetection. (pre-compiled CDN assets)",
"keywords": [
"highlight",
"syntax"
],
"homepage": "https://highlightjs.org/",
"version": "11.10.0",
"author": "Josh Goebel <hello@joshgoebel.com>",
"contributors": [
"Josh Goebel <hello@joshgoebel.com>",
"Egor Rogov <e.rogov@postgrespro.ru>",
"Vladimir Jimenez <me@allejo.io>",
"Ivan Sagalaev <maniac@softwaremaniacs.org>",
"Jeremy Hull <sourdrums@gmail.com>",
"Oleg Efimov <efimovov@gmail.com>",
"Gidi Meir Morris <gidi@gidi.io>",
"Jan T. Sott <git@idleberg.com>",
"Li Xuanji <xuanji@gmail.com>",
"Marcos Cáceres <marcos@marcosc.com>",
"Sang Dang <sang.dang@polku.io>"
],
"bugs": {
"url": "https://github.com/highlightjs/highlight.js/issues"
},
"license": "BSD-3-Clause",
"repository": {
"type": "git",
"url": "git://github.com/highlightjs/highlight.js.git"
},
"sideEffects": [
"./es/common.js",
"./lib/common.js",
"*.css",
"*.scss"
],
"scripts": {
"mocha": "mocha",
"lint": "eslint src/*.js src/lib/*.js demo/*.js tools/**/*.js --ignore-pattern vendor",
"lint-languages": "eslint --no-eslintrc -c .eslintrc.lang.js src/languages/**/*.js",
"build_and_test": "npm run build && npm run test",
"build_and_test_browser": "npm run build-browser && npm run test-browser",
"build": "node ./tools/build.js -t node",
"build-cdn": "node ./tools/build.js -t cdn",
"build-browser": "node ./tools/build.js -t browser :common",
"devtool": "npx http-server",
"test": "mocha test",
"test-markup": "mocha test/markup",
"test-detect": "mocha test/detect",
"test-browser": "mocha test/browser",
"test-parser": "mocha test/parser"
},
"engines": {
"node": ">=12.0.0"
},
"devDependencies": {
"@colors/colors": "^1.6.0",
"@rollup/plugin-commonjs": "^26.0.1",
"@rollup/plugin-json": "^6.0.1",
"@rollup/plugin-node-resolve": "^15.2.3",
"@types/mocha": "^10.0.2",
"@typescript-eslint/eslint-plugin": "^7.15.0",
"@typescript-eslint/parser": "^7.15.0",
"clean-css": "^5.3.2",
"cli-table": "^0.3.1",
"commander": "^12.1.0",
"css": "^3.0.0",
"css-color-names": "^1.0.1",
"deep-freeze-es6": "^3.0.2",
"del": "^7.1.0",
"dependency-resolver": "^2.0.1",
"eslint": "^8.57.0",
"eslint-config-standard": "^17.1.0",
"eslint-plugin-import": "^2.28.1",
"eslint-plugin-node": "^11.1.0",
"eslint-plugin-promise": "^6.1.1",
"glob": "^8.1.0",
"glob-promise": "^6.0.5",
"handlebars": "^4.7.8",
"http-server": "^14.1.1",
"jsdom": "^24.1.0",
"lodash": "^4.17.20",
"mocha": "^10.2.0",
"refa": "^0.4.1",
"rollup": "^4.0.2",
"should": "^13.2.3",
"terser": "^5.21.0",
"tiny-worker": "^2.3.0",
"typescript": "^5.2.2",
"wcag-contrast": "^3.0.0"
}
}

View File

@@ -1,10 +0,0 @@
pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}/*!
Theme: GitHub Dark
Description: Dark theme as seen on github.com
Author: github.com
Maintainer: @Hirse
Updated: 2021-05-15
Outdated base version: https://github.com/primer/github-syntax-dark
Current colors taken from GitHub's CSS
*/.hljs{color:#c9d1d9;background:#0d1117}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#ff7b72}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#d2a8ff}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#79c0ff}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#a5d6ff}.hljs-built_in,.hljs-symbol{color:#ffa657}.hljs-code,.hljs-comment,.hljs-formula{color:#8b949e}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#7ee787}.hljs-subst{color:#c9d1d9}.hljs-section{color:#1f6feb;font-weight:700}.hljs-bullet{color:#f2cc60}.hljs-emphasis{color:#c9d1d9;font-style:italic}.hljs-strong{color:#c9d1d9;font-weight:700}.hljs-addition{color:#aff5b4;background-color:#033a16}.hljs-deletion{color:#ffdcd7;background-color:#67060c}

View File

@@ -1,10 +0,0 @@
pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}/*!
Theme: GitHub
Description: Light theme as seen on github.com
Author: github.com
Maintainer: @Hirse
Updated: 2021-05-15
Outdated base version: https://github.com/primer/github-syntax-light
Current colors taken from GitHub's CSS
*/.hljs{color:#24292e;background:#fff}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#d73a49}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#6f42c1}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#005cc5}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#032f62}.hljs-built_in,.hljs-symbol{color:#e36209}.hljs-code,.hljs-comment,.hljs-formula{color:#6a737d}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#22863a}.hljs-subst{color:#24292e}.hljs-section{color:#005cc5;font-weight:700}.hljs-bullet{color:#735c0f}.hljs-emphasis{color:#24292e;font-style:italic}.hljs-strong{color:#24292e;font-weight:700}.hljs-addition{color:#22863a;background-color:#f0fff4}.hljs-deletion{color:#b31d28;background-color:#ffeef0}

View File

@@ -1,6 +0,0 @@
{
"name": "axum_js_ssr",
"dependencies": {
"@highlightjs/cdn-assets": "^11.10.0"
}
}

View File

@@ -1,8 +0,0 @@
use leptos::{prelude::ServerFnError, server};
#[server]
pub async fn fetch_code() -> Result<String, ServerFnError> {
// emulate loading of code from a database/version control/etc
tokio::time::sleep(std::time::Duration::from_millis(50)).await;
Ok(crate::consts::CH05_02A.to_string())
}

File diff suppressed because it is too large

View File

@@ -1,39 +0,0 @@
// Example programs from the Rust Programming Language Book
pub const CH03_05A: &str = r#"fn main() {
let number = 3;
if number < 5 {
println!("condition was true");
} else {
println!("condition was false");
}
}
"#;
// For some reason, swapping the code examples "fixes" example 6. It
// might have something to do with the lower complexity of highlighting
// a shorter example. Anyway, extra newlines are included for the shorter
// example to match the longer one, in order to avoid reflowing the
// table during the async resource loading for CSR.
pub const CH05_02A: &str = r#"fn main() {
let width1 = 30;
let height1 = 50;
println!(
"The area of the rectangle is {} square pixels.",
area(width1, height1)
);
}
fn area(width: u32, height: u32) -> u32 {
width * height
}
"#;
pub const LEPTOS_HYDRATED: &str = "_leptos_hydrated";

View File

@@ -1,59 +0,0 @@
#[cfg(not(feature = "ssr"))]
mod csr {
use gloo_utils::format::JsValueSerdeExt;
use js_sys::{
Object,
Reflect::{get, set},
};
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
#[wasm_bindgen(
module = "/node_modules/@highlightjs/cdn-assets/es/highlight.min.js"
)]
extern "C" {
type HighlightOptions;
#[wasm_bindgen(catch, js_namespace = defaultMod, js_name = highlight)]
fn highlight_lang(
code: String,
options: Object,
) -> Result<Object, JsValue>;
#[wasm_bindgen(js_namespace = defaultMod, js_name = highlightAll)]
pub fn highlight_all();
}
// The `ignoreIllegals` argument is kept out of the default case; since Rust has no optional
// arguments, it would have to be provided through a separate function (e.g. `highlight_ignore_illegals`,
// a hedged sketch of which follows the `highlight` function below), much like how `web_sys` does it
// for the browser APIs. For simplicity, only the highlighted HTML code is returned on success, and None on error.
pub fn highlight(code: String, lang: String) -> Option<String> {
let options = js_sys::Object::new();
set(&options, &"language".into(), &lang.into())
.expect("failed to assign lang to options");
highlight_lang(code, options)
.map(|result| {
let value = get(&result, &"value".into())
.expect("HighlightResult failed to contain the value key");
value.into_serde().expect("Value should have been a string")
})
.ok()
}
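    // A hedged sketch (not part of the original file) of the separate function
    // mentioned above. `highlight_ignore_illegals` is a hypothetical name; it
    // assumes the same `highlight_lang` binding shown above accepts an
    // `ignoreIllegals` key in its options object, mirroring the upstream
    // highlight.js API.
    pub fn highlight_ignore_illegals(
        code: String,
        lang: String,
        ignore_illegals: bool,
    ) -> Option<String> {
        let options = js_sys::Object::new();
        set(&options, &"language".into(), &lang.into())
            .expect("failed to assign lang to options");
        set(&options, &"ignoreIllegals".into(), &ignore_illegals.into())
            .expect("failed to assign ignoreIllegals to options");
        highlight_lang(code, options)
            .map(|result| {
                let value = get(&result, &"value".into())
                    .expect("HighlightResult failed to contain the value key");
                value.into_serde().expect("Value should have been a string")
            })
            .ok()
    }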
}
#[cfg(feature = "ssr")]
mod ssr {
// noop under ssr
pub fn highlight_all() {}
// TODO see if there is a Rust-based solution that will enable isomorphic rendering for this feature.
// the current (disabled) implementation simply calls html_escape.
// pub fn highlight(code: String, _lang: String) -> Option<String> {
// Some(html_escape::encode_text(&code).into_owned())
// }
}
#[cfg(not(feature = "ssr"))]
pub use csr::*;
#[cfg(feature = "ssr")]
pub use ssr::*;

View File

@@ -1,51 +0,0 @@
pub mod api;
pub mod app;
pub mod consts;
pub mod hljs;
#[cfg(feature = "hydrate")]
#[wasm_bindgen::prelude::wasm_bindgen]
pub fn hydrate() {
use app::*;
use consts::LEPTOS_HYDRATED;
use std::panic;
panic::set_hook(Box::new(|info| {
// this custom hook calls out to show the usual error log at
// the console while also attempting to update the UI to indicate
// that a restart of the application is required to continue.
console_error_panic_hook::hook(info);
let window = leptos::prelude::window();
if !matches!(
js_sys::Reflect::get(&window, &wasm_bindgen::JsValue::from_str(LEPTOS_HYDRATED)),
Ok(t) if t == true
) {
let document = leptos::prelude::document();
let _ = document.query_selector("#reset").map(|el| {
el.map(|el| {
el.set_class_name("panicked");
})
});
let _ = document.query_selector("#notice").map(|el| {
el.map(|el| {
el.set_class_name("panicked");
})
});
}
}));
leptos::mount::hydrate_body(App);
let window = leptos::prelude::window();
js_sys::Reflect::set(
&window,
&wasm_bindgen::JsValue::from_str(LEPTOS_HYDRATED),
&wasm_bindgen::JsValue::TRUE,
)
.expect("error setting hydrated status");
let event = web_sys::Event::new(LEPTOS_HYDRATED)
.expect("error creating hydrated event");
let document = leptos::prelude::document();
document
.dispatch_event(&event)
.expect("error dispatching hydrated event");
leptos::logging::log!("dispatched hydrated event");
}

View File

@@ -1,152 +0,0 @@
#[cfg(feature = "ssr")]
mod latency {
use std::sync::{Mutex, OnceLock};
pub static LATENCY: OnceLock<
Mutex<std::iter::Cycle<std::slice::Iter<'_, u64>>>,
> = OnceLock::new();
pub static ES_LATENCY: OnceLock<
Mutex<std::iter::Cycle<std::slice::Iter<'_, u64>>>,
> = OnceLock::new();
}
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
use axum::{
body::Body,
extract::Request,
http::{
header::{self, HeaderValue},
StatusCode,
},
middleware::{self, Next},
response::{IntoResponse, Response},
routing::get,
Router,
};
use axum_js_ssr::app::*;
use http_body_util::BodyExt;
use leptos::{logging::log, prelude::*};
use leptos_axum::{generate_route_list, LeptosRoutes};
latency::LATENCY.get_or_init(|| [0, 4, 40, 400].iter().cycle().into());
latency::ES_LATENCY.get_or_init(|| [0].iter().cycle().into());
// Having ES_LATENCY (a cycle of latencies for loading the ES module)
// follow an identical cycle to LATENCY (for the standard version)
// adversely influences the intended demo, as it ultimately delays
// hydration when set too high, which can cause a panic in every case.
// If you want to test the effects of the delay, just modify the list of
// values for the desired cycle of delays.
let conf = get_configuration(None).unwrap();
let addr = conf.leptos_options.site_addr;
let leptos_options = conf.leptos_options;
// Generate the list of routes in your Leptos App
let routes = generate_route_list(App);
async fn highlight_js() -> impl IntoResponse {
(
[(header::CONTENT_TYPE, "text/javascript")],
include_str!(
"../node_modules/@highlightjs/cdn-assets/highlight.min.js"
),
)
}
async fn latency_for_highlight_js(
req: Request,
next: Next,
) -> Result<impl IntoResponse, (StatusCode, String)> {
let uri_parts = &mut req.uri().path().rsplit('/');
let is_highlightjs = uri_parts.next() == Some("highlight.min.js");
let es = uri_parts.next() == Some("es");
let module_type = if es { "es module " } else { "standard " };
let res = next.run(req).await;
if is_highlightjs {
// additional processing if the filename is the test subject
let (mut parts, body) = res.into_parts();
let bytes = body
.collect()
.await
.map_err(|err| {
(
StatusCode::BAD_REQUEST,
format!("error reading body: {err}"),
)
})?
.to_bytes();
let latency = if es {
&latency::ES_LATENCY
} else {
&latency::LATENCY
};
let delay = match latency
.get()
.expect("latency cycle wasn't set up")
.try_lock()
{
Ok(ref mut mutex) => {
*mutex.next().expect("cycle always has next")
}
Err(_) => 0,
};
// inject the logging of the delay used into the target script
log!(
"loading {module_type}highlight.min.js with latency of \
{delay} ms"
);
let js_log = format!(
"\nconsole.log('loaded {module_type}highlight.js with a \
minimum latency of {delay} ms');"
);
tokio::time::sleep(std::time::Duration::from_millis(delay)).await;
let bytes = [bytes, js_log.into()].concat();
let length = bytes.len();
let body = Body::from(bytes);
// Provide the bare minimum set of headers to avoid browser cache.
parts.headers = header::HeaderMap::from_iter(
[
(
header::CONTENT_TYPE,
HeaderValue::from_static("text/javascript"),
),
(header::CONTENT_LENGTH, HeaderValue::from(length)),
]
.into_iter(),
);
Ok(Response::from_parts(parts, body))
} else {
Ok(res)
}
}
let app = Router::new()
.route("/highlight.min.js", get(highlight_js))
.leptos_routes(&leptos_options, routes, {
let leptos_options = leptos_options.clone();
move || shell(leptos_options.clone())
})
.fallback(leptos_axum::file_and_error_handler(shell))
.layer(middleware::from_fn(latency_for_highlight_js))
.with_state(leptos_options);
// run our app with hyper via `axum::serve`
log!("listening on http://{}", &addr);
let listener = tokio::net::TcpListener::bind(&addr).await.unwrap();
axum::serve(listener, app.into_make_service())
.await
.unwrap();
}
#[cfg(not(feature = "ssr"))]
pub fn main() {
// no client-side main function
// unless we want this to work with e.g., Trunk for pure client-side testing
// see lib.rs for hydration function instead
}

View File

@@ -1,171 +0,0 @@
html, body {
margin: 0;
padding: 0;
font-family: sans-serif;
height: 100vh;
overflow: hidden;
}
body {
display: flex;
flex-flow: row nowrap;
}
nav {
min-width: 17em;
height: 100vh;
counter-reset: example-counter 0;
list-style-type: none;
list-style-position: outside;
overflow: auto;
}
nav a {
display: block;
padding: 0.5em 2em;
text-decoration: none;
}
nav a small {
display: block;
}
nav a.example::before {
counter-reset: subexample-counter 0;
counter-increment: example-counter 1;
content: counter(example-counter) ". ";
}
nav a.subexample::before {
counter-increment: subexample-counter 1;
content: counter(example-counter) "." counter(subexample-counter) " ";
}
div#notice {
display: none;
}
main div#notice.panicked {
position: sticky;
top: 0;
padding: 0.5em 2em;
display: block;
}
main {
width: 100%;
overflow: auto;
}
main article {
max-width: 60em;
margin: 0 1em;
padding: 0 1em;
}
main p, main li {
line-height: 1.3em;
}
main li pre code, main div pre code {
display: block;
line-height: normal;
}
main ol, main ul {
padding-left: 2em;
}
h2>code, p>code, li>code {
border-radius: 3px;
padding: 2px;
}
li pre code, div pre code {
margin: 0 !important;
padding: 0 !important;
}
#code-demo {
overflow-x: auto;
}
#code-demo table {
width: 50em;
margin: auto;
}
#code-demo table td {
vertical-align: top;
}
#code-demo table code {
display: block;
padding: 1em;
}
@media (prefers-color-scheme: light) {
nav {
background: #f7f7f7;
}
nav a {
color: #000;
}
nav a[aria-current="page"] {
background-color: #e0e0e0;
}
nav a:hover, h2>code, p>code, li>code {
background-color: #e7e7e7;
}
nav a.panicked, main div#notice.panicked {
background: #fdd;
}
main div#notice.panicked a {
color: #000;
}
nav a.section {
border-bottom: 1px solid #777;
}
}
@media (prefers-color-scheme: dark) {
nav {
background: #080808;
}
nav a {
color: #fff;
}
nav a[aria-current="page"] {
background-color: #3f3f3f;
}
nav a:hover, h2>code, p>code, li>code {
background-color: #383838;
}
nav a.panicked, main div#notice.panicked {
background: #733;
}
main div#notice.panicked a {
color: #fff;
}
nav a.section {
border-bottom: 1px solid #888;
}
}
// Just include the raw style as-is because I can't find a quick and easy way to import them just for the
// appropriate media type...
pre code.hljs{display:block;overflow-x:auto;padding:1em}code.hljs{padding:3px 5px}
@media (prefers-color-scheme: light){.hljs{color:#24292e;background:#fff}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#d73a49}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#6f42c1}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#005cc5}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#032f62}.hljs-built_in,.hljs-symbol{color:#e36209}.hljs-code,.hljs-comment,.hljs-formula{color:#6a737d}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#22863a}.hljs-subst{color:#24292e}.hljs-section{color:#005cc5;font-weight:700}.hljs-bullet{color:#735c0f}.hljs-emphasis{color:#24292e;font-style:italic}.hljs-strong{color:#24292e;font-weight:700}.hljs-addition{color:#22863a;background-color:#f0fff4}.hljs-deletion{color:#b31d28;background-color:#ffeef0}}
@media (prefers-color-scheme: dark){.hljs{color:#c9d1d9;background:#0d1117}.hljs-doctag,.hljs-keyword,.hljs-meta .hljs-keyword,.hljs-template-tag,.hljs-template-variable,.hljs-type,.hljs-variable.language_{color:#ff7b72}.hljs-title,.hljs-title.class_,.hljs-title.class_.inherited__,.hljs-title.function_{color:#d2a8ff}.hljs-attr,.hljs-attribute,.hljs-literal,.hljs-meta,.hljs-number,.hljs-operator,.hljs-selector-attr,.hljs-selector-class,.hljs-selector-id,.hljs-variable{color:#79c0ff}.hljs-meta .hljs-string,.hljs-regexp,.hljs-string{color:#a5d6ff}.hljs-built_in,.hljs-symbol{color:#ffa657}.hljs-code,.hljs-comment,.hljs-formula{color:#8b949e}.hljs-name,.hljs-quote,.hljs-selector-pseudo,.hljs-selector-tag{color:#7ee787}.hljs-subst{color:#c9d1d9}.hljs-section{color:#1f6feb;font-weight:700}.hljs-bullet{color:#f2cc60}.hljs-emphasis{color:#c9d1d9;font-style:italic}.hljs-strong{color:#c9d1d9;font-weight:700}.hljs-addition{color:#aff5b4;background-color:#033a16}.hljs-deletion{color:#ffdcd7;background-color:#67060c}}

View File

@@ -1,3 +1,7 @@
extend = [
{ path = "./lint.toml" }
]
[tasks.make-target-site-dir]
command = "mkdir"
args = ["-p", "target/site"]
@@ -20,16 +24,21 @@ clear = true
dependencies = ["check-debug", "check-release"]
[tasks.check-debug]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "clippy"]
args = ["check-all-features"]
install_crate = "cargo-all-features"
[tasks.check-release]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "clippy", "--release"]
args = ["check-all-features", "--release"]
install_crate = "cargo-all-features"
[tasks.lint]
dependencies = ["make-target-site-dir", "check-style"]
[tasks.start-client]
dependencies = ["install-cargo-leptos"]
command = "cargo"
args = ["leptos", "watch", "--release", "-P"]
args = ["leptos", "watch", "--release", "-P"]

View File

@@ -1,11 +0,0 @@
extend = [
{ path = "./cargo-leptos.toml" },
{ path = "../cargo-make/webdriver.toml" },
]
[tasks.integration-test]
dependencies = [
"install-cargo-leptos",
"start-webdriver",
"cargo-leptos-e2e-split",
]

View File

@@ -1,20 +1,10 @@
[tasks.cargo-all-features]
install_script = '''
cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
'''
[tasks.install-cargo-leptos]
install_crate = { crate_name = "cargo-leptos", binary = "cargo-leptos", test_arg = "--help" }
args = ["--locked"]
[tasks.cargo-leptos-e2e]
command = "cargo"
args = ["leptos", "end-to-end"]
[tasks.cargo-leptos-e2e-split]
command = "cargo"
args = ["leptos", "end-to-end", "--split"]
[tasks.build]
clear = true
command = "cargo"
@@ -25,14 +15,16 @@ clear = true
dependencies = ["check-debug", "check-release"]
[tasks.check-debug]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "clippy"]
args = ["check-all-features"]
install_crate = "cargo-all-features"
[tasks.check-release]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "clippy", "--release"]
args = ["check-all-features", "--release"]
install_crate = "cargo-all-features"
[tasks.start-client]
dependencies = ["install-cargo-leptos"]

View File

@@ -1,9 +1,11 @@
[tasks.cargo-all-features]
install_script = '''
cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
'''
[tasks.build]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "build"]
args = ["build-all-features"]
install_crate = "cargo-all-features"
[tasks.check]
toolchain = "stable"
command = "cargo"
args = ["check-all-features"]
install_crate = "cargo-all-features"

View File

@@ -1,8 +1,3 @@
[tasks.cargo-all-features]
install_script = '''
cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
'''
[tasks.build]
install_crate = { crate_name = "wasm-pack", binary = "wasm-pack", test_arg = "--help" }
clear = true
@@ -19,11 +14,13 @@ clear = true
dependencies = ["check-debug", "check-release"]
[tasks.check-debug]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "clippy"]
args = ["check-all-features"]
install_crate = "cargo-all-features"
[tasks.check-release]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "clippy", "--release"]
args = ["check-all-features", "--release"]
install_crate = "cargo-all-features"

View File

@@ -1,15 +1,8 @@
[tasks.cargo-all-features]
install_script = '''
cargo install --git https://github.com/sabify/cargo-all-features --branch arbitrary-command-support
'''
[tasks.pre-clippy]
env = { CARGO_MAKE_CLIPPY_ARGS = "--no-deps --all-targets --all-features -- -D warnings" }
[tasks.lint]
dependencies = ["check-format-flow", "clippy-each-feature"]
[tasks.clippy-each-feature]
dependencies = ["cargo-all-features"]
command = "cargo"
args = ["all-features", "clippy", "--no-deps", "--", "-D", "warnings"]
[tasks.check-style]
dependencies = ["check-format-flow", "clippy-flow"]
[tasks.check-format]
env = { LEPTOS_PROJECT_DIRECTORY = "../../" }

View File

@@ -1,19 +1,22 @@
extend = [
{ path = "./compile.toml" },
{ path = "./clean.toml" },
{ path = "./lint.toml" },
{ path = "./node.toml" },
{ path = "./process.toml" },
{ path = "./compile.toml" },
{ path = "./clean.toml" },
{ path = "./lint.toml" },
{ path = "./node.toml" },
{ path = "./process.toml" },
]
# CI Stages
[tasks.ci]
dependencies = ["prepare", "lint", "test-flow", "integration-test"]
dependencies = ["prepare", "lint", "build", "test-flow", "integration-test"]
[tasks.prepare]
dependencies = ["setup-node"]
[tasks.lint]
dependencies = ["check-style"]
[tasks.integration-test]
# Support Local Runs

View File

@@ -1,8 +1,22 @@
extend = [
{ path = "../cargo-make/playwright.toml" },
{ path = "../cargo-make/trunk_server.toml" },
{ path = "../cargo-make/playwright.toml" },
{ path = "../cargo-make/trunk_server.toml" },
]
[tasks.integration-test]
dependencies = ["build", "start-client", "test-playwright"]
description = "Run integration test with automated start and stop of processes"
env = { SPAWN_CLIENT_PROCESS = "1" }
run_task = { name = ["start", "wait-test-stop"], parallel = true }
[tasks.wait-test-stop]
private = true
dependencies = ["wait-server", "test-playwright", "stop"]
[tasks.wait-server]
script = '''
for run in {1..12}; do
echo "Waiting to ensure server is started..."
sleep 10
done
echo "Times up, running tests"
'''

View File

@@ -6,6 +6,5 @@ command = "trunk"
args = ["build"]
[tasks.start-client]
script = '''
trunk serve -q "${@}" &
'''
command = "trunk"
args = ["serve", "${@}"]

View File

@@ -1,3 +1,7 @@
[tasks.test]
env = { RUN_CARGO_TEST = false }
condition = { env_true = ["RUN_CARGO_TEST"] }
[tasks.post-test]
dependencies = ["test-wasm"]

View File

@@ -10,12 +10,12 @@ lto = true
[dependencies]
leptos = { path = "../../leptos", features = ["csr"] }
console_log = "1.0"
log = "0.4.22"
console_log = "1"
log = "0.4"
console_error_panic_hook = "0.1.7"
gloo-timers = { version = "0.3.0", features = ["futures"] }
[dev-dependencies]
wasm-bindgen = "0.2.93"
wasm-bindgen-test = "0.3.42"
web-sys = "0.3.70"
wasm-bindgen = "0.2"
wasm-bindgen-test = "0.3.0"
web-sys = "0.3"

View File

@@ -1,9 +1,8 @@
<!doctype html>
<!DOCTYPE html>
<html>
<head>
<link data-trunk rel="rust" data-wasm-opt="z" />
<link data-trunk rel="icon" type="image/ico" href="/public/favicon.ico" />
</head>
<body></body>
</html>
<head>
<link data-trunk rel="rust" data-wasm-opt="z"/>
<link data-trunk rel="icon" type="image/ico" href="/public/favicon.ico"/>
</head>
<body></body>
</html>

View File

@@ -1,2 +1,2 @@
[toolchain]
targets = ["wasm32-unknown-unknown"]
channel = "stable" # test change

View File

@@ -1,7 +1,7 @@
#![allow(dead_code)]
use counter::*;
use leptos::{mount::mount_to, prelude::*, task::tick};
use leptos::mount::mount_to;
use leptos::prelude::*;
use leptos::spawn::tick;
use wasm_bindgen::JsCast;
use wasm_bindgen_test::*;

View File

@@ -11,21 +11,23 @@ codegen-units = 1
lto = true
[dependencies]
actix-files = { version = "0.6.6", optional = true }
actix-web = { version = "4.8", optional = true, features = ["macros"] }
broadcaster = "1.0"
console_log = "1.0"
console_error_panic_hook = "0.1.7"
futures = "0.3.30"
actix-files = { version = "0.6", optional = true }
actix-web = { version = "4", optional = true, features = ["macros"] }
broadcaster = "1"
console_log = "1"
console_error_panic_hook = "0.1"
futures = "0.3"
lazy_static = "1"
leptos = { path = "../../leptos" }
leptos_actix = { path = "../../integrations/actix", optional = true }
leptos_router = { path = "../../router" }
log = "0.4.22"
gloo-net = { version = "0.6.0" }
wasm-bindgen = "0.2.93"
serde = { version = "1.0", features = ["derive"] }
simple_logger = "5.0"
tracing = { version = "0.1.40", optional = true }
log = "0.4"
once_cell = "1.18"
gloo-net = { version = "0.6" }
wasm-bindgen = "0.2"
serde = { version = "1", features = ["derive"] }
simple_logger = "4.3"
tracing = { version = "0.1", optional = true }
send_wrapper = "0.6.0"
[features]
@@ -41,16 +43,16 @@ ssr = [
[package.metadata.cargo-all-features]
denylist = ["actix-files", "actix-web", "leptos_actix"]
skip_feature_sets = [["ssr", "hydrate"], []]
skip_feature_sets = [["ssr", "hydrate"]]
[package.metadata.leptos]
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
# The name used by wasm-bindgen/cargo-leptos for the JS/WASM bundle. Defaults to the crate name
output-name = "counter_isomorphic"
# The site root folder is where cargo-leptos generate all output. WARNING: all content of this folder will be erased on a rebuild. Use it in your server setup.
# When NOT using cargo-leptos this must be updated to "." or the counters will not work. The above warning still applies if you do switch to cargo-leptos later.
site-root = "target/site"
# The site-root relative folder where all compiled output (JS, WASM and CSS) is written
# Defaults to pkg
# Defaults to pkg
site-pkg-dir = "pkg"
# [Optional] The source CSS file. If it ends with .sass or .scss then it will be compiled by dart-sass into CSS. The CSS is optimized by Lightning CSS before being written to <site-root>/<site-pkg>/app.css
# style-file = "src/styles/tailwind.css"

View File

@@ -1,4 +1,4 @@
use leptos::prelude::*;
use leptos::{prelude::*, reactive_graph::actions::Action};
use leptos_router::{
components::{FlatRoutes, Route, Router, A},
StaticSegment,
@@ -10,12 +10,12 @@ use tracing::instrument;
pub mod ssr_imports {
pub use broadcaster::BroadcastChannel;
pub use std::sync::atomic::{AtomicI32, Ordering};
use std::sync::LazyLock;
pub static COUNT: AtomicI32 = AtomicI32::new(0);
pub static COUNT_CHANNEL: LazyLock<BroadcastChannel<i32>> =
LazyLock::new(BroadcastChannel::<i32>::new);
lazy_static::lazy_static! {
pub static ref COUNT_CHANNEL: BroadcastChannel<i32> = BroadcastChannel::new();
}
}
#[server]

View File

@@ -63,7 +63,7 @@ async fn main() -> std::io::Result<()> {
</html>
}
}})
.service(Files::new("/", site_root.as_ref()))
.service(Files::new("/", site_root))
})
.bind(&addr)?
.run()

View File

@@ -13,6 +13,6 @@ leptos_router = { path = "../../router", features = [] }
console_error_panic_hook = "0.1.7"
[dev-dependencies]
wasm-bindgen = "0.2.93"
wasm-bindgen-test = "0.3.42"
web-sys = "0.3.70"
wasm-bindgen = "0.2"
wasm-bindgen-test = "0.3.0"
web-sys = "0.3"

View File

@@ -1,2 +1,2 @@
[toolchain]
targets = ["wasm32-unknown-unknown"]
channel = "stable" # test change

View File

@@ -14,10 +14,10 @@ console_error_panic_hook = "0.1.7"
[dev-dependencies]
wasm-bindgen = "0.2"
wasm-bindgen-test = "0.3.42"
pretty_assertions = "1.4"
rstest = "0.22.0"
wasm-bindgen-test = "0.3.34"
pretty_assertions = "1.3.0"
rstest = "0.17.0"
[dev-dependencies.web-sys]
features = ["HtmlElement", "XPathResult"]
version = "0.3.70"
version = "0.3.61"

View File

@@ -1,15 +1,17 @@
extend = [
{ path = "../cargo-make/main.toml" },
{ path = "../cargo-make/wasm-test.toml" },
{ path = "../cargo-make/trunk_server.toml" },
{ path = "../cargo-make/main.toml" },
{ path = "../cargo-make/wasm-test.toml" },
{ path = "../cargo-make/trunk_server.toml" },
]
[tasks.build]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "build"]
args = ["build-all-features"]
install_crate = "cargo-all-features"
[tasks.check]
dependencies = ["cargo-all-features"]
toolchain = "stable"
command = "cargo"
args = ["all-features", "clippy"]
args = ["check-all-features"]
install_crate = "cargo-all-features"

View File

@@ -1,7 +1,5 @@
#![allow(dead_code)]
use counter_without_macros::counter;
use leptos::{prelude::*, task::tick};
use leptos::{prelude::*, spawn::tick};
use pretty_assertions::assert_eq;
use wasm_bindgen::JsCast;
use wasm_bindgen_test::*;

View File

@@ -1,6 +0,0 @@
# Support playwright testing
node_modules/
test-results/
end2end/playwright-report/
playwright/.cache/
pnpm-lock.yaml

View File

@@ -4,10 +4,10 @@ version = "0.1.0"
edition = "2021"
[dependencies]
leptos = { path = "../../leptos", features = ["csr"] }
leptos = { path = "../../leptos", features = ["csr"] }
console_error_panic_hook = "0.1.7"
[dev-dependencies]
wasm-bindgen-test = "0.3.42"
wasm-bindgen = "0.2.93"
web-sys = "0.3.70"
wasm-bindgen-test = "0.3.0"
wasm-bindgen = "0.2"
web-sys = "0.3"

View File

@@ -2,5 +2,4 @@ extend = [
{ path = "../cargo-make/main.toml" },
{ path = "../cargo-make/wasm-test.toml" },
{ path = "../cargo-make/trunk_server.toml" },
{ path = "../cargo-make/playwright-trunk-test.toml" },
]

View File

@@ -1,4 +0,0 @@
node_modules/
/test-results/
/playwright-report/
/playwright/.cache/

View File

@@ -1,83 +0,0 @@
{
"name": "grip",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "grip",
"devDependencies": {
"@playwright/test": "^1.35.1"
}
},
"node_modules/.pnpm/@playwright+test@1.33.0": {
"extraneous": true
},
"node_modules/.pnpm/@types+node@20.2.1/node_modules/@types/node": {
"version": "20.2.1",
"extraneous": true,
"license": "MIT"
},
"node_modules/.pnpm/playwright-core@1.33.0/node_modules/playwright-core": {
"version": "1.33.0",
"extraneous": true,
"license": "Apache-2.0",
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=14"
}
},
"node_modules/@playwright/test": {
"version": "1.35.1",
"resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.35.1.tgz",
"integrity": "sha512-b5YoFe6J9exsMYg0pQAobNDR85T1nLumUYgUTtKm4d21iX2L7WqKq9dW8NGJ+2vX0etZd+Y7UeuqsxDXm9+5ZA==",
"dev": true,
"dependencies": {
"@types/node": "*",
"playwright-core": "1.35.1"
},
"bin": {
"playwright": "cli.js"
},
"engines": {
"node": ">=16"
},
"optionalDependencies": {
"fsevents": "2.3.2"
}
},
"node_modules/@types/node": {
"version": "20.3.1",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.3.1.tgz",
"integrity": "sha512-EhcH/wvidPy1WeML3TtYFGR83UzjxeWRen9V402T8aUGYsCHOmfoisV3ZSg03gAFIbLq8TnWOJ0f4cALtnSEUg==",
"dev": true
},
"node_modules/fsevents": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz",
"integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==",
"dev": true,
"hasInstallScript": true,
"optional": true,
"os": [
"darwin"
],
"engines": {
"node": "^8.16.0 || ^10.6.0 || >=11.0.0"
}
},
"node_modules/playwright-core": {
"version": "1.35.1",
"resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.35.1.tgz",
"integrity": "sha512-pNXb6CQ7OqmGDRspEjlxE49w+4YtR6a3X6mT1hZXeJHWmsEz7SunmvZeiG/+y1yyMZdHnnn73WKYdtV1er0Xyg==",
"dev": true,
"bin": {
"playwright-core": "cli.js"
},
"engines": {
"node": ">=16"
}
}
}
}

View File

@@ -1,10 +0,0 @@
{
"private": "true",
"scripts": {},
"devDependencies": {
"@playwright/test": "^1.46.1"
},
"dependencies": {
"pnpm": "^9.7.1"
}
}

View File

@@ -1,77 +0,0 @@
import { defineConfig, devices } from "@playwright/test";
/**
* Read environment variables from file.
* https://github.com/motdotla/dotenv
*/
// require('dotenv').config();
/**
* See https://playwright.dev/docs/test-configuration.
*/
export default defineConfig({
testDir: "./tests",
/* Run tests in files in parallel */
fullyParallel: true,
/* Fail the build on CI if you accidentally left test.only in the source code. */
forbidOnly: !process.env.DEV,
/* Retry on CI only */
retries: process.env.DEV ? 0 : 10,
/* Opt out of parallel tests on CI. */
workers: process.env.DEV ? 1 : 1,
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
reporter: [["html", { open: "never" }], ["list"]],
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
use: {
/* Base URL to use in actions like `await page.goto('/')`. */
baseURL: "http://127.0.0.1:8080",
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
trace: "on-first-retry",
},
/* Configure projects for major browsers */
projects: [
{
name: "chromium",
use: { ...devices["Desktop Chrome"] },
},
// {
// name: "firefox",
// use: { ...devices["Desktop Firefox"] },
// },
// {
// name: "webkit",
// use: { ...devices["Desktop Safari"] },
// },
/* Test against mobile viewports. */
// {
// name: 'Mobile Chrome',
// use: { ...devices['Pixel 5'] },
// },
// {
// name: 'Mobile Safari',
// use: { ...devices['iPhone 12'] },
// },
/* Test against branded browsers. */
// {
// name: 'Microsoft Edge',
// use: { ...devices['Desktop Edge'], channel: 'msedge' },
// },
// {
// name: 'Google Chrome',
// use: { ..devices['Desktop Chrome'], channel: 'chrome' },
// },
],
/* Run your local dev server before starting the tests */
// webServer: {
// command: "cd ../ && trunk serve",
// url: "http://127.0.0.1:8080",
// reuseExistingServer: false, //!process.env.CI,
// },
});

Some files were not shown because too many files have changed in this diff.