Merge branch 'main' into lendemor/add_sticky_logo
Commit 2dcd516e28

.github/workflows/benchmarks.yml (vendored) — 42 changes
@@ -5,7 +5,7 @@ on:
     types:
       - closed
     paths-ignore:
-      - '**/*.md'
+      - "**/*.md"

 permissions:
   contents: read

@@ -15,21 +15,21 @@ defaults:
     shell: bash

 env:
-  PYTHONIOENCODING: 'utf8'
+  PYTHONIOENCODING: "utf8"
   TELEMETRY_ENABLED: false
-  NODE_OPTIONS: '--max_old_space_size=8192'
+  NODE_OPTIONS: "--max_old_space_size=8192"
   PR_TITLE: ${{ github.event.pull_request.title }}

 jobs:
   reflex-web:
     # if: github.event.pull_request.merged == true
     strategy:
       fail-fast: false
       matrix:
         # Show OS combos first in GUI
         os: [ubuntu-latest]
-        python-version: ['3.11.4']
-        node-version: ['18.x']
+        python-version: ["3.12.8"]
+        node-version: ["18.x"]

     runs-on: ${{ matrix.os }}
     steps:

@@ -81,24 +81,24 @@ jobs:
       matrix:
         # Show OS combos first in GUI
         os: [ubuntu-latest, windows-latest, macos-latest]
-        python-version: ['3.9.18', '3.10.13', '3.11.5', '3.12.0']
+        python-version: ["3.9.21", "3.10.16", "3.11.11", "3.12.8"]
         exclude:
           - os: windows-latest
-            python-version: '3.10.13'
+            python-version: "3.10.16"
           - os: windows-latest
-            python-version: '3.9.18'
+            python-version: "3.9.21"
           # keep only one python version for MacOS
           - os: macos-latest
-            python-version: '3.9.18'
+            python-version: "3.9.21"
           - os: macos-latest
-            python-version: '3.10.13'
+            python-version: "3.10.16"
           - os: macos-latest
-            python-version: '3.12.0'
+            python-version: "3.11.11"
         include:
           - os: windows-latest
-            python-version: '3.10.11'
+            python-version: "3.10.11"
           - os: windows-latest
-            python-version: '3.9.13'
+            python-version: "3.9.13"

     runs-on: ${{ matrix.os }}
     steps:

@@ -123,7 +123,7 @@ jobs:
         --event-type "${{ github.event_name }}" --pr-id "${{ github.event.pull_request.id }}"

   reflex-dist-size: # This job is used to calculate the size of the Reflex distribution (wheel file)
     if: github.event.pull_request.merged == true
     timeout-minutes: 30
     strategy:
       # Prioritize getting more information out of the workflow (even if something fails)

@@ -133,7 +133,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: ./.github/actions/setup_build_env
         with:
-          python-version: 3.11.5
+          python-version: 3.12.8
          run-poetry-install: true
          create-venv-at-path: .venv
       - name: Build reflex

@@ -143,12 +143,12 @@ jobs:
         # Only run if the database creds are available in this context.
         run:
           poetry run python benchmarks/benchmark_package_size.py --os ubuntu-latest
-          --python-version 3.11.5 --commit-sha "${{ github.sha }}" --pr-id "${{ github.event.pull_request.id }}"
+          --python-version 3.12.8 --commit-sha "${{ github.sha }}" --pr-id "${{ github.event.pull_request.id }}"
           --branch-name "${{ github.head_ref || github.ref_name }}"
           --path ./dist

   reflex-venv-size: # This job calculates the total size of Reflex and its dependencies
     if: github.event.pull_request.merged == true
     timeout-minutes: 30
     strategy:
       # Prioritize getting more information out of the workflow (even if something fails)

@@ -156,7 +156,7 @@ jobs:
       matrix:
         # Show OS combos first in GUI
         os: [ubuntu-latest, windows-latest, macos-latest]
-        python-version: ['3.11.5']
+        python-version: ["3.12.8"]

     runs-on: ${{ matrix.os }}
     steps:

@@ -186,6 +186,6 @@ jobs:
       run:
         poetry run python benchmarks/benchmark_package_size.py --os "${{ matrix.os }}"
         --python-version "${{ matrix.python-version }}" --commit-sha "${{ github.sha }}"
         --pr-id "${{ github.event.pull_request.id }}"
         --branch-name "${{ github.head_ref || github.ref_name }}"
         --path ./.venv
.github/workflows/check_generated_pyi.yml (vendored) — 10 changes

@@ -6,16 +6,16 @@ concurrency:

 on:
   push:
-    branches: ['main']
+    branches: ["main"]
     # We don't just trigger on make_pyi.py and the components dir, because
     # there are other things that can change the generator output
     # e.g. black version, reflex.Component, reflex.Var.
     paths-ignore:
-      - '**/*.md'
+      - "**/*.md"
   pull_request:
-    branches: ['main']
+    branches: ["main"]
     paths-ignore:
-      - '**/*.md'
+      - "**/*.md"

 jobs:
   check-generated-pyi-components:

@@ -25,7 +25,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: ./.github/actions/setup_build_env
         with:
-          python-version: '3.11.5'
+          python-version: "3.12.8"
           run-poetry-install: true
           create-venv-at-path: .venv
       - run: |
.github/workflows/check_node_latest.yml (vendored) — 67 changes

@@ -1,43 +1,40 @@
 name: integration-node-latest

 on:
   push:
     branches:
       - main
   pull_request:
     branches:
       - main

 env:
   TELEMETRY_ENABLED: false
   REFLEX_USE_SYSTEM_NODE: true

 jobs:
   check_latest_node:
     runs-on: ubuntu-22.04
     strategy:
       matrix:
-        python-version: ['3.12']
+        python-version: ["3.12.8"]
         split_index: [1, 2]
-        node-version: ['node']
+        node-version: ["node"]
       fail-fast: false

     steps:
       - uses: actions/checkout@v4
       - uses: ./.github/actions/setup_build_env
         with:
           python-version: ${{ matrix.python-version }}
           run-poetry-install: true
           create-venv-at-path: .venv
       - uses: actions/setup-node@v4
         with:
           node-version: ${{ matrix.node-version }}
       - run: |
           poetry run uv pip install pyvirtualdisplay pillow pytest-split
           poetry run playwright install --with-deps
       - run: |
           poetry run pytest tests/test_node_version.py
           poetry run pytest tests/integration --splits 2 --group ${{matrix.split_index}}
.github/workflows/check_outdated_dependencies.yml (vendored) — 130 changes

@@ -1,88 +1,86 @@
 name: check-outdated-dependencies

 on:
   push: # This will trigger the action when a pull request is opened or updated.
     branches:
-      - 'release/**' # This will trigger the action when any branch starting with "release/" is created.
+      - "release/**" # This will trigger the action when any branch starting with "release/" is created.
   workflow_dispatch: # Allow manual triggering if needed.

 jobs:
   backend:
     runs-on: ubuntu-latest

     steps:
       - name: Checkout code
         uses: actions/checkout@v3

       - uses: ./.github/actions/setup_build_env
         with:
-          python-version: '3.9'
+          python-version: "3.9.21"
           run-poetry-install: true
           create-venv-at-path: .venv

       - name: Check outdated backend dependencies
         run: |
           outdated=$(poetry show -oT)
           echo "Outdated:"
           echo "$outdated"

           filtered_outdated=$(echo "$outdated" | grep -vE 'pyright|ruff' || true)

           if [ ! -z "$filtered_outdated" ]; then
             echo "Outdated dependencies found:"
             echo "$filtered_outdated"
             exit 1
           else
             echo "All dependencies are up to date. (pyright and ruff are ignored)"
           fi

   frontend:
     runs-on: ubuntu-latest

     steps:
       - name: Checkout code
         uses: actions/checkout@v4
       - uses: ./.github/actions/setup_build_env
         with:
-          python-version: '3.10.11'
+          python-version: "3.10.16"
           run-poetry-install: true
           create-venv-at-path: .venv
       - name: Clone Reflex Website Repo
         uses: actions/checkout@v4
         with:
           repository: reflex-dev/reflex-web
           ref: main
           path: reflex-web
       - name: Install Requirements for reflex-web
         working-directory: ./reflex-web
         run: poetry run uv pip install -r requirements.txt
       - name: Install additional dependencies for DB access
         run: poetry run uv pip install psycopg
       - name: Init Website for reflex-web
         working-directory: ./reflex-web
         run: poetry run reflex init
       - name: Run Website and Check for errors
         run: |
           poetry run bash scripts/integration.sh ./reflex-web dev
       - name: Check outdated frontend dependencies
         working-directory: ./reflex-web/.web
         run: |
           raw_outdated=$(/home/runner/.local/share/reflex/bun/bin/bun outdated)
           outdated=$(echo "$raw_outdated" | grep -vE '\|\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\|' || true)
           echo "Outdated:"
           echo "$outdated"

           # Ignore 3rd party dependencies that are not updated.
           filtered_outdated=$(echo "$outdated" | grep -vE 'Package|@chakra-ui|lucide-react|@splinetool/runtime|ag-grid-react|framer-motion|react-markdown|remark-math|remark-gfm|rehype-katex|rehype-raw|remark-unwrap-images' || true)
           no_extra=$(echo "$filtered_outdated" | grep -vE '\|\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-' || true)

           if [ ! -z "$no_extra" ]; then
             echo "Outdated dependencies found:"
             echo "$filtered_outdated"
             exit 1
           else
             echo "All dependencies are up to date. (3rd party packages are ignored)"
           fi
@@ -22,8 +22,8 @@ jobs:
     timeout-minutes: 30
     strategy:
       matrix:
-        state_manager: ['redis', 'memory']
-        python-version: ['3.11.5', '3.12.0', '3.13.0']
+        state_manager: ["redis", "memory"]
+        python-version: ["3.11.11", "3.12.8", "3.13.1"]
         split_index: [1, 2]
       fail-fast: false
     runs-on: ubuntu-22.04

@@ -53,7 +53,7 @@ jobs:
         SCREENSHOT_DIR: /tmp/screenshots/${{ matrix.state_manager }}/${{ matrix.python-version }}/${{ matrix.split_index }}
         REDIS_URL: ${{ matrix.state_manager == 'redis' && 'redis://localhost:6379' || '' }}
       run: |
-        poetry run playwright install --with-deps
+        poetry run playwright install chromium
         poetry run pytest tests/integration --splits 2 --group ${{matrix.split_index}}
     - uses: actions/upload-artifact@v4
       name: Upload failed test screenshots
.github/workflows/integration_tests.yml (vendored) — 60 changes

@@ -2,13 +2,13 @@ name: integration-tests

 on:
   push:
-    branches: ['main']
+    branches: ["main"]
     paths-ignore:
-      - '**/*.md'
+      - "**/*.md"
   pull_request:
-    branches: ['main']
+    branches: ["main"]
     paths-ignore:
-      - '**/*.md'
+      - "**/*.md"

 concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.id }}

@@ -27,13 +27,13 @@ env:
   # TODO: can we fix windows encoding natively within reflex? Bug above can hit real users too (less common, but possible)
   # - Catch encoding errors when printing logs
   # - Best effort print lines that contain illegal chars (map to some default char, etc.)
-  PYTHONIOENCODING: 'utf8'
+  PYTHONIOENCODING: "utf8"
   TELEMETRY_ENABLED: false
-  NODE_OPTIONS: '--max_old_space_size=8192'
+  NODE_OPTIONS: "--max_old_space_size=8192"
   PR_TITLE: ${{ github.event.pull_request.title }}

 jobs:
-  example-counter:
+  example-counter-and-nba-proxy:
     env:
       OUTPUT_FILE: import_benchmark.json
     timeout-minutes: 30

@@ -43,17 +43,22 @@ jobs:
       matrix:
         # Show OS combos first in GUI
         os: [ubuntu-latest, windows-latest]
-        python-version: ['3.9.18', '3.10.13', '3.11.5', '3.12.0', '3.13.0']
+        python-version: ["3.9.21", "3.10.16", "3.11.11", "3.12.8", "3.13.1"]
         # Windows is a bit behind on Python version availability in Github
         exclude:
           - os: windows-latest
-            python-version: '3.10.13'
+            python-version: "3.11.11"
           - os: windows-latest
-            python-version: '3.9.18'
+            python-version: "3.10.16"
+          - os: windows-latest
+            python-version: "3.9.21"
         include:
           - os: windows-latest
-            python-version: '3.10.11'
+            python-version: "3.11.9"
           - os: windows-latest
-            python-version: '3.9.13'
+            python-version: "3.10.11"
+          - os: windows-latest
+            python-version: "3.9.13"

     runs-on: ${{ matrix.os }}
     steps:

@@ -114,7 +119,25 @@ jobs:
           --benchmark-json "./reflex-examples/counter/${{ env.OUTPUT_FILE }}"
           --branch-name "${{ github.head_ref || github.ref_name }}" --pr-id "${{ github.event.pull_request.id }}"
           --app-name "counter"

+      - name: Install requirements for nba proxy example
+        working-directory: ./reflex-examples/nba-proxy
+        run: |
+          poetry run uv pip install -r requirements.txt
+      - name: Install additional dependencies for DB access
+        run: poetry run uv pip install psycopg
+      - name: Check export --backend-only before init for nba-proxy example
+        working-directory: ./reflex-examples/nba-proxy
+        run: |
+          poetry run reflex export --backend-only
+      - name: Init Website for nba-proxy example
+        working-directory: ./reflex-examples/nba-proxy
+        run: |
+          poetry run reflex init --loglevel debug
+      - name: Run Website and Check for errors
+        run: |
+          # Check that npm is home
+          npm -v
+          poetry run bash scripts/integration.sh ./reflex-examples/nba-proxy dev

   reflex-web:

@@ -123,10 +146,10 @@ jobs:
       matrix:
         # Show OS combos first in GUI
         os: [ubuntu-latest]
-        python-version: ['3.10.11', '3.11.4']
+        python-version: ["3.11.11", "3.12.8"]

     env:
-      REFLEX_WEB_WINDOWS_OVERRIDE: '1'
+      REFLEX_WEB_WINDOWS_OVERRIDE: "1"
     runs-on: ${{ matrix.os }}
     steps:
       - uses: actions/checkout@v4

@@ -171,7 +194,7 @@ jobs:
       - uses: actions/checkout@v4
       - uses: ./.github/actions/setup_build_env
         with:
-          python-version: '3.11.4'
+          python-version: "3.11.11"
           run-poetry-install: true
           create-venv-at-path: .venv
       - name: Create app directory

@@ -190,14 +213,14 @@ jobs:
           # Check that npm is home
           npm -v
           poetry run bash scripts/integration.sh ./rx-shout-from-template prod

   reflex-web-macos:
     if: github.event_name == 'push' && github.ref == 'refs/heads/main'
     strategy:
       fail-fast: false
       matrix:
-        python-version: ['3.11.5', '3.12.0']
+        # Note: py311 version chosen due to available arm64 darwin builds.
+        python-version: ["3.11.9", "3.12.8"]
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@v4

@@ -231,4 +254,3 @@ jobs:
           --python-version "${{ matrix.python-version }}" --commit-sha "${{ github.sha }}"
           --pr-id "${{ github.event.pull_request.id }}" --branch-name "${{ github.head_ref || github.ref_name }}"
           --app-name "reflex-web" --path ./reflex-web/.web
.github/workflows/pre-commit.yml (vendored) — 6 changes

@@ -6,12 +6,12 @@ concurrency:

 on:
   pull_request:
-    branches: ['main']
+    branches: ["main"]
   push:
     # Note even though this job is called "pre-commit" and runs "pre-commit", this job will run
     # also POST-commit on main also! In case there are mishandled merge conflicts / bad auto-resolves
     # when merging into main branch.
-    branches: ['main']
+    branches: ["main"]

 jobs:
   pre-commit:

@@ -23,7 +23,7 @@ jobs:
       with:
         # running vs. one version of Python is OK
         # i.e. ruff, black, etc.
-        python-version: 3.11.5
+        python-version: 3.12.8
         run-poetry-install: true
         create-venv-at-path: .venv
     # TODO pre-commit related stuff can be cached too (not a bottleneck yet)
.github/workflows/unit_tests.yml (vendored) — 28 changes

@@ -6,13 +6,13 @@ concurrency:

 on:
   push:
-    branches: ['main']
+    branches: ["main"]
     paths-ignore:
-      - '**/*.md'
+      - "**/*.md"
   pull_request:
-    branches: ['main']
+    branches: ["main"]
     paths-ignore:
-      - '**/*.md'
+      - "**/*.md"

 permissions:
   contents: read

@@ -28,18 +28,22 @@ jobs:
     fail-fast: false
     matrix:
       os: [ubuntu-latest, windows-latest]
-      python-version: ['3.9.18', '3.10.13', '3.11.5', '3.12.0', '3.13.0']
+      python-version: ["3.9.21", "3.10.16", "3.11.11", "3.12.8", "3.13.1"]
       # Windows is a bit behind on Python version availability in Github
       exclude:
         - os: windows-latest
-          python-version: '3.10.13'
+          python-version: "3.11.11"
        - os: windows-latest
-          python-version: '3.9.18'
+          python-version: "3.10.16"
+        - os: windows-latest
+          python-version: "3.9.21"
       include:
         - os: windows-latest
-          python-version: '3.10.11'
+          python-version: "3.11.9"
         - os: windows-latest
-          python-version: '3.9.13'
+          python-version: "3.10.11"
+        - os: windows-latest
+          python-version: "3.9.13"
     runs-on: ${{ matrix.os }}

     # Service containers to run with `runner-job`

@@ -88,8 +92,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        # Note: py39, py310 versions chosen due to available arm64 darwin builds.
-        python-version: ['3.9.13', '3.10.11', '3.11.5', '3.12.0', '3.13.0']
+        # Note: py39, py310, py311 versions chosen due to available arm64 darwin builds.
+        python-version: ["3.9.13", "3.10.11", "3.11.9", "3.12.8", "3.13.1"]
     runs-on: macos-latest
     steps:
       - uses: actions/checkout@v4

@@ -106,4 +110,4 @@ jobs:
       run: |
         export PYTHONUNBUFFERED=1
         poetry run uv pip install "pydantic~=1.10"
         poetry run pytest tests/units --cov --no-cov-on-fail --cov-report=
@@ -21,7 +21,7 @@ def get_package_size(venv_path: Path, os_name):
         ValueError: when venv does not exist or python version is None.
     """
     python_version = get_python_version(venv_path, os_name)
-    print("Python version:", python_version)
+    print("Python version:", python_version)  # noqa: T201
     if python_version is None:
         raise ValueError("Error: Failed to determine Python version.")
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "reflex"
-version = "0.6.8dev1"
+version = "0.7.0dev1"
 description = "Web apps in pure Python."
 license = "Apache-2.0"
 authors = [

@@ -16,7 +16,6 @@ repository = "https://github.com/reflex-dev/reflex"
 documentation = "https://reflex.dev/docs/getting-started/introduction"
 keywords = ["web", "framework"]
 classifiers = ["Development Status :: 4 - Beta"]
 packages = [{ include = "reflex" }]

 [tool.poetry.dependencies]
 python = "^3.9"

@@ -87,13 +86,13 @@ build-backend = "poetry.core.masonry.api"
 target-version = "py39"
 output-format = "concise"
 lint.isort.split-on-trailing-comma = false
-lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "PERF", "PTH", "RUF", "SIM", "W"]
-lint.ignore = ["B008", "D205", "E501", "F403", "SIM115", "RUF006", "RUF012"]
+lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "PERF", "PTH", "RUF", "SIM", "T", "TRY", "W"]
+lint.ignore = ["B008", "D205", "E501", "F403", "SIM115", "RUF006", "RUF012", "TRY0"]
 lint.pydocstyle.convention = "google"

 [tool.ruff.lint.per-file-ignores]
 "__init__.py" = ["F401"]
-"tests/*.py" = ["D100", "D103", "D104", "B018", "PERF"]
+"tests/*.py" = ["D100", "D103", "D104", "B018", "PERF", "T"]
 "reflex/.templates/*.py" = ["D100", "D103", "D104"]
 "*.pyi" = ["D301", "D415", "D417", "D418", "E742"]
 "*/blank.py" = ["I001"]

@@ -104,4 +103,4 @@ asyncio_mode = "auto"

 [tool.codespell]
 skip = "docs/*,*.html,examples/*, *.pyi"
 ignore-words-list = "te, TreeE"
@@ -1,4 +1,5 @@
 {% extends "web/pages/base_page.js.jinja2" %}
+{% from "web/pages/macros.js.jinja2" import renderHooks %}

 {% block early_imports %}
 import '$/styles/styles.css'

@@ -18,10 +19,7 @@ import * as {{library_alias}} from "{{library_path}}";

 {% block export %}
 function AppWrap({children}) {

-  {% for hook in hooks %}
-    {{ hook }}
-  {% endfor %}
+  {{ renderHooks(hooks) }}

   return (
     {{utils.render(render, indent_width=0)}}
@@ -1,5 +1,5 @@
 {% extends "web/pages/base_page.js.jinja2" %}
-
+{% from "web/pages/macros.js.jinja2" import renderHooks %}
 {% block export %}
 {% for component in components %}

@@ -8,9 +8,8 @@
 {% endfor %}

 export const {{component.name}} = memo(({ {{-component.props|join(", ")-}} }) => {
-  {% for hook in component.hooks %}
-    {{ hook }}
-  {% endfor %}
+  {{ renderHooks(component.hooks) }}

   return(
     {{utils.render(component.render)}}
   )
@@ -1,4 +1,5 @@
 {% extends "web/pages/base_page.js.jinja2" %}
+{% from "web/pages/macros.js.jinja2" import renderHooks %}

 {% block declaration %}
 {% for custom_code in custom_codes %}

@@ -8,9 +9,7 @@

 {% block export %}
 export default function Component() {
-  {% for hook in hooks %}
-    {{ hook }}
-  {% endfor %}
+  {{ renderHooks(hooks)}}

   return (
     {{utils.render(render, indent_width=0)}}
reflex/.templates/jinja/web/pages/macros.js.jinja2 — new file, 38 lines

@@ -0,0 +1,38 @@
+{% macro renderHooks(hooks) %}
+  {% set sorted_hooks = sort_hooks(hooks) %}
+
+  {# Render the grouped hooks #}
+  {% for hook, _ in sorted_hooks[const.hook_position.INTERNAL] %}
+    {{ hook }}
+  {% endfor %}
+
+  {% for hook, _ in sorted_hooks[const.hook_position.PRE_TRIGGER] %}
+    {{ hook }}
+  {% endfor %}
+
+  {% for hook, _ in sorted_hooks[const.hook_position.POST_TRIGGER] %}
+    {{ hook }}
+  {% endfor %}
+{% endmacro %}
+
+{% macro renderHooksWithMemo(hooks, memo)%}
+  {% set sorted_hooks = sort_hooks(hooks) %}
+
+  {# Render the grouped hooks #}
+  {% for hook, _ in sorted_hooks[const.hook_position.INTERNAL] %}
+    {{ hook }}
+  {% endfor %}
+
+  {% for hook, _ in sorted_hooks[const.hook_position.PRE_TRIGGER] %}
+    {{ hook }}
+  {% endfor %}
+
+  {% for hook in memo %}
+    {{ hook }}
+  {% endfor %}
+
+  {% for hook, _ in sorted_hooks[const.hook_position.POST_TRIGGER] %}
+    {{ hook }}
+  {% endfor %}
+
+{% endmacro %}
@@ -1,22 +1,10 @@
 {% import 'web/pages/utils.js.jinja2' as utils %}
+{% from 'web/pages/macros.js.jinja2' import renderHooksWithMemo %}
+{% set all_hooks = component._get_all_hooks() %}

 export function {{tag_name}} () {
-  {% for hook in component._get_all_hooks_internal() %}
-    {{ hook }}
-  {% endfor %}
-
-  {% for hook, data in component._get_all_hooks().items() if not data.position or data.position == const.hook_position.PRE_TRIGGER %}
-    {{ hook }}
-  {% endfor %}
-
-  {% for hook in memo_trigger_hooks %}
-    {{ hook }}
-  {% endfor %}
-
-  {% for hook, data in component._get_all_hooks().items() if data.position and data.position == const.hook_position.POST_TRIGGER %}
-    {{ hook }}
-  {% endfor %}
+  {{ renderHooksWithMemo(all_hooks, memo_trigger_hooks) }}

   return (
     {{utils.render(component.render(), indent_width=0)}}
   )
@@ -208,11 +208,16 @@ export const applyEvent = async (event, socket) => {
   if (event.name == "_download") {
     const a = document.createElement("a");
     a.hidden = true;
+    a.href = event.payload.url;
     // Special case when linking to uploaded files
-    a.href = event.payload.url.replace(
-      "${getBackendURL(env.UPLOAD)}",
-      getBackendURL(env.UPLOAD)
-    );
+    if (a.href.includes("getBackendURL(env.UPLOAD)")) {
+      a.href = eval?.(
+        event.payload.url.replace(
+          "getBackendURL(env.UPLOAD)",
+          `"${getBackendURL(env.UPLOAD)}"`
+        )
+      );
+    }
     a.download = event.payload.filename;
     a.click();
     a.remove();

@@ -405,7 +410,14 @@ export const connect = async (
     autoUnref: false,
   });
   // Ensure undefined fields in events are sent as null instead of removed
-  socket.current.io.encoder.replacer = (k, v) => (v === undefined ? null : v)
+  socket.current.io.encoder.replacer = (k, v) => (v === undefined ? null : v);
+  socket.current.io.decoder.tryParse = (str) => {
+    try {
+      return JSON5.parse(str);
+    } catch (e) {
+      return false;
+    }
+  };

   function checkVisibility() {
     if (document.visibilityState === "visible") {
@@ -68,6 +68,7 @@ from reflex.components.core.upload import Upload, get_upload_dir
 from reflex.components.radix import themes
 from reflex.config import environment, get_config
 from reflex.event import (
+    _EVENT_FIELDS,
     BASE_STATE,
     Event,
     EventHandler,

@@ -462,14 +463,8 @@ class App(MiddlewareMixin, LifespanMixin):

         Returns:
             The generated component.
-
-        Raises:
-            exceptions.MatchTypeError: If the return types of match cases in rx.match are different.
         """
-        try:
-            return component if isinstance(component, Component) else component()
-        except exceptions.MatchTypeError:
-            raise
+        return component if isinstance(component, Component) else component()

     def add_page(
         self,

@@ -1562,12 +1557,36 @@ class EventNamespace(AsyncNamespace):
         Args:
             sid: The Socket.IO session id.
             data: The event data.
+
+        Raises:
+            EventDeserializationError: If the event data is not a dictionary.
         """
         fields = data
-        # Get the event.
-        event = Event(
-            **{k: v for k, v in fields.items() if k not in ("handler", "event_actions")}
-        )
+
+        if isinstance(fields, str):
+            console.warn(
+                "Received event data as a string. This generally should not happen and may indicate a bug."
+                f" Event data: {fields}"
+            )
+            try:
+                fields = json.loads(fields)
+            except json.JSONDecodeError as ex:
+                raise exceptions.EventDeserializationError(
+                    f"Failed to deserialize event data: {fields}."
+                ) from ex
+
+        if not isinstance(fields, dict):
+            raise exceptions.EventDeserializationError(
+                f"Event data must be a dictionary, but received {fields} of type {type(fields)}."
+            )
+
+        try:
+            # Get the event.
+            event = Event(**{k: v for k, v in fields.items() if k in _EVENT_FIELDS})
+        except (TypeError, ValueError) as ex:
+            raise exceptions.EventDeserializationError(
+                f"Failed to deserialize event data: {fields}."
+            ) from ex

         self.token_to_sid[event.token] = sid
         self.sid_to_token[sid] = event.token
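The stricter deserialization above is easier to follow in isolation. Below is a hedged, self-contained sketch of the same flow; the error type and the field filtering mirror the diff, while ALLOWED_FIELDS and the sample payload are made-up stand-ins for _EVENT_FIELDS and real socket data.

import json

ALLOWED_FIELDS = {"token", "name", "router_data", "payload"}  # stand-in for _EVENT_FIELDS


class EventDeserializationError(ValueError):
    """Raised when incoming event data cannot be turned into an Event."""


def parse_event(data):
    fields = data
    if isinstance(fields, str):
        # Unexpected but tolerated: try to decode JSON strings.
        try:
            fields = json.loads(fields)
        except json.JSONDecodeError as ex:
            raise EventDeserializationError(
                f"Failed to deserialize event data: {fields}."
            ) from ex
    if not isinstance(fields, dict):
        raise EventDeserializationError(
            f"Event data must be a dictionary, but received {fields} of type {type(fields)}."
        )
    # Keep only the fields the Event model actually accepts.
    return {k: v for k, v in fields.items() if k in ALLOWED_FIELDS}


print(parse_event('{"token": "abc", "name": "state.increment", "payload": {}}'))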
@@ -7,14 +7,13 @@ from concurrent.futures import ThreadPoolExecutor
 from reflex import constants
 from reflex.utils import telemetry
 from reflex.utils.exec import is_prod_mode
-from reflex.utils.prerequisites import get_app
+from reflex.utils.prerequisites import get_and_validate_app

 if constants.CompileVars.APP != "app":
     raise AssertionError("unexpected variable name for 'app'")

 telemetry.send("compile")
-app_module = get_app(reload=False)
-app = getattr(app_module, constants.CompileVars.APP)
+app, app_module = get_and_validate_app(reload=False)
 # For py3.9 compatibility when redis is used, we MUST add any decorator pages
 # before compiling the app in a thread to avoid event loop error (REF-2172).
 app._apply_decorated_pages()

@@ -30,7 +29,7 @@ if is_prod_mode():
     # ensure only "app" is exposed.
     del app_module
     del compile_future
-    del get_app
+    del get_and_validate_app
     del is_prod_mode
     del telemetry
     del constants
@@ -75,7 +75,7 @@ def _compile_app(app_root: Component) -> str:
     return templates.APP_ROOT.render(
         imports=utils.compile_imports(app_root._get_all_imports()),
         custom_codes=app_root._get_all_custom_code(),
-        hooks={**app_root._get_all_hooks_internal(), **app_root._get_all_hooks()},
+        hooks=app_root._get_all_hooks(),
         window_libraries=window_libraries,
         render=app_root.render(),
     )

@@ -149,7 +149,7 @@ def _compile_page(
         imports=imports,
         dynamic_imports=component._get_all_dynamic_imports(),
         custom_codes=component._get_all_custom_code(),
-        hooks={**component._get_all_hooks_internal(), **component._get_all_hooks()},
+        hooks=component._get_all_hooks(),
         render=component.render(),
         **kwargs,
     )
@@ -1,9 +1,46 @@
 """Templates to use in the reflex compiler."""

 from __future__ import annotations

 from jinja2 import Environment, FileSystemLoader, Template

 from reflex import constants
+from reflex.constants import Hooks
 from reflex.utils.format import format_state_name, json_dumps
+from reflex.vars.base import VarData
+
+
+def _sort_hooks(hooks: dict[str, VarData | None]):
+    """Sort the hooks by their position.
+
+    Args:
+        hooks: The hooks to sort.
+
+    Returns:
+        The sorted hooks.
+    """
+    sorted_hooks = {
+        Hooks.HookPosition.INTERNAL: [],
+        Hooks.HookPosition.PRE_TRIGGER: [],
+        Hooks.HookPosition.POST_TRIGGER: [],
+    }
+
+    for hook, data in hooks.items():
+        if data and data.position and data.position == Hooks.HookPosition.INTERNAL:
+            sorted_hooks[Hooks.HookPosition.INTERNAL].append((hook, data))
+        elif not data or (
+            not data.position
+            or data.position == constants.Hooks.HookPosition.PRE_TRIGGER
+        ):
+            sorted_hooks[Hooks.HookPosition.PRE_TRIGGER].append((hook, data))
+        elif (
+            data
+            and data.position
+            and data.position == constants.Hooks.HookPosition.POST_TRIGGER
+        ):
+            sorted_hooks[Hooks.HookPosition.POST_TRIGGER].append((hook, data))
+
+    return sorted_hooks


 class ReflexJinjaEnvironment(Environment):

@@ -47,6 +84,7 @@ class ReflexJinjaEnvironment(Environment):
             "frontend_exception_state": constants.CompileVars.FRONTEND_EXCEPTION_STATE_FULL,
             "hook_position": constants.Hooks.HookPosition,
         }
+        self.globals["sort_hooks"] = _sort_hooks


 def get_template(name: str) -> Template:

@@ -103,6 +141,9 @@ STYLE = get_template("web/styles/styles.css.jinja2")
 # Code that generate the package json file
 PACKAGE_JSON = get_template("web/package.json.jinja2")

+# Template containing some macros used in the web pages.
+MACROS = get_template("web/pages/macros.js.jinja2")
+
 # Code that generate the pyproject.toml file for custom components.
 CUSTOM_COMPONENTS_PYPROJECT_TOML = get_template(
     "custom_components/pyproject.toml.jinja2"
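As a rough illustration of what the new sort_hooks Jinja global does at render time, here is a hedged sketch that calls _sort_hooks directly. It assumes this branch of reflex is importable; the hook strings themselves are invented.

# Sketch only: group hook snippets by their declared position, as the page
# templates now do through the sort_hooks global. Hook strings are made up.
from reflex.compiler.templates import _sort_hooks
from reflex.constants import Hooks
from reflex.vars.base import VarData

hooks = {
    "const ref_button = useRef(null);": VarData(position=Hooks.HookPosition.INTERNAL),
    "const [count, setCount] = useState(0);": None,  # no position -> PRE_TRIGGER bucket
    "useEffect(() => () => clearInterval(timer), []);": VarData(
        position=Hooks.HookPosition.POST_TRIGGER
    ),
}

for position, entries in _sort_hooks(hooks).items():
    print(position, [hook for hook, _ in entries])  # noqa: T201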
@@ -290,7 +290,7 @@ def compile_custom_component(
             "name": component.tag,
             "props": props,
             "render": render.render(),
-            "hooks": {**render._get_all_hooks_internal(), **render._get_all_hooks()},
+            "hooks": render._get_all_hooks(),
             "custom_code": render._get_all_custom_code(),
         },
         imports,
@@ -9,6 +9,7 @@ from reflex.components.tags import Tag
 from reflex.components.tags.tagless import Tagless
 from reflex.utils.imports import ParsedImportDict
 from reflex.vars import BooleanVar, ObjectVar, Var
+from reflex.vars.base import VarData


 class Bare(Component):

@@ -32,7 +33,7 @@ class Bare(Component):
         contents = str(contents) if contents is not None else ""
         return cls(contents=contents)  # type: ignore

-    def _get_all_hooks_internal(self) -> dict[str, None]:
+    def _get_all_hooks_internal(self) -> dict[str, VarData | None]:
         """Include the hooks for the component.

         Returns:

@@ -43,7 +44,7 @@ class Bare(Component):
             hooks |= self.contents._var_value._get_all_hooks_internal()
         return hooks

-    def _get_all_hooks(self) -> dict[str, None]:
+    def _get_all_hooks(self) -> dict[str, VarData | None]:
         """Include the hooks for the component.

         Returns:

@@ -107,11 +108,14 @@ class Bare(Component):
             return Tagless(contents=f"{{{self.contents!s}}}")
         return Tagless(contents=str(self.contents))

-    def _get_vars(self, include_children: bool = False) -> Iterator[Var]:
+    def _get_vars(
+        self, include_children: bool = False, ignore_ids: set[int] | None = None
+    ) -> Iterator[Var]:
         """Walk all Vars used in this component.

         Args:
             include_children: Whether to include Vars from children.
+            ignore_ids: The ids to ignore.

         Yields:
             The contents if it is a Var, otherwise nothing.
@@ -23,6 +23,8 @@ from typing import (
     Union,
 )

+from typing_extensions import deprecated
+
 import reflex.state
 from reflex.base import Base
 from reflex.compiler.templates import STATEFUL_COMPONENT

@@ -43,17 +45,13 @@ from reflex.constants.state import FRONTEND_EVENT_STATE
 from reflex.event import (
     EventCallback,
     EventChain,
     EventChainVar,
     EventHandler,
     EventSpec,
     EventVar,
     call_event_fn,
     call_event_handler,
     get_handler_args,
     no_args_event_spec,
 )
 from reflex.style import Style, format_as_emotion
-from reflex.utils import format, imports, types
+from reflex.utils import console, format, imports, types
 from reflex.utils.imports import (
     ImmutableParsedImportDict,
     ImportDict,

@@ -104,7 +102,7 @@ class BaseComponent(Base, ABC):
     """

     @abstractmethod
-    def _get_all_hooks_internal(self) -> dict[str, None]:
+    def _get_all_hooks_internal(self) -> dict[str, VarData | None]:
         """Get the reflex internal hooks for the component and its children.

         Returns:

@@ -112,7 +110,7 @@ class BaseComponent(Base, ABC):
     """

     @abstractmethod
-    def _get_all_hooks(self) -> dict[str, None]:
+    def _get_all_hooks(self) -> dict[str, VarData | None]:
         """Get the React hooks for this component.

         Returns:

@@ -431,20 +429,22 @@ class Component(BaseComponent, ABC):
             else:
                 continue

+        def determine_key(value):
+            # Try to create a var from the value
+            key = value if isinstance(value, Var) else LiteralVar.create(value)
+
+            # Check that the var type is not None.
+            if key is None:
+                raise TypeError
+
+            return key
+
         # Check whether the key is a component prop.
         if types._issubclass(field_type, Var):
             # Used to store the passed types if var type is a union.
             passed_types = None
             try:
-                # Try to create a var from the value.
-                if isinstance(value, Var):
-                    kwargs[key] = value
-                else:
-                    kwargs[key] = LiteralVar.create(value)
-
-                # Check that the var type is not None.
-                if kwargs[key] is None:
-                    raise TypeError
+                kwargs[key] = determine_key(value)

                 expected_type = fields[key].outer_type_.__args__[0]
                 # validate literal fields.

@@ -493,8 +493,7 @@ class Component(BaseComponent, ABC):
             )
         # Check if the key is an event trigger.
         if key in component_specific_triggers:
-            # Temporarily disable full control for event triggers.
-            kwargs["event_triggers"][key] = self._create_event_chain(
+            kwargs["event_triggers"][key] = EventChain.create(
                 value=value,  # type: ignore
                 args_spec=component_specific_triggers[key],
                 key=key,

@@ -548,6 +547,7 @@ class Component(BaseComponent, ABC):
         # Construct the component.
         super().__init__(*args, **kwargs)

+    @deprecated("Use rx.EventChain.create instead.")
     def _create_event_chain(
         self,
         args_spec: types.ArgsSpec | Sequence[types.ArgsSpec],

@@ -569,82 +569,18 @@ class Component(BaseComponent, ABC):

         Returns:
             The event chain.
-
-        Raises:
-            ValueError: If the value is not a valid event chain.
         """
-        # If it's an event chain var, return it.
-        if isinstance(value, Var):
-            if isinstance(value, EventChainVar):
-                return value
-            elif isinstance(value, EventVar):
-                value = [value]
-            elif issubclass(value._var_type, (EventChain, EventSpec)):
-                return self._create_event_chain(args_spec, value.guess_type(), key=key)
-            else:
-                raise ValueError(
-                    f"Invalid event chain: {value!s} of type {value._var_type}"
-                )
-        elif isinstance(value, EventChain):
-            # Trust that the caller knows what they're doing passing an EventChain directly
-            return value
-
-        # If the input is a single event handler, wrap it in a list.
-        if isinstance(value, (EventHandler, EventSpec)):
-            value = [value]
-
-        # If the input is a list of event handlers, create an event chain.
-        if isinstance(value, List):
-            events: List[Union[EventSpec, EventVar]] = []
-            for v in value:
-                if isinstance(v, (EventHandler, EventSpec)):
-                    # Call the event handler to get the event.
-                    events.append(call_event_handler(v, args_spec, key=key))
-                elif isinstance(v, Callable):
-                    # Call the lambda to get the event chain.
-                    result = call_event_fn(v, args_spec, key=key)
-                    if isinstance(result, Var):
-                        raise ValueError(
-                            f"Invalid event chain: {v}. Cannot use a Var-returning "
-                            "lambda inside an EventChain list."
-                        )
-                    events.extend(result)
-                elif isinstance(v, EventVar):
-                    events.append(v)
-                else:
-                    raise ValueError(f"Invalid event: {v}")
-
-        # If the input is a callable, create an event chain.
-        elif isinstance(value, Callable):
-            result = call_event_fn(value, args_spec, key=key)
-            if isinstance(result, Var):
-                # Recursively call this function if the lambda returned an EventChain Var.
-                return self._create_event_chain(args_spec, result, key=key)
-            events = [*result]
-
-        # Otherwise, raise an error.
-        else:
-            raise ValueError(f"Invalid event chain: {value}")
-
-        # Add args to the event specs if necessary.
-        events = [
-            (e.with_args(get_handler_args(e)) if isinstance(e, EventSpec) else e)
-            for e in events
-        ]
-
-        # Return the event chain.
-        if isinstance(args_spec, Var):
-            return EventChain(
-                events=events,
-                args_spec=None,
-                event_actions={},
-            )
-        else:
-            return EventChain(
-                events=events,
-                args_spec=args_spec,
-                event_actions={},
-            )
+        console.deprecate(
+            "Component._create_event_chain",
+            "Use rx.EventChain.create instead.",
+            deprecation_version="0.6.8",
+            removal_version="0.7.0",
+        )
+        return EventChain.create(
+            value=value,  # type: ignore
+            args_spec=args_spec,
+            key=key,
+        )

     def get_event_triggers(
         self,

@@ -806,22 +742,21 @@ class Component(BaseComponent, ABC):
         # Import here to avoid circular imports.
         from reflex.components.base.bare import Bare
         from reflex.components.base.fragment import Fragment
-        from reflex.utils.exceptions import ComponentTypeError
+        from reflex.utils.exceptions import ChildrenTypeError

         # Filter out None props
         props = {key: value for key, value in props.items() if value is not None}

         def validate_children(children):
             for child in children:
-                if isinstance(child, tuple):
+                if isinstance(child, (tuple, list)):
                     validate_children(child)

                 # Make sure the child is a valid type.
-                if not types._isinstance(child, ComponentChild):
-                    raise ComponentTypeError(
-                        "Children of Reflex components must be other components, "
-                        "state vars, or primitive Python types. "
-                        f"Got child {child} of type {type(child)}.",
-                    )
+                if isinstance(child, dict) or not types._isinstance(
+                    child, ComponentChild
+                ):
+                    raise ChildrenTypeError(component=cls.__name__, child=child)

         # Validate all the children.
         validate_children(children)

@@ -1086,18 +1021,22 @@ class Component(BaseComponent, ABC):
             event_args.append(spec)
         yield event_trigger, event_args

-    def _get_vars(self, include_children: bool = False) -> list[Var]:
+    def _get_vars(
+        self, include_children: bool = False, ignore_ids: set[int] | None = None
+    ) -> Iterator[Var]:
         """Walk all Vars used in this component.

         Args:
             include_children: Whether to include Vars from children.
+            ignore_ids: The ids to ignore.

-        Returns:
+        Yields:
             Each var referenced by the component (props, styles, event handlers).
         """
-        vars = getattr(self, "__vars", None)
+        ignore_ids = ignore_ids or set()
+        vars: List[Var] | None = getattr(self, "__vars", None)
         if vars is not None:
-            return vars
+            yield from vars
         vars = self.__vars = []
         # Get Vars associated with event trigger arguments.
         for _, event_vars in self._get_vars_from_event_triggers(self.event_triggers):

@@ -1141,12 +1080,15 @@ class Component(BaseComponent, ABC):
         # Get Vars associated with children.
         if include_children:
             for child in self.children:
-                if not isinstance(child, Component):
+                if not isinstance(child, Component) or id(child) in ignore_ids:
                     continue
-                child_vars = child._get_vars(include_children=include_children)
+                ignore_ids.add(id(child))
+                child_vars = child._get_vars(
+                    include_children=include_children, ignore_ids=ignore_ids
+                )
                 vars.extend(child_vars)

-        return vars
+        yield from vars

     def _event_trigger_values_use_state(self) -> bool:
         """Check if the values of a component's event trigger use state.

@@ -1338,7 +1280,7 @@ class Component(BaseComponent, ABC):
         """
         _imports = {}

-        if self._get_ref_hook():
+        if self._get_ref_hook() is not None:
             # Handle hooks needed for attaching react refs to DOM nodes.
             _imports.setdefault("react", set()).add(ImportVar(tag="useRef"))
             _imports.setdefault(f"$/{Dirs.STATE_PATH}", set()).add(

@@ -1454,7 +1396,7 @@ class Component(BaseComponent, ABC):
             }}
         }}, []);"""

-    def _get_ref_hook(self) -> str | None:
+    def _get_ref_hook(self) -> Var | None:
         """Generate the ref hook for the component.

         Returns:

@@ -1462,11 +1404,12 @@ class Component(BaseComponent, ABC):
         """
         ref = self.get_ref()
         if ref is not None:
-            return (
-                f"const {ref} = useRef(null); {Var(_js_expr=ref)._as_ref()!s} = {ref};"
+            return Var(
+                f"const {ref} = useRef(null); {Var(_js_expr=ref)._as_ref()!s} = {ref};",
+                _var_data=VarData(position=Hooks.HookPosition.INTERNAL),
             )

-    def _get_vars_hooks(self) -> dict[str, None]:
+    def _get_vars_hooks(self) -> dict[str, VarData | None]:
         """Get the hooks required by vars referenced in this component.

         Returns:

@@ -1479,27 +1422,38 @@ class Component(BaseComponent, ABC):
             vars_hooks.update(
                 var_data.hooks
                 if isinstance(var_data.hooks, dict)
-                else {k: None for k in var_data.hooks}
+                else {
+                    k: VarData(position=Hooks.HookPosition.INTERNAL)
+                    for k in var_data.hooks
+                }
             )
         return vars_hooks

-    def _get_events_hooks(self) -> dict[str, None]:
+    def _get_events_hooks(self) -> dict[str, VarData | None]:
         """Get the hooks required by events referenced in this component.

         Returns:
             The hooks for the events.
         """
-        return {Hooks.EVENTS: None} if self.event_triggers else {}
+        return (
+            {Hooks.EVENTS: VarData(position=Hooks.HookPosition.INTERNAL)}
+            if self.event_triggers
+            else {}
+        )

-    def _get_special_hooks(self) -> dict[str, None]:
+    def _get_special_hooks(self) -> dict[str, VarData | None]:
         """Get the hooks required by special actions referenced in this component.

         Returns:
             The hooks for special actions.
         """
-        return {Hooks.AUTOFOCUS: None} if self.autofocus else {}
+        return (
+            {Hooks.AUTOFOCUS: VarData(position=Hooks.HookPosition.INTERNAL)}
+            if self.autofocus
+            else {}
+        )

-    def _get_hooks_internal(self) -> dict[str, None]:
+    def _get_hooks_internal(self) -> dict[str, VarData | None]:
         """Get the React hooks for this component managed by the framework.

         Downstream components should NOT override this method to avoid breaking

@@ -1510,7 +1464,7 @@ class Component(BaseComponent, ABC):
         """
         return {
             **{
-                hook: None
+                str(hook): VarData(position=Hooks.HookPosition.INTERNAL)
                 for hook in [self._get_ref_hook(), self._get_mount_lifecycle_hook()]
                 if hook is not None
             },

@@ -1559,7 +1513,7 @@ class Component(BaseComponent, ABC):
         """
         return

-    def _get_all_hooks_internal(self) -> dict[str, None]:
+    def _get_all_hooks_internal(self) -> dict[str, VarData | None]:
         """Get the reflex internal hooks for the component and its children.

         Returns:

@@ -1574,7 +1528,7 @@ class Component(BaseComponent, ABC):

         return code

-    def _get_all_hooks(self) -> dict[str, None]:
+    def _get_all_hooks(self) -> dict[str, VarData | None]:
         """Get the React hooks for this component and its children.

         Returns:

@@ -1582,6 +1536,9 @@ class Component(BaseComponent, ABC):
         """
         code = {}

+        # Add the internal hooks for this component.
+        code.update(self._get_hooks_internal())
+
         # Add the hook code for this component.
         hooks = self._get_hooks()
         if hooks is not None:

@@ -1737,7 +1694,7 @@ class CustomComponent(Component):

         # Handle event chains.
         if types._issubclass(type_, EventChain):
-            value = self._create_event_chain(
+            value = EventChain.create(
                 value=value,
                 args_spec=event_triggers_in_component_declaration.get(
                     key, no_args_event_spec

@@ -1862,19 +1819,25 @@ class CustomComponent(Component):
             for name, prop in self.props.items()
         ]

-    def _get_vars(self, include_children: bool = False) -> list[Var]:
+    def _get_vars(
+        self, include_children: bool = False, ignore_ids: set[int] | None = None
+    ) -> Iterator[Var]:
         """Walk all Vars used in this component.

         Args:
             include_children: Whether to include Vars from children.
+            ignore_ids: The ids to ignore.

-        Returns:
+        Yields:
             Each var referenced by the component (props, styles, event handlers).
         """
-        return (
-            super()._get_vars(include_children=include_children)
-            + [prop for prop in self.props.values() if isinstance(prop, Var)]
-            + self.get_component(self)._get_vars(include_children=include_children)
+        ignore_ids = ignore_ids or set()
+        yield from super()._get_vars(
+            include_children=include_children, ignore_ids=ignore_ids
         )
+        yield from filter(lambda prop: isinstance(prop, Var), self.props.values())
+        yield from self.get_component(self)._get_vars(
+            include_children=include_children, ignore_ids=ignore_ids
+        )

     @lru_cache(maxsize=None)  # noqa

@@ -2277,7 +2240,7 @@ class StatefulComponent(BaseComponent):
             )
         return trigger_memo

-    def _get_all_hooks_internal(self) -> dict[str, None]:
+    def _get_all_hooks_internal(self) -> dict[str, VarData | None]:
         """Get the reflex internal hooks for the component and its children.

         Returns:

@@ -2285,7 +2248,7 @@ class StatefulComponent(BaseComponent):
         """
         return {}

-    def _get_all_hooks(self) -> dict[str, None]:
+    def _get_all_hooks(self) -> dict[str, VarData | None]:
         """Get the React hooks for this component.

         Returns:

@@ -2403,7 +2366,7 @@ class MemoizationLeaf(Component):
             The memoization leaf
         """
         comp = super().create(*children, **props)
-        if comp._get_all_hooks() or comp._get_all_hooks_internal():
+        if comp._get_all_hooks():
             comp._memoization_mode = cls._memoization_mode.copy(
                 update={"disposition": MemoizationDisposition.ALWAYS}
             )
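The ignore_ids parameter threaded through the _get_vars overrides above guards against walking a shared child twice. Here is a hedged, standalone sketch of that pattern; the Node class and values are illustrative, not reflex API.

from __future__ import annotations

from typing import Iterator


class Node:
    def __init__(self, value: str, children: list[Node] | None = None):
        self.value = value
        self.children = children or []

    def walk(self, ignore_ids: set[int] | None = None) -> Iterator[str]:
        # Track visited child ids so a node shared by several parents is walked once.
        ignore_ids = ignore_ids or set()
        yield self.value
        for child in self.children:
            if id(child) in ignore_ids:
                continue  # already visited via another parent
            ignore_ids.add(id(child))
            yield from child.walk(ignore_ids=ignore_ids)


shared = Node("shared")
root = Node("root", [Node("a", [shared]), Node("b", [shared])])
print(list(root.walk()))  # "shared" appears only once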
@@ -502,8 +502,8 @@ class CodeBlock(Component, MarkdownComponentMap):

         theme = self.theme

-        out.add_props(style=theme).remove_props("theme", "code", "language").add_props(
-            children=self.code, language=_LANGUAGE
+        out.add_props(style=theme).remove_props("theme", "code").add_props(
+            children=self.code,
         )

         return out

@@ -512,20 +512,25 @@ class CodeBlock(Component, MarkdownComponentMap):
         return ["can_copy", "copy_button"]

     @classmethod
-    def _get_language_registration_hook(cls) -> str:
+    def _get_language_registration_hook(cls, language_var: Var = _LANGUAGE) -> str:
         """Get the hook to register the language.

+        Args:
+            language_var: The const/literal Var of the language module to import.
+                For markdown, uses the default placeholder _LANGUAGE. For direct use,
+                a LiteralStringVar should be passed via the language prop.
+
         Returns:
             The hook to register the language.
         """
         return f"""
-        if ({_LANGUAGE!s}) {{
+        if ({language_var!s}) {{
            (async () => {{
              try {{
-                const module = await import(`react-syntax-highlighter/dist/cjs/languages/prism/${{{_LANGUAGE!s}}}`);
-                SyntaxHighlighter.registerLanguage({_LANGUAGE!s}, module.default);
+                const module = await import(`react-syntax-highlighter/dist/cjs/languages/prism/${{{language_var!s}}}`);
+                SyntaxHighlighter.registerLanguage({language_var!s}, module.default);
              }} catch (error) {{
-                console.error(`Error importing language module for ${{{_LANGUAGE!s}}}:`, error);
+                console.error(`Error importing language module for ${{{language_var!s}}}:`, error);
              }}
            }})();
          }}

@@ -547,8 +552,7 @@ class CodeBlock(Component, MarkdownComponentMap):
             The hooks for the component.
         """
         return [
-            f"const {_LANGUAGE!s} = {self.language!s}",
-            self._get_language_registration_hook(),
+            self._get_language_registration_hook(language_var=self.language),
         ]
@@ -136,6 +136,23 @@ def load_dynamic_serializer():

         module_code_lines.insert(0, "const React = window.__reflex.react;")

+        function_line = next(
+            index
+            for index, line in enumerate(module_code_lines)
+            if line.startswith("export default function")
+        )
+
+        module_code_lines = [
+            line
+            for _, line in sorted(
+                enumerate(module_code_lines),
+                key=lambda x: (
+                    not (x[1].startswith("import ") and x[0] < function_line),
+                    x[0],
+                ),
+            )
+        ]
+
         return "\n".join(
             [
                 "//__reflex_evaluate",
@ -182,9 +182,7 @@ class Form(BaseHTML):
props["handle_submit_unique_name"] = ""
form = super().create(*children, **props)
form.handle_submit_unique_name = md5(
str({**form._get_all_hooks_internal(), **form._get_all_hooks()}).encode(
"utf-8"
)
str(form._get_all_hooks()).encode("utf-8")
).hexdigest()
return form

@ -252,8 +250,12 @@ class Form(BaseHTML):
)
return form_refs

def _get_vars(self, include_children: bool = True) -> Iterator[Var]:
yield from super()._get_vars(include_children=include_children)
def _get_vars(
self, include_children: bool = True, ignore_ids: set[int] | None = None
) -> Iterator[Var]:
yield from super()._get_vars(
include_children=include_children, ignore_ids=ignore_ids
)
yield from self._get_form_refs().values()

def _exclude_props(self) -> list[str]:
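Note: handle_submit_unique_name is derived from an md5 digest of the form's hooks, so the generated handler name is deterministic for a given component tree. A minimal sketch of that hashing pattern (illustrative only, not the Reflex helper itself):

from hashlib import md5

def unique_name_from_hooks(hooks: dict) -> str:
    """Derive a deterministic identifier from a mapping of hook code strings."""
    return md5(str(hooks).encode("utf-8")).hexdigest()

print(unique_name_from_hooks({"const [a, setA] = useState(0)": None}))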
@ -2,13 +2,15 @@

from reflex.components.component import Component
from reflex.utils import format
from reflex.vars.base import Var
from reflex.utils.imports import ImportVar
from reflex.vars.base import LiteralVar, Var
from reflex.vars.sequence import LiteralStringVar


class LucideIconComponent(Component):
"""Lucide Icon Component."""

library = "lucide-react@0.359.0"
library = "lucide-react@0.471.1"


class Icon(LucideIconComponent):
@ -32,6 +34,7 @@ class Icon(LucideIconComponent):
Raises:
AttributeError: The errors tied to bad usage of the Icon component.
ValueError: If the icon tag is invalid.
TypeError: If the icon name is not a string.

Returns:
The created component.
@ -39,7 +42,6 @@ class Icon(LucideIconComponent):
if children:
if len(children) == 1 and isinstance(children[0], str):
props["tag"] = children[0]
children = []
else:
raise AttributeError(
f"Passing multiple children to Icon component is not allowed: remove positional arguments {children[1:]} to fix"
@ -47,19 +49,46 @@ class Icon(LucideIconComponent):
if "tag" not in props:
raise AttributeError("Missing 'tag' keyword-argument for Icon")

tag: str | Var | LiteralVar = props.pop("tag")
if isinstance(tag, LiteralVar):
if isinstance(tag, LiteralStringVar):
tag = tag._var_value
else:
raise TypeError(f"Icon name must be a string, got {type(tag)}")
elif isinstance(tag, Var):
return DynamicIcon.create(name=tag, **props)

if (
not isinstance(props["tag"], str)
or format.to_snake_case(props["tag"]) not in LUCIDE_ICON_LIST
not isinstance(tag, str)
or format.to_snake_case(tag) not in LUCIDE_ICON_LIST
):
raise ValueError(
f"Invalid icon tag: {props['tag']}. Please use one of the following: {', '.join(LUCIDE_ICON_LIST[0:25])}, ..."
f"Invalid icon tag: {tag}. Please use one of the following: {', '.join(LUCIDE_ICON_LIST[0:25])}, ..."
"\nSee full list at https://lucide.dev/icons."
)

props["tag"] = format.to_title_case(format.to_snake_case(props["tag"])) + "Icon"
if tag in LUCIDE_ICON_MAPPING_OVERRIDE:
props["tag"] = LUCIDE_ICON_MAPPING_OVERRIDE[tag]
else:
props["tag"] = format.to_title_case(format.to_snake_case(tag)) + "Icon"
props["alias"] = f"Lucide{props['tag']}"
props.setdefault("color", "var(--current-color)")
return super().create(*children, **props)
return super().create(**props)


class DynamicIcon(LucideIconComponent):
"""A DynamicIcon component."""

tag = "DynamicIcon"

name: Var[str]

def _get_imports(self):
_imports = super()._get_imports()
if self.library:
_imports.pop(self.library)
_imports["lucide-react/dynamic"] = [ImportVar("DynamicIcon", install=False)]
return _imports

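Note: per the create() change above, a plain string tag is validated against LUCIDE_ICON_LIST, while a state-backed Var now falls through to DynamicIcon.create(name=...). A hedged usage sketch:

import reflex as rx

class IconState(rx.State):
    icon_name: str = "calendar"

def icons() -> rx.Component:
    return rx.hstack(
        rx.icon("calendar"),               # literal tag, checked against the list below
        rx.icon(tag=IconState.icon_name),  # Var tag, rendered via DynamicIcon
    )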
LUCIDE_ICON_LIST = [
|
||||
@ -106,6 +135,7 @@ LUCIDE_ICON_LIST = [
|
||||
"ambulance",
|
||||
"ampersand",
|
||||
"ampersands",
|
||||
"amphora",
|
||||
"anchor",
|
||||
"angry",
|
||||
"annoyed",
|
||||
@ -193,6 +223,7 @@ LUCIDE_ICON_LIST = [
|
||||
"baggage_claim",
|
||||
"ban",
|
||||
"banana",
|
||||
"bandage",
|
||||
"banknote",
|
||||
"bar_chart",
|
||||
"bar_chart_2",
|
||||
@ -230,8 +261,10 @@ LUCIDE_ICON_LIST = [
|
||||
"between_horizontal_start",
|
||||
"between_vertical_end",
|
||||
"between_vertical_start",
|
||||
"biceps_flexed",
|
||||
"bike",
|
||||
"binary",
|
||||
"binoculars",
|
||||
"biohazard",
|
||||
"bird",
|
||||
"bitcoin",
|
||||
@ -278,6 +311,7 @@ LUCIDE_ICON_LIST = [
|
||||
"boom_box",
|
||||
"bot",
|
||||
"bot_message_square",
|
||||
"bot_off",
|
||||
"box",
|
||||
"box_select",
|
||||
"boxes",
|
||||
@ -289,6 +323,7 @@ LUCIDE_ICON_LIST = [
|
||||
"brick_wall",
|
||||
"briefcase",
|
||||
"briefcase_business",
|
||||
"briefcase_conveyor_belt",
|
||||
"briefcase_medical",
|
||||
"bring_to_front",
|
||||
"brush",
|
||||
@ -305,9 +340,13 @@ LUCIDE_ICON_LIST = [
|
||||
"cake_slice",
|
||||
"calculator",
|
||||
"calendar",
|
||||
"calendar_1",
|
||||
"calendar_arrow_down",
|
||||
"calendar_arrow_up",
|
||||
"calendar_check",
|
||||
"calendar_check_2",
|
||||
"calendar_clock",
|
||||
"calendar_cog",
|
||||
"calendar_days",
|
||||
"calendar_fold",
|
||||
"calendar_heart",
|
||||
@ -318,6 +357,7 @@ LUCIDE_ICON_LIST = [
|
||||
"calendar_plus_2",
|
||||
"calendar_range",
|
||||
"calendar_search",
|
||||
"calendar_sync",
|
||||
"calendar_x",
|
||||
"calendar_x_2",
|
||||
"camera",
|
||||
@ -342,6 +382,29 @@ LUCIDE_ICON_LIST = [
|
||||
"castle",
|
||||
"cat",
|
||||
"cctv",
|
||||
"chart_area",
|
||||
"chart_bar",
|
||||
"chart_bar_big",
|
||||
"chart_bar_decreasing",
|
||||
"chart_bar_increasing",
|
||||
"chart_bar_stacked",
|
||||
"chart_candlestick",
|
||||
"chart_column",
|
||||
"chart_column_big",
|
||||
"chart_column_decreasing",
|
||||
"chart_column_increasing",
|
||||
"chart_column_stacked",
|
||||
"chart_gantt",
|
||||
"chart_line",
|
||||
"chart_network",
|
||||
"chart_no_axes_column",
|
||||
"chart_no_axes_column_decreasing",
|
||||
"chart_no_axes_column_increasing",
|
||||
"chart_no_axes_combined",
|
||||
"chart_no_axes_gantt",
|
||||
"chart_pie",
|
||||
"chart_scatter",
|
||||
"chart_spline",
|
||||
"check",
|
||||
"check_check",
|
||||
"chef_hat",
|
||||
@ -356,6 +419,7 @@ LUCIDE_ICON_LIST = [
|
||||
"chevrons_down_up",
|
||||
"chevrons_left",
|
||||
"chevrons_left_right",
|
||||
"chevrons_left_right_ellipsis",
|
||||
"chevrons_right",
|
||||
"chevrons_right_left",
|
||||
"chevrons_up",
|
||||
@ -374,8 +438,8 @@ LUCIDE_ICON_LIST = [
|
||||
"circle_arrow_out_up_right",
|
||||
"circle_arrow_right",
|
||||
"circle_arrow_up",
|
||||
"circle_check_big",
|
||||
"circle_check",
|
||||
"circle_check_big",
|
||||
"circle_chevron_down",
|
||||
"circle_chevron_left",
|
||||
"circle_chevron_right",
|
||||
@ -387,13 +451,14 @@ LUCIDE_ICON_LIST = [
|
||||
"circle_dot_dashed",
|
||||
"circle_ellipsis",
|
||||
"circle_equal",
|
||||
"circle_fading_arrow_up",
|
||||
"circle_fading_plus",
|
||||
"circle_gauge",
|
||||
"circle_help",
|
||||
"circle_minus",
|
||||
"circle_off",
|
||||
"circle_parking_off",
|
||||
"circle_parking",
|
||||
"circle_parking_off",
|
||||
"circle_pause",
|
||||
"circle_percent",
|
||||
"circle_play",
|
||||
@ -432,7 +497,11 @@ LUCIDE_ICON_LIST = [
|
||||
"clock_7",
|
||||
"clock_8",
|
||||
"clock_9",
|
||||
"clock_alert",
|
||||
"clock_arrow_down",
|
||||
"clock_arrow_up",
|
||||
"cloud",
|
||||
"cloud_alert",
|
||||
"cloud_cog",
|
||||
"cloud_download",
|
||||
"cloud_drizzle",
|
||||
@ -503,6 +572,7 @@ LUCIDE_ICON_LIST = [
|
||||
"cup_soda",
|
||||
"currency",
|
||||
"cylinder",
|
||||
"dam",
|
||||
"database",
|
||||
"database_backup",
|
||||
"database_zap",
|
||||
@ -510,7 +580,9 @@ LUCIDE_ICON_LIST = [
|
||||
"dessert",
|
||||
"diameter",
|
||||
"diamond",
|
||||
"diamond_minus",
|
||||
"diamond_percent",
|
||||
"diamond_plus",
|
||||
"dice_1",
|
||||
"dice_2",
|
||||
"dice_3",
|
||||
@ -539,6 +611,7 @@ LUCIDE_ICON_LIST = [
|
||||
"dribbble",
|
||||
"drill",
|
||||
"droplet",
|
||||
"droplet_off",
|
||||
"droplets",
|
||||
"drum",
|
||||
"drumstick",
|
||||
@ -554,12 +627,15 @@ LUCIDE_ICON_LIST = [
|
||||
"ellipsis",
|
||||
"ellipsis_vertical",
|
||||
"equal",
|
||||
"equal_approximately",
|
||||
"equal_not",
|
||||
"eraser",
|
||||
"ethernet_port",
|
||||
"euro",
|
||||
"expand",
|
||||
"external_link",
|
||||
"eye",
|
||||
"eye_closed",
|
||||
"eye_off",
|
||||
"facebook",
|
||||
"factory",
|
||||
@ -579,6 +655,10 @@ LUCIDE_ICON_LIST = [
|
||||
"file_bar_chart",
|
||||
"file_bar_chart_2",
|
||||
"file_box",
|
||||
"file_chart_column",
|
||||
"file_chart_column_increasing",
|
||||
"file_chart_line",
|
||||
"file_chart_pie",
|
||||
"file_check",
|
||||
"file_check_2",
|
||||
"file_clock",
|
||||
@ -620,6 +700,7 @@ LUCIDE_ICON_LIST = [
|
||||
"file_type",
|
||||
"file_type_2",
|
||||
"file_up",
|
||||
"file_user",
|
||||
"file_video",
|
||||
"file_video_2",
|
||||
"file_volume",
|
||||
@ -661,6 +742,7 @@ LUCIDE_ICON_LIST = [
|
||||
"folder_check",
|
||||
"folder_clock",
|
||||
"folder_closed",
|
||||
"folder_code",
|
||||
"folder_cog",
|
||||
"folder_dot",
|
||||
"folder_down",
|
||||
@ -733,7 +815,12 @@ LUCIDE_ICON_LIST = [
|
||||
"graduation_cap",
|
||||
"grape",
|
||||
"grid_2x2",
|
||||
"grid_2x_2",
|
||||
"grid_2x_2_check",
|
||||
"grid_2x_2_plus",
|
||||
"grid_2x_2_x",
|
||||
"grid_3x3",
|
||||
"grid_3x_3",
|
||||
"grip",
|
||||
"grip_horizontal",
|
||||
"grip_vertical",
|
||||
@ -762,6 +849,7 @@ LUCIDE_ICON_LIST = [
|
||||
"heading_4",
|
||||
"heading_5",
|
||||
"heading_6",
|
||||
"headphone_off",
|
||||
"headphones",
|
||||
"headset",
|
||||
"heart",
|
||||
@ -779,14 +867,21 @@ LUCIDE_ICON_LIST = [
|
||||
"hospital",
|
||||
"hotel",
|
||||
"hourglass",
|
||||
"house",
|
||||
"house_plug",
|
||||
"house_plus",
|
||||
"house_wifi",
|
||||
"ice_cream_bowl",
|
||||
"ice_cream_cone",
|
||||
"id_card",
|
||||
"image",
|
||||
"image_down",
|
||||
"image_minus",
|
||||
"image_off",
|
||||
"image_play",
|
||||
"image_plus",
|
||||
"image_up",
|
||||
"image_upscale",
|
||||
"images",
|
||||
"import",
|
||||
"inbox",
|
||||
@ -808,6 +903,7 @@ LUCIDE_ICON_LIST = [
|
||||
"key_square",
|
||||
"keyboard",
|
||||
"keyboard_music",
|
||||
"keyboard_off",
|
||||
"lamp",
|
||||
"lamp_ceiling",
|
||||
"lamp_desk",
|
||||
@ -817,8 +913,9 @@ LUCIDE_ICON_LIST = [
|
||||
"land_plot",
|
||||
"landmark",
|
||||
"languages",
|
||||
"laptop_minimal",
|
||||
"laptop",
|
||||
"laptop_minimal",
|
||||
"laptop_minimal_check",
|
||||
"lasso",
|
||||
"lasso_select",
|
||||
"laugh",
|
||||
@ -833,6 +930,8 @@ LUCIDE_ICON_LIST = [
|
||||
"layout_template",
|
||||
"leaf",
|
||||
"leafy_green",
|
||||
"lectern",
|
||||
"letter_text",
|
||||
"library",
|
||||
"library_big",
|
||||
"life_buoy",
|
||||
@ -845,10 +944,12 @@ LUCIDE_ICON_LIST = [
|
||||
"link_2_off",
|
||||
"linkedin",
|
||||
"list",
|
||||
"list_check",
|
||||
"list_checks",
|
||||
"list_collapse",
|
||||
"list_end",
|
||||
"list_filter",
|
||||
"list_filter_plus",
|
||||
"list_minus",
|
||||
"list_music",
|
||||
"list_ordered",
|
||||
@ -861,15 +962,17 @@ LUCIDE_ICON_LIST = [
|
||||
"list_x",
|
||||
"loader",
|
||||
"loader_circle",
|
||||
"loader_pinwheel",
|
||||
"locate",
|
||||
"locate_fixed",
|
||||
"locate_off",
|
||||
"lock",
|
||||
"lock_keyhole_open",
|
||||
"lock_keyhole",
|
||||
"lock_keyhole_open",
|
||||
"lock_open",
|
||||
"log_in",
|
||||
"log_out",
|
||||
"logs",
|
||||
"lollipop",
|
||||
"luggage",
|
||||
"magnet",
|
||||
@ -886,7 +989,16 @@ LUCIDE_ICON_LIST = [
|
||||
"mails",
|
||||
"map",
|
||||
"map_pin",
|
||||
"map_pin_check",
|
||||
"map_pin_check_inside",
|
||||
"map_pin_house",
|
||||
"map_pin_minus",
|
||||
"map_pin_minus_inside",
|
||||
"map_pin_off",
|
||||
"map_pin_plus",
|
||||
"map_pin_plus_inside",
|
||||
"map_pin_x",
|
||||
"map_pin_x_inside",
|
||||
"map_pinned",
|
||||
"martini",
|
||||
"maximize",
|
||||
@ -915,6 +1027,7 @@ LUCIDE_ICON_LIST = [
|
||||
"message_square_diff",
|
||||
"message_square_dot",
|
||||
"message_square_heart",
|
||||
"message_square_lock",
|
||||
"message_square_more",
|
||||
"message_square_off",
|
||||
"message_square_plus",
|
||||
@ -926,8 +1039,9 @@ LUCIDE_ICON_LIST = [
|
||||
"message_square_x",
|
||||
"messages_square",
|
||||
"mic",
|
||||
"mic_vocal",
|
||||
"mic_off",
|
||||
"mic_vocal",
|
||||
"microchip",
|
||||
"microscope",
|
||||
"microwave",
|
||||
"milestone",
|
||||
@ -938,6 +1052,7 @@ LUCIDE_ICON_LIST = [
|
||||
"minus",
|
||||
"monitor",
|
||||
"monitor_check",
|
||||
"monitor_cog",
|
||||
"monitor_dot",
|
||||
"monitor_down",
|
||||
"monitor_off",
|
||||
@ -953,8 +1068,10 @@ LUCIDE_ICON_LIST = [
|
||||
"mountain",
|
||||
"mountain_snow",
|
||||
"mouse",
|
||||
"mouse_off",
|
||||
"mouse_pointer",
|
||||
"mouse_pointer_2",
|
||||
"mouse_pointer_ban",
|
||||
"mouse_pointer_click",
|
||||
"move",
|
||||
"move_3d",
|
||||
@ -991,10 +1108,13 @@ LUCIDE_ICON_LIST = [
|
||||
"nut_off",
|
||||
"octagon",
|
||||
"octagon_alert",
|
||||
"octagon_minus",
|
||||
"octagon_pause",
|
||||
"octagon_x",
|
||||
"omega",
|
||||
"option",
|
||||
"orbit",
|
||||
"origami",
|
||||
"package",
|
||||
"package_2",
|
||||
"package_check",
|
||||
@ -1007,6 +1127,7 @@ LUCIDE_ICON_LIST = [
|
||||
"paint_roller",
|
||||
"paintbrush",
|
||||
"paintbrush_2",
|
||||
"paintbrush_vertical",
|
||||
"palette",
|
||||
"panel_bottom",
|
||||
"panel_bottom_close",
|
||||
@ -1036,13 +1157,16 @@ LUCIDE_ICON_LIST = [
|
||||
"pc_case",
|
||||
"pen",
|
||||
"pen_line",
|
||||
"pen_off",
|
||||
"pen_tool",
|
||||
"pencil",
|
||||
"pencil_line",
|
||||
"pencil_off",
|
||||
"pencil_ruler",
|
||||
"pentagon",
|
||||
"percent",
|
||||
"person_standing",
|
||||
"philippine_peso",
|
||||
"phone",
|
||||
"phone_call",
|
||||
"phone_forwarded",
|
||||
@ -1058,7 +1182,10 @@ LUCIDE_ICON_LIST = [
|
||||
"pie_chart",
|
||||
"piggy_bank",
|
||||
"pilcrow",
|
||||
"pilcrow_left",
|
||||
"pilcrow_right",
|
||||
"pill",
|
||||
"pill_bottle",
|
||||
"pin",
|
||||
"pin_off",
|
||||
"pipette",
|
||||
@ -1084,6 +1211,7 @@ LUCIDE_ICON_LIST = [
|
||||
"power_off",
|
||||
"presentation",
|
||||
"printer",
|
||||
"printer_check",
|
||||
"projector",
|
||||
"proportions",
|
||||
"puzzle",
|
||||
@ -1158,6 +1286,7 @@ LUCIDE_ICON_LIST = [
|
||||
"satellite_dish",
|
||||
"save",
|
||||
"save_all",
|
||||
"save_off",
|
||||
"scale",
|
||||
"scale_3d",
|
||||
"scaling",
|
||||
@ -1165,7 +1294,9 @@ LUCIDE_ICON_LIST = [
|
||||
"scan_barcode",
|
||||
"scan_eye",
|
||||
"scan_face",
|
||||
"scan_heart",
|
||||
"scan_line",
|
||||
"scan_qr_code",
|
||||
"scan_search",
|
||||
"scan_text",
|
||||
"scatter_chart",
|
||||
@ -1181,6 +1312,7 @@ LUCIDE_ICON_LIST = [
|
||||
"search_code",
|
||||
"search_slash",
|
||||
"search_x",
|
||||
"section",
|
||||
"send",
|
||||
"send_horizontal",
|
||||
"send_to_back",
|
||||
@ -1225,6 +1357,7 @@ LUCIDE_ICON_LIST = [
|
||||
"signal_low",
|
||||
"signal_medium",
|
||||
"signal_zero",
|
||||
"signature",
|
||||
"signpost",
|
||||
"signpost_big",
|
||||
"siren",
|
||||
@ -1234,8 +1367,8 @@ LUCIDE_ICON_LIST = [
|
||||
"slack",
|
||||
"slash",
|
||||
"slice",
|
||||
"sliders_vertical",
|
||||
"sliders_horizontal",
|
||||
"sliders_vertical",
|
||||
"smartphone",
|
||||
"smartphone_charging",
|
||||
"smartphone_nfc",
|
||||
@ -1259,29 +1392,31 @@ LUCIDE_ICON_LIST = [
|
||||
"sprout",
|
||||
"square",
|
||||
"square_activity",
|
||||
"square_arrow_down",
|
||||
"square_arrow_down_left",
|
||||
"square_arrow_down_right",
|
||||
"square_arrow_down",
|
||||
"square_arrow_left",
|
||||
"square_arrow_out_down_left",
|
||||
"square_arrow_out_down_right",
|
||||
"square_arrow_out_up_left",
|
||||
"square_arrow_out_up_right",
|
||||
"square_arrow_right",
|
||||
"square_arrow_up",
|
||||
"square_arrow_up_left",
|
||||
"square_arrow_up_right",
|
||||
"square_arrow_up",
|
||||
"square_asterisk",
|
||||
"square_bottom_dashed_scissors",
|
||||
"square_check_big",
|
||||
"square_chart_gantt",
|
||||
"square_check",
|
||||
"square_check_big",
|
||||
"square_chevron_down",
|
||||
"square_chevron_left",
|
||||
"square_chevron_right",
|
||||
"square_chevron_up",
|
||||
"square_code",
|
||||
"square_dashed_bottom_code",
|
||||
"square_dashed",
|
||||
"square_dashed_bottom",
|
||||
"square_dashed_bottom_code",
|
||||
"square_dashed_kanban",
|
||||
"square_dashed_mouse_pointer",
|
||||
"square_divide",
|
||||
@ -1295,8 +1430,8 @@ LUCIDE_ICON_LIST = [
|
||||
"square_menu",
|
||||
"square_minus",
|
||||
"square_mouse_pointer",
|
||||
"square_parking_off",
|
||||
"square_parking",
|
||||
"square_parking_off",
|
||||
"square_pen",
|
||||
"square_percent",
|
||||
"square_pi",
|
||||
@ -1310,10 +1445,11 @@ LUCIDE_ICON_LIST = [
|
||||
"square_slash",
|
||||
"square_split_horizontal",
|
||||
"square_split_vertical",
|
||||
"square_square",
|
||||
"square_stack",
|
||||
"square_terminal",
|
||||
"square_user_round",
|
||||
"square_user",
|
||||
"square_user_round",
|
||||
"square_x",
|
||||
"squircle",
|
||||
"squirrel",
|
||||
@ -1350,6 +1486,7 @@ LUCIDE_ICON_LIST = [
|
||||
"table_cells_merge",
|
||||
"table_cells_split",
|
||||
"table_columns_split",
|
||||
"table_of_contents",
|
||||
"table_properties",
|
||||
"table_rows_split",
|
||||
"tablet",
|
||||
@ -1365,11 +1502,11 @@ LUCIDE_ICON_LIST = [
|
||||
"tangent",
|
||||
"target",
|
||||
"telescope",
|
||||
"tent",
|
||||
"tent_tree",
|
||||
"terminal",
|
||||
"test_tube_diagonal",
|
||||
"test_tube",
|
||||
"tent",
|
||||
"test_tube_diagonal",
|
||||
"test_tubes",
|
||||
"text",
|
||||
"text_cursor",
|
||||
@ -1390,11 +1527,14 @@ LUCIDE_ICON_LIST = [
|
||||
"ticket_plus",
|
||||
"ticket_slash",
|
||||
"ticket_x",
|
||||
"tickets",
|
||||
"tickets_plane",
|
||||
"timer",
|
||||
"timer_off",
|
||||
"timer_reset",
|
||||
"toggle_left",
|
||||
"toggle_right",
|
||||
"toilet",
|
||||
"tornado",
|
||||
"torus",
|
||||
"touchpad",
|
||||
@ -1416,17 +1556,22 @@ LUCIDE_ICON_LIST = [
|
||||
"trello",
|
||||
"trending_down",
|
||||
"trending_up",
|
||||
"trending_up_down",
|
||||
"triangle",
|
||||
"triangle_right",
|
||||
"triangle_alert",
|
||||
"triangle_dashed",
|
||||
"triangle_right",
|
||||
"trophy",
|
||||
"truck",
|
||||
"turtle",
|
||||
"tv",
|
||||
"tv_2",
|
||||
"tv_minimal",
|
||||
"tv_minimal_play",
|
||||
"twitch",
|
||||
"twitter",
|
||||
"type",
|
||||
"type_outline",
|
||||
"umbrella",
|
||||
"umbrella_off",
|
||||
"underline",
|
||||
@ -1437,8 +1582,8 @@ LUCIDE_ICON_LIST = [
|
||||
"unfold_vertical",
|
||||
"ungroup",
|
||||
"university",
|
||||
"unlink_2",
|
||||
"unlink",
|
||||
"unlink_2",
|
||||
"unplug",
|
||||
"upload",
|
||||
"usb",
|
||||
@ -1446,11 +1591,13 @@ LUCIDE_ICON_LIST = [
|
||||
"user_check",
|
||||
"user_cog",
|
||||
"user_minus",
|
||||
"user_pen",
|
||||
"user_plus",
|
||||
"user_round",
|
||||
"user_round_check",
|
||||
"user_round_cog",
|
||||
"user_round_minus",
|
||||
"user_round_pen",
|
||||
"user_round_plus",
|
||||
"user_round_search",
|
||||
"user_round_x",
|
||||
@ -1472,14 +1619,16 @@ LUCIDE_ICON_LIST = [
|
||||
"videotape",
|
||||
"view",
|
||||
"voicemail",
|
||||
"volleyball",
|
||||
"volume",
|
||||
"volume_1",
|
||||
"volume_2",
|
||||
"volume_off",
|
||||
"volume_x",
|
||||
"vote",
|
||||
"wallet",
|
||||
"wallet_minimal",
|
||||
"wallet_cards",
|
||||
"wallet_minimal",
|
||||
"wallpaper",
|
||||
"wand",
|
||||
"wand_sparkles",
|
||||
@ -1487,17 +1636,22 @@ LUCIDE_ICON_LIST = [
|
||||
"washing_machine",
|
||||
"watch",
|
||||
"waves",
|
||||
"waves_ladder",
|
||||
"waypoints",
|
||||
"webcam",
|
||||
"webhook_off",
|
||||
"webhook",
|
||||
"webhook_off",
|
||||
"weight",
|
||||
"wheat",
|
||||
"wheat_off",
|
||||
"whole_word",
|
||||
"wifi",
|
||||
"wifi_high",
|
||||
"wifi_low",
|
||||
"wifi_off",
|
||||
"wifi_zero",
|
||||
"wind",
|
||||
"wind_arrow_down",
|
||||
"wine",
|
||||
"wine_off",
|
||||
"workflow",
|
||||
@ -1511,3 +1665,10 @@ LUCIDE_ICON_LIST = [
|
||||
"zoom_in",
|
||||
"zoom_out",
|
||||
]

# The default transformation of some icon names doesn't match how the
# icons are exported from Lucide. Manual overrides can go here.
LUCIDE_ICON_MAPPING_OVERRIDE = {
"grid_2x_2_check": "Grid2x2Check",
"grid_2x_2_x": "Grid2x2X",
}
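Note: the override table exists because the default transform (to_title_case(to_snake_case(tag)) + "Icon") does not always match the name Lucide exports. A standalone illustration with a simplified stand-in for reflex.utils.format (not the real helper):

def to_title_case(snake: str) -> str:
    return "".join(part.capitalize() for part in snake.split("_"))

print(to_title_case("circle_check_big") + "Icon")  # CircleCheckBigIcon (default transform)
print(to_title_case("grid_2x_2_check") + "Icon")   # Grid2x2CheckIcon
# Lucide exports the latter as Grid2x2Check, hence the manual override above.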
@ -104,12 +104,60 @@ class Icon(LucideIconComponent):
|
||||
Raises:
|
||||
AttributeError: The errors tied to bad usage of the Icon component.
|
||||
ValueError: If the icon tag is invalid.
|
||||
TypeError: If the icon name is not a string.
|
||||
|
||||
Returns:
|
||||
The created component.
|
||||
"""
|
||||
...
|
||||
|
||||
class DynamicIcon(LucideIconComponent):
|
||||
@overload
|
||||
@classmethod
|
||||
def create( # type: ignore
|
||||
cls,
|
||||
*children,
|
||||
name: Optional[Union[Var[str], str]] = None,
|
||||
style: Optional[Style] = None,
|
||||
key: Optional[Any] = None,
|
||||
id: Optional[Any] = None,
|
||||
class_name: Optional[Any] = None,
|
||||
autofocus: Optional[bool] = None,
|
||||
custom_attrs: Optional[Dict[str, Union[Var, Any]]] = None,
|
||||
on_blur: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_click: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_context_menu: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_double_click: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_focus: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_mount: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_mouse_down: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_mouse_enter: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_mouse_leave: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_mouse_move: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_mouse_out: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_mouse_over: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_mouse_up: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_scroll: Optional[EventType[[], BASE_STATE]] = None,
|
||||
on_unmount: Optional[EventType[[], BASE_STATE]] = None,
|
||||
**props,
|
||||
) -> "DynamicIcon":
|
||||
"""Create the component.
|
||||
|
||||
Args:
|
||||
*children: The children of the component.
|
||||
style: The style of the component.
|
||||
key: A unique key for the component.
|
||||
id: The id for the component.
|
||||
class_name: The class name for the component.
|
||||
autofocus: Whether the component should take the focus once the page is loaded
|
||||
custom_attrs: custom attribute
|
||||
**props: The props of the component.
|
||||
|
||||
Returns:
|
||||
The component.
|
||||
"""
|
||||
...
|
||||
|
||||
LUCIDE_ICON_LIST = [
|
||||
"a_arrow_down",
|
||||
"a_arrow_up",
|
||||
@ -154,6 +202,7 @@ LUCIDE_ICON_LIST = [
|
||||
"ambulance",
|
||||
"ampersand",
|
||||
"ampersands",
|
||||
"amphora",
|
||||
"anchor",
|
||||
"angry",
|
||||
"annoyed",
|
||||
@ -241,6 +290,7 @@ LUCIDE_ICON_LIST = [
|
||||
"baggage_claim",
|
||||
"ban",
|
||||
"banana",
|
||||
"bandage",
|
||||
"banknote",
|
||||
"bar_chart",
|
||||
"bar_chart_2",
|
||||
@ -278,8 +328,10 @@ LUCIDE_ICON_LIST = [
|
||||
"between_horizontal_start",
|
||||
"between_vertical_end",
|
||||
"between_vertical_start",
|
||||
"biceps_flexed",
|
||||
"bike",
|
||||
"binary",
|
||||
"binoculars",
|
||||
"biohazard",
|
||||
"bird",
|
||||
"bitcoin",
|
||||
@ -326,6 +378,7 @@ LUCIDE_ICON_LIST = [
|
||||
"boom_box",
|
||||
"bot",
|
||||
"bot_message_square",
|
||||
"bot_off",
|
||||
"box",
|
||||
"box_select",
|
||||
"boxes",
|
||||
@ -337,6 +390,7 @@ LUCIDE_ICON_LIST = [
|
||||
"brick_wall",
|
||||
"briefcase",
|
||||
"briefcase_business",
|
||||
"briefcase_conveyor_belt",
|
||||
"briefcase_medical",
|
||||
"bring_to_front",
|
||||
"brush",
|
||||
@ -353,9 +407,13 @@ LUCIDE_ICON_LIST = [
|
||||
"cake_slice",
|
||||
"calculator",
|
||||
"calendar",
|
||||
"calendar_1",
|
||||
"calendar_arrow_down",
|
||||
"calendar_arrow_up",
|
||||
"calendar_check",
|
||||
"calendar_check_2",
|
||||
"calendar_clock",
|
||||
"calendar_cog",
|
||||
"calendar_days",
|
||||
"calendar_fold",
|
||||
"calendar_heart",
|
||||
@ -366,6 +424,7 @@ LUCIDE_ICON_LIST = [
|
||||
"calendar_plus_2",
|
||||
"calendar_range",
|
||||
"calendar_search",
|
||||
"calendar_sync",
|
||||
"calendar_x",
|
||||
"calendar_x_2",
|
||||
"camera",
|
||||
@ -390,6 +449,29 @@ LUCIDE_ICON_LIST = [
|
||||
"castle",
|
||||
"cat",
|
||||
"cctv",
|
||||
"chart_area",
|
||||
"chart_bar",
|
||||
"chart_bar_big",
|
||||
"chart_bar_decreasing",
|
||||
"chart_bar_increasing",
|
||||
"chart_bar_stacked",
|
||||
"chart_candlestick",
|
||||
"chart_column",
|
||||
"chart_column_big",
|
||||
"chart_column_decreasing",
|
||||
"chart_column_increasing",
|
||||
"chart_column_stacked",
|
||||
"chart_gantt",
|
||||
"chart_line",
|
||||
"chart_network",
|
||||
"chart_no_axes_column",
|
||||
"chart_no_axes_column_decreasing",
|
||||
"chart_no_axes_column_increasing",
|
||||
"chart_no_axes_combined",
|
||||
"chart_no_axes_gantt",
|
||||
"chart_pie",
|
||||
"chart_scatter",
|
||||
"chart_spline",
|
||||
"check",
|
||||
"check_check",
|
||||
"chef_hat",
|
||||
@ -404,6 +486,7 @@ LUCIDE_ICON_LIST = [
|
||||
"chevrons_down_up",
|
||||
"chevrons_left",
|
||||
"chevrons_left_right",
|
||||
"chevrons_left_right_ellipsis",
|
||||
"chevrons_right",
|
||||
"chevrons_right_left",
|
||||
"chevrons_up",
|
||||
@ -422,8 +505,8 @@ LUCIDE_ICON_LIST = [
|
||||
"circle_arrow_out_up_right",
|
||||
"circle_arrow_right",
|
||||
"circle_arrow_up",
|
||||
"circle_check_big",
|
||||
"circle_check",
|
||||
"circle_check_big",
|
||||
"circle_chevron_down",
|
||||
"circle_chevron_left",
|
||||
"circle_chevron_right",
|
||||
@ -435,13 +518,14 @@ LUCIDE_ICON_LIST = [
|
||||
"circle_dot_dashed",
|
||||
"circle_ellipsis",
|
||||
"circle_equal",
|
||||
"circle_fading_arrow_up",
|
||||
"circle_fading_plus",
|
||||
"circle_gauge",
|
||||
"circle_help",
|
||||
"circle_minus",
|
||||
"circle_off",
|
||||
"circle_parking_off",
|
||||
"circle_parking",
|
||||
"circle_parking_off",
|
||||
"circle_pause",
|
||||
"circle_percent",
|
||||
"circle_play",
|
||||
@ -480,7 +564,11 @@ LUCIDE_ICON_LIST = [
|
||||
"clock_7",
|
||||
"clock_8",
|
||||
"clock_9",
|
||||
"clock_alert",
|
||||
"clock_arrow_down",
|
||||
"clock_arrow_up",
|
||||
"cloud",
|
||||
"cloud_alert",
|
||||
"cloud_cog",
|
||||
"cloud_download",
|
||||
"cloud_drizzle",
|
||||
@ -551,6 +639,7 @@ LUCIDE_ICON_LIST = [
|
||||
"cup_soda",
|
||||
"currency",
|
||||
"cylinder",
|
||||
"dam",
|
||||
"database",
|
||||
"database_backup",
|
||||
"database_zap",
|
||||
@ -558,7 +647,9 @@ LUCIDE_ICON_LIST = [
|
||||
"dessert",
|
||||
"diameter",
|
||||
"diamond",
|
||||
"diamond_minus",
|
||||
"diamond_percent",
|
||||
"diamond_plus",
|
||||
"dice_1",
|
||||
"dice_2",
|
||||
"dice_3",
|
||||
@ -587,6 +678,7 @@ LUCIDE_ICON_LIST = [
|
||||
"dribbble",
|
||||
"drill",
|
||||
"droplet",
|
||||
"droplet_off",
|
||||
"droplets",
|
||||
"drum",
|
||||
"drumstick",
|
||||
@ -602,12 +694,15 @@ LUCIDE_ICON_LIST = [
|
||||
"ellipsis",
|
||||
"ellipsis_vertical",
|
||||
"equal",
|
||||
"equal_approximately",
|
||||
"equal_not",
|
||||
"eraser",
|
||||
"ethernet_port",
|
||||
"euro",
|
||||
"expand",
|
||||
"external_link",
|
||||
"eye",
|
||||
"eye_closed",
|
||||
"eye_off",
|
||||
"facebook",
|
||||
"factory",
|
||||
@ -627,6 +722,10 @@ LUCIDE_ICON_LIST = [
|
||||
"file_bar_chart",
|
||||
"file_bar_chart_2",
|
||||
"file_box",
|
||||
"file_chart_column",
|
||||
"file_chart_column_increasing",
|
||||
"file_chart_line",
|
||||
"file_chart_pie",
|
||||
"file_check",
|
||||
"file_check_2",
|
||||
"file_clock",
|
||||
@ -668,6 +767,7 @@ LUCIDE_ICON_LIST = [
|
||||
"file_type",
|
||||
"file_type_2",
|
||||
"file_up",
|
||||
"file_user",
|
||||
"file_video",
|
||||
"file_video_2",
|
||||
"file_volume",
|
||||
@ -709,6 +809,7 @@ LUCIDE_ICON_LIST = [
|
||||
"folder_check",
|
||||
"folder_clock",
|
||||
"folder_closed",
|
||||
"folder_code",
|
||||
"folder_cog",
|
||||
"folder_dot",
|
||||
"folder_down",
|
||||
@ -781,7 +882,12 @@ LUCIDE_ICON_LIST = [
|
||||
"graduation_cap",
|
||||
"grape",
|
||||
"grid_2x2",
|
||||
"grid_2x_2",
|
||||
"grid_2x_2_check",
|
||||
"grid_2x_2_plus",
|
||||
"grid_2x_2_x",
|
||||
"grid_3x3",
|
||||
"grid_3x_3",
|
||||
"grip",
|
||||
"grip_horizontal",
|
||||
"grip_vertical",
|
||||
@ -810,6 +916,7 @@ LUCIDE_ICON_LIST = [
|
||||
"heading_4",
|
||||
"heading_5",
|
||||
"heading_6",
|
||||
"headphone_off",
|
||||
"headphones",
|
||||
"headset",
|
||||
"heart",
|
||||
@ -827,14 +934,21 @@ LUCIDE_ICON_LIST = [
|
||||
"hospital",
|
||||
"hotel",
|
||||
"hourglass",
|
||||
"house",
|
||||
"house_plug",
|
||||
"house_plus",
|
||||
"house_wifi",
|
||||
"ice_cream_bowl",
|
||||
"ice_cream_cone",
|
||||
"id_card",
|
||||
"image",
|
||||
"image_down",
|
||||
"image_minus",
|
||||
"image_off",
|
||||
"image_play",
|
||||
"image_plus",
|
||||
"image_up",
|
||||
"image_upscale",
|
||||
"images",
|
||||
"import",
|
||||
"inbox",
|
||||
@ -856,6 +970,7 @@ LUCIDE_ICON_LIST = [
|
||||
"key_square",
|
||||
"keyboard",
|
||||
"keyboard_music",
|
||||
"keyboard_off",
|
||||
"lamp",
|
||||
"lamp_ceiling",
|
||||
"lamp_desk",
|
||||
@ -865,8 +980,9 @@ LUCIDE_ICON_LIST = [
|
||||
"land_plot",
|
||||
"landmark",
|
||||
"languages",
|
||||
"laptop_minimal",
|
||||
"laptop",
|
||||
"laptop_minimal",
|
||||
"laptop_minimal_check",
|
||||
"lasso",
|
||||
"lasso_select",
|
||||
"laugh",
|
||||
@ -881,6 +997,8 @@ LUCIDE_ICON_LIST = [
|
||||
"layout_template",
|
||||
"leaf",
|
||||
"leafy_green",
|
||||
"lectern",
|
||||
"letter_text",
|
||||
"library",
|
||||
"library_big",
|
||||
"life_buoy",
|
||||
@ -893,10 +1011,12 @@ LUCIDE_ICON_LIST = [
|
||||
"link_2_off",
|
||||
"linkedin",
|
||||
"list",
|
||||
"list_check",
|
||||
"list_checks",
|
||||
"list_collapse",
|
||||
"list_end",
|
||||
"list_filter",
|
||||
"list_filter_plus",
|
||||
"list_minus",
|
||||
"list_music",
|
||||
"list_ordered",
|
||||
@ -909,15 +1029,17 @@ LUCIDE_ICON_LIST = [
|
||||
"list_x",
|
||||
"loader",
|
||||
"loader_circle",
|
||||
"loader_pinwheel",
|
||||
"locate",
|
||||
"locate_fixed",
|
||||
"locate_off",
|
||||
"lock",
|
||||
"lock_keyhole_open",
|
||||
"lock_keyhole",
|
||||
"lock_keyhole_open",
|
||||
"lock_open",
|
||||
"log_in",
|
||||
"log_out",
|
||||
"logs",
|
||||
"lollipop",
|
||||
"luggage",
|
||||
"magnet",
|
||||
@ -934,7 +1056,16 @@ LUCIDE_ICON_LIST = [
|
||||
"mails",
|
||||
"map",
|
||||
"map_pin",
|
||||
"map_pin_check",
|
||||
"map_pin_check_inside",
|
||||
"map_pin_house",
|
||||
"map_pin_minus",
|
||||
"map_pin_minus_inside",
|
||||
"map_pin_off",
|
||||
"map_pin_plus",
|
||||
"map_pin_plus_inside",
|
||||
"map_pin_x",
|
||||
"map_pin_x_inside",
|
||||
"map_pinned",
|
||||
"martini",
|
||||
"maximize",
|
||||
@ -963,6 +1094,7 @@ LUCIDE_ICON_LIST = [
|
||||
"message_square_diff",
|
||||
"message_square_dot",
|
||||
"message_square_heart",
|
||||
"message_square_lock",
|
||||
"message_square_more",
|
||||
"message_square_off",
|
||||
"message_square_plus",
|
||||
@ -974,8 +1106,9 @@ LUCIDE_ICON_LIST = [
|
||||
"message_square_x",
|
||||
"messages_square",
|
||||
"mic",
|
||||
"mic_vocal",
|
||||
"mic_off",
|
||||
"mic_vocal",
|
||||
"microchip",
|
||||
"microscope",
|
||||
"microwave",
|
||||
"milestone",
|
||||
@ -986,6 +1119,7 @@ LUCIDE_ICON_LIST = [
|
||||
"minus",
|
||||
"monitor",
|
||||
"monitor_check",
|
||||
"monitor_cog",
|
||||
"monitor_dot",
|
||||
"monitor_down",
|
||||
"monitor_off",
|
||||
@ -1001,8 +1135,10 @@ LUCIDE_ICON_LIST = [
|
||||
"mountain",
|
||||
"mountain_snow",
|
||||
"mouse",
|
||||
"mouse_off",
|
||||
"mouse_pointer",
|
||||
"mouse_pointer_2",
|
||||
"mouse_pointer_ban",
|
||||
"mouse_pointer_click",
|
||||
"move",
|
||||
"move_3d",
|
||||
@ -1039,10 +1175,13 @@ LUCIDE_ICON_LIST = [
|
||||
"nut_off",
|
||||
"octagon",
|
||||
"octagon_alert",
|
||||
"octagon_minus",
|
||||
"octagon_pause",
|
||||
"octagon_x",
|
||||
"omega",
|
||||
"option",
|
||||
"orbit",
|
||||
"origami",
|
||||
"package",
|
||||
"package_2",
|
||||
"package_check",
|
||||
@ -1055,6 +1194,7 @@ LUCIDE_ICON_LIST = [
|
||||
"paint_roller",
|
||||
"paintbrush",
|
||||
"paintbrush_2",
|
||||
"paintbrush_vertical",
|
||||
"palette",
|
||||
"panel_bottom",
|
||||
"panel_bottom_close",
|
||||
@ -1084,13 +1224,16 @@ LUCIDE_ICON_LIST = [
|
||||
"pc_case",
|
||||
"pen",
|
||||
"pen_line",
|
||||
"pen_off",
|
||||
"pen_tool",
|
||||
"pencil",
|
||||
"pencil_line",
|
||||
"pencil_off",
|
||||
"pencil_ruler",
|
||||
"pentagon",
|
||||
"percent",
|
||||
"person_standing",
|
||||
"philippine_peso",
|
||||
"phone",
|
||||
"phone_call",
|
||||
"phone_forwarded",
|
||||
@ -1106,7 +1249,10 @@ LUCIDE_ICON_LIST = [
|
||||
"pie_chart",
|
||||
"piggy_bank",
|
||||
"pilcrow",
|
||||
"pilcrow_left",
|
||||
"pilcrow_right",
|
||||
"pill",
|
||||
"pill_bottle",
|
||||
"pin",
|
||||
"pin_off",
|
||||
"pipette",
|
||||
@ -1132,6 +1278,7 @@ LUCIDE_ICON_LIST = [
|
||||
"power_off",
|
||||
"presentation",
|
||||
"printer",
|
||||
"printer_check",
|
||||
"projector",
|
||||
"proportions",
|
||||
"puzzle",
|
||||
@ -1206,6 +1353,7 @@ LUCIDE_ICON_LIST = [
|
||||
"satellite_dish",
|
||||
"save",
|
||||
"save_all",
|
||||
"save_off",
|
||||
"scale",
|
||||
"scale_3d",
|
||||
"scaling",
|
||||
@ -1213,7 +1361,9 @@ LUCIDE_ICON_LIST = [
|
||||
"scan_barcode",
|
||||
"scan_eye",
|
||||
"scan_face",
|
||||
"scan_heart",
|
||||
"scan_line",
|
||||
"scan_qr_code",
|
||||
"scan_search",
|
||||
"scan_text",
|
||||
"scatter_chart",
|
||||
@ -1229,6 +1379,7 @@ LUCIDE_ICON_LIST = [
|
||||
"search_code",
|
||||
"search_slash",
|
||||
"search_x",
|
||||
"section",
|
||||
"send",
|
||||
"send_horizontal",
|
||||
"send_to_back",
|
||||
@ -1273,6 +1424,7 @@ LUCIDE_ICON_LIST = [
|
||||
"signal_low",
|
||||
"signal_medium",
|
||||
"signal_zero",
|
||||
"signature",
|
||||
"signpost",
|
||||
"signpost_big",
|
||||
"siren",
|
||||
@ -1282,8 +1434,8 @@ LUCIDE_ICON_LIST = [
|
||||
"slack",
|
||||
"slash",
|
||||
"slice",
|
||||
"sliders_vertical",
|
||||
"sliders_horizontal",
|
||||
"sliders_vertical",
|
||||
"smartphone",
|
||||
"smartphone_charging",
|
||||
"smartphone_nfc",
|
||||
@ -1307,29 +1459,31 @@ LUCIDE_ICON_LIST = [
|
||||
"sprout",
|
||||
"square",
|
||||
"square_activity",
|
||||
"square_arrow_down",
|
||||
"square_arrow_down_left",
|
||||
"square_arrow_down_right",
|
||||
"square_arrow_down",
|
||||
"square_arrow_left",
|
||||
"square_arrow_out_down_left",
|
||||
"square_arrow_out_down_right",
|
||||
"square_arrow_out_up_left",
|
||||
"square_arrow_out_up_right",
|
||||
"square_arrow_right",
|
||||
"square_arrow_up",
|
||||
"square_arrow_up_left",
|
||||
"square_arrow_up_right",
|
||||
"square_arrow_up",
|
||||
"square_asterisk",
|
||||
"square_bottom_dashed_scissors",
|
||||
"square_check_big",
|
||||
"square_chart_gantt",
|
||||
"square_check",
|
||||
"square_check_big",
|
||||
"square_chevron_down",
|
||||
"square_chevron_left",
|
||||
"square_chevron_right",
|
||||
"square_chevron_up",
|
||||
"square_code",
|
||||
"square_dashed_bottom_code",
|
||||
"square_dashed",
|
||||
"square_dashed_bottom",
|
||||
"square_dashed_bottom_code",
|
||||
"square_dashed_kanban",
|
||||
"square_dashed_mouse_pointer",
|
||||
"square_divide",
|
||||
@ -1343,8 +1497,8 @@ LUCIDE_ICON_LIST = [
|
||||
"square_menu",
|
||||
"square_minus",
|
||||
"square_mouse_pointer",
|
||||
"square_parking_off",
|
||||
"square_parking",
|
||||
"square_parking_off",
|
||||
"square_pen",
|
||||
"square_percent",
|
||||
"square_pi",
|
||||
@ -1358,10 +1512,11 @@ LUCIDE_ICON_LIST = [
|
||||
"square_slash",
|
||||
"square_split_horizontal",
|
||||
"square_split_vertical",
|
||||
"square_square",
|
||||
"square_stack",
|
||||
"square_terminal",
|
||||
"square_user_round",
|
||||
"square_user",
|
||||
"square_user_round",
|
||||
"square_x",
|
||||
"squircle",
|
||||
"squirrel",
|
||||
@ -1398,6 +1553,7 @@ LUCIDE_ICON_LIST = [
|
||||
"table_cells_merge",
|
||||
"table_cells_split",
|
||||
"table_columns_split",
|
||||
"table_of_contents",
|
||||
"table_properties",
|
||||
"table_rows_split",
|
||||
"tablet",
|
||||
@ -1413,11 +1569,11 @@ LUCIDE_ICON_LIST = [
|
||||
"tangent",
|
||||
"target",
|
||||
"telescope",
|
||||
"tent",
|
||||
"tent_tree",
|
||||
"terminal",
|
||||
"test_tube_diagonal",
|
||||
"test_tube",
|
||||
"tent",
|
||||
"test_tube_diagonal",
|
||||
"test_tubes",
|
||||
"text",
|
||||
"text_cursor",
|
||||
@ -1438,11 +1594,14 @@ LUCIDE_ICON_LIST = [
|
||||
"ticket_plus",
|
||||
"ticket_slash",
|
||||
"ticket_x",
|
||||
"tickets",
|
||||
"tickets_plane",
|
||||
"timer",
|
||||
"timer_off",
|
||||
"timer_reset",
|
||||
"toggle_left",
|
||||
"toggle_right",
|
||||
"toilet",
|
||||
"tornado",
|
||||
"torus",
|
||||
"touchpad",
|
||||
@ -1464,17 +1623,22 @@ LUCIDE_ICON_LIST = [
|
||||
"trello",
|
||||
"trending_down",
|
||||
"trending_up",
|
||||
"trending_up_down",
|
||||
"triangle",
|
||||
"triangle_right",
|
||||
"triangle_alert",
|
||||
"triangle_dashed",
|
||||
"triangle_right",
|
||||
"trophy",
|
||||
"truck",
|
||||
"turtle",
|
||||
"tv",
|
||||
"tv_2",
|
||||
"tv_minimal",
|
||||
"tv_minimal_play",
|
||||
"twitch",
|
||||
"twitter",
|
||||
"type",
|
||||
"type_outline",
|
||||
"umbrella",
|
||||
"umbrella_off",
|
||||
"underline",
|
||||
@ -1485,8 +1649,8 @@ LUCIDE_ICON_LIST = [
|
||||
"unfold_vertical",
|
||||
"ungroup",
|
||||
"university",
|
||||
"unlink_2",
|
||||
"unlink",
|
||||
"unlink_2",
|
||||
"unplug",
|
||||
"upload",
|
||||
"usb",
|
||||
@ -1494,11 +1658,13 @@ LUCIDE_ICON_LIST = [
|
||||
"user_check",
|
||||
"user_cog",
|
||||
"user_minus",
|
||||
"user_pen",
|
||||
"user_plus",
|
||||
"user_round",
|
||||
"user_round_check",
|
||||
"user_round_cog",
|
||||
"user_round_minus",
|
||||
"user_round_pen",
|
||||
"user_round_plus",
|
||||
"user_round_search",
|
||||
"user_round_x",
|
||||
@ -1520,14 +1686,16 @@ LUCIDE_ICON_LIST = [
|
||||
"videotape",
|
||||
"view",
|
||||
"voicemail",
|
||||
"volleyball",
|
||||
"volume",
|
||||
"volume_1",
|
||||
"volume_2",
|
||||
"volume_off",
|
||||
"volume_x",
|
||||
"vote",
|
||||
"wallet",
|
||||
"wallet_minimal",
|
||||
"wallet_cards",
|
||||
"wallet_minimal",
|
||||
"wallpaper",
|
||||
"wand",
|
||||
"wand_sparkles",
|
||||
@ -1535,17 +1703,22 @@ LUCIDE_ICON_LIST = [
|
||||
"washing_machine",
|
||||
"watch",
|
||||
"waves",
|
||||
"waves_ladder",
|
||||
"waypoints",
|
||||
"webcam",
|
||||
"webhook_off",
|
||||
"webhook",
|
||||
"webhook_off",
|
||||
"weight",
|
||||
"wheat",
|
||||
"wheat_off",
|
||||
"whole_word",
|
||||
"wifi",
|
||||
"wifi_high",
|
||||
"wifi_low",
|
||||
"wifi_off",
|
||||
"wifi_zero",
|
||||
"wind",
|
||||
"wind_arrow_down",
|
||||
"wine",
|
||||
"wine_off",
|
||||
"workflow",
|
||||
@ -1559,3 +1732,7 @@ LUCIDE_ICON_LIST = [
|
||||
"zoom_in",
|
||||
"zoom_out",
|
||||
]
LUCIDE_ICON_MAPPING_OVERRIDE = {
"grid_2x_2_check": "Grid2x2Check",
"grid_2x_2_x": "Grid2x2X",
}
@ -420,11 +420,12 @@ const {_LANGUAGE!s} = match ? match[1] : '';

def _get_custom_code(self) -> str | None:
hooks = {}
from reflex.compiler.templates import MACROS

for _component in self.component_map.values():
comp = _component(_MOCK_ARG)
hooks.update(comp._get_all_hooks_internal())
hooks.update(comp._get_all_hooks())
formatted_hooks = "\n".join(hooks.keys())
formatted_hooks = MACROS.module.renderHooks(hooks) # type: ignore
return f"""
function {self._get_component_map_name()} () {{
{formatted_hooks}
@ -151,8 +151,8 @@ class ColorModeIconButton(IconButton):
dropdown_menu.trigger(
super().create(
ColorModeIcon.create(),
**props,
)
),
**props,
),
dropdown_menu.content(
color_mode_item("light"),
@ -76,7 +76,7 @@ class Link(RadixThemesComponent, A, MemoizationLeaf, MarkdownComponentMap):
Returns:
Component: The link component
"""
props.setdefault(":hover", {"color": color("accent", 8)})
props.setdefault("_hover", {"color": color("accent", 8)})
href = props.get("href")

is_external = props.pop("is_external", None)
@ -85,8 +85,8 @@ class ChartBase(RechartsCharts):
cls._ensure_valid_dimension("height", height)

dim_props = {
"width": width or "100%",
"height": height or "100%",
"width": width if width is not None else "100%",
"height": height if height is not None else "100%",
}
# Provide min dimensions so the graph always appears, even if the outer container is zero-size.
if width is None:
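Note: the dimension change matters when a caller passes 0: `or` treats any falsy value as missing, while the explicit None check preserves it. A tiny demonstration:

width = 0
print(width or "100%")                         # -> 100%  (0 silently replaced)
print(width if width is not None else "100%")  # -> 0     (explicit zero preserved)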
@ -167,7 +167,7 @@ class ToastProps(PropsBase, NoExtrasAllowedProps):
class Toaster(Component):
"""A Toaster Component for displaying toast notifications."""

library: str = "sonner@1.5.0"
library: str = "sonner@1.7.1"

tag = "Toaster"
@ -12,6 +12,7 @@ import threading
import urllib.parse
from importlib.util import find_spec
from pathlib import Path
from types import ModuleType
from typing import (
TYPE_CHECKING,
Any,
@ -567,6 +568,9 @@ class EnvironmentVariables:
# The maximum size of the reflex state in kilobytes.
REFLEX_STATE_SIZE_LIMIT: EnvVar[int] = env_var(1000)

# Whether to use the turbopack bundler.
REFLEX_USE_TURBOPACK: EnvVar[bool] = env_var(True)


environment = EnvironmentVariables()
@ -604,6 +608,9 @@ class Config(Base):
# The name of the app (should match the name of the app directory).
app_name: str

# The path to the app module.
app_module_import: Optional[str] = None

# The log level to use.
loglevel: constants.LogLevel = constants.LogLevel.DEFAULT

@ -726,6 +733,19 @@ class Config(Base):
"REDIS_URL is required when using the redis state manager."
)

@property
def app_module(self) -> ModuleType | None:
"""Return the app module if `app_module_import` is set.

Returns:
The app module.
"""
return (
importlib.import_module(self.app_module_import)
if self.app_module_import
else None
)

@property
def module(self) -> str:
"""Get the module name of the app.
@ -733,6 +753,8 @@ class Config(Base):
Returns:
The module name.
"""
if self.app_module is not None:
return self.app_module.__name__
return ".".join([self.app_name, self.app_name])

def update_from_env(self) -> dict[str, Any]:
@ -871,7 +893,7 @@ def get_config(reload: bool = False) -> Config:
return cached_rxconfig.config

with _config_lock:
sys_path = sys.path.copy()
orig_sys_path = sys.path.copy()
sys.path.clear()
sys.path.append(str(Path.cwd()))
try:
@ -879,9 +901,14 @@ def get_config(reload: bool = False) -> Config:
return _get_config()
except Exception:
# If the module import fails, try to import with the original sys.path.
sys.path.extend(sys_path)
sys.path.extend(orig_sys_path)
return _get_config()
finally:
# Find any entries added to sys.path by rxconfig.py itself.
extra_paths = [
p for p in sys.path if p not in orig_sys_path and p != str(Path.cwd())
]
# Restore the original sys.path.
sys.path.clear()
sys.path.extend(sys_path)
sys.path.extend(extra_paths)
sys.path.extend(orig_sys_path)
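Note: app_module_import lets rxconfig.py point Reflex at a module other than the default app_name/app_name.py (the ".".join fallback above). A hedged sketch of how the new field might be set; the module path here is a made-up example:

# rxconfig.py
import reflex as rx

config = rx.Config(
    app_name="my_app",
    app_module_import="my_app.entry",  # hypothetical module exposing the rx.App
)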
@ -135,6 +135,7 @@ class Hooks(SimpleNamespace):
class HookPosition(enum.Enum):
"""The position of the hook in the component."""

INTERNAL = "internal"
PRE_TRIGGER = "pre_trigger"
POST_TRIGGER = "post_trigger"
@ -182,7 +182,7 @@ class PackageJson(SimpleNamespace):
"@emotion/react": "11.13.3",
"axios": "1.7.7",
"json5": "2.2.3",
"next": "14.2.16",
"next": "15.1.4",
"next-sitemap": "4.2.3",
"next-themes": "0.4.3",
"react": "18.3.1",
@ -421,12 +421,13 @@ def _run_commands_in_subprocess(cmds: list[str]) -> bool:
console.debug(f"Running command: {' '.join(cmds)}")
try:
result = subprocess.run(cmds, capture_output=True, text=True, check=True)
console.debug(result.stdout)
return True
except subprocess.CalledProcessError as cpe:
console.error(cpe.stdout)
console.error(cpe.stderr)
return False
else:
console.debug(result.stdout)
return True


def _make_pyi_files():
@ -931,10 +932,11 @@ def _get_file_from_prompt_in_loop() -> Tuple[bytes, str] | None:
file_extension = image_filepath.suffix
try:
image_file = image_filepath.read_bytes()
return image_file, file_extension
except OSError as ose:
console.error(f"Unable to read the {file_extension} file due to {ose}")
raise typer.Exit(code=1) from ose
else:
return image_file, file_extension

console.debug(f"File extension detected: {file_extension}")
return None
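Note: both hunks above move the success path into an else: clause so that only the risky call (subprocess.run / read_bytes) sits inside try. A minimal generic sketch of the pattern:

from pathlib import Path
from typing import Optional

def read_file(path: Path) -> Optional[bytes]:
    try:
        data = path.read_bytes()
    except OSError as err:
        print(f"Unable to read {path}: {err}")
        return None
    else:
        # Runs only when read_bytes() succeeded; unrelated errors are not swallowed.
        return data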
101
reflex/event.py
@ -91,6 +91,8 @@ class Event:
return f"{self.token}_{substate}"


_EVENT_FIELDS: set[str] = {f.name for f in dataclasses.fields(Event)}

BACKGROUND_TASK_MARKER = "_reflex_background_task"
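Note: _EVENT_FIELDS precomputes the Event dataclass's field names, presumably so incoming payload keys can be filtered cheaply. A generic sketch of the pattern with a toy dataclass (not the real Event definition):

import dataclasses

@dataclasses.dataclass
class ToyEvent:
    token: str
    name: str
    payload: dict

TOY_EVENT_FIELDS = {f.name for f in dataclasses.fields(ToyEvent)}

raw = {"token": "abc", "name": "state.on_click", "payload": {}, "extra": 1}
print({k: v for k, v in raw.items() if k in TOY_EVENT_FIELDS})  # drops "extra"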
@ -431,6 +433,101 @@ class EventChain(EventActionsMixin):
invocation: Optional[Var] = dataclasses.field(default=None)
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls,
|
||||
value: EventType,
|
||||
args_spec: ArgsSpec | Sequence[ArgsSpec],
|
||||
key: Optional[str] = None,
|
||||
**event_chain_kwargs,
|
||||
) -> Union[EventChain, Var]:
|
||||
"""Create an event chain from a variety of input types.
|
||||
|
||||
Args:
|
||||
value: The value to create the event chain from.
|
||||
args_spec: The args_spec of the event trigger being bound.
|
||||
key: The key of the event trigger being bound.
|
||||
**event_chain_kwargs: Additional kwargs to pass to the EventChain constructor.
|
||||
|
||||
Returns:
|
||||
The event chain.
|
||||
|
||||
Raises:
|
||||
ValueError: If the value is not a valid event chain.
|
||||
"""
|
||||
# If it's an event chain var, return it.
|
||||
if isinstance(value, Var):
|
||||
if isinstance(value, EventChainVar):
|
||||
return value
|
||||
elif isinstance(value, EventVar):
|
||||
value = [value]
|
||||
elif issubclass(value._var_type, (EventChain, EventSpec)):
|
||||
return cls.create(
|
||||
value=value.guess_type(),
|
||||
args_spec=args_spec,
|
||||
key=key,
|
||||
**event_chain_kwargs,
|
||||
)
|
||||
else:
|
||||
raise ValueError(
|
||||
f"Invalid event chain: {value!s} of type {value._var_type}"
|
||||
)
|
||||
elif isinstance(value, EventChain):
|
||||
# Trust that the caller knows what they're doing passing an EventChain directly
|
||||
return value
|
||||
|
||||
# If the input is a single event handler, wrap it in a list.
|
||||
if isinstance(value, (EventHandler, EventSpec)):
|
||||
value = [value]
|
||||
|
||||
# If the input is a list of event handlers, create an event chain.
|
||||
if isinstance(value, List):
|
||||
events: List[Union[EventSpec, EventVar]] = []
|
||||
for v in value:
|
||||
if isinstance(v, (EventHandler, EventSpec)):
|
||||
# Call the event handler to get the event.
|
||||
events.append(call_event_handler(v, args_spec, key=key))
|
||||
elif isinstance(v, Callable):
|
||||
# Call the lambda to get the event chain.
|
||||
result = call_event_fn(v, args_spec, key=key)
|
||||
if isinstance(result, Var):
|
||||
raise ValueError(
|
||||
f"Invalid event chain: {v}. Cannot use a Var-returning "
|
||||
"lambda inside an EventChain list."
|
||||
)
|
||||
events.extend(result)
|
||||
elif isinstance(v, EventVar):
|
||||
events.append(v)
|
||||
else:
|
||||
raise ValueError(f"Invalid event: {v}")
|
||||
|
||||
# If the input is a callable, create an event chain.
|
||||
elif isinstance(value, Callable):
|
||||
result = call_event_fn(value, args_spec, key=key)
|
||||
if isinstance(result, Var):
|
||||
# Recursively call this function if the lambda returned an EventChain Var.
|
||||
return cls.create(
|
||||
value=result, args_spec=args_spec, key=key, **event_chain_kwargs
|
||||
)
|
||||
events = [*result]
|
||||
|
||||
# Otherwise, raise an error.
|
||||
else:
|
||||
raise ValueError(f"Invalid event chain: {value}")
|
||||
|
||||
# Add args to the event specs if necessary.
|
||||
events = [
|
||||
(e.with_args(get_handler_args(e)) if isinstance(e, EventSpec) else e)
|
||||
for e in events
|
||||
]
|
||||
|
||||
# Return the event chain.
|
||||
return cls(
|
||||
events=events,
|
||||
args_spec=args_spec,
|
||||
**event_chain_kwargs,
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass(
init=True,
@ -1100,7 +1197,7 @@ def call_function(
Returns:
EventSpec: An event that will execute the client side javascript.
"""
callback_kwargs = {}
callback_kwargs = {"callback": None}
if callback is not None:
callback_kwargs = {
"callback": format.format_queue_events(
@ -1494,7 +1591,7 @@ def get_handler_args(


def fix_events(
events: list[EventHandler | EventSpec] | None,
events: list[EventSpec | EventHandler] | None,
token: str,
router_data: dict[str, Any] | None = None,
) -> list[Event]:
@ -12,7 +12,7 @@ from reflex.event import EventChain, EventHandler, EventSpec, run_script
|
||||
from reflex.utils.imports import ImportVar
|
||||
from reflex.vars import VarData, get_unique_variable_name
|
||||
from reflex.vars.base import LiteralVar, Var
|
||||
from reflex.vars.function import FunctionVar
|
||||
from reflex.vars.function import ArgsFunctionOperationBuilder, FunctionVar
|
||||
|
||||
NoValue = object()
|
||||
|
||||
@ -45,6 +45,7 @@ class ClientStateVar(Var):
|
||||
# Track the names of the getters and setters
|
||||
_setter_name: str = dataclasses.field(default="")
|
||||
_getter_name: str = dataclasses.field(default="")
|
||||
_id_name: str = dataclasses.field(default="")
|
||||
|
||||
# Whether to add the var and setter to the global `refs` object for use in any Component.
|
||||
_global_ref: bool = dataclasses.field(default=True)
|
||||
@ -96,6 +97,7 @@ class ClientStateVar(Var):
|
||||
"""
|
||||
if var_name is None:
|
||||
var_name = get_unique_variable_name()
|
||||
id_name = "id_" + get_unique_variable_name()
|
||||
if not isinstance(var_name, str):
|
||||
raise ValueError("var_name must be a string.")
|
||||
if default is NoValue:
|
||||
@ -105,20 +107,24 @@ class ClientStateVar(Var):
|
||||
else:
|
||||
default_var = default
|
||||
setter_name = f"set{var_name.capitalize()}"
|
||||
hooks = {
|
||||
hooks: dict[str, VarData | None] = {
|
||||
f"const {id_name} = useId()": None,
|
||||
f"const [{var_name}, {setter_name}] = useState({default_var!s})": None,
|
||||
}
|
||||
imports = {
|
||||
"react": [ImportVar(tag="useState")],
|
||||
"react": [ImportVar(tag="useState"), ImportVar(tag="useId")],
|
||||
}
|
||||
if global_ref:
|
||||
hooks[f"{_client_state_ref(var_name)} = {var_name}"] = None
|
||||
hooks[f"{_client_state_ref(setter_name)} = {setter_name}"] = None
|
||||
hooks[f"{_client_state_ref(var_name)} ??= {{}}"] = None
|
||||
hooks[f"{_client_state_ref(setter_name)} ??= {{}}"] = None
|
||||
hooks[f"{_client_state_ref(var_name)}[{id_name}] = {var_name}"] = None
|
||||
hooks[f"{_client_state_ref(setter_name)}[{id_name}] = {setter_name}"] = None
|
||||
imports.update(_refs_import)
|
||||
return cls(
|
||||
_js_expr="",
|
||||
_setter_name=setter_name,
|
||||
_getter_name=var_name,
|
||||
_id_name=id_name,
|
||||
_global_ref=global_ref,
|
||||
_var_type=default_var._var_type,
|
||||
_var_data=VarData.merge(
|
||||
@ -144,10 +150,11 @@ class ClientStateVar(Var):
|
||||
return (
|
||||
Var(
|
||||
_js_expr=(
|
||||
_client_state_ref(self._getter_name)
|
||||
_client_state_ref(self._getter_name) + f"[{self._id_name}]"
|
||||
if self._global_ref
|
||||
else self._getter_name
|
||||
)
|
||||
),
|
||||
_var_data=self._var_data,
|
||||
)
|
||||
.to(self._var_type)
|
||||
._replace(
|
||||
@ -170,28 +177,43 @@ class ClientStateVar(Var):
|
||||
Returns:
|
||||
A special EventChain Var which will set the value when triggered.
|
||||
"""
|
||||
setter = (
|
||||
_client_state_ref(self._setter_name)
|
||||
if self._global_ref
|
||||
else self._setter_name
|
||||
)
|
||||
_var_data = VarData(imports=_refs_import if self._global_ref else {})
|
||||
|
||||
arg_name = get_unique_variable_name()
|
||||
setter = (
|
||||
ArgsFunctionOperationBuilder.create(
|
||||
args_names=(arg_name,),
|
||||
return_expr=Var("Array.prototype.forEach.call")
|
||||
.to(FunctionVar)
|
||||
.call(
|
||||
Var("Object.values")
|
||||
.to(FunctionVar)
|
||||
.call(Var(_client_state_ref(self._setter_name))),
|
||||
ArgsFunctionOperationBuilder.create(
|
||||
args_names=("setter",),
|
||||
return_expr=Var("setter").to(FunctionVar).call(Var(arg_name)),
|
||||
),
|
||||
),
|
||||
_var_data=_var_data,
|
||||
)
|
||||
if self._global_ref
|
||||
else Var(self._setter_name, _var_data=_var_data).to(FunctionVar)
|
||||
)
|
||||
|
||||
if value is not NoValue:
|
||||
# This is a hack to make it work like an EventSpec taking an arg
|
||||
value_var = LiteralVar.create(value)
|
||||
_var_data = VarData.merge(_var_data, value_var._get_all_var_data())
|
||||
value_str = str(value_var)
|
||||
|
||||
if value_str.startswith("_"):
|
||||
setter = ArgsFunctionOperationBuilder.create(
|
||||
# remove patterns of ["*"] from the value_str using regex
|
||||
arg = re.sub(r"\[\".*\"\]", "", value_str)
|
||||
setter = f"(({arg}) => {setter}({value_str}))"
|
||||
else:
|
||||
setter = f"(() => {setter}({value_str}))"
|
||||
return Var(
|
||||
_js_expr=setter,
|
||||
_var_data=_var_data,
|
||||
).to(FunctionVar, EventChain)
|
||||
args_names=(re.sub(r"\[\".*\"\]", "", value_str),)
|
||||
if value_str.startswith("_")
|
||||
else (),
|
||||
return_expr=setter.call(value_var),
|
||||
)
|
||||
|
||||
return setter.to(FunctionVar, EventChain)
|
||||
|
||||
@property
|
||||
def set(self) -> Var:
|
||||
|
@ -533,6 +533,7 @@ def asession(url: str | None = None) -> AsyncSession:
_AsyncSessionLocal[url] = sqlalchemy.ext.asyncio.async_sessionmaker(
bind=get_async_engine(url),
class_=AsyncSession,
expire_on_commit=False,
autocommit=False,
autoflush=False,
)
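Note: the new expire_on_commit=False keeps ORM attributes loaded after commit, which avoids surprise lazy loads in async code. A generic SQLAlchemy sketch of the same factory settings (placeholder URL, not Reflex's wiring):

from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine

engine = create_async_engine("sqlite+aiosqlite:///example.db")  # placeholder URL
SessionLocal = async_sessionmaker(
    bind=engine,
    class_=AsyncSession,
    expire_on_commit=False,
)

async def add_and_read(obj) -> None:
    async with SessionLocal() as session:
        session.add(obj)
        await session.commit()
        print(obj.id)  # still populated; the commit did not expire the instance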
@ -485,6 +485,11 @@ def deploy(
"--token",
help="token to use for auth",
),
config_path: Optional[str] = typer.Option(
None,
"--config",
help="path to the config file",
),
):
"""Deploy the app to the Reflex hosting service."""
from reflex_cli.utils import dependency
@ -514,7 +519,9 @@ def deploy(
if prerequisites.needs_reinit(frontend=True):
_init(name=config.app_name, loglevel=loglevel)
prerequisites.check_latest_package_version(constants.ReflexHostingCLI.MODULE_NAME)

extra: dict[str, str] = (
{"config_path": config_path} if config_path is not None else {}
)
hosting_cli.deploy(
app_name=app_name,
export_fn=lambda zip_dest_dir,
@ -540,6 +547,7 @@ def deploy(
loglevel=type(loglevel).INFO, # type: ignore
token=token,
project=project,
**extra,
)
170
reflex/state.py
@ -104,9 +104,11 @@ from reflex.utils.exceptions import (
|
||||
LockExpiredError,
|
||||
ReflexRuntimeError,
|
||||
SetUndefinedStateVarError,
|
||||
StateMismatchError,
|
||||
StateSchemaMismatchError,
|
||||
StateSerializationError,
|
||||
StateTooLargeError,
|
||||
UnretrievableVarValueError,
|
||||
)
|
||||
from reflex.utils.exec import is_testing_env
|
||||
from reflex.utils.serializers import serializer
|
||||
@ -143,6 +145,9 @@ HANDLED_PICKLE_ERRORS = (
|
||||
ValueError,
|
||||
)
|
||||
|
||||
# For BaseState.get_var_value
|
||||
VAR_TYPE = TypeVar("VAR_TYPE")
|
||||
|
||||
|
||||
def _no_chain_background_task(
|
||||
state_cls: Type["BaseState"], name: str, fn: Callable
|
||||
@ -1193,7 +1198,8 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
continue
|
||||
dynamic_vars[param] = DynamicRouteVar(
|
||||
fget=func,
|
||||
cache=True,
|
||||
auto_deps=False,
|
||||
deps=["router"],
|
||||
_js_expr=param,
|
||||
_var_data=VarData.from_state(cls),
|
||||
)
|
||||
@ -1537,7 +1543,7 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
# Return the direct parent of target_state_cls for subsequent linking.
|
||||
return parent_state
|
||||
|
||||
def _get_state_from_cache(self, state_cls: Type[BaseState]) -> BaseState:
|
||||
def _get_state_from_cache(self, state_cls: Type[T_STATE]) -> T_STATE:
|
||||
"""Get a state instance from the cache.
|
||||
|
||||
Args:
|
||||
@ -1545,11 +1551,19 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
|
||||
Returns:
|
||||
The instance of state_cls associated with this state's client_token.
|
||||
|
||||
Raises:
|
||||
StateMismatchError: If the state instance is not of the expected type.
|
||||
"""
|
||||
root_state = self._get_root_state()
|
||||
return root_state.get_substate(state_cls.get_full_name().split("."))
|
||||
substate = root_state.get_substate(state_cls.get_full_name().split("."))
|
||||
if not isinstance(substate, state_cls):
|
||||
raise StateMismatchError(
|
||||
f"Searched for state {state_cls.get_full_name()} but found {substate}."
|
||||
)
|
||||
return substate
|
||||
|
||||
async def _get_state_from_redis(self, state_cls: Type[BaseState]) -> BaseState:
|
||||
async def _get_state_from_redis(self, state_cls: Type[T_STATE]) -> T_STATE:
|
||||
"""Get a state instance from redis.
|
||||
|
||||
Args:
|
||||
@ -1560,6 +1574,7 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
|
||||
Raises:
|
||||
RuntimeError: If redis is not used in this backend process.
|
||||
StateMismatchError: If the state instance is not of the expected type.
|
||||
"""
|
||||
# Fetch all missing parent states from redis.
|
||||
parent_state_of_state_cls = await self._populate_parent_states(state_cls)
|
||||
@ -1571,14 +1586,22 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
f"Requested state {state_cls.get_full_name()} is not cached and cannot be accessed without redis. "
|
||||
"(All states should already be available -- this is likely a bug).",
|
||||
)
|
||||
return await state_manager.get_state(
|
||||
|
||||
state_in_redis = await state_manager.get_state(
|
||||
token=_substate_key(self.router.session.client_token, state_cls),
|
||||
top_level=False,
|
||||
get_substates=True,
|
||||
parent_state=parent_state_of_state_cls,
|
||||
)
|
||||
|
||||
async def get_state(self, state_cls: Type[BaseState]) -> BaseState:
|
||||
if not isinstance(state_in_redis, state_cls):
|
||||
raise StateMismatchError(
|
||||
f"Searched for state {state_cls.get_full_name()} but found {state_in_redis}."
|
||||
)
|
||||
|
||||
return state_in_redis
|
||||
|
||||
async def get_state(self, state_cls: Type[T_STATE]) -> T_STATE:
|
||||
"""Get an instance of the state associated with this token.
|
||||
|
||||
Allows for arbitrary access to sibling states from within an event handler.
|
||||
@@ -1598,6 +1621,42 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
# Slow case - fetch missing parent states from redis.
return await self._get_state_from_redis(state_cls)

async def get_var_value(self, var: Var[VAR_TYPE]) -> VAR_TYPE:
"""Get the value of an rx.Var from another state.

Args:
var: The var to get the value for.

Returns:
The value of the var.

Raises:
UnretrievableVarValueError: If the var does not have a literal value
or associated state.
"""
# Oopsie case: you didn't give me a Var... so get what you give.
if not isinstance(var, Var):
return var  # type: ignore

# Fast case: this is a literal var and the value is known.
if hasattr(var, "_var_value"):
return var._var_value

var_data = var._get_all_var_data()
if var_data is None or not var_data.state:
raise UnretrievableVarValueError(
f"Unable to retrieve value for {var._js_expr}: not associated with any state."
)
# Fastish case: this var belongs to this state
if var_data.state == self.get_full_name():
return getattr(self, var_data.field_name)

# Slow case: this var belongs to another state
other_state = await self.get_state(
self._get_root_state().get_class_substate(var_data.state)
)
return getattr(other_state, var_data.field_name)

def _get_event_handler(
self, event: Event
) -> tuple[BaseState | StateProxy, EventHandler]:
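A minimal usage sketch for the new BaseState.get_var_value helper added above (the states and vars below are hypothetical, not part of this change):

import reflex as rx


class SettingsState(rx.State):
    theme: str = "light"


class DashboardState(rx.State):
    status: str = ""

    @rx.event
    async def refresh(self):
        # Resolve the current value of a var that lives on a sibling state.
        theme = await self.get_var_value(SettingsState.theme)
        self.status = f"theme is {theme}"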
@@ -1717,9 +1776,9 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
except Exception as ex:
state._clean()

app_instance = getattr(prerequisites.get_app(), constants.CompileVars.APP)

event_specs = app_instance.backend_exception_handler(ex)
event_specs = (
prerequisites.get_and_validate_app().app.backend_exception_handler(ex)
)

if event_specs is None:
return StateUpdate()
@@ -1829,9 +1888,9 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
except Exception as ex:
telemetry.send_error(ex, context="backend")

app_instance = getattr(prerequisites.get_app(), constants.CompileVars.APP)

event_specs = app_instance.backend_exception_handler(ex)
event_specs = (
prerequisites.get_and_validate_app().app.backend_exception_handler(ex)
)

yield state._as_state_update(
handler,
@@ -2274,6 +2333,9 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
return state


T_STATE = TypeVar("T_STATE", bound=BaseState)


class State(BaseState):
"""The app Base State."""

@@ -2341,8 +2403,9 @@ class FrontendEventExceptionState(State):
component_stack: The stack trace of the component where the exception occurred.

"""
app_instance = getattr(prerequisites.get_app(), constants.CompileVars.APP)
app_instance.frontend_exception_handler(Exception(stack))
prerequisites.get_and_validate_app().app.frontend_exception_handler(
Exception(stack)
)


class UpdateVarsInternalState(State):
@@ -2380,15 +2443,16 @@ class OnLoadInternalState(State):
The list of events to queue for on load handling.
"""
# Do not app._compile()! It should be already compiled by now.
app = getattr(prerequisites.get_app(), constants.CompileVars.APP)
load_events = app.get_load_events(self.router.page.path)
load_events = prerequisites.get_and_validate_app().app.get_load_events(
self.router.page.path
)
if not load_events:
self.is_hydrated = True
return  # Fast path for navigation with no on_load events defined.
self.is_hydrated = False
return [
*fix_events(
load_events,
cast(list[Union[EventSpec, EventHandler]], load_events),
self.router.session.client_token,
router_data=self.router_data,
),
@@ -2547,7 +2611,7 @@ class StateProxy(wrapt.ObjectProxy):
"""
super().__init__(state_instance)
# compile is not relevant to backend logic
self._self_app = getattr(prerequisites.get_app(), constants.CompileVars.APP)
self._self_app = prerequisites.get_and_validate_app().app
self._self_substate_path = tuple(state_instance.get_full_name().split("."))
self._self_actx = None
self._self_mutable = False
@@ -3640,13 +3704,15 @@ def get_state_manager() -> StateManager:
Returns:
The state manager.
"""
app = getattr(prerequisites.get_app(), constants.CompileVars.APP)
return app.state_manager
return prerequisites.get_and_validate_app().app.state_manager


class MutableProxy(wrapt.ObjectProxy):
"""A proxy for a mutable object that tracks changes."""

# Hint for finding the base class of the proxy.
__base_proxy__ = "MutableProxy"

# Methods on wrapped objects which should mark the state as dirty.
__mark_dirty_attrs__ = {
"add",
@@ -3689,6 +3755,39 @@ class MutableProxy(wrapt.ObjectProxy):
BaseModelV1,
)

# Dynamically generated classes for tracking dataclass mutations.
__dataclass_proxies__: Dict[type, type] = {}

def __new__(cls, wrapped: Any, *args, **kwargs) -> MutableProxy:
"""Create a proxy instance for a mutable object that tracks changes.

Args:
wrapped: The object to proxy.
*args: Other args passed to MutableProxy (ignored).
**kwargs: Other kwargs passed to MutableProxy (ignored).

Returns:
The proxy instance.
"""
if dataclasses.is_dataclass(wrapped):
wrapped_cls = type(wrapped)
wrapper_cls_name = wrapped_cls.__name__ + cls.__name__
# Find the associated class
if wrapper_cls_name not in cls.__dataclass_proxies__:
# Create a new class that has the __dataclass_fields__ defined
cls.__dataclass_proxies__[wrapper_cls_name] = type(
wrapper_cls_name,
(cls,),
{
dataclasses._FIELDS: getattr(  # pyright: ignore [reportGeneralTypeIssues]
wrapped_cls,
dataclasses._FIELDS,  # pyright: ignore [reportGeneralTypeIssues]
),
},
)
cls = cls.__dataclass_proxies__[wrapper_cls_name]
return super().__new__(cls)

def __init__(self, wrapped: Any, state: BaseState, field_name: str):
"""Create a proxy for a mutable object that tracks changes.

@@ -3745,7 +3844,27 @@ class MutableProxy(wrapt.ObjectProxy):
Returns:
Whether the value is of a mutable type.
"""
return isinstance(value, cls.__mutable_types__)
return isinstance(value, cls.__mutable_types__) or (
dataclasses.is_dataclass(value) and not isinstance(value, Var)
)

@staticmethod
def _is_called_from_dataclasses_internal() -> bool:
"""Check if the current function is called from dataclasses helper.

Returns:
Whether the current function is called from dataclasses internal code.
"""
# Walk up the stack a bit to see if we are called from dataclasses
# internal code, for example `asdict` or `astuple`.
frame = inspect.currentframe()
for _ in range(5):
# Why not `inspect.stack()` -- this is much faster!
if not (frame := frame and frame.f_back):
break
if inspect.getfile(frame) == dataclasses.__file__:
return True
return False

def _wrap_recursive(self, value: Any) -> Any:
"""Wrap a value recursively if it is mutable.
@@ -3756,9 +3875,13 @@ class MutableProxy(wrapt.ObjectProxy):
Returns:
The wrapped value.
"""
# When called from dataclasses internal code, return the unwrapped value
if self._is_called_from_dataclasses_internal():
return value
# Recursively wrap mutable types, but do not re-wrap MutableProxy instances.
if self._is_mutable_type(value) and not isinstance(value, MutableProxy):
return type(self)(
base_cls = globals()[self.__base_proxy__]
return base_cls(
wrapped=value,
state=self._self_state,
field_name=self._self_field_name,
@@ -3966,6 +4089,9 @@ class ImmutableMutableProxy(MutableProxy):
to modify the wrapped object when the StateProxy is immutable.
"""

# Ensure that recursively wrapped proxies use ImmutableMutableProxy as base.
__base_proxy__ = "ImmutableMutableProxy"

def _mark_dirty(
self,
wrapped=None,
@@ -52,6 +52,7 @@ from reflex.state import (
StateManagerRedis,
reload_state_module,
)
from reflex.utils import console

try:
from selenium import webdriver  # pyright: ignore [reportMissingImports]
@@ -385,7 +386,7 @@ class AppHarness:
)
if not line:
break
print(line)  # for pytest diagnosis
print(line)  # for pytest diagnosis #noqa: T201
m = re.search(reflex.constants.Next.FRONTEND_LISTENING_REGEX, line)
if m is not None:
self.frontend_url = m.group(1)
@@ -403,11 +404,10 @@ class AppHarness:
)
# catch I/O operation on closed file.
except ValueError as e:
print(e)
console.error(str(e))
break
if not line:
break
print(line)

self.frontend_output_thread = threading.Thread(target=consume_frontend_output)
self.frontend_output_thread.start()
@@ -2,6 +2,11 @@

from __future__ import annotations

import inspect
import shutil
from pathlib import Path
from types import FrameType

from rich.console import Console
from rich.progress import MofNCompleteColumn, Progress, TimeElapsedColumn
from rich.prompt import Prompt
@@ -188,6 +193,33 @@ def warn(msg: str, dedupe: bool = False, **kwargs):
print(f"[orange1]Warning: {msg}[/orange1]", **kwargs)


def _get_first_non_framework_frame() -> FrameType | None:
import click
import typer
import typing_extensions

import reflex as rx

# Exclude utility modules that should never be the source of deprecated reflex usage.
exclude_modules = [click, rx, typer, typing_extensions]
exclude_roots = [
p.parent.resolve()
if (p := Path(m.__file__)).name == "__init__.py"
else p.resolve()
for m in exclude_modules
]
# Specifically exclude the reflex cli module.
if reflex_bin := shutil.which(b"reflex"):
exclude_roots.append(Path(reflex_bin.decode()))

frame = inspect.currentframe()
while frame := frame and frame.f_back:
frame_path = Path(inspect.getfile(frame)).resolve()
if not any(frame_path.is_relative_to(root) for root in exclude_roots):
break
return frame


def deprecate(
feature_name: str,
reason: str,
@@ -206,15 +238,27 @@ def deprecate(
dedupe: If True, suppress multiple console logs of deprecation message.
kwargs: Keyword arguments to pass to the print function.
"""
if feature_name not in _EMITTED_DEPRECATION_WARNINGS:
dedupe_key = feature_name
loc = ""

# See if we can find where the deprecation exists in "user code"
origin_frame = _get_first_non_framework_frame()
if origin_frame is not None:
filename = Path(origin_frame.f_code.co_filename)
if filename.is_relative_to(Path.cwd()):
filename = filename.relative_to(Path.cwd())
loc = f"{filename}:{origin_frame.f_lineno}"
dedupe_key = f"{dedupe_key} {loc}"

if dedupe_key not in _EMITTED_DEPRECATION_WARNINGS:
msg = (
f"{feature_name} has been deprecated in version {deprecation_version} {reason.rstrip('.')}. It will be completely "
f"removed in {removal_version}"
f"removed in {removal_version}. ({loc})"
)
if _LOG_LEVEL <= LogLevel.WARNING:
print(f"[yellow]DeprecationWarning: {msg}[/yellow]", **kwargs)
if dedupe:
_EMITTED_DEPRECATION_WARNINGS.add(feature_name)
_EMITTED_DEPRECATION_WARNINGS.add(dedupe_key)


def error(msg: str, dedupe: bool = False, **kwargs):
@@ -1,6 +1,6 @@
"""Custom Exceptions."""

from typing import NoReturn
from typing import Any, NoReturn


class ReflexError(Exception):
@@ -31,6 +31,22 @@ class ComponentTypeError(ReflexError, TypeError):
"""Custom TypeError for component related errors."""


class ChildrenTypeError(ComponentTypeError):
"""Raised when the children prop of a component is not a valid type."""

def __init__(self, component: str, child: Any):
"""Initialize the exception.

Args:
component: The name of the component.
child: The child that caused the error.
"""
super().__init__(
f"Component {component} received child {child} of type {type(child)}. "
"Accepted types are other components, state vars, or primitive Python types (dict excluded)."
)


class EventHandlerTypeError(ReflexError, TypeError):
"""Custom TypeError for event handler related errors."""

@@ -163,10 +179,18 @@ class StateSerializationError(ReflexError):
"""Raised when the state cannot be serialized."""


class StateMismatchError(ReflexError, ValueError):
"""Raised when the state retrieved does not match the expected state."""


class SystemPackageMissingError(ReflexError):
"""Raised when a system package is missing."""


class EventDeserializationError(ReflexError, ValueError):
"""Raised when an event cannot be deserialized."""


def raise_system_package_missing_error(package: str) -> NoReturn:
"""Raise a SystemPackageMissingError.

@@ -187,3 +211,7 @@ def raise_system_package_missing_error(package: str) -> NoReturn:

class InvalidLockWarningThresholdError(ReflexError):
"""Raised when an invalid lock warning threshold is provided."""


class UnretrievableVarValueError(ReflexError):
"""Raised when the value of a var is not retrievable."""
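For orientation, a sketch of when the new ChildrenTypeError surfaces (the snippet below is hypothetical, not from this diff); passing a dict as a component child is now rejected with this exception:

import reflex as rx
from reflex.utils.exceptions import ChildrenTypeError

try:
    # dicts are not valid children; use keyword props or another component instead
    rx.vstack({"label": "not allowed"})
except ChildrenTypeError as err:
    print(err)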
@@ -240,6 +240,28 @@ def run_backend(
run_uvicorn_backend(host, port, loglevel)


def get_reload_dirs() -> list[str]:
"""Get the reload directories for the backend.

Returns:
The reload directories for the backend.
"""
config = get_config()
reload_dirs = [config.app_name]
if config.app_module is not None and config.app_module.__file__:
module_path = Path(config.app_module.__file__).resolve().parent
while module_path.parent.name:
for parent_file in module_path.parent.iterdir():
if parent_file == "__init__.py":
# go up a level to find dir without `__init__.py`
module_path = module_path.parent
break
else:
break
reload_dirs.append(str(module_path))
return reload_dirs


def run_uvicorn_backend(host, port, loglevel: LogLevel):
"""Run the backend in development mode using Uvicorn.

@@ -256,7 +278,7 @@ def run_uvicorn_backend(host, port, loglevel: LogLevel):
port=port,
log_level=loglevel.value,
reload=True,
reload_dirs=[get_config().app_name],
reload_dirs=get_reload_dirs(),
)


@@ -281,7 +303,7 @@ def run_granian_backend(host, port, loglevel: LogLevel):
interface=Interfaces.ASGI,
log_level=LogLevels(loglevel.value),
reload=True,
reload_paths=[Path(get_config().app_name)],
reload_paths=get_reload_dirs(),
reload_ignore_dirs=[".web"],
).serve()
except ImportError:
@@ -17,19 +17,20 @@ import stat
import sys
import tempfile
import time
import typing
import zipfile
from datetime import datetime
from pathlib import Path
from types import ModuleType
from typing import Callable, List, Optional
from typing import Callable, List, NamedTuple, Optional

import httpx
import typer
from alembic.util.exc import CommandError
from packaging import version
from redis import Redis as RedisSync
from redis import exceptions
from redis.asyncio import Redis
from redis.exceptions import RedisError

from reflex import constants, model
from reflex.compiler import templates
@@ -42,9 +43,19 @@ from reflex.utils.exceptions import (
from reflex.utils.format import format_library_name
from reflex.utils.registry import _get_npm_registry

if typing.TYPE_CHECKING:
from reflex.app import App

CURRENTLY_INSTALLING_NODE = False


class AppInfo(NamedTuple):
"""A tuple containing the app instance and module."""

app: App
module: ModuleType


@dataclasses.dataclass(frozen=True)
class Template:
"""A template for a Reflex app."""
@@ -267,6 +278,22 @@ def windows_npm_escape_hatch() -> bool:
return environment.REFLEX_USE_NPM.get()


def _check_app_name(config: Config):
"""Check if the app name is set in the config.

Args:
config: The config object.

Raises:
RuntimeError: If the app name is not set in the config.
"""
if not config.app_name:
raise RuntimeError(
"Cannot get the app module because `app_name` is not set in rxconfig! "
"If this error occurs in a reflex test case, ensure that `get_app` is mocked."
)


def get_app(reload: bool = False) -> ModuleType:
"""Get the app module based on the default config.

@@ -277,22 +304,23 @@ def get_app(reload: bool = False) -> ModuleType:
The app based on the default config.

Raises:
RuntimeError: If the app name is not set in the config.
Exception: If an error occurs while getting the app module.
"""
from reflex.utils import telemetry

try:
environment.RELOAD_CONFIG.set(reload)
config = get_config()
if not config.app_name:
raise RuntimeError(
"Cannot get the app module because `app_name` is not set in rxconfig! "
"If this error occurs in a reflex test case, ensure that `get_app` is mocked."
)

_check_app_name(config)

module = config.module
sys.path.insert(0, str(Path.cwd()))
app = __import__(module, fromlist=(constants.CompileVars.APP,))

app = (
__import__(module, fromlist=(constants.CompileVars.APP,))
if not config.app_module
else config.app_module
)
if reload:
from reflex.state import reload_state_module

@@ -301,11 +329,34 @@

# Reload the app module.
importlib.reload(app)

return app
except Exception as ex:
telemetry.send_error(ex, context="frontend")
raise
else:
return app


def get_and_validate_app(reload: bool = False) -> AppInfo:
"""Get the app instance based on the default config and validate it.

Args:
reload: Re-import the app module from disk

Returns:
The app instance and the app module.

Raises:
RuntimeError: If the app instance is not an instance of rx.App.
"""
from reflex.app import App

app_module = get_app(reload=reload)
app = getattr(app_module, constants.CompileVars.APP)
if not isinstance(app, App):
raise RuntimeError(
"The app instance in the specified app_module_import in rxconfig must be an instance of rx.App."
)
return AppInfo(app=app, module=app_module)


def get_compiled_app(reload: bool = False, export: bool = False) -> ModuleType:
@@ -318,8 +369,7 @@ def get_compiled_app(reload: bool = False, export: bool = False) -> ModuleType:
Returns:
The compiled app based on the default config.
"""
app_module = get_app(reload=reload)
app = getattr(app_module, constants.CompileVars.APP)
app, app_module = get_and_validate_app(reload=reload)
# For py3.9 compatibility when redis is used, we MUST add any decorator pages
# before compiling the app in a thread to avoid event loop error (REF-2172).
app._apply_decorated_pages()
@@ -333,10 +383,11 @@ def get_redis() -> Redis | None:
Returns:
The asynchronous redis client.
"""
if isinstance((redis_url_or_options := parse_redis_url()), str):
return Redis.from_url(redis_url_or_options)
elif isinstance(redis_url_or_options, dict):
return Redis(**redis_url_or_options)
if (redis_url := parse_redis_url()) is not None:
return Redis.from_url(
redis_url,
retry_on_error=[RedisError],
)
return None


@@ -346,14 +397,15 @@ def get_redis_sync() -> RedisSync | None:
Returns:
The synchronous redis client.
"""
if isinstance((redis_url_or_options := parse_redis_url()), str):
return RedisSync.from_url(redis_url_or_options)
elif isinstance(redis_url_or_options, dict):
return RedisSync(**redis_url_or_options)
if (redis_url := parse_redis_url()) is not None:
return RedisSync.from_url(
redis_url,
retry_on_error=[RedisError],
)
return None


def parse_redis_url() -> str | dict | None:
def parse_redis_url() -> str | None:
"""Parse the REDIS_URL in config if applicable.

Returns:
@@ -387,7 +439,7 @@ async def get_redis_status() -> dict[str, bool | None]:
redis_client.ping()
else:
status = None
except exceptions.RedisError:
except RedisError:
status = False

return {"redis": status}
@@ -608,10 +660,14 @@ def initialize_web_directory():
init_reflex_json(project_hash=project_hash)


def _turbopack_flag() -> str:
return " --turbopack" if environment.REFLEX_USE_TURBOPACK.get() else ""


def _compile_package_json():
return templates.PACKAGE_JSON.render(
scripts={
"dev": constants.PackageJson.Commands.DEV,
"dev": constants.PackageJson.Commands.DEV + _turbopack_flag(),
"export": constants.PackageJson.Commands.EXPORT,
"export_sitemap": constants.PackageJson.Commands.EXPORT_SITEMAP,
"prod": constants.PackageJson.Commands.PROD,
@@ -1147,11 +1203,12 @@ def ensure_reflex_installation_id() -> Optional[int]:
if installation_id is None:
installation_id = random.getrandbits(128)
installation_id_file.write_text(str(installation_id))
# If we get here, installation_id is definitely set
return installation_id
except Exception as e:
console.debug(f"Failed to ensure reflex installation id: {e}")
return None
else:
# If we get here, installation_id is definitely set
return installation_id


def initialize_reflex_user_directory():
@@ -1365,19 +1422,22 @@ def create_config_init_app_from_remote_template(app_name: str, template_url: str
except OSError as ose:
console.error(f"Failed to create temp directory for extracting zip: {ose}")
raise typer.Exit(1) from ose

try:
zipfile.ZipFile(zip_file_path).extractall(path=unzip_dir)
# The zip file downloaded from github looks like:
# repo-name-branch/**/*, so we need to remove the top level directory.
if len(subdirs := os.listdir(unzip_dir)) != 1:
console.error(f"Expected one directory in the zip, found {subdirs}")
raise typer.Exit(1)
template_dir = unzip_dir / subdirs[0]
console.debug(f"Template folder is located at {template_dir}")
except Exception as uze:
console.error(f"Failed to unzip the template: {uze}")
raise typer.Exit(1) from uze

if len(subdirs := os.listdir(unzip_dir)) != 1:
console.error(f"Expected one directory in the zip, found {subdirs}")
raise typer.Exit(1)

template_dir = unzip_dir / subdirs[0]
console.debug(f"Template folder is located at {template_dir}")

# Move the rxconfig file here first.
path_ops.mv(str(template_dir / constants.Config.FILE), constants.Config.FILE)
new_config = get_config(reload=True)
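A small sketch of how the new get_and_validate_app helper is consumed (this mirrors the call sites changed elsewhere in this diff and assumes it runs from inside a Reflex project directory):

from reflex.utils import prerequisites

# AppInfo is a NamedTuple, so it unpacks into the rx.App instance and its module.
app, app_module = prerequisites.get_and_validate_app()
print(type(app).__name__, app_module.__name__)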
@@ -17,6 +17,7 @@ import typer
from redis.exceptions import RedisError

from reflex import constants
from reflex.config import environment
from reflex.utils import console, path_ops, prerequisites


@@ -156,24 +157,30 @@ def new_process(args, run: bool = False, show_logs: bool = False, **kwargs):
Raises:
Exit: When attempting to run a command with a None value.
"""
node_bin_path = str(path_ops.get_node_bin_path())
if not node_bin_path and not prerequisites.CURRENTLY_INSTALLING_NODE:
console.warn(
"The path to the Node binary could not be found. Please ensure that Node is properly "
"installed and added to your system's PATH environment variable or try running "
"`reflex init` again."
)
# Check for invalid command first.
if None in args:
console.error(f"Invalid command: {args}")
raise typer.Exit(1)
# Add the node bin path to the PATH environment variable.

path_env: str = os.environ.get("PATH", "")

# Add node_bin_path to the PATH environment variable.
if not environment.REFLEX_BACKEND_ONLY.get():
node_bin_path = str(path_ops.get_node_bin_path())
if not node_bin_path and not prerequisites.CURRENTLY_INSTALLING_NODE:
console.warn(
"The path to the Node binary could not be found. Please ensure that Node is properly "
"installed and added to your system's PATH environment variable or try running "
"`reflex init` again."
)
path_env = os.pathsep.join([node_bin_path, path_env])

env: dict[str, str] = {
**os.environ,
"PATH": os.pathsep.join(
[node_bin_path if node_bin_path else "", os.environ["PATH"]]
),  # type: ignore
"PATH": path_env,
**kwargs.pop("env", {}),
}

kwargs = {
"env": env,
"stderr": None if show_logs else subprocess.STDOUT,
@@ -1202,4 +1202,4 @@ class PyiGenerator:
or "Var[Template]" in line
):
line = line.rstrip() + " # type: ignore\n"
print(line, end="")
print(line, end="")  # noqa: T201
@@ -156,9 +156,10 @@ def _prepare_event(event: str, **kwargs) -> dict:
def _send_event(event_data: dict) -> bool:
try:
httpx.post(POSTHOG_API_URL, json=event_data)
return True
except Exception:
return False
else:
return True


def _send(event, telemetry_enabled, **kwargs):
@@ -127,7 +127,7 @@ class VarData:
state: str = "",
field_name: str = "",
imports: ImportDict | ParsedImportDict | None = None,
hooks: dict[str, None] | None = None,
hooks: dict[str, VarData | None] | None = None,
deps: list[Var] | None = None,
position: Hooks.HookPosition | None = None,
):
@@ -194,7 +194,9 @@ class VarData:
(var_data.state for var_data in all_var_datas if var_data.state), ""
)

hooks = {hook: None for var_data in all_var_datas for hook in var_data.hooks}
hooks: dict[str, VarData | None] = {
hook: None for var_data in all_var_datas for hook in var_data.hooks
}

_imports = imports.merge_imports(
*(var_data.imports for var_data in all_var_datas)
@@ -559,7 +561,7 @@ class Var(Generic[VAR_TYPE]):
if _var_is_local is not None:
console.deprecate(
feature_name="_var_is_local",
reason="The _var_is_local argument is not supported for Var."
reason="The _var_is_local argument is not supported for Var. "
"If you want to create a Var from a raw Javascript expression, use the constructor directly",
deprecation_version="0.6.0",
removal_version="0.7.0",
@@ -567,7 +569,7 @@ class Var(Generic[VAR_TYPE]):
if _var_is_string is not None:
console.deprecate(
feature_name="_var_is_string",
reason="The _var_is_string argument is not supported for Var."
reason="The _var_is_string argument is not supported for Var. "
"If you want to create a Var from a raw Javascript expression, use the constructor directly",
deprecation_version="0.6.0",
removal_version="0.7.0",
@@ -579,7 +581,7 @@ class Var(Generic[VAR_TYPE]):

# Try to pull the imports and hooks from contained values.
if not isinstance(value, str):
return LiteralVar.create(value)
return LiteralVar.create(value, _var_data=_var_data)

if _var_is_string is False or _var_is_local is True:
return cls(
@@ -1836,7 +1838,7 @@ class ComputedVar(Var[RETURN_TYPE]):
self,
fget: Callable[[BASE_STATE], RETURN_TYPE],
initial_value: RETURN_TYPE | types.Unset = types.Unset(),
cache: bool = False,
cache: bool = True,
deps: Optional[List[Union[str, Var]]] = None,
auto_deps: bool = True,
interval: Optional[Union[int, datetime.timedelta]] = None,
@@ -2251,7 +2253,7 @@ if TYPE_CHECKING:
def computed_var(
fget: None = None,
initial_value: Any | types.Unset = types.Unset(),
cache: bool = False,
cache: bool = True,
deps: Optional[List[Union[str, Var]]] = None,
auto_deps: bool = True,
interval: Optional[Union[datetime.timedelta, int]] = None,
@@ -2264,7 +2266,7 @@ def computed_var(
def computed_var(
fget: Callable[[BASE_STATE], RETURN_TYPE],
initial_value: RETURN_TYPE | types.Unset = types.Unset(),
cache: bool = False,
cache: bool = True,
deps: Optional[List[Union[str, Var]]] = None,
auto_deps: bool = True,
interval: Optional[Union[datetime.timedelta, int]] = None,
@@ -2276,7 +2278,7 @@ def computed_var(
def computed_var(
fget: Callable[[BASE_STATE], Any] | None = None,
initial_value: Any | types.Unset = types.Unset(),
cache: bool = False,
cache: bool = True,
deps: Optional[List[Union[str, Var]]] = None,
auto_deps: bool = True,
interval: Optional[Union[datetime.timedelta, int]] = None,
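The cache default flips to True above, so a bare @rx.var is now cached; a sketch of the resulting idiom (the state below is hypothetical, not from this diff):

import reflex as rx


class CounterState(rx.State):
    count: int = 0

    @rx.var  # cached by default after this change
    def doubled(self) -> int:
        return self.count * 2

    @rx.var(cache=False)  # opt out to recompute on every state update
    def doubled_uncached(self) -> int:
        return self.count * 2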
@@ -390,6 +390,7 @@ class ArgsFunctionOperation(CachedVarOperation, FunctionVar):
Returns:
The function var.
"""
return_expr = Var.create(return_expr)
return cls(
_js_expr="",
_var_type=_var_type,
@@ -445,6 +446,7 @@ class ArgsFunctionOperationBuilder(CachedVarOperation, BuilderFunctionVar):
Returns:
The function var.
"""
return_expr = Var.create(return_expr)
return cls(
_js_expr="",
_var_type=_var_type,
@@ -20,7 +20,6 @@ from typing import (
from reflex.constants.base import Dirs
from reflex.utils.exceptions import PrimitiveUnserializableToJSON, VarTypeError
from reflex.utils.imports import ImportDict, ImportVar
from reflex.utils.types import is_optional

from .base import (
CustomVarOperationReturn,
@@ -431,7 +430,7 @@ class NumberVar(Var[NUMBER_T], python_types=(int, float)):
"""
if not isinstance(other, NUMBER_TYPES):
raise_unsupported_operand_types("<", (type(self), type(other)))
return less_than_operation(self, +other)
return less_than_operation(+self, +other)

@overload
def __le__(self, other: number_types) -> BooleanVar: ...
@@ -450,7 +449,7 @@ class NumberVar(Var[NUMBER_T], python_types=(int, float)):
"""
if not isinstance(other, NUMBER_TYPES):
raise_unsupported_operand_types("<=", (type(self), type(other)))
return less_than_or_equal_operation(self, +other)
return less_than_or_equal_operation(+self, +other)

def __eq__(self, other: Any):
"""Equal comparison.
@@ -462,7 +461,7 @@ class NumberVar(Var[NUMBER_T], python_types=(int, float)):
The result of the comparison.
"""
if isinstance(other, NUMBER_TYPES):
return equal_operation(self, +other)
return equal_operation(+self, +other)
return equal_operation(self, other)

def __ne__(self, other: Any):
@@ -475,7 +474,7 @@ class NumberVar(Var[NUMBER_T], python_types=(int, float)):
The result of the comparison.
"""
if isinstance(other, NUMBER_TYPES):
return not_equal_operation(self, +other)
return not_equal_operation(+self, +other)
return not_equal_operation(self, other)

@overload
@@ -495,7 +494,7 @@ class NumberVar(Var[NUMBER_T], python_types=(int, float)):
"""
if not isinstance(other, NUMBER_TYPES):
raise_unsupported_operand_types(">", (type(self), type(other)))
return greater_than_operation(self, +other)
return greater_than_operation(+self, +other)

@overload
def __ge__(self, other: number_types) -> BooleanVar: ...
@@ -514,17 +513,7 @@ class NumberVar(Var[NUMBER_T], python_types=(int, float)):
"""
if not isinstance(other, NUMBER_TYPES):
raise_unsupported_operand_types(">=", (type(self), type(other)))
return greater_than_or_equal_operation(self, +other)

def bool(self):
"""Boolean conversion.

Returns:
The boolean value of the number.
"""
if is_optional(self._var_type):
return boolify((self != None) & (self != 0))  # noqa: E711
return self != 0
return greater_than_or_equal_operation(+self, +other)

def _is_strict_float(self) -> bool:
"""Check if the number is a float.
@ -271,6 +271,25 @@ class StringVar(Var[STRING_TYPE], python_types=str):
|
||||
raise_unsupported_operand_types("startswith", (type(self), type(prefix)))
|
||||
return string_starts_with_operation(self, prefix)
|
||||
|
||||
@overload
|
||||
def endswith(self, suffix: StringVar | str) -> BooleanVar: ...
|
||||
|
||||
@overload
|
||||
def endswith(self, suffix: NoReturn) -> NoReturn: ...
|
||||
|
||||
def endswith(self, suffix: Any) -> BooleanVar:
|
||||
"""Check if the string ends with a suffix.
|
||||
|
||||
Args:
|
||||
suffix: The suffix.
|
||||
|
||||
Returns:
|
||||
The string ends with operation.
|
||||
"""
|
||||
if not isinstance(suffix, (StringVar, str)):
|
||||
raise_unsupported_operand_types("endswith", (type(self), type(suffix)))
|
||||
return string_ends_with_operation(self, suffix)
|
||||
|
||||
@overload
|
||||
def __lt__(self, other: StringVar | str) -> BooleanVar: ...
|
||||
|
||||
@ -501,6 +520,24 @@ def string_starts_with_operation(
|
||||
)
|
||||
|
||||
|
||||
@var_operation
|
||||
def string_ends_with_operation(
|
||||
full_string: StringVar[Any], suffix: StringVar[Any] | str
|
||||
):
|
||||
"""Check if a string ends with a suffix.
|
||||
|
||||
Args:
|
||||
full_string: The full string.
|
||||
suffix: The suffix.
|
||||
|
||||
Returns:
|
||||
Whether the string ends with the suffix.
|
||||
"""
|
||||
return var_operation_return(
|
||||
js_expression=f"{full_string}.endsWith({suffix})", var_type=bool
|
||||
)
|
||||
|
||||
|
||||
@var_operation
|
||||
def string_item_operation(string: StringVar[Any], index: NumberVar | int):
|
||||
"""Get an item from a string.
|
||||
|
@ -25,7 +25,7 @@ def _pid_exists(pid):
|
||||
|
||||
def _wait_for_port(port, server_pid, timeout) -> Tuple[bool, str]:
|
||||
start = time.time()
|
||||
print(f"Waiting for up to {timeout} seconds for port {port} to start listening.")
|
||||
print(f"Waiting for up to {timeout} seconds for port {port} to start listening.") # noqa: T201
|
||||
while True:
|
||||
if not _pid_exists(server_pid):
|
||||
return False, f"Server PID {server_pid} is not running."
|
||||
@ -56,9 +56,9 @@ def main():
|
||||
for f in as_completed(futures):
|
||||
ok, msg = f.result()
|
||||
if ok:
|
||||
print(f"OK: {msg}")
|
||||
print(f"OK: {msg}") # noqa: T201
|
||||
else:
|
||||
print(f"FAIL: {msg}")
|
||||
print(f"FAIL: {msg}") # noqa: T201
|
||||
exit(1)
|
||||
|
||||
|
||||
|
@ -22,22 +22,22 @@ def ComputedVars():
|
||||
count: int = 0
|
||||
|
||||
# cached var with dep on count
|
||||
@rx.var(cache=True, interval=15)
|
||||
@rx.var(interval=15)
|
||||
def count1(self) -> int:
|
||||
return self.count
|
||||
|
||||
# cached backend var with dep on count
|
||||
@rx.var(cache=True, interval=15, backend=True)
|
||||
@rx.var(interval=15, backend=True)
|
||||
def count1_backend(self) -> int:
|
||||
return self.count
|
||||
|
||||
# same as above but implicit backend with `_` prefix
|
||||
@rx.var(cache=True, interval=15)
|
||||
@rx.var(interval=15)
|
||||
def _count1_backend(self) -> int:
|
||||
return self.count
|
||||
|
||||
# explicit disabled auto_deps
|
||||
@rx.var(interval=15, cache=True, auto_deps=False)
|
||||
@rx.var(interval=15, auto_deps=False)
|
||||
def count3(self) -> int:
|
||||
# this will not add deps, because auto_deps is False
|
||||
print(self.count1)
|
||||
@ -45,19 +45,27 @@ def ComputedVars():
|
||||
return self.count
|
||||
|
||||
# explicit dependency on count var
|
||||
@rx.var(cache=True, deps=["count"], auto_deps=False)
|
||||
@rx.var(deps=["count"], auto_deps=False)
|
||||
def depends_on_count(self) -> int:
|
||||
return self.count
|
||||
|
||||
# explicit dependency on count1 var
|
||||
@rx.var(cache=True, deps=[count1], auto_deps=False)
|
||||
@rx.var(deps=[count1], auto_deps=False)
|
||||
def depends_on_count1(self) -> int:
|
||||
return self.count
|
||||
|
||||
@rx.var(deps=[count3], auto_deps=False, cache=True)
|
||||
@rx.var(
|
||||
deps=[count3],
|
||||
auto_deps=False,
|
||||
)
|
||||
def depends_on_count3(self) -> int:
|
||||
return self.count
|
||||
|
||||
# special floats should be properly decoded on the frontend
|
||||
@rx.var(cache=True, initial_value=[])
|
||||
def special_floats(self) -> list[float]:
|
||||
return [42.9, float("nan"), float("inf"), float("-inf")]
|
||||
|
||||
@rx.event
|
||||
def increment(self):
|
||||
self.count += 1
|
||||
@ -103,6 +111,11 @@ def ComputedVars():
|
||||
State.depends_on_count3,
|
||||
id="depends_on_count3",
|
||||
),
|
||||
rx.text("special_floats:"),
|
||||
rx.text(
|
||||
State.special_floats.join(", "),
|
||||
id="special_floats",
|
||||
),
|
||||
),
|
||||
)
|
||||
|
||||
@ -224,6 +237,10 @@ async def test_computed_vars(
|
||||
assert depends_on_count3
|
||||
assert depends_on_count3.text == "0"
|
||||
|
||||
special_floats = driver.find_element(By.ID, "special_floats")
|
||||
assert special_floats
|
||||
assert special_floats.text == "42.9, NaN, Infinity, -Infinity"
|
||||
|
||||
increment = driver.find_element(By.ID, "increment")
|
||||
assert increment.is_enabled()
|
||||
|
||||
|
@ -71,9 +71,10 @@ def has_error_modal(driver: WebDriver) -> bool:
|
||||
"""
|
||||
try:
|
||||
driver.find_element(By.XPATH, CONNECTION_ERROR_XPATH)
|
||||
return True
|
||||
except NoSuchElementException:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
|
@ -74,16 +74,16 @@ def DynamicRoute():
|
||||
class ArgState(rx.State):
|
||||
"""The app state."""
|
||||
|
||||
@rx.var
|
||||
@rx.var(cache=False)
|
||||
def arg(self) -> int:
|
||||
return int(self.arg_str or 0)
|
||||
|
||||
class ArgSubState(ArgState):
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def cached_arg(self) -> int:
|
||||
return self.arg
|
||||
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def cached_arg_str(self) -> str:
|
||||
return self.arg_str
|
||||
|
||||
|
@ -36,18 +36,20 @@ def LifespanApp():
|
||||
print("Lifespan global started.")
|
||||
try:
|
||||
while True:
|
||||
lifespan_task_global += inc # pyright: ignore[reportUnboundVariable]
|
||||
lifespan_task_global += inc # pyright: ignore[reportUnboundVariable, reportPossiblyUnboundVariable]
|
||||
await asyncio.sleep(0.1)
|
||||
except asyncio.CancelledError as ce:
|
||||
print(f"Lifespan global cancelled: {ce}.")
|
||||
lifespan_task_global = 0
|
||||
|
||||
class LifespanState(rx.State):
|
||||
@rx.var
|
||||
interval: int = 100
|
||||
|
||||
@rx.var(cache=False)
|
||||
def task_global(self) -> int:
|
||||
return lifespan_task_global
|
||||
|
||||
@rx.var
|
||||
@rx.var(cache=False)
|
||||
def context_global(self) -> int:
|
||||
return lifespan_context_global
|
||||
|
||||
@ -59,7 +61,15 @@ def LifespanApp():
|
||||
return rx.vstack(
|
||||
rx.text(LifespanState.task_global, id="task_global"),
|
||||
rx.text(LifespanState.context_global, id="context_global"),
|
||||
rx.moment(interval=100, on_change=LifespanState.tick),
|
||||
rx.button(
|
||||
rx.moment(
|
||||
interval=LifespanState.interval, on_change=LifespanState.tick
|
||||
),
|
||||
on_click=LifespanState.set_interval( # type: ignore
|
||||
rx.cond(LifespanState.interval, 0, 100)
|
||||
),
|
||||
id="toggle-tick",
|
||||
),
|
||||
)
|
||||
|
||||
app = rx.App()
|
||||
@ -108,6 +118,7 @@ async def test_lifespan(lifespan_app: AppHarness):
|
||||
original_task_global_text = task_global.text
|
||||
original_task_global_value = int(original_task_global_text)
|
||||
lifespan_app.poll_for_content(task_global, exp_not_equal=original_task_global_text)
|
||||
driver.find_element(By.ID, "toggle-tick").click() # avoid teardown errors
|
||||
assert lifespan_app.app_module.lifespan_task_global > original_task_global_value # type: ignore
|
||||
assert int(task_global.text) > original_task_global_value
|
||||
|
||||
|
@ -22,31 +22,31 @@ def MediaApp():
|
||||
img.format = format # type: ignore
|
||||
return img
|
||||
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def img_default(self) -> Image.Image:
|
||||
return self._blue()
|
||||
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def img_bmp(self) -> Image.Image:
|
||||
return self._blue(format="BMP")
|
||||
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def img_jpg(self) -> Image.Image:
|
||||
return self._blue(format="JPEG")
|
||||
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def img_png(self) -> Image.Image:
|
||||
return self._blue(format="PNG")
|
||||
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def img_gif(self) -> Image.Image:
|
||||
return self._blue(format="GIF")
|
||||
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def img_webp(self) -> Image.Image:
|
||||
return self._blue(format="WEBP")
|
||||
|
||||
@rx.var(cache=True)
|
||||
@rx.var
|
||||
def img_from_url(self) -> Image.Image:
|
||||
img_url = "https://picsum.photos/id/1/200/300"
|
||||
img_resp = httpx.get(img_url, follow_redirects=True)
|
||||
|
@ -6,12 +6,16 @@ import asyncio
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Generator
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
import pytest
|
||||
from selenium.webdriver.common.by import By
|
||||
|
||||
from reflex.constants.event import Endpoint
|
||||
from reflex.testing import AppHarness, WebDriver
|
||||
|
||||
from .utils import poll_for_navigation
|
||||
|
||||
|
||||
def UploadFile():
|
||||
"""App for testing dynamic routes."""
|
||||
@ -23,7 +27,7 @@ def UploadFile():
|
||||
|
||||
class UploadState(rx.State):
|
||||
_file_data: Dict[str, str] = {}
|
||||
event_order: List[str] = []
|
||||
event_order: rx.Field[List[str]] = rx.field([])
|
||||
progress_dicts: List[dict] = []
|
||||
disabled: bool = False
|
||||
large_data: str = ""
|
||||
@ -50,6 +54,15 @@ def UploadFile():
|
||||
self.large_data = ""
|
||||
self.event_order.append("chain_event")
|
||||
|
||||
async def handle_upload_tertiary(self, files: List[rx.UploadFile]):
|
||||
for file in files:
|
||||
(rx.get_upload_dir() / (file.filename or "INVALID")).write_bytes(
|
||||
await file.read()
|
||||
)
|
||||
|
||||
def do_download(self):
|
||||
return rx.download(rx.get_upload_url("test.txt"))
|
||||
|
||||
def index():
|
||||
return rx.vstack(
|
||||
rx.input(
|
||||
@ -123,6 +136,34 @@ def UploadFile():
|
||||
on_click=rx.cancel_upload("secondary"),
|
||||
id="cancel_button_secondary",
|
||||
),
|
||||
rx.heading("Tertiary Upload/Download"),
|
||||
rx.upload.root(
|
||||
rx.vstack(
|
||||
rx.button("Select File"),
|
||||
rx.text("Drag and drop files here or click to select files"),
|
||||
),
|
||||
id="tertiary",
|
||||
),
|
||||
rx.button(
|
||||
"Upload",
|
||||
on_click=UploadState.handle_upload_tertiary( # type: ignore
|
||||
rx.upload_files(
|
||||
upload_id="tertiary",
|
||||
),
|
||||
),
|
||||
id="upload_button_tertiary",
|
||||
),
|
||||
rx.button(
|
||||
"Download - Frontend",
|
||||
on_click=rx.download(rx.get_upload_url("test.txt")),
|
||||
id="download-frontend",
|
||||
),
|
||||
rx.button(
|
||||
"Download - Backend",
|
||||
on_click=UploadState.do_download,
|
||||
id="download-backend",
|
||||
),
|
||||
rx.text(UploadState.event_order.to_string(), id="event-order"),
|
||||
)
|
||||
|
||||
app = rx.App(state=rx.State)
|
||||
@ -164,6 +205,24 @@ def driver(upload_file: AppHarness):
|
||||
driver.quit()
|
||||
|
||||
|
||||
def poll_for_token(driver: WebDriver, upload_file: AppHarness) -> str:
|
||||
"""Poll for the token input to be populated.
|
||||
|
||||
Args:
|
||||
driver: WebDriver instance.
|
||||
upload_file: harness for UploadFile app.
|
||||
|
||||
Returns:
|
||||
token value
|
||||
"""
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
return token
|
||||
|
||||
|
||||
@pytest.mark.parametrize("secondary", [False, True])
|
||||
@pytest.mark.asyncio
|
||||
async def test_upload_file(
|
||||
@ -178,11 +237,7 @@ async def test_upload_file(
|
||||
secondary: whether to use the secondary upload form
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
token = poll_for_token(driver, upload_file)
|
||||
full_state_name = upload_file.get_full_state_name(["_upload_state"])
|
||||
state_name = upload_file.get_state_name("_upload_state")
|
||||
substate_token = f"{token}_{full_state_name}"
|
||||
@ -204,6 +259,19 @@ async def test_upload_file(
|
||||
upload_box.send_keys(str(target_file))
|
||||
upload_button.click()
|
||||
|
||||
# check that the selected files are displayed
|
||||
selected_files = driver.find_element(By.ID, f"selected_files{suffix}")
|
||||
assert Path(selected_files.text).name == Path(exp_name).name
|
||||
|
||||
if secondary:
|
||||
event_order_displayed = driver.find_element(By.ID, "event-order")
|
||||
AppHarness._poll_for(lambda: "chain_event" in event_order_displayed.text)
|
||||
|
||||
state = await upload_file.get_state(substate_token)
|
||||
# only the secondary form tracks progress and chain events
|
||||
assert state.substates[state_name].event_order.count("upload_progress") == 1
|
||||
assert state.substates[state_name].event_order.count("chain_event") == 1
|
||||
|
||||
# look up the backend state and assert on uploaded contents
|
||||
async def get_file_data():
|
||||
return (
|
||||
@ -217,16 +285,6 @@ async def test_upload_file(
|
||||
normalized_file_data = {Path(k).name: v for k, v in file_data.items()}
|
||||
assert normalized_file_data[Path(exp_name).name] == exp_contents
|
||||
|
||||
# check that the selected files are displayed
|
||||
selected_files = driver.find_element(By.ID, f"selected_files{suffix}")
|
||||
assert Path(selected_files.text).name == Path(exp_name).name
|
||||
|
||||
state = await upload_file.get_state(substate_token)
|
||||
if secondary:
|
||||
# only the secondary form tracks progress and chain events
|
||||
assert state.substates[state_name].event_order.count("upload_progress") == 1
|
||||
assert state.substates[state_name].event_order.count("chain_event") == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_upload_file_multiple(tmp_path, upload_file: AppHarness, driver):
|
||||
@ -238,11 +296,7 @@ async def test_upload_file_multiple(tmp_path, upload_file: AppHarness, driver):
|
||||
driver: WebDriver instance.
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
token = poll_for_token(driver, upload_file)
|
||||
full_state_name = upload_file.get_full_state_name(["_upload_state"])
|
||||
state_name = upload_file.get_state_name("_upload_state")
|
||||
substate_token = f"{token}_{full_state_name}"
|
||||
@ -301,11 +355,7 @@ def test_clear_files(
|
||||
secondary: whether to use the secondary upload form.
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
poll_for_token(driver, upload_file)
|
||||
|
||||
suffix = "_secondary" if secondary else ""
|
||||
|
||||
@ -357,11 +407,7 @@ async def test_cancel_upload(tmp_path, upload_file: AppHarness, driver: WebDrive
|
||||
driver: WebDriver instance.
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
token = poll_for_token(driver, upload_file)
|
||||
state_name = upload_file.get_state_name("_upload_state")
|
||||
state_full_name = upload_file.get_full_state_name(["_upload_state"])
|
||||
substate_token = f"{token}_{state_full_name}"
|
||||
@ -403,3 +449,55 @@ async def test_cancel_upload(tmp_path, upload_file: AppHarness, driver: WebDrive
|
||||
assert Path(exp_name).name not in normalized_file_data
|
||||
|
||||
target_file.unlink()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_upload_download_file(
|
||||
tmp_path,
|
||||
upload_file: AppHarness,
|
||||
driver: WebDriver,
|
||||
):
|
||||
"""Submit a file upload and then fetch it with rx.download.
|
||||
|
||||
This checks the special case `getBackendURL` logic in the _download event
|
||||
handler in state.js.
|
||||
|
||||
Args:
|
||||
tmp_path: pytest tmp_path fixture
|
||||
upload_file: harness for UploadFile app.
|
||||
driver: WebDriver instance.
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
poll_for_token(driver, upload_file)
|
||||
|
||||
upload_box = driver.find_elements(By.XPATH, "//input[@type='file']")[2]
|
||||
assert upload_box
|
||||
upload_button = driver.find_element(By.ID, "upload_button_tertiary")
|
||||
assert upload_button
|
||||
|
||||
exp_name = "test.txt"
|
||||
exp_contents = "test file contents!"
|
||||
target_file = tmp_path / exp_name
|
||||
target_file.write_text(exp_contents)
|
||||
|
||||
upload_box.send_keys(str(target_file))
|
||||
upload_button.click()
|
||||
|
||||
# Download via event embedded in frontend code.
|
||||
download_frontend = driver.find_element(By.ID, "download-frontend")
|
||||
with poll_for_navigation(driver):
|
||||
download_frontend.click()
|
||||
assert urlsplit(driver.current_url).path == f"/{Endpoint.UPLOAD.value}/test.txt"
|
||||
assert driver.find_element(by=By.TAG_NAME, value="body").text == exp_contents
|
||||
|
||||
# Go back and wait for the app to reload.
|
||||
with poll_for_navigation(driver):
|
||||
driver.back()
|
||||
poll_for_token(driver, upload_file)
|
||||
|
||||
# Download via backend event handler.
|
||||
download_backend = driver.find_element(By.ID, "download-backend")
|
||||
with poll_for_navigation(driver):
|
||||
download_backend.click()
|
||||
assert urlsplit(driver.current_url).path == f"/{Endpoint.UPLOAD.value}/test.txt"
|
||||
assert driver.find_element(by=By.TAG_NAME, value="body").text == exp_contents
|
||||
|
46
tests/integration/tests_playwright/test_link_hover.py
Normal file
46
tests/integration/tests_playwright/test_link_hover.py
Normal file
@ -0,0 +1,46 @@
from typing import Generator

import pytest
from playwright.sync_api import Page, expect

from reflex.testing import AppHarness


def LinkApp():
import reflex as rx

app = rx.App()

def index():
return rx.vstack(
rx.box(height="10em"), # spacer, so the link isn't hovered initially
rx.link(
"Click me",
href="#",
color="blue",
_hover=rx.Style({"color": "red"}),
),
)

app.add_page(index, "/")


@pytest.fixture()
def link_app(tmp_path_factory) -> Generator[AppHarness, None, None]:
with AppHarness.create(
root=tmp_path_factory.mktemp("link_app"),
app_source=LinkApp, # type: ignore
) as harness:
assert harness.app_instance is not None, "app is not running"
yield harness


def test_link_hover(link_app: AppHarness, page: Page):
assert link_app.frontend_url is not None
page.goto(link_app.frontend_url)

link = page.get_by_role("link")
expect(link).to_have_text("Click me")
expect(link).to_have_css("color", "rgb(0, 0, 255)")
link.hover()
expect(link).to_have_css("color", "rgb(255, 0, 0)")
@ -1,13 +1,19 @@
import pytest

from reflex.components.lucide.icon import LUCIDE_ICON_LIST, Icon
from reflex.components.lucide.icon import (
LUCIDE_ICON_LIST,
LUCIDE_ICON_MAPPING_OVERRIDE,
Icon,
)
from reflex.utils import format


@pytest.mark.parametrize("tag", LUCIDE_ICON_LIST)
def test_icon(tag):
icon = Icon.create(tag)
assert icon.alias == f"Lucide{format.to_title_case(tag)}Icon"
assert icon.alias == "Lucide" + LUCIDE_ICON_MAPPING_OVERRIDE.get(
tag, f"{format.to_title_case(tag)}Icon"
)


def test_icon_missing_tag():
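
The updated assertion accounts for Lucide tags whose JS export name does not follow the plain title-case pattern: the test now consults LUCIDE_ICON_MAPPING_OVERRIDE before falling back to the title-cased name. A self-contained sketch of that lookup (the override entry and helpers below are illustrative, not copied from the real mapping):

OVERRIDES = {"some-odd-tag": "SomeODDTagIcon"}  # placeholder entry, not from the real table


def to_title_case(tag: str) -> str:
    # Simplified stand-in for reflex.utils.format.to_title_case.
    return "".join(word.capitalize() for word in tag.replace("-", "_").split("_"))


def lucide_alias(tag: str) -> str:
    # Overrides win; otherwise title-case the tag and append "Icon".
    return "Lucide" + OVERRIDES.get(tag, f"{to_title_case(tag)}Icon")


print(lucide_alias("arrow_up"))  # LucideArrowUpIcon
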
@ -27,7 +27,7 @@ from reflex.event import (
from reflex.state import BaseState
from reflex.style import Style
from reflex.utils import imports
from reflex.utils.exceptions import EventFnArgMismatch
from reflex.utils.exceptions import ChildrenTypeError, EventFnArgMismatch
from reflex.utils.imports import ImportDict, ImportVar, ParsedImportDict, parse_imports
from reflex.vars import VarData
from reflex.vars.base import LiteralVar, Var
@ -645,14 +645,17 @@ def test_create_filters_none_props(test_component):
assert str(component.style["text-align"]) == '"center"'


@pytest.mark.parametrize("children", [((None,),), ("foo", ("bar", (None,)))])
@pytest.mark.parametrize(
"children",
[
((None,),),
("foo", ("bar", (None,))),
({"foo": "bar"},),
],
)
def test_component_create_unallowed_types(children, test_component):
with pytest.raises(TypeError) as err:
with pytest.raises(ChildrenTypeError):
test_component.create(*children)
assert (
err.value.args[0]
== "Children of Reflex components must be other components, state vars, or primitive Python types. Got child None of type <class 'NoneType'>."
)


@pytest.mark.parametrize(
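
The test now expects a dedicated ChildrenTypeError instead of matching a TypeError message string, and a dict child joins the rejected cases. A small sketch of what that looks like from user code (rx.text is just an arbitrary component here, assumed to reject invalid children the same way the test component does):

import reflex as rx
from reflex.utils.exceptions import ChildrenTypeError

try:
    # Dicts (and bare None) are not valid children for a component.
    rx.text({"foo": "bar"})
except ChildrenTypeError as err:
    print(f"rejected child: {err}")
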
@ -908,7 +908,7 @@ class DynamicState(BaseState):
"""Increment the counter var."""
self.counter = self.counter + 1

@computed_var(cache=True)
@computed_var
def comp_dynamic(self) -> str:
"""A computed var that depends on the dynamic var.

@ -1549,11 +1549,11 @@ def test_app_with_valid_var_dependencies(compilable_app: tuple[App, Path]):
base: int = 0
_backend: int = 0

@computed_var(cache=True)
@computed_var()
def foo(self) -> str:
return "foo"

@computed_var(deps=["_backend", "base", foo], cache=True)
@computed_var(deps=["_backend", "base", foo])
def bar(self) -> str:
return "bar"

@ -1565,7 +1565,7 @@ def test_app_with_invalid_var_dependencies(compilable_app: tuple[App, Path]):
app, _ = compilable_app

class InvalidDepState(BaseState):
@computed_var(deps=["foolksjdf"], cache=True)
@computed_var(deps=["foolksjdf"])
def bar(self) -> str:
return "bar"

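
The pattern running through these hunks (and through the state tests further down) is that explicit cache=True arguments are dropped while previously uncached vars gain an explicit cache=False, which is consistent with computed vars now being cached by default. A small sketch of the two spellings under that assumption (state and var names are illustrative):

import reflex as rx


class CounterState(rx.State):
    count: int = 0

    @rx.var  # cached by default; recomputed only when its dependencies change
    def doubled(self) -> int:
        return self.count * 2

    @rx.var(cache=False)  # opt out: recompute on every access
    def doubled_every_time(self) -> int:
        return self.count * 2
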
@ -223,12 +223,17 @@ def test_event_console_log():
)
assert (
format.format_event(spec)
== 'Event("_call_function", {function:(() => (console["log"]("message")))})'
== 'Event("_call_function", {function:(() => (console["log"]("message"))),callback:null})'
)
spec = event.console_log(Var(_js_expr="message"))
assert (
format.format_event(spec)
== 'Event("_call_function", {function:(() => (console["log"](message)))})'
== 'Event("_call_function", {function:(() => (console["log"](message))),callback:null})'
)
spec2 = event.console_log(Var(_js_expr="message2")).add_args(Var("throwaway"))
assert (
format.format_event(spec2)
== 'Event("_call_function", {function:(() => (console["log"](message2))),callback:null})'
)


@ -243,12 +248,17 @@ def test_event_window_alert():
)
assert (
format.format_event(spec)
== 'Event("_call_function", {function:(() => (window["alert"]("message")))})'
== 'Event("_call_function", {function:(() => (window["alert"]("message"))),callback:null})'
)
spec = event.window_alert(Var(_js_expr="message"))
assert (
format.format_event(spec)
== 'Event("_call_function", {function:(() => (window["alert"](message)))})'
== 'Event("_call_function", {function:(() => (window["alert"](message))),callback:null})'
)
spec2 = event.window_alert(Var(_js_expr="message2")).add_args(Var("throwaway"))
assert (
format.format_event(spec2)
== 'Event("_call_function", {function:(() => (window["alert"](message2))),callback:null})'
)

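
Both hunks update the expected serialization: _call_function events now always carry an explicit callback slot, rendered as callback:null when nothing is chained. A quick sketch reproducing the new shape (the printed string is what the updated assertions above suggest):

from reflex import event
from reflex.utils import format

spec = event.console_log("hello")
print(format.format_event(spec))
# Event("_call_function", {function:(() => (console["log"]("hello"))),callback:null})
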
@ -60,6 +60,7 @@ from reflex.utils.exceptions import (
ReflexRuntimeError,
SetUndefinedStateVarError,
StateSerializationError,
UnretrievableVarValueError,
)
from reflex.utils.format import json_dumps
from reflex.vars.base import Var, computed_var
@ -115,7 +116,7 @@ class TestState(BaseState):
# Set this class as not test one
__test__ = False

num1: int
num1: rx.Field[int]
num2: float = 3.14
key: str
map_key: str = "a"
@ -163,7 +164,7 @@ class ChildState(TestState):
"""A child state fixture."""

value: str
count: int = 23
count: rx.Field[int] = rx.field(23)

def change_both(self, value: str, count: int):
"""Change both the value and count.
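
The fixtures switch a couple of vars to the typed field API (num1: rx.Field[int], count: rx.Field[int] = rx.field(23)). A minimal sketch of that pattern in an ordinary state (names are illustrative):

import reflex as rx


class ScoreState(rx.State):
    # rx.Field[...] with rx.field(...) keeps the annotation visible to type
    # checkers while still behaving like a normal state var.
    score: rx.Field[int] = rx.field(23)

    def bump(self):
        self.score += 1
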
@ -201,7 +202,7 @@ class GrandchildState(ChildState):
class GrandchildState2(ChildState2):
"""A grandchild state fixture."""

@rx.var(cache=True)
@rx.var
def cached(self) -> str:
"""A cached var.

@ -214,7 +215,7 @@ class GrandchildState2(ChildState2):
class GrandchildState3(ChildState3):
"""A great grandchild state fixture."""

@rx.var
@rx.var(cache=False)
def computed(self) -> str:
"""A computed var.

@ -795,7 +796,7 @@ async def test_process_event_simple(test_state):

# The delta should contain the changes, including computed vars.
assert update.delta == {
TestState.get_full_name(): {"num1": 69, "sum": 72.14, "upper": ""},
TestState.get_full_name(): {"num1": 69, "sum": 72.14},
GrandchildState3.get_full_name(): {"computed": ""},
}
assert update.events == []
@ -822,7 +823,7 @@ async def test_process_event_substate(test_state, child_state, grandchild_state)
assert child_state.value == "HI"
assert child_state.count == 24
assert update.delta == {
TestState.get_full_name(): {"sum": 3.14, "upper": ""},
# TestState.get_full_name(): {"sum": 3.14, "upper": ""},
ChildState.get_full_name(): {"value": "HI", "count": 24},
GrandchildState3.get_full_name(): {"computed": ""},
}
@ -838,7 +839,7 @@ async def test_process_event_substate(test_state, child_state, grandchild_state)
update = await test_state._process(event).__anext__()
assert grandchild_state.value2 == "new"
assert update.delta == {
TestState.get_full_name(): {"sum": 3.14, "upper": ""},
# TestState.get_full_name(): {"sum": 3.14, "upper": ""},
GrandchildState.get_full_name(): {"value2": "new"},
GrandchildState3.get_full_name(): {"computed": ""},
}
@ -988,7 +989,7 @@ class InterdependentState(BaseState):
v1: int = 0
_v2: int = 1

@rx.var(cache=True)
@rx.var
def v1x2(self) -> int:
"""Depends on var v1.

@ -997,7 +998,7 @@ class InterdependentState(BaseState):
"""
return self.v1 * 2

@rx.var(cache=True)
@rx.var
def v2x2(self) -> int:
"""Depends on backend var _v2.

@ -1006,7 +1007,7 @@ class InterdependentState(BaseState):
"""
return self._v2 * 2

@rx.var(cache=True, backend=True)
@rx.var(backend=True)
def v2x2_backend(self) -> int:
"""Depends on backend var _v2.

@ -1015,7 +1016,7 @@ class InterdependentState(BaseState):
"""
return self._v2 * 2

@rx.var(cache=True)
@rx.var
def v1x2x2(self) -> int:
"""Depends on ComputedVar v1x2.

@ -1024,7 +1025,7 @@ class InterdependentState(BaseState):
"""
return self.v1x2 * 2 # type: ignore

@rx.var(cache=True)
@rx.var
def _v3(self) -> int:
"""Depends on backend var _v2.

@ -1033,7 +1034,7 @@ class InterdependentState(BaseState):
"""
return self._v2

@rx.var(cache=True)
@rx.var
def v3x2(self) -> int:
"""Depends on ComputedVar _v3.

@ -1238,7 +1239,7 @@ def test_computed_var_cached():
class ComputedState(BaseState):
v: int = 0

@rx.var(cache=True)
@rx.var
def comp_v(self) -> int:
nonlocal comp_v_calls
comp_v_calls += 1
@ -1263,15 +1264,15 @@ def test_computed_var_cached_depends_on_non_cached():
class ComputedState(BaseState):
v: int = 0

@rx.var
@rx.var(cache=False)
def no_cache_v(self) -> int:
return self.v

@rx.var(cache=True)
@rx.var
def dep_v(self) -> int:
return self.no_cache_v # type: ignore

@rx.var(cache=True)
@rx.var
def comp_v(self) -> int:
return self.v

@ -1303,14 +1304,14 @@ def test_computed_var_depends_on_parent_non_cached():
counter = 0

class ParentState(BaseState):
@rx.var
@rx.var(cache=False)
def no_cache_v(self) -> int:
nonlocal counter
counter += 1
return counter

class ChildState(ParentState):
@rx.var(cache=True)
@rx.var
def dep_v(self) -> int:
return self.no_cache_v # type: ignore

@ -1356,7 +1357,7 @@ def test_cached_var_depends_on_event_handler(use_partial: bool):
def handler(self):
self.x = self.x + 1

@rx.var(cache=True)
@rx.var
def cached_x_side_effect(self) -> int:
self.handler()
nonlocal counter
@ -1392,7 +1393,7 @@ def test_computed_var_dependencies():
def testprop(self) -> int:
return self.v

@rx.var(cache=True)
@rx.var
def comp_v(self) -> int:
"""Direct access.

@ -1401,7 +1402,7 @@ def test_computed_var_dependencies():
"""
return self.v

@rx.var(cache=True, backend=True)
@rx.var(backend=True)
def comp_v_backend(self) -> int:
"""Direct access backend var.

@ -1410,7 +1411,7 @@ def test_computed_var_dependencies():
"""
return self.v

@rx.var(cache=True)
@rx.var
def comp_v_via_property(self) -> int:
"""Access v via property.

@ -1419,7 +1420,7 @@ def test_computed_var_dependencies():
"""
return self.testprop

@rx.var(cache=True)
@rx.var
def comp_w(self):
"""Nested lambda.

@ -1428,7 +1429,7 @@ def test_computed_var_dependencies():
"""
return lambda: self.w

@rx.var(cache=True)
@rx.var
def comp_x(self):
"""Nested function.

@ -1441,7 +1442,7 @@ def test_computed_var_dependencies():

return _

@rx.var(cache=True)
@rx.var
def comp_y(self) -> List[int]:
"""Comprehension iterating over attribute.

@ -1450,7 +1451,7 @@ def test_computed_var_dependencies():
"""
return [round(y) for y in self.y]

@rx.var(cache=True)
@rx.var
def comp_z(self) -> List[bool]:
"""Comprehension accesses attribute.

@ -1663,7 +1664,7 @@ async def state_manager(request) -> AsyncGenerator[StateManager, None]:


@pytest.fixture()
def substate_token(state_manager, token):
def substate_token(state_manager, token) -> str:
"""A token + substate name for looking up in state manager.

Args:
@ -1936,6 +1937,14 @@ def mock_app(mock_app_simple: rx.App, state_manager: StateManager) -> rx.App:
return mock_app_simple


@dataclasses.dataclass
class ModelDC:
"""A dataclass."""

foo: str = "bar"
ls: list[dict] = dataclasses.field(default_factory=list)


@pytest.mark.asyncio
async def test_state_proxy(grandchild_state: GrandchildState, mock_app: rx.App):
"""Test that the state proxy works.
@ -2018,10 +2027,6 @@ async def test_state_proxy(grandchild_state: GrandchildState, mock_app: rx.App):
assert mcall.args[0] == str(SocketEvent.EVENT)
assert mcall.args[1] == StateUpdate(
delta={
parent_state.get_full_name(): {
"upper": "",
"sum": 3.14,
},
grandchild_state.get_full_name(): {
"value2": "42",
},
@ -2038,12 +2043,13 @@ class BackgroundTaskState(BaseState):

order: List[str] = []
dict_list: Dict[str, List[int]] = {"foo": [1, 2, 3]}
dc: ModelDC = ModelDC()

def __init__(self, **kwargs): # noqa: D107
super().__init__(**kwargs)
self.router_data = {"simulate": "hydrate"}

@rx.var
@rx.var(cache=False)
def computed_order(self) -> List[str]:
"""Get the order as a computed var.

@ -2063,10 +2069,18 @@ class BackgroundTaskState(BaseState):
with pytest.raises(ImmutableStateError):
self.order.append("bad idea")

with pytest.raises(ImmutableStateError):
# Cannot manipulate dataclass attributes.
self.dc.foo = "baz"

with pytest.raises(ImmutableStateError):
# Even nested access to mutables raises an exception.
self.dict_list["foo"].append(42)

with pytest.raises(ImmutableStateError):
# Cannot modify dataclass list attribute.
self.dc.ls.append({"foo": "bar"})

with pytest.raises(ImmutableStateError):
# Direct calling another handler that modifies state raises an exception.
self.other()
@ -3022,10 +3036,6 @@ async def test_get_state(mock_app: rx.App, token: str):
grandchild_state.value2 = "set_value"

assert test_state.get_delta() == {
TestState.get_full_name(): {
"sum": 3.14,
"upper": "",
},
GrandchildState.get_full_name(): {
"value2": "set_value",
},
@ -3063,10 +3073,6 @@ async def test_get_state(mock_app: rx.App, token: str):
child_state2.value = "set_c2_value"

assert new_test_state.get_delta() == {
TestState.get_full_name(): {
"sum": 3.14,
"upper": "",
},
ChildState2.get_full_name(): {
"value": "set_c2_value",
},
@ -3121,7 +3127,7 @@ async def test_get_state_from_sibling_not_cached(mock_app: rx.App, token: str):

child3_var: int = 0

@rx.var
@rx.var(cache=False)
def v(self):
pass

@ -3192,8 +3198,8 @@ def test_potentially_dirty_substates():
def bar(self) -> str:
return ""

assert RxState._potentially_dirty_substates() == {State}
assert State._potentially_dirty_substates() == {C1}
assert RxState._potentially_dirty_substates() == set()
assert State._potentially_dirty_substates() == set()
assert C1._potentially_dirty_substates() == set()


@ -3208,7 +3214,7 @@ def test_router_var_dep() -> None:
class RouterVarDepState(RouterVarParentState):
"""A state with a router var dependency."""

@rx.var(cache=True)
@rx.var
def foo(self) -> str:
return self.router.page.params.get("foo", "")

@ -3403,7 +3409,7 @@ class MixinState(State, mixin=True):
_backend: int = 0
_backend_no_default: dict

@rx.var(cache=True)
@rx.var
def computed(self) -> str:
"""A computed var on mixin state.

@ -3582,13 +3588,6 @@ class ModelV2(BaseModelV2):
foo: str = "bar"


@dataclasses.dataclass
class ModelDC:
"""A dataclass."""

foo: str = "bar"


class PydanticState(rx.State):
"""A state with pydantic BaseModel vars."""

@ -3610,11 +3609,22 @@ def test_mutable_models():
assert state.dirty_vars == {"v2"}
state.dirty_vars.clear()

# Not yet supported ENG-4083
# assert isinstance(state.dc, MutableProxy) #noqa: ERA001
# state.dc.foo = "baz" #noqa: ERA001
# assert state.dirty_vars == {"dc"} #noqa: ERA001
# state.dirty_vars.clear() #noqa: ERA001
assert isinstance(state.dc, MutableProxy)
state.dc.foo = "baz"
assert state.dirty_vars == {"dc"}
state.dirty_vars.clear()
assert state.dirty_vars == set()
state.dc.ls.append({"hi": "reflex"})
assert state.dirty_vars == {"dc"}
state.dirty_vars.clear()
assert state.dirty_vars == set()
assert dataclasses.asdict(state.dc) == {"foo": "baz", "ls": [{"hi": "reflex"}]}
assert dataclasses.astuple(state.dc) == ("baz", [{"hi": "reflex"}])
# creating a new instance shouldn't mark the state dirty
assert dataclasses.replace(state.dc, foo="quuc") == ModelDC(
foo="quuc", ls=[{"hi": "reflex"}]
)
assert state.dirty_vars == set()


def test_get_value():
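
The previously commented-out block (ENG-4083) is now enabled: dataclass vars are wrapped in MutableProxy, so attribute assignment and nested list mutation mark the var dirty, while building a fresh instance via dataclasses.replace does not. A sketch of that behavior from an app author's perspective (state and handler names are illustrative):

import dataclasses

import reflex as rx


@dataclasses.dataclass
class ModelDC:
    foo: str = "bar"
    ls: list[dict] = dataclasses.field(default_factory=list)


class DCState(rx.State):
    dc: ModelDC = ModelDC()

    def mutate(self):
        # Both mutations below are tracked, so `dc` is included in the next delta.
        self.dc.foo = "baz"
        self.dc.ls.append({"hi": "reflex"})
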
@ -3764,3 +3774,32 @@ async def test_upcast_event_handler_arg(handler, payload):
state = UpcastState()
async for update in state._process_event(handler, state, payload):
assert update.delta == {UpcastState.get_full_name(): {"passed": True}}


@pytest.mark.asyncio
async def test_get_var_value(state_manager: StateManager, substate_token: str):
"""Test that get_var_value works correctly.

Args:
state_manager: The state manager to use.
substate_token: Token for the substate used by state_manager.
"""
state = await state_manager.get_state(substate_token)

# State Var from same state
assert await state.get_var_value(TestState.num1) == 0
state.num1 = 42
assert await state.get_var_value(TestState.num1) == 42

# State Var from another state
child_state = await state.get_state(ChildState)
assert await state.get_var_value(ChildState.count) == 23
child_state.count = 66
assert await state.get_var_value(ChildState.count) == 66

# LiteralVar with known value
assert await state.get_var_value(rx.Var.create([1, 2, 3])) == [1, 2, 3]

# Generic Var with no state
with pytest.raises(UnretrievableVarValueError):
await state.get_var_value(rx.Var("undefined"))
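
The new test exercises get_var_value, which resolves the current value behind a Var: vars on the same state, vars owned by other states, and literal vars, while a bare Var with no backing state raises UnretrievableVarValueError. A hedged sketch of using it from an event handler (state names are illustrative; availability on `self` is assumed from the test above):

import reflex as rx


class SettingsState(rx.State):
    limit: int = 10


class WorkState(rx.State):
    async def check_limit(self):
        # Resolve the live value behind another state's var.
        limit = await self.get_var_value(SettingsState.limit)
        print(f"current limit: {limit}")
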
@ -42,7 +42,7 @@ class SubA_A_A_A(SubA_A_A):
class SubA_A_A_B(SubA_A_A):
"""SubA_A_A_B is a child of SubA_A_A."""

@rx.var(cache=True)
@rx.var
def sub_a_a_a_cached(self) -> int:
"""A cached var.

@ -117,7 +117,7 @@ class TreeD(Root):

d: int

@rx.var
@rx.var(cache=False)
def d_var(self) -> int:
"""A computed var.

@ -156,7 +156,7 @@ class SubE_A_A_A_A(SubE_A_A_A):

sub_e_a_a_a_a: int

@rx.var
@rx.var(cache=False)
def sub_e_a_a_a_a_var(self) -> int:
"""A computed var.

@ -183,7 +183,7 @@ class SubE_A_A_A_D(SubE_A_A_A):

sub_e_a_a_a_d: int

@rx.var(cache=True)
@rx.var
def sub_e_a_a_a_d_var(self) -> int:
"""A computed var.

@ -1004,7 +1004,7 @@ def test_all_number_operations():

assert (
str(even_more_complicated_number)
== "!(((Math.abs(Math.floor(((Math.floor(((-((-5.4 + 1)) * 2) / 3) / 2) % 3) ** 2))) || (2 && Math.round(((Math.floor(((-((-5.4 + 1)) * 2) / 3) / 2) % 3) ** 2)))) !== 0))"
== "!(isTrue((Math.abs(Math.floor(((Math.floor(((-((-5.4 + 1)) * 2) / 3) / 2) % 3) ** 2))) || (2 && Math.round(((Math.floor(((-((-5.4 + 1)) * 2) / 3) / 2) % 3) ** 2))))))"
)

assert str(LiteralNumberVar.create(5) > False) == "(5 > 0)"
@ -1814,10 +1814,7 @@ def cv_fget(state: BaseState) -> int:
],
)
def test_computed_var_deps(deps: List[Union[str, Var]], expected: Set[str]):
@computed_var(
deps=deps,
cache=True,
)
@computed_var(deps=deps)
def test_var(state) -> int:
return 1

@ -1835,10 +1832,7 @@ def test_computed_var_deps(deps: List[Union[str, Var]], expected: Set[str]):
def test_invalid_computed_var_deps(deps: List):
with pytest.raises(TypeError):

@computed_var(
deps=deps,
cache=True,
)
@computed_var(deps=deps)
def test_var(state) -> int:
return 1
