Merge remote-tracking branch 'upstream/main' into test-input-none

commit 37c0cdad34

.github/actions/setup_build_env/action.yml (2 changes, vendored)
@@ -6,7 +6,7 @@
#
# Exit conditions:
# - Python of version `python-version` is ready to be invoked as `python`.
- # - Poetry of version `poetry-version` is ready ot be invoked as `poetry`.
+ # - Poetry of version `poetry-version` is ready to be invoked as `poetry`.
# - If `run-poetry-install` is true, deps as defined in `pyproject.toml` will have been installed into the venv at `create-venv-at-path`.

name: 'Setup Reflex build environment'
.github/workflows/benchmarks.yml (42 changes, vendored)
@@ -5,7 +5,7 @@ on:
types:
- closed
paths-ignore:
- - '**/*.md'
+ - "**/*.md"

permissions:
contents: read

@@ -15,21 +15,21 @@ defaults:
shell: bash

env:
- PYTHONIOENCODING: 'utf8'
+ PYTHONIOENCODING: "utf8"
TELEMETRY_ENABLED: false
- NODE_OPTIONS: '--max_old_space_size=8192'
+ NODE_OPTIONS: "--max_old_space_size=8192"
PR_TITLE: ${{ github.event.pull_request.title }}

jobs:
reflex-web:
# if: github.event.pull_request.merged == true
strategy:
fail-fast: false
matrix:
# Show OS combos first in GUI
os: [ubuntu-latest]
- python-version: ['3.11.4']
+ python-version: ["3.12.8"]
- node-version: ['18.x']
+ node-version: ["18.x"]

runs-on: ${{ matrix.os }}
steps:

@@ -81,24 +81,24 @@ jobs:
matrix:
# Show OS combos first in GUI
os: [ubuntu-latest, windows-latest, macos-latest]
- python-version: ['3.9.18', '3.10.13', '3.11.5', '3.12.0']
+ python-version: ["3.9.21", "3.10.16", "3.11.11", "3.12.8"]
exclude:
- os: windows-latest
- python-version: '3.10.13'
+ python-version: "3.10.16"
- os: windows-latest
- python-version: '3.9.18'
+ python-version: "3.9.21"
# keep only one python version for MacOS
- os: macos-latest
- python-version: '3.9.18'
+ python-version: "3.9.21"
- os: macos-latest
- python-version: '3.10.13'
+ python-version: "3.10.16"
- os: macos-latest
- python-version: '3.12.0'
+ python-version: "3.11.11"
include:
- os: windows-latest
- python-version: '3.10.11'
+ python-version: "3.10.11"
- os: windows-latest
- python-version: '3.9.13'
+ python-version: "3.9.13"

runs-on: ${{ matrix.os }}
steps:

@@ -123,7 +123,7 @@ jobs:
--event-type "${{ github.event_name }}" --pr-id "${{ github.event.pull_request.id }}"

reflex-dist-size: # This job is used to calculate the size of the Reflex distribution (wheel file)
if: github.event.pull_request.merged == true
timeout-minutes: 30
strategy:
# Prioritize getting more information out of the workflow (even if something fails)

@@ -133,7 +133,7 @@ jobs:
- uses: actions/checkout@v4
- uses: ./.github/actions/setup_build_env
with:
- python-version: 3.11.5
+ python-version: 3.12.8
run-poetry-install: true
create-venv-at-path: .venv
- name: Build reflex

@@ -143,12 +143,12 @@ jobs:
# Only run if the database creds are available in this context.
run:
poetry run python benchmarks/benchmark_package_size.py --os ubuntu-latest
- --python-version 3.11.5 --commit-sha "${{ github.sha }}" --pr-id "${{ github.event.pull_request.id }}"
+ --python-version 3.12.8 --commit-sha "${{ github.sha }}" --pr-id "${{ github.event.pull_request.id }}"
--branch-name "${{ github.head_ref || github.ref_name }}"
--path ./dist

reflex-venv-size: # This job calculates the total size of Reflex and its dependencies
if: github.event.pull_request.merged == true
timeout-minutes: 30
strategy:
# Prioritize getting more information out of the workflow (even if something fails)

@@ -156,7 +156,7 @@ jobs:
matrix:
# Show OS combos first in GUI
os: [ubuntu-latest, windows-latest, macos-latest]
- python-version: ['3.11.5']
+ python-version: ["3.12.8"]

runs-on: ${{ matrix.os }}
steps:

@@ -186,6 +186,6 @@ jobs:
run:
poetry run python benchmarks/benchmark_package_size.py --os "${{ matrix.os }}"
--python-version "${{ matrix.python-version }}" --commit-sha "${{ github.sha }}"
--pr-id "${{ github.event.pull_request.id }}"
--branch-name "${{ github.head_ref || github.ref_name }}"
--path ./.venv
.github/workflows/check_generated_pyi.yml (10 changes, vendored)
@@ -6,16 +6,16 @@ concurrency:

on:
push:
- branches: ['main']
+ branches: ["main"]
# We don't just trigger on make_pyi.py and the components dir, because
# there are other things that can change the generator output
# e.g. black version, reflex.Component, reflex.Var.
paths-ignore:
- - '**/*.md'
+ - "**/*.md"
pull_request:
- branches: ['main']
+ branches: ["main"]
paths-ignore:
- - '**/*.md'
+ - "**/*.md"

jobs:
check-generated-pyi-components:

@@ -25,7 +25,7 @@ jobs:
- uses: actions/checkout@v4
- uses: ./.github/actions/setup_build_env
with:
- python-version: '3.11.5'
+ python-version: "3.12.8"
run-poetry-install: true
create-venv-at-path: .venv
- run: |
.github/workflows/check_node_latest.yml (67 changes, vendored)
@@ -1,43 +1,40 @@
name: integration-node-latest

on:
push:
branches:
- main
pull_request:
branches:
- main

env:
TELEMETRY_ENABLED: false
REFLEX_USE_SYSTEM_NODE: true

jobs:
check_latest_node:
runs-on: ubuntu-22.04
strategy:
matrix:
- python-version: ['3.12']
+ python-version: ["3.12.8"]
split_index: [1, 2]
- node-version: ['node']
+ node-version: ["node"]
fail-fast: false

steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/setup_build_env
with:
python-version: ${{ matrix.python-version }}
run-poetry-install: true
create-venv-at-path: .venv
- uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
- run: |
poetry run uv pip install pyvirtualdisplay pillow pytest-split
poetry run playwright install --with-deps
- run: |
poetry run pytest tests/test_node_version.py
poetry run pytest tests/integration --splits 2 --group ${{matrix.split_index}}
.github/workflows/check_outdated_dependencies.yml (130 changes, vendored)
@@ -1,88 +1,86 @@
name: check-outdated-dependencies

on:
push: # This will trigger the action when a pull request is opened or updated.
branches:
- - 'release/**' # This will trigger the action when any branch starting with "release/" is created.
+ - "release/**" # This will trigger the action when any branch starting with "release/" is created.
workflow_dispatch: # Allow manual triggering if needed.

jobs:
backend:
runs-on: ubuntu-latest

steps:
- name: Checkout code
uses: actions/checkout@v3

- uses: ./.github/actions/setup_build_env
with:
- python-version: '3.9'
+ python-version: "3.9.21"
run-poetry-install: true
create-venv-at-path: .venv

- name: Check outdated backend dependencies
run: |
outdated=$(poetry show -oT)
echo "Outdated:"
echo "$outdated"

filtered_outdated=$(echo "$outdated" | grep -vE 'pyright|ruff' || true)

if [ ! -z "$filtered_outdated" ]; then
echo "Outdated dependencies found:"
echo "$filtered_outdated"
exit 1
else
echo "All dependencies are up to date. (pyright and ruff are ignored)"
fi

frontend:
runs-on: ubuntu-latest

steps:
- name: Checkout code
uses: actions/checkout@v4
- uses: ./.github/actions/setup_build_env
with:
- python-version: '3.10.11'
+ python-version: "3.10.16"
run-poetry-install: true
create-venv-at-path: .venv
- name: Clone Reflex Website Repo
uses: actions/checkout@v4
with:
repository: reflex-dev/reflex-web
ref: main
path: reflex-web
- name: Install Requirements for reflex-web
working-directory: ./reflex-web
run: poetry run uv pip install -r requirements.txt
- name: Install additional dependencies for DB access
run: poetry run uv pip install psycopg
- name: Init Website for reflex-web
working-directory: ./reflex-web
run: poetry run reflex init
- name: Run Website and Check for errors
run: |
poetry run bash scripts/integration.sh ./reflex-web dev
- name: Check outdated frontend dependencies
working-directory: ./reflex-web/.web
run: |
raw_outdated=$(/home/runner/.local/share/reflex/bun/bin/bun outdated)
outdated=$(echo "$raw_outdated" | grep -vE '\|\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\|' || true)
echo "Outdated:"
echo "$outdated"

# Ignore 3rd party dependencies that are not updated.
filtered_outdated=$(echo "$outdated" | grep -vE 'Package|@chakra-ui|lucide-react|@splinetool/runtime|ag-grid-react|framer-motion|react-markdown|remark-math|remark-gfm|rehype-katex|rehype-raw|remark-unwrap-images' || true)
no_extra=$(echo "$filtered_outdated" | grep -vE '\|\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-' || true)

if [ ! -z "$no_extra" ]; then
echo "Outdated dependencies found:"
echo "$filtered_outdated"
exit 1
else
echo "All dependencies are up to date. (3rd party packages are ignored)"
fi
@@ -22,8 +22,8 @@ jobs:
timeout-minutes: 30
strategy:
matrix:
- state_manager: ['redis', 'memory']
+ state_manager: ["redis", "memory"]
- python-version: ['3.11.5', '3.12.0', '3.13.0']
+ python-version: ["3.11.11", "3.12.8", "3.13.1"]
split_index: [1, 2]
fail-fast: false
runs-on: ubuntu-22.04

@@ -53,7 +53,7 @@ jobs:
SCREENSHOT_DIR: /tmp/screenshots/${{ matrix.state_manager }}/${{ matrix.python-version }}/${{ matrix.split_index }}
REDIS_URL: ${{ matrix.state_manager == 'redis' && 'redis://localhost:6379' || '' }}
run: |
- poetry run playwright install --with-deps
+ poetry run playwright install chromium
poetry run pytest tests/integration --splits 2 --group ${{matrix.split_index}}
- uses: actions/upload-artifact@v4
name: Upload failed test screenshots
.github/workflows/integration_tests.yml (40 changes, vendored)
@@ -2,13 +2,13 @@ name: integration-tests

on:
push:
- branches: ['main']
+ branches: ["main"]
paths-ignore:
- - '**/*.md'
+ - "**/*.md"
pull_request:
- branches: ['main']
+ branches: ["main"]
paths-ignore:
- - '**/*.md'
+ - "**/*.md"

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.id }}

@@ -27,9 +27,9 @@ env:
# TODO: can we fix windows encoding natively within reflex? Bug above can hit real users too (less common, but possible)
# - Catch encoding errors when printing logs
# - Best effort print lines that contain illegal chars (map to some default char, etc.)
- PYTHONIOENCODING: 'utf8'
+ PYTHONIOENCODING: "utf8"
TELEMETRY_ENABLED: false
- NODE_OPTIONS: '--max_old_space_size=8192'
+ NODE_OPTIONS: "--max_old_space_size=8192"
PR_TITLE: ${{ github.event.pull_request.title }}

jobs:

@@ -43,17 +43,22 @@ jobs:
matrix:
# Show OS combos first in GUI
os: [ubuntu-latest, windows-latest]
- python-version: ['3.9.18', '3.10.13', '3.11.5', '3.12.0', '3.13.0']
+ python-version: ["3.9.21", "3.10.16", "3.11.11", "3.12.8", "3.13.1"]
+ # Windows is a bit behind on Python version availability in Github
exclude:
- os: windows-latest
- python-version: '3.10.13'
+ python-version: "3.11.11"
- os: windows-latest
- python-version: '3.9.18'
+ python-version: "3.10.16"
+ - os: windows-latest
+ python-version: "3.9.21"
include:
- os: windows-latest
- python-version: '3.10.11'
+ python-version: "3.11.9"
- os: windows-latest
- python-version: '3.9.13'
+ python-version: "3.10.11"
+ - os: windows-latest
+ python-version: "3.9.13"

runs-on: ${{ matrix.os }}
steps:

@@ -115,18 +120,16 @@ jobs:
--branch-name "${{ github.head_ref || github.ref_name }}" --pr-id "${{ github.event.pull_request.id }}"
--app-name "counter"

reflex-web:
strategy:
fail-fast: false
matrix:
# Show OS combos first in GUI
os: [ubuntu-latest]
- python-version: ['3.10.11', '3.11.4']
+ python-version: ["3.11.11", "3.12.8"]

env:
- REFLEX_WEB_WINDOWS_OVERRIDE: '1'
+ REFLEX_WEB_WINDOWS_OVERRIDE: "1"
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4

@@ -171,7 +174,7 @@ jobs:
- uses: actions/checkout@v4
- uses: ./.github/actions/setup_build_env
with:
- python-version: '3.11.4'
+ python-version: "3.11.11"
run-poetry-install: true
create-venv-at-path: .venv
- name: Create app directory

@@ -190,14 +193,14 @@ jobs:
# Check that npm is home
npm -v
poetry run bash scripts/integration.sh ./rx-shout-from-template prod

reflex-web-macos:
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
strategy:
fail-fast: false
matrix:
- python-version: ['3.11.5', '3.12.0']
+ # Note: py311 version chosen due to available arm64 darwin builds.
+ python-version: ["3.11.9", "3.12.8"]
runs-on: macos-latest
steps:
- uses: actions/checkout@v4

@@ -231,4 +234,3 @@ jobs:
--python-version "${{ matrix.python-version }}" --commit-sha "${{ github.sha }}"
--pr-id "${{ github.event.pull_request.id }}" --branch-name "${{ github.head_ref || github.ref_name }}"
--app-name "reflex-web" --path ./reflex-web/.web
.github/workflows/pre-commit.yml (6 changes, vendored)
@@ -6,12 +6,12 @@ concurrency:

on:
pull_request:
- branches: ['main']
+ branches: ["main"]
push:
# Note even though this job is called "pre-commit" and runs "pre-commit", this job will run
# also POST-commit on main also! In case there are mishandled merge conflicts / bad auto-resolves
# when merging into main branch.
- branches: ['main']
+ branches: ["main"]

jobs:
pre-commit:

@@ -23,7 +23,7 @@ jobs:
with:
# running vs. one version of Python is OK
# i.e. ruff, black, etc.
- python-version: 3.11.5
+ python-version: 3.12.8
run-poetry-install: true
create-venv-at-path: .venv
# TODO pre-commit related stuff can be cached too (not a bottleneck yet)
.github/workflows/unit_tests.yml (28 changes, vendored)
@@ -6,13 +6,13 @@ concurrency:

on:
push:
- branches: ['main']
+ branches: ["main"]
paths-ignore:
- - '**/*.md'
+ - "**/*.md"
pull_request:
- branches: ['main']
+ branches: ["main"]
paths-ignore:
- - '**/*.md'
+ - "**/*.md"

permissions:
contents: read

@@ -28,18 +28,22 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest]
- python-version: ['3.9.18', '3.10.13', '3.11.5', '3.12.0', '3.13.0']
+ python-version: ["3.9.21", "3.10.16", "3.11.11", "3.12.8", "3.13.1"]
# Windows is a bit behind on Python version availability in Github
exclude:
- os: windows-latest
- python-version: '3.10.13'
+ python-version: "3.11.11"
- os: windows-latest
- python-version: '3.9.18'
+ python-version: "3.10.16"
+ - os: windows-latest
+ python-version: "3.9.21"
include:
- os: windows-latest
- python-version: '3.10.11'
+ python-version: "3.11.9"
- os: windows-latest
- python-version: '3.9.13'
+ python-version: "3.10.11"
+ - os: windows-latest
+ python-version: "3.9.13"
runs-on: ${{ matrix.os }}

# Service containers to run with `runner-job`

@@ -88,8 +92,8 @@ jobs:
strategy:
fail-fast: false
matrix:
- # Note: py39, py310 versions chosen due to available arm64 darwin builds.
- python-version: ['3.9.13', '3.10.11', '3.11.5', '3.12.0', '3.13.0']
+ # Note: py39, py310, py311 versions chosen due to available arm64 darwin builds.
+ python-version: ["3.9.13", "3.10.11", "3.11.9", "3.12.8", "3.13.1"]
runs-on: macos-latest
steps:
- uses: actions/checkout@v4

@@ -106,4 +110,4 @@ jobs:
run: |
export PYTHONUNBUFFERED=1
poetry run uv pip install "pydantic~=1.10"
poetry run pytest tests/units --cov --no-cov-on-fail --cov-report=
@@ -11,6 +11,12 @@ repos:
args: ["--fix", "--exit-non-zero-on-fix"]
exclude: '^integration/benchmarks/'

+ - repo: https://github.com/codespell-project/codespell
+ rev: v2.3.0
+ hooks:
+ - id: codespell
+ args: ["reflex"]

# Run pyi check before pyright because pyright can fail if pyi files are wrong.
- repo: local
hooks:
@@ -5,7 +5,7 @@

We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
- identity and expression, level of experience, education, socio-economic status,
+ identity and expression, level of experience, education, socioeconomic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
@@ -249,7 +249,7 @@ We welcome contributions of any size! Below are some good ways to get started in

- **GitHub Discussions**: A great way to talk about features you want added or things that are confusing/need clarification.
- **GitHub Issues**: [Issues](https://github.com/reflex-dev/reflex/issues) are an excellent way to report bugs. Additionally, you can try and solve an existing issue and submit a PR.

- We are actively looking for contributors, no matter your skill level or experience. To contribute check out [CONTIBUTING.md](https://github.com/reflex-dev/reflex/blob/main/CONTRIBUTING.md)
+ We are actively looking for contributors, no matter your skill level or experience. To contribute check out [CONTRIBUTING.md](https://github.com/reflex-dev/reflex/blob/main/CONTRIBUTING.md)

## All Thanks To Our Contributors:
@@ -21,7 +21,7 @@ def get_package_size(venv_path: Path, os_name):
ValueError: when venv does not exist or python version is None.
"""
python_version = get_python_version(venv_path, os_name)
- print("Python version:", python_version)
+ print("Python version:", python_version) # noqa: T201
if python_version is None:
raise ValueError("Error: Failed to determine Python version.")
@@ -27,7 +27,7 @@ FROM python:3.13 as init

ARG uv=/root/.local/bin/uv

- # Install `uv` for faster package boostrapping
+ # Install `uv` for faster package bootstrapping
ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
RUN /install.sh && rm /install.sh
@@ -6,7 +6,7 @@ FROM python:3.13 as init

ARG uv=/root/.local/bin/uv

- # Install `uv` for faster package boostrapping
+ # Install `uv` for faster package bootstrapping
ADD --chmod=755 https://astral.sh/uv/install.sh /install.sh
RUN /install.sh && rm /install.sh
@@ -1,6 +1,6 @@
[tool.poetry]
name = "reflex"
- version = "0.6.7dev1"
+ version = "0.7.0dev1"
description = "Web apps in pure Python."
license = "Apache-2.0"
authors = [

@@ -16,7 +16,6 @@ repository = "https://github.com/reflex-dev/reflex"
documentation = "https://reflex.dev/docs/getting-started/introduction"
keywords = ["web", "framework"]
classifiers = ["Development Status :: 4 - Beta"]
packages = [{ include = "reflex" }]

[tool.poetry.dependencies]
python = "^3.9"

@@ -87,13 +86,13 @@ build-backend = "poetry.core.masonry.api"
target-version = "py39"
output-format = "concise"
lint.isort.split-on-trailing-comma = false
- lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "PERF", "PTH", "RUF", "SIM", "W"]
+ lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "PERF", "PTH", "RUF", "SIM", "T", "W"]
lint.ignore = ["B008", "D205", "E501", "F403", "SIM115", "RUF006", "RUF012"]
lint.pydocstyle.convention = "google"

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
- "tests/*.py" = ["D100", "D103", "D104", "B018", "PERF"]
+ "tests/*.py" = ["D100", "D103", "D104", "B018", "PERF", "T"]
"reflex/.templates/*.py" = ["D100", "D103", "D104"]
"*.pyi" = ["D301", "D415", "D417", "D418", "E742"]
"*/blank.py" = ["I001"]

@@ -101,3 +100,7 @@ lint.pydocstyle.convention = "google"
[tool.pytest.ini_options]
asyncio_default_fixture_loop_scope = "function"
asyncio_mode = "auto"

+ [tool.codespell]
+ skip = "docs/*,*.html,examples/*, *.pyi"
+ ignore-words-list = "te, TreeE"
@@ -1,4 +1,5 @@
{% extends "web/pages/base_page.js.jinja2" %}
+ {% from "web/pages/macros.js.jinja2" import renderHooks %}

{% block early_imports %}
import '$/styles/styles.css'

@@ -18,10 +19,7 @@ import * as {{library_alias}} from "{{library_path}}";

{% block export %}
function AppWrap({children}) {

- {% for hook in hooks %}
- {{ hook }}
- {% endfor %}
+ {{ renderHooks(hooks) }}

return (
{{utils.render(render, indent_width=0)}}
@@ -1,5 +1,5 @@
{% extends "web/pages/base_page.js.jinja2" %}

+ {% from "web/pages/macros.js.jinja2" import renderHooks %}
{% block export %}
{% for component in components %}

@@ -8,9 +8,8 @@
{% endfor %}

export const {{component.name}} = memo(({ {{-component.props|join(", ")-}} }) => {
- {% for hook in component.hooks %}
- {{ hook }}
- {% endfor %}
+ {{ renderHooks(component.hooks) }}

return(
{{utils.render(component.render)}}
)
@@ -1,4 +1,5 @@
{% extends "web/pages/base_page.js.jinja2" %}
+ {% from "web/pages/macros.js.jinja2" import renderHooks %}

{% block declaration %}
{% for custom_code in custom_codes %}

@@ -8,9 +9,7 @@

{% block export %}
export default function Component() {
- {% for hook in hooks %}
- {{ hook }}
- {% endfor %}
+ {{ renderHooks(hooks)}}

return (
{{utils.render(render, indent_width=0)}}
reflex/.templates/jinja/web/pages/macros.js.jinja2 (new file, 38 lines)
@@ -0,0 +1,38 @@
{% macro renderHooks(hooks) %}
{% set sorted_hooks = sort_hooks(hooks) %}

{# Render the grouped hooks #}
{% for hook, _ in sorted_hooks[const.hook_position.INTERNAL] %}
{{ hook }}
{% endfor %}

{% for hook, _ in sorted_hooks[const.hook_position.PRE_TRIGGER] %}
{{ hook }}
{% endfor %}

{% for hook, _ in sorted_hooks[const.hook_position.POST_TRIGGER] %}
{{ hook }}
{% endfor %}
{% endmacro %}

{% macro renderHooksWithMemo(hooks, memo)%}
{% set sorted_hooks = sort_hooks(hooks) %}

{# Render the grouped hooks #}
{% for hook, _ in sorted_hooks[const.hook_position.INTERNAL] %}
{{ hook }}
{% endfor %}

{% for hook, _ in sorted_hooks[const.hook_position.PRE_TRIGGER] %}
{{ hook }}
{% endfor %}

{% for hook in memo %}
{{ hook }}
{% endfor %}

{% for hook, _ in sorted_hooks[const.hook_position.POST_TRIGGER] %}
{{ hook }}
{% endfor %}

{% endmacro %}
@@ -1,22 +1,10 @@
{% import 'web/pages/utils.js.jinja2' as utils %}
+ {% from 'web/pages/macros.js.jinja2' import renderHooksWithMemo %}
+ {% set all_hooks = component._get_all_hooks() %}

export function {{tag_name}} () {
- {% for hook in component._get_all_hooks_internal() %}
- {{ hook }}
- {% endfor %}
-
- {% for hook, data in component._get_all_hooks().items() if not data.position or data.position == const.hook_position.PRE_TRIGGER %}
- {{ hook }}
- {% endfor %}
-
- {% for hook in memo_trigger_hooks %}
- {{ hook }}
- {% endfor %}
-
- {% for hook, data in component._get_all_hooks().items() if data.position and data.position == const.hook_position.POST_TRIGGER %}
- {{ hook }}
- {% endfor %}
+ {{ renderHooksWithMemo(all_hooks, memo_trigger_hooks) }}

return (
{{utils.render(component.render(), indent_width=0)}}
)
@@ -28,7 +28,7 @@ export const state_name = "{{state_name}}"

export const exception_state_name = "{{const.frontend_exception_state}}"

- // Theses events are triggered on initial load and each page navigation.
+ // These events are triggered on initial load and each page navigation.
export const onLoadInternalEvent = () => {
const internal_events = [];
@@ -208,11 +208,16 @@ export const applyEvent = async (event, socket) => {
if (event.name == "_download") {
const a = document.createElement("a");
a.hidden = true;
+ a.href = event.payload.url;
// Special case when linking to uploaded files
- a.href = event.payload.url.replace(
- "${getBackendURL(env.UPLOAD)}",
- getBackendURL(env.UPLOAD)
- );
+ if (a.href.includes("getBackendURL(env.UPLOAD)")) {
+ a.href = eval?.(
+ event.payload.url.replace(
+ "getBackendURL(env.UPLOAD)",
+ `"${getBackendURL(env.UPLOAD)}"`
+ )
+ );
+ }
a.download = event.payload.filename;
a.click();
a.remove();
@@ -68,6 +68,7 @@ from reflex.components.core.upload import Upload, get_upload_dir
from reflex.components.radix import themes
from reflex.config import environment, get_config
from reflex.event import (
+ _EVENT_FIELDS,
BASE_STATE,
Event,
EventHandler,
@@ -1356,20 +1357,22 @@ async def health() -> JSONResponse:
health_status = {"status": True}
status_code = 200

- db_status, redis_status = await asyncio.gather(
- get_db_status(), prerequisites.get_redis_status()
- )
+ tasks = []

- health_status["db"] = db_status
+ if prerequisites.check_db_used():
+ tasks.append(get_db_status())
+ if prerequisites.check_redis_used():
+ tasks.append(prerequisites.get_redis_status())

- if redis_status is None:
+ results = await asyncio.gather(*tasks)
+
+ for result in results:
+ health_status |= result
+
+ if "redis" in health_status and health_status["redis"] is None:
health_status["redis"] = False
- else:
- health_status["redis"] = redis_status

- if not health_status["db"] or (
- not health_status["redis"] and redis_status is not None
- ):
+ if not all(health_status.values()):
health_status["status"] = False
status_code = 503
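For reference, a minimal runnable sketch (not part of the commit) of how the reworked health() aggregation resolves to 200 or 503. The check_db/check_redis flags stand in for prerequisites.check_db_used()/check_redis_used(), and the single-key dict results are an assumption implied by the merged `health_status |= result` line:

import asyncio

async def get_db_status():
    # assumed shape: one key describing the database check
    return {"db": True}

async def get_redis_status():
    # None means redis is configured but did not answer
    return {"redis": None}

async def health(check_db: bool, check_redis: bool) -> tuple[dict, int]:
    health_status: dict = {"status": True}
    status_code = 200
    tasks = []
    if check_db:
        tasks.append(get_db_status())
    if check_redis:
        tasks.append(get_redis_status())
    # merge each {"db": ...} / {"redis": ...} result into one dict
    for result in await asyncio.gather(*tasks):
        health_status |= result
    if "redis" in health_status and health_status["redis"] is None:
        health_status["redis"] = False
    if not all(health_status.values()):
        health_status["status"] = False
        status_code = 503
    return health_status, status_code

print(asyncio.run(health(check_db=True, check_redis=True)))
# ({'status': False, 'db': True, 'redis': False}, 503)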
@@ -1563,9 +1566,7 @@ class EventNamespace(AsyncNamespace):
"""
fields = data
# Get the event.
- event = Event(
- **{k: v for k, v in fields.items() if k not in ("handler", "event_actions")}
- )
+ event = Event(**{k: v for k, v in fields.items() if k in _EVENT_FIELDS})

self.token_to_sid[event.token] = sid
self.sid_to_token[sid] = event.token
@@ -75,7 +75,7 @@ def _compile_app(app_root: Component) -> str:
return templates.APP_ROOT.render(
imports=utils.compile_imports(app_root._get_all_imports()),
custom_codes=app_root._get_all_custom_code(),
- hooks={**app_root._get_all_hooks_internal(), **app_root._get_all_hooks()},
+ hooks=app_root._get_all_hooks(),
window_libraries=window_libraries,
render=app_root.render(),
)

@@ -149,7 +149,7 @@ def _compile_page(
imports=imports,
dynamic_imports=component._get_all_dynamic_imports(),
custom_codes=component._get_all_custom_code(),
- hooks={**component._get_all_hooks_internal(), **component._get_all_hooks()},
+ hooks=component._get_all_hooks(),
render=component.render(),
**kwargs,
)
@@ -1,9 +1,46 @@
"""Templates to use in the reflex compiler."""

+ from __future__ import annotations
+
from jinja2 import Environment, FileSystemLoader, Template

from reflex import constants
+ from reflex.constants import Hooks
from reflex.utils.format import format_state_name, json_dumps
+ from reflex.vars.base import VarData
+
+
+ def _sort_hooks(hooks: dict[str, VarData | None]):
+ """Sort the hooks by their position.
+
+ Args:
+ hooks: The hooks to sort.
+
+ Returns:
+ The sorted hooks.
+ """
+ sorted_hooks = {
+ Hooks.HookPosition.INTERNAL: [],
+ Hooks.HookPosition.PRE_TRIGGER: [],
+ Hooks.HookPosition.POST_TRIGGER: [],
+ }
+
+ for hook, data in hooks.items():
+ if data and data.position and data.position == Hooks.HookPosition.INTERNAL:
+ sorted_hooks[Hooks.HookPosition.INTERNAL].append((hook, data))
+ elif not data or (
+ not data.position
+ or data.position == constants.Hooks.HookPosition.PRE_TRIGGER
+ ):
+ sorted_hooks[Hooks.HookPosition.PRE_TRIGGER].append((hook, data))
+ elif (
+ data
+ and data.position
+ and data.position == constants.Hooks.HookPosition.POST_TRIGGER
+ ):
+ sorted_hooks[Hooks.HookPosition.POST_TRIGGER].append((hook, data))
+
+ return sorted_hooks


class ReflexJinjaEnvironment(Environment):
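A rough standalone illustration (not from the commit; HookPosition and FakeVarData are stand-ins for reflex's Hooks.HookPosition and VarData) of the grouping that _sort_hooks performs before the new macros render each bucket in order:

from __future__ import annotations
from dataclasses import dataclass
from enum import Enum

class HookPosition(Enum):  # stand-in for Hooks.HookPosition
    INTERNAL = "internal"
    PRE_TRIGGER = "pre_trigger"
    POST_TRIGGER = "post_trigger"

@dataclass
class FakeVarData:  # stand-in for reflex.vars.base.VarData
    position: HookPosition | None = None

def sort_hooks(hooks):
    # group hooks by position; hooks with no data or no position default to PRE_TRIGGER
    groups = {pos: [] for pos in HookPosition}
    for hook, data in hooks.items():
        if data and data.position == HookPosition.INTERNAL:
            groups[HookPosition.INTERNAL].append((hook, data))
        elif not data or not data.position or data.position == HookPosition.PRE_TRIGGER:
            groups[HookPosition.PRE_TRIGGER].append((hook, data))
        elif data.position == HookPosition.POST_TRIGGER:
            groups[HookPosition.POST_TRIGGER].append((hook, data))
    return groups

hooks = {
    "const ref_x = useRef(null);": FakeVarData(HookPosition.INTERNAL),
    "const [count, setCount] = useState(0);": None,
    "useEffect(() => trigger(), []);": FakeVarData(HookPosition.POST_TRIGGER),
}
for position, group in sort_hooks(hooks).items():
    print(position.name, [hook for hook, _ in group])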
@@ -47,6 +84,7 @@ class ReflexJinjaEnvironment(Environment):
"frontend_exception_state": constants.CompileVars.FRONTEND_EXCEPTION_STATE_FULL,
"hook_position": constants.Hooks.HookPosition,
}
+ self.globals["sort_hooks"] = _sort_hooks


def get_template(name: str) -> Template:

@@ -103,6 +141,9 @@ STYLE = get_template("web/styles/styles.css.jinja2")
# Code that generate the package json file
PACKAGE_JSON = get_template("web/package.json.jinja2")

+ # Template containing some macros used in the web pages.
+ MACROS = get_template("web/pages/macros.js.jinja2")
+
# Code that generate the pyproject.toml file for custom components.
CUSTOM_COMPONENTS_PYPROJECT_TOML = get_template(
"custom_components/pyproject.toml.jinja2"
@@ -290,7 +290,7 @@ def compile_custom_component(
"name": component.tag,
"props": props,
"render": render.render(),
- "hooks": {**render._get_all_hooks_internal(), **render._get_all_hooks()},
+ "hooks": render._get_all_hooks(),
"custom_code": render._get_all_custom_code(),
},
imports,
@@ -9,6 +9,7 @@ from reflex.components.tags import Tag
from reflex.components.tags.tagless import Tagless
from reflex.utils.imports import ParsedImportDict
from reflex.vars import BooleanVar, ObjectVar, Var
+ from reflex.vars.base import VarData


class Bare(Component):

@@ -32,7 +33,7 @@ class Bare(Component):
contents = str(contents) if contents is not None else ""
return cls(contents=contents) # type: ignore

- def _get_all_hooks_internal(self) -> dict[str, None]:
+ def _get_all_hooks_internal(self) -> dict[str, VarData | None]:
"""Include the hooks for the component.

Returns:

@@ -43,7 +44,7 @@ class Bare(Component):
hooks |= self.contents._var_value._get_all_hooks_internal()
return hooks

- def _get_all_hooks(self) -> dict[str, None]:
+ def _get_all_hooks(self) -> dict[str, VarData | None]:
"""Include the hooks for the component.

Returns:

@@ -107,11 +108,14 @@ class Bare(Component):
return Tagless(contents=f"{{{self.contents!s}}}")
return Tagless(contents=str(self.contents))

- def _get_vars(self, include_children: bool = False) -> Iterator[Var]:
+ def _get_vars(
+ self, include_children: bool = False, ignore_ids: set[int] | None = None
+ ) -> Iterator[Var]:
"""Walk all Vars used in this component.

Args:
include_children: Whether to include Vars from children.
+ ignore_ids: The ids to ignore.

Yields:
The contents if it is a Var, otherwise nothing.
@@ -23,6 +23,8 @@ from typing import (
Union,
)

+ from typing_extensions import deprecated
+
import reflex.state
from reflex.base import Base
from reflex.compiler.templates import STATEFUL_COMPONENT

@@ -43,17 +45,13 @@ from reflex.constants.state import FRONTEND_EVENT_STATE
from reflex.event import (
EventCallback,
EventChain,
EventChainVar,
EventHandler,
EventSpec,
EventVar,
call_event_fn,
call_event_handler,
get_handler_args,
no_args_event_spec,
)
from reflex.style import Style, format_as_emotion
- from reflex.utils import format, imports, types
+ from reflex.utils import console, format, imports, types
from reflex.utils.imports import (
ImmutableParsedImportDict,
ImportDict,
@@ -104,7 +102,7 @@ class BaseComponent(Base, ABC):
"""

@abstractmethod
- def _get_all_hooks_internal(self) -> dict[str, None]:
+ def _get_all_hooks_internal(self) -> dict[str, VarData | None]:
"""Get the reflex internal hooks for the component and its children.

Returns:

@@ -112,7 +110,7 @@ class BaseComponent(Base, ABC):
"""

@abstractmethod
- def _get_all_hooks(self) -> dict[str, None]:
+ def _get_all_hooks(self) -> dict[str, VarData | None]:
"""Get the React hooks for this component.

Returns:
@@ -493,8 +491,7 @@ class Component(BaseComponent, ABC):
)
# Check if the key is an event trigger.
if key in component_specific_triggers:
- # Temporarily disable full control for event triggers.
- kwargs["event_triggers"][key] = self._create_event_chain(
+ kwargs["event_triggers"][key] = EventChain.create(
value=value, # type: ignore
args_spec=component_specific_triggers[key],
key=key,

@@ -548,6 +545,7 @@ class Component(BaseComponent, ABC):
# Construct the component.
super().__init__(*args, **kwargs)

+ @deprecated("Use rx.EventChain.create instead.")
def _create_event_chain(
self,
args_spec: types.ArgsSpec | Sequence[types.ArgsSpec],
@@ -569,82 +567,18 @@ class Component(BaseComponent, ABC):

Returns:
The event chain.

- Raises:
- ValueError: If the value is not a valid event chain.
"""
- # If it's an event chain var, return it.
- if isinstance(value, Var):
- if isinstance(value, EventChainVar):
- return value
- elif isinstance(value, EventVar):
- value = [value]
- elif issubclass(value._var_type, (EventChain, EventSpec)):
- return self._create_event_chain(args_spec, value.guess_type(), key=key)
- else:
- raise ValueError(
- f"Invalid event chain: {value!s} of type {value._var_type}"
- )
- elif isinstance(value, EventChain):
- # Trust that the caller knows what they're doing passing an EventChain directly
- return value
-
- # If the input is a single event handler, wrap it in a list.
- if isinstance(value, (EventHandler, EventSpec)):
- value = [value]
-
- # If the input is a list of event handlers, create an event chain.
- if isinstance(value, List):
- events: List[Union[EventSpec, EventVar]] = []
- for v in value:
- if isinstance(v, (EventHandler, EventSpec)):
- # Call the event handler to get the event.
- events.append(call_event_handler(v, args_spec, key=key))
- elif isinstance(v, Callable):
- # Call the lambda to get the event chain.
- result = call_event_fn(v, args_spec, key=key)
- if isinstance(result, Var):
- raise ValueError(
- f"Invalid event chain: {v}. Cannot use a Var-returning "
- "lambda inside an EventChain list."
- )
- events.extend(result)
- elif isinstance(v, EventVar):
- events.append(v)
- else:
- raise ValueError(f"Invalid event: {v}")
-
- # If the input is a callable, create an event chain.
- elif isinstance(value, Callable):
- result = call_event_fn(value, args_spec, key=key)
- if isinstance(result, Var):
- # Recursively call this function if the lambda returned an EventChain Var.
- return self._create_event_chain(args_spec, result, key=key)
- events = [*result]
-
- # Otherwise, raise an error.
- else:
- raise ValueError(f"Invalid event chain: {value}")
-
- # Add args to the event specs if necessary.
- events = [
- (e.with_args(get_handler_args(e)) if isinstance(e, EventSpec) else e)
- for e in events
- ]
-
- # Return the event chain.
- if isinstance(args_spec, Var):
- return EventChain(
- events=events,
- args_spec=None,
- event_actions={},
- )
- else:
- return EventChain(
- events=events,
- args_spec=args_spec,
- event_actions={},
- )
+ console.deprecate(
+ "Component._create_event_chain",
+ "Use rx.EventChain.create instead.",
+ deprecation_version="0.6.8",
+ removal_version="0.7.0",
+ )
+ return EventChain.create(
+ value=value, # type: ignore
+ args_spec=args_spec,
+ key=key,
+ )

def get_event_triggers(
self,
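A hedged migration sketch for downstream code that still calls the deprecated helper. The rx.EventChain.create call and its value/args_spec/key parameters come from the diff above; the state class, handler name, and no_args_event_spec usage here are illustrative assumptions rather than code from this commit:

import reflex as rx
from reflex.event import no_args_event_spec

class CounterState(rx.State):
    count: int = 0

    def increment(self):
        self.count += 1

def build_click_chain() -> rx.EventChain:
    # Previously: component._create_event_chain(args_spec, CounterState.increment, key="on_click")
    # Now, per the deprecation notice in the diff:
    return rx.EventChain.create(
        value=CounterState.increment,
        args_spec=no_args_event_spec,
        key="on_click",
    )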
@@ -1086,18 +1020,22 @@ class Component(BaseComponent, ABC):
event_args.append(spec)
yield event_trigger, event_args

- def _get_vars(self, include_children: bool = False) -> list[Var]:
+ def _get_vars(
+ self, include_children: bool = False, ignore_ids: set[int] | None = None
+ ) -> Iterator[Var]:
"""Walk all Vars used in this component.

Args:
include_children: Whether to include Vars from children.
+ ignore_ids: The ids to ignore.

- Returns:
+ Yields:
Each var referenced by the component (props, styles, event handlers).
"""
- vars = getattr(self, "__vars", None)
+ ignore_ids = ignore_ids or set()
+ vars: List[Var] | None = getattr(self, "__vars", None)
if vars is not None:
- return vars
+ yield from vars
vars = self.__vars = []
# Get Vars associated with event trigger arguments.
for _, event_vars in self._get_vars_from_event_triggers(self.event_triggers):
@@ -1141,12 +1079,15 @@ class Component(BaseComponent, ABC):
# Get Vars associated with children.
if include_children:
for child in self.children:
- if not isinstance(child, Component):
+ if not isinstance(child, Component) or id(child) in ignore_ids:
continue
- child_vars = child._get_vars(include_children=include_children)
+ ignore_ids.add(id(child))
+ child_vars = child._get_vars(
+ include_children=include_children, ignore_ids=ignore_ids
+ )
vars.extend(child_vars)

- return vars
+ yield from vars

def _event_trigger_values_use_state(self) -> bool:
"""Check if the values of a component's event trigger use state.
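A standalone sketch of the id-based de-duplication pattern this hunk introduces (names invented, not the reflex API): a shared child is walked at most once even when it appears under several parents:

from __future__ import annotations
from typing import Iterator

class Node:
    def __init__(self, name: str, children: list["Node"] | None = None):
        self.name = name
        self.children = children or []

    def walk(self, ignore_ids: set[int] | None = None) -> Iterator[str]:
        # same pattern as Component._get_vars: skip ids we already visited
        ignore_ids = ignore_ids or set()
        yield self.name
        for child in self.children:
            if id(child) in ignore_ids:
                continue
            ignore_ids.add(id(child))
            yield from child.walk(ignore_ids=ignore_ids)

shared = Node("shared")
root = Node("root", [Node("a", [shared]), Node("b", [shared])])
print(list(root.walk()))  # ['root', 'a', 'shared', 'b'] - shared is visited once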
@@ -1338,7 +1279,7 @@ class Component(BaseComponent, ABC):
"""
_imports = {}

- if self._get_ref_hook():
+ if self._get_ref_hook() is not None:
# Handle hooks needed for attaching react refs to DOM nodes.
_imports.setdefault("react", set()).add(ImportVar(tag="useRef"))
_imports.setdefault(f"$/{Dirs.STATE_PATH}", set()).add(

@@ -1454,7 +1395,7 @@ class Component(BaseComponent, ABC):
}}
}}, []);"""

- def _get_ref_hook(self) -> str | None:
+ def _get_ref_hook(self) -> Var | None:
"""Generate the ref hook for the component.

Returns:
@@ -1462,11 +1403,12 @@ class Component(BaseComponent, ABC):
"""
ref = self.get_ref()
if ref is not None:
- return (
- f"const {ref} = useRef(null); {Var(_js_expr=ref)._as_ref()!s} = {ref};"
+ return Var(
+ f"const {ref} = useRef(null); {Var(_js_expr=ref)._as_ref()!s} = {ref};",
+ _var_data=VarData(position=Hooks.HookPosition.INTERNAL),
)

- def _get_vars_hooks(self) -> dict[str, None]:
+ def _get_vars_hooks(self) -> dict[str, VarData | None]:
"""Get the hooks required by vars referenced in this component.

Returns:
@@ -1479,27 +1421,38 @@ class Component(BaseComponent, ABC):
vars_hooks.update(
var_data.hooks
if isinstance(var_data.hooks, dict)
- else {k: None for k in var_data.hooks}
+ else {
+ k: VarData(position=Hooks.HookPosition.INTERNAL)
+ for k in var_data.hooks
+ }
)
return vars_hooks

- def _get_events_hooks(self) -> dict[str, None]:
+ def _get_events_hooks(self) -> dict[str, VarData | None]:
"""Get the hooks required by events referenced in this component.

Returns:
The hooks for the events.
"""
- return {Hooks.EVENTS: None} if self.event_triggers else {}
+ return (
+ {Hooks.EVENTS: VarData(position=Hooks.HookPosition.INTERNAL)}
+ if self.event_triggers
+ else {}
+ )

- def _get_special_hooks(self) -> dict[str, None]:
+ def _get_special_hooks(self) -> dict[str, VarData | None]:
"""Get the hooks required by special actions referenced in this component.

Returns:
The hooks for special actions.
"""
- return {Hooks.AUTOFOCUS: None} if self.autofocus else {}
+ return (
+ {Hooks.AUTOFOCUS: VarData(position=Hooks.HookPosition.INTERNAL)}
+ if self.autofocus
+ else {}
+ )

- def _get_hooks_internal(self) -> dict[str, None]:
+ def _get_hooks_internal(self) -> dict[str, VarData | None]:
"""Get the React hooks for this component managed by the framework.

Downstream components should NOT override this method to avoid breaking
@@ -1510,7 +1463,7 @@ class Component(BaseComponent, ABC):
"""
return {
**{
- hook: None
+ str(hook): VarData(position=Hooks.HookPosition.INTERNAL)
for hook in [self._get_ref_hook(), self._get_mount_lifecycle_hook()]
if hook is not None
},

@@ -1559,7 +1512,7 @@ class Component(BaseComponent, ABC):
"""
return

- def _get_all_hooks_internal(self) -> dict[str, None]:
+ def _get_all_hooks_internal(self) -> dict[str, VarData | None]:
"""Get the reflex internal hooks for the component and its children.

Returns:

@@ -1574,7 +1527,7 @@ class Component(BaseComponent, ABC):

return code

- def _get_all_hooks(self) -> dict[str, None]:
+ def _get_all_hooks(self) -> dict[str, VarData | None]:
"""Get the React hooks for this component and its children.

Returns:
@@ -1582,6 +1535,9 @@ class Component(BaseComponent, ABC):
"""
code = {}

+ # Add the internal hooks for this component.
+ code.update(self._get_hooks_internal())
+
# Add the hook code for this component.
hooks = self._get_hooks()
if hooks is not None:
@@ -1737,7 +1693,7 @@ class CustomComponent(Component):

# Handle event chains.
if types._issubclass(type_, EventChain):
- value = self._create_event_chain(
+ value = EventChain.create(
value=value,
args_spec=event_triggers_in_component_declaration.get(
key, no_args_event_spec
@@ -1862,19 +1818,25 @@ class CustomComponent(Component):
for name, prop in self.props.items()
]

- def _get_vars(self, include_children: bool = False) -> list[Var]:
+ def _get_vars(
+ self, include_children: bool = False, ignore_ids: set[int] | None = None
+ ) -> Iterator[Var]:
"""Walk all Vars used in this component.

Args:
include_children: Whether to include Vars from children.
+ ignore_ids: The ids to ignore.

- Returns:
+ Yields:
Each var referenced by the component (props, styles, event handlers).
"""
- return (
- super()._get_vars(include_children=include_children)
- + [prop for prop in self.props.values() if isinstance(prop, Var)]
- + self.get_component(self)._get_vars(include_children=include_children)
- )
+ ignore_ids = ignore_ids or set()
+ yield from super()._get_vars(
+ include_children=include_children, ignore_ids=ignore_ids
+ )
+ yield from filter(lambda prop: isinstance(prop, Var), self.props.values())
+ yield from self.get_component(self)._get_vars(
+ include_children=include_children, ignore_ids=ignore_ids
+ )

@lru_cache(maxsize=None) # noqa
@@ -2277,7 +2239,7 @@ class StatefulComponent(BaseComponent):
)
return trigger_memo

- def _get_all_hooks_internal(self) -> dict[str, None]:
+ def _get_all_hooks_internal(self) -> dict[str, VarData | None]:
"""Get the reflex internal hooks for the component and its children.

Returns:

@@ -2285,7 +2247,7 @@ class StatefulComponent(BaseComponent):
"""
return {}

- def _get_all_hooks(self) -> dict[str, None]:
+ def _get_all_hooks(self) -> dict[str, VarData | None]:
"""Get the React hooks for this component.

Returns:

@@ -2403,7 +2365,7 @@ class MemoizationLeaf(Component):
The memoization leaf
"""
comp = super().create(*children, **props)
- if comp._get_all_hooks() or comp._get_all_hooks_internal():
+ if comp._get_all_hooks():
comp._memoization_mode = cls._memoization_mode.copy(
update={"disposition": MemoizationDisposition.ALWAYS}
)
@@ -241,7 +241,7 @@ class WifiOffPulse(Icon):
size=props.pop("size", 32),
z_index=props.pop("z_index", 9999),
position=props.pop("position", "fixed"),
- bottom=props.pop("botton", "33px"),
+ bottom=props.pop("bottom", "33px"),
right=props.pop("right", "33px"),
animation=LiteralVar.create(f"{pulse_var} 1s infinite"),
**props,

@@ -58,7 +58,7 @@ class Breakpoints(Dict[K, V]):

Args:
custom: Custom mapping using CSS values or variables.
- initial: Styling when in the inital width
+ initial: Styling when in the initial width
xs: Styling when in the extra-small width
sm: Styling when in the small width
md: Styling when in the medium width
@@ -445,7 +445,7 @@ class CodeBlock(Component, MarkdownComponentMap):
dark=Theme.one_dark,
)

- # react-syntax-highlighter doesnt have an explicit "light" or "dark" theme so we use one-light and one-dark
+ # react-syntax-highlighter doesn't have an explicit "light" or "dark" theme so we use one-light and one-dark
# themes respectively to ensure code compatibility.
if "theme" in props and not isinstance(props["theme"], Var):
props["theme"] = getattr(Theme, format.to_snake_case(props["theme"])) # type: ignore

@@ -502,8 +502,8 @@ class CodeBlock(Component, MarkdownComponentMap):

theme = self.theme

- out.add_props(style=theme).remove_props("theme", "code", "language").add_props(
- children=self.code, language=_LANGUAGE
+ out.add_props(style=theme).remove_props("theme", "code").add_props(
+ children=self.code,
)

return out

@@ -512,20 +512,25 @@ class CodeBlock(Component, MarkdownComponentMap):
return ["can_copy", "copy_button"]

@classmethod
- def _get_language_registration_hook(cls) -> str:
+ def _get_language_registration_hook(cls, language_var: Var = _LANGUAGE) -> str:
"""Get the hook to register the language.

+ Args:
+ language_var: The const/literal Var of the language module to import.
+ For markdown, uses the default placeholder _LANGUAGE. For direct use,
+ a LiteralStringVar should be passed via the language prop.
+
Returns:
The hook to register the language.
"""
return f"""
- if ({_LANGUAGE!s}) {{
+ if ({language_var!s}) {{
(async () => {{
try {{
- const module = await import(`react-syntax-highlighter/dist/cjs/languages/prism/${{{_LANGUAGE!s}}}`);
- SyntaxHighlighter.registerLanguage({_LANGUAGE!s}, module.default);
+ const module = await import(`react-syntax-highlighter/dist/cjs/languages/prism/${{{language_var!s}}}`);
+ SyntaxHighlighter.registerLanguage({language_var!s}, module.default);
}} catch (error) {{
- console.error(`Error importing language module for ${{{_LANGUAGE!s}}}:`, error);
+ console.error(`Error importing language module for ${{{language_var!s}}}:`, error);
}}
}})();
}}

@@ -547,8 +552,7 @@ class CodeBlock(Component, MarkdownComponentMap):
The hooks for the component.
"""
return [
f"const {_LANGUAGE!s} = {self.language!s}",
- self._get_language_registration_hook(),
+ self._get_language_registration_hook(language_var=self.language),
]
@@ -219,7 +219,7 @@ class DataEditor(NoSSRComponent):
# The minimum width a column can be resized to.
min_column_width: Var[int]

- # Determins the height of each row.
+ # Determines the height of each row.
row_height: Var[int]

# Kind of row markers.

@@ -291,7 +291,7 @@ class DataEditor(NoSSRComponent):
max_column_auto_width: The maximum width a column can be automatically sized to.
max_column_width: The maximum width a column can be resized to.
min_column_width: The minimum width a column can be resized to.
- row_height: Determins the height of each row.
+ row_height: Determines the height of each row.
row_markers: Kind of row markers.
row_marker_start_index: Changes the starting index for row markers.
row_marker_width: Sets the width of row markers in pixels, if unset row markers will automatically size.
@ -182,9 +182,7 @@ class Form(BaseHTML):
|
||||
props["handle_submit_unique_name"] = ""
|
||||
form = super().create(*children, **props)
|
||||
form.handle_submit_unique_name = md5(
|
||||
str({**form._get_all_hooks_internal(), **form._get_all_hooks()}).encode(
|
||||
"utf-8"
|
||||
)
|
||||
str(form._get_all_hooks()).encode("utf-8")
|
||||
).hexdigest()
|
||||
return form
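A minimal sketch of the hashing change above, using a hypothetical hooks mapping: the per-form unique suffix is now derived from the rendered hooks alone, with the internal hooks no longer contributing to the hash.

from hashlib import md5

hooks = {"const refs_form = useRef(null)": None}  # hypothetical hooks mapping
unique_suffix = md5(str(hooks).encode("utf-8")).hexdigest()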

@ -252,8 +250,12 @@ class Form(BaseHTML):
)
return form_refs

def _get_vars(self, include_children: bool = True) -> Iterator[Var]:
yield from super()._get_vars(include_children=include_children)
def _get_vars(
self, include_children: bool = True, ignore_ids: set[int] | None = None
) -> Iterator[Var]:
yield from super()._get_vars(
include_children=include_children, ignore_ids=ignore_ids
)
yield from self._get_form_refs().values()

def _exclude_props(self) -> list[str]:
@ -81,7 +81,7 @@ class Title(Element):
tag = "title"


# Had to be named with an underscore so it doesnt conflict with reflex.style Style in pyi
# Had to be named with an underscore so it doesn't conflict with reflex.style Style in pyi
class StyleEl(Element):
"""Display the style element."""

@ -8,7 +8,7 @@ from reflex.vars.base import Var
class LucideIconComponent(Component):
"""Lucide Icon Component."""

library = "lucide-react@0.359.0"
library = "lucide-react@0.469.0"


class Icon(LucideIconComponent):
@ -56,7 +56,12 @@ class Icon(LucideIconComponent):
"\nSee full list at https://lucide.dev/icons."
)

props["tag"] = format.to_title_case(format.to_snake_case(props["tag"])) + "Icon"
if props["tag"] in LUCIDE_ICON_MAPPING_OVERRIDE:
props["tag"] = LUCIDE_ICON_MAPPING_OVERRIDE[props["tag"]]
else:
props["tag"] = (
format.to_title_case(format.to_snake_case(props["tag"])) + "Icon"
)
props["alias"] = f"Lucide{props['tag']}"
props.setdefault("color", "var(--current-color)")
return super().create(*children, **props)
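A short usage sketch of the override branch above (rx.icon is the public wrapper; the size prop is incidental): most tags are title-cased and suffixed with "Icon", while names listed in LUCIDE_ICON_MAPPING_OVERRIDE, defined later in this diff, map straight to hand-written component names.

import reflex as rx

regular = rx.icon("calendar_sync", size=24)  # tag becomes "CalendarSyncIcon"
special = rx.icon("grid_2x_2_check")         # override maps it to "Grid2x2Check"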
@ -106,6 +111,7 @@ LUCIDE_ICON_LIST = [
|
||||
"ambulance",
|
||||
"ampersand",
|
||||
"ampersands",
|
||||
"amphora",
|
||||
"anchor",
|
||||
"angry",
|
||||
"annoyed",
|
||||
@ -193,6 +199,7 @@ LUCIDE_ICON_LIST = [
|
||||
"baggage_claim",
|
||||
"ban",
|
||||
"banana",
|
||||
"bandage",
|
||||
"banknote",
|
||||
"bar_chart",
|
||||
"bar_chart_2",
|
||||
@ -230,8 +237,10 @@ LUCIDE_ICON_LIST = [
|
||||
"between_horizontal_start",
|
||||
"between_vertical_end",
|
||||
"between_vertical_start",
|
||||
"biceps_flexed",
|
||||
"bike",
|
||||
"binary",
|
||||
"binoculars",
|
||||
"biohazard",
|
||||
"bird",
|
||||
"bitcoin",
|
||||
@ -278,6 +287,7 @@ LUCIDE_ICON_LIST = [
|
||||
"boom_box",
|
||||
"bot",
|
||||
"bot_message_square",
|
||||
"bot_off",
|
||||
"box",
|
||||
"box_select",
|
||||
"boxes",
|
||||
@ -289,6 +299,7 @@ LUCIDE_ICON_LIST = [
|
||||
"brick_wall",
|
||||
"briefcase",
|
||||
"briefcase_business",
|
||||
"briefcase_conveyor_belt",
|
||||
"briefcase_medical",
|
||||
"bring_to_front",
|
||||
"brush",
|
||||
@ -305,9 +316,13 @@ LUCIDE_ICON_LIST = [
|
||||
"cake_slice",
|
||||
"calculator",
|
||||
"calendar",
|
||||
"calendar_1",
|
||||
"calendar_arrow_down",
|
||||
"calendar_arrow_up",
|
||||
"calendar_check",
|
||||
"calendar_check_2",
|
||||
"calendar_clock",
|
||||
"calendar_cog",
|
||||
"calendar_days",
|
||||
"calendar_fold",
|
||||
"calendar_heart",
|
||||
@ -318,6 +333,7 @@ LUCIDE_ICON_LIST = [
|
||||
"calendar_plus_2",
|
||||
"calendar_range",
|
||||
"calendar_search",
|
||||
"calendar_sync",
|
||||
"calendar_x",
|
||||
"calendar_x_2",
|
||||
"camera",
|
||||
@ -342,6 +358,29 @@ LUCIDE_ICON_LIST = [
|
||||
"castle",
|
||||
"cat",
|
||||
"cctv",
|
||||
"chart_area",
|
||||
"chart_bar",
|
||||
"chart_bar_big",
|
||||
"chart_bar_decreasing",
|
||||
"chart_bar_increasing",
|
||||
"chart_bar_stacked",
|
||||
"chart_candlestick",
|
||||
"chart_column",
|
||||
"chart_column_big",
|
||||
"chart_column_decreasing",
|
||||
"chart_column_increasing",
|
||||
"chart_column_stacked",
|
||||
"chart_gantt",
|
||||
"chart_line",
|
||||
"chart_network",
|
||||
"chart_no_axes_column",
|
||||
"chart_no_axes_column_decreasing",
|
||||
"chart_no_axes_column_increasing",
|
||||
"chart_no_axes_combined",
|
||||
"chart_no_axes_gantt",
|
||||
"chart_pie",
|
||||
"chart_scatter",
|
||||
"chart_spline",
|
||||
"check",
|
||||
"check_check",
|
||||
"chef_hat",
|
||||
@ -356,6 +395,7 @@ LUCIDE_ICON_LIST = [
|
||||
"chevrons_down_up",
|
||||
"chevrons_left",
|
||||
"chevrons_left_right",
|
||||
"chevrons_left_right_ellipsis",
|
||||
"chevrons_right",
|
||||
"chevrons_right_left",
|
||||
"chevrons_up",
|
||||
@ -374,8 +414,8 @@ LUCIDE_ICON_LIST = [
|
||||
"circle_arrow_out_up_right",
|
||||
"circle_arrow_right",
|
||||
"circle_arrow_up",
|
||||
"circle_check_big",
|
||||
"circle_check",
|
||||
"circle_check_big",
|
||||
"circle_chevron_down",
|
||||
"circle_chevron_left",
|
||||
"circle_chevron_right",
|
||||
@ -387,13 +427,14 @@ LUCIDE_ICON_LIST = [
|
||||
"circle_dot_dashed",
|
||||
"circle_ellipsis",
|
||||
"circle_equal",
|
||||
"circle_fading_arrow_up",
|
||||
"circle_fading_plus",
|
||||
"circle_gauge",
|
||||
"circle_help",
|
||||
"circle_minus",
|
||||
"circle_off",
|
||||
"circle_parking_off",
|
||||
"circle_parking",
|
||||
"circle_parking_off",
|
||||
"circle_pause",
|
||||
"circle_percent",
|
||||
"circle_play",
|
||||
@ -432,7 +473,11 @@ LUCIDE_ICON_LIST = [
|
||||
"clock_7",
|
||||
"clock_8",
|
||||
"clock_9",
|
||||
"clock_alert",
|
||||
"clock_arrow_down",
|
||||
"clock_arrow_up",
|
||||
"cloud",
|
||||
"cloud_alert",
|
||||
"cloud_cog",
|
||||
"cloud_download",
|
||||
"cloud_drizzle",
|
||||
@ -503,6 +548,7 @@ LUCIDE_ICON_LIST = [
|
||||
"cup_soda",
|
||||
"currency",
|
||||
"cylinder",
|
||||
"dam",
|
||||
"database",
|
||||
"database_backup",
|
||||
"database_zap",
|
||||
@ -510,7 +556,9 @@ LUCIDE_ICON_LIST = [
|
||||
"dessert",
|
||||
"diameter",
|
||||
"diamond",
|
||||
"diamond_minus",
|
||||
"diamond_percent",
|
||||
"diamond_plus",
|
||||
"dice_1",
|
||||
"dice_2",
|
||||
"dice_3",
|
||||
@ -539,6 +587,7 @@ LUCIDE_ICON_LIST = [
|
||||
"dribbble",
|
||||
"drill",
|
||||
"droplet",
|
||||
"droplet_off",
|
||||
"droplets",
|
||||
"drum",
|
||||
"drumstick",
|
||||
@ -554,12 +603,15 @@ LUCIDE_ICON_LIST = [
|
||||
"ellipsis",
|
||||
"ellipsis_vertical",
|
||||
"equal",
|
||||
"equal_approximately",
|
||||
"equal_not",
|
||||
"eraser",
|
||||
"ethernet_port",
|
||||
"euro",
|
||||
"expand",
|
||||
"external_link",
|
||||
"eye",
|
||||
"eye_closed",
|
||||
"eye_off",
|
||||
"facebook",
|
||||
"factory",
|
||||
@ -579,6 +631,10 @@ LUCIDE_ICON_LIST = [
|
||||
"file_bar_chart",
|
||||
"file_bar_chart_2",
|
||||
"file_box",
|
||||
"file_chart_column",
|
||||
"file_chart_column_increasing",
|
||||
"file_chart_line",
|
||||
"file_chart_pie",
|
||||
"file_check",
|
||||
"file_check_2",
|
||||
"file_clock",
|
||||
@ -620,6 +676,7 @@ LUCIDE_ICON_LIST = [
|
||||
"file_type",
|
||||
"file_type_2",
|
||||
"file_up",
|
||||
"file_user",
|
||||
"file_video",
|
||||
"file_video_2",
|
||||
"file_volume",
|
||||
@ -661,6 +718,7 @@ LUCIDE_ICON_LIST = [
|
||||
"folder_check",
|
||||
"folder_clock",
|
||||
"folder_closed",
|
||||
"folder_code",
|
||||
"folder_cog",
|
||||
"folder_dot",
|
||||
"folder_down",
|
||||
@ -733,7 +791,12 @@ LUCIDE_ICON_LIST = [
|
||||
"graduation_cap",
|
||||
"grape",
|
||||
"grid_2x2",
|
||||
"grid_2x_2",
|
||||
"grid_2x_2_check",
|
||||
"grid_2x_2_plus",
|
||||
"grid_2x_2_x",
|
||||
"grid_3x3",
|
||||
"grid_3x_3",
|
||||
"grip",
|
||||
"grip_horizontal",
|
||||
"grip_vertical",
|
||||
@ -762,6 +825,7 @@ LUCIDE_ICON_LIST = [
|
||||
"heading_4",
|
||||
"heading_5",
|
||||
"heading_6",
|
||||
"headphone_off",
|
||||
"headphones",
|
||||
"headset",
|
||||
"heart",
|
||||
@ -779,14 +843,20 @@ LUCIDE_ICON_LIST = [
|
||||
"hospital",
|
||||
"hotel",
|
||||
"hourglass",
|
||||
"house",
|
||||
"house_plug",
|
||||
"house_plus",
|
||||
"ice_cream_bowl",
|
||||
"ice_cream_cone",
|
||||
"id_card",
|
||||
"image",
|
||||
"image_down",
|
||||
"image_minus",
|
||||
"image_off",
|
||||
"image_play",
|
||||
"image_plus",
|
||||
"image_up",
|
||||
"image_upscale",
|
||||
"images",
|
||||
"import",
|
||||
"inbox",
|
||||
@ -808,6 +878,7 @@ LUCIDE_ICON_LIST = [
|
||||
"key_square",
|
||||
"keyboard",
|
||||
"keyboard_music",
|
||||
"keyboard_off",
|
||||
"lamp",
|
||||
"lamp_ceiling",
|
||||
"lamp_desk",
|
||||
@ -817,8 +888,9 @@ LUCIDE_ICON_LIST = [
|
||||
"land_plot",
|
||||
"landmark",
|
||||
"languages",
|
||||
"laptop_minimal",
|
||||
"laptop",
|
||||
"laptop_minimal",
|
||||
"laptop_minimal_check",
|
||||
"lasso",
|
||||
"lasso_select",
|
||||
"laugh",
|
||||
@ -833,6 +905,8 @@ LUCIDE_ICON_LIST = [
|
||||
"layout_template",
|
||||
"leaf",
|
||||
"leafy_green",
|
||||
"lectern",
|
||||
"letter_text",
|
||||
"library",
|
||||
"library_big",
|
||||
"life_buoy",
|
||||
@ -845,10 +919,12 @@ LUCIDE_ICON_LIST = [
|
||||
"link_2_off",
|
||||
"linkedin",
|
||||
"list",
|
||||
"list_check",
|
||||
"list_checks",
|
||||
"list_collapse",
|
||||
"list_end",
|
||||
"list_filter",
|
||||
"list_filter_plus",
|
||||
"list_minus",
|
||||
"list_music",
|
||||
"list_ordered",
|
||||
@ -861,15 +937,17 @@ LUCIDE_ICON_LIST = [
|
||||
"list_x",
|
||||
"loader",
|
||||
"loader_circle",
|
||||
"loader_pinwheel",
|
||||
"locate",
|
||||
"locate_fixed",
|
||||
"locate_off",
|
||||
"lock",
|
||||
"lock_keyhole_open",
|
||||
"lock_keyhole",
|
||||
"lock_keyhole_open",
|
||||
"lock_open",
|
||||
"log_in",
|
||||
"log_out",
|
||||
"logs",
|
||||
"lollipop",
|
||||
"luggage",
|
||||
"magnet",
|
||||
@ -886,7 +964,16 @@ LUCIDE_ICON_LIST = [
|
||||
"mails",
|
||||
"map",
|
||||
"map_pin",
|
||||
"map_pin_check",
|
||||
"map_pin_check_inside",
|
||||
"map_pin_house",
|
||||
"map_pin_minus",
|
||||
"map_pin_minus_inside",
|
||||
"map_pin_off",
|
||||
"map_pin_plus",
|
||||
"map_pin_plus_inside",
|
||||
"map_pin_x",
|
||||
"map_pin_x_inside",
|
||||
"map_pinned",
|
||||
"martini",
|
||||
"maximize",
|
||||
@ -915,6 +1002,7 @@ LUCIDE_ICON_LIST = [
|
||||
"message_square_diff",
|
||||
"message_square_dot",
|
||||
"message_square_heart",
|
||||
"message_square_lock",
|
||||
"message_square_more",
|
||||
"message_square_off",
|
||||
"message_square_plus",
|
||||
@ -926,8 +1014,9 @@ LUCIDE_ICON_LIST = [
|
||||
"message_square_x",
|
||||
"messages_square",
|
||||
"mic",
|
||||
"mic_vocal",
|
||||
"mic_off",
|
||||
"mic_vocal",
|
||||
"microchip",
|
||||
"microscope",
|
||||
"microwave",
|
||||
"milestone",
|
||||
@ -938,6 +1027,7 @@ LUCIDE_ICON_LIST = [
|
||||
"minus",
|
||||
"monitor",
|
||||
"monitor_check",
|
||||
"monitor_cog",
|
||||
"monitor_dot",
|
||||
"monitor_down",
|
||||
"monitor_off",
|
||||
@ -953,8 +1043,10 @@ LUCIDE_ICON_LIST = [
|
||||
"mountain",
|
||||
"mountain_snow",
|
||||
"mouse",
|
||||
"mouse_off",
|
||||
"mouse_pointer",
|
||||
"mouse_pointer_2",
|
||||
"mouse_pointer_ban",
|
||||
"mouse_pointer_click",
|
||||
"move",
|
||||
"move_3d",
|
||||
@ -991,10 +1083,13 @@ LUCIDE_ICON_LIST = [
|
||||
"nut_off",
|
||||
"octagon",
|
||||
"octagon_alert",
|
||||
"octagon_minus",
|
||||
"octagon_pause",
|
||||
"octagon_x",
|
||||
"omega",
|
||||
"option",
|
||||
"orbit",
|
||||
"origami",
|
||||
"package",
|
||||
"package_2",
|
||||
"package_check",
|
||||
@ -1007,6 +1102,7 @@ LUCIDE_ICON_LIST = [
|
||||
"paint_roller",
|
||||
"paintbrush",
|
||||
"paintbrush_2",
|
||||
"paintbrush_vertical",
|
||||
"palette",
|
||||
"panel_bottom",
|
||||
"panel_bottom_close",
|
||||
@ -1036,13 +1132,16 @@ LUCIDE_ICON_LIST = [
|
||||
"pc_case",
|
||||
"pen",
|
||||
"pen_line",
|
||||
"pen_off",
|
||||
"pen_tool",
|
||||
"pencil",
|
||||
"pencil_line",
|
||||
"pencil_off",
|
||||
"pencil_ruler",
|
||||
"pentagon",
|
||||
"percent",
|
||||
"person_standing",
|
||||
"philippine_peso",
|
||||
"phone",
|
||||
"phone_call",
|
||||
"phone_forwarded",
|
||||
@ -1058,7 +1157,10 @@ LUCIDE_ICON_LIST = [
|
||||
"pie_chart",
|
||||
"piggy_bank",
|
||||
"pilcrow",
|
||||
"pilcrow_left",
|
||||
"pilcrow_right",
|
||||
"pill",
|
||||
"pill_bottle",
|
||||
"pin",
|
||||
"pin_off",
|
||||
"pipette",
|
||||
@ -1084,6 +1186,7 @@ LUCIDE_ICON_LIST = [
|
||||
"power_off",
|
||||
"presentation",
|
||||
"printer",
|
||||
"printer_check",
|
||||
"projector",
|
||||
"proportions",
|
||||
"puzzle",
|
||||
@ -1158,6 +1261,7 @@ LUCIDE_ICON_LIST = [
|
||||
"satellite_dish",
|
||||
"save",
|
||||
"save_all",
|
||||
"save_off",
|
||||
"scale",
|
||||
"scale_3d",
|
||||
"scaling",
|
||||
@ -1165,7 +1269,9 @@ LUCIDE_ICON_LIST = [
|
||||
"scan_barcode",
|
||||
"scan_eye",
|
||||
"scan_face",
|
||||
"scan_heart",
|
||||
"scan_line",
|
||||
"scan_qr_code",
|
||||
"scan_search",
|
||||
"scan_text",
|
||||
"scatter_chart",
|
||||
@ -1181,6 +1287,7 @@ LUCIDE_ICON_LIST = [
|
||||
"search_code",
|
||||
"search_slash",
|
||||
"search_x",
|
||||
"section",
|
||||
"send",
|
||||
"send_horizontal",
|
||||
"send_to_back",
|
||||
@ -1225,6 +1332,7 @@ LUCIDE_ICON_LIST = [
|
||||
"signal_low",
|
||||
"signal_medium",
|
||||
"signal_zero",
|
||||
"signature",
|
||||
"signpost",
|
||||
"signpost_big",
|
||||
"siren",
|
||||
@ -1234,8 +1342,8 @@ LUCIDE_ICON_LIST = [
|
||||
"slack",
|
||||
"slash",
|
||||
"slice",
|
||||
"sliders_vertical",
|
||||
"sliders_horizontal",
|
||||
"sliders_vertical",
|
||||
"smartphone",
|
||||
"smartphone_charging",
|
||||
"smartphone_nfc",
|
||||
@ -1259,29 +1367,31 @@ LUCIDE_ICON_LIST = [
|
||||
"sprout",
|
||||
"square",
|
||||
"square_activity",
|
||||
"square_arrow_down",
|
||||
"square_arrow_down_left",
|
||||
"square_arrow_down_right",
|
||||
"square_arrow_down",
|
||||
"square_arrow_left",
|
||||
"square_arrow_out_down_left",
|
||||
"square_arrow_out_down_right",
|
||||
"square_arrow_out_up_left",
|
||||
"square_arrow_out_up_right",
|
||||
"square_arrow_right",
|
||||
"square_arrow_up",
|
||||
"square_arrow_up_left",
|
||||
"square_arrow_up_right",
|
||||
"square_arrow_up",
|
||||
"square_asterisk",
|
||||
"square_bottom_dashed_scissors",
|
||||
"square_check_big",
|
||||
"square_chart_gantt",
|
||||
"square_check",
|
||||
"square_check_big",
|
||||
"square_chevron_down",
|
||||
"square_chevron_left",
|
||||
"square_chevron_right",
|
||||
"square_chevron_up",
|
||||
"square_code",
|
||||
"square_dashed_bottom_code",
|
||||
"square_dashed",
|
||||
"square_dashed_bottom",
|
||||
"square_dashed_bottom_code",
|
||||
"square_dashed_kanban",
|
||||
"square_dashed_mouse_pointer",
|
||||
"square_divide",
|
||||
@ -1295,8 +1405,8 @@ LUCIDE_ICON_LIST = [
|
||||
"square_menu",
|
||||
"square_minus",
|
||||
"square_mouse_pointer",
|
||||
"square_parking_off",
|
||||
"square_parking",
|
||||
"square_parking_off",
|
||||
"square_pen",
|
||||
"square_percent",
|
||||
"square_pi",
|
||||
@ -1310,10 +1420,11 @@ LUCIDE_ICON_LIST = [
|
||||
"square_slash",
|
||||
"square_split_horizontal",
|
||||
"square_split_vertical",
|
||||
"square_square",
|
||||
"square_stack",
|
||||
"square_terminal",
|
||||
"square_user_round",
|
||||
"square_user",
|
||||
"square_user_round",
|
||||
"square_x",
|
||||
"squircle",
|
||||
"squirrel",
|
||||
@ -1350,6 +1461,7 @@ LUCIDE_ICON_LIST = [
|
||||
"table_cells_merge",
|
||||
"table_cells_split",
|
||||
"table_columns_split",
|
||||
"table_of_contents",
|
||||
"table_properties",
|
||||
"table_rows_split",
|
||||
"tablet",
|
||||
@ -1365,11 +1477,11 @@ LUCIDE_ICON_LIST = [
|
||||
"tangent",
|
||||
"target",
|
||||
"telescope",
|
||||
"tent",
|
||||
"tent_tree",
|
||||
"terminal",
|
||||
"test_tube_diagonal",
|
||||
"test_tube",
|
||||
"tent",
|
||||
"test_tube_diagonal",
|
||||
"test_tubes",
|
||||
"text",
|
||||
"text_cursor",
|
||||
@ -1390,11 +1502,14 @@ LUCIDE_ICON_LIST = [
|
||||
"ticket_plus",
|
||||
"ticket_slash",
|
||||
"ticket_x",
|
||||
"tickets",
|
||||
"tickets_plane",
|
||||
"timer",
|
||||
"timer_off",
|
||||
"timer_reset",
|
||||
"toggle_left",
|
||||
"toggle_right",
|
||||
"toilet",
|
||||
"tornado",
|
||||
"torus",
|
||||
"touchpad",
|
||||
@ -1416,17 +1531,21 @@ LUCIDE_ICON_LIST = [
|
||||
"trello",
|
||||
"trending_down",
|
||||
"trending_up",
|
||||
"trending_up_down",
|
||||
"triangle",
|
||||
"triangle_right",
|
||||
"triangle_alert",
|
||||
"triangle_right",
|
||||
"trophy",
|
||||
"truck",
|
||||
"turtle",
|
||||
"tv",
|
||||
"tv_2",
|
||||
"tv_minimal",
|
||||
"tv_minimal_play",
|
||||
"twitch",
|
||||
"twitter",
|
||||
"type",
|
||||
"type_outline",
|
||||
"umbrella",
|
||||
"umbrella_off",
|
||||
"underline",
|
||||
@ -1437,8 +1556,8 @@ LUCIDE_ICON_LIST = [
|
||||
"unfold_vertical",
|
||||
"ungroup",
|
||||
"university",
|
||||
"unlink_2",
|
||||
"unlink",
|
||||
"unlink_2",
|
||||
"unplug",
|
||||
"upload",
|
||||
"usb",
|
||||
@ -1446,11 +1565,13 @@ LUCIDE_ICON_LIST = [
|
||||
"user_check",
|
||||
"user_cog",
|
||||
"user_minus",
|
||||
"user_pen",
|
||||
"user_plus",
|
||||
"user_round",
|
||||
"user_round_check",
|
||||
"user_round_cog",
|
||||
"user_round_minus",
|
||||
"user_round_pen",
|
||||
"user_round_plus",
|
||||
"user_round_search",
|
||||
"user_round_x",
|
||||
@ -1472,14 +1593,16 @@ LUCIDE_ICON_LIST = [
|
||||
"videotape",
|
||||
"view",
|
||||
"voicemail",
|
||||
"volleyball",
|
||||
"volume",
|
||||
"volume_1",
|
||||
"volume_2",
|
||||
"volume_off",
|
||||
"volume_x",
|
||||
"vote",
|
||||
"wallet",
|
||||
"wallet_minimal",
|
||||
"wallet_cards",
|
||||
"wallet_minimal",
|
||||
"wallpaper",
|
||||
"wand",
|
||||
"wand_sparkles",
|
||||
@ -1487,17 +1610,22 @@ LUCIDE_ICON_LIST = [
|
||||
"washing_machine",
|
||||
"watch",
|
||||
"waves",
|
||||
"waves_ladder",
|
||||
"waypoints",
|
||||
"webcam",
|
||||
"webhook_off",
|
||||
"webhook",
|
||||
"webhook_off",
|
||||
"weight",
|
||||
"wheat",
|
||||
"wheat_off",
|
||||
"whole_word",
|
||||
"wifi",
|
||||
"wifi_high",
|
||||
"wifi_low",
|
||||
"wifi_off",
|
||||
"wifi_zero",
|
||||
"wind",
|
||||
"wind_arrow_down",
|
||||
"wine",
|
||||
"wine_off",
|
||||
"workflow",
|
||||
@ -1511,3 +1639,10 @@ LUCIDE_ICON_LIST = [
|
||||
"zoom_in",
|
||||
"zoom_out",
|
||||
]
|
||||
|
||||
# The default transformation of some icon names doesn't match how the
|
||||
# icons are exported from Lucide. Manual overrides can go here.
|
||||
LUCIDE_ICON_MAPPING_OVERRIDE = {
|
||||
"grid_2x_2_check": "Grid2x2Check",
|
||||
"grid_2x_2_x": "Grid2x2X",
|
||||
}
|
||||
|
@ -154,6 +154,7 @@ LUCIDE_ICON_LIST = [
|
||||
"ambulance",
|
||||
"ampersand",
|
||||
"ampersands",
|
||||
"amphora",
|
||||
"anchor",
|
||||
"angry",
|
||||
"annoyed",
|
||||
@ -241,6 +242,7 @@ LUCIDE_ICON_LIST = [
|
||||
"baggage_claim",
|
||||
"ban",
|
||||
"banana",
|
||||
"bandage",
|
||||
"banknote",
|
||||
"bar_chart",
|
||||
"bar_chart_2",
|
||||
@ -278,8 +280,10 @@ LUCIDE_ICON_LIST = [
|
||||
"between_horizontal_start",
|
||||
"between_vertical_end",
|
||||
"between_vertical_start",
|
||||
"biceps_flexed",
|
||||
"bike",
|
||||
"binary",
|
||||
"binoculars",
|
||||
"biohazard",
|
||||
"bird",
|
||||
"bitcoin",
|
||||
@ -326,6 +330,7 @@ LUCIDE_ICON_LIST = [
|
||||
"boom_box",
|
||||
"bot",
|
||||
"bot_message_square",
|
||||
"bot_off",
|
||||
"box",
|
||||
"box_select",
|
||||
"boxes",
|
||||
@ -337,6 +342,7 @@ LUCIDE_ICON_LIST = [
|
||||
"brick_wall",
|
||||
"briefcase",
|
||||
"briefcase_business",
|
||||
"briefcase_conveyor_belt",
|
||||
"briefcase_medical",
|
||||
"bring_to_front",
|
||||
"brush",
|
||||
@ -353,9 +359,13 @@ LUCIDE_ICON_LIST = [
|
||||
"cake_slice",
|
||||
"calculator",
|
||||
"calendar",
|
||||
"calendar_1",
|
||||
"calendar_arrow_down",
|
||||
"calendar_arrow_up",
|
||||
"calendar_check",
|
||||
"calendar_check_2",
|
||||
"calendar_clock",
|
||||
"calendar_cog",
|
||||
"calendar_days",
|
||||
"calendar_fold",
|
||||
"calendar_heart",
|
||||
@ -366,6 +376,7 @@ LUCIDE_ICON_LIST = [
|
||||
"calendar_plus_2",
|
||||
"calendar_range",
|
||||
"calendar_search",
|
||||
"calendar_sync",
|
||||
"calendar_x",
|
||||
"calendar_x_2",
|
||||
"camera",
|
||||
@ -390,6 +401,29 @@ LUCIDE_ICON_LIST = [
|
||||
"castle",
|
||||
"cat",
|
||||
"cctv",
|
||||
"chart_area",
|
||||
"chart_bar",
|
||||
"chart_bar_big",
|
||||
"chart_bar_decreasing",
|
||||
"chart_bar_increasing",
|
||||
"chart_bar_stacked",
|
||||
"chart_candlestick",
|
||||
"chart_column",
|
||||
"chart_column_big",
|
||||
"chart_column_decreasing",
|
||||
"chart_column_increasing",
|
||||
"chart_column_stacked",
|
||||
"chart_gantt",
|
||||
"chart_line",
|
||||
"chart_network",
|
||||
"chart_no_axes_column",
|
||||
"chart_no_axes_column_decreasing",
|
||||
"chart_no_axes_column_increasing",
|
||||
"chart_no_axes_combined",
|
||||
"chart_no_axes_gantt",
|
||||
"chart_pie",
|
||||
"chart_scatter",
|
||||
"chart_spline",
|
||||
"check",
|
||||
"check_check",
|
||||
"chef_hat",
|
||||
@ -404,6 +438,7 @@ LUCIDE_ICON_LIST = [
|
||||
"chevrons_down_up",
|
||||
"chevrons_left",
|
||||
"chevrons_left_right",
|
||||
"chevrons_left_right_ellipsis",
|
||||
"chevrons_right",
|
||||
"chevrons_right_left",
|
||||
"chevrons_up",
|
||||
@ -422,8 +457,8 @@ LUCIDE_ICON_LIST = [
|
||||
"circle_arrow_out_up_right",
|
||||
"circle_arrow_right",
|
||||
"circle_arrow_up",
|
||||
"circle_check_big",
|
||||
"circle_check",
|
||||
"circle_check_big",
|
||||
"circle_chevron_down",
|
||||
"circle_chevron_left",
|
||||
"circle_chevron_right",
|
||||
@ -435,13 +470,14 @@ LUCIDE_ICON_LIST = [
|
||||
"circle_dot_dashed",
|
||||
"circle_ellipsis",
|
||||
"circle_equal",
|
||||
"circle_fading_arrow_up",
|
||||
"circle_fading_plus",
|
||||
"circle_gauge",
|
||||
"circle_help",
|
||||
"circle_minus",
|
||||
"circle_off",
|
||||
"circle_parking_off",
|
||||
"circle_parking",
|
||||
"circle_parking_off",
|
||||
"circle_pause",
|
||||
"circle_percent",
|
||||
"circle_play",
|
||||
@ -480,7 +516,11 @@ LUCIDE_ICON_LIST = [
|
||||
"clock_7",
|
||||
"clock_8",
|
||||
"clock_9",
|
||||
"clock_alert",
|
||||
"clock_arrow_down",
|
||||
"clock_arrow_up",
|
||||
"cloud",
|
||||
"cloud_alert",
|
||||
"cloud_cog",
|
||||
"cloud_download",
|
||||
"cloud_drizzle",
|
||||
@ -551,6 +591,7 @@ LUCIDE_ICON_LIST = [
|
||||
"cup_soda",
|
||||
"currency",
|
||||
"cylinder",
|
||||
"dam",
|
||||
"database",
|
||||
"database_backup",
|
||||
"database_zap",
|
||||
@ -558,7 +599,9 @@ LUCIDE_ICON_LIST = [
|
||||
"dessert",
|
||||
"diameter",
|
||||
"diamond",
|
||||
"diamond_minus",
|
||||
"diamond_percent",
|
||||
"diamond_plus",
|
||||
"dice_1",
|
||||
"dice_2",
|
||||
"dice_3",
|
||||
@ -587,6 +630,7 @@ LUCIDE_ICON_LIST = [
|
||||
"dribbble",
|
||||
"drill",
|
||||
"droplet",
|
||||
"droplet_off",
|
||||
"droplets",
|
||||
"drum",
|
||||
"drumstick",
|
||||
@ -602,12 +646,15 @@ LUCIDE_ICON_LIST = [
|
||||
"ellipsis",
|
||||
"ellipsis_vertical",
|
||||
"equal",
|
||||
"equal_approximately",
|
||||
"equal_not",
|
||||
"eraser",
|
||||
"ethernet_port",
|
||||
"euro",
|
||||
"expand",
|
||||
"external_link",
|
||||
"eye",
|
||||
"eye_closed",
|
||||
"eye_off",
|
||||
"facebook",
|
||||
"factory",
|
||||
@ -627,6 +674,10 @@ LUCIDE_ICON_LIST = [
|
||||
"file_bar_chart",
|
||||
"file_bar_chart_2",
|
||||
"file_box",
|
||||
"file_chart_column",
|
||||
"file_chart_column_increasing",
|
||||
"file_chart_line",
|
||||
"file_chart_pie",
|
||||
"file_check",
|
||||
"file_check_2",
|
||||
"file_clock",
|
||||
@ -668,6 +719,7 @@ LUCIDE_ICON_LIST = [
|
||||
"file_type",
|
||||
"file_type_2",
|
||||
"file_up",
|
||||
"file_user",
|
||||
"file_video",
|
||||
"file_video_2",
|
||||
"file_volume",
|
||||
@ -709,6 +761,7 @@ LUCIDE_ICON_LIST = [
|
||||
"folder_check",
|
||||
"folder_clock",
|
||||
"folder_closed",
|
||||
"folder_code",
|
||||
"folder_cog",
|
||||
"folder_dot",
|
||||
"folder_down",
|
||||
@ -781,7 +834,12 @@ LUCIDE_ICON_LIST = [
|
||||
"graduation_cap",
|
||||
"grape",
|
||||
"grid_2x2",
|
||||
"grid_2x_2",
|
||||
"grid_2x_2_check",
|
||||
"grid_2x_2_plus",
|
||||
"grid_2x_2_x",
|
||||
"grid_3x3",
|
||||
"grid_3x_3",
|
||||
"grip",
|
||||
"grip_horizontal",
|
||||
"grip_vertical",
|
||||
@ -810,6 +868,7 @@ LUCIDE_ICON_LIST = [
|
||||
"heading_4",
|
||||
"heading_5",
|
||||
"heading_6",
|
||||
"headphone_off",
|
||||
"headphones",
|
||||
"headset",
|
||||
"heart",
|
||||
@ -827,14 +886,20 @@ LUCIDE_ICON_LIST = [
|
||||
"hospital",
|
||||
"hotel",
|
||||
"hourglass",
|
||||
"house",
|
||||
"house_plug",
|
||||
"house_plus",
|
||||
"ice_cream_bowl",
|
||||
"ice_cream_cone",
|
||||
"id_card",
|
||||
"image",
|
||||
"image_down",
|
||||
"image_minus",
|
||||
"image_off",
|
||||
"image_play",
|
||||
"image_plus",
|
||||
"image_up",
|
||||
"image_upscale",
|
||||
"images",
|
||||
"import",
|
||||
"inbox",
|
||||
@ -856,6 +921,7 @@ LUCIDE_ICON_LIST = [
|
||||
"key_square",
|
||||
"keyboard",
|
||||
"keyboard_music",
|
||||
"keyboard_off",
|
||||
"lamp",
|
||||
"lamp_ceiling",
|
||||
"lamp_desk",
|
||||
@ -865,8 +931,9 @@ LUCIDE_ICON_LIST = [
|
||||
"land_plot",
|
||||
"landmark",
|
||||
"languages",
|
||||
"laptop_minimal",
|
||||
"laptop",
|
||||
"laptop_minimal",
|
||||
"laptop_minimal_check",
|
||||
"lasso",
|
||||
"lasso_select",
|
||||
"laugh",
|
||||
@ -881,6 +948,8 @@ LUCIDE_ICON_LIST = [
|
||||
"layout_template",
|
||||
"leaf",
|
||||
"leafy_green",
|
||||
"lectern",
|
||||
"letter_text",
|
||||
"library",
|
||||
"library_big",
|
||||
"life_buoy",
|
||||
@ -893,10 +962,12 @@ LUCIDE_ICON_LIST = [
|
||||
"link_2_off",
|
||||
"linkedin",
|
||||
"list",
|
||||
"list_check",
|
||||
"list_checks",
|
||||
"list_collapse",
|
||||
"list_end",
|
||||
"list_filter",
|
||||
"list_filter_plus",
|
||||
"list_minus",
|
||||
"list_music",
|
||||
"list_ordered",
|
||||
@ -909,15 +980,17 @@ LUCIDE_ICON_LIST = [
|
||||
"list_x",
|
||||
"loader",
|
||||
"loader_circle",
|
||||
"loader_pinwheel",
|
||||
"locate",
|
||||
"locate_fixed",
|
||||
"locate_off",
|
||||
"lock",
|
||||
"lock_keyhole_open",
|
||||
"lock_keyhole",
|
||||
"lock_keyhole_open",
|
||||
"lock_open",
|
||||
"log_in",
|
||||
"log_out",
|
||||
"logs",
|
||||
"lollipop",
|
||||
"luggage",
|
||||
"magnet",
|
||||
@ -934,7 +1007,16 @@ LUCIDE_ICON_LIST = [
|
||||
"mails",
|
||||
"map",
|
||||
"map_pin",
|
||||
"map_pin_check",
|
||||
"map_pin_check_inside",
|
||||
"map_pin_house",
|
||||
"map_pin_minus",
|
||||
"map_pin_minus_inside",
|
||||
"map_pin_off",
|
||||
"map_pin_plus",
|
||||
"map_pin_plus_inside",
|
||||
"map_pin_x",
|
||||
"map_pin_x_inside",
|
||||
"map_pinned",
|
||||
"martini",
|
||||
"maximize",
|
||||
@ -963,6 +1045,7 @@ LUCIDE_ICON_LIST = [
|
||||
"message_square_diff",
|
||||
"message_square_dot",
|
||||
"message_square_heart",
|
||||
"message_square_lock",
|
||||
"message_square_more",
|
||||
"message_square_off",
|
||||
"message_square_plus",
|
||||
@ -974,8 +1057,9 @@ LUCIDE_ICON_LIST = [
|
||||
"message_square_x",
|
||||
"messages_square",
|
||||
"mic",
|
||||
"mic_vocal",
|
||||
"mic_off",
|
||||
"mic_vocal",
|
||||
"microchip",
|
||||
"microscope",
|
||||
"microwave",
|
||||
"milestone",
|
||||
@ -986,6 +1070,7 @@ LUCIDE_ICON_LIST = [
|
||||
"minus",
|
||||
"monitor",
|
||||
"monitor_check",
|
||||
"monitor_cog",
|
||||
"monitor_dot",
|
||||
"monitor_down",
|
||||
"monitor_off",
|
||||
@ -1001,8 +1086,10 @@ LUCIDE_ICON_LIST = [
|
||||
"mountain",
|
||||
"mountain_snow",
|
||||
"mouse",
|
||||
"mouse_off",
|
||||
"mouse_pointer",
|
||||
"mouse_pointer_2",
|
||||
"mouse_pointer_ban",
|
||||
"mouse_pointer_click",
|
||||
"move",
|
||||
"move_3d",
|
||||
@ -1039,10 +1126,13 @@ LUCIDE_ICON_LIST = [
|
||||
"nut_off",
|
||||
"octagon",
|
||||
"octagon_alert",
|
||||
"octagon_minus",
|
||||
"octagon_pause",
|
||||
"octagon_x",
|
||||
"omega",
|
||||
"option",
|
||||
"orbit",
|
||||
"origami",
|
||||
"package",
|
||||
"package_2",
|
||||
"package_check",
|
||||
@ -1055,6 +1145,7 @@ LUCIDE_ICON_LIST = [
|
||||
"paint_roller",
|
||||
"paintbrush",
|
||||
"paintbrush_2",
|
||||
"paintbrush_vertical",
|
||||
"palette",
|
||||
"panel_bottom",
|
||||
"panel_bottom_close",
|
||||
@ -1084,13 +1175,16 @@ LUCIDE_ICON_LIST = [
|
||||
"pc_case",
|
||||
"pen",
|
||||
"pen_line",
|
||||
"pen_off",
|
||||
"pen_tool",
|
||||
"pencil",
|
||||
"pencil_line",
|
||||
"pencil_off",
|
||||
"pencil_ruler",
|
||||
"pentagon",
|
||||
"percent",
|
||||
"person_standing",
|
||||
"philippine_peso",
|
||||
"phone",
|
||||
"phone_call",
|
||||
"phone_forwarded",
|
||||
@ -1106,7 +1200,10 @@ LUCIDE_ICON_LIST = [
|
||||
"pie_chart",
|
||||
"piggy_bank",
|
||||
"pilcrow",
|
||||
"pilcrow_left",
|
||||
"pilcrow_right",
|
||||
"pill",
|
||||
"pill_bottle",
|
||||
"pin",
|
||||
"pin_off",
|
||||
"pipette",
|
||||
@ -1132,6 +1229,7 @@ LUCIDE_ICON_LIST = [
|
||||
"power_off",
|
||||
"presentation",
|
||||
"printer",
|
||||
"printer_check",
|
||||
"projector",
|
||||
"proportions",
|
||||
"puzzle",
|
||||
@ -1206,6 +1304,7 @@ LUCIDE_ICON_LIST = [
|
||||
"satellite_dish",
|
||||
"save",
|
||||
"save_all",
|
||||
"save_off",
|
||||
"scale",
|
||||
"scale_3d",
|
||||
"scaling",
|
||||
@ -1213,7 +1312,9 @@ LUCIDE_ICON_LIST = [
|
||||
"scan_barcode",
|
||||
"scan_eye",
|
||||
"scan_face",
|
||||
"scan_heart",
|
||||
"scan_line",
|
||||
"scan_qr_code",
|
||||
"scan_search",
|
||||
"scan_text",
|
||||
"scatter_chart",
|
||||
@ -1229,6 +1330,7 @@ LUCIDE_ICON_LIST = [
|
||||
"search_code",
|
||||
"search_slash",
|
||||
"search_x",
|
||||
"section",
|
||||
"send",
|
||||
"send_horizontal",
|
||||
"send_to_back",
|
||||
@ -1273,6 +1375,7 @@ LUCIDE_ICON_LIST = [
|
||||
"signal_low",
|
||||
"signal_medium",
|
||||
"signal_zero",
|
||||
"signature",
|
||||
"signpost",
|
||||
"signpost_big",
|
||||
"siren",
|
||||
@ -1282,8 +1385,8 @@ LUCIDE_ICON_LIST = [
|
||||
"slack",
|
||||
"slash",
|
||||
"slice",
|
||||
"sliders_vertical",
|
||||
"sliders_horizontal",
|
||||
"sliders_vertical",
|
||||
"smartphone",
|
||||
"smartphone_charging",
|
||||
"smartphone_nfc",
|
||||
@ -1307,29 +1410,31 @@ LUCIDE_ICON_LIST = [
|
||||
"sprout",
|
||||
"square",
|
||||
"square_activity",
|
||||
"square_arrow_down",
|
||||
"square_arrow_down_left",
|
||||
"square_arrow_down_right",
|
||||
"square_arrow_down",
|
||||
"square_arrow_left",
|
||||
"square_arrow_out_down_left",
|
||||
"square_arrow_out_down_right",
|
||||
"square_arrow_out_up_left",
|
||||
"square_arrow_out_up_right",
|
||||
"square_arrow_right",
|
||||
"square_arrow_up",
|
||||
"square_arrow_up_left",
|
||||
"square_arrow_up_right",
|
||||
"square_arrow_up",
|
||||
"square_asterisk",
|
||||
"square_bottom_dashed_scissors",
|
||||
"square_check_big",
|
||||
"square_chart_gantt",
|
||||
"square_check",
|
||||
"square_check_big",
|
||||
"square_chevron_down",
|
||||
"square_chevron_left",
|
||||
"square_chevron_right",
|
||||
"square_chevron_up",
|
||||
"square_code",
|
||||
"square_dashed_bottom_code",
|
||||
"square_dashed",
|
||||
"square_dashed_bottom",
|
||||
"square_dashed_bottom_code",
|
||||
"square_dashed_kanban",
|
||||
"square_dashed_mouse_pointer",
|
||||
"square_divide",
|
||||
@ -1343,8 +1448,8 @@ LUCIDE_ICON_LIST = [
|
||||
"square_menu",
|
||||
"square_minus",
|
||||
"square_mouse_pointer",
|
||||
"square_parking_off",
|
||||
"square_parking",
|
||||
"square_parking_off",
|
||||
"square_pen",
|
||||
"square_percent",
|
||||
"square_pi",
|
||||
@ -1358,10 +1463,11 @@ LUCIDE_ICON_LIST = [
|
||||
"square_slash",
|
||||
"square_split_horizontal",
|
||||
"square_split_vertical",
|
||||
"square_square",
|
||||
"square_stack",
|
||||
"square_terminal",
|
||||
"square_user_round",
|
||||
"square_user",
|
||||
"square_user_round",
|
||||
"square_x",
|
||||
"squircle",
|
||||
"squirrel",
|
||||
@ -1398,6 +1504,7 @@ LUCIDE_ICON_LIST = [
|
||||
"table_cells_merge",
|
||||
"table_cells_split",
|
||||
"table_columns_split",
|
||||
"table_of_contents",
|
||||
"table_properties",
|
||||
"table_rows_split",
|
||||
"tablet",
|
||||
@ -1413,11 +1520,11 @@ LUCIDE_ICON_LIST = [
|
||||
"tangent",
|
||||
"target",
|
||||
"telescope",
|
||||
"tent",
|
||||
"tent_tree",
|
||||
"terminal",
|
||||
"test_tube_diagonal",
|
||||
"test_tube",
|
||||
"tent",
|
||||
"test_tube_diagonal",
|
||||
"test_tubes",
|
||||
"text",
|
||||
"text_cursor",
|
||||
@ -1438,11 +1545,14 @@ LUCIDE_ICON_LIST = [
|
||||
"ticket_plus",
|
||||
"ticket_slash",
|
||||
"ticket_x",
|
||||
"tickets",
|
||||
"tickets_plane",
|
||||
"timer",
|
||||
"timer_off",
|
||||
"timer_reset",
|
||||
"toggle_left",
|
||||
"toggle_right",
|
||||
"toilet",
|
||||
"tornado",
|
||||
"torus",
|
||||
"touchpad",
|
||||
@ -1464,17 +1574,21 @@ LUCIDE_ICON_LIST = [
|
||||
"trello",
|
||||
"trending_down",
|
||||
"trending_up",
|
||||
"trending_up_down",
|
||||
"triangle",
|
||||
"triangle_right",
|
||||
"triangle_alert",
|
||||
"triangle_right",
|
||||
"trophy",
|
||||
"truck",
|
||||
"turtle",
|
||||
"tv",
|
||||
"tv_2",
|
||||
"tv_minimal",
|
||||
"tv_minimal_play",
|
||||
"twitch",
|
||||
"twitter",
|
||||
"type",
|
||||
"type_outline",
|
||||
"umbrella",
|
||||
"umbrella_off",
|
||||
"underline",
|
||||
@ -1485,8 +1599,8 @@ LUCIDE_ICON_LIST = [
|
||||
"unfold_vertical",
|
||||
"ungroup",
|
||||
"university",
|
||||
"unlink_2",
|
||||
"unlink",
|
||||
"unlink_2",
|
||||
"unplug",
|
||||
"upload",
|
||||
"usb",
|
||||
@ -1494,11 +1608,13 @@ LUCIDE_ICON_LIST = [
|
||||
"user_check",
|
||||
"user_cog",
|
||||
"user_minus",
|
||||
"user_pen",
|
||||
"user_plus",
|
||||
"user_round",
|
||||
"user_round_check",
|
||||
"user_round_cog",
|
||||
"user_round_minus",
|
||||
"user_round_pen",
|
||||
"user_round_plus",
|
||||
"user_round_search",
|
||||
"user_round_x",
|
||||
@ -1520,14 +1636,16 @@ LUCIDE_ICON_LIST = [
|
||||
"videotape",
|
||||
"view",
|
||||
"voicemail",
|
||||
"volleyball",
|
||||
"volume",
|
||||
"volume_1",
|
||||
"volume_2",
|
||||
"volume_off",
|
||||
"volume_x",
|
||||
"vote",
|
||||
"wallet",
|
||||
"wallet_minimal",
|
||||
"wallet_cards",
|
||||
"wallet_minimal",
|
||||
"wallpaper",
|
||||
"wand",
|
||||
"wand_sparkles",
|
||||
@ -1535,17 +1653,22 @@ LUCIDE_ICON_LIST = [
|
||||
"washing_machine",
|
||||
"watch",
|
||||
"waves",
|
||||
"waves_ladder",
|
||||
"waypoints",
|
||||
"webcam",
|
||||
"webhook_off",
|
||||
"webhook",
|
||||
"webhook_off",
|
||||
"weight",
|
||||
"wheat",
|
||||
"wheat_off",
|
||||
"whole_word",
|
||||
"wifi",
|
||||
"wifi_high",
|
||||
"wifi_low",
|
||||
"wifi_off",
|
||||
"wifi_zero",
|
||||
"wind",
|
||||
"wind_arrow_down",
|
||||
"wine",
|
||||
"wine_off",
|
||||
"workflow",
|
||||
@ -1559,3 +1682,7 @@ LUCIDE_ICON_LIST = [
|
||||
"zoom_in",
|
||||
"zoom_out",
|
||||
]
|
||||
LUCIDE_ICON_MAPPING_OVERRIDE = {
|
||||
"grid_2x_2_check": "Grid2x2Check",
|
||||
"grid_2x_2_x": "Grid2x2X",
|
||||
}
|
||||
|
@ -420,11 +420,12 @@ const {_LANGUAGE!s} = match ? match[1] : '';

def _get_custom_code(self) -> str | None:
hooks = {}
from reflex.compiler.templates import MACROS

for _component in self.component_map.values():
comp = _component(_MOCK_ARG)
hooks.update(comp._get_all_hooks_internal())
hooks.update(comp._get_all_hooks())
formatted_hooks = "\n".join(hooks.keys())
formatted_hooks = MACROS.module.renderHooks(hooks) # type: ignore
return f"""
function {self._get_component_map_name()} () {{
{formatted_hooks}
@ -149,10 +149,10 @@ class Plotly(NoSSRComponent):
|
||||
# Fired when a plot element is hovered over.
|
||||
on_hover: EventHandler[_event_points_data_signature]
|
||||
|
||||
# Fired after the plot is layed out (zoom, pan, etc).
|
||||
# Fired after the plot is laid out (zoom, pan, etc).
|
||||
on_relayout: EventHandler[no_args_event_spec]
|
||||
|
||||
# Fired while the plot is being layed out.
|
||||
# Fired while the plot is being laid out.
|
||||
on_relayouting: EventHandler[no_args_event_spec]
|
||||
|
||||
# Fired after the plot style is changed.
|
||||
@ -167,7 +167,7 @@ class Plotly(NoSSRComponent):
|
||||
# Fired while dragging a selection.
|
||||
on_selecting: EventHandler[_event_points_data_signature]
|
||||
|
||||
# Fired while an animation is occuring.
|
||||
# Fired while an animation is occurring.
|
||||
on_transitioning: EventHandler[no_args_event_spec]
|
||||
|
||||
# Fired when a transition is stopped early.
|
||||
|
@ -130,13 +130,13 @@ class Plotly(NoSSRComponent):
|
||||
on_deselect: Fired when a selection is cleared (via double click).
|
||||
on_double_click: Fired when the plot is double clicked.
|
||||
on_hover: Fired when a plot element is hovered over.
|
||||
on_relayout: Fired after the plot is layed out (zoom, pan, etc).
|
||||
on_relayouting: Fired while the plot is being layed out.
|
||||
on_relayout: Fired after the plot is laid out (zoom, pan, etc).
|
||||
on_relayouting: Fired while the plot is being laid out.
|
||||
on_restyle: Fired after the plot style is changed.
|
||||
on_redraw: Fired after the plot is redrawn.
|
||||
on_selected: Fired after selecting plot elements.
|
||||
on_selecting: Fired while dragging a selection.
|
||||
on_transitioning: Fired while an animation is occuring.
|
||||
on_transitioning: Fired while an animation is occurring.
|
||||
on_transition_interrupted: Fired when a transition is stopped early.
|
||||
on_unhover: Fired when a hovered element is no longer hovered.
|
||||
style: The style of the component.
|
||||
|
@ -34,7 +34,7 @@ def on_value_event_spec(
|
||||
|
||||
|
||||
class SliderRoot(SliderComponent):
|
||||
"""The Slider component comtaining all slider parts."""
|
||||
"""The Slider component containing all slider parts."""
|
||||
|
||||
tag = "Root"
|
||||
alias = "RadixSliderRoot"
|
||||
|
@ -150,7 +150,7 @@ class Center(Flex):
|
||||
Args:
|
||||
*children: Child components.
|
||||
as_child: Change the default rendered element for the one passed as a child, merging their props and behavior.
|
||||
direction: How child items are layed out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
direction: How child items are laid out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
align: Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
justify: Alignment of children along the cross axis: "start" | "center" | "end" | "between"
|
||||
wrap: Whether children should wrap when they reach the end of their container: "nowrap" | "wrap" | "wrap-reverse"
|
||||
|
@ -22,7 +22,7 @@ class Flex(elements.Div, RadixThemesComponent):
|
||||
# Change the default rendered element for the one passed as a child, merging their props and behavior.
|
||||
as_child: Var[bool]
|
||||
|
||||
# How child items are layed out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
# How child items are laid out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
direction: Var[Responsive[LiteralFlexDirection]]
|
||||
|
||||
# Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
|
@ -153,7 +153,7 @@ class Flex(elements.Div, RadixThemesComponent):
|
||||
Args:
|
||||
*children: Child components.
|
||||
as_child: Change the default rendered element for the one passed as a child, merging their props and behavior.
|
||||
direction: How child items are layed out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
direction: How child items are laid out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
align: Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
justify: Alignment of children along the cross axis: "start" | "center" | "end" | "between"
|
||||
wrap: Whether children should wrap when they reach the end of their container: "nowrap" | "wrap" | "wrap-reverse"
|
||||
|
@ -27,7 +27,7 @@ class Grid(elements.Div, RadixThemesComponent):
|
||||
# Number of rows
|
||||
rows: Var[Responsive[str]]
|
||||
|
||||
# How the grid items are layed out: "row" | "column" | "dense" | "row-dense" | "column-dense"
|
||||
# How the grid items are laid out: "row" | "column" | "dense" | "row-dense" | "column-dense"
|
||||
flow: Var[Responsive[LiteralGridFlow]]
|
||||
|
||||
# Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
|
@ -184,7 +184,7 @@ class Grid(elements.Div, RadixThemesComponent):
|
||||
as_child: Change the default rendered element for the one passed as a child, merging their props and behavior.
|
||||
columns: Number of columns
|
||||
rows: Number of rows
|
||||
flow: How the grid items are layed out: "row" | "column" | "dense" | "row-dense" | "column-dense"
|
||||
flow: How the grid items are laid out: "row" | "column" | "dense" | "row-dense" | "column-dense"
|
||||
align: Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
justify: Alignment of children along the cross axis: "start" | "center" | "end" | "between"
|
||||
spacing: Gap between children: "0" - "9"
|
||||
|
@ -150,7 +150,7 @@ class Spacer(Flex):
|
||||
Args:
|
||||
*children: Child components.
|
||||
as_child: Change the default rendered element for the one passed as a child, merging their props and behavior.
|
||||
direction: How child items are layed out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
direction: How child items are laid out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
align: Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
justify: Alignment of children along the cross axis: "start" | "center" | "end" | "between"
|
||||
wrap: Whether children should wrap when they reach the end of their container: "nowrap" | "wrap" | "wrap-reverse"
|
||||
|
@ -126,7 +126,7 @@ class Stack(Flex):
|
||||
spacing: Gap between children: "0" - "9"
|
||||
align: Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
as_child: Change the default rendered element for the one passed as a child, merging their props and behavior.
|
||||
direction: How child items are layed out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
direction: How child items are laid out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
justify: Alignment of children along the cross axis: "start" | "center" | "end" | "between"
|
||||
wrap: Whether children should wrap when they reach the end of their container: "nowrap" | "wrap" | "wrap-reverse"
|
||||
access_key: Provides a hint for generating a keyboard shortcut for the current element.
|
||||
@ -258,7 +258,7 @@ class VStack(Stack):
|
||||
|
||||
Args:
|
||||
*children: The children of the stack.
|
||||
direction: How child items are layed out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
direction: How child items are laid out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
spacing: Gap between children: "0" - "9"
|
||||
align: Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
as_child: Change the default rendered element for the one passed as a child, merging their props and behavior.
|
||||
@ -393,7 +393,7 @@ class HStack(Stack):
|
||||
|
||||
Args:
|
||||
*children: The children of the stack.
|
||||
direction: How child items are layed out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
direction: How child items are laid out: "row" | "column" | "row-reverse" | "column-reverse"
|
||||
spacing: Gap between children: "0" - "9"
|
||||
align: Alignment of children along the main axis: "start" | "center" | "end" | "baseline" | "stretch"
|
||||
as_child: Change the default rendered element for the one passed as a child, merging their props and behavior.
|
||||
|
@ -76,7 +76,7 @@ class Link(RadixThemesComponent, A, MemoizationLeaf, MarkdownComponentMap):
|
||||
Returns:
|
||||
Component: The link component
|
||||
"""
|
||||
props.setdefault(":hover", {"color": color("accent", 8)})
|
||||
props.setdefault("_hover", {"color": color("accent", 8)})
|
||||
href = props.get("href")
|
||||
|
||||
is_external = props.pop("is_external", None)
|
||||
|
@ -42,7 +42,7 @@ class Axis(Recharts):
|
||||
# The width of axis which is usually calculated internally.
|
||||
width: Var[Union[str, int]]
|
||||
|
||||
# The height of axis, which can be setted by user.
|
||||
# The height of axis, which can be set by user.
|
||||
height: Var[Union[str, int]]
|
||||
|
||||
# The type of axis 'number' | 'category'
|
||||
@ -60,7 +60,7 @@ class Axis(Recharts):
|
||||
# Allow the axis has duplicated categorys or not when the type of axis is "category". Default: True
|
||||
allow_duplicated_category: Var[bool]
|
||||
|
||||
# The range of the axis. Work best in conjuction with allow_data_overflow. Default: [0, "auto"]
|
||||
# The range of the axis. Work best in conjunction with allow_data_overflow. Default: [0, "auto"]
|
||||
domain: Var[List]
|
||||
|
||||
# If set false, no axis line will be drawn. Default: True
|
||||
|
@ -144,13 +144,13 @@ class Axis(Recharts):
|
||||
data_key: The key of data displayed in the axis.
|
||||
hide: If set true, the axis do not display in the chart. Default: False
|
||||
width: The width of axis which is usually calculated internally.
|
||||
height: The height of axis, which can be setted by user.
|
||||
height: The height of axis, which can be set by user.
|
||||
type_: The type of axis 'number' | 'category'
|
||||
interval: If set 0, all the ticks will be shown. If set preserveStart", "preserveEnd" or "preserveStartEnd", the ticks which is to be shown or hidden will be calculated automatically. Default: "preserveEnd"
|
||||
allow_decimals: Allow the ticks of Axis to be decimals or not. Default: True
|
||||
allow_data_overflow: When domain of the axis is specified and the type of the axis is 'number', if allowDataOverflow is set to be false, the domain will be adjusted when the minimum value of data is smaller than domain[0] or the maximum value of data is greater than domain[1] so that the axis displays all data values. If set to true, graphic elements (line, area, bars) will be clipped to conform to the specified domain. Default: False
|
||||
allow_duplicated_category: Allow the axis has duplicated categorys or not when the type of axis is "category". Default: True
|
||||
domain: The range of the axis. Work best in conjuction with allow_data_overflow. Default: [0, "auto"]
|
||||
domain: The range of the axis. Work best in conjunction with allow_data_overflow. Default: [0, "auto"]
|
||||
axis_line: If set false, no axis line will be drawn. Default: True
|
||||
mirror: If set true, flips ticks around the axis line, displaying the labels inside the chart instead of outside. Default: False
|
||||
reversed: Reverse the ticks or not. Default: False
|
||||
@ -330,13 +330,13 @@ class XAxis(Axis):
|
||||
data_key: The key of data displayed in the axis.
|
||||
hide: If set true, the axis do not display in the chart. Default: False
|
||||
width: The width of axis which is usually calculated internally.
|
||||
height: The height of axis, which can be setted by user.
|
||||
height: The height of axis, which can be set by user.
|
||||
type_: The type of axis 'number' | 'category'
|
||||
interval: If set 0, all the ticks will be shown. If set preserveStart", "preserveEnd" or "preserveStartEnd", the ticks which is to be shown or hidden will be calculated automatically. Default: "preserveEnd"
|
||||
allow_decimals: Allow the ticks of Axis to be decimals or not. Default: True
|
||||
allow_data_overflow: When domain of the axis is specified and the type of the axis is 'number', if allowDataOverflow is set to be false, the domain will be adjusted when the minimum value of data is smaller than domain[0] or the maximum value of data is greater than domain[1] so that the axis displays all data values. If set to true, graphic elements (line, area, bars) will be clipped to conform to the specified domain. Default: False
|
||||
allow_duplicated_category: Allow the axis has duplicated categorys or not when the type of axis is "category". Default: True
|
||||
domain: The range of the axis. Work best in conjuction with allow_data_overflow. Default: [0, "auto"]
|
||||
domain: The range of the axis. Work best in conjunction with allow_data_overflow. Default: [0, "auto"]
|
||||
axis_line: If set false, no axis line will be drawn. Default: True
|
||||
mirror: If set true, flips ticks around the axis line, displaying the labels inside the chart instead of outside. Default: False
|
||||
reversed: Reverse the ticks or not. Default: False
|
||||
@ -512,13 +512,13 @@ class YAxis(Axis):
|
||||
data_key: The key of data displayed in the axis.
|
||||
hide: If set true, the axis do not display in the chart. Default: False
|
||||
width: The width of axis which is usually calculated internally.
|
||||
height: The height of axis, which can be setted by user.
|
||||
height: The height of axis, which can be set by user.
|
||||
type_: The type of axis 'number' | 'category'
|
||||
interval: If set 0, all the ticks will be shown. If set preserveStart", "preserveEnd" or "preserveStartEnd", the ticks which is to be shown or hidden will be calculated automatically. Default: "preserveEnd"
|
||||
allow_decimals: Allow the ticks of Axis to be decimals or not. Default: True
|
||||
allow_data_overflow: When domain of the axis is specified and the type of the axis is 'number', if allowDataOverflow is set to be false, the domain will be adjusted when the minimum value of data is smaller than domain[0] or the maximum value of data is greater than domain[1] so that the axis displays all data values. If set to true, graphic elements (line, area, bars) will be clipped to conform to the specified domain. Default: False
|
||||
allow_duplicated_category: Allow the axis has duplicated categorys or not when the type of axis is "category". Default: True
|
||||
domain: The range of the axis. Work best in conjuction with allow_data_overflow. Default: [0, "auto"]
|
||||
domain: The range of the axis. Work best in conjunction with allow_data_overflow. Default: [0, "auto"]
|
||||
axis_line: If set false, no axis line will be drawn. Default: True
|
||||
mirror: If set true, flips ticks around the axis line, displaying the labels inside the chart instead of outside. Default: False
|
||||
reversed: Reverse the ticks or not. Default: False
|
||||
|
@ -85,8 +85,8 @@ class ChartBase(RechartsCharts):
cls._ensure_valid_dimension("height", height)

dim_props = {
"width": width or "100%",
"height": height or "100%",
"width": width if width is not None else "100%",
"height": height if height is not None else "100%",
}
# Provide min dimensions so the graph always appears, even if the outer container is zero-size.
if width is None:
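A quick illustration of why the change above matters: with or, an explicit width of 0 would silently fall back to "100%", while the is-not-None check preserves it.

width = 0
assert (width or "100%") == "100%"                    # old behavior: 0 treated as unset
assert (width if width is not None else "100%") == 0  # new behavior: explicit 0 is kept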
@ -124,7 +124,7 @@ class Radar(Recharts):
|
||||
# The key of a group of data which should be unique in a radar chart.
|
||||
data_key: Var[Union[str, int]]
|
||||
|
||||
# The coordinates of all the vertexes of the radar shape, like [{ x, y }].
|
||||
# The coordinates of all the vertices of the radar shape, like [{ x, y }].
|
||||
points: Var[List[Dict[str, Any]]]
|
||||
|
||||
# If false set, dots will not be drawn. Default: True
|
||||
@ -373,7 +373,7 @@ class PolarRadiusAxis(Recharts):
|
||||
# The count of axis ticks. Not used if 'type' is 'category'. Default: 5
|
||||
tick_count: Var[int]
|
||||
|
||||
# If 'auto' set, the scale funtion is linear scale. 'auto' | 'linear' | 'pow' | 'sqrt' | 'log' | 'identity' | 'time' | 'band' | 'point' | 'ordinal' | 'quantile' | 'quantize' | 'utc' | 'sequential' | 'threshold'. Default: "auto"
|
||||
# If 'auto' set, the scale function is linear scale. 'auto' | 'linear' | 'pow' | 'sqrt' | 'log' | 'identity' | 'time' | 'band' | 'point' | 'ordinal' | 'quantile' | 'quantize' | 'utc' | 'sequential' | 'threshold'. Default: "auto"
|
||||
scale: Var[LiteralScale]
|
||||
|
||||
# Valid children components
|
||||
|
@ -200,7 +200,7 @@ class Radar(Recharts):
|
||||
Args:
|
||||
*children: The children of the component.
|
||||
data_key: The key of a group of data which should be unique in a radar chart.
|
||||
points: The coordinates of all the vertexes of the radar shape, like [{ x, y }].
|
||||
points: The coordinates of all the vertices of the radar shape, like [{ x, y }].
|
||||
dot: If false set, dots will not be drawn. Default: True
|
||||
stroke: Stoke color. Default: rx.color("accent", 9)
|
||||
fill: Fill color. Default: rx.color("accent", 3)
|
||||
@ -574,7 +574,7 @@ class PolarRadiusAxis(Recharts):
|
||||
axis_line: If false set, axis line will not be drawn. If true set, axis line will be drawn which have the props calculated internally. If object set, axis line will be drawn which have the props mergered by the internal calculated props and the option. Default: True
|
||||
tick: If false set, ticks will not be drawn. If true set, ticks will be drawn which have the props calculated internally. If object set, ticks will be drawn which have the props mergered by the internal calculated props and the option. Default: True
|
||||
tick_count: The count of axis ticks. Not used if 'type' is 'category'. Default: 5
|
||||
scale: If 'auto' set, the scale funtion is linear scale. 'auto' | 'linear' | 'pow' | 'sqrt' | 'log' | 'identity' | 'time' | 'band' | 'point' | 'ordinal' | 'quantile' | 'quantize' | 'utc' | 'sequential' | 'threshold'. Default: "auto"
|
||||
scale: If 'auto' set, the scale function is linear scale. 'auto' | 'linear' | 'pow' | 'sqrt' | 'log' | 'identity' | 'time' | 'band' | 'point' | 'ordinal' | 'quantile' | 'quantize' | 'utc' | 'sequential' | 'threshold'. Default: "auto"
|
||||
domain: The domain of the polar radius axis, specifying the minimum and maximum values. Default: [0, "auto"]
|
||||
stroke: The stroke color of axis. Default: rx.color("gray", 10)
|
||||
style: The style of the component.
|
||||
|
@ -167,7 +167,7 @@ class ToastProps(PropsBase, NoExtrasAllowedProps):
|
||||
class Toaster(Component):
|
||||
"""A Toaster Component for displaying toast notifications."""
|
||||
|
||||
library: str = "sonner@1.5.0"
|
||||
library: str = "sonner@1.7.1"
|
||||
|
||||
tag = "Toaster"
|
||||
|
||||
|
@ -27,7 +27,7 @@ class Dirs(SimpleNamespace):
|
||||
UPLOADED_FILES = "uploaded_files"
|
||||
# The name of the assets directory.
|
||||
APP_ASSETS = "assets"
|
||||
# The name of the assets directory for external ressource (a subfolder of APP_ASSETS).
|
||||
# The name of the assets directory for external resources (a subfolder of APP_ASSETS).
|
||||
EXTERNAL_APP_ASSETS = "external"
|
||||
# The name of the utils file.
|
||||
UTILS = "utils"
|
||||
|
@ -135,6 +135,7 @@ class Hooks(SimpleNamespace):
|
||||
class HookPosition(enum.Enum):
|
||||
"""The position of the hook in the component."""
|
||||
|
||||
INTERNAL = "internal"
|
||||
PRE_TRIGGER = "pre_trigger"
|
||||
POST_TRIGGER = "post_trigger"
|
||||
|
||||
|
@ -91,6 +91,8 @@ class Event:
|
||||
return f"{self.token}_{substate}"
|
||||
|
||||
|
||||
_EVENT_FIELDS: set[str] = {f.name for f in dataclasses.fields(Event)}
|
||||
|
||||
BACKGROUND_TASK_MARKER = "_reflex_background_task"
|
||||
|
||||
|
||||
@ -431,6 +433,101 @@ class EventChain(EventActionsMixin):
|
||||
|
||||
invocation: Optional[Var] = dataclasses.field(default=None)
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls,
|
||||
value: EventType,
|
||||
args_spec: ArgsSpec | Sequence[ArgsSpec],
|
||||
key: Optional[str] = None,
|
||||
**event_chain_kwargs,
|
||||
) -> Union[EventChain, Var]:
|
||||
"""Create an event chain from a variety of input types.
|
||||
|
||||
Args:
|
||||
value: The value to create the event chain from.
|
||||
args_spec: The args_spec of the event trigger being bound.
|
||||
key: The key of the event trigger being bound.
|
||||
**event_chain_kwargs: Additional kwargs to pass to the EventChain constructor.
|
||||
|
||||
Returns:
|
||||
The event chain.
|
||||
|
||||
Raises:
|
||||
ValueError: If the value is not a valid event chain.
|
||||
"""
|
||||
# If it's an event chain var, return it.
|
||||
if isinstance(value, Var):
|
||||
if isinstance(value, EventChainVar):
|
||||
return value
|
||||
elif isinstance(value, EventVar):
|
||||
value = [value]
|
||||
elif issubclass(value._var_type, (EventChain, EventSpec)):
|
||||
return cls.create(
|
||||
value=value.guess_type(),
|
||||
args_spec=args_spec,
|
||||
key=key,
|
||||
**event_chain_kwargs,
|
||||
)
|
||||
else:
|
||||
raise ValueError(
|
||||
f"Invalid event chain: {value!s} of type {value._var_type}"
|
||||
)
|
||||
elif isinstance(value, EventChain):
|
||||
# Trust that the caller knows what they're doing passing an EventChain directly
|
||||
return value
|
||||
|
||||
# If the input is a single event handler, wrap it in a list.
|
||||
if isinstance(value, (EventHandler, EventSpec)):
|
||||
value = [value]
|
||||
|
||||
# If the input is a list of event handlers, create an event chain.
|
||||
if isinstance(value, List):
|
||||
events: List[Union[EventSpec, EventVar]] = []
|
||||
for v in value:
|
||||
if isinstance(v, (EventHandler, EventSpec)):
|
||||
# Call the event handler to get the event.
|
||||
events.append(call_event_handler(v, args_spec, key=key))
|
||||
elif isinstance(v, Callable):
|
||||
# Call the lambda to get the event chain.
|
||||
result = call_event_fn(v, args_spec, key=key)
|
||||
if isinstance(result, Var):
|
||||
raise ValueError(
|
||||
f"Invalid event chain: {v}. Cannot use a Var-returning "
|
||||
"lambda inside an EventChain list."
|
||||
)
|
||||
events.extend(result)
|
||||
elif isinstance(v, EventVar):
|
||||
events.append(v)
|
||||
else:
|
||||
raise ValueError(f"Invalid event: {v}")
|
||||
|
||||
# If the input is a callable, create an event chain.
|
||||
elif isinstance(value, Callable):
|
||||
result = call_event_fn(value, args_spec, key=key)
|
||||
if isinstance(result, Var):
|
||||
# Recursively call this function if the lambda returned an EventChain Var.
|
||||
return cls.create(
|
||||
value=result, args_spec=args_spec, key=key, **event_chain_kwargs
|
||||
)
|
||||
events = [*result]
|
||||
|
||||
# Otherwise, raise an error.
|
||||
else:
|
||||
raise ValueError(f"Invalid event chain: {value}")
|
||||
|
||||
# Add args to the event specs if necessary.
|
||||
events = [
|
||||
(e.with_args(get_handler_args(e)) if isinstance(e, EventSpec) else e)
|
||||
for e in events
|
||||
]
|
||||
|
||||
# Return the event chain.
|
||||
return cls(
|
||||
events=events,
|
||||
args_spec=args_spec,
|
||||
**event_chain_kwargs,
|
||||
)
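For context on the branches above, here is a minimal sketch of the input shapes `EventChain.create` accepts when an event trigger is bound in app code; the component and handler names are illustrative, not taken from this diff:

import reflex as rx

class CounterState(rx.State):
    count: int = 0

    def increment(self):
        self.count += 1

    def log(self):
        print("clicked")  # noqa: T201

def buttons() -> rx.Component:
    return rx.vstack(
        # A single EventHandler.
        rx.button("One handler", on_click=CounterState.increment),
        # A list of handlers becomes a chain of EventSpecs.
        rx.button("Chain", on_click=[CounterState.increment, CounterState.log]),
        # A lambda is called with the trigger's args_spec to build the chain.
        rx.button("Lambda", on_click=lambda: CounterState.increment()),
    )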
|
||||
|
||||
|
||||
@dataclasses.dataclass(
|
||||
init=True,
|
||||
@ -1100,7 +1197,7 @@ def call_function(
|
||||
Returns:
|
||||
EventSpec: An event that will execute the client side javascript.
|
||||
"""
|
||||
callback_kwargs = {}
|
||||
callback_kwargs = {"callback": None}
|
||||
if callback is not None:
|
||||
callback_kwargs = {
|
||||
"callback": format.format_queue_events(
|
||||
|
@ -12,7 +12,7 @@ from reflex.event import EventChain, EventHandler, EventSpec, run_script
|
||||
from reflex.utils.imports import ImportVar
|
||||
from reflex.vars import VarData, get_unique_variable_name
|
||||
from reflex.vars.base import LiteralVar, Var
|
||||
from reflex.vars.function import FunctionVar
|
||||
from reflex.vars.function import ArgsFunctionOperationBuilder, FunctionVar
|
||||
|
||||
NoValue = object()
|
||||
|
||||
@ -45,6 +45,7 @@ class ClientStateVar(Var):
|
||||
# Track the names of the getters and setters
|
||||
_setter_name: str = dataclasses.field(default="")
|
||||
_getter_name: str = dataclasses.field(default="")
|
||||
_id_name: str = dataclasses.field(default="")
|
||||
|
||||
# Whether to add the var and setter to the global `refs` object for use in any Component.
|
||||
_global_ref: bool = dataclasses.field(default=True)
|
||||
@ -96,6 +97,7 @@ class ClientStateVar(Var):
|
||||
"""
|
||||
if var_name is None:
|
||||
var_name = get_unique_variable_name()
|
||||
id_name = "id_" + get_unique_variable_name()
|
||||
if not isinstance(var_name, str):
|
||||
raise ValueError("var_name must be a string.")
|
||||
if default is NoValue:
|
||||
@ -105,20 +107,24 @@ class ClientStateVar(Var):
|
||||
else:
|
||||
default_var = default
|
||||
setter_name = f"set{var_name.capitalize()}"
|
||||
hooks = {
|
||||
hooks: dict[str, VarData | None] = {
|
||||
f"const {id_name} = useId()": None,
|
||||
f"const [{var_name}, {setter_name}] = useState({default_var!s})": None,
|
||||
}
|
||||
imports = {
|
||||
"react": [ImportVar(tag="useState")],
|
||||
"react": [ImportVar(tag="useState"), ImportVar(tag="useId")],
|
||||
}
|
||||
if global_ref:
|
||||
hooks[f"{_client_state_ref(var_name)} = {var_name}"] = None
|
||||
hooks[f"{_client_state_ref(setter_name)} = {setter_name}"] = None
|
||||
hooks[f"{_client_state_ref(var_name)} ??= {{}}"] = None
|
||||
hooks[f"{_client_state_ref(setter_name)} ??= {{}}"] = None
|
||||
hooks[f"{_client_state_ref(var_name)}[{id_name}] = {var_name}"] = None
|
||||
hooks[f"{_client_state_ref(setter_name)}[{id_name}] = {setter_name}"] = None
|
||||
imports.update(_refs_import)
|
||||
return cls(
|
||||
_js_expr="",
|
||||
_setter_name=setter_name,
|
||||
_getter_name=var_name,
|
||||
_id_name=id_name,
|
||||
_global_ref=global_ref,
|
||||
_var_type=default_var._var_type,
|
||||
_var_data=VarData.merge(
|
||||
@ -144,10 +150,11 @@ class ClientStateVar(Var):
|
||||
return (
|
||||
Var(
|
||||
_js_expr=(
|
||||
_client_state_ref(self._getter_name)
|
||||
_client_state_ref(self._getter_name) + f"[{self._id_name}]"
|
||||
if self._global_ref
|
||||
else self._getter_name
|
||||
)
|
||||
),
|
||||
_var_data=self._var_data,
|
||||
)
|
||||
.to(self._var_type)
|
||||
._replace(
|
||||
@ -170,28 +177,43 @@ class ClientStateVar(Var):
|
||||
Returns:
|
||||
A special EventChain Var which will set the value when triggered.
|
||||
"""
|
||||
setter = (
|
||||
_client_state_ref(self._setter_name)
|
||||
if self._global_ref
|
||||
else self._setter_name
|
||||
)
|
||||
_var_data = VarData(imports=_refs_import if self._global_ref else {})
|
||||
|
||||
arg_name = get_unique_variable_name()
|
||||
setter = (
|
||||
ArgsFunctionOperationBuilder.create(
|
||||
args_names=(arg_name,),
|
||||
return_expr=Var("Array.prototype.forEach.call")
|
||||
.to(FunctionVar)
|
||||
.call(
|
||||
Var("Object.values")
|
||||
.to(FunctionVar)
|
||||
.call(Var(_client_state_ref(self._setter_name))),
|
||||
ArgsFunctionOperationBuilder.create(
|
||||
args_names=("setter",),
|
||||
return_expr=Var("setter").to(FunctionVar).call(Var(arg_name)),
|
||||
),
|
||||
),
|
||||
_var_data=_var_data,
|
||||
)
|
||||
if self._global_ref
|
||||
else Var(self._setter_name, _var_data=_var_data).to(FunctionVar)
|
||||
)
|
||||
|
||||
if value is not NoValue:
|
||||
# This is a hack to make it work like an EventSpec taking an arg
|
||||
value_var = LiteralVar.create(value)
|
||||
_var_data = VarData.merge(_var_data, value_var._get_all_var_data())
|
||||
value_str = str(value_var)
|
||||
|
||||
if value_str.startswith("_"):
|
||||
setter = ArgsFunctionOperationBuilder.create(
|
||||
# remove patterns of ["*"] from the value_str using regex
|
||||
arg = re.sub(r"\[\".*\"\]", "", value_str)
|
||||
setter = f"(({arg}) => {setter}({value_str}))"
|
||||
else:
|
||||
setter = f"(() => {setter}({value_str}))"
|
||||
return Var(
|
||||
_js_expr=setter,
|
||||
_var_data=_var_data,
|
||||
).to(FunctionVar, EventChain)
|
||||
args_names=(re.sub(r"\[\".*\"\]", "", value_str),)
|
||||
if value_str.startswith("_")
|
||||
else (),
|
||||
return_expr=setter.call(value_var),
|
||||
)
|
||||
|
||||
return setter.to(FunctionVar, EventChain)
|
||||
|
||||
@property
|
||||
def set(self) -> Var:
|
||||
|
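A hedged usage sketch of `ClientStateVar` after this change, using the experimental `reflex.experimental` API (the exact import path and variable names here are assumptions and may differ by version):

import reflex as rx
from reflex.experimental import ClientStateVar

# A purely client-side value: changing it does not round-trip to the backend.
counter = ClientStateVar.create("counter", 0)

def counter_badge() -> rx.Component:
    return rx.hstack(
        rx.text(counter.value),
        rx.button("+1", on_click=counter.set_value(counter.value + 1)),
    )

With the `useId`-keyed refs introduced here, each component instance registers its own getter/setter entry, and the global setter dispatches to every registered instance via `Object.values(...).forEach(...)`.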
@ -26,7 +26,7 @@ class HeaderData:
|
||||
accept_language: str = ""
|
||||
|
||||
def __init__(self, router_data: Optional[dict] = None):
|
||||
"""Initalize the HeaderData object based on router_data.
|
||||
"""Initialize the HeaderData object based on router_data.
|
||||
|
||||
Args:
|
||||
router_data: the router_data dict.
|
||||
@ -51,7 +51,7 @@ class PageData:
|
||||
params: dict = dataclasses.field(default_factory=dict)
|
||||
|
||||
def __init__(self, router_data: Optional[dict] = None):
|
||||
"""Initalize the PageData object based on router_data.
|
||||
"""Initialize the PageData object based on router_data.
|
||||
|
||||
Args:
|
||||
router_data: the router_data dict.
|
||||
@ -91,7 +91,7 @@ class SessionData:
|
||||
session_id: str = ""
|
||||
|
||||
def __init__(self, router_data: Optional[dict] = None):
|
||||
"""Initalize the SessionData object based on router_data.
|
||||
"""Initialize the SessionData object based on router_data.
|
||||
|
||||
Args:
|
||||
router_data: the router_data dict.
|
||||
|
@ -141,15 +141,13 @@ def get_async_engine(url: str | None) -> sqlalchemy.ext.asyncio.AsyncEngine:
|
||||
return _ASYNC_ENGINE[url]
|
||||
|
||||
|
||||
async def get_db_status() -> bool:
|
||||
async def get_db_status() -> dict[str, bool]:
|
||||
"""Checks the status of the database connection.
|
||||
|
||||
Attempts to connect to the database and execute a simple query to verify connectivity.
|
||||
|
||||
Returns:
|
||||
bool: The status of the database connection:
|
||||
- True: The database is accessible.
|
||||
- False: The database is not accessible.
|
||||
The status of the database connection.
|
||||
"""
|
||||
status = True
|
||||
try:
|
||||
@ -159,7 +157,7 @@ async def get_db_status() -> bool:
|
||||
except sqlalchemy.exc.OperationalError:
|
||||
status = False
|
||||
|
||||
return status
|
||||
return {"db": status}
|
||||
|
||||
|
||||
SQLModelOrSqlAlchemy = Union[
|
||||
@ -535,6 +533,7 @@ def asession(url: str | None = None) -> AsyncSession:
|
||||
_AsyncSessionLocal[url] = sqlalchemy.ext.asyncio.async_sessionmaker(
|
||||
bind=get_async_engine(url),
|
||||
class_=AsyncSession,
|
||||
expire_on_commit=False,
|
||||
autocommit=False,
|
||||
autoflush=False,
|
||||
)
|
||||
|
@ -70,7 +70,7 @@ def get_decorated_pages(omit_implicit_routes=True) -> list[dict[str, Any]]:
|
||||
"""Get the decorated pages.
|
||||
|
||||
Args:
|
||||
omit_implicit_routes: Whether to omit pages where the route will be implicitely guessed later.
|
||||
omit_implicit_routes: Whether to omit pages where the route will be implicitly guessed later.
|
||||
|
||||
Returns:
|
||||
The decorated pages.
|
||||
|
@ -329,13 +329,14 @@ def export(
|
||||
|
||||
@cli.command()
|
||||
def login(loglevel: constants.LogLevel = typer.Option(config.loglevel)):
|
||||
"""Authenicate with experimental Reflex hosting service."""
|
||||
"""Authenticate with experimental Reflex hosting service."""
|
||||
from reflex_cli.v2 import cli as hosting_cli
|
||||
|
||||
check_version()
|
||||
|
||||
validated_info = hosting_cli.login()
|
||||
if validated_info is not None:
|
||||
_skip_compile() # Allow running outside of an app dir
|
||||
telemetry.send("login", user_uuid=validated_info.get("user_id"))
|
||||
|
||||
|
||||
@ -484,6 +485,11 @@ def deploy(
|
||||
"--token",
|
||||
help="token to use for auth",
|
||||
),
|
||||
config_path: Optional[str] = typer.Option(
|
||||
None,
|
||||
"--config",
|
||||
help="path to the config file",
|
||||
),
|
||||
):
|
||||
"""Deploy the app to the Reflex hosting service."""
|
||||
from reflex_cli.utils import dependency
|
||||
@ -539,6 +545,7 @@ def deploy(
|
||||
loglevel=type(loglevel).INFO, # type: ignore
|
||||
token=token,
|
||||
project=project,
|
||||
config_path=config_path,
|
||||
)
|
||||
|
||||
|
||||
|
128
reflex/state.py
@ -107,6 +107,7 @@ from reflex.utils.exceptions import (
|
||||
StateSchemaMismatchError,
|
||||
StateSerializationError,
|
||||
StateTooLargeError,
|
||||
UnretrievableVarValueError,
|
||||
)
|
||||
from reflex.utils.exec import is_testing_env
|
||||
from reflex.utils.serializers import serializer
|
||||
@ -143,6 +144,9 @@ HANDLED_PICKLE_ERRORS = (
|
||||
ValueError,
|
||||
)
|
||||
|
||||
# For BaseState.get_var_value
|
||||
VAR_TYPE = TypeVar("VAR_TYPE")
|
||||
|
||||
|
||||
def _no_chain_background_task(
|
||||
state_cls: Type["BaseState"], name: str, fn: Callable
|
||||
@ -1193,6 +1197,8 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
continue
|
||||
dynamic_vars[param] = DynamicRouteVar(
|
||||
fget=func,
|
||||
auto_deps=False,
|
||||
deps=["router"],
|
||||
cache=True,
|
||||
_js_expr=param,
|
||||
_var_data=VarData.from_state(cls),
|
||||
@ -1240,13 +1246,16 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
if not super().__getattribute__("__dict__"):
|
||||
return super().__getattribute__(name)
|
||||
|
||||
inherited_vars = {
|
||||
**super().__getattribute__("inherited_vars"),
|
||||
**super().__getattribute__("inherited_backend_vars"),
|
||||
}
|
||||
# Fast path for dunder
|
||||
if name.startswith("__"):
|
||||
return super().__getattribute__(name)
|
||||
|
||||
# For now, handle router_data updates as a special case.
|
||||
if name in inherited_vars or name == constants.ROUTER_DATA:
|
||||
if (
|
||||
name == constants.ROUTER_DATA
|
||||
or name in super().__getattribute__("inherited_vars")
|
||||
or name in super().__getattribute__("inherited_backend_vars")
|
||||
):
|
||||
parent_state = super().__getattribute__("parent_state")
|
||||
if parent_state is not None:
|
||||
return getattr(parent_state, name)
|
||||
@ -1301,15 +1310,11 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
value = value.__wrapped__
|
||||
|
||||
# Set the var on the parent state.
|
||||
inherited_vars = {**self.inherited_vars, **self.inherited_backend_vars}
|
||||
if name in inherited_vars:
|
||||
if name in self.inherited_vars or name in self.inherited_backend_vars:
|
||||
setattr(self.parent_state, name, value)
|
||||
return
|
||||
|
||||
if name in self.backend_vars:
|
||||
# abort if unchanged
|
||||
if self._backend_vars.get(name) == value:
|
||||
return
|
||||
self._backend_vars.__setitem__(name, value)
|
||||
self.dirty_vars.add(name)
|
||||
self._mark_dirty()
|
||||
@ -1599,6 +1604,42 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
|
||||
# Slow case - fetch missing parent states from redis.
|
||||
return await self._get_state_from_redis(state_cls)
|
||||
|
||||
async def get_var_value(self, var: Var[VAR_TYPE]) -> VAR_TYPE:
|
||||
"""Get the value of an rx.Var from another state.
|
||||
|
||||
Args:
|
||||
var: The var to get the value for.
|
||||
|
||||
Returns:
|
||||
The value of the var.
|
||||
|
||||
Raises:
|
||||
UnretrievableVarValueError: If the var does not have a literal value
|
||||
or associated state.
|
||||
"""
|
||||
# Oopsie case: you didn't give me a Var... so get what you give.
|
||||
if not isinstance(var, Var):
|
||||
return var # type: ignore
|
||||
|
||||
# Fast case: this is a literal var and the value is known.
|
||||
if hasattr(var, "_var_value"):
|
||||
return var._var_value
|
||||
|
||||
var_data = var._get_all_var_data()
|
||||
if var_data is None or not var_data.state:
|
||||
raise UnretrievableVarValueError(
|
||||
f"Unable to retrieve value for {var._js_expr}: not associated with any state."
|
||||
)
|
||||
# Fastish case: this var belongs to this state
|
||||
if var_data.state == self.get_full_name():
|
||||
return getattr(self, var_data.field_name)
|
||||
|
||||
# Slow case: this var belongs to another state
|
||||
other_state = await self.get_state(
|
||||
self._get_root_state().get_class_substate(var_data.state)
|
||||
)
|
||||
return getattr(other_state, var_data.field_name)
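A short usage sketch of the new `get_var_value` helper, mirroring the unit test added later in this diff (the state names are illustrative):

import reflex as rx

class OtherState(rx.State):
    count: int = 23

class MyState(rx.State):
    async def read_values(self):
        # Literal vars resolve directly; state vars are fetched from the
        # owning (possibly sibling) state instance.
        items = await self.get_var_value(rx.Var.create([1, 2, 3]))
        count = await self.get_var_value(OtherState.count)
        print(items, count)  # noqa: T201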
|
||||
|
||||
def _get_event_handler(
|
||||
self, event: Event
|
||||
) -> tuple[BaseState | StateProxy, EventHandler]:
|
||||
@ -3648,6 +3689,9 @@ def get_state_manager() -> StateManager:
|
||||
class MutableProxy(wrapt.ObjectProxy):
|
||||
"""A proxy for a mutable object that tracks changes."""
|
||||
|
||||
# Hint for finding the base class of the proxy.
|
||||
__base_proxy__ = "MutableProxy"
|
||||
|
||||
# Methods on wrapped objects which should mark the state as dirty.
|
||||
__mark_dirty_attrs__ = {
|
||||
"add",
|
||||
@ -3690,6 +3734,39 @@ class MutableProxy(wrapt.ObjectProxy):
|
||||
BaseModelV1,
|
||||
)
|
||||
|
||||
# Dynamically generated classes for tracking dataclass mutations.
|
||||
__dataclass_proxies__: Dict[type, type] = {}
|
||||
|
||||
def __new__(cls, wrapped: Any, *args, **kwargs) -> MutableProxy:
|
||||
"""Create a proxy instance for a mutable object that tracks changes.
|
||||
|
||||
Args:
|
||||
wrapped: The object to proxy.
|
||||
*args: Other args passed to MutableProxy (ignored).
|
||||
**kwargs: Other kwargs passed to MutableProxy (ignored).
|
||||
|
||||
Returns:
|
||||
The proxy instance.
|
||||
"""
|
||||
if dataclasses.is_dataclass(wrapped):
|
||||
wrapped_cls = type(wrapped)
|
||||
wrapper_cls_name = wrapped_cls.__name__ + cls.__name__
|
||||
# Find the associated class
|
||||
if wrapper_cls_name not in cls.__dataclass_proxies__:
|
||||
# Create a new class that has the __dataclass_fields__ defined
|
||||
cls.__dataclass_proxies__[wrapper_cls_name] = type(
|
||||
wrapper_cls_name,
|
||||
(cls,),
|
||||
{
|
||||
dataclasses._FIELDS: getattr( # pyright: ignore [reportGeneralTypeIssues]
|
||||
wrapped_cls,
|
||||
dataclasses._FIELDS, # pyright: ignore [reportGeneralTypeIssues]
|
||||
),
|
||||
},
|
||||
)
|
||||
cls = cls.__dataclass_proxies__[wrapper_cls_name]
|
||||
return super().__new__(cls)
|
||||
|
||||
def __init__(self, wrapped: Any, state: BaseState, field_name: str):
|
||||
"""Create a proxy for a mutable object that tracks changes.
|
||||
|
||||
@ -3746,7 +3823,27 @@ class MutableProxy(wrapt.ObjectProxy):
|
||||
Returns:
|
||||
Whether the value is of a mutable type.
|
||||
"""
|
||||
return isinstance(value, cls.__mutable_types__)
|
||||
return isinstance(value, cls.__mutable_types__) or (
|
||||
dataclasses.is_dataclass(value) and not isinstance(value, Var)
|
||||
)
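With dataclasses now treated as mutable types, in-place edits to a dataclass field mark the state dirty, as exercised by `test_mutable_models` further down in this diff. A minimal sketch under those assumptions (class and field names are illustrative):

import dataclasses
import reflex as rx

@dataclasses.dataclass
class Settings:
    theme: str = "light"
    tags: list[str] = dataclasses.field(default_factory=list)

class SettingsState(rx.State):
    settings: Settings = Settings()

    def toggle_theme(self):
        # Attribute and nested-list mutations are proxied and tracked.
        self.settings.theme = "dark"
        self.settings.tags.append("updated")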
|
||||
|
||||
@staticmethod
|
||||
def _is_called_from_dataclasses_internal() -> bool:
|
||||
"""Check if the current function is called from dataclasses helper.
|
||||
|
||||
Returns:
|
||||
Whether the current function is called from dataclasses internal code.
|
||||
"""
|
||||
# Walk up the stack a bit to see if we are called from dataclasses
|
||||
# internal code, for example `asdict` or `astuple`.
|
||||
frame = inspect.currentframe()
|
||||
for _ in range(5):
|
||||
# Why not `inspect.stack()` -- this is much faster!
|
||||
if not (frame := frame and frame.f_back):
|
||||
break
|
||||
if inspect.getfile(frame) == dataclasses.__file__:
|
||||
return True
|
||||
return False
|
||||
|
||||
def _wrap_recursive(self, value: Any) -> Any:
|
||||
"""Wrap a value recursively if it is mutable.
|
||||
@ -3757,9 +3854,13 @@ class MutableProxy(wrapt.ObjectProxy):
|
||||
Returns:
|
||||
The wrapped value.
|
||||
"""
|
||||
# When called from dataclasses internal code, return the unwrapped value
|
||||
if self._is_called_from_dataclasses_internal():
|
||||
return value
|
||||
# Recursively wrap mutable types, but do not re-wrap MutableProxy instances.
|
||||
if self._is_mutable_type(value) and not isinstance(value, MutableProxy):
|
||||
return type(self)(
|
||||
base_cls = globals()[self.__base_proxy__]
|
||||
return base_cls(
|
||||
wrapped=value,
|
||||
state=self._self_state,
|
||||
field_name=self._self_field_name,
|
||||
@ -3967,6 +4068,9 @@ class ImmutableMutableProxy(MutableProxy):
|
||||
to modify the wrapped object when the StateProxy is immutable.
|
||||
"""
|
||||
|
||||
# Ensure that recursively wrapped proxies use ImmutableMutableProxy as base.
|
||||
__base_proxy__ = "ImmutableMutableProxy"
|
||||
|
||||
def _mark_dirty(
|
||||
self,
|
||||
wrapped=None,
|
||||
|
@ -52,6 +52,7 @@ from reflex.state import (
|
||||
StateManagerRedis,
|
||||
reload_state_module,
|
||||
)
|
||||
from reflex.utils import console
|
||||
|
||||
try:
|
||||
from selenium import webdriver # pyright: ignore [reportMissingImports]
|
||||
@ -385,7 +386,7 @@ class AppHarness:
|
||||
)
|
||||
if not line:
|
||||
break
|
||||
print(line) # for pytest diagnosis
|
||||
print(line) # for pytest diagnosis #noqa: T201
|
||||
m = re.search(reflex.constants.Next.FRONTEND_LISTENING_REGEX, line)
|
||||
if m is not None:
|
||||
self.frontend_url = m.group(1)
|
||||
@ -403,11 +404,10 @@ class AppHarness:
|
||||
)
|
||||
# catch I/O operation on closed file.
|
||||
except ValueError as e:
|
||||
print(e)
|
||||
console.error(str(e))
|
||||
break
|
||||
if not line:
|
||||
break
|
||||
print(line)
|
||||
|
||||
self.frontend_output_thread = threading.Thread(target=consume_frontend_output)
|
||||
self.frontend_output_thread.start()
|
||||
|
@ -187,3 +187,7 @@ def raise_system_package_missing_error(package: str) -> NoReturn:
|
||||
|
||||
class InvalidLockWarningThresholdError(ReflexError):
|
||||
"""Raised when an invalid lock warning threshold is provided."""
|
||||
|
||||
|
||||
class UnretrievableVarValueError(ReflexError):
|
||||
"""Raised when the value of a var is not retrievable."""
|
||||
|
@ -28,8 +28,8 @@ import typer
|
||||
from alembic.util.exc import CommandError
|
||||
from packaging import version
|
||||
from redis import Redis as RedisSync
|
||||
from redis import exceptions
|
||||
from redis.asyncio import Redis
|
||||
from redis.exceptions import RedisError
|
||||
|
||||
from reflex import constants, model
|
||||
from reflex.compiler import templates
|
||||
@ -109,7 +109,7 @@ def check_latest_package_version(package_name: str):
|
||||
console.warn(
|
||||
f"Your version ({current_version}) of {package_name} is out of date. Upgrade to {latest_version} with 'pip install {package_name} --upgrade'"
|
||||
)
|
||||
# Check for depreacted python versions
|
||||
# Check for deprecated python versions
|
||||
_python_version_check()
|
||||
except Exception:
|
||||
pass
|
||||
@ -333,10 +333,11 @@ def get_redis() -> Redis | None:
|
||||
Returns:
|
||||
The asynchronous redis client.
|
||||
"""
|
||||
if isinstance((redis_url_or_options := parse_redis_url()), str):
|
||||
return Redis.from_url(redis_url_or_options)
|
||||
elif isinstance(redis_url_or_options, dict):
|
||||
return Redis(**redis_url_or_options)
|
||||
if (redis_url := parse_redis_url()) is not None:
|
||||
return Redis.from_url(
|
||||
redis_url,
|
||||
retry_on_error=[RedisError],
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
@ -346,14 +347,15 @@ def get_redis_sync() -> RedisSync | None:
|
||||
Returns:
|
||||
The synchronous redis client.
|
||||
"""
|
||||
if isinstance((redis_url_or_options := parse_redis_url()), str):
|
||||
return RedisSync.from_url(redis_url_or_options)
|
||||
elif isinstance(redis_url_or_options, dict):
|
||||
return RedisSync(**redis_url_or_options)
|
||||
if (redis_url := parse_redis_url()) is not None:
|
||||
return RedisSync.from_url(
|
||||
redis_url,
|
||||
retry_on_error=[RedisError],
|
||||
)
|
||||
return None
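For reference, enabling the Redis state manager that these helpers connect to is a matter of setting `redis_url` in `rxconfig.py`; the URL below is illustrative:

import reflex as rx

config = rx.Config(
    app_name="my_app",
    redis_url="redis://localhost:6379",
)

With this change the client is always built via `from_url` with `retry_on_error=[RedisError]`, and the older dict-of-options form of `REDIS_URL` is no longer parsed.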
|
||||
|
||||
|
||||
def parse_redis_url() -> str | dict | None:
|
||||
def parse_redis_url() -> str | None:
|
||||
"""Parse the REDIS_URL in config if applicable.
|
||||
|
||||
Returns:
|
||||
@ -372,16 +374,13 @@ def parse_redis_url() -> str | dict | None:
|
||||
return config.redis_url
|
||||
|
||||
|
||||
async def get_redis_status() -> bool | None:
|
||||
async def get_redis_status() -> dict[str, bool | None]:
|
||||
"""Checks the status of the Redis connection.
|
||||
|
||||
Attempts to connect to Redis and send a ping command to verify connectivity.
|
||||
|
||||
Returns:
|
||||
bool or None: The status of the Redis connection:
|
||||
- True: Redis is accessible and responding.
|
||||
- False: Redis is not accessible due to a connection error.
|
||||
- None: Redis not used i.e redis_url is not set in rxconfig.
|
||||
The status of the Redis connection.
|
||||
"""
|
||||
try:
|
||||
status = True
|
||||
@ -390,10 +389,10 @@ async def get_redis_status() -> bool | None:
|
||||
redis_client.ping()
|
||||
else:
|
||||
status = None
|
||||
except exceptions.RedisError:
|
||||
except RedisError:
|
||||
status = False
|
||||
|
||||
return status
|
||||
return {"redis": status}
|
||||
|
||||
|
||||
def validate_app_name(app_name: str | None = None) -> str:
|
||||
@ -594,7 +593,7 @@ def initialize_web_directory():
|
||||
"""Initialize the web directory on reflex init."""
|
||||
console.log("Initializing the web directory.")
|
||||
|
||||
# Re-use the hash if one is already created, so we don't over-write it when running reflex init
|
||||
# Reuse the hash if one is already created, so we don't over-write it when running reflex init
|
||||
project_hash = get_project_hash()
|
||||
|
||||
path_ops.cp(constants.Templates.Dirs.WEB_TEMPLATE, str(get_web_dir()))
|
||||
@ -647,7 +646,7 @@ def initialize_bun_config():
|
||||
def init_reflex_json(project_hash: int | None):
|
||||
"""Write the hash of the Reflex project to a REFLEX_JSON.
|
||||
|
||||
Re-use the hash if one is already created, therefore do not
|
||||
Reuse the hash if one is already created, therefore do not
|
||||
overwrite it every time we run the reflex init command
|
||||
.
|
||||
|
||||
@ -1177,6 +1176,24 @@ def initialize_frontend_dependencies():
|
||||
initialize_web_directory()
|
||||
|
||||
|
||||
def check_db_used() -> bool:
|
||||
"""Check if the database is used.
|
||||
|
||||
Returns:
|
||||
True if the database is used.
|
||||
"""
|
||||
return bool(get_config().db_url)
|
||||
|
||||
|
||||
def check_redis_used() -> bool:
|
||||
"""Check if Redis is used.
|
||||
|
||||
Returns:
|
||||
True if Redis is used.
|
||||
"""
|
||||
return bool(get_config().redis_url)
|
||||
|
||||
|
||||
def check_db_initialized() -> bool:
|
||||
"""Check if the database migrations are initialized.
|
||||
|
||||
|
@ -118,7 +118,7 @@ def handle_port(service_name: str, port: str, default_port: str) -> str:
|
||||
"""Change port if the specified port is in use and is not explicitly specified as a CLI arg or config arg.
|
||||
otherwise tell the user the port is in use and exit the app.
|
||||
|
||||
We make an assumption that when port is the default port,then it hasnt been explicitly set since its not straightforward
|
||||
We make an assumption that when port is the default port,then it hasn't been explicitly set since its not straightforward
|
||||
to know whether a port was explicitly provided by the user unless its any other than the default.
|
||||
|
||||
Args:
|
||||
@ -351,7 +351,7 @@ def atexit_handler():
|
||||
|
||||
def get_command_with_loglevel(command: list[str]) -> list[str]:
|
||||
"""Add the right loglevel flag to the designated command.
|
||||
npm uses --loglevel <level>, Bun doesnt use the --loglevel flag and
|
||||
npm uses --loglevel <level>, Bun doesn't use the --loglevel flag and
|
||||
runs in debug mode by default.
|
||||
|
||||
Args:
|
||||
|
@ -1023,7 +1023,7 @@ class InitStubGenerator(StubGenerator):
|
||||
|
||||
class PyiGenerator:
|
||||
"""A .pyi file generator that will scan all defined Component in Reflex and
|
||||
generate the approriate stub.
|
||||
generate the appropriate stub.
|
||||
"""
|
||||
|
||||
modules: list = []
|
||||
@ -1202,4 +1202,4 @@ class PyiGenerator:
|
||||
or "Var[Template]" in line
|
||||
):
|
||||
line = line.rstrip() + " # type: ignore\n"
|
||||
print(line, end="")
|
||||
print(line, end="") # noqa: T201
|
||||
|
@ -7,6 +7,7 @@ import dataclasses
|
||||
import multiprocessing
|
||||
import platform
|
||||
import warnings
|
||||
from contextlib import suppress
|
||||
|
||||
from reflex.config import environment
|
||||
|
||||
@ -171,10 +172,11 @@ def _send(event, telemetry_enabled, **kwargs):
|
||||
if not telemetry_enabled:
|
||||
return False
|
||||
|
||||
event_data = _prepare_event(event, **kwargs)
|
||||
if not event_data:
|
||||
return False
|
||||
return _send_event(event_data)
|
||||
with suppress(Exception):
|
||||
event_data = _prepare_event(event, **kwargs)
|
||||
if not event_data:
|
||||
return False
|
||||
return _send_event(event_data)
|
||||
|
||||
|
||||
def send(event: str, telemetry_enabled: bool | None = None, **kwargs):
|
||||
|
@ -127,7 +127,7 @@ class VarData:
|
||||
state: str = "",
|
||||
field_name: str = "",
|
||||
imports: ImportDict | ParsedImportDict | None = None,
|
||||
hooks: dict[str, None] | None = None,
|
||||
hooks: dict[str, VarData | None] | None = None,
|
||||
deps: list[Var] | None = None,
|
||||
position: Hooks.HookPosition | None = None,
|
||||
):
|
||||
@ -194,7 +194,9 @@ class VarData:
|
||||
(var_data.state for var_data in all_var_datas if var_data.state), ""
|
||||
)
|
||||
|
||||
hooks = {hook: None for var_data in all_var_datas for hook in var_data.hooks}
|
||||
hooks: dict[str, VarData | None] = {
|
||||
hook: None for var_data in all_var_datas for hook in var_data.hooks
|
||||
}
|
||||
|
||||
_imports = imports.merge_imports(
|
||||
*(var_data.imports for var_data in all_var_datas)
|
||||
@ -579,7 +581,7 @@ class Var(Generic[VAR_TYPE]):
|
||||
|
||||
# Try to pull the imports and hooks from contained values.
|
||||
if not isinstance(value, str):
|
||||
return LiteralVar.create(value)
|
||||
return LiteralVar.create(value, _var_data=_var_data)
|
||||
|
||||
if _var_is_string is False or _var_is_local is True:
|
||||
return cls(
|
||||
@ -2276,7 +2278,7 @@ def computed_var(
|
||||
def computed_var(
|
||||
fget: Callable[[BASE_STATE], Any] | None = None,
|
||||
initial_value: Any | types.Unset = types.Unset(),
|
||||
cache: bool = False,
|
||||
cache: Optional[bool] = None,
|
||||
deps: Optional[List[Union[str, Var]]] = None,
|
||||
auto_deps: bool = True,
|
||||
interval: Optional[Union[datetime.timedelta, int]] = None,
|
||||
@ -2302,6 +2304,15 @@ def computed_var(
|
||||
ValueError: If caching is disabled and an update interval is set.
|
||||
VarDependencyError: If user supplies dependencies without caching.
|
||||
"""
|
||||
if cache is None:
|
||||
cache = False
|
||||
console.deprecate(
|
||||
"Default non-cached rx.var",
|
||||
"the default value will be `@rx.var(cache=True)` in a future release. "
|
||||
"To retain uncached var, explicitly pass `@rx.var(cache=False)`",
|
||||
deprecation_version="0.6.8",
|
||||
removal_version="0.7.0",
|
||||
)
|
||||
if cache is False and interval is not None:
|
||||
raise ValueError("Cannot set update interval without caching.")
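In app code, the deprecation means being explicit about caching is the safe choice going forward, e.g. (names are illustrative):

import datetime
import reflex as rx

class StatsState(rx.State):
    values: list[int] = [1, 2, 3]

    @rx.var(cache=True)  # recomputed only when its dependencies change
    def total(self) -> int:
        return sum(self.values)

    @rx.var(cache=False)  # recomputed on every state update
    def as_of(self) -> str:
        return datetime.datetime.now().isoformat()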
|
||||
|
||||
|
@ -271,6 +271,25 @@ class StringVar(Var[STRING_TYPE], python_types=str):
|
||||
raise_unsupported_operand_types("startswith", (type(self), type(prefix)))
|
||||
return string_starts_with_operation(self, prefix)
|
||||
|
||||
@overload
|
||||
def endswith(self, suffix: StringVar | str) -> BooleanVar: ...
|
||||
|
||||
@overload
|
||||
def endswith(self, suffix: NoReturn) -> NoReturn: ...
|
||||
|
||||
def endswith(self, suffix: Any) -> BooleanVar:
|
||||
"""Check if the string ends with a suffix.
|
||||
|
||||
Args:
|
||||
suffix: The suffix.
|
||||
|
||||
Returns:
|
||||
The string ends with operation.
|
||||
"""
|
||||
if not isinstance(suffix, (StringVar, str)):
|
||||
raise_unsupported_operand_types("endswith", (type(self), type(suffix)))
|
||||
return string_ends_with_operation(self, suffix)
|
||||
|
||||
@overload
|
||||
def __lt__(self, other: StringVar | str) -> BooleanVar: ...
|
||||
|
||||
@ -501,6 +520,24 @@ def string_starts_with_operation(
|
||||
)
|
||||
|
||||
|
||||
@var_operation
|
||||
def string_ends_with_operation(
|
||||
full_string: StringVar[Any], suffix: StringVar[Any] | str
|
||||
):
|
||||
"""Check if a string ends with a suffix.
|
||||
|
||||
Args:
|
||||
full_string: The full string.
|
||||
suffix: The suffix.
|
||||
|
||||
Returns:
|
||||
Whether the string ends with the suffix.
|
||||
"""
|
||||
return var_operation_return(
|
||||
js_expression=f"{full_string}.endsWith({suffix})", var_type=bool
|
||||
)
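The new operation mirrors the existing `startswith`, so string vars can be tested on suffixes directly in frontend expressions; a small illustrative sketch:

import reflex as rx

class FileState(rx.State):
    filename: str = "report.pdf"

def pdf_hint() -> rx.Component:
    return rx.cond(
        FileState.filename.endswith(".pdf"),
        rx.text("PDF detected"),
        rx.text("Not a PDF"),
    )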
|
||||
|
||||
|
||||
@var_operation
|
||||
def string_item_operation(string: StringVar[Any], index: NumberVar | int):
|
||||
"""Get an item from a string.
|
||||
|
@ -25,7 +25,7 @@ def _pid_exists(pid):
|
||||
|
||||
def _wait_for_port(port, server_pid, timeout) -> Tuple[bool, str]:
|
||||
start = time.time()
|
||||
print(f"Waiting for up to {timeout} seconds for port {port} to start listening.")
|
||||
print(f"Waiting for up to {timeout} seconds for port {port} to start listening.") # noqa: T201
|
||||
while True:
|
||||
if not _pid_exists(server_pid):
|
||||
return False, f"Server PID {server_pid} is not running."
|
||||
@ -56,9 +56,9 @@ def main():
|
||||
for f in as_completed(futures):
|
||||
ok, msg = f.result()
|
||||
if ok:
|
||||
print(f"OK: {msg}")
|
||||
print(f"OK: {msg}") # noqa: T201
|
||||
else:
|
||||
print(f"FAIL: {msg}")
|
||||
print(f"FAIL: {msg}") # noqa: T201
|
||||
exit(1)
|
||||
|
||||
|
||||
|
@ -43,6 +43,8 @@ def LifespanApp():
|
||||
lifespan_task_global = 0
|
||||
|
||||
class LifespanState(rx.State):
|
||||
interval: int = 100
|
||||
|
||||
@rx.var
|
||||
def task_global(self) -> int:
|
||||
return lifespan_task_global
|
||||
@ -59,7 +61,15 @@ def LifespanApp():
|
||||
return rx.vstack(
|
||||
rx.text(LifespanState.task_global, id="task_global"),
|
||||
rx.text(LifespanState.context_global, id="context_global"),
|
||||
rx.moment(interval=100, on_change=LifespanState.tick),
|
||||
rx.button(
|
||||
rx.moment(
|
||||
interval=LifespanState.interval, on_change=LifespanState.tick
|
||||
),
|
||||
on_click=LifespanState.set_interval( # type: ignore
|
||||
rx.cond(LifespanState.interval, 0, 100)
|
||||
),
|
||||
id="toggle-tick",
|
||||
),
|
||||
)
|
||||
|
||||
app = rx.App()
|
||||
@ -108,6 +118,7 @@ async def test_lifespan(lifespan_app: AppHarness):
|
||||
original_task_global_text = task_global.text
|
||||
original_task_global_value = int(original_task_global_text)
|
||||
lifespan_app.poll_for_content(task_global, exp_not_equal=original_task_global_text)
|
||||
driver.find_element(By.ID, "toggle-tick").click() # avoid teardown errors
|
||||
assert lifespan_app.app_module.lifespan_task_global > original_task_global_value # type: ignore
|
||||
assert int(task_global.text) > original_task_global_value
|
||||
|
||||
|
@ -6,12 +6,16 @@ import asyncio
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Generator
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
import pytest
|
||||
from selenium.webdriver.common.by import By
|
||||
|
||||
from reflex.constants.event import Endpoint
|
||||
from reflex.testing import AppHarness, WebDriver
|
||||
|
||||
from .utils import poll_for_navigation
|
||||
|
||||
|
||||
def UploadFile():
|
||||
"""App for testing dynamic routes."""
|
||||
@ -23,7 +27,7 @@ def UploadFile():
|
||||
|
||||
class UploadState(rx.State):
|
||||
_file_data: Dict[str, str] = {}
|
||||
event_order: List[str] = []
|
||||
event_order: rx.Field[List[str]] = rx.field([])
|
||||
progress_dicts: List[dict] = []
|
||||
disabled: bool = False
|
||||
large_data: str = ""
|
||||
@ -50,6 +54,15 @@ def UploadFile():
|
||||
self.large_data = ""
|
||||
self.event_order.append("chain_event")
|
||||
|
||||
async def handle_upload_tertiary(self, files: List[rx.UploadFile]):
|
||||
for file in files:
|
||||
(rx.get_upload_dir() / (file.filename or "INVALID")).write_bytes(
|
||||
await file.read()
|
||||
)
|
||||
|
||||
def do_download(self):
|
||||
return rx.download(rx.get_upload_url("test.txt"))
|
||||
|
||||
def index():
|
||||
return rx.vstack(
|
||||
rx.input(
|
||||
@ -123,6 +136,34 @@ def UploadFile():
|
||||
on_click=rx.cancel_upload("secondary"),
|
||||
id="cancel_button_secondary",
|
||||
),
|
||||
rx.heading("Tertiary Upload/Download"),
|
||||
rx.upload.root(
|
||||
rx.vstack(
|
||||
rx.button("Select File"),
|
||||
rx.text("Drag and drop files here or click to select files"),
|
||||
),
|
||||
id="tertiary",
|
||||
),
|
||||
rx.button(
|
||||
"Upload",
|
||||
on_click=UploadState.handle_upload_tertiary( # type: ignore
|
||||
rx.upload_files(
|
||||
upload_id="tertiary",
|
||||
),
|
||||
),
|
||||
id="upload_button_tertiary",
|
||||
),
|
||||
rx.button(
|
||||
"Download - Frontend",
|
||||
on_click=rx.download(rx.get_upload_url("test.txt")),
|
||||
id="download-frontend",
|
||||
),
|
||||
rx.button(
|
||||
"Download - Backend",
|
||||
on_click=UploadState.do_download,
|
||||
id="download-backend",
|
||||
),
|
||||
rx.text(UploadState.event_order.to_string(), id="event-order"),
|
||||
)
|
||||
|
||||
app = rx.App(state=rx.State)
|
||||
@ -164,6 +205,24 @@ def driver(upload_file: AppHarness):
|
||||
driver.quit()
|
||||
|
||||
|
||||
def poll_for_token(driver: WebDriver, upload_file: AppHarness) -> str:
|
||||
"""Poll for the token input to be populated.
|
||||
|
||||
Args:
|
||||
driver: WebDriver instance.
|
||||
upload_file: harness for UploadFile app.
|
||||
|
||||
Returns:
|
||||
token value
|
||||
"""
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
return token
|
||||
|
||||
|
||||
@pytest.mark.parametrize("secondary", [False, True])
|
||||
@pytest.mark.asyncio
|
||||
async def test_upload_file(
|
||||
@ -178,11 +237,7 @@ async def test_upload_file(
|
||||
secondary: whether to use the secondary upload form
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
token = poll_for_token(driver, upload_file)
|
||||
full_state_name = upload_file.get_full_state_name(["_upload_state"])
|
||||
state_name = upload_file.get_state_name("_upload_state")
|
||||
substate_token = f"{token}_{full_state_name}"
|
||||
@ -204,6 +259,19 @@ async def test_upload_file(
|
||||
upload_box.send_keys(str(target_file))
|
||||
upload_button.click()
|
||||
|
||||
# check that the selected files are displayed
|
||||
selected_files = driver.find_element(By.ID, f"selected_files{suffix}")
|
||||
assert Path(selected_files.text).name == Path(exp_name).name
|
||||
|
||||
if secondary:
|
||||
event_order_displayed = driver.find_element(By.ID, "event-order")
|
||||
AppHarness._poll_for(lambda: "chain_event" in event_order_displayed.text)
|
||||
|
||||
state = await upload_file.get_state(substate_token)
|
||||
# only the secondary form tracks progress and chain events
|
||||
assert state.substates[state_name].event_order.count("upload_progress") == 1
|
||||
assert state.substates[state_name].event_order.count("chain_event") == 1
|
||||
|
||||
# look up the backend state and assert on uploaded contents
|
||||
async def get_file_data():
|
||||
return (
|
||||
@ -217,16 +285,6 @@ async def test_upload_file(
|
||||
normalized_file_data = {Path(k).name: v for k, v in file_data.items()}
|
||||
assert normalized_file_data[Path(exp_name).name] == exp_contents
|
||||
|
||||
# check that the selected files are displayed
|
||||
selected_files = driver.find_element(By.ID, f"selected_files{suffix}")
|
||||
assert Path(selected_files.text).name == Path(exp_name).name
|
||||
|
||||
state = await upload_file.get_state(substate_token)
|
||||
if secondary:
|
||||
# only the secondary form tracks progress and chain events
|
||||
assert state.substates[state_name].event_order.count("upload_progress") == 1
|
||||
assert state.substates[state_name].event_order.count("chain_event") == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_upload_file_multiple(tmp_path, upload_file: AppHarness, driver):
|
||||
@ -238,11 +296,7 @@ async def test_upload_file_multiple(tmp_path, upload_file: AppHarness, driver):
|
||||
driver: WebDriver instance.
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
token = poll_for_token(driver, upload_file)
|
||||
full_state_name = upload_file.get_full_state_name(["_upload_state"])
|
||||
state_name = upload_file.get_state_name("_upload_state")
|
||||
substate_token = f"{token}_{full_state_name}"
|
||||
@ -301,11 +355,7 @@ def test_clear_files(
|
||||
secondary: whether to use the secondary upload form.
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
poll_for_token(driver, upload_file)
|
||||
|
||||
suffix = "_secondary" if secondary else ""
|
||||
|
||||
@ -357,11 +407,7 @@ async def test_cancel_upload(tmp_path, upload_file: AppHarness, driver: WebDrive
|
||||
driver: WebDriver instance.
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
token_input = driver.find_element(By.ID, "token")
|
||||
assert token_input
|
||||
# wait for the backend connection to send the token
|
||||
token = upload_file.poll_for_value(token_input)
|
||||
assert token is not None
|
||||
token = poll_for_token(driver, upload_file)
|
||||
state_name = upload_file.get_state_name("_upload_state")
|
||||
state_full_name = upload_file.get_full_state_name(["_upload_state"])
|
||||
substate_token = f"{token}_{state_full_name}"
|
||||
@ -403,3 +449,55 @@ async def test_cancel_upload(tmp_path, upload_file: AppHarness, driver: WebDrive
|
||||
assert Path(exp_name).name not in normalized_file_data
|
||||
|
||||
target_file.unlink()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_upload_download_file(
|
||||
tmp_path,
|
||||
upload_file: AppHarness,
|
||||
driver: WebDriver,
|
||||
):
|
||||
"""Submit a file upload and then fetch it with rx.download.
|
||||
|
||||
This checks the special case `getBackendURL` logic in the _download event
|
||||
handler in state.js.
|
||||
|
||||
Args:
|
||||
tmp_path: pytest tmp_path fixture
|
||||
upload_file: harness for UploadFile app.
|
||||
driver: WebDriver instance.
|
||||
"""
|
||||
assert upload_file.app_instance is not None
|
||||
poll_for_token(driver, upload_file)
|
||||
|
||||
upload_box = driver.find_elements(By.XPATH, "//input[@type='file']")[2]
|
||||
assert upload_box
|
||||
upload_button = driver.find_element(By.ID, "upload_button_tertiary")
|
||||
assert upload_button
|
||||
|
||||
exp_name = "test.txt"
|
||||
exp_contents = "test file contents!"
|
||||
target_file = tmp_path / exp_name
|
||||
target_file.write_text(exp_contents)
|
||||
|
||||
upload_box.send_keys(str(target_file))
|
||||
upload_button.click()
|
||||
|
||||
# Download via event embedded in frontend code.
|
||||
download_frontend = driver.find_element(By.ID, "download-frontend")
|
||||
with poll_for_navigation(driver):
|
||||
download_frontend.click()
|
||||
assert urlsplit(driver.current_url).path == f"/{Endpoint.UPLOAD.value}/test.txt"
|
||||
assert driver.find_element(by=By.TAG_NAME, value="body").text == exp_contents
|
||||
|
||||
# Go back and wait for the app to reload.
|
||||
with poll_for_navigation(driver):
|
||||
driver.back()
|
||||
poll_for_token(driver, upload_file)
|
||||
|
||||
# Download via backend event handler.
|
||||
download_backend = driver.find_element(By.ID, "download-backend")
|
||||
with poll_for_navigation(driver):
|
||||
download_backend.click()
|
||||
assert urlsplit(driver.current_url).path == f"/{Endpoint.UPLOAD.value}/test.txt"
|
||||
assert driver.find_element(by=By.TAG_NAME, value="body").text == exp_contents
|
||||
|
46
tests/integration/tests_playwright/test_link_hover.py
Normal file
@ -0,0 +1,46 @@
|
||||
from typing import Generator
|
||||
|
||||
import pytest
|
||||
from playwright.sync_api import Page, expect
|
||||
|
||||
from reflex.testing import AppHarness
|
||||
|
||||
|
||||
def LinkApp():
|
||||
import reflex as rx
|
||||
|
||||
app = rx.App()
|
||||
|
||||
def index():
|
||||
return rx.vstack(
|
||||
rx.box(height="10em"), # spacer, so the link isn't hovered initially
|
||||
rx.link(
|
||||
"Click me",
|
||||
href="#",
|
||||
color="blue",
|
||||
_hover=rx.Style({"color": "red"}),
|
||||
),
|
||||
)
|
||||
|
||||
app.add_page(index, "/")
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def link_app(tmp_path_factory) -> Generator[AppHarness, None, None]:
|
||||
with AppHarness.create(
|
||||
root=tmp_path_factory.mktemp("link_app"),
|
||||
app_source=LinkApp, # type: ignore
|
||||
) as harness:
|
||||
assert harness.app_instance is not None, "app is not running"
|
||||
yield harness
|
||||
|
||||
|
||||
def test_link_hover(link_app: AppHarness, page: Page):
|
||||
assert link_app.frontend_url is not None
|
||||
page.goto(link_app.frontend_url)
|
||||
|
||||
link = page.get_by_role("link")
|
||||
expect(link).to_have_text("Click me")
|
||||
expect(link).to_have_css("color", "rgb(0, 0, 255)")
|
||||
link.hover()
|
||||
expect(link).to_have_css("color", "rgb(255, 0, 0)")
|
@ -135,7 +135,7 @@ def test_cond_computed_var():
|
||||
|
||||
comp = cond(True, CondStateComputed.computed_int, CondStateComputed.computed_str)
|
||||
|
||||
# TODO: shouln't this be a ComputedVar?
|
||||
# TODO: shouldn't this be a ComputedVar?
|
||||
assert isinstance(comp, Var)
|
||||
|
||||
state_name = format_state_name(CondStateComputed.get_full_name())
|
||||
|
@ -1,13 +1,19 @@
|
||||
import pytest
|
||||
|
||||
from reflex.components.lucide.icon import LUCIDE_ICON_LIST, Icon
|
||||
from reflex.components.lucide.icon import (
|
||||
LUCIDE_ICON_LIST,
|
||||
LUCIDE_ICON_MAPPING_OVERRIDE,
|
||||
Icon,
|
||||
)
|
||||
from reflex.utils import format
|
||||
|
||||
|
||||
@pytest.mark.parametrize("tag", LUCIDE_ICON_LIST)
|
||||
def test_icon(tag):
|
||||
icon = Icon.create(tag)
|
||||
assert icon.alias == f"Lucide{format.to_title_case(tag)}Icon"
|
||||
assert icon.alias == "Lucide" + LUCIDE_ICON_MAPPING_OVERRIDE.get(
|
||||
tag, f"{format.to_title_case(tag)}Icon"
|
||||
)
|
||||
|
||||
|
||||
def test_icon_missing_tag():
|
||||
|
@ -223,12 +223,17 @@ def test_event_console_log():
|
||||
)
|
||||
assert (
|
||||
format.format_event(spec)
|
||||
== 'Event("_call_function", {function:(() => (console["log"]("message")))})'
|
||||
== 'Event("_call_function", {function:(() => (console["log"]("message"))),callback:null})'
|
||||
)
|
||||
spec = event.console_log(Var(_js_expr="message"))
|
||||
assert (
|
||||
format.format_event(spec)
|
||||
== 'Event("_call_function", {function:(() => (console["log"](message)))})'
|
||||
== 'Event("_call_function", {function:(() => (console["log"](message))),callback:null})'
|
||||
)
|
||||
spec2 = event.console_log(Var(_js_expr="message2")).add_args(Var("throwaway"))
|
||||
assert (
|
||||
format.format_event(spec2)
|
||||
== 'Event("_call_function", {function:(() => (console["log"](message2))),callback:null})'
|
||||
)
|
||||
|
||||
|
||||
@ -243,12 +248,17 @@ def test_event_window_alert():
|
||||
)
|
||||
assert (
|
||||
format.format_event(spec)
|
||||
== 'Event("_call_function", {function:(() => (window["alert"]("message")))})'
|
||||
== 'Event("_call_function", {function:(() => (window["alert"]("message"))),callback:null})'
|
||||
)
|
||||
spec = event.window_alert(Var(_js_expr="message"))
|
||||
assert (
|
||||
format.format_event(spec)
|
||||
== 'Event("_call_function", {function:(() => (window["alert"](message)))})'
|
||||
== 'Event("_call_function", {function:(() => (window["alert"](message))),callback:null})'
|
||||
)
|
||||
spec2 = event.window_alert(Var(_js_expr="message2")).add_args(Var("throwaway"))
|
||||
assert (
|
||||
format.format_event(spec2)
|
||||
== 'Event("_call_function", {function:(() => (window["alert"](message2))),callback:null})'
|
||||
)
|
||||
|
||||
|
||||
|
@ -15,11 +15,11 @@ from reflex.utils.prerequisites import get_redis_status
|
||||
"mock_redis_client, expected_status",
|
||||
[
|
||||
# Case 1: Redis client is available and responds to ping
|
||||
(Mock(ping=lambda: None), True),
|
||||
(Mock(ping=lambda: None), {"redis": True}),
|
||||
# Case 2: Redis client raises RedisError
|
||||
(Mock(ping=lambda: (_ for _ in ()).throw(RedisError)), False),
|
||||
(Mock(ping=lambda: (_ for _ in ()).throw(RedisError)), {"redis": False}),
|
||||
# Case 3: Redis client is not used
|
||||
(None, None),
|
||||
(None, {"redis": None}),
|
||||
],
|
||||
)
|
||||
async def test_get_redis_status(mock_redis_client, expected_status, mocker):
|
||||
@ -41,12 +41,12 @@ async def test_get_redis_status(mock_redis_client, expected_status, mocker):
|
||||
"mock_engine, execute_side_effect, expected_status",
|
||||
[
|
||||
# Case 1: Database is accessible
|
||||
(MagicMock(), None, True),
|
||||
(MagicMock(), None, {"db": True}),
|
||||
# Case 2: Database connection error (OperationalError)
|
||||
(
|
||||
MagicMock(),
|
||||
sqlalchemy.exc.OperationalError("error", "error", "error"),
|
||||
False,
|
||||
{"db": False},
|
||||
),
|
||||
],
|
||||
)
|
||||
@ -74,25 +74,49 @@ async def test_get_db_status(mock_engine, execute_side_effect, expected_status,
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@pytest.mark.parametrize(
|
||||
"db_status, redis_status, expected_status, expected_code",
|
||||
"db_enabled, redis_enabled, db_status, redis_status, expected_status, expected_code",
|
||||
[
|
||||
# Case 1: Both services are connected
|
||||
(True, True, {"status": True, "db": True, "redis": True}, 200),
|
||||
(True, True, True, True, {"status": True, "db": True, "redis": True}, 200),
|
||||
# Case 2: Database not connected, Redis connected
|
||||
(False, True, {"status": False, "db": False, "redis": True}, 503),
|
||||
(True, True, False, True, {"status": False, "db": False, "redis": True}, 503),
|
||||
# Case 3: Database connected, Redis not connected
|
||||
(True, False, {"status": False, "db": True, "redis": False}, 503),
|
||||
(True, True, True, False, {"status": False, "db": True, "redis": False}, 503),
|
||||
# Case 4: Both services not connected
|
||||
(False, False, {"status": False, "db": False, "redis": False}, 503),
|
||||
(True, True, False, False, {"status": False, "db": False, "redis": False}, 503),
|
||||
# Case 5: Database Connected, Redis not used
|
||||
(True, None, {"status": True, "db": True, "redis": False}, 200),
|
||||
(True, False, True, None, {"status": True, "db": True}, 200),
|
||||
# Case 6: Database not used, Redis Connected
|
||||
(False, True, None, True, {"status": True, "redis": True}, 200),
|
||||
# Case 7: Both services not used
|
||||
(False, False, None, None, {"status": True}, 200),
|
||||
],
|
||||
)
|
||||
async def test_health(db_status, redis_status, expected_status, expected_code, mocker):
|
||||
async def test_health(
|
||||
db_enabled,
|
||||
redis_enabled,
|
||||
db_status,
|
||||
redis_status,
|
||||
expected_status,
|
||||
expected_code,
|
||||
mocker,
|
||||
):
|
||||
# Mock get_db_status and get_redis_status
|
||||
mocker.patch("reflex.app.get_db_status", return_value=db_status)
|
||||
mocker.patch(
|
||||
"reflex.utils.prerequisites.get_redis_status", return_value=redis_status
|
||||
"reflex.utils.prerequisites.check_db_used",
|
||||
return_value=db_enabled,
|
||||
)
|
||||
mocker.patch(
|
||||
"reflex.utils.prerequisites.check_redis_used",
|
||||
return_value=redis_enabled,
|
||||
)
|
||||
mocker.patch(
|
||||
"reflex.app.get_db_status",
|
||||
return_value={"db": db_status},
|
||||
)
|
||||
mocker.patch(
|
||||
"reflex.utils.prerequisites.get_redis_status",
|
||||
return_value={"redis": redis_status},
|
||||
)
|
||||
|
||||
# Call the async health function
|
||||
|
@ -60,6 +60,7 @@ from reflex.utils.exceptions import (
|
||||
ReflexRuntimeError,
|
||||
SetUndefinedStateVarError,
|
||||
StateSerializationError,
|
||||
UnretrievableVarValueError,
|
||||
)
|
||||
from reflex.utils.format import json_dumps
|
||||
from reflex.vars.base import Var, computed_var
|
||||
@ -115,7 +116,7 @@ class TestState(BaseState):
|
||||
# Set this class as not test one
|
||||
__test__ = False
|
||||
|
||||
num1: int
|
||||
num1: rx.Field[int]
|
||||
num2: float = 3.14
|
||||
key: str
|
||||
map_key: str = "a"
|
||||
@ -163,7 +164,7 @@ class ChildState(TestState):
|
||||
"""A child state fixture."""
|
||||
|
||||
value: str
|
||||
count: int = 23
|
||||
count: rx.Field[int] = rx.field(23)
|
||||
|
||||
def change_both(self, value: str, count: int):
|
||||
"""Change both the value and count.
|
||||
@ -976,7 +977,7 @@ class InterdependentState(BaseState):
|
||||
"""A state with 3 vars and 3 computed vars.
|
||||
|
||||
x: a variable that no computed var depends on
|
||||
v1: a varable that one computed var directly depeneds on
|
||||
v1: a variable that one computed var directly depends on
|
||||
_v2: a backend variable that one computed var directly depends on
|
||||
|
||||
v1x2: a computed var that depends on v1
|
||||
@ -1663,7 +1664,7 @@ async def state_manager(request) -> AsyncGenerator[StateManager, None]:
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def substate_token(state_manager, token):
|
||||
def substate_token(state_manager, token) -> str:
|
||||
"""A token + substate name for looking up in state manager.
|
||||
|
||||
Args:
|
||||
@ -1936,6 +1937,14 @@ def mock_app(mock_app_simple: rx.App, state_manager: StateManager) -> rx.App:
|
||||
return mock_app_simple
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class ModelDC:
|
||||
"""A dataclass."""
|
||||
|
||||
foo: str = "bar"
|
||||
ls: list[dict] = dataclasses.field(default_factory=list)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_state_proxy(grandchild_state: GrandchildState, mock_app: rx.App):
|
||||
"""Test that the state proxy works.
|
||||
@ -2038,6 +2047,7 @@ class BackgroundTaskState(BaseState):
|
||||
|
||||
order: List[str] = []
|
||||
dict_list: Dict[str, List[int]] = {"foo": [1, 2, 3]}
|
||||
dc: ModelDC = ModelDC()
|
||||
|
||||
def __init__(self, **kwargs): # noqa: D107
|
||||
super().__init__(**kwargs)
|
||||
@ -2063,10 +2073,18 @@ class BackgroundTaskState(BaseState):
|
||||
with pytest.raises(ImmutableStateError):
|
||||
self.order.append("bad idea")
|
||||
|
||||
with pytest.raises(ImmutableStateError):
|
||||
# Cannot manipulate dataclass attributes.
|
||||
self.dc.foo = "baz"
|
||||
|
||||
with pytest.raises(ImmutableStateError):
|
||||
# Even nested access to mutables raises an exception.
|
||||
self.dict_list["foo"].append(42)
|
||||
|
||||
with pytest.raises(ImmutableStateError):
|
||||
# Cannot modify dataclass list attribute.
|
||||
self.dc.ls.append({"foo": "bar"})
|
||||
|
||||
with pytest.raises(ImmutableStateError):
|
||||
# Direct calling another handler that modifies state raises an exception.
|
||||
self.other()
|
||||
@ -2685,7 +2703,7 @@ class Custom1(Base):
|
||||
self.foo = val
|
||||
|
||||
def double_foo(self) -> str:
|
||||
"""Concantenate foo with foo.
|
||||
"""Concatenate foo with foo.
|
||||
|
||||
Returns:
|
||||
foo + foo
|
||||
@ -3267,9 +3285,9 @@ async def test_setvar(mock_app: rx.App, token: str):
|
||||
print(update)
|
||||
assert state.array == [43]
|
||||
|
||||
# Cannot setvar for non-existant var
|
||||
# Cannot setvar for non-existent var
|
||||
with pytest.raises(AttributeError):
|
||||
TestState.setvar("non_existant_var")
|
||||
TestState.setvar("non_existent_var")
|
||||
|
||||
# Cannot setvar for computed vars
|
||||
with pytest.raises(AttributeError):
|
||||
@ -3582,13 +3600,6 @@ class ModelV2(BaseModelV2):
|
||||
foo: str = "bar"
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class ModelDC:
|
||||
"""A dataclass."""
|
||||
|
||||
foo: str = "bar"
|
||||
|
||||
|
||||
class PydanticState(rx.State):
|
||||
"""A state with pydantic BaseModel vars."""
|
||||
|
||||
@ -3610,11 +3621,22 @@ def test_mutable_models():
|
||||
assert state.dirty_vars == {"v2"}
|
||||
state.dirty_vars.clear()
|
||||
|
||||
# Not yet supported ENG-4083
|
||||
# assert isinstance(state.dc, MutableProxy) #noqa: ERA001
|
||||
# state.dc.foo = "baz" #noqa: ERA001
|
||||
# assert state.dirty_vars == {"dc"} #noqa: ERA001
|
||||
# state.dirty_vars.clear() #noqa: ERA001
|
||||
assert isinstance(state.dc, MutableProxy)
|
||||
state.dc.foo = "baz"
|
||||
assert state.dirty_vars == {"dc"}
|
||||
state.dirty_vars.clear()
|
||||
assert state.dirty_vars == set()
|
||||
state.dc.ls.append({"hi": "reflex"})
|
||||
assert state.dirty_vars == {"dc"}
|
||||
state.dirty_vars.clear()
|
||||
assert state.dirty_vars == set()
|
||||
assert dataclasses.asdict(state.dc) == {"foo": "baz", "ls": [{"hi": "reflex"}]}
|
||||
assert dataclasses.astuple(state.dc) == ("baz", [{"hi": "reflex"}])
|
||||
# creating a new instance shouldn't mark the state dirty
|
||||
assert dataclasses.replace(state.dc, foo="quuc") == ModelDC(
|
||||
foo="quuc", ls=[{"hi": "reflex"}]
|
||||
)
|
||||
assert state.dirty_vars == set()
|
||||
|
||||
|
||||
def test_get_value():
|
||||
@ -3764,3 +3786,32 @@ async def test_upcast_event_handler_arg(handler, payload):
|
||||
state = UpcastState()
|
||||
async for update in state._process_event(handler, state, payload):
|
||||
assert update.delta == {UpcastState.get_full_name(): {"passed": True}}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_var_value(state_manager: StateManager, substate_token: str):
|
||||
"""Test that get_var_value works correctly.
|
||||
|
||||
Args:
|
||||
state_manager: The state manager to use.
|
||||
substate_token: Token for the substate used by state_manager.
|
||||
"""
|
||||
state = await state_manager.get_state(substate_token)
|
||||
|
||||
# State Var from same state
|
||||
assert await state.get_var_value(TestState.num1) == 0
|
||||
state.num1 = 42
|
||||
assert await state.get_var_value(TestState.num1) == 42
|
||||
|
||||
# State Var from another state
|
||||
child_state = await state.get_state(ChildState)
|
||||
assert await state.get_var_value(ChildState.count) == 23
|
||||
child_state.count = 66
|
||||
assert await state.get_var_value(ChildState.count) == 66
|
||||
|
||||
# LiteralVar with known value
|
||||
assert await state.get_var_value(rx.Var.create([1, 2, 3])) == [1, 2, 3]
|
||||
|
||||
# Generic Var with no state
|
||||
with pytest.raises(UnretrievableVarValueError):
|
||||
await state.get_var_value(rx.Var("undefined"))
|
||||
|
@ -515,7 +515,7 @@ def test_var_indexing_types(var, type_):
|
||||
"""Test that indexing returns valid types.
|
||||
|
||||
Args:
|
||||
var : The list, typle base var.
|
||||
var : The list, tuple base var.
|
||||
type_ : The type on indexed object.
|
||||
|
||||
"""
|
||||
|
@ -262,7 +262,7 @@ def test_to_kebab_case(input: str, output: str):
|
||||
],
|
||||
)
|
||||
def test_format_string(input: str, output: str):
|
||||
"""Test formating the input as JS string literal.
|
||||
"""Test formatting the input as JS string literal.
|
||||
|
||||
Args:
|
||||
input: the input string.
|
||||
@ -680,7 +680,7 @@ def test_format_array_ref(input, output):
|
||||
],
|
||||
)
|
||||
def test_format_library_name(input: str, output: str):
|
||||
"""Test formating a library name to remove the @version part.
|
||||
"""Test formatting a library name to remove the @version part.
|
||||
|
||||
Args:
|
||||
input: the input string.
|
||||
|