enable PERF rules (#4469)
* enable PERF rules
* fix scripts folder
* Update reflex/compiler/utils.py

Co-authored-by: Masen Furer <m_github@0x26.net>
This commit is contained in:
parent
61cb72596e
commit
d7956c19d3
@@ -85,14 +85,15 @@ build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.ruff]
|
||||
target-version = "py39"
|
||||
output-format = "concise"
|
||||
lint.isort.split-on-trailing-comma = false
|
||||
lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "PTH", "RUF", "SIM", "W"]
|
||||
lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "PERF", "PTH", "RUF", "SIM", "W"]
|
||||
lint.ignore = ["B008", "D205", "E501", "F403", "SIM115", "RUF006", "RUF012"]
|
||||
lint.pydocstyle.convention = "google"
|
||||
|
||||
[tool.ruff.lint.per-file-ignores]
|
||||
"__init__.py" = ["F401"]
|
||||
"tests/*.py" = ["D100", "D103", "D104", "B018"]
|
||||
"tests/*.py" = ["D100", "D103", "D104", "B018", "PERF"]
|
||||
"reflex/.templates/*.py" = ["D100", "D103", "D104"]
|
||||
"*.pyi" = ["D301", "D415", "D417", "D418", "E742"]
|
||||
"*/blank.py" = ["I001"]
|
||||
|
@@ -30,15 +30,16 @@ def validate_field_name(bases: List[Type["BaseModel"]], field_name: str) -> None
|
||||
|
||||
# can't use reflex.config.environment here cause of circular import
|
||||
reload = os.getenv("__RELOAD_CONFIG", "").lower() == "true"
|
||||
for base in bases:
|
||||
try:
|
||||
base = None
|
||||
try:
|
||||
for base in bases:
|
||||
if not reload and getattr(base, field_name, None):
|
||||
pass
|
||||
except TypeError as te:
|
||||
raise VarNameError(
|
||||
f'State var "{field_name}" in {base} has been shadowed by a substate var; '
|
||||
f'use a different field name instead".'
|
||||
) from te
|
||||
except TypeError as te:
|
||||
raise VarNameError(
|
||||
f'State var "{field_name}" in {base} has been shadowed by a substate var; '
|
||||
f'use a different field name instead".'
|
||||
) from te
|
||||
|
||||
|
||||
# monkeypatch pydantic validate_field_name method to skip validating
|
||||
|
@@ -123,8 +123,7 @@ def compile_imports(import_dict: ParsedImportDict) -> list[dict]:
|
||||
raise ValueError("No default field allowed for empty library.")
|
||||
if rest is None or len(rest) == 0:
|
||||
raise ValueError("No fields to import.")
|
||||
for module in sorted(rest):
|
||||
import_dicts.append(get_import_dict(module))
|
||||
import_dicts.extend(get_import_dict(module) for module in sorted(rest))
|
||||
continue
|
||||
|
||||
# remove the version before rendering the package imports
|
||||
|
@@ -1403,8 +1403,9 @@ class Component(BaseComponent, ABC):
|
||||
if not isinstance(list_of_import_dict, list):
|
||||
list_of_import_dict = [list_of_import_dict]
|
||||
|
||||
for import_dict in list_of_import_dict:
|
||||
added_import_dicts.append(parse_imports(import_dict))
|
||||
added_import_dicts.extend(
|
||||
[parse_imports(import_dict) for import_dict in list_of_import_dict]
|
||||
)
|
||||
|
||||
return imports.merge_imports(
|
||||
*self._get_props_imports(),
|
||||
@@ -1586,8 +1587,7 @@ class Component(BaseComponent, ABC):
|
||||
if hooks is not None:
|
||||
code[hooks] = None
|
||||
|
||||
for hook, var_data in self._get_added_hooks().items():
|
||||
code[hook] = var_data
|
||||
code.update(self._get_added_hooks())
|
||||
|
||||
# Add the hook code for the children.
|
||||
for child in self.children:
|
||||
|
@@ -127,7 +127,7 @@ _MAPPING = {
|
||||
|
||||
|
||||
EXCLUDE = ["del_", "Del", "image"]
|
||||
for _, v in _MAPPING.items():
|
||||
for v in _MAPPING.values():
|
||||
v.extend([mod.capitalize() for mod in v if mod not in EXCLUDE])
|
||||
|
||||
_SUBMOD_ATTRS: dict[str, list[str]] = _MAPPING
|
||||
|
@@ -339,5 +339,5 @@ _MAPPING = {
|
||||
],
|
||||
}
|
||||
EXCLUDE = ["del_", "Del", "image"]
|
||||
for _, v in _MAPPING.items():
|
||||
for v in _MAPPING.values():
|
||||
v.extend([mod.capitalize() for mod in v if mod not in EXCLUDE])
|
||||
|
@@ -350,13 +350,14 @@ class EventSpec(EventActionsMixin):
|
||||
|
||||
# Construct the payload.
|
||||
values = []
|
||||
for arg in args:
|
||||
try:
|
||||
values.append(LiteralVar.create(arg))
|
||||
except TypeError as e:
|
||||
raise EventHandlerTypeError(
|
||||
f"Arguments to event handlers must be Vars or JSON-serializable. Got {arg} of type {type(arg)}."
|
||||
) from e
|
||||
arg = None
|
||||
try:
|
||||
for arg in args:
|
||||
values.append(LiteralVar.create(value=arg)) # noqa: PERF401
|
||||
except TypeError as e:
|
||||
raise EventHandlerTypeError(
|
||||
f"Arguments to event handlers must be Vars or JSON-serializable. Got {arg} of type {type(arg)}."
|
||||
) from e
|
||||
new_payload = tuple(zip(fn_args, values))
|
||||
return self.with_args(self.args + new_payload)
|
||||
|
||||
|
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import re
|
||||
from collections import defaultdict
|
||||
from contextlib import suppress
|
||||
from typing import Any, ClassVar, Optional, Type, Union
|
||||
|
||||
import alembic.autogenerate
|
||||
@@ -290,11 +291,10 @@ class Model(Base, sqlmodel.SQLModel): # pyright: ignore [reportGeneralTypeIssue
|
||||
relationships = {}
|
||||
# SQLModel relationships do not appear in __fields__, but should be included if present.
|
||||
for name in self.__sqlmodel_relationships__:
|
||||
try:
|
||||
with suppress(
|
||||
sqlalchemy.orm.exc.DetachedInstanceError # This happens when the relationship was never loaded and the session is closed.
|
||||
):
|
||||
relationships[name] = self._dict_recursive(getattr(self, name))
|
||||
except sqlalchemy.orm.exc.DetachedInstanceError:
|
||||
# This happens when the relationship was never loaded and the session is closed.
|
||||
continue
|
||||
return {
|
||||
**base_fields,
|
||||
**relationships,
|
||||
|
@@ -3438,17 +3438,16 @@ class StateManagerRedis(StateManager):
|
||||
)
|
||||
|
||||
# Recursively set_state on all known substates.
|
||||
tasks = []
|
||||
for substate in state.substates.values():
|
||||
tasks.append(
|
||||
asyncio.create_task(
|
||||
self.set_state(
|
||||
token=_substate_key(client_token, substate),
|
||||
state=substate,
|
||||
lock_id=lock_id,
|
||||
)
|
||||
tasks = [
|
||||
asyncio.create_task(
|
||||
self.set_state(
|
||||
_substate_key(client_token, substate),
|
||||
substate,
|
||||
lock_id,
|
||||
)
|
||||
)
|
||||
for substate in state.substates.values()
|
||||
]
|
||||
# Persist only the given state (parents or substates are excluded by BaseState.__getstate__).
|
||||
if state._get_was_touched():
|
||||
pickle_state = state._serialize()
|
||||
|
@@ -58,7 +58,9 @@ def get_process_on_port(port) -> Optional[psutil.Process]:
|
||||
The process on the given port.
|
||||
"""
|
||||
for proc in psutil.process_iter(["pid", "name", "cmdline"]):
|
||||
try:
|
||||
with contextlib.suppress(
|
||||
psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess
|
||||
):
|
||||
if importlib.metadata.version("psutil") >= "6.0.0":
|
||||
conns = proc.net_connections(kind="inet") # type: ignore
|
||||
else:
|
||||
@@ -66,8 +68,6 @@ def get_process_on_port(port) -> Optional[psutil.Process]:
|
||||
for conn in conns:
|
||||
if conn.laddr.port == int(port):
|
||||
return proc
|
||||
except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
|
@@ -287,10 +287,9 @@ def _generate_docstrings(clzs: list[Type[Component]], props: list[str]) -> str:
|
||||
for line in (clz.create.__doc__ or "").splitlines():
|
||||
if "**" in line:
|
||||
indent = line.split("**")[0]
|
||||
for nline in [
|
||||
f"{indent}{n}:{' '.join(c)}" for n, c in props_comments.items()
|
||||
]:
|
||||
new_docstring.append(nline)
|
||||
new_docstring.extend(
|
||||
[f"{indent}{n}:{' '.join(c)}" for n, c in props_comments.items()]
|
||||
)
|
||||
new_docstring.append(line)
|
||||
return "\n".join(new_docstring)
|
||||
|
||||
|
@@ -49,11 +49,10 @@ def main():
|
||||
parser.add_argument("--server-pid", type=int)
|
||||
args = parser.parse_args()
|
||||
executor = ThreadPoolExecutor(max_workers=len(args.port))
|
||||
futures = []
|
||||
for p in args.port:
|
||||
futures.append(
|
||||
executor.submit(_wait_for_port, p, args.server_pid, args.timeout)
|
||||
)
|
||||
futures = [
|
||||
executor.submit(_wait_for_port, p, args.server_pid, args.timeout)
|
||||
for p in args.port
|
||||
]
|
||||
for f in as_completed(futures):
|
||||
ok, msg = f.result()
|
||||
if ok:
|
||||
|
Loading…
Reference in New Issue
Block a user