enable PERF rules

Lendemor 2024-12-03 18:23:05 +01:00
parent c721227a06
commit db675ba04a
11 changed files with 42 additions and 42 deletions

View File

@@ -93,13 +93,13 @@ build-backend = "poetry.core.masonry.api"
 [tool.ruff]
 target-version = "py39"
 lint.isort.split-on-trailing-comma = false
-lint.select = ["B", "D", "E", "F", "I", "SIM", "W"]
+lint.select = ["B", "D", "E", "F", "I", "SIM", "W", "PERF"]
 lint.ignore = ["B008", "D205", "E501", "F403", "SIM115"]
 lint.pydocstyle.convention = "google"
 [tool.ruff.lint.per-file-ignores]
 "__init__.py" = ["F401"]
-"tests/*.py" = ["D100", "D103", "D104", "B018"]
+"tests/*.py" = ["D100", "D103", "D104", "B018", "PERF"]
 "reflex/.templates/*.py" = ["D100", "D103", "D104"]
 "*.pyi" = ["D301", "D415", "D417", "D418", "E742"]
 "*/blank.py" = ["I001"]

View File

@@ -30,15 +30,16 @@ def validate_field_name(bases: List[Type["BaseModel"]], field_name: str) -> None
     # can't use reflex.config.environment here cause of circular import
     reload = os.getenv("__RELOAD_CONFIG", "").lower() == "true"
-    for base in bases:
-        try:
+    base = None
+    try:
+        for base in bases:
             if not reload and getattr(base, field_name, None):
                 pass
-        except TypeError as te:
-            raise VarNameError(
-                f'State var "{field_name}" in {base} has been shadowed by a substate var; '
-                f'use a different field name instead".'
-            ) from te
+    except TypeError as te:
+        raise VarNameError(
+            f'State var "{field_name}" in {base} has been shadowed by a substate var; '
+            f'use a different field name instead".'
+        ) from te
     # monkeypatch pydantic validate_field_name method to skip validating
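The change above addresses PERF203 (try/except inside a loop): the handler is hoisted out of the loop so it is set up once rather than on every iteration, and base = None is added so the error message can still reference the last value of the loop variable. A minimal sketch of the same rewrite with generic, made-up names:

from typing import Callable, Iterable


def check_all(items: Iterable[str], check: Callable[[str], None]) -> None:
    """Run check() on every item, wrapping the first TypeError with context."""
    item = None  # bound up front so the except block can name the failing item
    try:
        for item in items:
            check(item)
    except TypeError as err:
        raise ValueError(f"check failed for {item!r}") from err

Note that hoisting only preserves behaviour when the handler re-raises, as it does here; if the original except clause continued the loop, the rewrite would stop at the first failure instead.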

View File

@@ -123,8 +123,7 @@ def compile_imports(import_dict: ParsedImportDict) -> list[dict]:
                 raise ValueError("No default field allowed for empty library.")
             if rest is None or len(rest) == 0:
                 raise ValueError("No fields to import.")
-            for module in sorted(rest):
-                import_dicts.append(get_import_dict(module))
+            import_dicts.extend([get_import_dict(module) for module in sorted(rest)])
             continue
         # remove the version before rendering the package imports
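This is the PERF401 pattern (building a list by appending inside a loop); the fix collects the new entries in a comprehension and adds them with a single extend() call. A minimal sketch with made-up names, where get_import_dict stands in for any per-item transformation:

def get_import_dict(module: str) -> dict[str, str]:
    """Stand-in for the real helper; just wraps the module name."""
    return {"name": module}


modules = {"c", "a", "b"}

# Flagged by PERF401:
import_dicts = []
for module in sorted(modules):
    import_dicts.append(get_import_dict(module))

# Rewrite used in this commit:
import_dicts = []
import_dicts.extend([get_import_dict(module) for module in sorted(modules)])

When the target list starts out empty, a plain comprehension assignment would do; extend() is kept here because in compile_imports the list accumulates entries across an enclosing loop.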

View File

@@ -1408,8 +1408,9 @@ class Component(BaseComponent, ABC):
             if not isinstance(list_of_import_dict, list):
                 list_of_import_dict = [list_of_import_dict]
-            for import_dict in list_of_import_dict:
-                added_import_dicts.append(parse_imports(import_dict))
+            added_import_dicts.extend(
+                [parse_imports(import_dict) for import_dict in list_of_import_dict]
+            )
         return imports.merge_imports(
             *self._get_props_imports(),

View File

@@ -127,7 +127,7 @@ _MAPPING = {
 EXCLUDE = ["del_", "Del", "image"]
-for _, v in _MAPPING.items():
+for v in _MAPPING.values():
     v.extend([mod.capitalize() for mod in v if mod not in EXCLUDE])
 _SUBMOD_ATTRS: dict[str, list[str]] = _MAPPING
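This hunk and the identical one below address PERF102 (incorrect dict iterator): when only the values are used, iterating .values() avoids unpacking and discarding the key on every step. A minimal sketch with a made-up mapping:

_MAPPING = {"forms": ["button", "input"], "media": ["img", "video"]}
EXCLUDE = ["img"]

# Flagged by PERF102: the key is unpacked into `_` and never used.
# for _, v in _MAPPING.items():
#     v.extend([mod.capitalize() for mod in v if mod not in EXCLUDE])

# Rewrite: iterate the values directly.
for v in _MAPPING.values():
    v.extend([mod.capitalize() for mod in v if mod not in EXCLUDE])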

View File

@@ -339,5 +339,5 @@ _MAPPING = {
     ],
 }
 EXCLUDE = ["del_", "Del", "image"]
-for _, v in _MAPPING.items():
+for v in _MAPPING.values():
     v.extend([mod.capitalize() for mod in v if mod not in EXCLUDE])

View File

@@ -349,13 +349,14 @@ class EventSpec(EventActionsMixin):
         # Construct the payload.
         values = []
-        for arg in args:
-            try:
-                values.append(LiteralVar.create(arg))
-            except TypeError as e:
-                raise EventHandlerTypeError(
-                    f"Arguments to event handlers must be Vars or JSON-serializable. Got {arg} of type {type(arg)}."
-                ) from e
+        arg = None
+        try:
+            for arg in args:
+                values.append(LiteralVar.create(value=arg))  # noqa: PERF401
+        except TypeError as e:
+            raise EventHandlerTypeError(
+                f"Arguments to event handlers must be Vars or JSON-serializable. Got {arg} of type {type(arg)}."
+            ) from e
         new_payload = tuple(zip(fn_args, values))
         return self.with_args(self.args + new_payload)
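Here PERF203 is resolved by hoisting the try/except, but PERF401 cannot be satisfied with a comprehension because the except clause needs the failing arg for its error message, so the single append line is silenced with an inline noqa instead. A minimal sketch of the same trade-off, with made-up names and a hypothetical parser:

def parse_ints(raw_values: list[str]) -> list[int]:
    """Collect parsed integers, re-raising the first bad value with context."""
    results = []
    value = None
    try:
        for value in raw_values:
            # A comprehension would hide `value` from the except clause below.
            results.append(int(value))  # noqa: PERF401
    except ValueError as err:
        raise RuntimeError(f"could not parse {value!r}") from err
    return results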

View File

@@ -3,6 +3,7 @@
 from __future__ import annotations

 from collections import defaultdict
+from contextlib import suppress
 from typing import Any, ClassVar, Optional, Type, Union

 import alembic.autogenerate
@@ -199,11 +200,10 @@ class Model(Base, sqlmodel.SQLModel): # pyright: ignore [reportGeneralTypeIssue
         relationships = {}
         # SQLModel relationships do not appear in __fields__, but should be included if present.
         for name in self.__sqlmodel_relationships__:
-            try:
+            with suppress(
+                sqlalchemy.orm.exc.DetachedInstanceError  # This happens when the relationship was never loaded and the session is closed.
+            ):
                 relationships[name] = self._dict_recursive(getattr(self, name))
-            except sqlalchemy.orm.exc.DetachedInstanceError:
-                # This happens when the relationship was never loaded and the session is closed.
-                continue
         return {
             **base_fields,
             **relationships,
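Instead of hoisting the try/except, this hunk replaces the per-item try/except/continue with contextlib.suppress, which expresses "ignore this error and move on" without the try/except block that PERF203 reports inside a loop. A minimal sketch of the idiom with made-up names:

import contextlib


def pick_existing(names: list[str], source: dict[str, int]) -> dict[str, int]:
    """Copy the entries that exist in source; silently skip missing names."""
    found: dict[str, int] = {}
    for name in names:
        # Equivalent to try/except KeyError: continue, but without a
        # try/except block inside the loop.
        with contextlib.suppress(KeyError):
            found[name] = source[name]
    return found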

View File

@@ -3369,17 +3369,16 @@ class StateManagerRedis(StateManager):
         )
         # Recursively set_state on all known substates.
-        tasks = []
-        for substate in state.substates.values():
-            tasks.append(
-                asyncio.create_task(
-                    self.set_state(
-                        token=_substate_key(client_token, substate),
-                        state=substate,
-                        lock_id=lock_id,
-                    )
-                )
-            )
+        tasks = [
+            asyncio.create_task(
+                self.set_state(
+                    _substate_key(client_token, substate),
+                    substate,
+                    lock_id,
+                )
+            )
+            for substate in state.substates.values()
+        ]
         # Persist only the given state (parents or substates are excluded by BaseState.__getstate__).
         if state._get_was_touched():
             pickle_state = state._serialize()

View File

@@ -58,7 +58,9 @@ def get_process_on_port(port) -> Optional[psutil.Process]:
         The process on the given port.
     """
     for proc in psutil.process_iter(["pid", "name", "cmdline"]):
-        try:
+        with contextlib.suppress(
+            psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess
+        ):
             if importlib.metadata.version("psutil") >= "6.0.0":
                 conns = proc.net_connections(kind="inet")  # type: ignore
             else:
@@ -66,8 +68,6 @@ def get_process_on_port(port) -> Optional[psutil.Process]:
             for conn in conns:
                 if conn.laddr.port == int(port):
                     return proc
-        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
-            pass
     return None

View File

@@ -285,10 +285,9 @@ def _generate_docstrings(clzs: list[Type[Component]], props: list[str]) -> str:
     for line in (clz.create.__doc__ or "").splitlines():
         if "**" in line:
             indent = line.split("**")[0]
-            for nline in [
-                f"{indent}{n}:{' '.join(c)}" for n, c in props_comments.items()
-            ]:
-                new_docstring.append(nline)
+            new_docstring.extend(
+                [f"{indent}{n}:{' '.join(c)}" for n, c in props_comments.items()]
+            )
         new_docstring.append(line)
     return "\n".join(new_docstring)