enable PERF rules (#4469)
* enable PERF rules
* fix scripts folder
* Update reflex/compiler/utils.py

Co-authored-by: Masen Furer <m_github@0x26.net>

This commit is contained in:
parent 61cb72596e
commit d7956c19d3
@@ -85,14 +85,15 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.ruff]
 target-version = "py39"
+output-format = "concise"
 lint.isort.split-on-trailing-comma = false
-lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "PTH", "RUF", "SIM", "W"]
+lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "PERF", "PTH", "RUF", "SIM", "W"]
 lint.ignore = ["B008", "D205", "E501", "F403", "SIM115", "RUF006", "RUF012"]
 lint.pydocstyle.convention = "google"
 
 [tool.ruff.lint.per-file-ignores]
 "__init__.py" = ["F401"]
-"tests/*.py" = ["D100", "D103", "D104", "B018"]
+"tests/*.py" = ["D100", "D103", "D104", "B018", "PERF"]
 "reflex/.templates/*.py" = ["D100", "D103", "D104"]
 "*.pyi" = ["D301", "D415", "D417", "D418", "E742"]
 "*/blank.py" = ["I001"]
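The PERF prefix selects ruff's Perflint-derived rules: manual list-building loops (PERF401), manual dict copies (PERF403), `.items()` iteration that ignores the key (PERF102), and try/except inside a loop body (PERF203). Tests are exempted via per-file-ignores, and the added output-format = "concise" only trims ruff's terminal output. A minimal sketch (not from the Reflex codebase) of the most common case, PERF401:

    # Flagged by PERF401: building a list element by element in a loop.
    def squares_loop(numbers: list[int]) -> list[int]:
        result = []
        for n in numbers:
            result.append(n * n)
        return result

    # The suggested fix: a comprehension expresses the same thing directly
    # and avoids the repeated lookup-and-call of result.append.
    def squares(numbers: list[int]) -> list[int]:
        return [n * n for n in numbers]

    assert squares_loop([1, 2, 3]) == squares([1, 2, 3]) == [1, 4, 9]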
@@ -30,15 +30,16 @@ def validate_field_name(bases: List[Type["BaseModel"]], field_name: str) -> None
 
     # can't use reflex.config.environment here cause of circular import
     reload = os.getenv("__RELOAD_CONFIG", "").lower() == "true"
-    for base in bases:
-        try:
+    base = None
+    try:
+        for base in bases:
             if not reload and getattr(base, field_name, None):
                 pass
-        except TypeError as te:
-            raise VarNameError(
-                f'State var "{field_name}" in {base} has been shadowed by a substate var; '
-                f'use a different field name instead".'
-            ) from te
+    except TypeError as te:
+        raise VarNameError(
+            f'State var "{field_name}" in {base} has been shadowed by a substate var; '
+            f'use a different field name instead".'
+        ) from te
 
 
 # monkeypatch pydantic validate_field_name method to skip validating
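This is the PERF203 pattern (try/except inside a loop): the handler is hoisted so the loop body stays exception-free, and `base = None` is added so `base` is always bound when the except block formats its message. A hedged sketch of the same shape, with a toy parser and assumed names:

    def parse_all(items: list[str]) -> list[int]:
        item = None  # ensure `item` is bound even if the loop never runs
        parsed = []
        try:
            for item in items:
                parsed.append(int(item))  # may raise ValueError
        except ValueError as err:
            raise RuntimeError(f"could not parse {item!r}") from err
        return parsed

    assert parse_all(["1", "2"]) == [1, 2]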
@@ -123,8 +123,7 @@ def compile_imports(import_dict: ParsedImportDict) -> list[dict]:
                 raise ValueError("No default field allowed for empty library.")
             if rest is None or len(rest) == 0:
                 raise ValueError("No fields to import.")
-            for module in sorted(rest):
-                import_dicts.append(get_import_dict(module))
+            import_dicts.extend(get_import_dict(module) for module in sorted(rest))
             continue
 
         # remove the version before rendering the package imports
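This is the reflex/compiler/utils.py change called out in the commit message. The PERF401 fix feeds a generator expression straight to `list.extend()`, which accepts any iterable. A small sketch with a hypothetical stand-in for `get_import_dict`:

    def get_tag(module: str) -> dict:  # hypothetical stand-in for get_import_dict
        return {"lib": module}

    import_dicts: list[dict] = []
    import_dicts.extend(get_tag(module) for module in sorted(["react", "axios"]))
    assert [d["lib"] for d in import_dicts] == ["axios", "react"]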
@@ -1403,8 +1403,9 @@ class Component(BaseComponent, ABC):
             if not isinstance(list_of_import_dict, list):
                 list_of_import_dict = [list_of_import_dict]
 
-            for import_dict in list_of_import_dict:
-                added_import_dicts.append(parse_imports(import_dict))
+            added_import_dicts.extend(
+                [parse_imports(import_dict) for import_dict in list_of_import_dict]
+            )
 
         return imports.merge_imports(
             *self._get_props_imports(),
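Same PERF401 family as the compile_imports change above; ruff's autofix wraps the comprehension in a list here, though passing a bare generator to `extend()` would work equally well.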
@@ -1586,8 +1587,7 @@ class Component(BaseComponent, ABC):
         if hooks is not None:
             code[hooks] = None
 
-        for hook, var_data in self._get_added_hooks().items():
-            code[hook] = var_data
+        code.update(self._get_added_hooks())
 
         # Add the hook code for the children.
         for child in self.children:
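Copying every key/value pair of one dict into another is exactly what `dict.update()` does (the shape the PERF403-style rules target when the loop only assigns). A minimal sketch:

    added = {"useState": None, "useEffect": None}

    code: dict[str, None] = {}
    for hook, var_data in added.items():  # the flagged loop
        code[hook] = var_data

    code_updated: dict[str, None] = {}
    code_updated.update(added)  # the one-line replacement
    assert code == code_updated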
@@ -127,7 +127,7 @@ _MAPPING = {
 
 
 EXCLUDE = ["del_", "Del", "image"]
-for _, v in _MAPPING.items():
+for v in _MAPPING.values():
     v.extend([mod.capitalize() for mod in v if mod not in EXCLUDE])
 
 _SUBMOD_ATTRS: dict[str, list[str]] = _MAPPING
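PERF102 fires when a loop unpacks `.items()` but uses only the values (or only the keys); iterating `.values()` skips building and unpacking a tuple per entry. Sketch:

    mapping = {"forms": ["input"], "media": ["img"]}

    for _, v in mapping.items():  # flagged: the key is never used
        v.append("x")

    for v in mapping.values():  # suggested replacement
        v.append("y")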
@@ -339,5 +339,5 @@ _MAPPING = {
     ],
 }
 EXCLUDE = ["del_", "Del", "image"]
-for _, v in _MAPPING.items():
+for v in _MAPPING.values():
     v.extend([mod.capitalize() for mod in v if mod not in EXCLUDE])
@@ -350,13 +350,14 @@ class EventSpec(EventActionsMixin):
 
         # Construct the payload.
         values = []
-        for arg in args:
-            try:
-                values.append(LiteralVar.create(arg))
-            except TypeError as e:
-                raise EventHandlerTypeError(
-                    f"Arguments to event handlers must be Vars or JSON-serializable. Got {arg} of type {type(arg)}."
-                ) from e
+        arg = None
+        try:
+            for arg in args:
+                values.append(LiteralVar.create(value=arg))  # noqa: PERF401
+        except TypeError as e:
+            raise EventHandlerTypeError(
+                f"Arguments to event handlers must be Vars or JSON-serializable. Got {arg} of type {type(arg)}."
+            ) from e
         new_payload = tuple(zip(fn_args, values))
         return self.with_args(self.args + new_payload)
 
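After hoisting the try block (PERF203), the remaining append loop looks like a PERF401 candidate, hence the explicit `# noqa: PERF401`: converting it to a comprehension would lose access to the failing `arg`, because a comprehension's loop variable does not leak into the enclosing scope. A hedged illustration with a toy converter standing in for `LiteralVar.create`:

    def convert(value: object) -> str:  # hypothetical stand-in for LiteralVar.create
        if isinstance(value, bytes):
            raise TypeError("bytes are not JSON-serializable here")
        return str(value)

    def build_payload(args: list[object]) -> list[str]:
        arg = None
        values = []
        try:
            for arg in args:
                # keep `arg` bound so the except block can name the bad value
                values.append(convert(arg))  # noqa: PERF401
        except TypeError as e:
            raise ValueError(f"bad argument: {arg!r}") from e
        return values

    assert build_payload(["a", 1]) == ["a", "1"]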
@@ -4,6 +4,7 @@ from __future__ import annotations
 
 import re
 from collections import defaultdict
+from contextlib import suppress
 from typing import Any, ClassVar, Optional, Type, Union
 
 import alembic.autogenerate
@@ -290,11 +291,10 @@ class Model(Base, sqlmodel.SQLModel): # pyright: ignore [reportGeneralTypeIssue
         relationships = {}
         # SQLModel relationships do not appear in __fields__, but should be included if present.
         for name in self.__sqlmodel_relationships__:
-            try:
-                relationships[name] = self._dict_recursive(getattr(self, name))
-            except sqlalchemy.orm.exc.DetachedInstanceError:
-                # This happens when the relationship was never loaded and the session is closed.
-                continue
+            with suppress(
+                sqlalchemy.orm.exc.DetachedInstanceError  # This happens when the relationship was never loaded and the session is closed.
+            ):
+                relationships[name] = self._dict_recursive(getattr(self, name))
         return {
             **base_fields,
             **relationships,
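`contextlib.suppress()` replaces an `except ...: continue` handler whose only job is to skip the failing item; the explanatory comment rides along inside the argument list. A small sketch of the idiom:

    from contextlib import suppress

    data = {"a": "1", "b": "oops", "c": "3"}
    parsed: dict[str, int] = {}
    for key, raw in data.items():
        with suppress(ValueError):  # skip entries that fail to parse
            parsed[key] = int(raw)
    assert parsed == {"a": 1, "c": 3}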
@@ -3438,17 +3438,16 @@ class StateManagerRedis(StateManager):
             )
 
         # Recursively set_state on all known substates.
-        tasks = []
-        for substate in state.substates.values():
-            tasks.append(
-                asyncio.create_task(
-                    self.set_state(
-                        token=_substate_key(client_token, substate),
-                        state=substate,
-                        lock_id=lock_id,
-                    )
-                )
-            )
+        tasks = [
+            asyncio.create_task(
+                self.set_state(
+                    _substate_key(client_token, substate),
+                    substate,
+                    lock_id,
+                )
+            )
+            for substate in state.substates.values()
+        ]
         # Persist only the given state (parents or substates are excluded by BaseState.__getstate__).
        if state._get_was_touched():
             pickle_state = state._serialize()
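Building the task list in a comprehension both schedules each coroutine immediately and collects the handles for a later await. A runnable sketch with an assumed toy coroutine, not the real `set_state`:

    import asyncio

    async def set_one(key: str) -> str:  # hypothetical stand-in
        await asyncio.sleep(0)
        return key

    async def main() -> None:
        keys = ["a", "b", "c"]
        tasks = [asyncio.create_task(set_one(k)) for k in keys]
        results = await asyncio.gather(*tasks)  # order matches `keys`
        assert results == keys

    asyncio.run(main())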
@@ -58,7 +58,9 @@ def get_process_on_port(port) -> Optional[psutil.Process]:
         The process on the given port.
     """
     for proc in psutil.process_iter(["pid", "name", "cmdline"]):
-        try:
+        with contextlib.suppress(
+            psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess
+        ):
             if importlib.metadata.version("psutil") >= "6.0.0":
                 conns = proc.net_connections(kind="inet")  # type: ignore
             else:
@@ -66,8 +68,6 @@ def get_process_on_port(port) -> Optional[psutil.Process]:
             for conn in conns:
                 if conn.laddr.port == int(port):
                     return proc
-        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
-            pass
     return None
 
 
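`suppress()` takes multiple exception types, mirroring the removed `except (A, B, C): pass` block at the bottom of the loop. Sketch:

    import contextlib

    for raw in ["1", None, "2"]:
        with contextlib.suppress(TypeError, ValueError):
            print(int(raw))  # prints 1 and 2; the None is skipped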
@@ -287,10 +287,9 @@ def _generate_docstrings(clzs: list[Type[Component]], props: list[str]) -> str:
         for line in (clz.create.__doc__ or "").splitlines():
             if "**" in line:
                 indent = line.split("**")[0]
-                for nline in [
-                    f"{indent}{n}:{' '.join(c)}" for n, c in props_comments.items()
-                ]:
-                    new_docstring.append(nline)
+                new_docstring.extend(
+                    [f"{indent}{n}:{' '.join(c)}" for n, c in props_comments.items()]
+                )
             new_docstring.append(line)
     return "\n".join(new_docstring)
 
@@ -49,11 +49,10 @@ def main():
     parser.add_argument("--server-pid", type=int)
     args = parser.parse_args()
     executor = ThreadPoolExecutor(max_workers=len(args.port))
-    futures = []
-    for p in args.port:
-        futures.append(
-            executor.submit(_wait_for_port, p, args.server_pid, args.timeout)
-        )
+    futures = [
+        executor.submit(_wait_for_port, p, args.server_pid, args.timeout)
+        for p in args.port
+    ]
     for f in as_completed(futures):
         ok, msg = f.result()
         if ok:
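The futures list becomes a comprehension over the ports, and `as_completed()` then yields each future as its worker finishes. A runnable sketch with a hypothetical stand-in for `_wait_for_port`:

    from concurrent.futures import ThreadPoolExecutor, as_completed

    def check(port: int) -> tuple[bool, str]:  # hypothetical stand-in for _wait_for_port
        return True, f"port {port} is listening"

    ports = [8000, 3000]
    with ThreadPoolExecutor(max_workers=len(ports)) as executor:
        futures = [executor.submit(check, p) for p in ports]
        for f in as_completed(futures):
            ok, msg = f.result()
            print(ok, msg)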