enable C4 rule (#4536)

Thomas Brandého 2024-12-13 12:37:34 -08:00 committed by GitHub
parent ec89702137
commit ff510cacc5
18 changed files with 94 additions and 114 deletions

View File

@@ -93,7 +93,7 @@ build-backend = "poetry.core.masonry.api"
 [tool.ruff]
 target-version = "py39"
 lint.isort.split-on-trailing-comma = false
-lint.select = ["B", "D", "E", "F", "I", "SIM", "W", "RUF", "FURB", "ERA"]
+lint.select = ["B", "C4", "D", "E", "ERA", "F", "FURB", "I", "RUF", "SIM", "W"]
 lint.ignore = ["B008", "D205", "E501", "F403", "SIM115", "RUF006", "RUF012"]
 lint.pydocstyle.convention = "google"
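
For context, C4 is ruff's flake8-comprehensions rule group; the remaining files in this commit apply the rewrites it reports. A minimal before/after sketch of those patterns (the names and values below are invented for illustration and do not come from the Reflex codebase):

# Build literals directly instead of wrapping them in set()/dict()/tuple() (e.g. C405, C408).
tags = {"a", "b"}  # instead of: set(["a", "b"])
opts = {"echo": True}  # instead of: dict(echo=True)
empty = ()  # instead of: tuple()

# Prefer comprehensions over generators passed to set() and over map()/filter() (e.g. C401, C417).
xs = [0, 1, 2, 3]
squares = {x * x for x in xs}  # instead of: set(x * x for x in xs)
doubled = [2 * x for x in xs if x]  # instead of: list(map(lambda x: 2 * x, filter(None, xs)))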

View File

@@ -115,7 +115,7 @@ def compile_imports(import_dict: ParsedImportDict) -> list[dict]:
         default, rest = compile_import_statement(fields)

         # prevent lib from being rendered on the page if all imports are non rendered kind
-        if not any({f.render for f in fields}): # type: ignore
+        if not any(f.render for f in fields): # type: ignore
             continue

         if not lib:

View File

@@ -1208,7 +1208,7 @@ class Component(BaseComponent, ABC):
         Yields:
             The parent classes that define the method (differently than the base).
         """
-        seen_methods = set([getattr(Component, method)])
+        seen_methods = {getattr(Component, method)}
         for clz in cls.mro():
             if clz is Component:
                 break
@@ -1390,15 +1390,9 @@ class Component(BaseComponent, ABC):
         # Collect imports from Vars used directly by this component.
         var_datas = [var._get_all_var_data() for var in self._get_vars()]
-        var_imports: List[ImmutableParsedImportDict] = list(
-            map(
-                lambda var_data: var_data.imports,
-                filter(
-                    None,
-                    var_datas,
-                ),
-            )
-        )
+        var_imports: List[ImmutableParsedImportDict] = [
+            var_data.imports for var_data in var_datas if var_data is not None
+        ]

         added_import_dicts: list[ParsedImportDict] = []
         for clz in self._iter_parent_classes_with_method("add_imports"):

View File

@@ -79,7 +79,7 @@ class IconButton(elements.Button, RadixLoadingProp, RadixThemesComponent):
            else:
                size_map_var = Match.create(
                    props["size"],
-                    *[(size, px) for size, px in RADIX_TO_LUCIDE_SIZE.items()],
+                    *list(RADIX_TO_LUCIDE_SIZE.items()),
                    12,
                )
                if not isinstance(size_map_var, Var):

View File

@@ -84,10 +84,10 @@ class ChartBase(RechartsCharts):
         cls._ensure_valid_dimension("width", width)
         cls._ensure_valid_dimension("height", height)

-        dim_props = dict(
-            width=width or "100%",
-            height=height or "100%",
-        )
+        dim_props = {
+            "width": width or "100%",
+            "height": height or "100%",
+        }
         # Provide min dimensions so the graph always appears, even if the outer container is zero-size.
         if width is None:
             dim_props["min_width"] = 200

View File

@@ -31,7 +31,7 @@ class RouteVar(SimpleNamespace):

 # This subset of router_data is included in chained on_load events.
-ROUTER_DATA_INCLUDE = set((RouteVar.PATH, RouteVar.ORIGIN, RouteVar.QUERY))
+ROUTER_DATA_INCLUDE = {RouteVar.PATH, RouteVar.ORIGIN, RouteVar.QUERY}


 class RouteRegex(SimpleNamespace):

View File

@@ -297,7 +297,7 @@ class EventSpec(EventActionsMixin):
         handler: EventHandler,
         event_actions: Dict[str, Union[bool, int]] | None = None,
         client_handler_name: str = "",
-        args: Tuple[Tuple[Var, Var], ...] = tuple(),
+        args: Tuple[Tuple[Var, Var], ...] = (),
     ):
         """Initialize an EventSpec.
@@ -312,7 +312,7 @@ class EventSpec(EventActionsMixin):
         object.__setattr__(self, "event_actions", event_actions)
         object.__setattr__(self, "handler", handler)
         object.__setattr__(self, "client_handler_name", client_handler_name)
-        object.__setattr__(self, "args", args or tuple())
+        object.__setattr__(self, "args", args or ())

     def with_args(self, args: Tuple[Tuple[Var, Var], ...]) -> EventSpec:
         """Copy the event spec, with updated args.
@@ -514,7 +514,7 @@ def no_args_event_spec() -> Tuple[()]:
     Returns:
         An empty tuple.
     """
-    return tuple() # type: ignore
+    return () # type: ignore


 # These chains can be used for their side effects when no other events are desired.
@@ -1137,9 +1137,7 @@ def run_script(
         Var(javascript_code) if isinstance(javascript_code, str) else javascript_code
     )
-    return call_function(
-        ArgsFunctionOperation.create(tuple(), javascript_code), callback
-    )
+    return call_function(ArgsFunctionOperation.create((), javascript_code), callback)


 def get_event(state, event):
@@ -1491,7 +1489,7 @@ def get_handler_args(
     """
     args = inspect.getfullargspec(event_spec.handler.fn).args

-    return event_spec.args if len(args) > 1 else tuple()
+    return event_spec.args if len(args) > 1 else ()


 def fix_events(

View File

@@ -52,12 +52,12 @@ def get_engine_args(url: str | None = None) -> dict[str, Any]:
     Returns:
         The database engine arguments as a dict.
     """
-    kwargs: dict[str, Any] = dict(
+    kwargs: dict[str, Any] = {
         # Print the SQL queries if the log level is INFO or lower.
-        echo=environment.SQLALCHEMY_ECHO.get(),
+        "echo": environment.SQLALCHEMY_ECHO.get(),
         # Check connections before returning them.
-        pool_pre_ping=environment.SQLALCHEMY_POOL_PRE_PING.get(),
-    )
+        "pool_pre_ping": environment.SQLALCHEMY_POOL_PRE_PING.get(),
+    }
     conf = get_config()
     url = url or conf.db_url
     if url is not None and url.startswith("sqlite"):

View File

@@ -443,13 +443,13 @@ def deploy(
         hidden=True,
     ),
     regions: List[str] = typer.Option(
-        list(),
+        [],
         "-r",
         "--region",
         help="The regions to deploy to. `reflex cloud regions` For multiple envs, repeat this option, e.g. --region sjc --region iad",
     ),
     envs: List[str] = typer.Option(
-        list(),
+        [],
         "--env",
         help="The environment variables to set: <key>=<value>. For multiple envs, repeat this option, e.g. --env k1=v2 --env k2=v2.",
     ),

View File

@@ -437,9 +437,7 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
         )

         # Create a fresh copy of the backend variables for this instance
-        self._backend_vars = copy.deepcopy(
-            {name: item for name, item in self.backend_vars.items()}
-        )
+        self._backend_vars = copy.deepcopy(self.backend_vars)

     def __repr__(self) -> str:
         """Get the string representation of the state.
@@ -523,9 +521,7 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
             cls.inherited_backend_vars = parent_state.backend_vars

             # Check if another substate class with the same name has already been defined.
-            if cls.get_name() in set(
-                c.get_name() for c in parent_state.class_subclasses
-            ):
+            if cls.get_name() in {c.get_name() for c in parent_state.class_subclasses}:
                 # This should not happen, since we have added module prefix to state names in #3214
                 raise StateValueError(
                     f"The substate class '{cls.get_name()}' has been defined multiple times. "
@@ -788,11 +784,11 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
         )

         # ComputedVar with cache=False always need to be recomputed
-        cls._always_dirty_computed_vars = set(
-            cvar_name
-            for cvar_name, cvar in cls.computed_vars.items()
-            if not cvar._cache
-        )
+        cls._always_dirty_computed_vars = {
+            cvar_name
+            for cvar_name, cvar in cls.computed_vars.items()
+            if not cvar._cache
+        }

         # Any substate containing a ComputedVar with cache=False always needs to be recomputed
         if cls._always_dirty_computed_vars:
@@ -1862,11 +1858,11 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
         Returns:
             Set of computed vars to include in the delta.
         """
-        return set(
-            cvar
-            for cvar in self.computed_vars
-            if self.computed_vars[cvar].needs_update(instance=self)
-        )
+        return {
+            cvar
+            for cvar in self.computed_vars
+            if self.computed_vars[cvar].needs_update(instance=self)
+        }

     def _dirty_computed_vars(
         self, from_vars: set[str] | None = None, include_backend: bool = True
@@ -1880,12 +1876,12 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
         Returns:
             Set of computed vars to include in the delta.
         """
-        return set(
-            cvar
-            for dirty_var in from_vars or self.dirty_vars
-            for cvar in self._computed_var_dependencies[dirty_var]
-            if include_backend or not self.computed_vars[cvar]._backend
-        )
+        return {
+            cvar
+            for dirty_var in from_vars or self.dirty_vars
+            for cvar in self._computed_var_dependencies[dirty_var]
+            if include_backend or not self.computed_vars[cvar]._backend
+        }

     @classmethod
     def _potentially_dirty_substates(cls) -> set[Type[BaseState]]:
@@ -1895,16 +1891,16 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
             Set of State classes that may need to be fetched to recalc computed vars.
         """
         # _always_dirty_substates need to be fetched to recalc computed vars.
-        fetch_substates = set(
-            cls.get_class_substate((cls.get_name(), *substate_name.split(".")))
-            for substate_name in cls._always_dirty_substates
-        )
+        fetch_substates = {
+            cls.get_class_substate((cls.get_name(), *substate_name.split(".")))
+            for substate_name in cls._always_dirty_substates
+        }
         for dependent_substates in cls._substate_var_dependencies.values():
             fetch_substates.update(
-                set(
-                    cls.get_class_substate((cls.get_name(), *substate_name.split(".")))
-                    for substate_name in dependent_substates
-                )
+                {
+                    cls.get_class_substate((cls.get_name(), *substate_name.split(".")))
+                    for substate_name in dependent_substates
+                }
             )
         return fetch_substates
@@ -2206,7 +2202,7 @@ class BaseState(Base, ABC, extra=pydantic.Extra.allow):
         return md5(
             pickle.dumps(
-                list(sorted(_field_tuple(field_name) for field_name in cls.base_vars))
+                sorted(_field_tuple(field_name) for field_name in cls.base_vars)
             )
         ).hexdigest()
@@ -3654,33 +3650,30 @@ class MutableProxy(wrapt.ObjectProxy):
     """A proxy for a mutable object that tracks changes."""

     # Methods on wrapped objects which should mark the state as dirty.
-    __mark_dirty_attrs__ = set(
-        [
-            "add",
-            "append",
-            "clear",
-            "difference_update",
-            "discard",
-            "extend",
-            "insert",
-            "intersection_update",
-            "pop",
-            "popitem",
-            "remove",
-            "reverse",
-            "setdefault",
-            "sort",
-            "symmetric_difference_update",
-            "update",
-        ]
-    )
+    __mark_dirty_attrs__ = {
+        "add",
+        "append",
+        "clear",
+        "difference_update",
+        "discard",
+        "extend",
+        "insert",
+        "intersection_update",
+        "pop",
+        "popitem",
+        "remove",
+        "reverse",
+        "setdefault",
+        "sort",
+        "symmetric_difference_update",
+        "update",
+    }

     # Methods on wrapped objects might return mutable objects that should be tracked.
-    __wrap_mutable_attrs__ = set(
-        [
-            "get",
-            "setdefault",
-        ]
-    )
+    __wrap_mutable_attrs__ = {
+        "get",
+        "setdefault",
+    }

     # These internal attributes on rx.Base should NOT be wrapped in a MutableProxy.
     __never_wrap_base_attrs__ = set(Base.__dict__) - {"set"} | set(
@@ -3723,7 +3716,7 @@ class MutableProxy(wrapt.ObjectProxy):
         self,
         wrapped=None,
         instance=None,
-        args=tuple(),
+        args=(),
         kwargs=None,
     ) -> Any:
         """Mark the state as dirty, then call a wrapped function.
@@ -3979,7 +3972,7 @@ class ImmutableMutableProxy(MutableProxy):
         self,
         wrapped=None,
         instance=None,
-        args=tuple(),
+        args=(),
         kwargs=None,
     ) -> Any:
         """Raise an exception when an attempt is made to modify the object.

View File

@@ -117,7 +117,7 @@ def run_process_and_launch_url(run_command: list[str], backend_present=True):
                    console.print("New packages detected: Updating app...")
                else:
                    if any(
-                        [x in line for x in ("bin executable does not exist on disk",)]
+                        x in line for x in ("bin executable does not exist on disk",)
                    ):
                        console.error(
                            "Try setting `REFLEX_USE_NPM=1` and re-running `reflex init` and `reflex run` to use npm instead of bun:\n"

View File

@@ -699,7 +699,7 @@ def _update_next_config(
     }
     if transpile_packages:
         next_config["transpilePackages"] = list(
-            set((format_library_name(p) for p in transpile_packages))
+            {format_library_name(p) for p in transpile_packages}
         )
     if export:
         next_config["output"] = "export"
@@ -925,7 +925,7 @@ def cached_procedure(cache_file: str, payload_fn: Callable[..., str]):
 @cached_procedure(
     cache_file=str(get_web_dir() / "reflex.install_frontend_packages.cached"),
-    payload_fn=lambda p, c: f"{sorted(list(p))!r},{c.json()}",
+    payload_fn=lambda p, c: f"{sorted(p)!r},{c.json()}",
 )
 def install_frontend_packages(packages: set[str], config: Config):
     """Installs the base and custom frontend packages.
@@ -1300,7 +1300,7 @@ def fetch_app_templates(version: str) -> dict[str, Template]:
     for tp in templates_data:
         if tp["hidden"] or tp["code_url"] is None:
             continue
-        known_fields = set(f.name for f in dataclasses.fields(Template))
+        known_fields = {f.name for f in dataclasses.fields(Template)}
         filtered_templates[tp["name"]] = Template(
             **{k: v for k, v in tp.items() if k in known_fields}
         )

View File

@@ -146,7 +146,7 @@ class VarData:
         Returns:
             The imports as a mutable dict.
         """
-        return dict((k, list(v)) for k, v in self.imports)
+        return {k: list(v) for k, v in self.imports}

     def merge(*all: VarData | None) -> VarData | None:
         """Merge multiple var data objects.
@@ -1591,14 +1591,12 @@ class CachedVarOperation:
             The cached VarData.
         """
         return VarData.merge(
-            *map(
-                lambda value: (
-                    value._get_all_var_data() if isinstance(value, Var) else None
-                ),
-                map(
-                    lambda field: getattr(self, field.name),
-                    dataclasses.fields(self), # type: ignore
-                ),
-            ),
+            *(
+                value._get_all_var_data() if isinstance(value, Var) else None
+                for value in (
+                    getattr(self, field.name)
+                    for field in dataclasses.fields(self) # type: ignore
+                )
            ),
            self._var_data,
        )
@@ -1889,20 +1887,20 @@ class ComputedVar(Var[RETURN_TYPE]):
         Raises:
             TypeError: If kwargs contains keys that are not allowed.
         """
-        field_values = dict(
-            fget=kwargs.pop("fget", self._fget),
-            initial_value=kwargs.pop("initial_value", self._initial_value),
-            cache=kwargs.pop("cache", self._cache),
-            deps=kwargs.pop("deps", self._static_deps),
-            auto_deps=kwargs.pop("auto_deps", self._auto_deps),
-            interval=kwargs.pop("interval", self._update_interval),
-            backend=kwargs.pop("backend", self._backend),
-            _js_expr=kwargs.pop("_js_expr", self._js_expr),
-            _var_type=kwargs.pop("_var_type", self._var_type),
-            _var_data=kwargs.pop(
+        field_values = {
+            "fget": kwargs.pop("fget", self._fget),
+            "initial_value": kwargs.pop("initial_value", self._initial_value),
+            "cache": kwargs.pop("cache", self._cache),
+            "deps": kwargs.pop("deps", self._static_deps),
+            "auto_deps": kwargs.pop("auto_deps", self._auto_deps),
+            "interval": kwargs.pop("interval", self._update_interval),
+            "backend": kwargs.pop("backend", self._backend),
+            "_js_expr": kwargs.pop("_js_expr", self._js_expr),
+            "_var_type": kwargs.pop("_var_type", self._var_type),
+            "_var_data": kwargs.pop(
                 "_var_data", VarData.merge(self._var_data, merge_var_data)
             ),
-        )
+        }

         if kwargs:
             unexpected_kwargs = ", ".join(kwargs.keys())
@@ -2371,10 +2369,7 @@ class CustomVarOperation(CachedVarOperation, Var[T]):
             The cached VarData.
         """
         return VarData.merge(
-            *map(
-                lambda arg: arg[1]._get_all_var_data(),
-                self._args,
-            ),
+            *(arg[1]._get_all_var_data() for arg in self._args),
             self._return._get_all_var_data(),
             self._var_data,
         )

View File

@@ -292,7 +292,7 @@ class VarOperationCall(Generic[P, R], CachedVarOperation, Var[R]):
 class DestructuredArg:
     """Class for destructured arguments."""

-    fields: Tuple[str, ...] = tuple()
+    fields: Tuple[str, ...] = ()
     rest: Optional[str] = None

     def to_javascript(self) -> str:
@@ -314,7 +314,7 @@ class DestructuredArg:
 class FunctionArgs:
     """Class for function arguments."""

-    args: Tuple[Union[str, DestructuredArg], ...] = tuple()
+    args: Tuple[Union[str, DestructuredArg], ...] = ()
     rest: Optional[str] = None

View File

@@ -51,7 +51,7 @@ def raise_unsupported_operand_types(
        VarTypeError: The operand types are unsupported.
    """
    raise VarTypeError(
-        f"Unsupported Operand type(s) for {operator}: {', '.join(map(lambda t: t.__name__, operands_types))}"
+        f"Unsupported Operand type(s) for {operator}: {', '.join(t.__name__ for t in operands_types)}"
    )

View File

@@ -1177,7 +1177,7 @@ class ArrayVar(Var[ARRAY_VAR_TYPE], python_types=(list, tuple, set)):
         if num_args == 0:
             return_value = fn()
-            function_var = ArgsFunctionOperation.create(tuple(), return_value)
+            function_var = ArgsFunctionOperation.create((), return_value)
         else:
             # generic number var
             number_var = Var("").to(NumberVar, int)

View File

@@ -12,7 +12,7 @@ def test_websocket_target_url():
     url = WebsocketTargetURL.create()
     var_data = url._get_all_var_data()
     assert var_data is not None
-    assert sorted(tuple((key for key, _ in var_data.imports))) == sorted(
+    assert sorted(key for key, _ in var_data.imports) == sorted(
         ("$/utils/state", "$/env.json")
     )
@@ -20,7 +20,7 @@ def test_websocket_target_url():
 def test_connection_banner():
     banner = ConnectionBanner.create()
     _imports = banner._get_all_imports(collapse=True)
-    assert sorted(tuple(_imports)) == sorted(
+    assert sorted(_imports) == sorted(
         (
             "react",
             "$/utils/context",
@@ -38,7 +38,7 @@ def test_connection_banner():
 def test_connection_modal():
     modal = ConnectionModal.create()
     _imports = modal._get_all_imports(collapse=True)
-    assert sorted(tuple(_imports)) == sorted(
+    assert sorted(_imports) == sorted(
         (
             "react",
             "$/utils/context",

View File

@@ -372,7 +372,7 @@ def test_basic_operations(TestObj):
     "var, expected",
     [
         (v([1, 2, 3]), "[1, 2, 3]"),
-        (v(set([1, 2, 3])), "[1, 2, 3]"),
+        (v({1, 2, 3}), "[1, 2, 3]"),
         (v(["1", "2", "3"]), '["1", "2", "3"]'),
         (
             Var(_js_expr="foo")._var_set_state("state").to(list),
@@ -903,7 +903,7 @@ def test_literal_var():
            True,
            False,
            None,
-            set([1, 2, 3]),
+            {1, 2, 3},
        ]
    )
    assert (