Skip to content

Commit

Permalink
Draft: Ruff (#520)
Browse files Browse the repository at this point in the history
* add ruff linter/formatter to CI

* show diff

* enable common ruff rules

* exclude some files from formatting

* sort imports

* apply ruff auto-fixes

* apply unsafe fixes -- NEEDS REVIEW!

* ignore line-length violations as they are covered by the formatter

* reset _version and add it to excludes

* fix logger reimport

* fix over-eager optimizations in tests

* format fixes

* some custom fixes

* deactivate F401 for main include

* Get rid of creator and wrapper caches

Signed-off-by: liamhuber <[email protected]>

* De-abstract HasCreator

* Remove unused import

Signed-off-by: liamhuber <[email protected]>

* Ignore unused imports in init files — we specify APIs this way

* Remove ABC

I personally like to use it as an indicator to devs that there's no intention of actually instantiating this class itself, but ruff insists that they are _only_ for specifying interfaces, and without an abstractmethod this doesn't make the cut. I acquiesce.

Signed-off-by: liamhuber <[email protected]>

* Don't use lru_cache

The operation is simple enough that the savings must have been minimal, and this is not used repeatedly (only when parsing class definitions to start with, e.g. at initial import), so any minor cost is going to be tolerable anyhow.

Signed-off-by: liamhuber <[email protected]>

* Use a more descriptive variable

Signed-off-by: liamhuber <[email protected]>

* Use more descriptive variables

Signed-off-by: liamhuber <[email protected]>

* Use more specific settings field

Per ruff deprecation warning

Signed-off-by: liamhuber <[email protected]>

* Keep the Workflow import separate

It was placed intentionally separate from the other imports to distinguish the pyironic single-import user interface from the more standard user/dev public API imports. The line is annotated to prevent ruff from moving it back later.

Signed-off-by: liamhuber <[email protected]>

* Remove redundant per-file linter directions

Signed-off-by: liamhuber <[email protected]>

* Accept ruff format — except for multiline strings. I'm not ready to accept the drop in readability from ruff's formatting here. On a quick search, I didn't find any way to modify just this element of the formatting.

* Remove ruff format

Until I either cave on the multiline string formatting, or ruff lets us change it. In the meantime we use black anyhow for formatting.

Signed-off-by: liamhuber <[email protected]>

* Re-insert intentional bad formatting — for testing the parser

* Adjust test expectation for reformatted test

* Desingleton creator (#527)

* De-singleton creator and wrapper and update docstring

* Sneak a fix for 522 while we're here

---------

Signed-off-by: liamhuber <[email protected]>
Co-authored-by: liamhuber <[email protected]>
  • Loading branch information
XzzX and liamhuber authored Jan 3, 2025
1 parent 67f9638 commit 92a560d
Show file tree
Hide file tree
Showing 69 changed files with 1,262 additions and 1,395 deletions.
17 changes: 17 additions & 0 deletions .github/workflows/ruff.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# CI workflow: run ruff lint checks and verify import sorting on every push/PR.
# NOTE(review): indentation reconstructed — the scraped page stripped it; this is
# the conventional GitHub Actions layout for these keys.
name: Ruff
on: [ push, pull_request ]
jobs:
  # Plain lint pass: fail the build on any ruff rule violation.
  ruff-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/ruff-action@v1
        with:
          args: check
  # Import-order pass: select only isort rules (I), apply fixes in the runner,
  # and show the resulting diff so reviewers can see what would change.
  ruff-sort-imports:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: astral-sh/ruff-action@v1
        with:
          args: check --select I --fix --diff
14 changes: 7 additions & 7 deletions pyiron_workflow/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,15 +34,18 @@
# API

# User entry point
from pyiron_workflow.workflow import Workflow
from pyiron_workflow.workflow import Workflow # ruff: isort: skip

# Node developer entry points
from pyiron_workflow.channels import NOT_DATA
from pyiron_workflow.find import (
find_nodes as _find_nodes, # Not formally in API -- don't rely on interface
)
from pyiron_workflow.logging import logger
from pyiron_workflow.nodes import standard as standard_nodes
from pyiron_workflow.nodes.composite import FailedChildError
from pyiron_workflow.nodes.for_loop import for_node, for_node_factory
from pyiron_workflow.nodes.function import Function, as_function_node, function_node
from pyiron_workflow.logging import logger
from pyiron_workflow.nodes.macro import Macro, as_macro_node, macro_node
from pyiron_workflow.nodes.transform import (
as_dataclass_node,
Expand All @@ -53,11 +56,8 @@
list_to_outputs,
)
from pyiron_workflow.storage import (
StorageInterface,
PickleStorage,
available_backends,
StorageInterface,
TypeNotFoundError,
)
from pyiron_workflow.find import (
find_nodes as _find_nodes, # Not formally in API -- don't rely on interface
available_backends,
)
2 changes: 1 addition & 1 deletion pyiron_workflow/_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
Tools specifically for the test suite, not intended for general use.
"""

from pathlib import Path
import sys
from pathlib import Path


def ensure_tests_in_python_path():
Expand Down
35 changes: 19 additions & 16 deletions pyiron_workflow/channels.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,17 +8,17 @@

from __future__ import annotations

import inspect
import typing
from abc import ABC, abstractmethod
import inspect

from pyiron_snippets.singleton import Singleton

from pyiron_workflow.mixin.has_interface_mixins import HasChannel, HasLabel
from pyiron_workflow.mixin.display_state import HasStateDisplay
from pyiron_workflow.mixin.has_interface_mixins import HasChannel, HasLabel
from pyiron_workflow.type_hinting import (
valid_value,
type_hint_is_as_or_more_specific_than,
valid_value,
)

if typing.TYPE_CHECKING:
Expand Down Expand Up @@ -340,10 +340,10 @@ def __init__(
self,
label: str,
owner: HasIO,
default: typing.Optional[typing.Any] = NOT_DATA,
type_hint: typing.Optional[typing.Any] = None,
default: typing.Any | None = NOT_DATA,
type_hint: typing.Any | None = None,
strict_hints: bool = True,
value_receiver: typing.Optional[InputData] = None,
value_receiver: InputData | None = None,
):
super().__init__(label=label, owner=owner)
self._value = NOT_DATA
Expand Down Expand Up @@ -404,16 +404,19 @@ def value_receiver(self, new_partner: InputData | OutputData | None):
f"itself"
)

if self._both_typed(new_partner) and new_partner.strict_hints:
if not type_hint_is_as_or_more_specific_than(
if (
self._both_typed(new_partner)
and new_partner.strict_hints
and not type_hint_is_as_or_more_specific_than(
self.type_hint, new_partner.type_hint
):
raise ValueError(
f"The channel {self.full_label} cannot take "
f"{new_partner.full_label} as a value receiver because this "
f"type hint ({self.type_hint}) is not as or more specific than "
f"the receiving type hint ({new_partner.type_hint})."
)
)
):
raise ValueError(
f"The channel {self.full_label} cannot take "
f"{new_partner.full_label} as a value receiver because this "
f"type hint ({self.type_hint}) is not as or more specific than "
f"the receiving type hint ({new_partner.type_hint})."
)

new_partner.value = self.value

Expand Down Expand Up @@ -614,7 +617,7 @@ def _has_required_args(func):
def callback(self) -> callable:
return getattr(self.owner, self._callback)

def __call__(self, other: typing.Optional[OutputSignal] = None) -> None:
def __call__(self, other: OutputSignal | None = None) -> None:
self.callback()

def __str__(self):
Expand Down
31 changes: 6 additions & 25 deletions pyiron_workflow/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,35 +4,25 @@

from __future__ import annotations

from abc import ABC
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
from functools import lru_cache

from pyiron_snippets.dotdict import DotDict
from pyiron_snippets.singleton import Singleton
from executorlib import Executor as ExecutorlibExecutor
from pyiron_snippets.dotdict import DotDict

from pyiron_workflow.executors import CloudpickleProcessPoolExecutor
from pyiron_workflow.nodes.function import function_node, as_function_node
from pyiron_workflow.nodes.function import as_function_node, function_node


class Creator(metaclass=Singleton):
class Creator:
"""
A container class for providing access to various workflow objects.
Gives access to various workflow tools and, by virtue of being a singleton, makes them
available to all composite nodes holding a creator.
Gives access to various workflow tools.
In addition to node objects, the creator also provides workflow-compliant executors
for parallel processing.
This includes a very simple in-house executor that is useful for learning, but also
choices from the :mod:`executorlib` packages.
Some :mod:`executorlib` executors may not be available on your machine (e.g. flux-
and/or slurm-based executors), in which case these attributes will return `None`
instead.
"""

def __init__(self):

# Standard lib
self.ProcessPoolExecutor = ProcessPoolExecutor
self.ThreadPoolExecutor = ThreadPoolExecutor
Expand All @@ -44,35 +34,30 @@ def __init__(self):
self.function_node = function_node

@property
@lru_cache(maxsize=1)
def standard(self):
from pyiron_workflow.nodes import standard

return standard

@property
@lru_cache(maxsize=1)
def for_node(self):
from pyiron_workflow.nodes.for_loop import for_node

return for_node

@property
@lru_cache(maxsize=1)
def macro_node(self):
from pyiron_workflow.nodes.macro import macro_node

return macro_node

@property
@lru_cache(maxsize=1)
def Workflow(self):
from pyiron_workflow.workflow import Workflow

return Workflow

@property
@lru_cache(maxsize=1)
def meta(self):
from pyiron_workflow.nodes.transform import inputs_to_list, list_to_outputs

Expand All @@ -84,7 +69,6 @@ def meta(self):
)

@property
@lru_cache(maxsize=1)
def transformer(self):
from pyiron_workflow.nodes.transform import (
dataclass_node,
Expand All @@ -106,32 +90,29 @@ def transformer(self):
]
}
)
return super().__dir__() + list(self._package_access.keys())


class Wrappers(metaclass=Singleton):
class Wrappers:
"""
A container class giving access to the decorators that transform functions to nodes.
"""

as_function_node = staticmethod(as_function_node)

@property
@lru_cache(maxsize=1)
def as_macro_node(self):
from pyiron_workflow.nodes.macro import as_macro_node

return as_macro_node

@property
@lru_cache(maxsize=1)
def as_dataclass_node(self):
from pyiron_workflow.nodes.transform import as_dataclass_node

return as_dataclass_node


class HasCreator(ABC):
class HasCreator:
"""
A mixin class for creator (including both class-like and decorator).
"""
Expand Down
8 changes: 4 additions & 4 deletions pyiron_workflow/draw.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from __future__ import annotations

from abc import ABC, abstractmethod
from typing import Literal, Optional, TYPE_CHECKING
from typing import TYPE_CHECKING, Literal

import graphviz
from pyiron_snippets.colors import SeabornColors
Expand Down Expand Up @@ -75,7 +75,7 @@ def blend_colours(color_a, color_b, fraction_a=0.5):
return _to_hex(
tuple(
fraction_a * a + (1 - fraction_a) * b
for (a, b) in zip(_to_rgb(color_a), _to_rgb(color_b))
for (a, b) in zip(_to_rgb(color_a), _to_rgb(color_b), strict=False)
)
)

Expand Down Expand Up @@ -312,10 +312,10 @@ class Node(WorkflowGraphvizMap):
def __init__(
self,
node: WorkflowNode,
parent: Optional[Node] = None,
parent: Node | None = None,
depth: int = 1,
rankdir: Literal["LR", "TB"] = "LR",
size: Optional[str] = None,
size: str | None = None,
):
self.node = node
self._parent = parent
Expand Down
5 changes: 2 additions & 3 deletions pyiron_workflow/executors/cloudpickleprocesspool.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from concurrent.futures import Future, ProcessPoolExecutor
from concurrent.futures.process import _global_shutdown, _WorkItem, BrokenProcessPool
from concurrent.futures.process import BrokenProcessPool, _global_shutdown, _WorkItem
from sys import version_info

import cloudpickle
Expand Down Expand Up @@ -195,8 +195,7 @@ def _submit_3_8(*args, **kwargs):
)
else:
raise TypeError(
"submit expected at least 1 positional argument, "
"got %d" % (len(args) - 1)
"submit expected at least 1 positional argument, got {len(args) - 1}"
)

with self._shutdown_lock:
Expand Down
6 changes: 3 additions & 3 deletions pyiron_workflow/find.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
from __future__ import annotations

import inspect
import importlib.util
from pathlib import Path
import inspect
import sys
from pathlib import Path
from types import ModuleType

from pyiron_workflow.node import Node
Expand All @@ -16,7 +16,7 @@ def _get_subclasses(
get_abstract: bool = False,
get_imports_too: bool = False,
):
if isinstance(source, (str, Path)):
if isinstance(source, str | Path):
source = Path(source)
if source.is_file():
# Load the module from the file
Expand Down
Loading

0 comments on commit 92a560d

Please sign in to comment.