core: Add ruff rules PTH (pathlib)
cbornet committed Jan 25, 2025
1 parent dbb6b7b commit ef6bc8d
Showing 11 changed files with 40 additions and 49 deletions.
3 changes: 2 additions & 1 deletion libs/core/langchain_core/callbacks/file.py
@@ -2,6 +2,7 @@

from __future__ import annotations

from pathlib import Path
from typing import Any, Optional, TextIO, cast

from langchain_core.agents import AgentAction, AgentFinish
@@ -28,7 +29,7 @@ def __init__(
mode: The mode to open the file in. Defaults to "a".
color: The color to use for the text. Defaults to None.
"""
self.file = cast(TextIO, open(filename, mode, encoding="utf-8")) # noqa: SIM115
self.file = cast(TextIO, Path(filename).open(mode, encoding="utf-8")) # noqa: SIM115
self.color = color

def __del__(self) -> None:
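Note on the pattern above: the only change is swapping the builtin open() for Path.open(); the SIM115 suppression stays because the callback deliberately keeps the handle open beyond a with block. A minimal standalone sketch of the same idiom, assuming nothing beyond the standard library (DemoFileLogger is an illustrative name, not part of the repo):

from pathlib import Path
from typing import TextIO, cast

class DemoFileLogger:
    """Append-only writer that keeps its file handle open, like the callback above."""

    def __init__(self, filename: str, mode: str = "a") -> None:
        # Path.open() mirrors builtins.open() and satisfies ruff's PTH rules.
        self.file = cast(TextIO, Path(filename).open(mode, encoding="utf-8"))  # noqa: SIM115

    def log(self, text: str) -> None:
        self.file.write(text + "\n")

    def __del__(self) -> None:
        # Close the handle when the logger is garbage-collected.
        self.file.close()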
10 changes: 4 additions & 6 deletions libs/core/langchain_core/documents/base.py
@@ -4,7 +4,7 @@
import mimetypes
from collections.abc import Generator
from io import BufferedReader, BytesIO
from pathlib import PurePath
from pathlib import Path, PurePath
from typing import Any, Literal, Optional, Union, cast

from pydantic import ConfigDict, Field, field_validator, model_validator
@@ -149,8 +149,7 @@ def check_blob_is_valid(cls, values: dict[str, Any]) -> Any:
def as_string(self) -> str:
"""Read data as a string."""
if self.data is None and self.path:
with open(str(self.path), encoding=self.encoding) as f:
return f.read()
return Path(self.path).read_text(encoding=self.encoding)
elif isinstance(self.data, bytes):
return self.data.decode(self.encoding)
elif isinstance(self.data, str):
@@ -166,8 +165,7 @@ def as_bytes(self) -> bytes:
elif isinstance(self.data, str):
return self.data.encode(self.encoding)
elif self.data is None and self.path:
with open(str(self.path), "rb") as f:
return f.read()
return Path(self.path).read_bytes()
else:
msg = f"Unable to get bytes for blob {self}"
raise ValueError(msg)
@@ -178,7 +176,7 @@ def as_bytes_io(self) -> Generator[Union[BytesIO, BufferedReader], None, None]:
if isinstance(self.data, bytes):
yield BytesIO(self.data)
elif self.data is None and self.path:
with open(str(self.path), "rb") as f:
with Path(self.path).open("rb") as f:
yield f
else:
msg = f"Unable to convert blob {self}"
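The three Blob accessors above map onto one-line pathlib equivalents: a text read becomes Path.read_text(), a binary read becomes Path.read_bytes(), and the streaming case keeps its context manager but obtains the handle from Path.open(). A rough self-contained sketch of that mapping (the file name is made up):

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    path = Path(tmp) / "example_blob.txt"
    path.write_text("hello blob", encoding="utf-8")

    # Whole-file reads collapse to single calls:
    text = path.read_text(encoding="utf-8")   # was: open(str(path), encoding=...).read()
    data = path.read_bytes()                  # was: open(str(path), "rb").read()

    # Streaming access still uses a handle, now obtained via Path.open():
    with path.open("rb") as f:                # was: open(str(path), "rb")
        first_chunk = f.read(5)

assert text == "hello blob" and first_chunk == b"hello" and data.startswith(b"hello")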
6 changes: 3 additions & 3 deletions libs/core/langchain_core/language_models/llms.py
@@ -1399,7 +1399,7 @@ def save(self, file_path: Union[Path, str]) -> None:
llm.save(file_path="path/llm.yaml")
"""
# Convert file to Path object.
save_path = Path(file_path) if isinstance(file_path, str) else file_path
save_path = Path(file_path)

directory_path = save_path.parent
directory_path.mkdir(parents=True, exist_ok=True)
@@ -1408,10 +1408,10 @@ def save(self, file_path: Union[Path, str]) -> None:
prompt_dict = self.dict()

if save_path.suffix == ".json":
with open(file_path, "w") as f:
with save_path.open("w") as f:
json.dump(prompt_dict, f, indent=4)
elif save_path.suffix.endswith((".yaml", ".yml")):
with open(file_path, "w") as f:
with save_path.open("w") as f:
yaml.dump(prompt_dict, f, default_flow_style=False)
else:
msg = f"{save_path} must be json or yaml"
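Two small simplifications show up in save() above and recur in prompts/base.py below: Path() accepts either str or Path (wrapping a Path is a no-op), so the isinstance check is unnecessary, and the writes go through the already-converted save_path instead of the raw file_path argument. A hedged sketch of the idiom, with demo_save as an illustrative stand-in rather than the library API:

import json
import tempfile
from pathlib import Path
from typing import Union

def demo_save(payload: dict, file_path: Union[Path, str]) -> None:
    # Path() is idempotent: str or Path input, same result, no isinstance() branch.
    save_path = Path(file_path)
    save_path.parent.mkdir(parents=True, exist_ok=True)
    if save_path.suffix == ".json":
        # Write through the converted Path, not the raw argument.
        with save_path.open("w") as f:
            json.dump(payload, f, indent=4)
    else:
        msg = f"{save_path} must be json"
        raise ValueError(msg)

with tempfile.TemporaryDirectory() as tmp:
    demo_save({"_type": "demo"}, Path(tmp) / "llm.json")  # Path argument
    demo_save({"_type": "demo"}, f"{tmp}/llm2.json")      # str argument, same behaviour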
6 changes: 3 additions & 3 deletions libs/core/langchain_core/prompts/base.py
@@ -368,16 +368,16 @@ def save(self, file_path: Union[Path, str]) -> None:
raise NotImplementedError(msg)

# Convert file to Path object.
save_path = Path(file_path) if isinstance(file_path, str) else file_path
save_path = Path(file_path)

directory_path = save_path.parent
directory_path.mkdir(parents=True, exist_ok=True)

if save_path.suffix == ".json":
with open(file_path, "w") as f:
with save_path.open("w") as f:
json.dump(prompt_dict, f, indent=4)
elif save_path.suffix.endswith((".yaml", ".yml")):
with open(file_path, "w") as f:
with save_path.open("w") as f:
yaml.dump(prompt_dict, f, default_flow_style=False)
else:
msg = f"{save_path} must be json or yaml"
3 changes: 1 addition & 2 deletions libs/core/langchain_core/prompts/chat.py
@@ -596,8 +596,7 @@ def from_template_file(
Returns:
A new instance of this class.
"""
with open(str(template_file)) as f:
template = f.read()
template = Path(template_file).read_text()
return cls.from_template(template, input_variables=input_variables, **kwargs)

def format_messages(self, **kwargs: Any) -> list[BaseMessage]:
16 changes: 8 additions & 8 deletions libs/core/langchain_core/prompts/loading.py
@@ -53,8 +53,7 @@ def _load_template(var_name: str, config: dict) -> dict:
template_path = Path(config.pop(f"{var_name}_path"))
# Load the template.
if template_path.suffix == ".txt":
with open(template_path) as f:
template = f.read()
template = template_path.read_text()
else:
raise ValueError
# Set the template variable to the extracted variable.
@@ -67,10 +66,11 @@ def _load_examples(config: dict) -> dict:
if isinstance(config["examples"], list):
pass
elif isinstance(config["examples"], str):
with open(config["examples"]) as f:
if config["examples"].endswith(".json"):
path = Path(config["examples"])
with path.open() as f:
if path.suffix == ".json":
examples = json.load(f)
elif config["examples"].endswith((".yaml", ".yml")):
elif path.suffix in {".yaml", ".yml"}:
examples = yaml.safe_load(f)
else:
msg = "Invalid file format. Only json or yaml formats are supported."
@@ -168,13 +168,13 @@ def _load_prompt_from_file(
) -> BasePromptTemplate:
"""Load prompt from file."""
# Convert file to a Path object.
file_path = Path(file) if isinstance(file, str) else file
file_path = Path(file)
# Load from either json or yaml.
if file_path.suffix == ".json":
with open(file_path, encoding=encoding) as f:
with file_path.open(encoding=encoding) as f:
config = json.load(f)
elif file_path.suffix.endswith((".yaml", ".yml")):
with open(file_path, encoding=encoding) as f:
with file_path.open(encoding=encoding) as f:
config = yaml.safe_load(f)
else:
msg = f"Got unsupported file type {file_path.suffix}"
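In _load_examples and _load_prompt_from_file above, converting the string to a Path up front lets the format checks use path.suffix (== ".json", in {".yaml", ".yml"}) instead of str.endswith on the raw path string. A small self-contained sketch of the same dispatch; demo_load_config is an illustrative name, and yaml here is PyYAML, which the modules above already import:

import json
import tempfile
from pathlib import Path

import yaml  # PyYAML

def demo_load_config(file: str) -> dict:
    # Dispatch on Path.suffix rather than str.endswith.
    path = Path(file)
    with path.open() as f:
        if path.suffix == ".json":
            return json.load(f)
        if path.suffix in {".yaml", ".yml"}:
            return yaml.safe_load(f)
    msg = "Invalid file format. Only json or yaml formats are supported."
    raise ValueError(msg)

with tempfile.TemporaryDirectory() as tmp:
    cfg = Path(tmp) / "examples.yml"
    cfg.write_text("examples:\n- input: hi\n")
    assert demo_load_config(str(cfg)) == {"examples": [{"input": "hi"}]}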
3 changes: 1 addition & 2 deletions libs/core/langchain_core/prompts/prompt.py
@@ -235,8 +235,7 @@ def from_file(
Returns:
The prompt loaded from the file.
"""
with open(str(template_file), encoding=encoding) as f:
template = f.read()
template = Path(template_file).read_text(encoding=encoding)
if input_variables:
warnings.warn(
"`input_variables' is deprecated and ignored.",
11 changes: 5 additions & 6 deletions libs/core/langchain_core/runnables/graph_mermaid.py
@@ -2,6 +2,7 @@
import base64
import re
from dataclasses import asdict
from pathlib import Path
from typing import Literal, Optional

from langchain_core.runnables.graph import (
@@ -290,13 +291,12 @@ async def _render_mermaid_using_pyppeteer(
img_bytes = await page.screenshot({"fullPage": False})
await browser.close()

def write_to_file(path: str, bytes: bytes) -> None:
with open(path, "wb") as file:
file.write(bytes)
def write_to_file(path: Path, content: bytes) -> None:
path.write_bytes(content)

if output_file_path is not None:
await asyncio.get_event_loop().run_in_executor(
None, write_to_file, output_file_path, img_bytes
None, write_to_file, Path(output_file_path), img_bytes
)

return img_bytes
@@ -337,8 +337,7 @@ def _render_mermaid_using_api(
if response.status_code == 200:
img_bytes = response.content
if output_file_path is not None:
with open(output_file_path, "wb") as file:
file.write(response.content)
Path(output_file_path).write_bytes(response.content)

return img_bytes
else:
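write_to_file above now takes a Path and defers to Path.write_bytes(), while the blocking write still runs off the event loop via run_in_executor; the API branch collapses to a single write_bytes call. A rough async sketch of the same pattern (demo_render and the byte payload are placeholders, not the real renderer):

import asyncio
import tempfile
from pathlib import Path

def write_to_file(path: Path, content: bytes) -> None:
    # One-call replacement for: with open(path, "wb") as file: file.write(content)
    path.write_bytes(content)

async def demo_render(output_file_path: str) -> bytes:
    img_bytes = b"fake-image-bytes"  # stand-in for the rendered Mermaid PNG
    if output_file_path is not None:
        # Keep blocking file I/O out of the event loop, as the diff does.
        await asyncio.get_event_loop().run_in_executor(
            None, write_to_file, Path(output_file_path), img_bytes
        )
    return img_bytes

with tempfile.TemporaryDirectory() as tmp:
    asyncio.run(demo_render(str(Path(tmp) / "diagram.png")))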
2 changes: 1 addition & 1 deletion libs/core/pyproject.toml
@@ -44,7 +44,7 @@ python = ">=3.12.4"
[tool.poetry.extras]

[tool.ruff.lint]
select = [ "ASYNC", "B", "C4", "COM", "DJ", "E", "EM", "EXE", "F", "FLY", "FURB", "I", "ICN", "INT", "LOG", "N", "NPY", "PD", "PIE", "Q", "RSE", "S", "SIM", "SLOT", "T10", "T201", "TID", "TRY", "UP", "W", "YTT",]
select = [ "ASYNC", "B", "C4", "COM", "DJ", "E", "EM", "EXE", "F", "FLY", "FURB", "I", "ICN", "INT", "LOG", "N", "NPY", "PD", "PIE", "PTH", "Q", "RSE", "S", "SIM", "SLOT", "T10", "T201", "TID", "TRY", "UP", "W", "YTT",]
ignore = [ "COM812", "UP007", "S110", "S112",]

[tool.coverage.run]
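For context, PTH is ruff's flake8-use-pathlib rule group: it flags builtin open() and os / os.path calls that have pathlib equivalents, which is what drives every code change in this commit. A tiny before/after illustration of the kind of rewrite the rules ask for (file name made up):

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    notes = Path(tmp) / "notes.txt"
    notes.write_text("hello", encoding="utf-8")

    # Flagged by PTH (builtin open):
    #     with open(notes, encoding="utf-8") as f:
    #         text = f.read()
    # pathlib form the rules accept:
    text = notes.read_text(encoding="utf-8")
    assert text == "hello"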
2 changes: 1 addition & 1 deletion libs/core/tests/unit_tests/prompts/test_prompt.py
@@ -354,7 +354,7 @@ def test_prompt_from_file_with_partial_variables() -> None:
template = "This is a {foo} test {bar}."
partial_variables = {"bar": "baz"}
# when
with mock.patch("builtins.open", mock.mock_open(read_data=template)):
with mock.patch("pathlib.Path.open", mock.mock_open(read_data=template)):
prompt = PromptTemplate.from_file(
"mock_file_name", partial_variables=partial_variables
)
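The patch target changes because PromptTemplate.from_file now reads through pathlib: Path.read_text() opens the file via Path.open() under the hood, so mocking pathlib.Path.open (rather than builtins.open) is what lets mock_open supply the template. A reduced sketch of the same trick, with demo_from_file as an illustrative stand-in:

from pathlib import Path
from unittest import mock

def demo_from_file(template_file: str) -> str:
    # Mirrors the new implementation: read via pathlib, not builtins.open.
    return Path(template_file).read_text()

with mock.patch("pathlib.Path.open", mock.mock_open(read_data="This is a {foo} test.")):
    # read_text() delegates to Path.open(), so the mock intercepts the read.
    assert demo_from_file("mock_file_name") == "This is a {foo} test."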
27 changes: 11 additions & 16 deletions libs/core/tests/unit_tests/test_imports.py
@@ -1,20 +1,17 @@
import concurrent.futures
import glob
import importlib
import subprocess
from pathlib import Path


def test_importable_all() -> None:
for path in glob.glob("../core/langchain_core/*"):
relative_path = Path(path).parts[-1]
if relative_path.endswith(".typed"):
continue
module_name = relative_path.split(".")[0]
module = importlib.import_module("langchain_core." + module_name)
all_ = getattr(module, "__all__", [])
for cls_ in all_:
getattr(module, cls_)
for path in Path("../core/langchain_core/").glob("*"):
module_name = path.stem
if not module_name.startswith(".") and path.suffix != ".typed":
module = importlib.import_module("langchain_core." + module_name)
all_ = getattr(module, "__all__", [])
for cls_ in all_:
getattr(module, cls_)


def try_to_import(module_name: str) -> tuple[int, str]:
@@ -37,12 +34,10 @@ def test_importable_all_via_subprocess() -> None:
for one sequence of imports but not another.
"""
module_names = []
for path in glob.glob("../core/langchain_core/*"):
relative_path = Path(path).parts[-1]
if relative_path.endswith(".typed"):
continue
module_name = relative_path.split(".")[0]
module_names.append(module_name)
for path in Path("../core/langchain_core/").glob("*"):
module_name = path.stem
if not module_name.startswith(".") and path.suffix != ".typed":
module_names.append(module_name)

with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
futures = [
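In the tests above, Path.glob() replaces glob.glob(), and the stem / suffix attributes replace the manual string splitting: path.stem is the file name without its final extension and path.suffix is that extension, so the py.typed marker is skipped with a suffix check instead of endswith. A standalone sketch over a throwaway package layout (demo_pkg and its contents are invented for illustration):

import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    pkg = Path(tmp) / "demo_pkg"
    pkg.mkdir()
    (pkg / "messages.py").touch()
    (pkg / "runnables").mkdir()
    (pkg / "py.typed").touch()

    module_names = []
    # Path.glob("*") replaces glob.glob("demo_pkg/*"); stem/suffix replace split()/endswith().
    for path in pkg.glob("*"):
        module_name = path.stem
        if not module_name.startswith(".") and path.suffix != ".typed":
            module_names.append(module_name)

    assert sorted(module_names) == ["messages", "runnables"]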
