From c67c5c83e1e5f4ccaed48fea5efaa794e60026fb Mon Sep 17 00:00:00 2001
From: Chris Sewell
Date: Thu, 15 Feb 2024 13:49:20 +0000
Subject: [PATCH 01/24] =?UTF-8?q?=F0=9F=94=A7=20Update=20pre-commit=20hook?=
=?UTF-8?q?s=20(#1109)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Update hooks, fix issues, and also pin dependencies for mypy more
strictly (to reduce potential for differences between local and remote
runs of mypy)
---
.pre-commit-config.yaml | 15 ++++-----
codecov.yml | 10 ++++++
performance/performance_test.py | 1 +
pyproject.toml | 6 ----
sphinx_needs/api/configuration.py | 1 +
sphinx_needs/data.py | 1 +
sphinx_needs/debug.py | 1 +
sphinx_needs/diagrams_common.py | 2 +-
sphinx_needs/directives/need.py | 11 +++----
sphinx_needs/directives/needextend.py | 1 +
sphinx_needs/directives/needextract.py | 3 +-
sphinx_needs/directives/needfilter.py | 28 ++++++++---------
sphinx_needs/directives/needflow.py | 13 ++++----
sphinx_needs/directives/needlist.py | 1 +
sphinx_needs/directives/needtable.py | 5 +--
sphinx_needs/environment.py | 4 +--
sphinx_needs/external_needs.py | 6 ++--
sphinx_needs/filter_common.py | 3 +-
sphinx_needs/functions/functions.py | 4 +--
sphinx_needs/layout.py | 42 ++++++++++++++------------
sphinx_needs/needsfile.py | 1 +
sphinx_needs/roles/need_count.py | 2 +-
sphinx_needs/roles/need_func.py | 2 +-
sphinx_needs/roles/need_incoming.py | 2 +-
sphinx_needs/roles/need_outgoing.py | 2 +-
sphinx_needs/roles/need_part.py | 3 +-
sphinx_needs/roles/need_ref.py | 2 +-
sphinx_needs/utils.py | 10 +++---
sphinx_needs/warnings.py | 1 +
tests/conftest.py | 1 +
tests/no_mpl_tests.py | 1 +
tests/test_github_issues.py | 4 +--
32 files changed, 104 insertions(+), 85 deletions(-)
create mode 100644 codecov.yml
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index cbc338edd..4312eb7b9 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,11 +1,11 @@
repos:
- repo: https://github.com/psf/black
- rev: 23.10.1
+ rev: 24.2.0
hooks:
- id: black
- repo: https://github.com/PyCQA/flake8
- rev: 6.1.0
+ rev: 7.0.0
hooks:
- id: flake8
additional_dependencies:
@@ -15,7 +15,7 @@ repos:
- pep8-naming
- repo: https://github.com/pycqa/isort
- rev: 5.12.0
+ rev: 5.13.2
hooks:
- id: isort
@@ -24,17 +24,18 @@ repos:
hooks:
- id: pyupgrade
args:
- - --py36-plus
+ - --py38-plus
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v1.6.1
+ rev: v1.8.0
hooks:
- id: mypy
files: sphinx_needs/.*
args: []
additional_dependencies:
- - sphinx==6
- - types-docutils
+ - sphinx==6.2.1
+ - docutils==0.19
+ - types-docutils==0.20.0.20240201
- types-jsonschema
- types-requests
diff --git a/codecov.yml b/codecov.yml
new file mode 100644
index 000000000..03fd4e928
--- /dev/null
+++ b/codecov.yml
@@ -0,0 +1,10 @@
+coverage:
+ status:
+ project:
+ default:
+ target: 80%
+ threshold: 0.5%
+ patch:
+ default:
+ target: 70%
+ threshold: 0.5%
diff --git a/performance/performance_test.py b/performance/performance_test.py
index 5a8f438c3..1733f6328 100644
--- a/performance/performance_test.py
+++ b/performance/performance_test.py
@@ -2,6 +2,7 @@
Executes several performance tests.
"""
+
import os.path
import shutil
import subprocess
diff --git a/pyproject.toml b/pyproject.toml
index bfe10e434..054de6c18 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -151,12 +151,6 @@ module = [
]
disable_error_code = ["attr-defined", "no-any-return"]
-[[tool.mypy.overrides]]
-module = [
- "sphinx_needs.directives.needextract",
-]
-disable_error_code = "no-untyped-call"
-
[build-system]
requires = ["setuptools", "poetry_core>=1.0.8"] # setuptools for deps like plantuml
build-backend = "poetry.core.masonry.api"
diff --git a/sphinx_needs/api/configuration.py b/sphinx_needs/api/configuration.py
index 156f76c5b..54b88d8d8 100644
--- a/sphinx_needs/api/configuration.py
+++ b/sphinx_needs/api/configuration.py
@@ -3,6 +3,7 @@
All functions here are available under ``sphinxcontrib.api``. So do not import this module directly.
"""
+
from typing import Callable, List, Optional
from docutils.parsers.rst import directives
diff --git a/sphinx_needs/data.py b/sphinx_needs/data.py
index 465e626a4..abea4ce70 100644
--- a/sphinx_needs/data.py
+++ b/sphinx_needs/data.py
@@ -1,6 +1,7 @@
"""Module to control access to sphinx-needs data,
which is stored in the Sphinx environment.
"""
+
from __future__ import annotations
from typing import TYPE_CHECKING, Literal, TypedDict
diff --git a/sphinx_needs/debug.py b/sphinx_needs/debug.py
index 253bd5725..1fca7239b 100644
--- a/sphinx_needs/debug.py
+++ b/sphinx_needs/debug.py
@@ -2,6 +2,7 @@
Contains debug features to track down
runtime and other problems with Sphinx-Needs
"""
+
from __future__ import annotations
import inspect
diff --git a/sphinx_needs/diagrams_common.py b/sphinx_needs/diagrams_common.py
index c6fb33d6c..4adde2680 100644
--- a/sphinx_needs/diagrams_common.py
+++ b/sphinx_needs/diagrams_common.py
@@ -140,7 +140,7 @@ def get_debug_container(puml_node: nodes.Element) -> nodes.container:
"""Return container containing the raw plantuml code"""
debug_container = nodes.container()
if isinstance(puml_node, nodes.figure):
- data = puml_node.children[0]["uml"]
+ data = puml_node.children[0]["uml"] # type: ignore
else:
data = puml_node["uml"]
data = "\n".join([html.escape(line) for line in data.split("\n")])
diff --git a/sphinx_needs/directives/need.py b/sphinx_needs/directives/need.py
index 202b0f07c..3e38947b5 100644
--- a/sphinx_needs/directives/need.py
+++ b/sphinx_needs/directives/need.py
@@ -1,6 +1,5 @@
import hashlib
import re
-import typing
from typing import Any, Dict, List, Optional, Sequence, Tuple
from docutils import nodes
@@ -240,7 +239,7 @@ def get_sections_and_signature_and_needs(
current_node = need_node
while current_node:
if isinstance(current_node, nodes.section):
- title = typing.cast(str, current_node.children[0].astext())
+ title = current_node.children[0].astext()
# If using auto-section numbering, then Sphinx inserts
# multiple non-breaking space unicode characters into the title
# we'll replace those with a simple space to make them easier to
@@ -344,16 +343,16 @@ def analyse_need_locations(app: Sphinx, doctree: nodes.document) -> None:
# we can remove the hidden needs from the doctree
for need_node in hidden_needs:
if need_node.parent is not None:
- need_node.parent.remove(need_node) # type: ignore[attr-defined]
+ need_node.parent.remove(need_node)
def previous_sibling(node: nodes.Node) -> Optional[nodes.Node]:
"""Return preceding sibling node or ``None``."""
try:
- i = node.parent.index(node) # type: ignore
+ i = node.parent.index(node)
except AttributeError:
return None
- return node.parent[i - 1] if i > 0 else None # type: ignore
+ return node.parent[i - 1] if i > 0 else None
@profile("NEEDS_POST_PROCESS")
@@ -391,7 +390,7 @@ def process_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str) -
if not needs_config.include_needs:
for node in doctree.findall(Need):
if node.parent is not None:
- node.parent.remove(node) # type: ignore
+ node.parent.remove(node)
return
needs_data = SphinxNeedsData(app.env)
diff --git a/sphinx_needs/directives/needextend.py b/sphinx_needs/directives/needextend.py
index bcd218c0b..7f6d1e48d 100644
--- a/sphinx_needs/directives/needextend.py
+++ b/sphinx_needs/directives/needextend.py
@@ -2,6 +2,7 @@
"""
+
import re
from typing import Any, Callable, Dict, Sequence
diff --git a/sphinx_needs/directives/needextract.py b/sphinx_needs/directives/needextract.py
index b3ed41b67..9b61ac91e 100644
--- a/sphinx_needs/directives/needextract.py
+++ b/sphinx_needs/directives/needextract.py
@@ -2,6 +2,7 @@
"""
+
import re
from typing import List, Sequence
@@ -131,4 +132,4 @@ def process_needextract(
# Run docutils/sphinx transformers for the by needextract added nodes.
# Transformers use the complete document (doctree), so we perform this action once per
# needextract. No matter if one or multiple needs got copied
- Substitutions(doctree).apply()
+ Substitutions(doctree).apply() # type: ignore[no-untyped-call]
diff --git a/sphinx_needs/directives/needfilter.py b/sphinx_needs/directives/needfilter.py
index 3ef350e9c..8800a189d 100644
--- a/sphinx_needs/directives/needfilter.py
+++ b/sphinx_needs/directives/needfilter.py
@@ -1,5 +1,5 @@
import os
-from typing import List, Sequence
+from typing import List, Sequence, Union
from urllib.parse import urlparse
from docutils import nodes
@@ -89,7 +89,7 @@ def process_needfilters(
id = node.attributes["ids"][0]
current_needfilter = SphinxNeedsData(env)._get_or_create_filters()[id]
- content: List[nodes.Element]
+ content: Union[nodes.Element, List[nodes.Element]]
if current_needfilter["layout"] == "list":
content = []
@@ -100,12 +100,12 @@ def process_needfilters(
raise ImportError
from sphinxcontrib.plantuml import plantuml
except ImportError:
- content = nodes.error()
- para = nodes.paragraph()
+ error_node = nodes.error()
+ para_node = nodes.paragraph()
text = nodes.Text("PlantUML is not available!")
- para += text
- content.append(para)
- node.replace_self(content)
+ para_node += text
+ error_node.append(para_node)
+ node.replace_self(error_node)
continue
plantuml_block_text = ".. plantuml::\n" "\n" " @startuml" " @enduml"
@@ -151,7 +151,7 @@ def process_needfilters(
target_id = need_info["target_id"]
if current_needfilter["layout"] == "list":
- para = nodes.line()
+ line_node = nodes.line()
description = "{}: {}".format(need_info["id"], need_info["title"])
if current_needfilter["show_status"] and need_info["status"]:
@@ -164,16 +164,16 @@ def process_needfilters(
# Create a reference
if need_info["hide"]:
- para += title
+ line_node += title
else:
ref = nodes.reference("", "")
ref["refdocname"] = need_info["docname"]
ref["refuri"] = builder.get_relative_uri(fromdocname, need_info["docname"])
ref["refuri"] += "#" + target_id
ref.append(title)
- para += ref
+ line_node += ref
- line_block.append(para)
+ line_block.append(line_node)
elif current_needfilter["layout"] == "table":
row = nodes.row()
row += row_col_maker(app, fromdocname, all_needs, need_info, "id", make_ref=True)
@@ -231,7 +231,7 @@ def process_needfilters(
if len(content) == 0:
content.append(no_needs_found_paragraph(current_needfilter.get("filter_warning")))
if current_needfilter["show_filters"]:
- para = nodes.paragraph()
+ para_node = nodes.paragraph()
filter_text = "Used filter:"
filter_text += (
" status(%s)" % " OR ".join(current_needfilter["status"])
@@ -252,7 +252,7 @@ def process_needfilters(
)
filter_node = nodes.emphasis(filter_text, filter_text)
- para += filter_node
- content.append(para)
+ para_node += filter_node
+ content.append(para_node)
node.replace_self(content)
diff --git a/sphinx_needs/directives/needflow.py b/sphinx_needs/directives/needflow.py
index c67381514..749f84b8c 100644
--- a/sphinx_needs/directives/needflow.py
+++ b/sphinx_needs/directives/needflow.py
@@ -308,20 +308,21 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
type="needs",
)
- content = []
try:
if "sphinxcontrib.plantuml" not in app.config.extensions:
raise ImportError
from sphinxcontrib.plantuml import plantuml
except ImportError:
- content = nodes.error()
+ error_node = nodes.error()
para = nodes.paragraph()
text = nodes.Text("PlantUML is not available!")
para += text
- content.append(para)
- node.replace_self(content)
+ error_node.append(para)
+ node.replace_self(error_node)
continue
+ content: List[nodes.Element] = []
+
found_needs = process_filters(app, all_needs.values(), current_needflow)
if found_needs:
@@ -490,13 +491,13 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
# Otherwise it was not been set, or we get outdated data
debug_container = nodes.container()
if isinstance(puml_node, nodes.figure):
- data = puml_node.children[0]["uml"]
+ data = puml_node.children[0]["uml"] # type: ignore
else:
data = puml_node["uml"]
data = "\n".join([html.escape(line) for line in data.split("\n")])
debug_para = nodes.raw("", f"{data} ", format="html")
debug_container += debug_para
- content += debug_container
+ content.append(debug_container)
node.replace_self(content)
diff --git a/sphinx_needs/directives/needlist.py b/sphinx_needs/directives/needlist.py
index 25a19b433..6a2b14ff9 100644
--- a/sphinx_needs/directives/needlist.py
+++ b/sphinx_needs/directives/needlist.py
@@ -2,6 +2,7 @@
"""
+
from typing import List, Sequence
from docutils import nodes
diff --git a/sphinx_needs/directives/needtable.py b/sphinx_needs/directives/needtable.py
index c647f95a5..b00b7cf48 100644
--- a/sphinx_needs/directives/needtable.py
+++ b/sphinx_needs/directives/needtable.py
@@ -317,6 +317,7 @@ def sort(need: NeedsInfoType) -> Any:
tbody += row
+ content: nodes.Element
if len(filtered_needs) == 0:
content = no_needs_found_paragraph(current_needtable.get("filter_warning"))
else:
@@ -333,7 +334,7 @@ def sort(need: NeedsInfoType) -> Any:
if current_needtable["caption"]:
title_text = current_needtable["caption"]
- title = nodes.title(title_text, "", nodes.Text(title_text))
- table_node.insert(0, title)
+ title_node = nodes.title(title_text, "", nodes.Text(title_text))
+ table_node.insert(0, title_node)
node.replace_self(content)
diff --git a/sphinx_needs/environment.py b/sphinx_needs/environment.py
index 0b5de8688..513f5e981 100644
--- a/sphinx_needs/environment.py
+++ b/sphinx_needs/environment.py
@@ -11,10 +11,10 @@
from sphinx_needs.utils import logger
try:
- from sphinx.util.display import status_iterator # type: ignore
+ from sphinx.util.display import status_iterator
except ImportError:
# will be removed in Sphinx 8.0
- from sphinx.util import status_iterator
+ from sphinx.util import status_iterator # type: ignore
IMAGE_DIR_NAME = "_static"
diff --git a/sphinx_needs/external_needs.py b/sphinx_needs/external_needs.py
index f46f7bd1f..4288c37c5 100644
--- a/sphinx_needs/external_needs.py
+++ b/sphinx_needs/external_needs.py
@@ -110,9 +110,9 @@ def load_external_needs(app: Sphinx, env: BuildEnvironment, _docname: str) -> No
cal_target_url = mem_template.render(**{"need": need})
need_params["external_url"] = f'{source["base_url"]}/{cal_target_url}'
else:
- need_params[
- "external_url"
- ] = f'{source["base_url"]}/{need.get("docname", "__error__")}.html#{need["id"]}'
+ need_params["external_url"] = (
+ f'{source["base_url"]}/{need.get("docname", "__error__")}.html#{need["id"]}'
+ )
need_params["content"] = need["description"]
need_params["links"] = need.get("links", [])
diff --git a/sphinx_needs/filter_common.py b/sphinx_needs/filter_common.py
index 20e1fe74b..896256860 100644
--- a/sphinx_needs/filter_common.py
+++ b/sphinx_needs/filter_common.py
@@ -2,6 +2,7 @@
filter_base is used to provide common filter functionality for directives
like needtable, needlist and needflow.
"""
+
from __future__ import annotations
import re
@@ -236,7 +237,7 @@ def prepare_need_list(need_list: Iterable[NeedsInfoType]) -> list[NeedsPartsInfo
for need in need_list:
for part in need["parts"].values():
id_complete = ".".join([need["id"], part["id"]])
- filter_part: NeedsPartsInfoType = {**need, **part, **{"id_parent": need["id"], "id_complete": id_complete}}
+ filter_part: NeedsPartsInfoType = {**need, **part, **{"id_parent": need["id"], "id_complete": id_complete}} # type: ignore[typeddict-item]
all_needs_incl_parts.append(filter_part)
# Be sure extra attributes, which makes only sense for need_parts, are also available on
diff --git a/sphinx_needs/functions/functions.py b/sphinx_needs/functions/functions.py
index 52a8bb441..a8c640ce2 100644
--- a/sphinx_needs/functions/functions.py
+++ b/sphinx_needs/functions/functions.py
@@ -111,7 +111,7 @@ def find_and_replace_node_content(node: nodes.Node, env: BuildEnvironment, need:
except KeyError:
# If no refuri is set, we don't need to modify anything.
# So stop here and return the untouched node.
- return node # type: ignore
+ return node
else:
new_text = node
func_match = func_pattern.findall(new_text)
@@ -144,7 +144,7 @@ def find_and_replace_node_content(node: nodes.Node, env: BuildEnvironment, need:
node.children = new_children
else:
node = nodes.Text(new_text)
- return node # type: ignore
+ return node
else:
for child in node.children:
new_child = find_and_replace_node_content(child, env, need)
diff --git a/sphinx_needs/layout.py b/sphinx_needs/layout.py
index 38b123e50..068f35d5e 100644
--- a/sphinx_needs/layout.py
+++ b/sphinx_needs/layout.py
@@ -3,6 +3,7 @@
Based on https://github.com/useblocks/sphinxcontrib-needs/issues/102
"""
+
import os
import re
import uuid
@@ -87,7 +88,7 @@ def create_need(
# Therefore, we need to manipulate this first, before we can ask Sphinx to perform the normal
# reference handling for us.
replace_pending_xref_refdoc(node_container, docname)
- env.resolve_references(node_container, docname, env.app.builder)
+ env.resolve_references(node_container, docname, env.app.builder) # type: ignore[arg-type]
node_container.attributes["ids"].append(need_id)
@@ -98,8 +99,8 @@ def create_need(
build_need(layout, node_container, app, style, docname)
# set the layout and style for the new need
- node_container[0].attributes = node_container.parent.children[0].attributes
- node_container[0].children[0].attributes = node_container.parent.children[0].children[0].attributes
+ node_container[0].attributes = node_container.parent.children[0].attributes # type: ignore
+ node_container[0].children[0].attributes = node_container.parent.children[0].children[0].attributes # type: ignore
node_container.attributes["ids"] = []
@@ -154,7 +155,7 @@ def build_need(
if need_data["hide"]:
if node.parent:
- node.parent.replace(node, []) # type: ignore
+ node.parent.replace(node, [])
return
if fromdocname is None:
@@ -170,7 +171,7 @@ def build_need(
# We need to replace the current need-node (containing content only) with our new table need node.
# node.parent.replace(node, node_container)
- node.parent.replace(node, node_container) # type: ignore
+ node.parent.replace(node, node_container)
@lru_cache(1)
@@ -201,10 +202,10 @@ def __init__(
self.layout_name = layout
available_layouts = self.needs_config.layouts
- if self.layout_name not in available_layouts.keys():
+ if self.layout_name not in available_layouts:
raise SphinxNeedLayoutException(
'Given layout "{}" is unknown for need {}. Registered layouts are: {}'.format(
- self.layout_name, need["id"], " ,".join(available_layouts.keys())
+ self.layout_name, need["id"], " ,".join(available_layouts)
)
)
self.layout = available_layouts[self.layout_name]
@@ -302,12 +303,12 @@ def __init__(
)
self.functions: Dict[str, Callable[..., Union[None, nodes.Node, List[nodes.Node]]]] = {
- "meta": self.meta,
+ "meta": self.meta, # type: ignore[dict-item]
"meta_all": self.meta_all,
"meta_links": self.meta_links,
- "meta_links_all": self.meta_links_all,
+ "meta_links_all": self.meta_links_all, # type: ignore[dict-item]
"meta_id": self.meta_id,
- "image": self.image,
+ "image": self.image, # type: ignore[dict-item]
"link": self.link,
"collapse_button": self.collapse_button,
"permalink": self.permalink,
@@ -342,16 +343,16 @@ def get_need_table(self) -> nodes.table:
return self.node_table
- def get_section(self, section: str) -> Optional[nodes.line_block]:
+ def get_section(self, section: str) -> Union[nodes.line_block, List[nodes.Element]]:
try:
lines = self.layout["layout"][section]
except KeyError:
# Return nothing, if not specific configuration is given for layout section
- return None
+ return []
# Needed for PDF/Latex output, where empty line_blocks raise exceptions during build
if len(lines) == 0:
- return None
+ return []
lines_container = nodes.line_block(classes=[f"needs_{section}"])
@@ -481,7 +482,9 @@ def _replace_place_holder(self, data: str) -> str:
data = data.replace(replace_string, self.need[item]) # type: ignore[literal-required]
return data
- def meta(self, name: str, prefix: Optional[str] = None, show_empty: bool = False) -> nodes.inline:
+ def meta(
+ self, name: str, prefix: Optional[str] = None, show_empty: bool = False
+ ) -> Union[nodes.inline, List[nodes.Element]]:
"""
Returns the specific metadata of a need inside docutils nodes.
Usage::
@@ -689,10 +692,11 @@ def meta_links(self, name: str, incoming: bool = False) -> nodes.inline:
from sphinx_needs.roles.need_incoming import NeedIncoming
from sphinx_needs.roles.need_outgoing import NeedOutgoing
- if incoming:
- node_links = NeedIncoming(reftarget=self.need["id"], link_type=f"{name}_back")
- else:
- node_links = NeedOutgoing(reftarget=self.need["id"], link_type=f"{name}")
+ node_links = (
+ NeedIncoming(reftarget=self.need["id"], link_type=f"{name}_back")
+ if incoming
+ else NeedOutgoing(reftarget=self.need["id"], link_type=f"{name}")
+ )
node_links.append(nodes.inline(self.need["id"], self.need["id"]))
data_container.append(node_links)
return data_container
@@ -739,7 +743,7 @@ def image(
prefix: str = "",
is_external: bool = False,
img_class: str = "",
- ) -> nodes.inline:
+ ) -> Union[nodes.inline, List[nodes.Element]]:
"""
See https://docutils.sourceforge.io/docs/ref/rst/directives.html#images
diff --git a/sphinx_needs/needsfile.py b/sphinx_needs/needsfile.py
index 1c6fbb771..17825637f 100644
--- a/sphinx_needs/needsfile.py
+++ b/sphinx_needs/needsfile.py
@@ -3,6 +3,7 @@
Creates, checks and imports ``needs.json`` files.
"""
+
import json
import os
import sys
diff --git a/sphinx_needs/roles/need_count.py b/sphinx_needs/roles/need_count.py
index cfe996c7b..ba0a137f1 100644
--- a/sphinx_needs/roles/need_count.py
+++ b/sphinx_needs/roles/need_count.py
@@ -18,7 +18,7 @@
log = get_logger(__name__)
-class NeedCount(nodes.Inline, nodes.Element): # type: ignore
+class NeedCount(nodes.Inline, nodes.Element):
pass
diff --git a/sphinx_needs/roles/need_func.py b/sphinx_needs/roles/need_func.py
index 32ae6a7c6..50852465a 100644
--- a/sphinx_needs/roles/need_func.py
+++ b/sphinx_needs/roles/need_func.py
@@ -13,7 +13,7 @@
log = get_logger(__name__)
-class NeedFunc(nodes.Inline, nodes.Element): # type: ignore
+class NeedFunc(nodes.Inline, nodes.Element):
pass
diff --git a/sphinx_needs/roles/need_incoming.py b/sphinx_needs/roles/need_incoming.py
index bbeb9b141..dc882b74e 100644
--- a/sphinx_needs/roles/need_incoming.py
+++ b/sphinx_needs/roles/need_incoming.py
@@ -10,7 +10,7 @@
from sphinx_needs.utils import check_and_calc_base_url_rel_path, logger
-class NeedIncoming(nodes.Inline, nodes.Element): # type: ignore
+class NeedIncoming(nodes.Inline, nodes.Element):
pass
diff --git a/sphinx_needs/roles/need_outgoing.py b/sphinx_needs/roles/need_outgoing.py
index 092d89a95..be119c735 100644
--- a/sphinx_needs/roles/need_outgoing.py
+++ b/sphinx_needs/roles/need_outgoing.py
@@ -13,7 +13,7 @@
log = get_logger(__name__)
-class NeedOutgoing(nodes.Inline, nodes.Element): # type: ignore
+class NeedOutgoing(nodes.Inline, nodes.Element):
pass
diff --git a/sphinx_needs/roles/need_part.py b/sphinx_needs/roles/need_part.py
index e6be611c5..8b398ac2d 100644
--- a/sphinx_needs/roles/need_part.py
+++ b/sphinx_needs/roles/need_part.py
@@ -6,6 +6,7 @@
Most voodoo is done in need.py
"""
+
import hashlib
import re
from typing import List, cast
@@ -20,7 +21,7 @@
log = get_logger(__name__)
-class NeedPart(nodes.Inline, nodes.Element): # type: ignore
+class NeedPart(nodes.Inline, nodes.Element):
pass
diff --git a/sphinx_needs/roles/need_ref.py b/sphinx_needs/roles/need_ref.py
index 23f9a5b1e..8795cde24 100644
--- a/sphinx_needs/roles/need_ref.py
+++ b/sphinx_needs/roles/need_ref.py
@@ -15,7 +15,7 @@
log = get_logger(__name__)
-class NeedRef(nodes.Inline, nodes.Element): # type: ignore
+class NeedRef(nodes.Inline, nodes.Element):
pass
diff --git a/sphinx_needs/utils.py b/sphinx_needs/utils.py
index 4bbddf122..018ec9691 100644
--- a/sphinx_needs/utils.py
+++ b/sphinx_needs/utils.py
@@ -400,7 +400,7 @@ def jinja_parse(context: Dict[str, Any], jinja_string: str) -> str:
return content
-@lru_cache()
+@lru_cache
def import_matplotlib() -> Optional["matplotlib"]:
"""Import and return matplotlib, or return None if it cannot be imported.
@@ -484,11 +484,9 @@ def match_string_link(
render_content = match.groupdict()
link_url = link_conf["url_template"].render(**render_content, **render_context)
link_name = link_conf["name_template"].render(**render_content, **render_context)
- if link_name:
- ref_item = nodes.reference(link_name, link_name, refuri=link_url)
- else:
- # if no string_link match was made, we handle it as normal string value
- ref_item = nodes.Text(text_item)
+
+ # if no string_link match was made, we handle it as normal string value
+ ref_item = nodes.reference(link_name, link_name, refuri=link_url) if link_name else nodes.Text(text_item)
except Exception as e:
logger.warning(
diff --git a/sphinx_needs/warnings.py b/sphinx_needs/warnings.py
index 8754551dc..8025b33ff 100644
--- a/sphinx_needs/warnings.py
+++ b/sphinx_needs/warnings.py
@@ -2,6 +2,7 @@
Cares about handling and execution warnings.
"""
+
from typing import Dict, Optional
from sphinx.application import Sphinx
diff --git a/tests/conftest.py b/tests/conftest.py
index 36bd5aa9f..2e679dc21 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,4 +1,5 @@
"""Pytest conftest module containing common test configuration and fixtures."""
+
import json
import os.path
import secrets
diff --git a/tests/no_mpl_tests.py b/tests/no_mpl_tests.py
index 17cd6aece..95aaf0747 100644
--- a/tests/no_mpl_tests.py
+++ b/tests/no_mpl_tests.py
@@ -1,4 +1,5 @@
"""These tests should only be run in an environment without matplotlib installed."""
+
import pytest
diff --git a/tests/test_github_issues.py b/tests/test_github_issues.py
index 6dd514323..2551af045 100644
--- a/tests/test_github_issues.py
+++ b/tests/test_github_issues.py
@@ -17,9 +17,7 @@ def test_doc_github_44(test_app):
app = test_app
output = str(
- check_output(
- ["sphinx-build", "-a", "-E", "-b", "html", app.srcdir, app.outdir], stderr=STDOUT, universal_newlines=True
- )
+ check_output(["sphinx-build", "-a", "-E", "-b", "html", app.srcdir, app.outdir], stderr=STDOUT, text=True)
)
# app.build() Uncomment, if build should stop on breakpoints
html = Path(app.outdir, "index.html").read_text()
From a85d49be05949424e38c08233871f32064075828 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 14:50:31 +0100
Subject: [PATCH 02/24] Bump actions/setup-python from 4 to 5 (#1083)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [actions/setup-python](https://github.com/actions/setup-python)
from 4 to 5.
Release notes
Sourced from actions/setup-python's
releases .
v5.0.0
What's Changed
In scope of this release, we update node version runtime from node16
to node20 (actions/setup-python#772 ).
Besides, we update dependencies to the latest versions.
Full Changelog : https://github.com/actions/setup-python/compare/v4.8.0...v5.0.0
v4.8.0
What's Changed
In scope of this release we added support for GraalPy (actions/setup-python#694 ).
You can use this snippet to set up GraalPy:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
with:
python-version: 'graalpy-22.3'
- run: python my_script.py
Besides, the release contains such changes as:
New Contributors
Full Changelog : https://github.com/actions/setup-python/compare/v4...v4.8.0
v4.7.1
What's Changed
Full Changelog : https://github.com/actions/setup-python/compare/v4...v4.7.1
v4.7.0
In scope of this release, the support for reading python version from
pyproject.toml was added (actions/setup-python#669 ).
- name: Setup Python
uses: actions/setup-python@v4
</tr></table>
... (truncated)
Commits
[![Dependabot compatibility
score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/setup-python&package-manager=github_actions&previous-version=4&new-version=5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/benchmark.yaml | 2 +-
.github/workflows/ci.yaml | 8 ++++----
.github/workflows/docs.yaml | 2 +-
.github/workflows/js_test.yml | 2 +-
.github/workflows/release.yaml | 6 +++---
5 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/.github/workflows/benchmark.yaml b/.github/workflows/benchmark.yaml
index da28dd0e6..e7e5c2ba5 100644
--- a/.github/workflows/benchmark.yaml
+++ b/.github/workflows/benchmark.yaml
@@ -10,7 +10,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set Up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Update pip
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 5b9b0797e..abccea3cf 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -11,7 +11,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.8
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: '3.8'
- uses: pre-commit/action@v3.0.0
@@ -34,7 +34,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set Up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Update pip
@@ -78,7 +78,7 @@ jobs:
- name: Install Cypress Test Framework
run: npm install cypress
- name: Set Up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Update pip
@@ -97,7 +97,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set Up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Update pip
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 119dbfde1..de20c3b81 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -9,7 +9,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set Up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Update pip
diff --git a/.github/workflows/js_test.yml b/.github/workflows/js_test.yml
index 3012c9856..2cf3dba0c 100644
--- a/.github/workflows/js_test.yml
+++ b/.github/workflows/js_test.yml
@@ -5,7 +5,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Set Up Python 3.10.8
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.10.8
- name: Use Node.js
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 9ea325cba..1cc8c589a 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -10,7 +10,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.9
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.9
- name: install Poetry
@@ -30,7 +30,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.9
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.9
- uses: actions/download-artifact@v3
@@ -57,7 +57,7 @@ jobs:
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.9
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: 3.9
- uses: actions/download-artifact@v2
From 050bec750ff2c5acf881415fa2b5efb5fcce8414 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 15:04:06 +0100
Subject: [PATCH 03/24] Bump actions/cache from 3 to 4 (#1092)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/benchmark.yaml | 2 +-
.github/workflows/js_test.yml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/benchmark.yaml b/.github/workflows/benchmark.yaml
index e7e5c2ba5..4efe5c00a 100644
--- a/.github/workflows/benchmark.yaml
+++ b/.github/workflows/benchmark.yaml
@@ -22,7 +22,7 @@ jobs:
run: pytest --benchmark-json output.json -k _time tests/benchmarks
- name: Download previous benchmark data
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: ./cache
key: ${{ runner.os }}-benchmark
diff --git a/.github/workflows/js_test.yml b/.github/workflows/js_test.yml
index 2cf3dba0c..bf7070bc9 100644
--- a/.github/workflows/js_test.yml
+++ b/.github/workflows/js_test.yml
@@ -23,7 +23,7 @@ jobs:
run: |
echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
- name: Pip cache
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
From e9582edb45ffbb7bb90296d910b7c00352d7c800 Mon Sep 17 00:00:00 2001
From: Chris Sewell
Date: Thu, 15 Feb 2024 14:10:36 +0000
Subject: [PATCH 04/24] =?UTF-8?q?=F0=9F=91=8C=20Make=20needtable=20titles?=
=?UTF-8?q?=20more=20permissive=20(#1102)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
For `key as "title"` columns, the regex has been changed, to allow for the key to contain anything except whitespace characters, and the title to contain anything except the `"` character
---
sphinx_needs/defaults.py | 2 +-
tests/doc_test/doc_needtable/conf.py | 1 +
tests/doc_test/doc_needtable/test_titles.rst | 3 ++-
tests/test_needtable.py | 2 ++
4 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/sphinx_needs/defaults.py b/sphinx_needs/defaults.py
index c23903fcd..2e7a11913 100644
--- a/sphinx_needs/defaults.py
+++ b/sphinx_needs/defaults.py
@@ -199,7 +199,7 @@
""",
}
-TITLE_REGEX = r'([\w]+) as "([\w ]+)"'
+TITLE_REGEX = r'([^\s]+) as "([^"]+)"'
NEED_DEFAULT_OPTIONS: Dict[str, Any] = {
diff --git a/tests/doc_test/doc_needtable/conf.py b/tests/doc_test/doc_needtable/conf.py
index 585e4438d..73fd61077 100644
--- a/tests/doc_test/doc_needtable/conf.py
+++ b/tests/doc_test/doc_needtable/conf.py
@@ -23,6 +23,7 @@
"github",
"value",
"unit",
+ "special-chars!",
]
needs_string_links = {
diff --git a/tests/doc_test/doc_needtable/test_titles.rst b/tests/doc_test/doc_needtable/test_titles.rst
index e1a598685..75db1cca4 100644
--- a/tests/doc_test/doc_needtable/test_titles.rst
+++ b/tests/doc_test/doc_needtable/test_titles.rst
@@ -12,7 +12,8 @@ TEST Titles
.. spec:: need 3
:id: titles_003
:links: titles_001
+ :special-chars!: special-chars value
.. needtable::
- :columns: id;title as "Headline" ;outgoing as "Links"; incoming as "To this need123";status;tags as "My Tags"
+ :columns: id;title as "Headline" ;outgoing as "Links"; incoming as "To this need123";status;tags as "My Tags";special-chars! as "Special Characters!"
:style: table
diff --git a/tests/test_needtable.py b/tests/test_needtable.py
index 40bb9b33d..211be333e 100644
--- a/tests/test_needtable.py
+++ b/tests/test_needtable.py
@@ -149,3 +149,5 @@ def test_doc_needtable_titles(test_app):
html = Path(app.outdir, "test_titles.html").read_text()
assert 'Headline
' in html
assert 'To this need123
' in html
+ assert 'Special Characters!
' in html
+ assert 'special-chars value
' in html
From ab6d6c2880620ad8c352d949b67bf7177635fd91 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 15:14:37 +0100
Subject: [PATCH 05/24] Bump actions/upload-artifact from 3 to 4 (#1085)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/release.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 1cc8c589a..bbb4b6750 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -17,7 +17,7 @@ jobs:
run: python -m pip install poetry
- name: poetry build
run: poetry build
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
name: poetry_build
path: dist/
From f083bbd5e1d5148ed17f80697a0f39352ebb48d3 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 15:21:13 +0100
Subject: [PATCH 06/24] Bump actions/download-artifact from 2 to 4 (#1086)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/release.yaml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index bbb4b6750..9c7713d0f 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -33,7 +33,7 @@ jobs:
uses: actions/setup-python@v5
with:
python-version: 3.9
- - uses: actions/download-artifact@v3
+ - uses: actions/download-artifact@v4
with:
name: poetry_build
path: dist/
@@ -60,7 +60,7 @@ jobs:
uses: actions/setup-python@v5
with:
python-version: 3.9
- - uses: actions/download-artifact@v2
+ - uses: actions/download-artifact@v4
with:
name: poetry_build
path: dist/
From 6b26526759fb97810968c882788d99a1aceee5f8 Mon Sep 17 00:00:00 2001
From: Chris Sewell
Date: Thu, 15 Feb 2024 14:39:01 +0000
Subject: [PATCH 07/24] =?UTF-8?q?=F0=9F=90=9B=F0=9F=91=8C=20Centralise=20n?=
=?UTF-8?q?eed=20missing=20link=20reporting=20(#1104)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Previously, warnings for need outgoing links that reference a non-existent ID, were only generated "indirectly" in the render phase; where `NeedOutgoing` nodes are converted to nodes that are understood by sphinx builders (HTML, PDF, ...).
Since this render phase is no longer needed/run for simply creating the `needs.json`, using the `needs` builder, these warnings were no longer generated.
Additionally, any needs that are not explicitly rendered in the documentation, like externally imported needs, were also skipped.
This commit moves the reporting of unknown links to the `check_links` function, meaning it is now run for all needs and all builders. The warnings have also been given a subtype `link_outgoing` or `external_link_outgoing`, e.g.
```
srcdir/index.rst:12: WARNING: Need 'SP_TOO_002' has unknown outgoing link 'NOT_WORKING_LINK' in field 'links' [needs.link_outgoing]
WARNING: http://my_company.com/docs/v1/index.html#TEST_01: Need 'EXT_TEST_01' has unknown outgoing link 'SPEC_1' in field 'links' [needs.external_link_outgoing]
```
This means they can be suppressed using the standard Sphinx config:
```python
suppress_warnings = ["needs.link_outgoing", "needs.external_link_outgoing"]
```
This deprecates the need for the `needs_report_dead_links` configuration, which now emits a deprecation warning if set by the user.
---
docs/conf.py | 2 +-
docs/configuration.rst | 8 ++-
sphinx_needs/config.py | 1 +
sphinx_needs/directives/need.py | 29 ++++++--
sphinx_needs/needs.py | 44 ++++++------
sphinx_needs/roles/need_outgoing.py | 39 ++--------
sphinx_needs/roles/need_ref.py | 3 +-
tests/conftest.py | 14 ++--
.../doc_report_dead_links_false/conf.py | 2 +-
tests/test_basic_doc.py | 24 +++++--
tests/test_broken_links.py | 17 +++--
tests/test_github_issues.py | 14 ++--
tests/test_needs_external_needs_build.py | 5 +-
tests/test_report_dead_links.py | 72 +++++++++----------
14 files changed, 148 insertions(+), 126 deletions(-)
diff --git a/docs/conf.py b/docs/conf.py
index 0c8d8188e..c9826cfd5 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -90,7 +90,7 @@
# Absolute path to the needs_report_template_file based on the conf.py directory
# needs_report_template = "/needs_templates/report_template.need" # Use custom report template
-needs_report_dead_links = False
+suppress_warnings = ["needs.link_outgoing"]
needs_types = [
# Architecture types
diff --git a/docs/configuration.rst b/docs/configuration.rst
index 6d59ab252..627060ff9 100644
--- a/docs/configuration.rst
+++ b/docs/configuration.rst
@@ -294,7 +294,13 @@ In this cases, you can provide a list of tuples.
needs_report_dead_links
~~~~~~~~~~~~~~~~~~~~~~~
-Deactivate/activate log messages of outgoing dead links. If set to ``False``, then deactivate.
+.. deprecated:: 2.1.0
+
+ Instead add ``needs.link_outgoing`` to the `suppress_warnings <https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-suppress_warnings>`__ list::
+
+ suppress_warnings = ["needs.link_outgoing"]
+
+Deactivate/activate log messages of disallowed outgoing dead links. If set to ``False``, then deactivate.
Default value is ``True``.
diff --git a/sphinx_needs/config.py b/sphinx_needs/config.py
index 6b7daf4a3..a988074fe 100644
--- a/sphinx_needs/config.py
+++ b/sphinx_needs/config.py
@@ -191,6 +191,7 @@ def __setattr__(self, name: str, value: Any) -> None:
Example: [{"name": "blocks, "incoming": "is blocked by", "copy_link": True, "color": "#ffcc00"}]
"""
report_dead_links: bool = field(default=True, metadata={"rebuild": "html", "types": (bool,)})
+ """DEPRECATED: Use ``suppress_warnings = ["needs.link_outgoing"]`` instead."""
filter_data: dict[str, Any] = field(default_factory=dict, metadata={"rebuild": "html", "types": ()})
allow_unsafe_filters: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
flow_show_links: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
diff --git a/sphinx_needs/directives/need.py b/sphinx_needs/directives/need.py
index 3e38947b5..5965e71a4 100644
--- a/sphinx_needs/directives/need.py
+++ b/sphinx_needs/directives/need.py
@@ -29,7 +29,7 @@
from sphinx_needs.nodes import Need
from sphinx_needs.utils import add_doc, profile, remove_node_from_tree, split_need_id
-logger = get_logger(__name__)
+LOGGER = get_logger(__name__)
NON_BREAKING_SPACE = re.compile("\xa0+")
@@ -156,7 +156,7 @@ def read_in_links(self, name: str) -> List[str]:
if links_string:
for link in re.split(r";|,", links_string):
if link.isspace():
- logger.warning(
+ LOGGER.warning(
f"Grubby link definition found in need '{self.trimmed_title}'. "
"Defined link contains spaces only. [needs]",
type="needs",
@@ -436,10 +436,10 @@ def check_links(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> N
the ``has_forbidden_dead_links`` field is also added.
"""
extra_links = config.extra_links
+ report_dead_links = config.report_dead_links
for need in needs.values():
for link_type in extra_links:
- dead_links_allowed = link_type.get("allow_dead_links", False)
- need_link_value: List[str] = (
+ need_link_value = (
[need[link_type["option"]]] if isinstance(need[link_type["option"]], str) else need[link_type["option"]] # type: ignore
)
for need_id_full in need_link_value:
@@ -449,9 +449,26 @@ def check_links(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> N
need_id_main in needs and need_id_part and need_id_part not in needs[need_id_main]["parts"]
):
need["has_dead_links"] = True
- if not dead_links_allowed:
+ if not link_type.get("allow_dead_links", False):
need["has_forbidden_dead_links"] = True
- break # One found dead link is enough
+ if report_dead_links:
+ message = f"Need '{need['id']}' has unknown outgoing link '{need_id_full}' in field '{link_type['option']}'"
+ # if the need has been imported from an external URL,
+ # we want to provide that URL as the location of the warning,
+ # otherwise we use the location of the need in the source file
+ if need.get("is_external", False):
+ LOGGER.warning(
+ f"{need['external_url']}: {message} [needs.external_link_outgoing]",
+ type="needs",
+ subtype="external_link_outgoing",
+ )
+ else:
+ LOGGER.warning(
+ f"{message} [needs.link_outgoing]",
+ location=(need["docname"], need["lineno"]),
+ type="needs",
+ subtype="link_outgoing",
+ )
def create_back_links(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
diff --git a/sphinx_needs/needs.py b/sphinx_needs/needs.py
index 077d5d65b..03bbce6c0 100644
--- a/sphinx_needs/needs.py
+++ b/sphinx_needs/needs.py
@@ -132,11 +132,12 @@
NeedFunc: process_need_func,
}
+LOGGER = get_logger(__name__)
+
def setup(app: Sphinx) -> Dict[str, Any]:
- log = get_logger(__name__)
- log.debug("Starting setup of Sphinx-Needs")
- log.debug("Load Sphinx-Data-Viewer for Sphinx-Needs")
+ LOGGER.debug("Starting setup of Sphinx-Needs")
+ LOGGER.debug("Load Sphinx-Data-Viewer for Sphinx-Needs")
app.setup_extension("sphinx_data_viewer")
app.setup_extension("sphinxcontrib.jquery")
@@ -304,12 +305,10 @@ def load_config(app: Sphinx, *_args: Any) -> None:
"""
Register extra options and directive based on config from conf.py
"""
- log = get_logger(__name__)
-
needs_config = NeedsSphinxConfig(app.config)
if isinstance(needs_config.extra_options, dict):
- log.info(
+ LOGGER.info(
'Config option "needs_extra_options" supports list and dict. However new default type since '
"Sphinx-Needs 0.7.2 is list. Please see docs for details."
)
@@ -317,7 +316,9 @@ def load_config(app: Sphinx, *_args: Any) -> None:
extra_options = NEEDS_CONFIG.extra_options
for option in needs_config.extra_options:
if option in extra_options:
- log.warning(f'extra_option "{option}" already registered. [needs.config]', type="needs", subtype="config")
+ LOGGER.warning(
+ f'extra_option "{option}" already registered. [needs.config]', type="needs", subtype="config"
+ )
NEEDS_CONFIG.extra_options[option] = directives.unchanged
# Get extra links and create a dictionary of needed options.
@@ -393,17 +394,24 @@ def load_config(app: Sphinx, *_args: Any) -> None:
if name not in NEEDS_CONFIG.warnings:
NEEDS_CONFIG.warnings[name] = check
else:
- log.warning(
+ LOGGER.warning(
f"{name!r} in 'needs_warnings' is already registered. [needs.config]", type="needs", subtype="config"
)
if needs_config.constraints_failed_color:
- log.warning(
+ LOGGER.warning(
'Config option "needs_constraints_failed_color" is deprecated. Please use "needs_constraint_failed_options" styles instead. [needs.config]',
type="needs",
subtype="config",
)
+ if needs_config.report_dead_links is not True:
+ LOGGER.warning(
+ 'Config option "needs_report_dead_links" is deprecated. Please use `suppress_warnings = ["needs.link_outgoing"]` instead. [needs.config]',
+ type="needs",
+ subtype="config",
+ )
+
def visitor_dummy(*_args: Any, **_kwargs: Any) -> None:
"""
@@ -497,19 +505,15 @@ def prepare_env(app: Sphinx, env: BuildEnvironment, _docname: str) -> None:
def check_configuration(_app: Sphinx, config: Config) -> None:
- """
- Checks the configuration for invalid options.
+ """Checks the configuration for invalid options.
E.g. defined need-option, which is already defined internally
-
- :param app:
- :param config:
- :return:
"""
- extra_options = config["needs_extra_options"]
- link_types = [x["option"] for x in config["needs_extra_links"]]
+ needs_config = NeedsSphinxConfig(config)
+ extra_options = needs_config.extra_options
+ link_types = [x["option"] for x in needs_config.extra_links]
- external_filter = getattr(config, "needs_filter_data", {})
+ external_filter = needs_config.filter_data
for extern_filter, value in external_filter.items():
# Check if external filter values is really a string
if not isinstance(value, str):
@@ -545,8 +549,8 @@ def check_configuration(_app: Sphinx, config: Config) -> None:
" This is not allowed.".format(link + "_back")
)
- external_variants = getattr(config, "needs_variants", {})
- external_variant_options = getattr(config, "needs_variant_options", [])
+ external_variants = needs_config.variants
+ external_variant_options = needs_config.variant_options
for value in external_variants.values():
# Check if external filter values is really a string
if not isinstance(value, str):
diff --git a/sphinx_needs/roles/need_outgoing.py b/sphinx_needs/roles/need_outgoing.py
index be119c735..8248f1ce4 100644
--- a/sphinx_needs/roles/need_outgoing.py
+++ b/sphinx_needs/roles/need_outgoing.py
@@ -23,7 +23,8 @@ def process_need_outgoing(
builder = app.builder
env = app.env
needs_config = NeedsSphinxConfig(app.config)
- report_dead_links = needs_config.report_dead_links
+ link_lookup = {link["option"]: link for link in needs_config.extra_links}
+
# for node_need_ref in doctree.findall(NeedOutgoing):
for node_need_ref in found_nodes:
node_link_container = nodes.inline()
@@ -107,39 +108,11 @@ def process_need_outgoing(
dead_link_para.append(dead_link_text)
node_link_container += dead_link_para
- extra_links = getattr(env.config, "needs_extra_links", [])
- extra_links_dict = {x["option"]: x for x in extra_links}
-
- # Reduce log level to INFO, if dead links are allowed
- if (
- "allow_dead_links" in extra_links_dict[link_type]
- and extra_links_dict[link_type]["allow_dead_links"]
- ):
- log_level = "INFO"
- kwargs = {}
- else:
- # Set an extra css class, if link type is not configured to allow dead links
+ # add a CSS class for disallowed unknown links
+ # note a warning is already emitted when validating the needs list
+ # so we don't need to do it here
+ if not link_lookup.get(link_type, {}).get("allow_dead_links", False):
dead_link_para.attributes["classes"].append("forbidden")
- log_level = "WARNING"
- kwargs = {"type": "needs"}
-
- if report_dead_links:
- if node_need_ref and node_need_ref.line:
- log.log(
- log_level,
- f"linked need {need_id_main} not found "
- f"(Line {node_need_ref.line} of file {node_need_ref.source}) [needs]",
- **kwargs,
- )
- else:
- log.log(
- log_level,
- "outgoing linked need {} not found (document: {}, "
- "source need {} on line {} ) [needs]".format(
- need_id_main, ref_need["docname"], ref_need["id"], ref_need["lineno"]
- ),
- **kwargs,
- )
# If we have several links, we add an empty text between them
if (index + 1) < len(link_list):
diff --git a/sphinx_needs/roles/need_ref.py b/sphinx_needs/roles/need_ref.py
index 8795cde24..2f1526a13 100644
--- a/sphinx_needs/roles/need_ref.py
+++ b/sphinx_needs/roles/need_ref.py
@@ -141,8 +141,9 @@ def process_need_ref(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
else:
log.warning(
- f"linked need {node_need_ref['reftarget']} not found [needs]",
+ f"linked need {node_need_ref['reftarget']} not found [needs.link_ref]",
type="needs",
+ subtype="link_ref",
location=node_need_ref,
)
diff --git a/tests/conftest.py b/tests/conftest.py
index 2e679dc21..c631b27fc 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -17,6 +17,7 @@
from sphinx import version_info
from sphinx.application import Sphinx
from sphinx.testing.path import path
+from sphinx.testing.util import SphinxTestApp
from syrupy.extensions.single_file import SingleFileSnapshotExtension, WriteMode
from xprocess import ProcessStarter
@@ -231,10 +232,11 @@ def test_app(make_app, sphinx_test_tempdir, request):
builder_params = request.param
sphinx_conf_overrides = builder_params.get("confoverrides", {})
- # Since we don't want copy the plantuml.jar file for each test function,
- # we need to override the plantuml conf variable and set it to what we have already
- plantuml = "java -Djava.awt.headless=true -jar %s" % os.path.join(sphinx_test_tempdir, "utils", "plantuml.jar")
- sphinx_conf_overrides.update(plantuml=plantuml)
+ if not builder_params.get("no_plantuml", False):
+ # Since we don't want copy the plantuml.jar file for each test function,
+ # we need to override the plantuml conf variable and set it to what we have already
+ plantuml = "java -Djava.awt.headless=true -jar %s" % os.path.join(sphinx_test_tempdir, "utils", "plantuml.jar")
+ sphinx_conf_overrides.update(plantuml=plantuml)
# copy test srcdir to test temporary directory sphinx_test_tempdir
srcdir = builder_params.get("srcdir")
@@ -245,7 +247,7 @@ def test_app(make_app, sphinx_test_tempdir, request):
src_dir = Path(str(src_dir))
# return sphinx.testing fixture make_app and new srcdir which is in sphinx_test_tempdir
- app: Sphinx = make_app(
+ app: SphinxTestApp = make_app(
buildername=builder_params.get("buildername", "html"),
srcdir=src_dir,
freshenv=builder_params.get("freshenv"),
@@ -268,6 +270,8 @@ def test_app(make_app, sphinx_test_tempdir, request):
yield app
+ app.cleanup()
+
# Clean up the srcdir of each Sphinx app after the test function has executed
if request.config.getoption("--sn-build-dir") is None:
shutil.rmtree(parent_path, ignore_errors=True)
diff --git a/tests/doc_test/doc_report_dead_links_false/conf.py b/tests/doc_test/doc_report_dead_links_false/conf.py
index 3e75ba02f..813f445a4 100644
--- a/tests/doc_test/doc_report_dead_links_false/conf.py
+++ b/tests/doc_test/doc_report_dead_links_false/conf.py
@@ -10,7 +10,7 @@
{"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
]
-needs_report_dead_links = False
+suppress_warnings = ["needs.link_outgoing"]
needs_extra_links = [
{
diff --git a/tests/test_basic_doc.py b/tests/test_basic_doc.py
index d47906aca..3c12b8ac7 100644
--- a/tests/test_basic_doc.py
+++ b/tests/test_basic_doc.py
@@ -9,7 +9,9 @@
import pytest
import responses
+from sphinx import version_info
from sphinx.application import Sphinx
+from sphinx.testing.util import SphinxTestApp
from syrupy.filters import props
from sphinx_needs.api.need import NeedsNoIdException
@@ -234,14 +236,24 @@ def test_sphinx_api_build():
temp_dir = tempfile.mkdtemp()
src_dir = os.path.join(os.path.dirname(__file__), "doc_test", "doc_basic")
- sphinx_app = Sphinx(
+ if version_info >= (7, 2):
+ src_dir = Path(src_dir)
+ temp_dir = Path(temp_dir)
+ else:
+ from sphinx.testing.path import path
+
+ src_dir = path(src_dir)
+ temp_dir = path(temp_dir)
+
+ sphinx_app = SphinxTestApp(
srcdir=src_dir,
- confdir=src_dir,
- outdir=temp_dir,
- doctreedir=temp_dir,
+ builddir=temp_dir,
buildername="html",
parallel=4,
freshenv=True,
)
- sphinx_app.build()
- assert sphinx_app.statuscode == 0
+ try:
+ sphinx_app.build()
+ assert sphinx_app.statuscode == 0
+ finally:
+ sphinx_app.cleanup()
diff --git a/tests/test_broken_links.py b/tests/test_broken_links.py
index 4b2710749..0bc1613b9 100644
--- a/tests/test_broken_links.py
+++ b/tests/test_broken_links.py
@@ -1,13 +1,18 @@
import pytest
+from sphinx.util.console import strip_colors
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/broken_links"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "html", "srcdir": "doc_test/broken_links", "no_plantuml": True}], indirect=True
+)
def test_doc_build_html(test_app):
app = test_app
app.build()
- warning = app._warning
- # stdout warnings
- warnings = warning.getvalue()
-
- assert "linked need BROKEN_LINK not found" in warnings
+ # check there are expected warnings
+ warnings = strip_colors(app._warning.getvalue().replace(str(app.srcdir), "srcdir"))
+ print(warnings.splitlines())
+ assert warnings.splitlines() == [
+ "srcdir/index.rst:12: WARNING: Need 'SP_TOO_002' has unknown outgoing link 'NOT_WORKING_LINK' in field 'links' [needs.link_outgoing]",
+ "srcdir/index.rst:21: WARNING: linked need BROKEN_LINK not found [needs.link_ref]",
+ ]
diff --git a/tests/test_github_issues.py b/tests/test_github_issues.py
index 2551af045..717f37e5f 100644
--- a/tests/test_github_issues.py
+++ b/tests/test_github_issues.py
@@ -1,6 +1,6 @@
import re
+import subprocess
from pathlib import Path
-from subprocess import STDOUT, check_output
import pytest
@@ -16,9 +16,10 @@ def test_doc_github_44(test_app):
# So we call the needed command directly, but still use the sphinx_testing app to create the outdir for us.
app = test_app
- output = str(
- check_output(["sphinx-build", "-a", "-E", "-b", "html", app.srcdir, app.outdir], stderr=STDOUT, text=True)
+ output = subprocess.run(
+ ["sphinx-build", "-a", "-E", "-b", "html", app.srcdir, app.outdir], check=True, capture_output=True
)
+
# app.build() Uncomment, if build should stop on breakpoints
html = Path(app.outdir, "index.html").read_text()
assert "Github Issue 44 test" in html
@@ -26,8 +27,11 @@ def test_doc_github_44(test_app):
assert "Test 2" in html
assert "Test 3" in html
- assert "linked need test_3 not found" not in output
- assert "outgoing linked need test_123_broken not found" in output
+ stderr = output.stderr.decode("utf-8")
+ stderr = stderr.replace(str(app.srcdir), "srcdir")
+ assert stderr.splitlines() == [
+ "srcdir/index.rst:11: WARNING: Need 'test_3' has unknown outgoing link 'test_123_broken' in field 'links' [needs.link_outgoing]"
+ ]
@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_github_issue_61"}], indirect=True)
diff --git a/tests/test_needs_external_needs_build.py b/tests/test_needs_external_needs_build.py
index 0a3c8876b..a5a4ad0a8 100644
--- a/tests/test_needs_external_needs_build.py
+++ b/tests/test_needs_external_needs_build.py
@@ -22,7 +22,10 @@ def test_doc_build_html(test_app, sphinx_test_tempdir):
output = subprocess.run(
["sphinx-build", "-b", "html", "-D", rf"plantuml={plantuml}", src_dir, out_dir], capture_output=True
)
- assert not output.stderr, f"Build failed with stderr: {output.stderr}"
+ assert output.stderr.decode("utf-8").splitlines() == [
+ "WARNING: http://my_company.com/docs/v1/index.html#TEST_01: Need 'EXT_TEST_01' has unknown outgoing link 'SPEC_1' in field 'links' [needs.external_link_outgoing]",
+ "WARNING: ../../_build/html/index.html#TEST_01: Need 'EXT_REL_PATH_TEST_01' has unknown outgoing link 'SPEC_1' in field 'links' [needs.external_link_outgoing]",
+ ]
# run second time and check
output_second = subprocess.run(
diff --git a/tests/test_report_dead_links.py b/tests/test_report_dead_links.py
index 24a8d9008..fd6f88aa0 100644
--- a/tests/test_report_dead_links.py
+++ b/tests/test_report_dead_links.py
@@ -1,3 +1,4 @@
+import subprocess
from pathlib import Path
import pytest
@@ -6,61 +7,52 @@
@pytest.mark.parametrize(
"test_app", [{"buildername": "html", "srcdir": "doc_test/doc_report_dead_links_true"}], indirect=True
)
-def test_needs_report_dead_links_true(test_app):
- import subprocess
-
+def test_needs_dead_links_warnings(test_app):
app = test_app
- # Check config value of needs_report_dead_links
- assert app.config.needs_report_dead_links
-
src_dir = Path(app.srcdir)
out_dir = Path(app.outdir)
output = subprocess.run(["sphinx-build", "-M", "html", src_dir, out_dir], capture_output=True)
- # Check log info msg of dead links
- assert (
- "outgoing linked need DEAD_LINK_ALLOWED not found (document: index, source need REQ_001 on line 7 ) [needs]"
- in output.stdout.decode("utf-8")
- )
- # Check log warning msg of dead links
- assert (
- "WARNING: outgoing linked need ANOTHER_DEAD_LINK not found (document: index, "
- "source need REQ_004 on line 17 ) [needs]" in output.stderr.decode("utf-8")
- )
- assert (
- "WARNING: outgoing linked need REQ_005 not found (document: index, source need TEST_004 on line 45 ) [needs]"
- in output.stderr.decode("utf-8")
- )
+ # check there are expected warnings
+ stderr = output.stderr.decode("utf-8")
+ stderr = stderr.replace(str(src_dir), "srcdir")
+ assert stderr.splitlines() == [
+ "srcdir/index.rst:17: WARNING: Need 'REQ_004' has unknown outgoing link 'ANOTHER_DEAD_LINK' in field 'links' [needs.link_outgoing]",
+ "srcdir/index.rst:45: WARNING: Need 'TEST_004' has unknown outgoing link 'REQ_005.invalid' in field 'links' [needs.link_outgoing]",
+ "srcdir/index.rst:45: WARNING: Need 'TEST_004' has unknown outgoing link 'REQ_005.invalid' in field 'tests' [needs.link_outgoing]",
+ ]
@pytest.mark.parametrize(
- "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_report_dead_links_false"}], indirect=True
+ "test_app", [{"buildername": "needs", "srcdir": "doc_test/doc_report_dead_links_true"}], indirect=True
)
-def test_needs_report_dead_links_false(test_app):
- import subprocess
-
+def test_needs_dead_links_warnings_needs_builder(test_app):
app = test_app
- # Check config value of needs_report_dead_links
- assert not app.config.needs_report_dead_links
+ src_dir = Path(app.srcdir)
+ out_dir = Path(app.outdir)
+ output = subprocess.run(["sphinx-build", "-M", "needs", src_dir, out_dir], capture_output=True)
+
+ # check there are expected warnings
+ stderr = output.stderr.decode("utf-8")
+ stderr = stderr.replace(str(src_dir), "srcdir")
+ assert stderr.splitlines() == [
+ "srcdir/index.rst:17: WARNING: Need 'REQ_004' has unknown outgoing link 'ANOTHER_DEAD_LINK' in field 'links' [needs.link_outgoing]",
+ "srcdir/index.rst:45: WARNING: Need 'TEST_004' has unknown outgoing link 'REQ_005.invalid' in field 'links' [needs.link_outgoing]",
+ "srcdir/index.rst:45: WARNING: Need 'TEST_004' has unknown outgoing link 'REQ_005.invalid' in field 'tests' [needs.link_outgoing]",
+ ]
+
+
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_report_dead_links_false"}], indirect=True
+)
+def test_needs_dead_links_suppress_warnings(test_app):
+ app = test_app
src_dir = Path(app.srcdir)
out_dir = Path(app.outdir)
output = subprocess.run(["sphinx-build", "-M", "html", src_dir, out_dir], capture_output=True)
- # Check log info msg of dead links deactivated
- assert (
- "outgoing linked need DEAD_LINK_ALLOWED not found (document: index, source need REQ_001 on line 7 ) [needs]"
- not in output.stdout.decode("utf-8")
- )
- # Check log warning msg of dead links deactivated
- assert (
- "WARNING: outgoing linked need ANOTHER_DEAD_LINK not found (document: index, "
- "source need REQ_004 on line 17 ) [needs]" not in output.stderr.decode("utf-8")
- )
- assert (
- "WARNING: outgoing linked need REQ_005 not found (document: index, source need TEST_004 on line 45 ) [needs]"
- not in output.stderr.decode("utf-8")
- )
+ # check there are no warnings
assert not output.stderr
From e0bc813741e28edf32a4291f2cf9a7e8a3d455e3 Mon Sep 17 00:00:00 2001
From: Chris Sewell
Date: Thu, 15 Feb 2024 15:11:17 +0000
Subject: [PATCH 08/24] =?UTF-8?q?=F0=9F=94=A7=20Use=20future=20annotations?=
=?UTF-8?q?=20in=20all=20modules=20(#1111)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Just to keep things consistent across the code-base
---
sphinx_needs/api/configuration.py | 12 ++--
sphinx_needs/api/exceptions.py | 2 +
sphinx_needs/api/need.py | 36 +++++------
sphinx_needs/builder.py | 14 +++--
sphinx_needs/defaults.py | 6 +-
sphinx_needs/diagrams_common.py | 16 ++---
sphinx_needs/directives/list2need.py | 6 +-
sphinx_needs/directives/need.py | 30 +++++-----
sphinx_needs/directives/needbar.py | 6 +-
sphinx_needs/directives/needextend.py | 11 ++--
sphinx_needs/directives/needextract.py | 11 ++--
sphinx_needs/directives/needfilter.py | 8 ++-
sphinx_needs/directives/needflow.py | 16 ++---
sphinx_needs/directives/needgantt.py | 8 ++-
sphinx_needs/directives/needimport.py | 6 +-
sphinx_needs/directives/needlist.py | 11 ++--
sphinx_needs/directives/needpie.py | 6 +-
sphinx_needs/directives/needreport.py | 2 +
sphinx_needs/directives/needsequence.py | 20 ++++---
sphinx_needs/directives/needservice.py | 10 ++--
sphinx_needs/directives/needtable.py | 6 +-
sphinx_needs/directives/needuml.py | 6 +-
sphinx_needs/directives/utils.py | 14 +++--
sphinx_needs/environment.py | 6 +-
sphinx_needs/errors.py | 2 +
sphinx_needs/external_needs.py | 2 +
sphinx_needs/functions/common.py | 30 +++++-----
sphinx_needs/functions/functions.py | 24 ++++----
sphinx_needs/layout.py | 70 +++++++++++-----------
sphinx_needs/logging.py | 2 +
sphinx_needs/need_constraints.py | 6 +-
sphinx_needs/needs.py | 6 +-
sphinx_needs/needsfile.py | 6 +-
sphinx_needs/roles/need_count.py | 4 +-
sphinx_needs/roles/need_func.py | 4 +-
sphinx_needs/roles/need_incoming.py | 4 +-
sphinx_needs/roles/need_outgoing.py | 4 +-
sphinx_needs/roles/need_part.py | 10 ++--
sphinx_needs/roles/need_ref.py | 9 +--
sphinx_needs/services/base.py | 6 +-
sphinx_needs/services/config/github.py | 2 +
sphinx_needs/services/config/open_needs.py | 2 +
sphinx_needs/services/github.py | 18 +++---
sphinx_needs/services/manager.py | 8 ++-
sphinx_needs/services/open_needs.py | 16 ++---
sphinx_needs/utils.py | 61 ++++++++-----------
sphinx_needs/warnings.py | 6 +-
47 files changed, 310 insertions(+), 261 deletions(-)
diff --git a/sphinx_needs/api/configuration.py b/sphinx_needs/api/configuration.py
index 54b88d8d8..8b06ab045 100644
--- a/sphinx_needs/api/configuration.py
+++ b/sphinx_needs/api/configuration.py
@@ -4,7 +4,9 @@
All functions here are available under ``sphinxcontrib.api``. So do not import this module directly.
"""
-from typing import Callable, List, Optional
+from __future__ import annotations
+
+from typing import Callable
from docutils.parsers.rst import directives
from sphinx.application import Sphinx
@@ -17,7 +19,7 @@
from sphinx_needs.functions.functions import DynamicFunction
-def get_need_types(app: Sphinx) -> List[str]:
+def get_need_types(app: Sphinx) -> list[str]:
"""
Returns a list of directive-names from all configured need_types.
@@ -91,7 +93,7 @@ def add_extra_option(app: Sphinx, name: str) -> None:
NEEDS_CONFIG.extra_options[name] = directives.unchanged
-def add_dynamic_function(app: Sphinx, function: DynamicFunction, name: Optional[str] = None) -> None:
+def add_dynamic_function(app: Sphinx, function: DynamicFunction, name: str | None = None) -> None:
"""
Registers a new dynamic function for sphinx-needs.
@@ -122,9 +124,7 @@ def my_function(app, need, needs, *args, **kwargs):
WarningCheck = Callable[[NeedsInfoType, SphinxLoggerAdapter], bool]
-def add_warning(
- app: Sphinx, name: str, function: Optional[WarningCheck] = None, filter_string: Optional[str] = None
-) -> None:
+def add_warning(app: Sphinx, name: str, function: WarningCheck | None = None, filter_string: str | None = None) -> None:
"""
Registers a warning.
diff --git a/sphinx_needs/api/exceptions.py b/sphinx_needs/api/exceptions.py
index 0d3bef918..1720441aa 100644
--- a/sphinx_needs/api/exceptions.py
+++ b/sphinx_needs/api/exceptions.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from sphinx.errors import SphinxError, SphinxWarning
diff --git a/sphinx_needs/api/need.py b/sphinx_needs/api/need.py
index 5ae06f619..1a1b87f8b 100644
--- a/sphinx_needs/api/need.py
+++ b/sphinx_needs/api/need.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
import hashlib
import os
import re
-from typing import Any, List, Optional, Union
+from typing import Any
from docutils import nodes
from docutils.parsers.rst.states import RSTState
@@ -40,13 +42,13 @@ def add_need(
lineno: int,
need_type,
title: str,
- id: Optional[str] = None,
+ id: str | None = None,
content: str = "",
- status: Optional[str] = None,
+ status: str | None = None,
tags=None,
constraints=None,
constraints_passed=None,
- links_string: Optional[str] = None,
+ links_string: str | None = None,
delete: bool = False,
jinja_content: bool = False,
hide: bool = False,
@@ -56,10 +58,10 @@ def add_need(
style=None,
layout=None,
template=None,
- pre_template: Optional[str] = None,
- post_template: Optional[str] = None,
+ pre_template: str | None = None,
+ post_template: str | None = None,
is_external: bool = False,
- external_url: Optional[str] = None,
+ external_url: str | None = None,
external_css: str = "external_link",
**kwargs,
):
@@ -523,15 +525,15 @@ def del_need(app: Sphinx, need_id: str) -> None:
def add_external_need(
app: Sphinx,
need_type,
- title: Optional[str] = None,
- id: Optional[str] = None,
- external_url: Optional[str] = None,
+ title: str | None = None,
+ id: str | None = None,
+ external_url: str | None = None,
external_css: str = "external_link",
content: str = "",
- status: Optional[str] = None,
- tags: Optional[str] = None,
- constraints: Optional[str] = None,
- links_string: Optional[str] = None,
+ status: str | None = None,
+ tags: str | None = None,
+ constraints: str | None = None,
+ links_string: str | None = None,
**kwargs: Any,
):
"""
@@ -621,7 +623,7 @@ def _render_plantuml_template(content: str, docname: str, lineno: int, state: RS
return node_need_content
-def _read_in_links(links_string: Union[str, List[str]]) -> List[str]:
+def _read_in_links(links_string: str | list[str]) -> list[str]:
# Get links
links = []
if links_string:
@@ -646,7 +648,7 @@ def _read_in_links(links_string: Union[str, List[str]]) -> List[str]:
return _fix_list_dyn_func(links)
-def make_hashed_id(app: Sphinx, need_type: str, full_title: str, content: str, id_length: Optional[int] = None) -> str:
+def make_hashed_id(app: Sphinx, need_type: str, full_title: str, content: str, id_length: int | None = None) -> str:
"""
Creates an ID based on title or need.
@@ -683,7 +685,7 @@ def make_hashed_id(app: Sphinx, need_type: str, full_title: str, content: str, i
return f"{type_prefix}{cal_hashed_id[:id_length]}"
-def _fix_list_dyn_func(list: List[str]) -> List[str]:
+def _fix_list_dyn_func(list: list[str]) -> list[str]:
"""
This searches a list for dynamic function fragments, which may have been cut by generic searches for ",|;".
diff --git a/sphinx_needs/builder.py b/sphinx_needs/builder.py
index 5674d2323..e1eaf7994 100644
--- a/sphinx_needs/builder.py
+++ b/sphinx_needs/builder.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
import os
-from typing import Iterable, List, Optional, Sequence, Set
+from typing import Iterable, Sequence
from docutils import nodes
from sphinx import version_info
@@ -78,7 +80,7 @@ def finish(self) -> None:
from sphinx_needs.filter_common import filter_needs
filter_string = needs_config.builder_filter
- filtered_needs: List[NeedsInfoType] = filter_needs(
+ filtered_needs: list[NeedsInfoType] = filter_needs(
data.get_or_create_needs().values(), needs_config, filter_string
)
@@ -96,11 +98,11 @@ def finish(self) -> None:
else:
LOGGER.info("Needs successfully exported")
- def get_target_uri(self, _docname: str, _typ: Optional[str] = None) -> str:
+ def get_target_uri(self, _docname: str, _typ: str | None = None) -> str:
# only needed if the write phase is run
return ""
- def prepare_writing(self, _docnames: Set[str]) -> None:
+ def prepare_writing(self, _docnames: set[str]) -> None:
# only needed if the write phase is run
pass
@@ -242,7 +244,7 @@ def finish(self) -> None:
def get_outdated_docs(self) -> Iterable[str]:
return []
- def prepare_writing(self, _docnames: Set[str]) -> None:
+ def prepare_writing(self, _docnames: set[str]) -> None:
pass
def write_doc_serialized(self, _docname: str, _doctree: nodes.document) -> None:
@@ -251,7 +253,7 @@ def write_doc_serialized(self, _docname: str, _doctree: nodes.document) -> None:
def cleanup(self) -> None:
pass
- def get_target_uri(self, _docname: str, _typ: Optional[str] = None) -> str:
+ def get_target_uri(self, _docname: str, _typ: str | None = None) -> str:
return ""
diff --git a/sphinx_needs/defaults.py b/sphinx_needs/defaults.py
index 2e7a11913..5445e2572 100644
--- a/sphinx_needs/defaults.py
+++ b/sphinx_needs/defaults.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
import os
-from typing import Any, Dict
+from typing import Any
from docutils.parsers.rst import directives
@@ -202,7 +204,7 @@
TITLE_REGEX = r'([^\s]+) as "([^"]+)"'
-NEED_DEFAULT_OPTIONS: Dict[str, Any] = {
+NEED_DEFAULT_OPTIONS: dict[str, Any] = {
"id": directives.unchanged_required,
"status": directives.unchanged_required,
"tags": directives.unchanged_required,
diff --git a/sphinx_needs/diagrams_common.py b/sphinx_needs/diagrams_common.py
index 4adde2680..15de4321c 100644
--- a/sphinx_needs/diagrams_common.py
+++ b/sphinx_needs/diagrams_common.py
@@ -3,10 +3,12 @@
diagram related directive. E.g. needflow and needsequence.
"""
+from __future__ import annotations
+
import html
import os
import textwrap
-from typing import Any, Dict, List, Optional, Tuple, TypedDict
+from typing import Any, TypedDict
from urllib.parse import urlparse
from docutils import nodes
@@ -27,14 +29,14 @@ class DiagramAttributesType(TypedDict):
show_legend: bool
show_filters: bool
show_link_names: bool
- link_types: List[str]
+ link_types: list[str]
config: str
config_names: str
scale: str
highlight: str
- align: Optional[str]
+ align: str | None
debug: bool
- caption: Optional[str]
+ caption: str | None
class DiagramBase(SphinxDirective):
@@ -52,7 +54,7 @@ class DiagramBase(SphinxDirective):
"debug": directives.flag,
}
- def create_target(self, target_name: str) -> Tuple[int, str, nodes.target]:
+ def create_target(self, target_name: str) -> tuple[int, str, nodes.target]:
id = self.env.new_serialno(target_name)
targetid = f"{target_name}-{self.env.docname}-{id}"
targetnode = nodes.target("", "", ids=[targetid])
@@ -184,8 +186,8 @@ def calculate_link(app: Sphinx, need_info: NeedsPartsInfoType, _fromdocname: str
return link
-def create_legend(need_types: List[Dict[str, Any]]) -> str:
- def create_row(need_type: Dict[str, Any]) -> str:
+def create_legend(need_types: list[dict[str, Any]]) -> str:
+ def create_row(need_type: dict[str, Any]) -> str:
return "\n| {color} | {name} |".format(color=need_type["color"], name=need_type["title"])
rows = map(create_row, need_types)
diff --git a/sphinx_needs/directives/list2need.py b/sphinx_needs/directives/list2need.py
index d0a1d7bfc..bbdd27643 100644
--- a/sphinx_needs/directives/list2need.py
+++ b/sphinx_needs/directives/list2need.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
import hashlib
import re
from contextlib import suppress
-from typing import Any, List, Sequence
+from typing import Any, Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -201,7 +203,7 @@ def make_hashed_id(self, type_prefix: str, title: str, id_length: int) -> str:
type_prefix, hashlib.sha1(hashable_content.encode("UTF-8")).hexdigest().upper()[:id_length]
)
- def get_down_needs(self, list_needs: List[Any], index: int) -> List[str]:
+ def get_down_needs(self, list_needs: list[Any], index: int) -> list[str]:
"""
Return all needs which are directly under the one given by the index
"""
diff --git a/sphinx_needs/directives/need.py b/sphinx_needs/directives/need.py
index 5965e71a4..f4c527791 100644
--- a/sphinx_needs/directives/need.py
+++ b/sphinx_needs/directives/need.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
import hashlib
import re
-from typing import Any, Dict, List, Optional, Sequence, Tuple
+from typing import Any, Sequence
from docutils import nodes
from docutils.parsers.rst.states import RSTState, RSTStateMachine
@@ -53,8 +55,8 @@ class NeedDirective(SphinxDirective):
def __init__(
self,
name: str,
- arguments: List[str],
- options: Dict[str, Any],
+ arguments: list[str],
+ options: dict[str, Any],
content: StringList,
lineno: int,
content_offset: int,
@@ -149,7 +151,7 @@ def run(self) -> Sequence[nodes.Node]:
add_doc(env, self.docname)
return need_nodes # type: ignore[no-any-return]
- def read_in_links(self, name: str) -> List[str]:
+ def read_in_links(self, name: str) -> list[str]:
# Get links
links_string = self.options.get(name)
links = []
@@ -229,12 +231,12 @@ def _get_full_title(self) -> str:
def get_sections_and_signature_and_needs(
- need_node: Optional[nodes.Node],
-) -> Tuple[List[str], Optional[nodes.Text], List[str]]:
+ need_node: nodes.Node | None,
+) -> tuple[list[str], nodes.Text | None, list[str]]:
"""Gets the hierarchy of the section nodes as a list starting at the
section of the current need and then its parent sections"""
sections = []
- parent_needs: List[str] = []
+ parent_needs: list[str] = []
signature = None
current_node = need_node
while current_node:
@@ -299,7 +301,7 @@ def analyse_need_locations(app: Sphinx, doctree: nodes.document) -> None:
needs = SphinxNeedsData(env).get_or_create_needs()
- hidden_needs: List[Need] = []
+ hidden_needs: list[Need] = []
for need_node in doctree.findall(Need):
need_id = need_node["refid"]
need_info = needs[need_id]
@@ -346,7 +348,7 @@ def analyse_need_locations(app: Sphinx, doctree: nodes.document) -> None:
need_node.parent.remove(need_node)
-def previous_sibling(node: nodes.Node) -> Optional[nodes.Node]:
+def previous_sibling(node: nodes.Node) -> nodes.Node | None:
"""Return preceding sibling node or ``None``."""
try:
i = node.parent.index(node)
@@ -408,7 +410,7 @@ def process_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str) -
@profile("NEED_FORMAT")
-def format_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, found_needs_nodes: List[Need]) -> None:
+def format_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, found_needs_nodes: list[Need]) -> None:
"""Replace need nodes in the document with node trees suitable for output"""
env = app.env
needs = SphinxNeedsData(env).get_or_create_needs()
@@ -428,7 +430,7 @@ def format_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, fo
build_need(layout, node_need, app, fromdocname=fromdocname)
-def check_links(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
+def check_links(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
"""Checks if set links are valid or are dead (referenced need does not exist.)
For needs with dead links, an extra ``has_dead_links`` field is added and,
@@ -471,7 +473,7 @@ def check_links(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> N
)
-def create_back_links(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
+def create_back_links(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
"""Create back-links in all found needs.
These are fields for each link type, ``_back``,
@@ -482,7 +484,7 @@ def create_back_links(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig
option_back = f"{option}_back"
for key, need in needs.items():
- need_link_value: List[str] = [need[option]] if isinstance(need[option], str) else need[option] # type: ignore[literal-required]
+ need_link_value: list[str] = [need[option]] if isinstance(need[option], str) else need[option] # type: ignore[literal-required]
for need_id_full in need_link_value:
need_id_main, need_id_part = split_need_id(need_id_full)
@@ -497,7 +499,7 @@ def create_back_links(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig
needs[need_id_main]["parts"][need_id_part][option_back].append(key) # type: ignore[literal-required]
-def _fix_list_dyn_func(list: List[str]) -> List[str]:
+def _fix_list_dyn_func(list: list[str]) -> list[str]:
"""
This searches a list for dynamic function fragments, which may have been cut by generic searches for ",|;".
diff --git a/sphinx_needs/directives/needbar.py b/sphinx_needs/directives/needbar.py
index 1e048a3ce..623bef414 100644
--- a/sphinx_needs/directives/needbar.py
+++ b/sphinx_needs/directives/needbar.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
import hashlib
import math
-from typing import List, Sequence
+from typing import Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -165,7 +167,7 @@ def run(self) -> Sequence[nodes.Node]:
# 8. create figure
# 9. final storage
# 10. cleanup matplotlib
-def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]) -> None:
+def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
env = app.env
needs_data = SphinxNeedsData(env)
needs_config = NeedsSphinxConfig(env.config)
diff --git a/sphinx_needs/directives/needextend.py b/sphinx_needs/directives/needextend.py
index 7f6d1e48d..886aaa123 100644
--- a/sphinx_needs/directives/needextend.py
+++ b/sphinx_needs/directives/needextend.py
@@ -1,10 +1,7 @@
-"""
-
-
-"""
+from __future__ import annotations
import re
-from typing import Any, Callable, Dict, Sequence
+from typing import Any, Callable, Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -34,7 +31,7 @@ class NeedextendDirective(SphinxDirective):
optional_arguments = 0
final_argument_whitespace = True
- option_spec: Dict[str, Callable[[str], Any]] = {
+ option_spec: dict[str, Callable[[str], Any]] = {
"strict": directives.unchanged_required,
}
@@ -72,7 +69,7 @@ def run(self) -> Sequence[nodes.Node]:
def extend_needs_data(
- all_needs: Dict[str, NeedsInfoType], extends: Dict[str, NeedsExtendType], needs_config: NeedsSphinxConfig
+ all_needs: dict[str, NeedsInfoType], extends: dict[str, NeedsExtendType], needs_config: NeedsSphinxConfig
) -> None:
"""Use data gathered from needextend directives to modify fields of existing needs."""
diff --git a/sphinx_needs/directives/needextract.py b/sphinx_needs/directives/needextract.py
index 9b61ac91e..18f5cb060 100644
--- a/sphinx_needs/directives/needextract.py
+++ b/sphinx_needs/directives/needextract.py
@@ -1,10 +1,7 @@
-"""
-
-
-"""
+from __future__ import annotations
import re
-from typing import List, Sequence
+from typing import Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -70,7 +67,7 @@ def run(self) -> Sequence[nodes.Node]:
def process_needextract(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]
+ app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
) -> None:
"""
Replace all needextract nodes with a list of the collected needs.
@@ -86,7 +83,7 @@ def process_needextract(
id = node.attributes["ids"][0]
current_needextract = SphinxNeedsData(env).get_or_create_extracts()[id]
all_needs = SphinxNeedsData(env).get_or_create_needs()
- content: List[nodes.Element] = []
+ content: list[nodes.Element] = []
# check if filter argument and option filter both exist
need_filter_arg = current_needextract["filter_arg"]
diff --git a/sphinx_needs/directives/needfilter.py b/sphinx_needs/directives/needfilter.py
index 8800a189d..ef0cb3d71 100644
--- a/sphinx_needs/directives/needfilter.py
+++ b/sphinx_needs/directives/needfilter.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
import os
-from typing import List, Sequence, Union
+from typing import Sequence
from urllib.parse import urlparse
from docutils import nodes
@@ -70,7 +72,7 @@ def run(self) -> Sequence[nodes.Node]:
def process_needfilters(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]
+ app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
) -> None:
# Replace all needlist nodes with a list of the collected needs.
# Augment each need with a backlink to the original location.
@@ -89,7 +91,7 @@ def process_needfilters(
id = node.attributes["ids"][0]
current_needfilter = SphinxNeedsData(env)._get_or_create_filters()[id]
- content: Union[nodes.Element, List[nodes.Element]]
+ content: nodes.Element | list[nodes.Element]
if current_needfilter["layout"] == "list":
content = []
diff --git a/sphinx_needs/directives/needflow.py b/sphinx_needs/directives/needflow.py
index 749f84b8c..31572afb4 100644
--- a/sphinx_needs/directives/needflow.py
+++ b/sphinx_needs/directives/needflow.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
import html
import os
-from typing import Dict, Iterable, List, Sequence
+from typing import Iterable, Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -32,7 +34,7 @@
logger = get_logger(__name__)
-NEEDFLOW_TEMPLATES: Dict[str, Template] = {}
+NEEDFLOW_TEMPLATES: dict[str, Template] = {}
class Needflow(nodes.General, nodes.Element):
@@ -163,7 +165,7 @@ def walk_curr_need_tree(
fromdocname: str,
current_needflow: NeedsFlowType,
all_needs: Iterable[NeedsInfoType],
- found_needs: List[NeedsPartsInfoType],
+ found_needs: list[NeedsPartsInfoType],
need: NeedsPartsInfoType,
) -> str:
"""
@@ -230,7 +232,7 @@ def walk_curr_need_tree(
return curr_need_tree
-def get_root_needs(found_needs: List[NeedsPartsInfoType]) -> List[NeedsPartsInfoType]:
+def get_root_needs(found_needs: list[NeedsPartsInfoType]) -> list[NeedsPartsInfoType]:
return_list = []
for current_need in found_needs:
if current_need["is_need"]:
@@ -253,7 +255,7 @@ def cal_needs_node(
fromdocname: str,
current_needflow: NeedsFlowType,
all_needs: Iterable[NeedsInfoType],
- found_needs: List[NeedsPartsInfoType],
+ found_needs: list[NeedsPartsInfoType],
) -> str:
"""Calculate and get needs node representaion for plantuml including all child needs and need parts."""
top_needs = get_root_needs(found_needs)
@@ -276,7 +278,7 @@ def cal_needs_node(
@measure_time("needflow")
-def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]) -> None:
+def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
# Replace all needflow nodes with a list of the collected needs.
# Augment each need with a backlink to the original location.
env = app.env
@@ -321,7 +323,7 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
node.replace_self(error_node)
continue
- content: List[nodes.Element] = []
+ content: list[nodes.Element] = []
found_needs = process_filters(app, all_needs.values(), current_needflow)
diff --git a/sphinx_needs/directives/needgantt.py b/sphinx_needs/directives/needgantt.py
index 1ad22c3ce..d0f4b9c11 100644
--- a/sphinx_needs/directives/needgantt.py
+++ b/sphinx_needs/directives/needgantt.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
import os
import re
from datetime import datetime
-from typing import List, Sequence
+from typing import Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -118,7 +120,7 @@ def run(self) -> Sequence[nodes.Node]:
return [targetnode] + [Needgantt("")]
- def get_link_type_option(self, name: str, default: str = "") -> List[str]:
+ def get_link_type_option(self, name: str, default: str = "") -> list[str]:
link_types = [x.strip() for x in re.split(";|,", self.options.get(name, default))]
conf_link_types = NeedsSphinxConfig(self.env.config).extra_links
conf_link_types_name = [x["option"] for x in conf_link_types]
@@ -136,7 +138,7 @@ def get_link_type_option(self, name: str, default: str = "") -> List[str]:
return final_link_types
-def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]) -> None:
+def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
# Replace all needgantt nodes with a list of the collected needs.
env = app.env
needs_config = NeedsSphinxConfig(app.config)
diff --git a/sphinx_needs/directives/needimport.py b/sphinx_needs/directives/needimport.py
index 299d718d4..b66148079 100644
--- a/sphinx_needs/directives/needimport.py
+++ b/sphinx_needs/directives/needimport.py
@@ -1,7 +1,9 @@
+from __future__ import annotations
+
import json
import os
import re
-from typing import Dict, Sequence
+from typing import Sequence
from urllib.parse import urlparse
import requests
@@ -125,7 +127,7 @@ def run(self) -> Sequence[nodes.Node]:
needs_config = NeedsSphinxConfig(self.config)
# TODO this is not exactly NeedsInfoType, because the export removes/adds some keys
- needs_list: Dict[str, NeedsInfoType] = needs_import_list["versions"][version]["needs"]
+ needs_list: dict[str, NeedsInfoType] = needs_import_list["versions"][version]["needs"]
# Filter imported needs
needs_list_filtered = {}
diff --git a/sphinx_needs/directives/needlist.py b/sphinx_needs/directives/needlist.py
index 6a2b14ff9..91a2950f2 100644
--- a/sphinx_needs/directives/needlist.py
+++ b/sphinx_needs/directives/needlist.py
@@ -1,9 +1,6 @@
-"""
+from __future__ import annotations
-
-"""
-
-from typing import List, Sequence
+from typing import Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -63,7 +60,7 @@ def run(self) -> Sequence[nodes.Node]:
return [targetnode, Needlist("")]
-def process_needlist(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]) -> None:
+def process_needlist(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
"""
Replace all needlist nodes with a list of the collected needs.
Augment each need with a backlink to the original location.
@@ -80,7 +77,7 @@ def process_needlist(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
id = node.attributes["ids"][0]
current_needfilter = SphinxNeedsData(env).get_or_create_lists()[id]
- content: List[nodes.Node] = []
+ content: list[nodes.Node] = []
all_needs = list(SphinxNeedsData(env).get_or_create_needs().values())
found_needs = process_filters(app, all_needs, current_needfilter)
diff --git a/sphinx_needs/directives/needpie.py b/sphinx_needs/directives/needpie.py
index 72a0affea..f055dffe3 100644
--- a/sphinx_needs/directives/needpie.py
+++ b/sphinx_needs/directives/needpie.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
import hashlib
-from typing import Iterable, List, Sequence
+from typing import Iterable, Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -104,7 +106,7 @@ def run(self) -> Sequence[nodes.Node]:
@measure_time("needpie")
-def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]) -> None:
+def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
env = app.env
needs_data = SphinxNeedsData(env)
needs_config = NeedsSphinxConfig(env.config)
diff --git a/sphinx_needs/directives/needreport.py b/sphinx_needs/directives/needreport.py
index b72f7523f..97f4b5c08 100644
--- a/sphinx_needs/directives/needreport.py
+++ b/sphinx_needs/directives/needreport.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from pathlib import Path
from typing import Sequence
diff --git a/sphinx_needs/directives/needsequence.py b/sphinx_needs/directives/needsequence.py
index 4a9737175..42b1e1e25 100644
--- a/sphinx_needs/directives/needsequence.py
+++ b/sphinx_needs/directives/needsequence.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
import os
import re
-from typing import Any, Dict, List, Optional, Sequence, Tuple
+from typing import Any, Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -74,7 +76,7 @@ def run(self) -> Sequence[nodes.Node]:
def process_needsequence(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]
+ app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
) -> None:
# Replace all needsequence nodes with a list of the collected needs.
env = app.env
@@ -224,18 +226,18 @@ def process_needsequence(
def get_message_needs(
app: Sphinx,
sender: NeedsInfoType,
- link_types: List[str],
- all_needs_dict: Dict[str, NeedsInfoType],
- tracked_receivers: Optional[List[str]] = None,
- filter: Optional[str] = None,
-) -> Tuple[Dict[str, Dict[str, Any]], str, str]:
- msg_needs: List[Dict[str, Any]] = []
+ link_types: list[str],
+ all_needs_dict: dict[str, NeedsInfoType],
+ tracked_receivers: list[str] | None = None,
+ filter: str | None = None,
+) -> tuple[dict[str, dict[str, Any]], str, str]:
+ msg_needs: list[dict[str, Any]] = []
if tracked_receivers is None:
tracked_receivers = []
for link_type in link_types:
msg_needs += [all_needs_dict[x] for x in sender[link_type]] # type: ignore
- messages: Dict[str, Dict[str, Any]] = {}
+ messages: dict[str, dict[str, Any]] = {}
p_string = ""
c_string = ""
for msg_need in msg_needs:
diff --git a/sphinx_needs/directives/needservice.py b/sphinx_needs/directives/needservice.py
index 2adb2cf64..bf234f7fe 100644
--- a/sphinx_needs/directives/needservice.py
+++ b/sphinx_needs/directives/needservice.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, List, Sequence
+from __future__ import annotations
+
+from typing import Any, Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -37,8 +39,8 @@ class NeedserviceDirective(SphinxDirective):
def __init__(
self,
name: str,
- arguments: List[str],
- options: Dict[str, Any],
+ arguments: list[str],
+ options: dict[str, Any],
content: StringList,
lineno: int,
content_offset: int,
@@ -55,7 +57,7 @@ def run(self) -> Sequence[nodes.Node]:
needs_config = NeedsSphinxConfig(self.config)
need_types = needs_config.types
all_data = needs_config.service_all_data
- needs_services: Dict[str, BaseService] = getattr(app, "needs_services", {})
+ needs_services: dict[str, BaseService] = getattr(app, "needs_services", {})
service_name = self.arguments[0]
service = needs_services.get(service_name)
diff --git a/sphinx_needs/directives/needtable.py b/sphinx_needs/directives/needtable.py
index b00b7cf48..c05dfb61b 100644
--- a/sphinx_needs/directives/needtable.py
+++ b/sphinx_needs/directives/needtable.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
import re
-from typing import Any, Callable, List, Sequence
+from typing import Any, Callable, Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -113,7 +115,7 @@ def run(self) -> Sequence[nodes.Node]:
@measure_time("needtable")
@profile("NEEDTABLE")
def process_needtables(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]
+ app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
) -> None:
"""
Replace all needtables nodes with a table of filtered nodes.
diff --git a/sphinx_needs/directives/needuml.py b/sphinx_needs/directives/needuml.py
index bd679cdbb..0b2efb482 100644
--- a/sphinx_needs/directives/needuml.py
+++ b/sphinx_needs/directives/needuml.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
import html
import os
-from typing import List, Sequence
+from typing import Sequence
from docutils import nodes
from docutils.parsers.rst import directives
@@ -405,7 +407,7 @@ def is_element_of_need(node: nodes.Element) -> str:
@measure_time("needuml")
-def process_needuml(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]) -> None:
+def process_needuml(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
env = app.env
# for node in doctree.findall(Needuml):
diff --git a/sphinx_needs/directives/utils.py b/sphinx_needs/directives/utils.py
index 3efc786fb..378342909 100644
--- a/sphinx_needs/directives/utils.py
+++ b/sphinx_needs/directives/utils.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
import re
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any
from docutils import nodes
from sphinx.environment import BuildEnvironment
@@ -9,7 +11,7 @@
from sphinx_needs.defaults import TITLE_REGEX
-def no_needs_found_paragraph(message: Optional[str]) -> nodes.paragraph:
+def no_needs_found_paragraph(message: str | None) -> nodes.paragraph:
nothing_found = "No needs passed the filters" if message is None else message
para = nodes.paragraph()
para["classes"].append("needs_filter_warning")
@@ -40,7 +42,7 @@ def used_filter_paragraph(current_needfilter: NeedsFilteredBaseType) -> nodes.pa
return para
-def get_title(option_string: str) -> Tuple[str, str]:
+def get_title(option_string: str) -> tuple[str, str]:
"""
Returns a tuple of uppercase option and calculated title of given option string.
@@ -59,7 +61,7 @@ def get_title(option_string: str) -> Tuple[str, str]:
return option_name.upper(), title
-def get_option_list(options: Dict[str, Any], name: str) -> List[str]:
+def get_option_list(options: dict[str, Any], name: str) -> list[str]:
"""
Gets and creates a list of a given directive option value in a safe way
:param options: List of options
@@ -74,7 +76,7 @@ def get_option_list(options: Dict[str, Any], name: str) -> List[str]:
return values_list
-def analyse_needs_metrics(env: BuildEnvironment) -> Dict[str, Any]:
+def analyse_needs_metrics(env: BuildEnvironment) -> dict[str, Any]:
"""
Function to generate metrics about need objects.
@@ -82,7 +84,7 @@ def analyse_needs_metrics(env: BuildEnvironment) -> Dict[str, Any]:
:return: Dictionary consisting of needs metrics.
"""
needs = SphinxNeedsData(env).get_or_create_needs()
- metric_data: Dict[str, Any] = {"needs_amount": len(needs)}
+ metric_data: dict[str, Any] = {"needs_amount": len(needs)}
needs_types = {i["directive"]: 0 for i in NeedsSphinxConfig(env.config).types}
for i in needs.values():
diff --git a/sphinx_needs/environment.py b/sphinx_needs/environment.py
index 513f5e981..61dbf95dc 100644
--- a/sphinx_needs/environment.py
+++ b/sphinx_needs/environment.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
from pathlib import Path, PurePosixPath
-from typing import Iterable, List
+from typing import Iterable
from jinja2 import Environment, PackageLoader, select_autoescape
from sphinx.application import Sphinx
@@ -133,7 +135,7 @@ def install_static_files(
app: Sphinx,
source_dir: Path,
destination_dir: Path,
- files_to_copy: List[Path],
+ files_to_copy: list[Path],
message: str,
) -> None:
builder = app.builder
diff --git a/sphinx_needs/errors.py b/sphinx_needs/errors.py
index a75ba9fc8..958790cc1 100644
--- a/sphinx_needs/errors.py
+++ b/sphinx_needs/errors.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
try:
# Sphinx 3.0
from sphinx.errors import NoUri
diff --git a/sphinx_needs/external_needs.py b/sphinx_needs/external_needs.py
index 4288c37c5..fc8003648 100644
--- a/sphinx_needs/external_needs.py
+++ b/sphinx_needs/external_needs.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import json
import os
from functools import lru_cache
diff --git a/sphinx_needs/functions/common.py b/sphinx_needs/functions/common.py
index 69d6f4e37..f5d96bf06 100644
--- a/sphinx_needs/functions/common.py
+++ b/sphinx_needs/functions/common.py
@@ -4,9 +4,11 @@
.. note:: The function parameters ``app``, ``need``, ``needs`` are set automatically and can not be overridden by user.
"""
+from __future__ import annotations
+
import contextlib
import re
-from typing import Any, Dict, List, Optional
+from typing import Any
from sphinx.application import Sphinx
@@ -17,7 +19,7 @@
from sphinx_needs.utils import logger
-def test(app: Sphinx, need: NeedsInfoType, needs: Dict[str, NeedsInfoType], *args: Any, **kwargs: Any) -> str:
+def test(app: Sphinx, need: NeedsInfoType, needs: dict[str, NeedsInfoType], *args: Any, **kwargs: Any) -> str:
"""
Test function for dynamic functions in sphinx needs.
@@ -39,7 +41,7 @@ def test(app: Sphinx, need: NeedsInfoType, needs: Dict[str, NeedsInfoType], *arg
def echo(
- app: Sphinx, need: NeedsInfoType, needs: Dict[str, NeedsInfoType], text: str, *args: Any, **kwargs: Any
+ app: Sphinx, need: NeedsInfoType, needs: dict[str, NeedsInfoType], text: str, *args: Any, **kwargs: Any
) -> str:
"""
.. versionadded:: 0.6.3
@@ -60,12 +62,12 @@ def echo(
def copy(
app: Sphinx,
need: NeedsInfoType,
- needs: Dict[str, NeedsInfoType],
+ needs: dict[str, NeedsInfoType],
option: str,
- need_id: Optional[str] = None,
+ need_id: str | None = None,
lower: bool = False,
upper: bool = False,
- filter: Optional[str] = None,
+ filter: str | None = None,
) -> Any:
"""
Copies the value of one need option to another
@@ -171,11 +173,11 @@ def copy(
def check_linked_values(
app: Sphinx,
need: NeedsInfoType,
- needs: Dict[str, NeedsInfoType],
+ needs: dict[str, NeedsInfoType],
result: Any,
search_option: str,
search_value: Any,
- filter_string: Optional[str] = None,
+ filter_string: str | None = None,
one_hit: bool = False,
) -> Any:
"""
@@ -335,9 +337,9 @@ def check_linked_values(
def calc_sum(
app: Sphinx,
need: NeedsInfoType,
- needs: Dict[str, NeedsInfoType],
+ needs: dict[str, NeedsInfoType],
option: str,
- filter: Optional[str] = None,
+ filter: str | None = None,
links_only: bool = False,
) -> float:
"""
@@ -443,10 +445,10 @@ def calc_sum(
def links_from_content(
app: Sphinx,
need: NeedsInfoType,
- needs: Dict[str, NeedsInfoType],
- need_id: Optional[str] = None,
- filter: Optional[str] = None,
-) -> List[str]:
+ needs: dict[str, NeedsInfoType],
+ need_id: str | None = None,
+ filter: str | None = None,
+) -> list[str]:
"""
Extracts links from content of a need.
diff --git a/sphinx_needs/functions/functions.py b/sphinx_needs/functions/functions.py
index a8c640ce2..694435d17 100644
--- a/sphinx_needs/functions/functions.py
+++ b/sphinx_needs/functions/functions.py
@@ -6,9 +6,11 @@
in need configurations.
"""
+from __future__ import annotations
+
import ast
import re
-from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+from typing import Any, Callable, Dict, List, Union
from docutils import nodes
from sphinx.application import Sphinx
@@ -31,7 +33,7 @@
]
-def register_func(need_function: DynamicFunction, name: Optional[str] = None) -> None:
+def register_func(need_function: DynamicFunction, name: str | None = None) -> None:
"""
Registers a new sphinx-needs function for the given sphinx environment.
:param env: Sphinx environment
@@ -153,7 +155,7 @@ def find_and_replace_node_content(node: nodes.Node, env: BuildEnvironment, need:
return node
-def resolve_dynamic_values(needs: Dict[str, NeedsInfoType], app: Sphinx) -> None:
+def resolve_dynamic_values(needs: dict[str, NeedsInfoType], app: Sphinx) -> None:
"""
Resolve dynamic values inside need data.
@@ -178,7 +180,7 @@ def resolve_dynamic_values(needs: Dict[str, NeedsInfoType], app: Sphinx) -> None
# dynamic values in this data are not allowed.
continue
if not isinstance(need[need_option], (list, set)):
- func_call: Optional[str] = "init"
+ func_call: str | None = "init"
while func_call:
try:
func_call, func_return = _detect_and_execute(need[need_option], need, app)
@@ -231,7 +233,7 @@ def resolve_dynamic_values(needs: Dict[str, NeedsInfoType], app: Sphinx) -> None
def resolve_variants_options(
- needs: Dict[str, NeedsInfoType], needs_config: NeedsSphinxConfig, tags: Dict[str, bool]
+ needs: dict[str, NeedsInfoType], needs_config: NeedsSphinxConfig, tags: dict[str, bool]
) -> None:
"""
Resolve variants options inside need data.
@@ -252,7 +254,7 @@ def resolve_variants_options(
for need in needs.values():
# Data to use as filter context.
- need_context: Dict[str, Any] = {**need}
+ need_context: dict[str, Any] = {**need}
need_context.update(**needs_config.filter_data) # Add needs_filter_data to filter context
need_context.update(**tags) # Add sphinx tags to filter context
@@ -295,7 +297,7 @@ def check_and_get_content(content: str, need: NeedsInfoType, env: BuildEnvironme
return content
-def _detect_and_execute(content: Any, need: NeedsInfoType, app: Sphinx) -> Tuple[Optional[str], Any]:
+def _detect_and_execute(content: Any, need: NeedsInfoType, app: Sphinx) -> tuple[str | None, Any]:
"""Detects if given content is a function call and executes it."""
try:
content = str(content)
@@ -312,7 +314,7 @@ def _detect_and_execute(content: Any, need: NeedsInfoType, app: Sphinx) -> Tuple
return func_call, func_return
-def _analyze_func_string(func_string: str, need: Optional[NeedsInfoType]) -> Tuple[str, List[Any], Dict[str, Any]]:
+def _analyze_func_string(func_string: str, need: NeedsInfoType | None) -> tuple[str, list[Any], dict[str, Any]]:
"""
Analyze given function string and extract:
@@ -336,14 +338,14 @@ def _analyze_func_string(func_string: str, need: Optional[NeedsInfoType]) -> Tup
except AttributeError:
raise SphinxError(f"Given dynamic function string is not a valid python call. Got: {func_string}")
- func_args: List[Any] = []
+ func_args: list[Any] = []
for arg in func_call.args:
if isinstance(arg, ast.Num):
func_args.append(arg.n)
elif isinstance(arg, (ast.Str, ast.BoolOp)):
func_args.append(arg.s) # type: ignore
elif isinstance(arg, ast.List):
- arg_list: List[Any] = []
+ arg_list: list[Any] = []
for element in arg.elts:
if isinstance(element, ast.Num):
arg_list.append(element.n)
@@ -367,7 +369,7 @@ def _analyze_func_string(func_string: str, need: Optional[NeedsInfoType]) -> Tup
"Unsupported type found in function definition: {}. "
"Supported are numbers, strings, bool and list".format(func_string)
)
- func_kargs: Dict[str, Any] = {}
+ func_kargs: dict[str, Any] = {}
for keyword in func_call.keywords:
kvalue = keyword.value
kkey = keyword.arg
diff --git a/sphinx_needs/layout.py b/sphinx_needs/layout.py
index 068f35d5e..d9c573945 100644
--- a/sphinx_needs/layout.py
+++ b/sphinx_needs/layout.py
@@ -4,13 +4,15 @@
Based on https://github.com/useblocks/sphinxcontrib-needs/issues/102
"""
+from __future__ import annotations
+
import os
import re
import uuid
from contextlib import suppress
from functools import lru_cache
from optparse import Values
-from typing import Callable, Dict, List, Optional, Tuple, Union
+from typing import Callable
from urllib.parse import urlparse
import requests
@@ -31,7 +33,7 @@
@measure_time("need")
def create_need(
- need_id: str, app: Sphinx, layout: Optional[str] = None, style: Optional[str] = None, docname: Optional[str] = None
+ need_id: str, app: Sphinx, layout: str | None = None, style: str | None = None, docname: str | None = None
) -> nodes.container:
"""
Creates a new need-node for a given layout.
@@ -127,7 +129,7 @@ def replace_pending_xref_refdoc(node: nodes.Element, new_refdoc: str) -> None:
@measure_time("need")
def build_need(
- layout: str, node: nodes.Element, app: Sphinx, style: Optional[str] = None, fromdocname: Optional[str] = None
+ layout: str, node: nodes.Element, app: Sphinx, style: str | None = None, fromdocname: str | None = None
) -> None:
"""
Builds a need based on a given layout for a given need-node.
@@ -175,7 +177,7 @@ def build_need(
@lru_cache(1)
-def _generate_inline_parser() -> Tuple[Values, Inliner]:
+def _generate_inline_parser() -> tuple[Values, Inliner]:
doc_settings = OptionParser(components=(Parser,)).get_default_values()
inline_parser = Inliner()
inline_parser.init_customizations(doc_settings) # type: ignore
@@ -193,8 +195,8 @@ def __init__(
need: NeedsInfoType,
layout: str,
node: nodes.Element,
- style: Optional[str] = None,
- fromdocname: Optional[str] = None,
+ style: str | None = None,
+ fromdocname: str | None = None,
) -> None:
self.app = app
self.need = need
@@ -302,7 +304,7 @@ def __init__(
inliner=None,
)
- self.functions: Dict[str, Callable[..., Union[None, nodes.Node, List[nodes.Node]]]] = {
+ self.functions: dict[str, Callable[..., None | nodes.Node | list[nodes.Node]]] = {
"meta": self.meta, # type: ignore[dict-item]
"meta_all": self.meta_all,
"meta_links": self.meta_links,
@@ -343,7 +345,7 @@ def get_need_table(self) -> nodes.table:
return self.node_table
- def get_section(self, section: str) -> Union[nodes.line_block, List[nodes.Element]]:
+ def get_section(self, section: str) -> nodes.line_block | list[nodes.Element]:
try:
lines = self.layout["layout"][section]
except KeyError:
@@ -367,7 +369,7 @@ def get_section(self, section: str) -> Union[nodes.line_block, List[nodes.Elemen
return lines_container
- def _parse(self, line: str) -> List[nodes.Node]:
+ def _parse(self, line: str) -> list[nodes.Node]:
"""
Parses a single line/string for inline rst statements, like strong, emphasis, literal, ...
@@ -379,7 +381,7 @@ def _parse(self, line: str) -> List[nodes.Node]:
raise SphinxNeedLayoutException(message)
return result # type: ignore[no-any-return]
- def _func_replace(self, section_nodes: List[nodes.Node]) -> List[nodes.Node]:
+ def _func_replace(self, section_nodes: list[nodes.Node]) -> list[nodes.Node]:
"""
Replaces a function definition like ``< >`` with the related docutils nodes.
@@ -390,7 +392,7 @@ def _func_replace(self, section_nodes: List[nodes.Node]) -> List[nodes.Node]:
:return: docutils nodes
"""
return_nodes = []
- result: Union[None, nodes.Node, List[nodes.Node]]
+ result: None | nodes.Node | list[nodes.Node]
for node in section_nodes:
if not isinstance(node, nodes.Text):
for child in node.children:
@@ -483,8 +485,8 @@ def _replace_place_holder(self, data: str) -> str:
return data
def meta(
- self, name: str, prefix: Optional[str] = None, show_empty: bool = False
- ) -> Union[nodes.inline, List[nodes.Element]]:
+ self, name: str, prefix: str | None = None, show_empty: bool = False
+ ) -> nodes.inline | list[nodes.Element]:
"""
Returns the specific metadata of a need inside docutils nodes.
Usage::
@@ -520,11 +522,11 @@ def meta(
if len(data) == 0 and not show_empty:
return []
- needs_string_links_option: List[str] = []
+ needs_string_links_option: list[str] = []
for v in self.needs_config.string_links.values():
needs_string_links_option.extend(v["options"])
- data_list: List[str] = (
+ data_list: list[str] = (
[i.strip() for i in re.split(r",|;", data) if len(i) != 0]
if name in needs_string_links_option
else [data]
@@ -605,7 +607,7 @@ def meta_all(
self,
prefix: str = "",
postfix: str = "",
- exclude: Optional[List[str]] = None,
+ exclude: list[str] | None = None,
no_links: bool = False,
defaults: bool = True,
show_empty: bool = False,
@@ -701,9 +703,7 @@ def meta_links(self, name: str, incoming: bool = False) -> nodes.inline:
data_container.append(node_links)
return data_container
- def meta_links_all(
- self, prefix: str = "", postfix: str = "", exclude: Optional[List[str]] = None
- ) -> List[nodes.line]:
+ def meta_links_all(self, prefix: str = "", postfix: str = "", exclude: list[str] | None = None) -> list[nodes.line]:
"""
Documents all used link types for the current need automatically.
@@ -736,14 +736,14 @@ def meta_links_all(
def image(
self,
url: str,
- height: Optional[str] = None,
- width: Optional[str] = None,
- align: Optional[str] = None,
+ height: str | None = None,
+ width: str | None = None,
+ align: str | None = None,
no_link: bool = False,
prefix: str = "",
is_external: bool = False,
img_class: str = "",
- ) -> Union[nodes.inline, List[nodes.Element]]:
+ ) -> nodes.inline | list[nodes.Element]:
"""
See https://docutils.sourceforge.io/docs/ref/rst/directives.html#images
@@ -875,10 +875,10 @@ def image(
def link(
self,
url: str,
- text: Optional[str] = None,
- image_url: Optional[str] = None,
- image_height: Optional[str] = None,
- image_width: Optional[str] = None,
+ text: str | None = None,
+ image_url: str | None = None,
+ image_height: str | None = None,
+ image_width: str | None = None,
prefix: str = "",
is_dynamic: bool = False,
) -> nodes.inline:
@@ -927,7 +927,7 @@ def link(
def collapse_button(
self, target: str = "meta", collapsed: str = "Show", visible: str = "Close", initial: bool = False
- ) -> Optional[nodes.inline]:
+ ) -> nodes.inline | None:
"""
To show icons instead of text on the button, use collapse_button() like this::
@@ -989,10 +989,10 @@ def collapse_button(
def permalink(
self,
- image_url: Optional[str] = None,
- image_height: Optional[str] = None,
- image_width: Optional[str] = None,
- text: Optional[str] = None,
+ image_url: str | None = None,
+ image_height: str | None = None,
+ image_width: str | None = None,
+ text: str | None = None,
prefix: str = "",
) -> nodes.inline:
"""
@@ -1034,9 +1034,7 @@ def permalink(
prefix=prefix,
)
- def _grid_simple(
- self, colwidths: List[int], side_left: Union[bool, str], side_right: Union[bool, str], footer: bool
- ) -> None:
+ def _grid_simple(self, colwidths: list[int], side_left: bool | str, side_right: bool | str, footer: bool) -> None:
"""
Creates most "simple" grid layouts.
Side parts and footer can be activated via config.
@@ -1209,7 +1207,7 @@ def _grid_complex(self) -> None:
# Construct table
node_tgroup += self.node_tbody
- def _grid_content(self, colwidths: List[int], side_left: bool, side_right: bool, footer: bool) -> None:
+ def _grid_content(self, colwidths: list[int], side_left: bool, side_right: bool, footer: bool) -> None:
"""
Creates most "content" based grid layouts.
Side parts and footer can be activated via config.
diff --git a/sphinx_needs/logging.py b/sphinx_needs/logging.py
index 90c34f2f0..c328fce23 100644
--- a/sphinx_needs/logging.py
+++ b/sphinx_needs/logging.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from sphinx.util import logging
from sphinx.util.logging import SphinxLoggerAdapter
diff --git a/sphinx_needs/need_constraints.py b/sphinx_needs/need_constraints.py
index 77db7d03b..e068a6bdf 100644
--- a/sphinx_needs/need_constraints.py
+++ b/sphinx_needs/need_constraints.py
@@ -1,4 +1,4 @@
-from typing import Dict
+from __future__ import annotations
import jinja2
@@ -11,7 +11,7 @@
logger = get_logger(__name__)
-def process_constraints(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
+def process_constraints(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
"""Analyse constraints of all needs,
and set corresponding fields on the need data item:
``constraints_passed`` and ``constraints_results``.
@@ -21,7 +21,7 @@ def process_constraints(needs: Dict[str, NeedsInfoType], config: NeedsSphinxConf
"""
config_constraints = config.constraints
- error_templates_cache: Dict[str, jinja2.Template] = {}
+ error_templates_cache: dict[str, jinja2.Template] = {}
for need in needs.values():
need_id = need["id"]
diff --git a/sphinx_needs/needs.py b/sphinx_needs/needs.py
index 03bbce6c0..e252dfc7f 100644
--- a/sphinx_needs/needs.py
+++ b/sphinx_needs/needs.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from timeit import default_timer as timer # Used for timing measurements
from typing import Any, Callable, Dict, List, Type
@@ -135,7 +137,7 @@
LOGGER = get_logger(__name__)
-def setup(app: Sphinx) -> Dict[str, Any]:
+def setup(app: Sphinx) -> dict[str, Any]:
LOGGER.debug("Starting setup of Sphinx-Needs")
LOGGER.debug("Load Sphinx-Data-Viewer for Sphinx-Needs")
app.setup_extension("sphinx_data_viewer")
@@ -282,7 +284,7 @@ def process_caller(app: Sphinx, doctree: nodes.document, fromdocname: str) -> No
and fromdocname != f"{app.config.root_doc}"
):
return
- current_nodes: Dict[Type[nodes.Element], List[nodes.Element]] = {}
+ current_nodes: dict[type[nodes.Element], list[nodes.Element]] = {}
check_nodes = list(node_list.keys())
for node_need in doctree.findall(node_match(check_nodes)):
for check_node in node_list:
diff --git a/sphinx_needs/needsfile.py b/sphinx_needs/needsfile.py
index 17825637f..ece2bbd66 100644
--- a/sphinx_needs/needsfile.py
+++ b/sphinx_needs/needsfile.py
@@ -4,11 +4,13 @@
Creates, checks and imports ``needs.json`` files.
"""
+from __future__ import annotations
+
import json
import os
import sys
from datetime import datetime
-from typing import Any, List
+from typing import Any
from jsonschema import Draft7Validator
from sphinx.config import Config
@@ -150,7 +152,7 @@ def load_json(self, file: str) -> None:
class Errors:
- def __init__(self, schema_errors: List[Any]):
+ def __init__(self, schema_errors: list[Any]):
self.schema = schema_errors
diff --git a/sphinx_needs/roles/need_count.py b/sphinx_needs/roles/need_count.py
index ba0a137f1..8ff1299a8 100644
--- a/sphinx_needs/roles/need_count.py
+++ b/sphinx_needs/roles/need_count.py
@@ -4,7 +4,7 @@
Based on https://github.com/useblocks/sphinxcontrib-needs/issues/37
"""
-from typing import List
+from __future__ import annotations
from docutils import nodes
from sphinx.application import Sphinx
@@ -23,7 +23,7 @@ class NeedCount(nodes.Inline, nodes.Element):
def process_need_count(
- app: Sphinx, doctree: nodes.document, _fromdocname: str, found_nodes: List[nodes.Element]
+ app: Sphinx, doctree: nodes.document, _fromdocname: str, found_nodes: list[nodes.Element]
) -> None:
needs_config = NeedsSphinxConfig(app.config)
for node_need_count in found_nodes:
diff --git a/sphinx_needs/roles/need_func.py b/sphinx_needs/roles/need_func.py
index 50852465a..23daee4ac 100644
--- a/sphinx_needs/roles/need_func.py
+++ b/sphinx_needs/roles/need_func.py
@@ -2,7 +2,7 @@
Provide the role ``need_func``, which executes a dynamic function.
"""
-from typing import List
+from __future__ import annotations
from docutils import nodes
from sphinx.application import Sphinx
@@ -18,7 +18,7 @@ class NeedFunc(nodes.Inline, nodes.Element):
def process_need_func(
- app: Sphinx, doctree: nodes.document, _fromdocname: str, found_nodes: List[nodes.Element]
+ app: Sphinx, doctree: nodes.document, _fromdocname: str, found_nodes: list[nodes.Element]
) -> None:
env = app.env
# for node_need_func in doctree.findall(NeedFunc):
diff --git a/sphinx_needs/roles/need_incoming.py b/sphinx_needs/roles/need_incoming.py
index dc882b74e..fcdf5251b 100644
--- a/sphinx_needs/roles/need_incoming.py
+++ b/sphinx_needs/roles/need_incoming.py
@@ -1,4 +1,4 @@
-from typing import List
+from __future__ import annotations
from docutils import nodes
from sphinx.application import Sphinx
@@ -15,7 +15,7 @@ class NeedIncoming(nodes.Inline, nodes.Element):
def process_need_incoming(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]
+ app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
) -> None:
builder = app.builder
env = app.env
diff --git a/sphinx_needs/roles/need_outgoing.py b/sphinx_needs/roles/need_outgoing.py
index 8248f1ce4..1939b7e4e 100644
--- a/sphinx_needs/roles/need_outgoing.py
+++ b/sphinx_needs/roles/need_outgoing.py
@@ -1,4 +1,4 @@
-from typing import List
+from __future__ import annotations
from docutils import nodes
from sphinx.application import Sphinx
@@ -18,7 +18,7 @@ class NeedOutgoing(nodes.Inline, nodes.Element):
def process_need_outgoing(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]
+ app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
) -> None:
builder = app.builder
env = app.env
diff --git a/sphinx_needs/roles/need_part.py b/sphinx_needs/roles/need_part.py
index 8b398ac2d..242d197d4 100644
--- a/sphinx_needs/roles/need_part.py
+++ b/sphinx_needs/roles/need_part.py
@@ -7,9 +7,11 @@
"""
+from __future__ import annotations
+
import hashlib
import re
-from typing import List, cast
+from typing import cast
from docutils import nodes
from sphinx.application import Sphinx
@@ -25,14 +27,14 @@ class NeedPart(nodes.Inline, nodes.Element):
pass
-def process_need_part(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]) -> None:
+def process_need_part(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
pass
part_pattern = re.compile(r"\(([\w-]+)\)(.*)")
-def update_need_with_parts(env: BuildEnvironment, need: NeedsInfoType, part_nodes: List[NeedPart]) -> None:
+def update_need_with_parts(env: BuildEnvironment, need: NeedsInfoType, part_nodes: list[NeedPart]) -> None:
app = env.app
builder = app.builder
for part_node in part_nodes:
@@ -86,7 +88,7 @@ def update_need_with_parts(env: BuildEnvironment, need: NeedsInfoType, part_node
part_node.append(node_need_part_line)
-def find_parts(node: nodes.Node) -> List[NeedPart]:
+def find_parts(node: nodes.Node) -> list[NeedPart]:
found_nodes = []
for child in node.children:
if isinstance(child, NeedPart):
diff --git a/sphinx_needs/roles/need_ref.py b/sphinx_needs/roles/need_ref.py
index 2f1526a13..d817b9274 100644
--- a/sphinx_needs/roles/need_ref.py
+++ b/sphinx_needs/roles/need_ref.py
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
import contextlib
from collections.abc import Iterable
-from typing import Dict, List, Union
from docutils import nodes
from sphinx.application import Sphinx
@@ -19,7 +20,7 @@ class NeedRef(nodes.Inline, nodes.Element):
pass
-def transform_need_to_dict(need: NeedsInfoType) -> Dict[str, str]:
+def transform_need_to_dict(need: NeedsInfoType) -> dict[str, str]:
"""
The function will transform a need in a dictionary of strings. Used to
be given e.g. to a python format string.
@@ -50,7 +51,7 @@ def transform_need_to_dict(need: NeedsInfoType) -> Dict[str, str]:
return dict_need
-def process_need_ref(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: List[nodes.Element]) -> None:
+def process_need_ref(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
builder = app.builder
env = app.env
needs_config = NeedsSphinxConfig(env.config)
@@ -93,7 +94,7 @@ def process_need_ref(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
title = f"{title[: max_length - 3]}..."
dict_need["title"] = title
- ref_name: Union[None, str, nodes.Text] = node_need_ref.children[0].children[0] # type: ignore[assignment]
+ ref_name: None | str | nodes.Text = node_need_ref.children[0].children[0] # type: ignore[assignment]
# Only use ref_name, if it differs from ref_id
if str(need_id_full) == str(ref_name):
ref_name = None
diff --git a/sphinx_needs/services/base.py b/sphinx_needs/services/base.py
index 36c60e82d..7189c115d 100644
--- a/sphinx_needs/services/base.py
+++ b/sphinx_needs/services/base.py
@@ -1,10 +1,12 @@
-from typing import Any, ClassVar, List
+from __future__ import annotations
+
+from typing import Any, ClassVar
from sphinx_needs.logging import get_logger
class BaseService:
- options: ClassVar[List[str]]
+ options: ClassVar[list[str]]
def __init__(self, *args: Any, **kwargs: Any) -> None:
self.log = get_logger(__name__)
diff --git a/sphinx_needs/services/config/github.py b/sphinx_needs/services/config/github.py
index aa90d7303..ed2c2a6e2 100644
--- a/sphinx_needs/services/config/github.py
+++ b/sphinx_needs/services/config/github.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
EXTRA_DATA_OPTIONS = ["user", "created_at", "updated_at", "closed_at", "service"]
EXTRA_LINK_OPTIONS = ["url"]
EXTRA_IMAGE_OPTIONS = ["avatar"]
diff --git a/sphinx_needs/services/config/open_needs.py b/sphinx_needs/services/config/open_needs.py
index 4ce37684d..5968e58fb 100644
--- a/sphinx_needs/services/config/open_needs.py
+++ b/sphinx_needs/services/config/open_needs.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
EXTRA_DATA_OPTIONS = ["params", "prefix"]
EXTRA_LINK_OPTIONS = ["url", "url_postfix"]
CONFIG_OPTIONS = ["query", "max_content_lines", "id_prefix"]
diff --git a/sphinx_needs/services/github.py b/sphinx_needs/services/github.py
index 0a8b6efdd..b2fc6da72 100644
--- a/sphinx_needs/services/github.py
+++ b/sphinx_needs/services/github.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
+
import os
import textwrap
import time
from contextlib import suppress
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any
from urllib.parse import urlparse
import requests
@@ -25,7 +27,7 @@
class GithubService(BaseService):
options = CONFIG_OPTIONS + EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS + EXTRA_IMAGE_OPTIONS
- def __init__(self, app: Sphinx, name: str, config: Dict[str, Any], **kwargs: Any) -> None:
+ def __init__(self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any) -> None:
self.app = app
self.name = name
self.config = config
@@ -74,7 +76,7 @@ def __init__(self, app: Sphinx, name: str, config: Dict[str, Any], **kwargs: Any
super().__init__()
- def _send(self, query: str, options: Dict[str, Any], specific: bool = False) -> Dict[str, Any]:
+ def _send(self, query: str, options: dict[str, Any], specific: bool = False) -> dict[str, Any]:
headers = {}
if self.gh_type == "commit":
headers["Accept"] = "application/vnd.github.cloak-preview+json"
@@ -105,7 +107,7 @@ def _send(self, query: str, options: Dict[str, Any], specific: bool = False) ->
self.log.info(f"Service {self.name} requesting data for query: {query}")
- auth: Optional[Tuple[str, str]]
+ auth: tuple[str, str] | None
if self.username:
# TODO token can be None
auth = (self.username, self.token) # type: ignore
@@ -146,7 +148,7 @@ def _send(self, query: str, options: Dict[str, Any], specific: bool = False) ->
return {"items": [resp.json()]}
return resp.json() # type: ignore
- def request(self, options: Optional[Dict[str, Any]] = None) -> List[Dict[str, Any]]:
+ def request(self, options: dict[str, Any] | None = None) -> list[dict[str, Any]]:
if options is None:
options = {}
self.log.debug(f"Requesting data for service {self.name}")
@@ -180,7 +182,7 @@ def request(self, options: Optional[Dict[str, Any]] = None) -> List[Dict[str, An
return data
- def prepare_issue_data(self, items: List[Dict[str, Any]], options: Dict[str, Any]) -> List[Dict[str, Any]]:
+ def prepare_issue_data(self, items: list[dict[str, Any]], options: dict[str, Any]) -> list[dict[str, Any]]:
data = []
for item in items:
# ensure that "None" can not reach .splitlines()
@@ -240,7 +242,7 @@ def prepare_issue_data(self, items: List[Dict[str, Any]], options: Dict[str, Any
return data
- def prepare_commit_data(self, items: List[Dict[str, Any]], options: Dict[str, Any]) -> List[Dict[str, Any]]:
+ def prepare_commit_data(self, items: list[dict[str, Any]], options: dict[str, Any]) -> list[dict[str, Any]]:
data = []
for item in items:
@@ -314,7 +316,7 @@ def _get_avatar(self, avatar_url: str) -> str:
return avatar_file_path
- def _add_given_options(self, options: Dict[str, Any], element_data: Dict[str, Any]) -> None:
+ def _add_given_options(self, options: dict[str, Any], element_data: dict[str, Any]) -> None:
"""
Add data from options, which was defined by user but is not set by this service
diff --git a/sphinx_needs/services/manager.py b/sphinx_needs/services/manager.py
index a0f3f8668..ecdacadaf 100644
--- a/sphinx_needs/services/manager.py
+++ b/sphinx_needs/services/manager.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, Type
+from __future__ import annotations
+
+from typing import Any
from docutils.parsers.rst import directives
from sphinx.application import Sphinx
@@ -15,9 +17,9 @@ def __init__(self, app: Sphinx):
self.app = app
self.log = get_logger(__name__)
- self.services: Dict[str, BaseService] = {}
+ self.services: dict[str, BaseService] = {}
- def register(self, name: str, klass: Type[BaseService], **kwargs: Any) -> None:
+ def register(self, name: str, klass: type[BaseService], **kwargs: Any) -> None:
try:
config = NeedsSphinxConfig(self.app.config).services[name]
except KeyError:
diff --git a/sphinx_needs/services/open_needs.py b/sphinx_needs/services/open_needs.py
index 24368bf92..6a9ea2195 100644
--- a/sphinx_needs/services/open_needs.py
+++ b/sphinx_needs/services/open_needs.py
@@ -1,6 +1,8 @@
+from __future__ import annotations
+
import re
from random import choices
-from typing import Any, Dict, List
+from typing import Any
import requests
from jinja2 import Template
@@ -22,7 +24,7 @@
class OpenNeedsService(BaseService):
options = CONFIG_OPTIONS + EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS
- def __init__(self, app: Sphinx, name: str, config: Dict[str, Any], **kwargs: Any) -> None:
+ def __init__(self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any) -> None:
self.app = app
self.name = name
self.config = config
@@ -38,10 +40,10 @@ def __init__(self, app: Sphinx, name: str, config: Dict[str, Any], **kwargs: Any
self.id_prefix = self.config.get("id_prefix", "OPEN_NEEDS_")
self.query = self.config.get("query", "")
self.content = self.config.get("content", DEFAULT_CONTENT)
- self.mappings: Dict[str, Any] = self.config.get("mappings", {})
+ self.mappings: dict[str, Any] = self.config.get("mappings", {})
self.mapping_replaces = self.config.get("mappings_replaces", MAPPINGS_REPLACES_DEFAULT)
- self.extra_data: Dict[str, Any] = self.config.get("extra_data", {})
+ self.extra_data: dict[str, Any] = self.config.get("extra_data", {})
self.params = self.config.get("params", "skip=0,limit=100")
super().__init__(**kwargs)
@@ -70,8 +72,8 @@ def _prepare_request(self, options: Any) -> Any:
url: str = options.get("url", self.url)
url = url + str(self.url_postfix)
- headers: Dict[str, str] = {"Authorization": f"{self.token_type} {self.access_token}"}
- params: List[str] = [param.strip() for param in re.split(r";|,", options.get("params", self.params))]
+ headers: dict[str, str] = {"Authorization": f"{self.token_type} {self.access_token}"}
+ params: list[str] = [param.strip() for param in re.split(r";|,", options.get("params", self.params))]
new_params: str = "&".join(params)
url = f"{url}?{new_params}"
@@ -94,7 +96,7 @@ def _send_request(request: Any) -> Any:
raise OpenNeedsServiceException(f"Problem accessing {result.url}.\nReason: {result.text}")
return result
- def _extract_data(self, data: List[Dict[str, Any]], options: Dict[str, Any]) -> List[Dict[str, Any]]:
+ def _extract_data(self, data: list[dict[str, Any]], options: dict[str, Any]) -> list[dict[str, Any]]:
"""
Extract data of a list/dictionary, which was retrieved via send_request.
:param data: list or dict
diff --git a/sphinx_needs/utils.py b/sphinx_needs/utils.py
index 018ec9691..eb0fcccb7 100644
--- a/sphinx_needs/utils.py
+++ b/sphinx_needs/utils.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import cProfile
import importlib
import operator
@@ -5,18 +7,7 @@
import re
from functools import lru_cache, reduce, wraps
from re import Pattern
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- List,
- Optional,
- Tuple,
- Type,
- TypeVar,
- Union,
-)
+from typing import TYPE_CHECKING, Any, Callable, TypeVar
from urllib.parse import urlparse
from docutils import nodes
@@ -44,10 +35,10 @@
class NeedFunctionsType(TypedDict):
name: str
- function: "DynamicFunction"
+ function: DynamicFunction
-NEEDS_FUNCTIONS: Dict[str, NeedFunctionsType] = {}
+NEEDS_FUNCTIONS: dict[str, NeedFunctionsType] = {}
# List of internal need option names. They should not be used by or presented to user.
INTERNALS = [
@@ -109,7 +100,7 @@ class NeedFunctionsType(TypedDict):
]
-def split_need_id(need_id_full: str) -> Tuple[str, Optional[str]]:
+def split_need_id(need_id_full: str) -> tuple[str, str | None]:
"""A need id can be a combination of a main id and a part id,
split by a dot.
This function splits them:
@@ -128,7 +119,7 @@ def split_need_id(need_id_full: str) -> Tuple[str, Optional[str]]:
def row_col_maker(
app: Sphinx,
fromdocname: str,
- all_needs: Dict[str, NeedsInfoType],
+ all_needs: dict[str, NeedsInfoType],
need_info: NeedsInfoType,
need_key: str,
make_ref: bool = False,
@@ -155,7 +146,7 @@ def row_col_maker(
row_col = nodes.entry(classes=["needs_" + need_key])
para_col = nodes.paragraph()
- needs_string_links_option: List[str] = []
+ needs_string_links_option: list[str] = []
for v in needs_config.string_links.values():
needs_string_links_option.extend(v["options"])
@@ -252,7 +243,7 @@ def row_col_maker(
return row_col
-def rstjinja(app: Sphinx, docname: str, source: List[str]) -> None:
+def rstjinja(app: Sphinx, docname: str, source: list[str]) -> None:
"""
Render our pages as a jinja template for fancy templating goodness.
"""
@@ -267,7 +258,7 @@ def rstjinja(app: Sphinx, docname: str, source: List[str]) -> None:
source[0] = rendered
-def import_prefix_link_edit(needs: Dict[str, Any], id_prefix: str, needs_extra_links: List[Dict[str, Any]]) -> None:
+def import_prefix_link_edit(needs: dict[str, Any], id_prefix: str, needs_extra_links: list[dict[str, Any]]) -> None:
"""
Changes existing links to support given prefix.
Only link-ids get touched, which are part of ``needs`` (so are linking them).
@@ -348,7 +339,7 @@ def check_and_calc_base_url_rel_path(external_url: str, fromdocname: str) -> str
return ref_uri
-def check_and_get_external_filter_func(filter_func_ref: Optional[str]) -> Tuple[Any, str]:
+def check_and_get_external_filter_func(filter_func_ref: str | None) -> tuple[Any, str]:
"""Check and import filter function from external python file."""
# Check if external filter code is defined
filter_func = None
@@ -379,7 +370,7 @@ def check_and_get_external_filter_func(filter_func_ref: Optional[str]) -> Tuple[
return filter_func, filter_args
-def jinja_parse(context: Dict[str, Any], jinja_string: str) -> str:
+def jinja_parse(context: dict[str, Any], jinja_string: str) -> str:
"""
Function to parse mapping options set to a string containing jinja template format.
@@ -401,7 +392,7 @@ def jinja_parse(context: Dict[str, Any], jinja_string: str) -> str:
@lru_cache
-def import_matplotlib() -> Optional["matplotlib"]:
+def import_matplotlib() -> matplotlib | None:
"""Import and return matplotlib, or return None if it cannot be imported.
Also sets the interactive backend to ``Agg``, if ``DISPLAY`` is not set.
@@ -416,7 +407,7 @@ def import_matplotlib() -> Optional["matplotlib"]:
return matplotlib
-def save_matplotlib_figure(app: Sphinx, figure: "FigureBase", basename: str, fromdocname: str) -> nodes.image:
+def save_matplotlib_figure(app: Sphinx, figure: FigureBase, basename: str, fromdocname: str) -> nodes.image:
builder = app.builder
env = app.env
@@ -455,7 +446,7 @@ def save_matplotlib_figure(app: Sphinx, figure: "FigureBase", basename: str, fro
return image_node
-def dict_get(root: Dict[str, Any], items: Any, default: Any = None) -> Any:
+def dict_get(root: dict[str, Any], items: Any, default: Any = None) -> Any:
"""
Access a nested object in root by item sequence.
@@ -473,7 +464,7 @@ def dict_get(root: Dict[str, Any], items: Any, default: Any = None) -> Any:
def match_string_link(
- text_item: str, data: str, need_key: str, matching_link_confs: List[Dict[str, Any]], render_context: Dict[str, Any]
+ text_item: str, data: str, need_key: str, matching_link_confs: list[dict[str, Any]], render_context: dict[str, Any]
) -> Any:
try:
link_name = None
@@ -499,8 +490,8 @@ def match_string_link(
def match_variants(
- option_value: Union[str, List[str]], keywords: Dict[str, Any], needs_variants: Dict[str, str]
-) -> Union[None, str, List[str]]:
+ option_value: str | list[str], keywords: dict[str, Any], needs_variants: dict[str, str]
+) -> None | str | list[str]:
"""
Function to handle variant option management.
@@ -512,8 +503,8 @@ def match_variants(
"""
def variant_handling(
- variant_definitions: List[str], variant_data: Dict[str, Any], variant_pattern: Pattern # type: ignore[type-arg]
- ) -> Optional[str]:
+ variant_definitions: list[str], variant_data: dict[str, Any], variant_pattern: Pattern # type: ignore[type-arg]
+ ) -> str | None:
filter_context = variant_data
# filter_result = []
no_variants_in_option = False
@@ -564,7 +555,7 @@ def variant_handling(
# Handling multiple variant definitions
if isinstance(option_value, str):
- multiple_variants: List[str] = variant_splitting.split(rf"""{option_value}""")
+ multiple_variants: list[str] = variant_splitting.split(rf"""{option_value}""")
multiple_variants = [
re.sub(r"^([;, ]+)|([;, ]+$)", "", i) for i in multiple_variants if i not in (None, ";", "", " ")
]
@@ -605,7 +596,7 @@ def clean_log(data: str) -> str:
return clean_credentials
-def node_match(node_types: Union[Type[nodes.Element], List[Type[nodes.Element]]]) -> Callable[[nodes.Node], bool]:
+def node_match(node_types: type[nodes.Element] | list[type[nodes.Element]]) -> Callable[[nodes.Node], bool]:
"""
Returns a condition function for doctuils.nodes.findall()
@@ -627,13 +618,13 @@ def node_match(node_types: Union[Type[nodes.Element], List[Type[nodes.Element]]]
"""
node_types_list = node_types if isinstance(node_types, list) else [node_types]
- def condition(node: nodes.Node, node_types: List[Type[nodes.Element]] = node_types_list) -> bool:
+ def condition(node: nodes.Node, node_types: list[type[nodes.Element]] = node_types_list) -> bool:
return any(isinstance(node, x) for x in node_types)
return condition
-def add_doc(env: BuildEnvironment, docname: str, category: Optional[str] = None) -> None:
+def add_doc(env: BuildEnvironment, docname: str, category: str | None = None) -> None:
"""Stores a docname, to know later all need-relevant docs"""
docs = SphinxNeedsData(env).get_or_create_docs()
if docname not in docs["all"]:
@@ -646,7 +637,7 @@ def add_doc(env: BuildEnvironment, docname: str, category: Optional[str] = None)
docs[category].append(docname)
-def split_link_types(link_types: str, location: Any) -> List[str]:
+def split_link_types(link_types: str, location: Any) -> list[str]:
"""Split link_types string into list of link_types."""
def _is_valid(link_type: str) -> bool:
@@ -667,7 +658,7 @@ def _is_valid(link_type: str) -> bool:
)
-def get_scale(options: Dict[str, Any], location: Any) -> str:
+def get_scale(options: dict[str, Any], location: Any) -> str:
"""Get scale for diagram, from directive option."""
scale: str = options.get("scale", "100").replace("%", "")
if not scale.isdigit():
diff --git a/sphinx_needs/warnings.py b/sphinx_needs/warnings.py
index 8025b33ff..c110e1496 100644
--- a/sphinx_needs/warnings.py
+++ b/sphinx_needs/warnings.py
@@ -3,7 +3,7 @@
"""
-from typing import Dict, Optional
+from __future__ import annotations
from sphinx.application import Sphinx
from sphinx.util import logging
@@ -16,7 +16,7 @@
logger = get_logger(__name__)
-def process_warnings(app: Sphinx, exception: Optional[Exception]) -> None:
+def process_warnings(app: Sphinx, exception: Exception | None) -> None:
"""
Checks the configured warnings.
@@ -47,7 +47,7 @@ def process_warnings(app: Sphinx, exception: Optional[Exception]) -> None:
env.needs_warnings_executed = True # type: ignore[attr-defined]
# Exclude external needs for warnings check
- checked_needs: Dict[str, NeedsInfoType] = {}
+ checked_needs: dict[str, NeedsInfoType] = {}
for need_id, need in needs.items():
if not need["is_external"]:
checked_needs[need_id] = need
From bbdf7bf0ff906fb88c4ef232aa845ef9b470d53b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 16:11:33 +0100
Subject: [PATCH 09/24] Bump jinja2 from 3.1.2 to 3.1.3 (#1091)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
poetry.lock | 33 +++++++++++++++++++++++++++++----
1 file changed, 29 insertions(+), 4 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 9aaf90977..88dd7b3f0 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "alabaster"
@@ -228,6 +228,7 @@ files = [
{file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18a64814ae7bce73925131381603fff0116e2df25230dfc80d6d690aa6e20b37"},
{file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c81f22b4f572f8a2110b0b741bb64e5a6427e0a198b2cdc1fbaf85f352a3aa"},
{file = "contourpy-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53cc3a40635abedbec7f1bde60f8c189c49e84ac180c665f2cd7c162cc454baa"},
+ {file = "contourpy-1.1.0-cp310-cp310-win32.whl", hash = "sha256:9b2dd2ca3ac561aceef4c7c13ba654aaa404cf885b187427760d7f7d4c57cff8"},
{file = "contourpy-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:1f795597073b09d631782e7245016a4323cf1cf0b4e06eef7ea6627e06a37ff2"},
{file = "contourpy-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b7b04ed0961647691cfe5d82115dd072af7ce8846d31a5fac6c142dcce8b882"},
{file = "contourpy-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27bc79200c742f9746d7dd51a734ee326a292d77e7d94c8af6e08d1e6c15d545"},
@@ -236,6 +237,7 @@ files = [
{file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5cec36c5090e75a9ac9dbd0ff4a8cf7cecd60f1b6dc23a374c7d980a1cd710e"},
{file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0cbd657e9bde94cd0e33aa7df94fb73c1ab7799378d3b3f902eb8eb2e04a3a"},
{file = "contourpy-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:181cbace49874f4358e2929aaf7ba84006acb76694102e88dd15af861996c16e"},
+ {file = "contourpy-1.1.0-cp311-cp311-win32.whl", hash = "sha256:edb989d31065b1acef3828a3688f88b2abb799a7db891c9e282df5ec7e46221b"},
{file = "contourpy-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fb3b7d9e6243bfa1efb93ccfe64ec610d85cfe5aec2c25f97fbbd2e58b531256"},
{file = "contourpy-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcb41692aa09aeb19c7c213411854402f29f6613845ad2453d30bf421fe68fed"},
{file = "contourpy-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d123a5bc63cd34c27ff9c7ac1cd978909e9c71da12e05be0231c608048bb2ae"},
@@ -244,6 +246,7 @@ files = [
{file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:317267d915490d1e84577924bd61ba71bf8681a30e0d6c545f577363157e5e94"},
{file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d551f3a442655f3dcc1285723f9acd646ca5858834efeab4598d706206b09c9f"},
{file = "contourpy-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7a117ce7df5a938fe035cad481b0189049e8d92433b4b33aa7fc609344aafa1"},
+ {file = "contourpy-1.1.0-cp38-cp38-win32.whl", hash = "sha256:108dfb5b3e731046a96c60bdc46a1a0ebee0760418951abecbe0fc07b5b93b27"},
{file = "contourpy-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4f26b25b4f86087e7d75e63212756c38546e70f2a92d2be44f80114826e1cd4"},
{file = "contourpy-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc00bb4225d57bff7ebb634646c0ee2a1298402ec10a5fe7af79df9a51c1bfd9"},
{file = "contourpy-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:189ceb1525eb0655ab8487a9a9c41f42a73ba52d6789754788d1883fb06b2d8a"},
@@ -252,6 +255,7 @@ files = [
{file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:143dde50520a9f90e4a2703f367cf8ec96a73042b72e68fcd184e1279962eb6f"},
{file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e94bef2580e25b5fdb183bf98a2faa2adc5b638736b2c0a4da98691da641316a"},
{file = "contourpy-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ed614aea8462735e7d70141374bd7650afd1c3f3cb0c2dbbcbe44e14331bf002"},
+ {file = "contourpy-1.1.0-cp39-cp39-win32.whl", hash = "sha256:71551f9520f008b2950bef5f16b0e3587506ef4f23c734b71ffb7b89f8721999"},
{file = "contourpy-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:438ba416d02f82b692e371858143970ed2eb6337d9cdbbede0d8ad9f3d7dd17d"},
{file = "contourpy-1.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a698c6a7a432789e587168573a864a7ea374c6be8d4f31f9d87c001d5a843493"},
{file = "contourpy-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b0ac8a12880412da3551a8cb5a187d3298a72802b45a3bd1805e204ad8439"},
@@ -642,13 +646,13 @@ files = [
[[package]]
name = "jinja2"
-version = "3.1.2"
+version = "3.1.3"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
- {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"},
- {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"},
+ {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
+ {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
]
[package.dependencies]
@@ -966,6 +970,16 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -1686,6 +1700,7 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1693,8 +1708,16 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -1711,6 +1734,7 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -1718,6 +1742,7 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
From 8e2fcd687f00ec7af977ef0a77c40bc7fde5c714 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 16:12:21 +0100
Subject: [PATCH 10/24] Bump pillow from 10.1.0 to 10.2.0 (#1094)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
poetry.lock | 130 ++++++++++++++++++++++++++++++----------------------
1 file changed, 74 insertions(+), 56 deletions(-)
diff --git a/poetry.lock b/poetry.lock
index 88dd7b3f0..b5317979b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1211,70 +1211,88 @@ files = [
[[package]]
name = "pillow"
-version = "10.1.0"
+version = "10.2.0"
description = "Python Imaging Library (Fork)"
-optional = true
+optional = false
python-versions = ">=3.8"
files = [
- {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"},
- {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"},
- {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"},
- {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"},
- {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"},
- {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"},
- {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"},
- {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"},
- {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"},
- {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"},
- {file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"},
- {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"},
- {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"},
- {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"},
- {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"},
- {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"},
- {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"},
- {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"},
- {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"},
- {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"},
- {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"},
- {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"},
- {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"},
- {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"},
- {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"},
- {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"},
- {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"},
- {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"},
- {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"},
- {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"},
- {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"},
- {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"},
- {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"},
- {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"},
- {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"},
- {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"},
- {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"},
- {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"},
- {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"},
- {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"},
- {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"},
- {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"},
- {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"},
- {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"},
- {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"},
- {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"},
- {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"},
- {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"},
- {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"},
- {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"},
- {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"},
- {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"},
- {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"},
- {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"},
+ {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"},
+ {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"},
+ {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"},
+ {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"},
+ {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"},
+ {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"},
+ {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"},
+ {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"},
+ {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"},
+ {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"},
+ {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"},
+ {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"},
+ {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"},
+ {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"},
+ {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"},
+ {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"},
+ {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"},
+ {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"},
+ {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"},
+ {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"},
+ {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"},
+ {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"},
+ {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"},
+ {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"},
+ {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"},
+ {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"},
+ {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"},
+ {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"},
+ {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"},
+ {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"},
+ {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"},
+ {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"},
+ {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"},
+ {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"},
+ {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"},
+ {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"},
+ {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"},
+ {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"},
+ {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"},
+ {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"},
+ {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"},
+ {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"},
+ {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"},
+ {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"},
+ {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"},
+ {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"},
+ {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"},
+ {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"},
+ {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"},
+ {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"},
+ {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"},
+ {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"},
+ {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"},
+ {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"},
+ {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"},
+ {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"},
+ {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"},
+ {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"},
]
[package.extras]
docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"]
+fpx = ["olefile"]
+mic = ["olefile"]
tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"]
+typing = ["typing-extensions"]
+xmp = ["defusedxml"]
[[package]]
name = "pkgutil-resolve-name"
From 84a5f72f2e72ab1471ab2d1bb5c570d6115ef199 Mon Sep 17 00:00:00 2001
From: Chris Sewell
Date: Thu, 15 Feb 2024 15:51:32 +0000
Subject: [PATCH 11/24] =?UTF-8?q?=F0=9F=94=A7=20Replace=20black/isort/pyup?=
=?UTF-8?q?grade/flake8=20with=20ruff=20(#1080)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.flake8 | 3 -
.pre-commit-config.yaml | 29 +-
docs/conf.py | 108 ++++++--
noxfile.py | 10 +-
performance/performance_test.py | 54 +++-
pyproject.toml | 18 +-
sphinx_needs/api/configuration.py | 35 ++-
sphinx_needs/api/need.py | 78 ++++--
sphinx_needs/builder.py | 12 +-
sphinx_needs/config.py | 260 +++++++++++++-----
sphinx_needs/data.py | 11 +-
sphinx_needs/debug.py | 27 +-
sphinx_needs/diagrams_common.py | 45 ++-
sphinx_needs/directives/list2need.py | 51 +++-
sphinx_needs/directives/need.py | 80 ++++--
sphinx_needs/directives/needbar.py | 70 +++--
sphinx_needs/directives/needextend.py | 46 +++-
sphinx_needs/directives/needextract.py | 21 +-
sphinx_needs/directives/needfilter.py | 74 +++--
sphinx_needs/directives/needflow.py | 105 +++++--
sphinx_needs/directives/needgantt.py | 80 ++++--
sphinx_needs/directives/needimport.py | 57 +++-
sphinx_needs/directives/needlist.py | 29 +-
sphinx_needs/directives/needpie.py | 51 +++-
sphinx_needs/directives/needreport.py | 20 +-
sphinx_needs/directives/needsequence.py | 76 +++--
sphinx_needs/directives/needservice.py | 38 ++-
sphinx_needs/directives/needtable.py | 64 ++++-
sphinx_needs/directives/needuml.py | 107 +++++--
sphinx_needs/directives/utils.py | 24 +-
sphinx_needs/environment.py | 23 +-
sphinx_needs/external_needs.py | 43 ++-
sphinx_needs/filter_common.py | 66 ++++-
sphinx_needs/functions/common.py | 36 ++-
sphinx_needs/functions/functions.py | 119 ++++++--
sphinx_needs/layout.py | 206 +++++++++++---
sphinx_needs/need_constraints.py | 16 +-
sphinx_needs/needs.py | 116 ++++++--
sphinx_needs/needsfile.py | 32 ++-
sphinx_needs/roles/need_count.py | 5 +-
sphinx_needs/roles/need_func.py | 11 +-
sphinx_needs/roles/need_incoming.py | 18 +-
sphinx_needs/roles/need_outgoing.py | 19 +-
sphinx_needs/roles/need_part.py | 19 +-
sphinx_needs/roles/need_ref.py | 22 +-
sphinx_needs/services/config/github.py | 33 ++-
sphinx_needs/services/github.py | 115 +++++---
sphinx_needs/services/manager.py | 8 +-
sphinx_needs/services/open_needs.py | 38 ++-
sphinx_needs/utils.py | 142 +++++++---
sphinx_needs/warnings.py | 24 +-
tests/benchmarks/test_basic.py | 8 +-
tests/benchmarks/test_official.py | 18 +-
tests/conftest.py | 39 ++-
tests/data/service_github.py | 24 +-
tests/doc_test/api_doc/conf.py | 32 ++-
tests/doc_test/api_doc_awesome/conf.py | 32 ++-
tests/doc_test/arch_doc/conf.py | 32 ++-
tests/doc_test/broken_doc/conf.py | 32 ++-
tests/doc_test/broken_links/conf.py | 32 ++-
tests/doc_test/broken_statuses/conf.py | 32 ++-
tests/doc_test/broken_syntax_doc/conf.py | 32 ++-
tests/doc_test/broken_tags/conf.py | 32 ++-
tests/doc_test/broken_tags_2/conf.py | 32 ++-
tests/doc_test/doc_basic/conf.py | 32 ++-
tests/doc_test/doc_basic_latex/conf.py | 32 ++-
tests/doc_test/doc_build_latex/conf.py | 32 ++-
tests/doc_test/doc_df_calc_sum/conf.py | 38 ++-
.../doc_df_check_linked_values/conf.py | 32 ++-
tests/doc_test/doc_df_user_functions/conf.py | 32 ++-
tests/doc_test/doc_dynamic_functions/conf.py | 32 ++-
tests/doc_test/doc_export_id/conf.py | 32 ++-
tests/doc_test/doc_extra_links/conf.py | 32 ++-
tests/doc_test/doc_github_issue_21/conf.py | 32 ++-
tests/doc_test/doc_github_issue_44/conf.py | 32 ++-
tests/doc_test/doc_global_options/conf.py | 32 ++-
tests/doc_test/doc_layout/conf.py | 47 +++-
tests/doc_test/doc_list2need/conf.py | 32 ++-
tests/doc_test/doc_measure_time/conf.py | 32 ++-
tests/doc_test/doc_need_count/conf.py | 32 ++-
tests/doc_test/doc_need_delete/conf.py | 32 ++-
tests/doc_test/doc_need_id_from_title/conf.py | 32 ++-
tests/doc_test/doc_need_jinja_content/conf.py | 32 ++-
tests/doc_test/doc_need_parts/conf.py | 32 ++-
tests/doc_test/doc_needarch/conf.py | 41 ++-
.../doc_needarch_jinja_func_import/conf.py | 48 +++-
.../doc_needarch_jinja_func_need/conf.py | 41 ++-
.../doc_needarch_negative_tests/conf.py | 41 ++-
tests/doc_test/doc_needbar/conf.py | 16 +-
tests/doc_test/doc_needextend/conf.py | 32 ++-
tests/doc_test/doc_needextend_strict/conf.py | 32 ++-
tests/doc_test/doc_needextract/conf.py | 32 ++-
tests/doc_test/doc_needflow/conf.py | 32 ++-
.../doc_needflow_incl_child_needs/conf.py | 32 ++-
.../conf.py | 32 ++-
.../conf.py | 32 ++-
tests/doc_test/doc_needlist/conf.py | 32 ++-
tests/doc_test/doc_needpie/conf.py | 32 ++-
tests/doc_test/doc_needs_builder/conf.py | 32 ++-
.../doc_needs_builder_negative_tests/conf.py | 32 ++-
.../doc_needs_builder_parallel/conf.py | 32 ++-
.../doc_test/doc_needs_external_needs/conf.py | 44 ++-
.../doc_needs_external_needs_remote/conf.py | 32 ++-
.../conf.py | 38 ++-
tests/doc_test/doc_needs_filter_data/conf.py | 32 ++-
.../conf.py | 16 +-
tests/doc_test/doc_needs_warnings/conf.py | 44 ++-
.../conf.py | 32 ++-
tests/doc_test/doc_needsfile/conf.py | 32 ++-
tests/doc_test/doc_needtable/conf.py | 32 ++-
tests/doc_test/doc_needuml/conf.py | 41 ++-
.../doc_needuml_diagram_allowmixing/conf.py | 41 ++-
.../doc_needuml_duplicate_key/conf.py | 41 ++-
tests/doc_test/doc_needuml_filter/conf.py | 41 ++-
.../doc_needuml_jinja_func_flow/conf.py | 41 ++-
.../conf.py | 48 +++-
.../conf.py | 41 ++-
.../doc_needuml_jinja_func_ref/conf.py | 41 ++-
.../doc_needuml_key_name_diagram/conf.py | 41 ++-
tests/doc_test/doc_needuml_save/conf.py | 41 ++-
.../doc_needuml_save_with_abs_path/conf.py | 41 ++-
tests/doc_test/doc_open_needs_service/conf.py | 32 ++-
.../doc_report_dead_links_false/conf.py | 32 ++-
.../doc_report_dead_links_true/conf.py | 32 ++-
.../doc_role_need_max_title_length/conf.py | 32 ++-
.../conf.py | 32 ++-
tests/doc_test/doc_role_need_template/conf.py | 32 ++-
tests/doc_test/doc_style_blank/conf.py | 32 ++-
tests/doc_test/doc_style_custom/conf.py | 32 ++-
tests/doc_test/doc_style_modern/conf.py | 32 ++-
tests/doc_test/doc_style_unknown/conf.py | 32 ++-
tests/doc_test/external_doc/conf.py | 46 +++-
tests/doc_test/filter_doc/conf.py | 56 +++-
tests/doc_test/generic_doc/conf.py | 32 ++-
tests/doc_test/import_doc/conf.py | 40 ++-
tests/doc_test/import_doc_empty/conf.py | 40 ++-
tests/doc_test/import_doc_invalid/conf.py | 40 ++-
tests/doc_test/need_constraints/conf.py | 44 ++-
.../doc_test/need_constraints_failed/conf.py | 44 ++-
.../needextract_with_nested_needs/conf.py | 32 ++-
.../doc_test/needpie_with_zero_needs/conf.py | 32 ++-
tests/doc_test/non_exists_file_import/conf.py | 40 ++-
tests/doc_test/parallel_doc/conf.py | 32 ++-
tests/doc_test/role_need_doc/conf.py | 46 +++-
tests/doc_test/service_doc/conf.py | 32 ++-
tests/doc_test/unicode_support/conf.py | 32 ++-
tests/doc_test/variant_doc/conf.py | 32 ++-
tests/doc_test/variant_options/conf.py | 32 ++-
tests/no_mpl_tests.py | 12 +-
tests/test_add_sections.py | 6 +-
tests/test_api_configuration.py | 10 +-
tests/test_api_usage_in_extension.py | 4 +-
tests/test_arch.py | 4 +-
tests/test_basic_doc.py | 96 +++++--
tests/test_broken_doc.py | 6 +-
tests/test_broken_links.py | 4 +-
tests/test_broken_statuses.py | 6 +-
tests/test_broken_syntax_doc.py | 6 +-
tests/test_broken_tags.py | 12 +-
tests/test_clean_log.py | 8 +-
tests/test_complex_builders.py | 18 +-
tests/test_doc_build_latex.py | 6 +-
tests/test_dynamic_functions.py | 48 +++-
tests/test_export_id.py | 12 +-
tests/test_external.py | 28 +-
tests/test_extra_links.py | 16 +-
tests/test_extra_options.py | 10 +-
tests/test_filter.py | 12 +-
tests/test_github_issues.py | 20 +-
tests/test_global_options.py | 6 +-
tests/test_import.py | 62 ++++-
tests/test_jinja_content_option.py | 4 +-
tests/test_layouts.py | 9 +-
tests/test_list2need.py | 14 +-
tests/test_multiple_link_backs.py | 6 +-
tests/test_need_constraints.py | 23 +-
tests/test_need_count.py | 6 +-
tests/test_need_delete_option.py | 6 +-
tests/test_need_id_from_title.py | 12 +-
tests/test_need_parts.py | 10 +-
tests/test_needarch.py | 26 +-
tests/test_needbar.py | 6 +-
tests/test_needextend.py | 29 +-
tests/test_needextract.py | 24 +-
tests/test_needextract_with_nested_needs.py | 10 +-
tests/test_needflow.py | 30 +-
tests/test_needimport_noindex.py | 6 +-
tests/test_needlist.py | 6 +-
tests/test_needpie.py | 6 +-
tests/test_needpie_with_zero_needs.py | 4 +-
tests/test_needreport.py | 11 +-
tests/test_needs_builder.py | 27 +-
tests/test_needs_external_needs_build.py | 190 ++++++++++---
tests/test_needs_filter_data.py | 16 +-
tests/test_needs_id_builder.py | 8 +-
tests/test_needs_warning.py | 39 ++-
tests/test_needsfile.py | 6 +-
tests/test_needtable.py | 30 +-
tests/test_needuml.py | 105 +++++--
tests/test_open_needs_service.py | 30 +-
tests/test_parallel_execution.py | 8 +-
tests/test_report_dead_links.py | 24 +-
tests/test_role_need.py | 9 +-
tests/test_role_need_max_title_length.py | 11 +-
tests/test_role_need_template.py | 4 +-
tests/test_services/test_service_basics.py | 11 +-
tests/test_styles/test_style_blank.py | 6 +-
.../test_style_css_js_registration.py | 24 +-
tests/test_styles/test_style_custom.py | 6 +-
tests/test_styles/test_style_modern.py | 6 +-
tests/test_styles/test_style_unknown.py | 6 +-
tests/test_test_doc.py | 6 +-
tests/test_title_from_content.py | 6 +-
tests/test_title_optional.py | 10 +-
tests/test_unicode.py | 10 +-
tests/test_unsafe_filter_for_filter_func.py | 9 +-
tests/test_variants.py | 11 +-
tests/util.py | 12 +-
218 files changed, 6067 insertions(+), 1358 deletions(-)
delete mode 100644 .flake8
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index ba265fe89..000000000
--- a/.flake8
+++ /dev/null
@@ -1,3 +0,0 @@
-[flake8]
-max-line-length = 120
-extend-ignore = E501, E203, B028
\ No newline at end of file
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4312eb7b9..a755133cc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,30 +1,11 @@
repos:
- - repo: https://github.com/psf/black
- rev: 24.2.0
- hooks:
- - id: black
-
- - repo: https://github.com/PyCQA/flake8
- rev: 7.0.0
- hooks:
- - id: flake8
- additional_dependencies:
- - flake8-bugbear
- - flake8-comprehensions
- - flake8-simplify
- - pep8-naming
-
- - repo: https://github.com/pycqa/isort
- rev: 5.13.2
- hooks:
- - id: isort
- - repo: https://github.com/asottile/pyupgrade
- rev: v3.15.0
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.1.6
hooks:
- - id: pyupgrade
- args:
- - --py38-plus
+ - id: ruff
+ args: [--fix]
+ - id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.8.0
diff --git a/docs/conf.py b/docs/conf.py
index c9826cfd5..f9355ad77 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -55,7 +55,9 @@
needs_debug_measurement = True
add_module_names = False # Used to shorten function name output
-autodoc_docstring_signature = True # Used to read spec. func-defs from docstring (e.g. get rid of self)
+autodoc_docstring_signature = (
+ True # Used to read spec. func-defs from docstring (e.g. get rid of self)
+)
NOTE_TEMPLATE = """
.. _{{id}}:
@@ -80,9 +82,7 @@
{% endif %}
"""
-DEFAULT_DIAGRAM_TEMPLATE = (
- "{{type_name}}\\n**{{title|wordwrap(15, wrapstring='**\\\\n**')}}**\\n{{id}}"
-)
+DEFAULT_DIAGRAM_TEMPLATE = "{{type_name}}\\n**{{title|wordwrap(15, wrapstring='**\\\\n**')}}**\\n{{id}}"
# You can uncomment some of the following lines to override the default configuration for Sphinx-Needs.
# needs_diagram_template = DEFAULT_DIAGRAM_TEMPLATE
@@ -110,16 +110,71 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "title": "System", "content": "plantuml", "prefix": "S_", "color": "#BFD8D2", "style": "card"},
+ {
+ "directive": "sys",
+ "title": "System",
+ "content": "plantuml",
+ "prefix": "S_",
+ "color": "#BFD8D2",
+ "style": "card",
+ },
# Normal types
- {"directive": "req", "title": "Requirement", "prefix": "R_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "S_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "I_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "T_", "color": "#DCB239", "style": "node"},
- {"directive": "feature", "title": "Feature", "prefix": "F_", "color": "#FFCC00", "style": "node"},
- {"directive": "user", "title": "User", "prefix": "U_", "color": "#777777", "style": "node"},
- {"directive": "action", "title": "Action", "prefix": "A_", "color": "#FFCC00", "style": "node"},
- {"directive": "milestone", "title": "Milestone", "prefix": "M_", "color": "#FF3333", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "R_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "S_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "I_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "T_",
+ "color": "#DCB239",
+ "style": "node",
+ },
+ {
+ "directive": "feature",
+ "title": "Feature",
+ "prefix": "F_",
+ "color": "#FFCC00",
+ "style": "node",
+ },
+ {
+ "directive": "user",
+ "title": "User",
+ "prefix": "U_",
+ "color": "#777777",
+ "style": "node",
+ },
+ {
+ "directive": "action",
+ "title": "Action",
+ "prefix": "A_",
+ "color": "#FFCC00",
+ "style": "node",
+ },
+ {
+ "directive": "milestone",
+ "title": "Milestone",
+ "prefix": "M_",
+ "color": "#FF3333",
+ "style": "node",
+ },
]
needs_extra_links = [
@@ -208,7 +263,9 @@
needs_id_required = False
# needs_css = "dark.css"
-local_plantuml_path = os.path.join(os.path.dirname(__file__), "utils", "plantuml-1.2022.14.jar")
+local_plantuml_path = os.path.join(
+ os.path.dirname(__file__), "utils", "plantuml-1.2022.14.jar"
+)
plantuml = f"java -Djava.awt.headless=true -jar {local_plantuml_path}"
# plantuml_output_format = 'png'
@@ -246,7 +303,10 @@
"grid": "simple_side_right_partial",
"layout": {
"head": ['**< >** for *< >*'],
- "meta": ['**status**: < >', '**author**: < >'],
+ "meta": [
+ '**status**: < >',
+ '**author**: < >',
+ ],
"side": ['<>'],
},
},
@@ -450,14 +510,22 @@ def custom_defined_func():
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- (master_doc, "needstestdocs.tex", "needs test docs Documentation", "team useblocks", "manual"),
+ (
+ master_doc,
+ "needstestdocs.tex",
+ "needs test docs Documentation",
+ "team useblocks",
+ "manual",
+ ),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [(master_doc, "needstestdocs", "needs test docs Documentation", [author], 1)]
+man_pages = [
+ (master_doc, "needstestdocs", "needs test docs Documentation", [author], 1)
+]
# -- Options for Texinfo output -------------------------------------------
@@ -478,7 +546,11 @@ def custom_defined_func():
# contains different constraints
needs_constraints = {
- "critical": {"check_0": "'critical' in tags", "check_1": "'SECURITY_REQ' in links", "severity": "CRITICAL"},
+ "critical": {
+ "check_0": "'critical' in tags",
+ "check_1": "'SECURITY_REQ' in links",
+ "severity": "CRITICAL",
+ },
"security": {"check_0": "'security' in tags", "severity": "HIGH"},
"team": {"check_0": 'author == "Bob"', "severity": "LOW"},
}
diff --git a/noxfile.py b/noxfile.py
index 467cb545e..a67fef8da 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -72,7 +72,15 @@ def pre_commit(session):
def linkcheck(session):
session.install(".[docs]")
with session.chdir("docs"):
- session.run("sphinx-build", "-b", "linkcheck", ".", "_build/linkcheck", *session.posargs, external=True)
+ session.run(
+ "sphinx-build",
+ "-b",
+ "linkcheck",
+ ".",
+ "_build/linkcheck",
+ *session.posargs,
+ external=True,
+ )
@session(python="3.11")
diff --git a/performance/performance_test.py b/performance/performance_test.py
index 1733f6328..c7ca7e368 100644
--- a/performance/performance_test.py
+++ b/performance/performance_test.py
@@ -23,7 +23,15 @@ def cli():
def start(
- needs=1000, needtables=0, dummies=0, pages=1, parallel=1, keep=False, browser=False, debug=False, basic=False
+ needs=1000,
+ needtables=0,
+ dummies=0,
+ pages=1,
+ parallel=1,
+ keep=False,
+ browser=False,
+ debug=False,
+ basic=False,
):
"""
Test run implementation
@@ -163,11 +171,31 @@ def start(
@cli.command()
-@click.option("--profile", default=[], type=str, multiple=True, help="Activates profiling for given area")
-@click.option("--needs", default=[50, 10], type=int, multiple=True, help="Number of maximum needs.")
-@click.option("--needtables", default=-1, type=int, help="Number of maximum needtables.")
+@click.option(
+ "--profile",
+ default=[],
+ type=str,
+ multiple=True,
+ help="Activates profiling for given area",
+)
+@click.option(
+ "--needs",
+ default=[50, 10],
+ type=int,
+ multiple=True,
+ help="Number of maximum needs.",
+)
+@click.option(
+ "--needtables", default=-1, type=int, help="Number of maximum needtables."
+)
@click.option("--dummies", default=-1, type=int, help="Number of standard rst dummies.")
-@click.option("--pages", default=[5, 1], type=int, multiple=True, help="Number of additional pages with needs.")
+@click.option(
+ "--pages",
+ default=[5, 1],
+ type=int,
+ multiple=True,
+ help="Number of additional pages with needs.",
+)
@click.option(
"--parallel",
default=[1, 4],
@@ -177,9 +205,19 @@ def start(
)
@click.option("--keep", is_flag=True, help="Keeps the temporary src and build folders")
@click.option("--browser", is_flag=True, help="Opens the project in your browser")
-@click.option("--snakeviz", is_flag=True, help="Opens snakeviz view for measured profiles in browser")
-@click.option("--debug", is_flag=True, help="Prints more information, incl. sphinx build output")
-@click.option("--basic", is_flag=True, help="Use only default config of Sphinx-Needs (e.g. no extra options)")
+@click.option(
+ "--snakeviz",
+ is_flag=True,
+ help="Opens snakeviz view for measured profiles in browser",
+)
+@click.option(
+ "--debug", is_flag=True, help="Prints more information, incl. sphinx build output"
+)
+@click.option(
+ "--basic",
+ is_flag=True,
+ help="Use only default config of Sphinx-Needs (e.g. no extra options)",
+)
def series(
profile,
needs,
diff --git a/pyproject.toml b/pyproject.toml
index 054de6c18..23fe436da 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -101,11 +101,19 @@ markers = [
"jstest: marks tests as JavaScript test (deselect with '-m \"not jstest\"')",
]
-[tool.black]
-line-length = 120
-
-[tool.isort]
-profile = "black"
+[tool.ruff]
+extend-select = [
+ "B", # flake8-bugbear
+ "C4", # flake8-comprehensions
+ "I", # isort
+ "ICN", # flake8-import-conventions
+ "ISC", # flake8-implicit-str-concat
+ "N", # pep8-naming
+ "RUF", # Ruff-specific rules
+ "SIM", # flake8-simplify
+ "UP", # pyupgrade
+]
+extend-ignore = ["B904", "ISC001", "ICN001", "N818", "RUF005", "RUF013", "RUF012", "SIM108", "SIM118"]
[tool.mypy]
files = "sphinx_needs"
diff --git a/sphinx_needs/api/configuration.py b/sphinx_needs/api/configuration.py
index 8b06ab045..1197cd8c4 100644
--- a/sphinx_needs/api/configuration.py
+++ b/sphinx_needs/api/configuration.py
@@ -37,7 +37,12 @@ def get_need_types(app: Sphinx) -> list[str]:
def add_need_type(
- app: Sphinx, directive: str, title: str, prefix: str, color: str = "#ffffff", style: str = "node"
+ app: Sphinx,
+ directive: str,
+ title: str,
+ prefix: str,
+ color: str = "#ffffff",
+ style: str = "node",
) -> None:
"""
Adds a new need_type to the configuration.
@@ -68,7 +73,15 @@ def add_need_type(
if directive in type_names:
raise NeedsApiConfigException(f"{directive} already exists as need type")
- needs_types.append({"directive": directive, "title": title, "prefix": prefix, "color": color, "style": style})
+ needs_types.append(
+ {
+ "directive": directive,
+ "title": title,
+ "prefix": prefix,
+ "color": color,
+ "style": style,
+ }
+ )
app.add_directive(directive, sphinx_needs.directives.need.NeedDirective)
@@ -93,7 +106,9 @@ def add_extra_option(app: Sphinx, name: str) -> None:
NEEDS_CONFIG.extra_options[name] = directives.unchanged
-def add_dynamic_function(app: Sphinx, function: DynamicFunction, name: str | None = None) -> None:
+def add_dynamic_function(
+ app: Sphinx, function: DynamicFunction, name: str | None = None
+) -> None:
"""
Registers a new dynamic function for sphinx-needs.
@@ -124,7 +139,12 @@ def my_function(app, need, needs, *args, **kwargs):
WarningCheck = Callable[[NeedsInfoType, SphinxLoggerAdapter], bool]
-def add_warning(app: Sphinx, name: str, function: WarningCheck | None = None, filter_string: str | None = None) -> None:
+def add_warning(
+ app: Sphinx,
+ name: str,
+ function: WarningCheck | None = None,
+ filter_string: str | None = None,
+) -> None:
"""
Registers a warning.
@@ -137,11 +157,14 @@ def add_warning(app: Sphinx, name: str, function: WarningCheck | None = None, fi
:return: None
"""
if function is None and filter_string is None:
- raise NeedsApiConfigException("Function or filter_string must be given for add_warning_func")
+ raise NeedsApiConfigException(
+ "Function or filter_string must be given for add_warning_func"
+ )
if function is not None and filter_string is not None:
raise NeedsApiConfigException(
- "For add_warning_func only function or filter_string is allowed to be set, " "not both."
+ "For add_warning_func only function or filter_string is allowed to be set, "
+ "not both."
)
warning_check = function or filter_string
diff --git a/sphinx_needs/api/need.py b/sphinx_needs/api/need.py
index 1a1b87f8b..32dec8513 100644
--- a/sphinx_needs/api/need.py
+++ b/sphinx_needs/api/need.py
@@ -152,8 +152,8 @@ def run():
configured_need_types = [ntype["directive"] for ntype in types]
if need_type not in configured_need_types:
logger.warning(
- "Couldn't create need {}. Reason: The need-type (i.e. `{}`) is not set "
- "in the project's 'need_types' configuration in conf.py. [needs]".format(id, need_type),
+ f"Couldn't create need {id}. Reason: The need-type (i.e. `{need_type}`) is not set "
+ "in the project's 'need_types' configuration in conf.py. [needs]",
type="needs",
)
@@ -161,7 +161,9 @@ def run():
if ntype["directive"] == need_type:
type_name = ntype["title"]
type_prefix = ntype["prefix"]
- type_color = ntype["color"] or "#000000" # if no color set up user in config
+ type_color = (
+ ntype["color"] or "#000000"
+ ) # if no color set up user in config
type_style = ntype["style"] or "node" # if no style set up user in config
found = True
break
@@ -184,7 +186,7 @@ def run():
if id is None and needs_config.id_required:
raise NeedsNoIdException(
"An id is missing for this need and must be set, because 'needs_id_required' "
- "is set to True in conf.py. Need '{}' in {} ({})".format(title, docname, lineno)
+ f"is set to True in conf.py. Need '{title}' in {docname} ({lineno})"
)
if id is None:
@@ -193,13 +195,18 @@ def run():
need_id = id
if needs_config.id_regex and not re.match(needs_config.id_regex, need_id):
- raise NeedsInvalidException(f"Given ID '{need_id}' does not match configured regex '{needs_config.id_regex}'")
+ raise NeedsInvalidException(
+ f"Given ID '{need_id}' does not match configured regex '{needs_config.id_regex}'"
+ )
# Handle status
# Check if status is in needs_statuses. If not raise an error.
- if needs_config.statuses and status not in [stat["name"] for stat in needs_config.statuses]:
+ if needs_config.statuses and status not in [
+ stat["name"] for stat in needs_config.statuses
+ ]:
raise NeedsStatusNotAllowed(
- f"Status {status} of need id {need_id} is not allowed " "by config value 'needs_statuses'."
+ f"Status {status} of need id {need_id} is not allowed "
+ "by config value 'needs_statuses'."
)
if tags is None:
@@ -212,7 +219,8 @@ def run():
for i in range(len(tags)):
if len(tags[i]) == 0 or tags[i].isspace():
logger.warning(
- f"Scruffy tag definition found in need {need_id}. " "Defined tag contains spaces only. [needs]",
+ f"Scruffy tag definition found in need {need_id}. "
+ "Defined tag contains spaces only. [needs]",
type="needs",
)
else:
@@ -225,7 +233,8 @@ def run():
needs_tags = [tag["name"] for tag in needs_config.tags]
if tag not in needs_tags:
raise NeedsTagNotAllowed(
- f"Tag {tag} of need id {need_id} is not allowed " "by config value 'needs_tags'."
+ f"Tag {tag} of need id {need_id} is not allowed "
+ "by config value 'needs_tags'."
)
# This may have cut also dynamic function strings, as they can contain , as well.
# So let put them together again
@@ -237,7 +246,9 @@ def run():
if len(constraints) > 0:
# tags should be a string, but it can also be already a list,which can be used.
if isinstance(constraints, str):
- constraints = [constraint.strip() for constraint in re.split("[;,]", constraints)]
+ constraints = [
+ constraint.strip() for constraint in re.split("[;,]", constraints)
+ ]
new_constraints = [] # Shall contain only valid constraints
for i in range(len(constraints)):
@@ -356,9 +367,9 @@ def run():
for keyword in kwargs:
if keyword not in needs_extra_option_names and keyword not in link_names:
raise NeedsInvalidOption(
- "Unknown Option {}. "
+ f"Unknown Option {keyword}. "
"Use needs_extra_options or needs_extra_links in conf.py"
- "to define this option.".format(keyword)
+ "to define this option."
)
# Merge links
@@ -366,11 +377,15 @@ def run():
for link_type in needs_config.extra_links:
# Check, if specific link-type got some arguments during method call
- if link_type["option"] not in kwargs and link_type["option"] not in needs_global_options:
+ if (
+ link_type["option"] not in kwargs
+ and link_type["option"] not in needs_global_options
+ ):
# if not we set no links, but entry in needS_info must be there
links = []
elif link_type["option"] in needs_global_options and (
- link_type["option"] not in kwargs or len(str(kwargs[link_type["option"]])) == 0
+ link_type["option"] not in kwargs
+ or len(str(kwargs[link_type["option"]])) == 0
):
# If it is in global option, value got already set during prior handling of them
links_string = needs_info[link_type["option"]]
@@ -585,7 +600,9 @@ def _prepare_template(app: Sphinx, needs_info, template_key: str) -> str:
template_folder = os.path.join(app.srcdir, template_folder)
if not os.path.isdir(template_folder):
- raise NeedsTemplateException(f"Template folder does not exist: {template_folder}")
+ raise NeedsTemplateException(
+ f"Template folder does not exist: {template_folder}"
+ )
template_file_name = needs_info[template_key] + ".need"
template_path = os.path.join(template_folder, template_file_name)
@@ -600,7 +617,9 @@ def _prepare_template(app: Sphinx, needs_info, template_key: str) -> str:
return new_content
-def _render_template(content: str, docname: str, lineno: int, state: RSTState) -> nodes.Element:
+def _render_template(
+ content: str, docname: str, lineno: int, state: RSTState
+) -> nodes.Element:
rst = StringList()
for line in content.split("\n"):
rst.append(line, docname, lineno)
@@ -610,7 +629,9 @@ def _render_template(content: str, docname: str, lineno: int, state: RSTState) -
return node_need_content
-def _render_plantuml_template(content: str, docname: str, lineno: int, state: RSTState) -> nodes.Element:
+def _render_plantuml_template(
+ content: str, docname: str, lineno: int, state: RSTState
+) -> nodes.Element:
rst = StringList()
rst.append(".. needuml::", docname, lineno)
rst.append("", docname, lineno) # Empty option line for needuml
@@ -636,7 +657,8 @@ def _read_in_links(links_string: str | list[str]) -> list[str]:
for link in link_list:
if link.isspace():
logger.warning(
- f"Grubby link definition found in need {id}. " "Defined link contains spaces only. [needs]",
+ f"Grubby link definition found in need {id}. "
+ "Defined link contains spaces only. [needs]",
type="needs",
)
else:
@@ -648,7 +670,13 @@ def _read_in_links(links_string: str | list[str]) -> list[str]:
return _fix_list_dyn_func(links)
-def make_hashed_id(app: Sphinx, need_type: str, full_title: str, content: str, id_length: int | None = None) -> str:
+def make_hashed_id(
+ app: Sphinx,
+ need_type: str,
+ full_title: str,
+ content: str,
+ id_length: int | None = None,
+) -> str:
"""
Creates an ID based on title or need.
@@ -671,7 +699,9 @@ def make_hashed_id(app: Sphinx, need_type: str, full_title: str, content: str, i
type_prefix = ntype["prefix"]
break
if type_prefix is None:
- raise NeedsInvalidException(f"Given need_type {need_type} is unknown. File {app.env.docname}")
+ raise NeedsInvalidException(
+ f"Given need_type {need_type} is unknown. File {app.env.docname}"
+ )
hashable_content = full_title or "\n".join(content)
hashed_id = hashlib.sha1(hashable_content.encode("UTF-8")).hexdigest().upper()
@@ -764,11 +794,15 @@ def _merge_global_options(app: Sphinx, needs_info, global_options) -> None:
for single_value in values:
if len(single_value) < 2 or len(single_value) > 3:
- raise NeedsInvalidException(f"global option tuple has wrong amount of parameters: {key}")
+ raise NeedsInvalidException(
+ f"global option tuple has wrong amount of parameters: {key}"
+ )
if filter_single_need(needs_info, config, single_value[1]):
# Set value, if filter has matched
needs_info[key] = single_value[0]
- elif len(single_value) == 3 and (key not in needs_info.keys() or len(str(needs_info[key])) > 0):
+ elif len(single_value) == 3 and (
+ key not in needs_info.keys() or len(str(needs_info[key])) > 0
+ ):
# Otherwise set default, but only if no value was set before or value is "" and a default is defined
needs_info[key] = single_value[2]
else:
diff --git a/sphinx_needs/builder.py b/sphinx_needs/builder.py
index e1eaf7994..b4ebea207 100644
--- a/sphinx_needs/builder.py
+++ b/sphinx_needs/builder.py
@@ -50,7 +50,9 @@ def write(
if not SphinxNeedsData(self.env).has_export_filters:
return
LOGGER.warning(
- "At least one use of `export_id` directive option, requires a slower build", type="needs", subtype="build"
+ "At least one use of `export_id` directive option, requires a slower build",
+ type="needs",
+ subtype="build",
)
return super().write(build_docnames, updated_docnames, method)
@@ -70,7 +72,9 @@ def finish(self) -> None:
# check if needs.json file exists in conf.py directory
needs_json = os.path.join(self.srcdir, "needs.json")
if os.path.exists(needs_json):
- LOGGER.info("needs.json found, but will not be used because needs_file not configured.")
+ LOGGER.info(
+ "needs.json found, but will not be used because needs_file not configured."
+ )
# Clean needs_list from already stored needs of the current version.
# This is needed as needs could have been removed from documentation and if this is the case,
@@ -170,7 +174,9 @@ def finish(self) -> None:
post_process_needs_data(self.app)
data = SphinxNeedsData(self.env)
- needs = data.get_or_create_needs().values() # We need a list of needs for later filter checks
+ needs = (
+ data.get_or_create_needs().values()
+ ) # We need a list of needs for later filter checks
version = getattr(self.env.config, "version", "unset")
needs_config = NeedsSphinxConfig(self.env.config)
filter_string = needs_config.builder_filter
diff --git a/sphinx_needs/config.py b/sphinx_needs/config.py
index a988074fe..785436f7e 100644
--- a/sphinx_needs/config.py
+++ b/sphinx_needs/config.py
@@ -26,7 +26,9 @@ class Config:
def __init__(self) -> None:
self._extra_options: dict[str, Callable[[str], Any]] = {}
- self._warnings: dict[str, str | Callable[[NeedsInfoType, SphinxLoggerAdapter], bool]] = {}
+ self._warnings: dict[
+ str, str | Callable[[NeedsInfoType, SphinxLoggerAdapter], bool]
+ ] = {}
def clear(self) -> None:
self._extra_options = {}
@@ -44,7 +46,9 @@ def extra_options(self) -> dict[str, Callable[[str], Any]]:
return self._extra_options
@property
- def warnings(self) -> dict[str, str | Callable[[NeedsInfoType, SphinxLoggerAdapter], bool]]:
+ def warnings(
+        self,
+ ) -> dict[str, str | Callable[[NeedsInfoType, SphinxLoggerAdapter], bool]]:
"""Warning handlers that are added by the user,
then called at the end of the build.
"""
@@ -130,58 +134,117 @@ def __setattr__(self, name: str, value: Any) -> None:
metadata={"rebuild": "html", "types": ()},
)
"""Custom user need types"""
- include_needs: bool = field(default=True, metadata={"rebuild": "html", "types": (bool,)})
- need_name: str = field(default="Need", metadata={"rebuild": "html", "types": (str,)})
- spec_name: str = field(default="Specification", metadata={"rebuild": "html", "types": (str,)})
- id_prefix_needs: str = field(default="", metadata={"rebuild": "html", "types": (str,)})
- id_prefix_specs: str = field(default="", metadata={"rebuild": "html", "types": (str,)})
+ include_needs: bool = field(
+ default=True, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ need_name: str = field(
+ default="Need", metadata={"rebuild": "html", "types": (str,)}
+ )
+ spec_name: str = field(
+ default="Specification", metadata={"rebuild": "html", "types": (str,)}
+ )
+ id_prefix_needs: str = field(
+ default="", metadata={"rebuild": "html", "types": (str,)}
+ )
+ id_prefix_specs: str = field(
+ default="", metadata={"rebuild": "html", "types": (str,)}
+ )
id_length: int = field(default=5, metadata={"rebuild": "html", "types": (int,)})
- id_from_title: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- specs_show_needlist: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- id_required: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- id_regex: str = field(default="^[A-Z0-9_]{5,}", metadata={"rebuild": "html", "types": ()})
- show_link_type: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- show_link_title: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- show_link_id: bool = field(default=True, metadata={"rebuild": "html", "types": (bool,)})
+ id_from_title: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ specs_show_needlist: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ id_required: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ id_regex: str = field(
+ default="^[A-Z0-9_]{5,}", metadata={"rebuild": "html", "types": ()}
+ )
+ show_link_type: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ show_link_title: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ show_link_id: bool = field(
+ default=True, metadata={"rebuild": "html", "types": (bool,)}
+ )
file: None | str = field(default=None, metadata={"rebuild": "html", "types": ()})
table_columns: str = field(
- default="ID;TITLE;STATUS;TYPE;OUTGOING;TAGS", metadata={"rebuild": "html", "types": (str,)}
- )
- table_style: str = field(default="DATATABLES", metadata={"rebuild": "html", "types": (str,)})
- role_need_template: str = field(default="{title} ({id})", metadata={"rebuild": "html", "types": (str,)})
- role_need_max_title_length: int = field(default=30, metadata={"rebuild": "html", "types": (int,)})
- extra_options: list[str] = field(default_factory=list, metadata={"rebuild": "html", "types": (list,)})
- title_optional: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- max_title_length: int = field(default=-1, metadata={"rebuild": "html", "types": (int,)})
- title_from_content: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
+ default="ID;TITLE;STATUS;TYPE;OUTGOING;TAGS",
+ metadata={"rebuild": "html", "types": (str,)},
+ )
+ table_style: str = field(
+ default="DATATABLES", metadata={"rebuild": "html", "types": (str,)}
+ )
+ role_need_template: str = field(
+ default="{title} ({id})", metadata={"rebuild": "html", "types": (str,)}
+ )
+ role_need_max_title_length: int = field(
+ default=30, metadata={"rebuild": "html", "types": (int,)}
+ )
+ extra_options: list[str] = field(
+ default_factory=list, metadata={"rebuild": "html", "types": (list,)}
+ )
+ title_optional: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ max_title_length: int = field(
+ default=-1, metadata={"rebuild": "html", "types": (int,)}
+ )
+ title_from_content: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
diagram_template: str = field(
default=DEFAULT_DIAGRAM_TEMPLATE,
metadata={"rebuild": "html", "types": (str,)},
)
- functions: list[Callable[..., Any]] = field(default_factory=list, metadata={"rebuild": "html", "types": (list,)})
- global_options: dict[str, Any] = field(default_factory=dict, metadata={"rebuild": "html", "types": (dict,)})
- duration_option: str = field(default="duration", metadata={"rebuild": "html", "types": (str,)})
- completion_option: str = field(default="completion", metadata={"rebuild": "html", "types": (str,)})
- needextend_strict: bool = field(default=True, metadata={"rebuild": "html", "types": (bool,)})
- statuses: list[dict[str, str]] = field(default_factory=list, metadata={"rebuild": "html", "types": ()})
+ functions: list[Callable[..., Any]] = field(
+ default_factory=list, metadata={"rebuild": "html", "types": (list,)}
+ )
+ global_options: dict[str, Any] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": (dict,)}
+ )
+ duration_option: str = field(
+ default="duration", metadata={"rebuild": "html", "types": (str,)}
+ )
+ completion_option: str = field(
+ default="completion", metadata={"rebuild": "html", "types": (str,)}
+ )
+ needextend_strict: bool = field(
+ default=True, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ statuses: list[dict[str, str]] = field(
+ default_factory=list, metadata={"rebuild": "html", "types": ()}
+ )
"""If given, only the defined status are allowed.
Values needed for each status:
* name
* description
Example: [{"name": "open", "description": "open status"}, {...}, {...}]
"""
- tags: list[dict[str, str]] = field(default_factory=list, metadata={"rebuild": "html", "types": (list,)})
+ tags: list[dict[str, str]] = field(
+ default_factory=list, metadata={"rebuild": "html", "types": (list,)}
+ )
"""If given, only the defined tags are allowed.
Values needed for each tag:
* name
* description
Example: [{"name": "new", "description": "new needs"}, {...}, {...}]
"""
- css: str = field(default="modern.css", metadata={"rebuild": "html", "types": (str,)})
+ css: str = field(
+ default="modern.css", metadata={"rebuild": "html", "types": (str,)}
+ )
"""Path of css file, which shall be used for need style"""
- part_prefix: str = field(default="→\xa0", metadata={"rebuild": "html", "types": (str,)})
+ part_prefix: str = field(
+ default="→\xa0", metadata={"rebuild": "html", "types": (str,)}
+ )
"""Prefix for need_part output in tables"""
- extra_links: list[dict[str, Any]] = field(default_factory=list, metadata={"rebuild": "html", "types": ()})
+ extra_links: list[dict[str, Any]] = field(
+ default_factory=list, metadata={"rebuild": "html", "types": ()}
+ )
"""List of additional links, which can be used by setting related option
Values needed for each new link:
* option (will also be the option name)
@@ -190,48 +253,97 @@ def __setattr__(self, name: str, value: Any) -> None:
* color (used for needflow. Default: #000000)
Example: [{"name": "blocks, "incoming": "is blocked by", "copy_link": True, "color": "#ffcc00"}]
"""
- report_dead_links: bool = field(default=True, metadata={"rebuild": "html", "types": (bool,)})
+ report_dead_links: bool = field(
+ default=True, metadata={"rebuild": "html", "types": (bool,)}
+ )
"""DEPRECATED: Use ``suppress_warnings = ["needs.link_outgoing"]`` instead."""
- filter_data: dict[str, Any] = field(default_factory=dict, metadata={"rebuild": "html", "types": ()})
- allow_unsafe_filters: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- flow_show_links: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- flow_link_types: list[str] = field(default_factory=lambda: ["links"], metadata={"rebuild": "html", "types": ()})
+ filter_data: dict[str, Any] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": ()}
+ )
+ allow_unsafe_filters: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ flow_show_links: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ flow_link_types: list[str] = field(
+ default_factory=lambda: ["links"], metadata={"rebuild": "html", "types": ()}
+ )
"""Defines the link_types to show in a needflow diagram."""
- warnings: dict[str, Any] = field(default_factory=dict, metadata={"rebuild": "html", "types": ()})
- warnings_always_warn: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- layouts: dict[str, dict[str, Any]] = field(default_factory=dict, metadata={"rebuild": "html", "types": ()})
- default_layout: str = field(default="clean", metadata={"rebuild": "html", "types": (str,)})
- default_style: None | str = field(default=None, metadata={"rebuild": "html", "types": ()})
- flow_configs: dict[str, str] = field(default_factory=dict, metadata={"rebuild": "html", "types": ()})
- template_folder: str = field(default="needs_templates/", metadata={"rebuild": "html", "types": (str,)})
- services: dict[str, dict[str, Any]] = field(default_factory=dict, metadata={"rebuild": "html", "types": ()})
- service_all_data: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- debug_no_external_calls: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- external_needs: list[dict[str, Any]] = field(default_factory=list, metadata={"rebuild": "html", "types": ()})
+ warnings: dict[str, Any] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": ()}
+ )
+ warnings_always_warn: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ layouts: dict[str, dict[str, Any]] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": ()}
+ )
+ default_layout: str = field(
+ default="clean", metadata={"rebuild": "html", "types": (str,)}
+ )
+ default_style: None | str = field(
+ default=None, metadata={"rebuild": "html", "types": ()}
+ )
+ flow_configs: dict[str, str] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": ()}
+ )
+ template_folder: str = field(
+ default="needs_templates/", metadata={"rebuild": "html", "types": (str,)}
+ )
+ services: dict[str, dict[str, Any]] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": ()}
+ )
+ service_all_data: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ debug_no_external_calls: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ external_needs: list[dict[str, Any]] = field(
+ default_factory=list, metadata={"rebuild": "html", "types": ()}
+ )
"""Reference external needs, outside of the documentation."""
- builder_filter: str = field(default="is_external==False", metadata={"rebuild": "html", "types": (str,)})
+ builder_filter: str = field(
+ default="is_external==False", metadata={"rebuild": "html", "types": (str,)}
+ )
table_classes: list[str] = field(
- default_factory=lambda: NEEDS_TABLES_CLASSES, metadata={"rebuild": "html", "types": (list,)}
+ default_factory=lambda: NEEDS_TABLES_CLASSES,
+ metadata={"rebuild": "html", "types": (list,)},
)
"""Additional classes to set for needs and needtable."""
string_links: dict[str, dict[str, Any]] = field(
default_factory=dict, metadata={"rebuild": "html", "types": (dict,)}
)
- build_json: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
+ build_json: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
"""If True, the JSON needs file should be built."""
- reproducible_json: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
+ reproducible_json: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
"""If True, the JSON needs file should be idempotent for multiple builds fo the same documentation."""
- build_needumls: str = field(default="", metadata={"rebuild": "html", "types": (str,)})
- permalink_file: str = field(default="permalink.html", metadata={"rebuild": "html", "types": (str,)})
+ build_needumls: str = field(
+ default="", metadata={"rebuild": "html", "types": (str,)}
+ )
+ permalink_file: str = field(
+ default="permalink.html", metadata={"rebuild": "html", "types": (str,)}
+ )
"""Permalink related config values.
path to permalink.html; absolute path from web-root
"""
- permalink_data: str = field(default="needs.json", metadata={"rebuild": "html", "types": (str,)})
+ permalink_data: str = field(
+ default="needs.json", metadata={"rebuild": "html", "types": (str,)}
+ )
"""path to needs.json relative to permalink.html"""
- report_template: str = field(default="", metadata={"rebuild": "html", "types": (str,)})
+ report_template: str = field(
+ default="", metadata={"rebuild": "html", "types": (str,)}
+ )
"""path to needs_report_template file which is based on the conf.py directory."""
- constraints: dict[str, dict[str, str]] = field(default_factory=dict, metadata={"rebuild": "html", "types": (dict,)})
+ constraints: dict[str, dict[str, str]] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": (dict,)}
+ )
"""Mapping of constraint name, to check name, to filter string.
There are also some special keys for a constraint:
@@ -242,21 +354,35 @@ def __setattr__(self, name: str, value: Any) -> None:
default_factory=dict, metadata={"rebuild": "html", "types": (dict,)}
)
"""Mapping of constraint severity to what to do if a constraint is not fulfilled."""
- constraints_failed_color: str = field(default="", metadata={"rebuild": "html", "types": (str,)})
+ constraints_failed_color: str = field(
+ default="", metadata={"rebuild": "html", "types": (str,)}
+ )
"""DEPRECATED: Use constraint_failed_options instead."""
# add variants option
- variants: dict[str, str] = field(default_factory=dict, metadata={"rebuild": "html", "types": (dict,)})
- variant_options: list[str] = field(default_factory=list, metadata={"rebuild": "html", "types": (list,)})
+ variants: dict[str, str] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": (dict,)}
+ )
+ variant_options: list[str] = field(
+ default_factory=list, metadata={"rebuild": "html", "types": (list,)}
+ )
# add render context option
- render_context: dict[str, Any] = field(default_factory=dict, metadata={"rebuild": "html", "types": (dict,)})
+ render_context: dict[str, Any] = field(
+ default_factory=dict, metadata={"rebuild": "html", "types": (dict,)}
+ )
"""Jinja context for rendering templates"""
- debug_measurement: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
+ debug_measurement: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
# add config for needs_id_builder
- build_json_per_id: bool = field(default=False, metadata={"rebuild": "html", "types": (bool,)})
- build_json_per_id_path: str = field(default="needs_id", metadata={"rebuild": "html", "types": (str,)})
+ build_json_per_id: bool = field(
+ default=False, metadata={"rebuild": "html", "types": (bool,)}
+ )
+ build_json_per_id_path: str = field(
+ default="needs_id", metadata={"rebuild": "html", "types": (str,)}
+ )
@classmethod
def add_config_values(cls, app: Sphinx) -> None:
@@ -267,7 +393,9 @@ def add_config_values(cls, app: Sphinx) -> None:
elif item.default is not MISSING:
default = item.default
else:
- raise Exception(f"Config item {item.name} has no default value or factory.")
+ raise Exception(
+ f"Config item {item.name} has no default value or factory."
+ )
app.add_config_value(
f"needs_{item.name}",
default,
diff --git a/sphinx_needs/data.py b/sphinx_needs/data.py
index abea4ce70..a36e362e3 100644
--- a/sphinx_needs/data.py
+++ b/sphinx_needs/data.py
@@ -592,7 +592,9 @@ def get_or_create_umls(self) -> dict[str, NeedsUmlType]:
return self.env.needs_all_needumls
-def merge_data(_app: Sphinx, env: BuildEnvironment, _docnames: list[str], other: BuildEnvironment) -> None:
+def merge_data(
+ _app: Sphinx, env: BuildEnvironment, _docnames: list[str], other: BuildEnvironment
+) -> None:
"""
Performs data merge of parallel executed workers.
Used only for parallel builds.
@@ -621,14 +623,17 @@ def _merge(name: str, is_complex_dict: bool = False) -> None:
for other_key, other_value in other_objects.items():
# other_value is a list from here on!
if other_key in objects:
- objects[other_key] = list(set(objects[other_key]) | set(other_value))
+ objects[other_key] = list(
+ set(objects[other_key]) | set(other_value)
+ )
else:
objects[other_key] = other_value
elif isinstance(other_objects, list) and isinstance(objects, list):
objects = list(set(objects) | set(other_objects))
else:
raise TypeError(
- f'Objects to "merge" must be dict or list, ' f"not {type(other_objects)} and {type(objects)}"
+ f'Objects to "merge" must be dict or list, '
+ f"not {type(other_objects)} and {type(objects)}"
)
_merge("needs_all_docs", is_complex_dict=True)
diff --git a/sphinx_needs/debug.py b/sphinx_needs/debug.py
index 1fca7239b..ee426a076 100644
--- a/sphinx_needs/debug.py
+++ b/sphinx_needs/debug.py
@@ -18,14 +18,18 @@
from sphinx.application import Sphinx
TIME_MEASUREMENTS: dict[str, Any] = {} # Stores the timing results
-EXECUTE_TIME_MEASUREMENTS = False # Will be used to de/activate measurements. Set during a Sphinx Event
+EXECUTE_TIME_MEASUREMENTS = (
+ False # Will be used to de/activate measurements. Set during a Sphinx Event
+)
START_TIME = 0.0
T = TypeVar("T", bound=Callable[..., Any])
-def measure_time(category: str | None = None, source: str = "internal", name: str | None = None) -> Callable[[T], T]:
+def measure_time(
+ category: str | None = None, source: str = "internal", name: str | None = None
+) -> Callable[[T], T]:
"""
Decorator for measuring the needed execution time of a specific function.
@@ -108,9 +112,13 @@ def wrapper(*args: list[object], **kwargs: dict[object, object]) -> Any:
runtime_dict["max"] = runtime
runtime_dict["max_params"] = { # Store parameters as a shorten string
"args": str([str(arg)[:80] for arg in args]),
- "kwargs": str({key: str(value)[:80] for key, value in kwargs.items()}),
+ "kwargs": str(
+ {key: str(value)[:80] for key, value in kwargs.items()}
+ ),
}
- runtime_dict["min_max_spread"] = runtime_dict["max"] / runtime_dict["min"] * 100
+ runtime_dict["min_max_spread"] = (
+ runtime_dict["max"] / runtime_dict["min"] * 100
+ )
runtime_dict["avg"] = runtime_dict["overall"] / runtime_dict["amount"]
return result
@@ -119,7 +127,12 @@ def wrapper(*args: list[object], **kwargs: dict[object, object]) -> Any:
return inner
-def measure_time_func(func: T, category: str | None = None, source: str = "internal", name: str | None = None) -> T:
+def measure_time_func(
+ func: T,
+ category: str | None = None,
+ source: str = "internal",
+ name: str | None = None,
+) -> T:
"""Wrapper for measuring the needed execution time of a specific function.
Usage as function::
@@ -154,7 +167,9 @@ def store_timing_results_json(outdir: str, build_data: dict[str, Any]) -> None:
def store_timing_results_html(outdir: str, build_data: dict[str, Any]) -> None:
- jinja_env = Environment(loader=PackageLoader("sphinx_needs"), autoescape=select_autoescape())
+ jinja_env = Environment(
+ loader=PackageLoader("sphinx_needs"), autoescape=select_autoescape()
+ )
template = jinja_env.get_template("time_measurements.html")
out_file = Path(outdir) / "debug_measurement.html"
with open(out_file, "w", encoding="utf-8") as f:
diff --git a/sphinx_needs/diagrams_common.py b/sphinx_needs/diagrams_common.py
index 15de4321c..0eced3edb 100644
--- a/sphinx_needs/diagrams_common.py
+++ b/sphinx_needs/diagrams_common.py
@@ -114,7 +114,9 @@ def add_config(config: str) -> str:
uml = ""
if config and len(config) >= 3:
# Remove all empty lines
- config = "\n".join([line.strip() for line in config.split("\n") if line.strip()])
+ config = "\n".join(
+ [line.strip() for line in config.split("\n") if line.strip()]
+ )
uml += "\n' Config\n\n"
uml += config
uml += "\n\n"
@@ -125,13 +127,27 @@ def get_filter_para(node_element: NeedsFilteredBaseType) -> nodes.paragraph:
"""Return paragraph containing the used filter description"""
para = nodes.paragraph()
filter_text = "Used filter:"
- filter_text += " status(%s)" % " OR ".join(node_element["status"]) if len(node_element["status"]) > 0 else ""
+ filter_text += (
+ " status(%s)" % " OR ".join(node_element["status"])
+ if len(node_element["status"]) > 0
+ else ""
+ )
if len(node_element["status"]) > 0 and len(node_element["tags"]) > 0:
filter_text += " AND "
- filter_text += " tags(%s)" % " OR ".join(node_element["tags"]) if len(node_element["tags"]) > 0 else ""
- if (len(node_element["status"]) > 0 or len(node_element["tags"]) > 0) and len(node_element["types"]) > 0:
+ filter_text += (
+ " tags(%s)" % " OR ".join(node_element["tags"])
+ if len(node_element["tags"]) > 0
+ else ""
+ )
+ if (len(node_element["status"]) > 0 or len(node_element["tags"]) > 0) and len(
+ node_element["types"]
+ ) > 0:
filter_text += " AND "
- filter_text += " types(%s)" % " OR ".join(node_element["types"]) if len(node_element["types"]) > 0 else ""
+ filter_text += (
+ " types(%s)" % " OR ".join(node_element["types"])
+ if len(node_element["types"]) > 0
+ else ""
+ )
filter_node = nodes.emphasis(filter_text, filter_text)
para += filter_node
@@ -152,7 +168,9 @@ def get_debug_container(puml_node: nodes.Element) -> nodes.container:
return debug_container
-def calculate_link(app: Sphinx, need_info: NeedsPartsInfoType, _fromdocname: str) -> str:
+def calculate_link(
+ app: Sphinx, need_info: NeedsPartsInfoType, _fromdocname: str
+) -> str:
"""
Link calculation
All links we can get from docutils functions will be relative.
@@ -168,7 +186,9 @@ def calculate_link(app: Sphinx, need_info: NeedsPartsInfoType, _fromdocname: str
builder = app.builder
try:
if need_info["is_external"]:
- assert need_info["external_url"] is not None, "external_url must be set for external needs"
+ assert (
+ need_info["external_url"] is not None
+ ), "external_url must be set for external needs"
link = need_info["external_url"]
# check if need_info["external_url"] is relative path
parsed_url = urlparse(need_info["external_url"])
@@ -176,7 +196,12 @@ def calculate_link(app: Sphinx, need_info: NeedsPartsInfoType, _fromdocname: str
# only need to add ../ or ..\ to get out of the image folder
link = ".." + os.path.sep + need_info["external_url"]
else:
- link = "../" + builder.get_target_uri(need_info["docname"]) + "#" + need_info["target_id"]
+ link = (
+ "../"
+ + builder.get_target_uri(need_info["docname"])
+ + "#"
+ + need_info["target_id"]
+ )
if need_info["is_part"]:
link = f"{link}.{need_info['id']}"
@@ -188,7 +213,9 @@ def calculate_link(app: Sphinx, need_info: NeedsPartsInfoType, _fromdocname: str
def create_legend(need_types: list[dict[str, Any]]) -> str:
def create_row(need_type: dict[str, Any]) -> str:
- return "\n| {color} | {name} |".format(color=need_type["color"], name=need_type["title"])
+ return "\n| {color} | {name} |".format(
+ color=need_type["color"], name=need_type["title"]
+ )
rows = map(create_row, need_types)
table = "|= Color |= Type |" + "".join(rows)
diff --git a/sphinx_needs/directives/list2need.py b/sphinx_needs/directives/list2need.py
index bbdd27643..928a73d1a 100644
--- a/sphinx_needs/directives/list2need.py
+++ b/sphinx_needs/directives/list2need.py
@@ -23,8 +23,12 @@
"""
-LINE_REGEX = re.compile(r"(?P[^\S\n]*)\*\s*(?P.*)|[\S\*]*(?P.*)")
-ID_REGEX = re.compile(r"(\((?P[^\"'=\n]+)?\))") # Exclude some chars, which are used by option list
+LINE_REGEX = re.compile(
+ r"(?P[^\S\n]*)\*\s*(?P.*)|[\S\*]*(?P.*)"
+)
+ID_REGEX = re.compile(
+ r"(\((?P[^\"'=\n]+)?\))"
+) # Exclude some chars, which are used by option list
OPTION_AREA_REGEX = re.compile(r"\(\((.*)\)\)")
OPTIONS_REGEX = re.compile(r"([^=,\s]*)=[\"']([^\"]*)[\"']")
@@ -83,7 +87,9 @@ def run(self) -> Sequence[nodes.Node]:
for x in range(0, len(types_raw_list)):
types[x] = types_raw_list[x]
if types[x] not in conf_types:
- raise SphinxError(f"Unknown type configured: {types[x]}. Allowed are {', '.join(conf_types)}")
+ raise SphinxError(
+ f"Unknown type configured: {types[x]}. Allowed are {', '.join(conf_types)}"
+ )
down_links_raw = self.options.get("links-down")
if down_links_raw is None or down_links_raw == "":
@@ -99,7 +105,10 @@ def run(self) -> Sequence[nodes.Node]:
for i, down_link_raw in enumerate(down_links_raw_list):
down_links_types[i] = down_link_raw
if down_link_raw not in link_types:
- raise SphinxError(f"Unknown link configured: {down_link_raw}. " f"Allowed are {', '.join(link_types)}")
+ raise SphinxError(
+ f"Unknown link configured: {down_link_raw}. "
+ f"Allowed are {', '.join(link_types)}"
+ )
list_needs = []
# Storing the data in a sorted list
for content_line in content_raw.split("\n"):
@@ -112,23 +121,30 @@ def run(self) -> Sequence[nodes.Node]:
if text:
indent = len(indent)
if not indent % 2 == 0:
- raise IndentationError("Indentation for list must be always a multiply of 2.")
+ raise IndentationError(
+ "Indentation for list must be always a multiply of 2."
+ )
level = int(indent / 2)
if level not in types:
raise SphinxWarning(
- f"No need type defined for indentation level {level}." f" Defined types {types}"
+ f"No need type defined for indentation level {level}."
+ f" Defined types {types}"
)
if down_links_types and level > len(down_links_types):
- raise SphinxWarning(f"Not enough links-down defined for indentation level {level}.")
+ raise SphinxWarning(
+ f"Not enough links-down defined for indentation level {level}."
+ )
splitted_text = text.split(delimiter)
title = splitted_text[0]
content = ""
with suppress(IndexError):
- content = delimiter.join(splitted_text[1:]) # Put the content together again
+ content = delimiter.join(
+ splitted_text[1:]
+ ) # Put the content together again
need_id_result = ID_REGEX.search(title)
if need_id_result:
@@ -158,7 +174,9 @@ def run(self) -> Sequence[nodes.Node]:
more_text = more_text.lstrip()
if more_text.startswith(":"):
more_text = f" {more_text}"
- list_needs[-1]["content"] = f"{list_needs[-1]['content']}\n {more_text}"
+ list_needs[-1][
+ "content"
+ ] = f"{list_needs[-1]['content']}\n {more_text}"
# Finally creating the rst code
overall_text = []
@@ -179,7 +197,11 @@ def run(self) -> Sequence[nodes.Node]:
data = list_need
need_links_down = self.get_down_needs(list_needs, index)
- if down_links_types and list_need["level"] in down_links_types and need_links_down:
+ if (
+ down_links_types
+ and list_need["level"] in down_links_types
+ and need_links_down
+ ):
data["links_down"] = need_links_down
data["links_down_type"] = down_links_types[list_need["level"]]
data["set_links_down"] = True
@@ -193,14 +215,19 @@ def run(self) -> Sequence[nodes.Node]:
text_list = indented_text_list
overall_text += text_list
- self.state_machine.insert_input(overall_text, self.state_machine.document.attributes["source"])
+ self.state_machine.insert_input(
+ overall_text, self.state_machine.document.attributes["source"]
+ )
return []
def make_hashed_id(self, type_prefix: str, title: str, id_length: int) -> str:
hashable_content = title
return "{}{}".format(
- type_prefix, hashlib.sha1(hashable_content.encode("UTF-8")).hexdigest().upper()[:id_length]
+ type_prefix,
+ hashlib.sha1(hashable_content.encode("UTF-8"))
+ .hexdigest()
+ .upper()[:id_length],
)
def get_down_needs(self, list_needs: list[Any], index: int) -> list[str]:
diff --git a/sphinx_needs/directives/need.py b/sphinx_needs/directives/need.py
index f4c527791..7863b161f 100644
--- a/sphinx_needs/directives/need.py
+++ b/sphinx_needs/directives/need.py
@@ -64,7 +64,17 @@ def __init__(
state: RSTState,
state_machine: RSTStateMachine,
):
- super().__init__(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine)
+ super().__init__(
+ name,
+ arguments,
+ options,
+ content,
+ lineno,
+ content_offset,
+ block_text,
+ state,
+ state_machine,
+ )
self.needs_config = NeedsSphinxConfig(self.env.config)
self.log = get_logger(__name__)
self.full_title = self._get_full_title()
@@ -109,7 +119,9 @@ def run(self) -> Sequence[nodes.Node]:
content = "\n".join(self.content)
status = self.options.get("status")
if status:
- status = status.replace("__", "") # Support for multiline options, which must use __ for empty lines
+ status = status.replace(
+ "__", ""
+ ) # Support for multiline options, which must use __ for empty lines
tags = self.options.get("tags", "")
style = self.options.get("style")
layout = self.options.get("layout", "")
@@ -121,7 +133,9 @@ def run(self) -> Sequence[nodes.Node]:
need_extra_options = {"duration": duration, "completion": completion}
for extra_link in self.needs_config.extra_links:
- need_extra_options[extra_link["option"]] = self.options.get(extra_link["option"], "")
+ need_extra_options[extra_link["option"]] = self.options.get(
+ extra_link["option"], ""
+ )
for extra_option in NEEDS_CONFIG.extra_options:
need_extra_options[extra_option] = self.options.get(extra_option, "")
@@ -175,12 +189,17 @@ def read_in_links(self, name: str) -> list[str]:
def make_hashed_id(self, type_prefix: str, id_length: int) -> str:
hashable_content = self.full_title or "\n".join(self.content)
return "{}{}".format(
- type_prefix, hashlib.sha1(hashable_content.encode("UTF-8")).hexdigest().upper()[:id_length]
+ type_prefix,
+ hashlib.sha1(hashable_content.encode("UTF-8"))
+ .hexdigest()
+ .upper()[:id_length],
)
@property
def title_from_content(self) -> bool:
- return "title_from_content" in self.options or self.needs_config.title_from_content
+ return (
+ "title_from_content" in self.options or self.needs_config.title_from_content
+ )
@property
def docname(self) -> str:
@@ -211,8 +230,8 @@ def _get_full_title(self) -> str:
if len(self.arguments) > 0: # a title was passed
if "title_from_content" in self.options:
self.log.warning(
- 'need "{}" has :title_from_content: set, '
- "but a title was provided. (see file {}) [needs]".format(self.arguments[0], self.docname),
+ f'need "{self.arguments[0]}" has :title_from_content: set, '
+ f"but a title was provided. (see file {self.docname}) [needs]",
type="needs",
location=(self.env.docname, self.lineno),
)
@@ -223,7 +242,7 @@ def _get_full_title(self) -> str:
raise NeedsInvalidException(
":title_from_content: set, but "
"no content provided. "
- "(Line {} of file {}".format(self.lineno, self.docname)
+ f"(Line {self.lineno} of file {self.docname}"
)
return first_sentence
else:
@@ -260,7 +279,9 @@ def get_sections_and_signature_and_needs(
if isinstance(sibling, desc_signature):
# Check the child of the found signature for the text content/node.
for desc_child in sibling.children:
- if isinstance(desc_child, desc_name) and isinstance(desc_child.children[0], nodes.Text):
+ if isinstance(desc_child, desc_name) and isinstance(
+ desc_child.children[0], nodes.Text
+ ):
signature = desc_child.children[0]
if signature:
break
@@ -324,7 +345,9 @@ def analyse_need_locations(app: Sphinx, doctree: nodes.document) -> None:
# Fetch values from need
# Start from the target node, which is a sibling of the current need node
- sections, signature, parent_needs = get_sections_and_signature_and_needs(previous_sibling(need_node))
+ sections, signature, parent_needs = get_sections_and_signature_and_needs(
+ previous_sibling(need_node)
+ )
# append / set values from need
if sections:
@@ -410,7 +433,12 @@ def process_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str) -
@profile("NEED_FORMAT")
-def format_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, found_needs_nodes: list[Need]) -> None:
+def format_need_nodes(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_needs_nodes: list[Need],
+) -> None:
"""Replace need nodes in the document with node trees suitable for output"""
env = app.env
needs = SphinxNeedsData(env).get_or_create_needs()
@@ -423,7 +451,9 @@ def format_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, fo
find_and_replace_node_content(node_need, env, need_data)
for index, attribute in enumerate(node_need.attributes["classes"]):
- node_need.attributes["classes"][index] = check_and_get_content(attribute, need_data, env)
+ node_need.attributes["classes"][index] = check_and_get_content(
+ attribute, need_data, env
+ )
layout = need_data["layout"] or NeedsSphinxConfig(app.config).default_layout
@@ -441,14 +471,15 @@ def check_links(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> N
report_dead_links = config.report_dead_links
for need in needs.values():
for link_type in extra_links:
- need_link_value = (
- [need[link_type["option"]]] if isinstance(need[link_type["option"]], str) else need[link_type["option"]] # type: ignore
- )
+ _value = need[link_type["option"]] # type: ignore[literal-required]
+ need_link_value = [_value] if isinstance(_value, str) else _value
for need_id_full in need_link_value:
need_id_main, need_id_part = split_need_id(need_id_full)
if need_id_main not in needs or (
- need_id_main in needs and need_id_part and need_id_part not in needs[need_id_main]["parts"]
+ need_id_main in needs
+ and need_id_part
+ and need_id_part not in needs[need_id_main]["parts"]
):
need["has_dead_links"] = True
if not link_type.get("allow_dead_links", False):
@@ -473,7 +504,9 @@ def check_links(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> N
)
-def create_back_links(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
+def create_back_links(
+ needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig
+) -> None:
"""Create back-links in all found needs.
These are fields for each link type, ``_back``,
@@ -484,7 +517,9 @@ def create_back_links(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig
option_back = f"{option}_back"
for key, need in needs.items():
- need_link_value: list[str] = [need[option]] if isinstance(need[option], str) else need[option] # type: ignore[literal-required]
+ need_link_value: list[str] = (
+ [need[option]] if isinstance(need[option], str) else need[option] # type: ignore[literal-required]
+ )
for need_id_full in need_link_value:
need_id_main, need_id_part = split_need_id(need_id_full)
@@ -494,9 +529,14 @@ def create_back_links(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig
# Handling of links to need_parts inside a need
if need_id_part and need_id_part in needs[need_id_main]["parts"]:
- if option_back not in needs[need_id_main]["parts"][need_id_part].keys():
+ if (
+ option_back
+ not in needs[need_id_main]["parts"][need_id_part].keys()
+ ):
needs[need_id_main]["parts"][need_id_part][option_back] = [] # type: ignore[literal-required]
- needs[need_id_main]["parts"][need_id_part][option_back].append(key) # type: ignore[literal-required]
+ needs[need_id_main]["parts"][need_id_part][option_back].append( # type: ignore[literal-required]
+ key
+ )
def _fix_list_dyn_func(list: list[str]) -> list[str]:
diff --git a/sphinx_needs/directives/needbar.py b/sphinx_needs/directives/needbar.py
index 623bef414..598b425a8 100644
--- a/sphinx_needs/directives/needbar.py
+++ b/sphinx_needs/directives/needbar.py
@@ -82,7 +82,11 @@ def run(self) -> Sequence[nodes.Node]:
style = self.options.get("style")
matplotlib = import_matplotlib()
- style = style.strip() if style else (matplotlib.style.use("default") if matplotlib else "default")
+ style = (
+ style.strip()
+ if style
+ else (matplotlib.style.use("default") if matplotlib else "default")
+ )
legend = "legend" in self.options
@@ -167,7 +171,12 @@ def run(self) -> Sequence[nodes.Node]:
# 8. create figure
# 9. final storage
# 10. cleanup matplotlib
-def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
+def process_needbar(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
+) -> None:
env = app.env
needs_data = SphinxNeedsData(env)
needs_config = NeedsSphinxConfig(env.config)
@@ -221,13 +230,19 @@ def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
else:
# We can only process content with the same lenght for each line
if test_columns_length != len(row_data):
- raise Exception(f"{error_id}: each content line must have the same length")
+ raise Exception(
+ f"{error_id}: each content line must have the same length"
+ )
# 3. process the labels (maybe from content)
xlabels = current_needbar["xlabels"]
- xlabels_in_content = bool(xlabels and len(xlabels) >= 1 and xlabels[0] == "FROM_DATA")
+ xlabels_in_content = bool(
+ xlabels and len(xlabels) >= 1 and xlabels[0] == "FROM_DATA"
+ )
ylabels = current_needbar["ylabels"]
- ylabels_in_content = bool(ylabels and len(ylabels) >= 1 and ylabels[0] == "FROM_DATA")
+ ylabels_in_content = bool(
+ ylabels and len(ylabels) >= 1 and ylabels[0] == "FROM_DATA"
+ )
if xlabels_in_content:
# get xlabels from content => first row in content
@@ -265,14 +280,19 @@ def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
# 4. transpose the data if needed
if current_needbar["transpose"]:
- local_data = [[local_data[j][i] for j in range(len(local_data))] for i in range(len(local_data[0]))]
+ local_data = [
+ [local_data[j][i] for j in range(len(local_data))]
+ for i in range(len(local_data[0]))
+ ]
tmp = ylabels
ylabels = xlabels
xlabels = tmp
# 5. process content
local_data_number = []
- need_list = list(prepare_need_list(needs_data.get_or_create_needs().values())) # adds parts to need_list
+ need_list = list(
+ prepare_need_list(needs_data.get_or_create_needs().values())
+ ) # adds parts to need_list
for line in local_data:
line_number = []
@@ -335,7 +355,9 @@ def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
colors = colors + matplotlib.rcParams["axes.prop_cycle"].by_key()["color"]
multi = math.ceil(len(local_data) / len(colors))
if multi > 1:
- print(f"{error_id} warning: color schema is smaller than data, double coloring is occurring")
+ print(
+ f"{error_id} warning: color schema is smaller than data, double coloring is occurring"
+ )
colors = colors * multi
colors = colors[: len(local_data)]
@@ -368,9 +390,13 @@ def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
if current_needbar["show_sum"]:
try:
- bar_label = axes.bar_label(bar, label_type="center") # show label in the middel of each bar
+ bar_label = axes.bar_label(
+ bar, label_type="center"
+                    )  # show label in the middle of each bar
bar_labels.append(bar_label)
- except AttributeError: # bar_label is not support in older matplotlib versions
+ except (
+ AttributeError
+                ):  # bar_label is not supported in older matplotlib versions
current_needbar["show_sum"] = None
current_needbar["show_top_sum"] = None
@@ -381,18 +407,24 @@ def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
try:
bar_label = axes.bar_label(bar)
bar_labels.append(bar_label)
- except AttributeError: # bar_label is not support in older matplotlib versions
+ except (
+ AttributeError
+                ):  # bar_label is not supported in older matplotlib versions
current_needbar["show_sum"] = None
current_needbar["show_top_sum"] = None
sum_rotation = current_needbar["sum_rotation"]
- if sum_rotation and (current_needbar["show_top_sum"] or current_needbar["show_sum"]):
+ if sum_rotation and (
+ current_needbar["show_top_sum"] or current_needbar["show_sum"]
+ ):
sum_rotation = sum_rotation.strip()
# Rotate the bar labels
if sum_rotation.isdigit():
matplotlib.pyplot.setp(bar_labels, rotation=int(sum_rotation))
- centers = [(i + j) / 2.0 for i, j in zip(index[0], index[len(local_data_number) - 1])]
+ centers = [
+ (i + j) / 2.0 for i, j in zip(index[0], index[len(local_data_number) - 1])
+ ]
if not current_needbar["horizontal"]:
# We want to support even older version of matplotlib, which do not support axes.set_xticks(labels)
axes.set_xticks(centers)
@@ -408,14 +440,18 @@ def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
xlabels_rotation = xlabels_rotation.strip()
# Rotate the tick labels
if xlabels_rotation.isdigit():
- matplotlib.pyplot.setp(axes.get_xticklabels(), rotation=int(xlabels_rotation))
+ matplotlib.pyplot.setp(
+ axes.get_xticklabels(), rotation=int(xlabels_rotation)
+ )
ylabels_rotation = current_needbar["ylabels_rotation"]
if ylabels_rotation:
ylabels_rotation = ylabels_rotation.strip()
# Rotate the tick labels
if ylabels_rotation.isdigit():
- matplotlib.pyplot.setp(axes.get_yticklabels(), rotation=int(ylabels_rotation))
+ matplotlib.pyplot.setp(
+ axes.get_yticklabels(), rotation=int(ylabels_rotation)
+ )
if current_needbar["title"]:
axes.set_title(current_needbar["title"].strip())
@@ -433,7 +469,9 @@ def process_needbar(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
# We need to calculate an unique bar-image file name
hash_value = hashlib.sha256(id.encode()).hexdigest()[:5]
- image_node = save_matplotlib_figure(app, figure, f"need_bar_{hash_value}", fromdocname)
+ image_node = save_matplotlib_figure(
+ app, figure, f"need_bar_{hash_value}", fromdocname
+ )
# Add lineno to node
image_node.line = current_needbar["lineno"]
diff --git a/sphinx_needs/directives/needextend.py b/sphinx_needs/directives/needextend.py
index 886aaa123..054d97f53 100644
--- a/sphinx_needs/directives/needextend.py
+++ b/sphinx_needs/directives/needextend.py
@@ -44,9 +44,13 @@ def run(self) -> Sequence[nodes.Node]:
extend_filter = self.arguments[0] if self.arguments else None
if not extend_filter:
- raise NeedsInvalidFilter(f"Filter of needextend must be set. See {env.docname}:{self.lineno}")
+ raise NeedsInvalidFilter(
+ f"Filter of needextend must be set. See {env.docname}:{self.lineno}"
+ )
- strict_option = self.options.get("strict", str(NeedsSphinxConfig(self.env.app.config).needextend_strict))
+ strict_option = self.options.get(
+ "strict", str(NeedsSphinxConfig(self.env.app.config).needextend_strict)
+ )
strict = True
if strict_option.upper() == "TRUE":
strict = True
@@ -69,7 +73,9 @@ def run(self) -> Sequence[nodes.Node]:
def extend_needs_data(
- all_needs: dict[str, NeedsInfoType], extends: dict[str, NeedsExtendType], needs_config: NeedsSphinxConfig
+ all_needs: dict[str, NeedsInfoType],
+ extends: dict[str, NeedsExtendType],
+ needs_config: NeedsSphinxConfig,
) -> None:
"""Use data gathered from needextend directives to modify fields of existing needs."""
@@ -81,7 +87,9 @@ def extend_needs_data(
if need_filter in all_needs:
# a single known ID
found_needs = [all_needs[need_filter]]
- elif need_filter is not None and re.fullmatch(needs_config.id_regex, need_filter):
+ elif need_filter is not None and re.fullmatch(
+ needs_config.id_regex, need_filter
+ ):
# an unknown ID
error = f"Provided id {need_filter} for needextend does not exist."
if current_needextend["strict"]:
@@ -92,7 +100,9 @@ def extend_needs_data(
else:
# a filter string
try:
- found_needs = filter_needs(all_needs.values(), needs_config, need_filter)
+ found_needs = filter_needs(
+ all_needs.values(), needs_config, need_filter
+ )
except NeedsInvalidFilter as e:
raise NeedsInvalidFilter(
f"Filter not valid for needextend on page {current_needextend['docname']}:\n{e}"
@@ -108,7 +118,9 @@ def extend_needs_data(
if option.startswith("+"):
option_name = option[1:]
if option_name in link_names:
- if value.strip().startswith("[[") and value.strip().endswith("]]"): # dynamic function
+ if value.strip().startswith("[[") and value.strip().endswith(
+ "]]"
+ ): # dynamic function
need[option_name].append(value)
else:
for ref_need in [i.strip() for i in re.split(";|,", value)]:
@@ -116,13 +128,18 @@ def extend_needs_data(
logger.warning(
f"Provided link id {ref_need} for needextend does not exist. [needs]",
type="needs",
- location=(current_needextend["docname"], current_needextend["lineno"]),
+ location=(
+ current_needextend["docname"],
+ current_needextend["lineno"],
+ ),
)
continue
if ref_need not in need[option_name]:
need[option_name].append(ref_need)
elif option_name in list_values:
- if value.strip().startswith("[[") and value.strip().endswith("]]"): # dynamic function
+ if value.strip().startswith("[[") and value.strip().endswith(
+ "]]"
+ ): # dynamic function
need[option_name].append(value)
else:
for item in [i.strip() for i in re.split(";|,", value)]:
@@ -145,7 +162,9 @@ def extend_needs_data(
else:
if option in link_names:
need[option] = []
- if value.strip().startswith("[[") and value.strip().endswith("]]"): # dynamic function
+ if value.strip().startswith("[[") and value.strip().endswith(
+ "]]"
+ ): # dynamic function
need[option].append(value)
else:
for ref_need in [i.strip() for i in re.split(";|,", value)]:
@@ -153,12 +172,17 @@ def extend_needs_data(
logger.warning(
f"Provided link id {ref_need} for needextend does not exist. [needs]",
type="needs",
- location=(current_needextend["docname"], current_needextend["lineno"]),
+ location=(
+ current_needextend["docname"],
+ current_needextend["lineno"],
+ ),
)
continue
need[option].append(ref_need)
elif option in list_values:
- if value.strip().startswith("[[") and value.strip().endswith("]]"): # dynamic function
+ if value.strip().startswith("[[") and value.strip().endswith(
+ "]]"
+ ): # dynamic function
need[option].append(value)
else:
need[option] = [i.strip() for i in re.split(";|,", value)]
diff --git a/sphinx_needs/directives/needextract.py b/sphinx_needs/directives/needextract.py
index 18f5cb060..9ceba1a09 100644
--- a/sphinx_needs/directives/needextract.py
+++ b/sphinx_needs/directives/needextract.py
@@ -43,7 +43,9 @@ class NeedextractDirective(FilterBase):
def run(self) -> Sequence[nodes.Node]:
env = self.env
- targetid = "needextract-{docname}-{id}".format(docname=env.docname, id=env.new_serialno("needextract"))
+ targetid = "needextract-{docname}-{id}".format(
+ docname=env.docname, id=env.new_serialno("needextract")
+ )
targetnode = nodes.target("", "", ids=[targetid])
filter_arg = self.arguments[0] if self.arguments else None
@@ -67,7 +69,10 @@ def run(self) -> Sequence[nodes.Node]:
def process_needextract(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
) -> None:
"""
Replace all needextract nodes with a list of the collected needs.
@@ -88,14 +93,18 @@ def process_needextract(
# check if filter argument and option filter both exist
need_filter_arg = current_needextract["filter_arg"]
if need_filter_arg and current_needextract["filter"]:
- raise NeedsInvalidFilter("Needextract can't have filter arguments and option filter at the same time.")
+ raise NeedsInvalidFilter(
+ "Needextract can't have filter arguments and option filter at the same time."
+ )
elif need_filter_arg:
# check if given filter argument is need-id
if need_filter_arg in all_needs:
need_filter_arg = f'id == "{need_filter_arg}"'
elif re.fullmatch(needs_config.id_regex, need_filter_arg):
# check if given filter argument is need-id, but not exists
- raise NeedsInvalidFilter(f"Provided id {need_filter_arg} for needextract does not exist.")
+ raise NeedsInvalidFilter(
+ f"Provided id {need_filter_arg} for needextract does not exist."
+ )
current_needextract["filter"] = need_filter_arg
found_needs = process_filters(app, all_needs.values(), current_needextract)
@@ -118,7 +127,9 @@ def process_needextract(
content.append(need_extract)
if len(content) == 0:
- content.append(no_needs_found_paragraph(current_needextract.get("filter_warning")))
+ content.append(
+ no_needs_found_paragraph(current_needextract.get("filter_warning"))
+ )
if current_needextract["show_filters"]:
content.append(used_filter_paragraph(current_needextract))
diff --git a/sphinx_needs/directives/needfilter.py b/sphinx_needs/directives/needfilter.py
index ef0cb3d71..0d7cd00c8 100644
--- a/sphinx_needs/directives/needfilter.py
+++ b/sphinx_needs/directives/needfilter.py
@@ -49,7 +49,9 @@ def layout(argument: str) -> str:
def run(self) -> Sequence[nodes.Node]:
env = self.env
- targetid = "needfilter-{docname}-{id}".format(docname=env.docname, id=env.new_serialno("needfilter"))
+ targetid = "needfilter-{docname}-{id}".format(
+ docname=env.docname, id=env.new_serialno("needfilter")
+ )
targetnode = nodes.target("", "", ids=[targetid])
# Add the need and all needed information
@@ -72,7 +74,10 @@ def run(self) -> Sequence[nodes.Node]:
def process_needfilters(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
) -> None:
# Replace all needlist nodes with a list of the collected needs.
# Augment each need with a backlink to the original location.
@@ -129,7 +134,14 @@ def process_needfilters(
status_colspec = nodes.colspec(colwidth=5)
links_colspec = nodes.colspec(colwidth=5)
tags_colspec = nodes.colspec(colwidth=5)
- tgroup += [id_colspec, title_colspec, type_colspec, status_colspec, links_colspec, tags_colspec]
+ tgroup += [
+ id_colspec,
+ title_colspec,
+ type_colspec,
+ status_colspec,
+ links_colspec,
+ tags_colspec,
+ ]
tgroup += nodes.thead(
"",
nodes.row(
@@ -170,7 +182,9 @@ def process_needfilters(
else:
ref = nodes.reference("", "")
ref["refdocname"] = need_info["docname"]
- ref["refuri"] = builder.get_relative_uri(fromdocname, need_info["docname"])
+ ref["refuri"] = builder.get_relative_uri(
+ fromdocname, need_info["docname"]
+ )
ref["refuri"] += "#" + target_id
ref.append(title)
line_node += ref
@@ -178,11 +192,17 @@ def process_needfilters(
line_block.append(line_node)
elif current_needfilter["layout"] == "table":
row = nodes.row()
- row += row_col_maker(app, fromdocname, all_needs, need_info, "id", make_ref=True)
+ row += row_col_maker(
+ app, fromdocname, all_needs, need_info, "id", make_ref=True
+ )
row += row_col_maker(app, fromdocname, all_needs, need_info, "title")
- row += row_col_maker(app, fromdocname, all_needs, need_info, "type_name")
+ row += row_col_maker(
+ app, fromdocname, all_needs, need_info, "type_name"
+ )
row += row_col_maker(app, fromdocname, all_needs, need_info, "status")
- row += row_col_maker(app, fromdocname, all_needs, need_info, "links", ref_lookup=True)
+ row += row_col_maker(
+ app, fromdocname, all_needs, need_info, "links", ref_lookup=True
+ )
row += row_col_maker(app, fromdocname, all_needs, need_info, "tags")
tbody += row
elif current_needfilter["layout"] == "diagram":
@@ -203,9 +223,13 @@ def process_needfilters(
link = ""
diagram_template = Template(needs_config.diagram_template)
- node_text = diagram_template.render(**need_info, **needs_config.render_context)
+ node_text = diagram_template.render(
+ **need_info, **needs_config.render_context
+ )
- puml_node["uml"] += '{style} "{node_text}" as {id} [[{link}]] {color}\n'.format(
+ puml_node[
+ "uml"
+ ] += '{style} "{node_text}" as {id} [[{link}]] {color}\n'.format(
id=need_info["id"],
node_text=node_text,
link=link,
@@ -213,7 +237,9 @@ def process_needfilters(
style=need_info["type_style"],
)
for link in need_info["links"]:
- puml_connections += "{id} --> {link}\n".format(id=need_info["id"], link=link)
+ puml_connections += "{id} --> {link}\n".format(
+ id=need_info["id"], link=link
+ )
if current_needfilter["layout"] == "list":
content.append(line_block)
@@ -227,11 +253,15 @@ def process_needfilters(
puml_node["uml"] += create_legend(needs_config.types)
puml_node["uml"] += "@enduml"
puml_node["incdir"] = os.path.dirname(current_needfilter["docname"])
- puml_node["filename"] = os.path.split(current_needfilter["docname"])[1] # Needed for plantuml >= 0.9
+ puml_node["filename"] = os.path.split(current_needfilter["docname"])[
+ 1
+ ] # Needed for plantuml >= 0.9
content.append(puml_node)
if len(content) == 0:
- content.append(no_needs_found_paragraph(current_needfilter.get("filter_warning")))
+ content.append(
+ no_needs_found_paragraph(current_needfilter.get("filter_warning"))
+ )
if current_needfilter["show_filters"]:
para_node = nodes.paragraph()
filter_text = "Used filter:"
@@ -240,17 +270,25 @@ def process_needfilters(
if len(current_needfilter["status"]) > 0
else ""
)
- if len(current_needfilter["status"]) > 0 and len(current_needfilter["tags"]) > 0:
+ if (
+ len(current_needfilter["status"]) > 0
+ and len(current_needfilter["tags"]) > 0
+ ):
filter_text += " AND "
filter_text += (
- " tags(%s)" % " OR ".join(current_needfilter["tags"]) if len(current_needfilter["tags"]) > 0 else ""
+ " tags(%s)" % " OR ".join(current_needfilter["tags"])
+ if len(current_needfilter["tags"]) > 0
+ else ""
)
- if (len(current_needfilter["status"]) > 0 or len(current_needfilter["tags"]) > 0) and len(
- current_needfilter["types"]
- ) > 0:
+ if (
+ len(current_needfilter["status"]) > 0
+ or len(current_needfilter["tags"]) > 0
+ ) and len(current_needfilter["types"]) > 0:
filter_text += " AND "
filter_text += (
- " types(%s)" % " OR ".join(current_needfilter["types"]) if len(current_needfilter["types"]) > 0 else ""
+ " types(%s)" % " OR ".join(current_needfilter["types"])
+ if len(current_needfilter["types"]) > 0
+ else ""
)
filter_node = nodes.emphasis(filter_text, filter_text)
diff --git a/sphinx_needs/directives/needflow.py b/sphinx_needs/directives/needflow.py
index 31572afb4..5f4df8d92 100644
--- a/sphinx_needs/directives/needflow.py
+++ b/sphinx_needs/directives/needflow.py
@@ -74,7 +74,9 @@ def run(self) -> Sequence[nodes.Node]:
targetnode = nodes.target("", "", ids=[targetid])
all_link_types = ",".join(x["option"] for x in needs_config.extra_links)
- link_types = split_link_types(self.options.get("link_types", all_link_types), location)
+ link_types = split_link_types(
+ self.options.get("link_types", all_link_types), location
+ )
config_names = self.options.get("config")
configs = []
@@ -220,7 +222,12 @@ def walk_curr_need_tree(
# check curr need child has children or has parts
if curr_child_need["parent_needs_back"] or curr_child_need["parts"]:
curr_need_tree += walk_curr_need_tree(
- app, fromdocname, current_needflow, all_needs, found_needs, curr_child_need
+ app,
+ fromdocname,
+ current_needflow,
+ all_needs,
+ found_needs,
+ curr_child_need,
)
# add newline for next element
curr_need_tree += "\n"
@@ -261,7 +268,9 @@ def cal_needs_node(
top_needs = get_root_needs(found_needs)
curr_need_tree = ""
for top_need in top_needs:
- top_need_node = get_need_node_rep_for_plantuml(app, fromdocname, current_needflow, all_needs, top_need)
+ top_need_node = get_need_node_rep_for_plantuml(
+ app, fromdocname, current_needflow, all_needs, top_need
+ )
curr_need_tree += (
top_need_node
+ walk_curr_need_tree(
@@ -278,7 +287,12 @@ def cal_needs_node(
@measure_time("needflow")
-def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
+def process_needflow(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
+) -> None:
# Replace all needflow nodes with a list of the collected needs.
# Augment each need with a backlink to the original location.
env = app.env
@@ -305,7 +319,9 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
if lt not in link_type_names:
logger.warning(
"Unknown link type {link_type} in needflow {flow}. Allowed values: {link_types} [needs]".format(
- link_type=lt, flow=current_needflow["target_id"], link_types=",".join(link_type_names)
+ link_type=lt,
+ flow=current_needflow["target_id"],
+ link_types=",".join(link_type_names),
),
type="needs",
)
@@ -342,7 +358,9 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
config = current_needflow["config"]
if config and len(config) >= 3:
# Remove all empty lines
- config = "\n".join([line.strip() for line in config.split("\n") if line.strip()])
+ config = "\n".join(
+ [line.strip() for line in config.split("\n") if line.strip()]
+ )
puml_node["uml"] += "\n' Config\n\n"
puml_node["uml"] += config
puml_node["uml"] += "\n\n"
@@ -353,9 +371,13 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
for link_type in link_types:
# Skip link-type handling, if it is not part of a specified list of allowed link_types or
# if not part of the overall configuration of needs_flow_link_types
- if (current_needflow["link_types"] and link_type["option"].upper() not in option_link_types) or (
+ if (
+ current_needflow["link_types"]
+ and link_type["option"].upper() not in option_link_types
+ ) or (
not current_needflow["link_types"]
- and link_type["option"].upper() not in allowed_link_types_options
+ and link_type["option"].upper()
+ not in allowed_link_types_options
):
continue
@@ -367,30 +389,42 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
# If source or target of link is a need_part, a specific style is needed
if "." in link or "." in need_info["id_complete"]:
final_link = link
- if current_needflow["show_link_names"] or needs_config.flow_show_links:
+ if (
+ current_needflow["show_link_names"]
+ or needs_config.flow_show_links
+ ):
desc = link_type["outgoing"] + "\\n"
comment = f": {desc}"
else:
comment = ""
if "style_part" in link_type and link_type["style_part"]:
- link_style = "[{style}]".format(style=link_type["style_part"])
+ link_style = "[{style}]".format(
+ style=link_type["style_part"]
+ )
else:
link_style = "[dotted]"
else:
final_link = link
- if current_needflow["show_link_names"] or needs_config.flow_show_links:
+ if (
+ current_needflow["show_link_names"]
+ or needs_config.flow_show_links
+ ):
comment = ": {desc}".format(desc=link_type["outgoing"])
else:
comment = ""
if "style" in link_type and link_type["style"]:
- link_style = "[{style}]".format(style=link_type["style"])
+ link_style = "[{style}]".format(
+ style=link_type["style"]
+ )
else:
link_style = ""
# Do not create an links, if the link target is not part of the search result.
- if final_link not in [x["id"] for x in found_needs if x["is_need"]] and final_link not in [
+ if final_link not in [
+ x["id"] for x in found_needs if x["is_need"]
+ ] and final_link not in [
x["id_complete"] for x in found_needs if x["is_part"]
]:
continue
@@ -415,7 +449,9 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
)
# calculate needs node representation for plantuml
- puml_node["uml"] += cal_needs_node(app, fromdocname, current_needflow, all_needs.values(), found_needs)
+ puml_node["uml"] += cal_needs_node(
+ app, fromdocname, current_needflow, all_needs.values(), found_needs
+ )
puml_node["uml"] += "\n' Connection definition \n\n"
puml_node["uml"] += puml_connections
@@ -426,7 +462,9 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
puml_node["uml"] += "\n@enduml"
puml_node["incdir"] = os.path.dirname(current_needflow["docname"])
- puml_node["filename"] = os.path.split(current_needflow["docname"])[1] # Needed for plantuml >= 0.9
+ puml_node["filename"] = os.path.split(current_needflow["docname"])[
+ 1
+ ] # Needed for plantuml >= 0.9
scale = int(current_needflow["scale"])
# if scale != 100:
@@ -452,8 +490,14 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
gen_flow_link = generate_name(app, puml_node.children[0], file_ext)
current_file_parts = fromdocname.split("/")
subfolder_amount = len(current_file_parts) - 1
- img_locaton = "../" * subfolder_amount + "_images/" + gen_flow_link[0].split("/")[-1]
- flow_ref = nodes.reference("t", current_needflow["caption"], refuri=img_locaton)
+ img_locaton = (
+ "../" * subfolder_amount
+ + "_images/"
+ + gen_flow_link[0].split("/")[-1]
+ )
+ flow_ref = nodes.reference(
+ "t", current_needflow["caption"], refuri=img_locaton
+ )
puml_node += nodes.caption("", "", flow_ref)
# Add lineno to node
@@ -461,25 +505,36 @@ def process_needflow(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
content.append(puml_node)
else: # no needs found
- content.append(no_needs_found_paragraph(current_needflow.get("filter_warning")))
+ content.append(
+ no_needs_found_paragraph(current_needflow.get("filter_warning"))
+ )
if current_needflow["show_filters"]:
para = nodes.paragraph()
filter_text = "Used filter:"
filter_text += (
- " status(%s)" % " OR ".join(current_needflow["status"]) if len(current_needflow["status"]) > 0 else ""
+ " status(%s)" % " OR ".join(current_needflow["status"])
+ if len(current_needflow["status"]) > 0
+ else ""
)
- if len(current_needflow["status"]) > 0 and len(current_needflow["tags"]) > 0:
+ if (
+ len(current_needflow["status"]) > 0
+ and len(current_needflow["tags"]) > 0
+ ):
filter_text += " AND "
filter_text += (
- " tags(%s)" % " OR ".join(current_needflow["tags"]) if len(current_needflow["tags"]) > 0 else ""
+ " tags(%s)" % " OR ".join(current_needflow["tags"])
+ if len(current_needflow["tags"]) > 0
+ else ""
)
- if (len(current_needflow["status"]) > 0 or len(current_needflow["tags"]) > 0) and len(
- current_needflow["types"]
- ) > 0:
+ if (
+ len(current_needflow["status"]) > 0 or len(current_needflow["tags"]) > 0
+ ) and len(current_needflow["types"]) > 0:
filter_text += " AND "
filter_text += (
- " types(%s)" % " OR ".join(current_needflow["types"]) if len(current_needflow["types"]) > 0 else ""
+ " types(%s)" % " OR ".join(current_needflow["types"])
+ if len(current_needflow["types"]) > 0
+ else ""
)
filter_node = nodes.emphasis(filter_text, filter_text)
diff --git a/sphinx_needs/directives/needgantt.py b/sphinx_needs/directives/needgantt.py
index d0f4b9c11..5f8e8f3f1 100644
--- a/sphinx_needs/directives/needgantt.py
+++ b/sphinx_needs/directives/needgantt.py
@@ -88,15 +88,21 @@ def run(self) -> Sequence[nodes.Node]:
timeline_options = ["daily", "weekly", "monthly"]
if timeline and timeline not in timeline_options:
raise NeedGanttException(
- "Given scale value {} is invalid. Please use: " "{}".format(timeline, ",".join(timeline_options))
+ "Given scale value {} is invalid. Please use: " "{}".format(
+ timeline, ",".join(timeline_options)
+ )
)
else:
timeline = None # Timeline/scale not set later
no_color = "no_color" in self.options
- duration_option = self.options.get("duration_option", needs_config.duration_option)
- completion_option = self.options.get("completion_option", needs_config.completion_option)
+ duration_option = self.options.get(
+ "duration_option", needs_config.duration_option
+ )
+ completion_option = self.options.get(
+ "completion_option", needs_config.completion_option
+ )
# Add the needgantt and all needed information
SphinxNeedsData(env).get_or_create_gantts()[targetid] = {
@@ -121,7 +127,9 @@ def run(self) -> Sequence[nodes.Node]:
return [targetnode] + [Needgantt("")]
def get_link_type_option(self, name: str, default: str = "") -> list[str]:
- link_types = [x.strip() for x in re.split(";|,", self.options.get(name, default))]
+ link_types = [
+ x.strip() for x in re.split(";|,", self.options.get(name, default))
+ ]
conf_link_types = NeedsSphinxConfig(self.env.config).extra_links
conf_link_types_name = [x["option"] for x in conf_link_types]
@@ -131,14 +139,20 @@ def get_link_type_option(self, name: str, default: str = "") -> list[str]:
continue
if link_type not in conf_link_types_name:
raise SphinxNeedsLinkTypeException(
- link_type + "does not exist in configuration option needs_extra_links"
+ link_type
+ + "does not exist in configuration option needs_extra_links"
)
final_link_types.append(link_type)
return final_link_types
-def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
+def process_needgantt(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
+) -> None:
# Replace all needgantt nodes with a list of the collected needs.
env = app.env
needs_config = NeedsSphinxConfig(app.config)
@@ -196,7 +210,9 @@ def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, fo
except Exception:
raise NeedGanttException(
'start_date "{}"for needgantt is invalid. '
- 'File: {}:current_needgantt["lineno"]'.format(start_date_string, current_needgantt["docname"])
+ 'File: {}:current_needgantt["lineno"]'.format(
+ start_date_string, current_needgantt["docname"]
+ )
)
month = MONTH_NAMES[int(start_date.strftime("%m"))]
@@ -212,12 +228,16 @@ def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, fo
complete = None
if current_needgantt["milestone_filter"]:
- is_milestone = filter_single_need(need, needs_config, current_needgantt["milestone_filter"])
+ is_milestone = filter_single_need(
+ need, needs_config, current_needgantt["milestone_filter"]
+ )
else:
is_milestone = False
if current_needgantt["milestone_filter"] and is_milestone:
- gantt_element = "[{}] as [{}] lasts 0 days\n".format(need["title"], need["id"])
+ gantt_element = "[{}] as [{}] lasts 0 days\n".format(
+ need["title"], need["id"]
+ )
else: # Normal gantt element handling
duration_option = current_needgantt["duration_option"]
duration = need[duration_option] # type: ignore[literal-required]
@@ -230,18 +250,26 @@ def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, fo
type="needs",
)
duration = 1
- gantt_element = "[{}] as [{}] lasts {} days\n".format(need["title"], need["id"], duration)
+ gantt_element = "[{}] as [{}] lasts {} days\n".format(
+ need["title"], need["id"], duration
+ )
if complete:
complete = complete.replace("%", "")
- el_completion_string += "[{}] is {}% completed\n".format(need["title"], complete)
+ el_completion_string += "[{}] is {}% completed\n".format(
+ need["title"], complete
+ )
- el_color_string += "[{}] is colored in {}\n".format(need["title"], need["type_color"])
+ el_color_string += "[{}] is colored in {}\n".format(
+ need["title"], need["type_color"]
+ )
puml_node["uml"] += gantt_element
puml_node["uml"] += "\n' Element links definition \n\n"
- puml_node["uml"] += "\n' Deactivated, as currently supported by plantuml beta only"
+ puml_node[
+ "uml"
+ ] += "\n' Deactivated, as currently supported by plantuml beta only"
puml_node["uml"] += "\n' Element completion definition \n\n"
puml_node["uml"] += el_completion_string + "\n"
@@ -257,10 +285,16 @@ def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, fo
puml_node["uml"] += "\n' Constraints definition \n\n"
for need in found_needs:
if current_needgantt["milestone_filter"]:
- is_milestone = filter_single_need(need, needs_config, current_needgantt["milestone_filter"])
+ is_milestone = filter_single_need(
+ need, needs_config, current_needgantt["milestone_filter"]
+ )
else:
is_milestone = False
- for con_type in ("starts_with_links", "starts_after_links", "ends_with_links"):
+ for con_type in (
+ "starts_with_links",
+ "starts_after_links",
+ "ends_with_links",
+ ):
if is_milestone:
keyword = "happens"
elif con_type in ["starts_with_links", "starts_after_links"]:
@@ -288,7 +322,9 @@ def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, fo
puml_node["uml"] += "\n@endgantt"
puml_node["incdir"] = os.path.dirname(current_needgantt["docname"])
- puml_node["filename"] = os.path.split(current_needgantt["docname"])[1] # Needed for plantuml >= 0.9
+ puml_node["filename"] = os.path.split(current_needgantt["docname"])[
+ 1
+ ] # Needed for plantuml >= 0.9
scale = int(current_needgantt["scale"])
# if scale != 100:
@@ -311,14 +347,20 @@ def process_needgantt(app: Sphinx, doctree: nodes.document, fromdocname: str, fo
gen_flow_link = generate_name(app, puml_node.children[0], file_ext)
current_file_parts = fromdocname.split("/")
subfolder_amount = len(current_file_parts) - 1
- img_location = "../" * subfolder_amount + "_images/" + gen_flow_link[0].split("/")[-1]
- flow_ref = nodes.reference("t", current_needgantt["caption"], refuri=img_location)
+ img_location = (
+ "../" * subfolder_amount + "_images/" + gen_flow_link[0].split("/")[-1]
+ )
+ flow_ref = nodes.reference(
+ "t", current_needgantt["caption"], refuri=img_location
+ )
puml_node += nodes.caption("", "", flow_ref)
content.append(puml_node)
if len(found_needs) == 0:
- content = [no_needs_found_paragraph(current_needgantt.get("filter_warning"))]
+ content = [
+ no_needs_found_paragraph(current_needgantt.get("filter_warning"))
+ ]
if current_needgantt["show_filters"]:
content.append(get_filter_para(current_needgantt))
diff --git a/sphinx_needs/directives/needimport.py b/sphinx_needs/directives/needimport.py
index b66148079..cf9d26cc3 100644
--- a/sphinx_needs/directives/needimport.py
+++ b/sphinx_needs/directives/needimport.py
@@ -73,19 +73,25 @@ def run(self) -> Sequence[nodes.Node]:
response.json()
) # The downloaded file MUST be json. Everything else we do not handle!
except Exception as e:
- raise NeedimportException(f"Getting {need_import_path} didn't work. Reason: {e}.")
+ raise NeedimportException(
+ f"Getting {need_import_path} didn't work. Reason: {e}."
+ )
else:
logger.info(f"Importing needs from {need_import_path}")
if not os.path.isabs(need_import_path):
# Relative path should start from current rst file directory
curr_dir = os.path.dirname(self.docname)
- new_need_import_path = os.path.join(self.env.app.srcdir, curr_dir, need_import_path)
+ new_need_import_path = os.path.join(
+ self.env.app.srcdir, curr_dir, need_import_path
+ )
correct_need_import_path = new_need_import_path
if not os.path.exists(new_need_import_path):
# Check the old way that calculates relative path starting from conf.py directory
- old_need_import_path = os.path.join(self.env.app.srcdir, need_import_path)
+ old_need_import_path = os.path.join(
+ self.env.app.srcdir, need_import_path
+ )
if os.path.exists(old_need_import_path):
correct_need_import_path = old_need_import_path
logger.warning(
@@ -97,14 +103,20 @@ def run(self) -> Sequence[nodes.Node]:
)
else:
# Absolute path starts with /, based on the source directory. The / need to be striped
- correct_need_import_path = os.path.join(self.env.app.srcdir, need_import_path[1:])
+ correct_need_import_path = os.path.join(
+ self.env.app.srcdir, need_import_path[1:]
+ )
if not os.path.exists(correct_need_import_path):
- raise ReferenceError(f"Could not load needs import file {correct_need_import_path}")
+ raise ReferenceError(
+ f"Could not load needs import file {correct_need_import_path}"
+ )
errors = check_needs_file(correct_need_import_path)
if errors.schema:
- logger.info(f"Schema validation errors detected in file {correct_need_import_path}:")
+ logger.info(
+ f"Schema validation errors detected in file {correct_need_import_path}:"
+ )
for error in errors.schema:
logger.info(f' {error.message} -> {".".join(error.path)}')
@@ -121,13 +133,19 @@ def run(self) -> Sequence[nodes.Node]:
if not isinstance(version, str):
raise KeyError
except KeyError:
- raise CorruptedNeedsFile(f"Key 'current_version' missing or corrupted in {correct_need_import_path}")
+ raise CorruptedNeedsFile(
+ f"Key 'current_version' missing or corrupted in {correct_need_import_path}"
+ )
if version not in needs_import_list["versions"].keys():
- raise VersionNotFound(f"Version {version} not found in needs import file {correct_need_import_path}")
+ raise VersionNotFound(
+ f"Version {version} not found in needs import file {correct_need_import_path}"
+ )
needs_config = NeedsSphinxConfig(self.config)
# TODO this is not exactly NeedsInfoType, because the export removes/adds some keys
- needs_list: dict[str, NeedsInfoType] = needs_import_list["versions"][version]["needs"]
+ needs_list: dict[str, NeedsInfoType] = needs_import_list["versions"][version][
+ "needs"
+ ]
# Filter imported needs
needs_list_filtered = {}
@@ -161,13 +179,20 @@ def run(self) -> Sequence[nodes.Node]:
for id in needs_list:
# Manipulate links in all link types
for extra_link in extra_links:
- if extra_link["option"] in need and id in need[extra_link["option"]]: # type: ignore[literal-required]
+ if (
+ extra_link["option"] in need
+ and id in need[extra_link["option"]] # type: ignore[literal-required]
+ ):
for n, link in enumerate(need[extra_link["option"]]): # type: ignore[literal-required]
if id == link:
- need[extra_link["option"]][n] = "".join([id_prefix, id]) # type: ignore[literal-required]
+ need[extra_link["option"]][n] = "".join( # type: ignore[literal-required]
+ [id_prefix, id]
+ )
# Manipulate descriptions
# ToDo: Use regex for better matches.
- need["description"] = need["description"].replace(id, "".join([id_prefix, id])) # type: ignore[typeddict-item]
+ need["description"] = need["description"].replace( # type: ignore[typeddict-item]
+ id, "".join([id_prefix, id])
+ )
# tags update
for need in needs_list.values():
@@ -194,8 +219,12 @@ def run(self) -> Sequence[nodes.Node]:
for need in needs_list.values():
# Set some values based on given option or value from imported need.
need["template"] = self.options.get("template", need.get("template"))
- need["pre_template"] = self.options.get("pre_template", need.get("pre_template"))
- need["post_template"] = self.options.get("post_template", need.get("post_template"))
+ need["pre_template"] = self.options.get(
+ "pre_template", need.get("pre_template")
+ )
+ need["post_template"] = self.options.get(
+ "post_template", need.get("post_template")
+ )
need["layout"] = self.options.get("layout", need.get("layout"))
need["style"] = self.options.get("style", need.get("style"))
diff --git a/sphinx_needs/directives/needlist.py b/sphinx_needs/directives/needlist.py
index 91a2950f2..d6b1a528d 100644
--- a/sphinx_needs/directives/needlist.py
+++ b/sphinx_needs/directives/needlist.py
@@ -41,7 +41,9 @@ class NeedlistDirective(FilterBase):
def run(self) -> Sequence[nodes.Node]:
env = self.env
- targetid = "needlist-{docname}-{id}".format(docname=env.docname, id=env.new_serialno("needlist"))
+ targetid = "needlist-{docname}-{id}".format(
+ docname=env.docname, id=env.new_serialno("needlist")
+ )
targetnode = nodes.target("", "", ids=[targetid])
# Add the need and all needed information
@@ -60,7 +62,12 @@ def run(self) -> Sequence[nodes.Node]:
return [targetnode, Needlist("")]
-def process_needlist(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
+def process_needlist(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
+) -> None:
"""
Replace all needlist nodes with a list of the collected needs.
Augment each need with a backlink to the original location.
@@ -81,7 +88,7 @@ def process_needlist(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
all_needs = list(SphinxNeedsData(env).get_or_create_needs().values())
found_needs = process_filters(app, all_needs, current_needfilter)
- if 0 < len(found_needs):
+ if len(found_needs) > 0:
line_block = nodes.line_block()
# Add lineno to node
@@ -102,10 +109,14 @@ def process_needlist(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
if need_info["hide"]:
para += title
elif need_info["is_external"]:
- assert need_info["external_url"] is not None, "External need without URL"
+ assert (
+ need_info["external_url"] is not None
+ ), "External need without URL"
ref = nodes.reference("", "")
- ref["refuri"] = check_and_calc_base_url_rel_path(need_info["external_url"], fromdocname)
+ ref["refuri"] = check_and_calc_base_url_rel_path(
+ need_info["external_url"], fromdocname
+ )
ref["classes"].append(need_info["external_css"])
ref.append(title)
@@ -114,7 +125,9 @@ def process_needlist(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
target_id = need_info["target_id"]
ref = nodes.reference("", "")
ref["refdocname"] = need_info["docname"]
- ref["refuri"] = builder.get_relative_uri(fromdocname, need_info["docname"])
+ ref["refuri"] = builder.get_relative_uri(
+ fromdocname, need_info["docname"]
+ )
ref["refuri"] += "#" + target_id
ref.append(title)
para += ref
@@ -122,7 +135,9 @@ def process_needlist(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
content.append(line_block)
if len(content) == 0:
- content.append(no_needs_found_paragraph(current_needfilter.get("filter_warning")))
+ content.append(
+ no_needs_found_paragraph(current_needfilter.get("filter_warning"))
+ )
if current_needfilter["show_filters"]:
content.append(used_filter_paragraph(current_needfilter))
diff --git a/sphinx_needs/directives/needpie.py b/sphinx_needs/directives/needpie.py
index f055dffe3..700c25247 100644
--- a/sphinx_needs/directives/needpie.py
+++ b/sphinx_needs/directives/needpie.py
@@ -106,7 +106,12 @@ def run(self) -> Sequence[nodes.Node]:
@measure_time("needpie")
-def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
+def process_needpie(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
+) -> None:
env = app.env
needs_data = SphinxNeedsData(env)
needs_config = NeedsSphinxConfig(env.config)
@@ -149,7 +154,9 @@ def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
content = current_needpie["content"]
sizes = []
- need_list = list(prepare_need_list(needs_data.get_or_create_needs().values())) # adds parts to need_list
+ need_list = list(
+ prepare_need_list(needs_data.get_or_create_needs().values())
+ ) # adds parts to need_list
if content and not current_needpie["filter_func"]:
for line in content:
if line.isdigit():
@@ -160,7 +167,9 @@ def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
elif current_needpie["filter_func"] and not content:
try:
# check and get filter_func
- filter_func, filter_args = check_and_get_external_filter_func(current_needpie.get("filter_func"))
+ filter_func, filter_args = check_and_get_external_filter_func(
+ current_needpie.get("filter_func")
+ )
# execute filter_func code
# Provides only a copy of needs to avoid data manipulations.
context = {
@@ -192,7 +201,9 @@ def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
except Exception as e:
raise e
elif current_needpie["filter_func"] and content:
- logger.error("filter_func and content can't be used at the same time for needpie.")
+ logger.error(
+ "filter_func and content can't be used at the same time for needpie."
+ )
else:
logger.error("Both filter_func and content are not used for needpie.")
@@ -215,7 +226,9 @@ def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
shadow = current_needpie["shadow"]
text_color = current_needpie["text_color"]
- fig, axes = matplotlib.pyplot.subplots(figsize=(8, 4), subplot_kw={"aspect": "equal"})
+ fig, axes = matplotlib.pyplot.subplots(
+ figsize=(8, 4), subplot_kw={"aspect": "equal"}
+ )
pie_kwargs = {
"labels": labels,
@@ -229,7 +242,9 @@ def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
if text_color:
pie_kwargs["textprops"] = {"color": text_color}
- wedges, _texts, autotexts = axes.pie(sizes, normalize=sum(float(s) for s in sizes) >= 1, **pie_kwargs)
+ wedges, _texts, autotexts = axes.pie(
+ sizes, normalize=sum(float(s) for s in sizes) >= 1, **pie_kwargs
+ )
ratio = 20 # we will remove all labels with size smaller 5%
legend_enforced = False
@@ -248,12 +263,12 @@ def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
for i in range(len(sizes)):
if sum(sizes) > 0:
labels[i] = "{label} {percent:.1f}% ({size:.0f})".format(
- label=labels[i], percent=100 * sizes[i] / sum(sizes), size=sizes[i]
+ label=labels[i],
+ percent=100 * sizes[i] / sum(sizes),
+ size=sizes[i],
)
else:
- labels[i] = "{label} {percent:.1f}% ({size:.0f})".format(
- label=labels[i], percent=0.0, size=sizes[i]
- )
+ labels[i] = f"{labels[i]} {0.0:.1f}% ({sizes[i]:.0f})"
if text_color:
for autotext in autotexts:
@@ -262,7 +277,13 @@ def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
# Legend preparation
if current_needpie["legend"]:
- axes.legend(wedges, labels, title="legend", loc="center left", bbox_to_anchor=(0.8, 0, 0.5, 1))
+ axes.legend(
+ wedges,
+ labels,
+ title="legend",
+ loc="center left",
+ bbox_to_anchor=(0.8, 0, 0.5, 1),
+ )
matplotlib.pyplot.setp(autotexts, size=8, weight="bold")
@@ -273,13 +294,17 @@ def process_needpie(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
# We need to calculate an unique pie-image file name
hash_value = hashlib.sha256(id.encode()).hexdigest()[:5]
- image_node = save_matplotlib_figure(app, fig, f"need_pie_{hash_value}", fromdocname)
+ image_node = save_matplotlib_figure(
+ app, fig, f"need_pie_{hash_value}", fromdocname
+ )
# Add lineno to node
image_node.line = current_needpie["lineno"]
if len(sizes) == 0 or all(s == 0 for s in sizes):
- node.replace_self(no_needs_found_paragraph(current_needpie.get("filter_warning")))
+ node.replace_self(
+ no_needs_found_paragraph(current_needpie.get("filter_warning"))
+ )
else:
node.replace_self(image_node)
diff --git a/sphinx_needs/directives/needreport.py b/sphinx_needs/directives/needreport.py
index 97f4b5c08..516a70abe 100644
--- a/sphinx_needs/directives/needreport.py
+++ b/sphinx_needs/directives/needreport.py
@@ -49,12 +49,18 @@ def run(self) -> Sequence[nodes.raw]:
report_info.update(**needs_config.render_context)
if "template" in self.options:
- need_report_template_path = Path(self.env.relfn2path(self.options["template"], self.env.docname)[1])
+ need_report_template_path = Path(
+ self.env.relfn2path(self.options["template"], self.env.docname)[1]
+ )
elif needs_config.report_template:
# Absolute path starts with /, based on the conf.py directory. The / need to be striped
- need_report_template_path = Path(str(env.app.srcdir)) / needs_config.report_template.lstrip("/")
+ need_report_template_path = Path(
+ str(env.app.srcdir)
+ ) / needs_config.report_template.lstrip("/")
else:
- need_report_template_path = Path(__file__).parent / "needreport_template.rst"
+ need_report_template_path = (
+ Path(__file__).parent / "needreport_template.rst"
+ )
if not need_report_template_path.is_file():
LOGGER.warning(
@@ -65,11 +71,15 @@ def run(self) -> Sequence[nodes.raw]:
)
return []
- needs_report_template_file_content = need_report_template_path.read_text(encoding="utf8")
+ needs_report_template_file_content = need_report_template_path.read_text(
+ encoding="utf8"
+ )
template = Template(needs_report_template_file_content, autoescape=True)
text = template.render(**report_info)
- self.state_machine.insert_input(text.split("\n"), self.state_machine.document.attributes["source"])
+ self.state_machine.insert_input(
+ text.split("\n"), self.state_machine.document.attributes["source"]
+ )
report_node = nodes.raw()
diff --git a/sphinx_needs/directives/needsequence.py b/sphinx_needs/directives/needsequence.py
index 42b1e1e25..bba1269dc 100644
--- a/sphinx_needs/directives/needsequence.py
+++ b/sphinx_needs/directives/needsequence.py
@@ -57,7 +57,8 @@ def run(self) -> Sequence[nodes.Node]:
start = self.options.get("start")
if start is None or len(start.strip()) == 0:
raise NeedSequenceException(
- "No valid start option given for needsequence. " "See file {}:{}".format(env.docname, self.lineno)
+ "No valid start option given for needsequence. "
+ f"See file {env.docname}:{self.lineno}"
)
# Add the needsequence and all needed information
@@ -76,7 +77,10 @@ def run(self) -> Sequence[nodes.Node]:
def process_needsequence(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
) -> None:
# Replace all needsequence nodes with a list of the collected needs.
env = app.env
@@ -98,13 +102,17 @@ def process_needsequence(
id = node.attributes["ids"][0]
current_needsequence = needs_data.get_or_create_sequences()[id]
- option_link_types = [link.upper() for link in current_needsequence["link_types"]]
+ option_link_types = [
+ link.upper() for link in current_needsequence["link_types"]
+ ]
for lt in option_link_types:
if lt not in link_type_names:
logger.warning(
"Unknown link type {link_type} in needsequence {flow}. Allowed values:"
" {link_types} [needs]".format(
- link_type=lt, flow=current_needsequence["target_id"], link_types=",".join(link_type_names)
+ link_type=lt,
+ flow=current_needsequence["target_id"],
+ link_types=",".join(link_type_names),
),
type="needs",
)
@@ -131,7 +139,9 @@ def process_needsequence(
config = current_needsequence["config"]
puml_node["uml"] += add_config(config)
- start_needs_id = [x.strip() for x in re.split(";|,", current_needsequence["start"])]
+ start_needs_id = [
+ x.strip() for x in re.split(";|,", current_needsequence["start"])
+ ]
if len(start_needs_id) == 0:
# TODO this should be a warning (and not tested)
raise NeedSequenceException(
@@ -150,9 +160,11 @@ def process_needsequence(
need = all_needs_dict[need_id.strip()]
except KeyError:
raise NeedSequenceException(
- "Given {} in needsequence unknown."
- " File {}"
- ":{}".format(need_id, current_needsequence["docname"], current_needsequence["lineno"])
+ "Given {} in needsequence unknown." " File {}" ":{}".format(
+ need_id,
+ current_needsequence["docname"],
+ current_needsequence["lineno"],
+ )
)
# Add children of participants
@@ -177,7 +189,9 @@ def process_needsequence(
puml_node["uml"] += "\n@enduml"
puml_node["incdir"] = os.path.dirname(current_needsequence["docname"])
- puml_node["filename"] = os.path.split(current_needsequence["docname"])[1] # Needed for plantuml >= 0.9
+ puml_node["filename"] = os.path.split(current_needsequence["docname"])[
+ 1
+ ] # Needed for plantuml >= 0.9
scale = int(current_needsequence["scale"])
# if scale != 100:
@@ -203,8 +217,12 @@ def process_needsequence(
gen_flow_link = generate_name(app, puml_node.children[0], file_ext)
current_file_parts = fromdocname.split("/")
subfolder_amount = len(current_file_parts) - 1
- img_locaton = "../" * subfolder_amount + "_images/" + gen_flow_link[0].split("/")[-1]
- flow_ref = nodes.reference("t", current_needsequence["caption"], refuri=img_locaton)
+ img_locaton = (
+ "../" * subfolder_amount + "_images/" + gen_flow_link[0].split("/")[-1]
+ )
+ flow_ref = nodes.reference(
+ "t", current_needsequence["caption"], refuri=img_locaton
+ )
puml_node += nodes.caption("", "", flow_ref)
# Add lineno to node
@@ -212,8 +230,12 @@ def process_needsequence(
content.append(puml_node)
- if len(c_string) == 0 and p_string.count("participant") == 1: # no connections and just one (start) participant
- content = [(no_needs_found_paragraph(current_needsequence.get("filter_warning")))]
+ if (
+ len(c_string) == 0 and p_string.count("participant") == 1
+ ): # no connections and just one (start) participant
+ content = [
+ (no_needs_found_paragraph(current_needsequence.get("filter_warning")))
+ ]
if current_needsequence["show_filters"]:
content.append(get_filter_para(current_needsequence))
@@ -241,7 +263,11 @@ def get_message_needs(
p_string = ""
c_string = ""
for msg_need in msg_needs:
- messages[msg_need["id"]] = {"id": msg_need["id"], "title": msg_need["title"], "receivers": {}}
+ messages[msg_need["id"]] = {
+ "id": msg_need["id"],
+ "title": msg_need["title"],
+ "receivers": {},
+ }
if sender["id"] not in tracked_receivers:
p_string += 'participant "{}" as {}\n'.format(sender["title"], sender["id"])
tracked_receivers.append(sender["id"])
@@ -252,17 +278,31 @@ def get_message_needs(
from sphinx_needs.filter_common import filter_single_need
if not filter_single_need(
- all_needs_dict[rec_id], NeedsSphinxConfig(app.config), filter, needs=all_needs_dict.values()
+ all_needs_dict[rec_id],
+ NeedsSphinxConfig(app.config),
+ filter,
+ needs=all_needs_dict.values(),
):
continue
- rec_data = {"id": rec_id, "title": all_needs_dict[rec_id]["title"], "messages": []}
+ rec_data = {
+ "id": rec_id,
+ "title": all_needs_dict[rec_id]["title"],
+ "messages": [],
+ }
- c_string += "{} -> {}: {}\n".format(sender["id"], rec_data["id"], msg_need["title"])
+ c_string += "{} -> {}: {}\n".format(
+ sender["id"], rec_data["id"], msg_need["title"]
+ )
if rec_id not in tracked_receivers:
rec_messages, p_string_new, c_string_new = get_message_needs(
- app, all_needs_dict[rec_id], link_types, all_needs_dict, tracked_receivers, filter=filter
+ app,
+ all_needs_dict[rec_id],
+ link_types,
+ all_needs_dict,
+ tracked_receivers,
+ filter=filter,
)
p_string += p_string_new
c_string += c_string_new
diff --git a/sphinx_needs/directives/needservice.py b/sphinx_needs/directives/needservice.py
index bf234f7fe..73f920b7d 100644
--- a/sphinx_needs/directives/needservice.py
+++ b/sphinx_needs/directives/needservice.py
@@ -48,7 +48,17 @@ def __init__(
state: RSTState,
state_machine: RSTStateMachine,
):
- super().__init__(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine)
+ super().__init__(
+ name,
+ arguments,
+ options,
+ content,
+ lineno,
+ content_offset,
+ block_text,
+ state,
+ state_machine,
+ )
self.log = get_logger(__name__)
def run(self) -> Sequence[nodes.Node]:
@@ -94,8 +104,12 @@ def run(self) -> Sequence[nodes.Node]:
missing_options = {}
for element in datum.keys():
defined_options = list(self.__class__.option_spec.keys())
- defined_options.append("content") # Add content, so that it gets not detected as missing
- if element not in defined_options and element not in getattr(app.config, "needs_extra_links", []):
+ defined_options.append(
+ "content"
+ ) # Add content, so that it gets not detected as missing
+ if element not in defined_options and element not in getattr(
+ app.config, "needs_extra_links", []
+ ):
missing_options[element] = datum[element]
# Finally delete not found options
@@ -112,13 +126,25 @@ def run(self) -> Sequence[nodes.Node]:
datum.update(options)
# ToDo: Tags and Status are not set (but exist in data)
- section += add_need(self.env.app, self.state, docname, self.lineno, need_type, need_title, **datum)
+ section += add_need(
+ self.env.app,
+ self.state,
+ docname,
+ self.lineno,
+ need_type,
+ need_title,
+ **datum,
+ )
else:
try:
service_debug_data = service.debug(self.options)
except NotImplementedError:
- service_debug_data = {"error": f'Service {service_name} does not support "debug" output.'}
- viewer_node = get_data_viewer_node(title="Debug data", data=service_debug_data)
+ service_debug_data = {
+ "error": f'Service {service_name} does not support "debug" output.'
+ }
+ viewer_node = get_data_viewer_node(
+ title="Debug data", data=service_debug_data
+ )
section.append(viewer_node)
add_doc(self.env, self.env.docname)
diff --git a/sphinx_needs/directives/needtable.py b/sphinx_needs/directives/needtable.py
index c05dfb61b..e6c492d79 100644
--- a/sphinx_needs/directives/needtable.py
+++ b/sphinx_needs/directives/needtable.py
@@ -52,7 +52,9 @@ class NeedtableDirective(FilterBase):
def run(self) -> Sequence[nodes.Node]:
env = self.env
- targetid = "needtable-{docname}-{id}".format(docname=env.docname, id=env.new_serialno("needtable"))
+ targetid = "needtable-{docname}-{id}".format(
+ docname=env.docname, id=env.new_serialno("needtable")
+ )
targetnode = nodes.target("", "", ids=[targetid])
columns_str = str(self.options.get("columns", ""))
@@ -68,7 +70,9 @@ def run(self) -> Sequence[nodes.Node]:
colwidths = str(self.options.get("colwidths", ""))
colwidths_list = []
if colwidths:
- colwidths_list = [int(width.strip()) for width in re.split(";|,", colwidths)]
+ colwidths_list = [
+ int(width.strip()) for width in re.split(";|,", colwidths)
+ ]
if len(columns) != len(colwidths_list):
raise NeedsInvalidException(
f"Amount of elements in colwidths and columns do not match: "
@@ -115,7 +119,10 @@ def run(self) -> Sequence[nodes.Node]:
@measure_time("needtable")
@profile("NEEDTABLE")
def process_needtables(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
) -> None:
"""
Replace all needtables nodes with a table of filtered nodes.
@@ -149,7 +156,9 @@ def process_needtables(
id = node.attributes["ids"][0]
current_needtable = needs_data.get_or_create_tables()[id]
- if current_needtable["style"] == "" or current_needtable["style"].upper() not in ["TABLE", "DATATABLES"]:
+ if current_needtable["style"] == "" or current_needtable[
+ "style"
+ ].upper() not in ["TABLE", "DATATABLES"]:
if needs_config.table_style == "":
style = "DATATABLES"
else:
@@ -199,7 +208,9 @@ def process_needtables(
# Perform filtering of needs
try:
- filtered_needs = process_filters(app, list(all_needs.values()), current_needtable)
+ filtered_needs = process_filters(
+ app, list(all_needs.values()), current_needtable
+ )
except Exception as e:
raise e
@@ -228,8 +239,12 @@ def sort(need: NeedsInfoType) -> Any:
filtered_needs.sort(key=get_sorter(current_needtable["sort"]))
for need_info in filtered_needs:
- style_row = check_and_get_content(current_needtable["style_row"], need_info, env)
- style_row = style_row.replace(" ", "_") # Replace whitespaces with _ to get valid css name
+ style_row = check_and_get_content(
+ current_needtable["style_row"], need_info, env
+ )
+ style_row = style_row.replace(
+ " ", "_"
+ ) # Replace whitespaces with _ to get valid css name
temp_need = need_info.copy()
if temp_need["is_need"]:
@@ -243,12 +258,26 @@ def sort(need: NeedsInfoType) -> Any:
for option, _title in current_needtable["columns"]:
if option == "ID":
- row += row_col_maker(app, fromdocname, all_needs, temp_need, "id", make_ref=True, prefix=prefix)
+ row += row_col_maker(
+ app,
+ fromdocname,
+ all_needs,
+ temp_need,
+ "id",
+ make_ref=True,
+ prefix=prefix,
+ )
elif option == "TITLE":
- row += row_col_maker(app, fromdocname, all_needs, temp_need, "title", prefix=prefix)
+ row += row_col_maker(
+ app, fromdocname, all_needs, temp_need, "title", prefix=prefix
+ )
elif option in link_type_list:
link_type = link_type_list[option]
- if option in ["INCOMING", link_type["option"].upper() + "_BACK", link_type["incoming"].upper()]:
+ if option in [
+ "INCOMING",
+ link_type["option"].upper() + "_BACK",
+ link_type["incoming"].upper(),
+ ]:
row += row_col_maker(
app,
fromdocname,
@@ -259,10 +288,17 @@ def sort(need: NeedsInfoType) -> Any:
)
else:
row += row_col_maker(
- app, fromdocname, all_needs, temp_need, link_type["option"], ref_lookup=True
+ app,
+ fromdocname,
+ all_needs,
+ temp_need,
+ link_type["option"],
+ ref_lookup=True,
)
else:
- row += row_col_maker(app, fromdocname, all_needs, temp_need, option.lower())
+ row += row_col_maker(
+ app, fromdocname, all_needs, temp_need, option.lower()
+ )
tbody += row
# Need part rows
@@ -315,7 +351,9 @@ def sort(need: NeedsInfoType) -> Any:
ref_lookup=True,
)
else:
- row += row_col_maker(app, fromdocname, all_needs, temp_part, option.lower())
+ row += row_col_maker(
+ app, fromdocname, all_needs, temp_part, option.lower()
+ )
tbody += row
diff --git a/sphinx_needs/directives/needuml.py b/sphinx_needs/directives/needuml.py
index 0b2efb482..3fd8f3b41 100644
--- a/sphinx_needs/directives/needuml.py
+++ b/sphinx_needs/directives/needuml.py
@@ -45,10 +45,14 @@ def run(self) -> Sequence[nodes.Node]:
env = self.env
if self.name == "needarch":
- targetid = "needarch-{docname}-{id}".format(docname=env.docname, id=env.new_serialno("needarch"))
+ targetid = "needarch-{docname}-{id}".format(
+ docname=env.docname, id=env.new_serialno("needarch")
+ )
is_arch = True
else:
- targetid = "needuml-{docname}-{id}".format(docname=env.docname, id=env.new_serialno("needuml"))
+ targetid = "needuml-{docname}-{id}".format(
+ docname=env.docname, id=env.new_serialno("needuml")
+ )
is_arch = False
targetnode = nodes.target("", "", ids=[targetid])
@@ -87,7 +91,9 @@ def run(self) -> Sequence[nodes.Node]:
plantuml_code_out_path = None
if save_path:
if os.path.isabs(save_path):
- raise NeedumlException(f"Given save path: {save_path}, is not a relative path.")
+ raise NeedumlException(
+ f"Given save path: {save_path}, is not a relative path."
+ )
else:
plantuml_code_out_path = save_path
@@ -123,7 +129,9 @@ def run(self) -> Sequence[nodes.Node]:
return NeedumlDirective.run(self)
-def transform_uml_to_plantuml_node(app, uml_content: str, parent_need_id: str, key: str, kwargs: dict, config: str):
+def transform_uml_to_plantuml_node(
+ app, uml_content: str, parent_need_id: str, key: str, kwargs: dict, config: str
+):
try:
if "sphinxcontrib.plantuml" not in app.config.extensions:
raise ImportError
@@ -178,7 +186,13 @@ def get_debug_node_from_puml_node(puml_node):
def jinja2uml(
- app, fromdocname, uml_content: str, parent_need_id: str, key: str, processed_need_ids: {}, kwargs: dict
+ app,
+ fromdocname,
+ uml_content: str,
+ parent_need_id: str,
+ key: str,
+ processed_need_ids: {},
+ kwargs: dict,
) -> (str, {}):
# Let's render jinja templates with uml content template to 'plantuml syntax' uml
# 1. Remove @startuml and @enduml
@@ -192,7 +206,9 @@ def jinja2uml(
# 4. Append need_id to processed_need_ids, so it will not been processed again
if parent_need_id:
- jinja_utils.append_need_to_processed_needs(need_id=parent_need_id, art="uml", key=key, kwargs=kwargs)
+ jinja_utils.append_need_to_processed_needs(
+ need_id=parent_need_id, art="uml", key=key, kwargs=kwargs
+ )
# 5. Get data for the jinja processing
data = {}
@@ -229,13 +245,17 @@ class JinjaFunctions:
Provides access to sphinx-app and all Needs objects.
"""
- def __init__(self, app: Sphinx, fromdocname, parent_need_id: str, processed_need_ids: dict):
+ def __init__(
+ self, app: Sphinx, fromdocname, parent_need_id: str, processed_need_ids: dict
+ ):
self.needs = SphinxNeedsData(app.env).get_or_create_needs()
self.app = app
self.fromdocname = fromdocname
self.parent_need_id = parent_need_id
if parent_need_id and parent_need_id not in self.needs:
- raise NeedumlException(f"JinjaFunctions initialized with undefined parent_need_id: '{parent_need_id}'")
+ raise NeedumlException(
+ f"JinjaFunctions initialized with undefined parent_need_id: '{parent_need_id}'"
+ )
self.processed_need_ids = processed_need_ids
def need_to_processed_data(self, art: str, key: str, kwargs: dict) -> {}:
@@ -246,7 +266,9 @@ def need_to_processed_data(self, art: str, key: str, kwargs: dict) -> {}:
}
return d
- def append_need_to_processed_needs(self, need_id: str, art: str, key: str, kwargs: dict) -> None:
+ def append_need_to_processed_needs(
+ self, need_id: str, art: str, key: str, kwargs: dict
+ ) -> None:
data = self.need_to_processed_data(art=art, key=key, kwargs=kwargs)
if need_id not in self.processed_need_ids:
self.processed_need_ids[need_id] = []
@@ -261,18 +283,26 @@ def append_needs_to_processed_needs(self, processed_needs_data: dict) -> None:
if d not in self.processed_need_ids[k]:
self.processed_need_ids[k].append(d)
- def data_in_processed_data(self, need_id: str, art: str, key: str, kwargs: dict) -> bool:
+ def data_in_processed_data(
+ self, need_id: str, art: str, key: str, kwargs: dict
+ ) -> bool:
data = self.need_to_processed_data(art=art, key=key, kwargs=kwargs)
- return (need_id in self.processed_need_ids) and (data in self.processed_need_ids[need_id])
+ return (need_id in self.processed_need_ids) and (
+ data in self.processed_need_ids[need_id]
+ )
def get_processed_need_ids(self) -> {}:
return self.processed_need_ids
def uml_from_need(self, need_id: str, key: str = "diagram", **kwargs) -> str:
if need_id not in self.needs:
- raise NeedumlException(f"Jinja function uml() is called with undefined need_id: '{need_id}'.")
+ raise NeedumlException(
+ f"Jinja function uml() is called with undefined need_id: '{need_id}'."
+ )
- if self.data_in_processed_data(need_id=need_id, art="uml", key=key, kwargs=kwargs):
+ if self.data_in_processed_data(
+ need_id=need_id, art="uml", key=key, kwargs=kwargs
+ ):
return ""
need_info = self.needs[need_id]
@@ -281,7 +311,9 @@ def uml_from_need(self, need_id: str, key: str = "diagram", **kwargs) -> str:
if need_info["arch"][key]:
uml_content = need_info["arch"][key]
else:
- raise NeedumlException(f"Option key name: {key} does not exist in need {need_id}.")
+ raise NeedumlException(
+ f"Option key name: {key} does not exist in need {need_id}."
+ )
else:
if "diagram" in need_info["arch"] and need_info["arch"]["diagram"]:
uml_content = need_info["arch"]["diagram"]
@@ -307,13 +339,17 @@ def uml_from_need(self, need_id: str, key: str = "diagram", **kwargs) -> str:
def flow(self, need_id) -> str:
if need_id not in self.needs:
- raise NeedumlException(f"Jinja function flow is called with undefined need_id: '{need_id}'.")
+ raise NeedumlException(
+ f"Jinja function flow is called with undefined need_id: '{need_id}'."
+ )
if self.data_in_processed_data(need_id=need_id, art="flow", key="", kwargs={}):
return ""
# append need_id to processed_need_ids, so it will not been processed again
- self.append_need_to_processed_needs(need_id=need_id, art="flow", key="", kwargs={})
+ self.append_need_to_processed_needs(
+ need_id=need_id, art="flow", key="", kwargs={}
+ )
need_info = self.needs[need_id]
link = calculate_link(self.app, need_info, self.fromdocname)
@@ -334,9 +370,13 @@ def flow(self, need_id) -> str:
def ref(self, need_id: str, option: str = None, text: str = None) -> str:
if need_id not in self.needs:
- raise NeedumlException(f"Jinja function ref is called with undefined need_id: '{need_id}'.")
+ raise NeedumlException(
+ f"Jinja function ref is called with undefined need_id: '{need_id}'."
+ )
if (option and text) and (not option and not text):
- raise NeedumlException("Jinja function ref requires exactly one entry 'option' or 'text'")
+ raise NeedumlException(
+ "Jinja function ref requires exactly one entry 'option' or 'text'"
+ )
need_info = self.needs[need_id]
link = calculate_link(self.app, need_info, self.fromdocname)
@@ -354,11 +394,15 @@ def filter(self, filter_string):
"""
needs_config = NeedsSphinxConfig(self.app.config)
- return filter_needs(list(self.needs.values()), needs_config, filter_string=filter_string)
+ return filter_needs(
+ list(self.needs.values()), needs_config, filter_string=filter_string
+ )
def imports(self, *args):
if not self.parent_need_id:
- raise NeedumlException("Jinja function 'import()' is not supported in needuml directive.")
+ raise NeedumlException(
+ "Jinja function 'import()' is not supported in needuml directive."
+ )
# gets all need ids from need links/extra_links options and wrap into jinja function uml()
need_info = self.needs[self.parent_need_id]
uml_ids = []
@@ -378,7 +422,9 @@ def imports(self, *args):
def need(self):
if not self.parent_need_id:
- raise NeedumlException("Jinja function 'need()' is not supported in needuml directive.")
+ raise NeedumlException(
+ "Jinja function 'need()' is not supported in needuml directive."
+ )
return self.needs[self.parent_need_id]
@@ -407,7 +453,12 @@ def is_element_of_need(node: nodes.Element) -> str:
@measure_time("needuml")
-def process_needuml(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
+def process_needuml(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
+) -> None:
env = app.env
# for node in doctree.findall(Needuml):
@@ -421,14 +472,18 @@ def process_needuml(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
# Check if needarch is only used inside a need
parent_need_id = is_element_of_need(node)
if not parent_need_id:
- raise NeedArchException("Directive needarch can only be used inside a need.")
+ raise NeedArchException(
+ "Directive needarch can only be used inside a need."
+ )
content = []
# Adding config
config = current_needuml["config"]
if config and len(config) >= 3:
# Remove all empty lines
- config = "\n".join([line.strip() for line in config.split("\n") if line.strip()])
+ config = "\n".join(
+ [line.strip() for line in config.split("\n") if line.strip()]
+ )
puml_node = transform_uml_to_plantuml_node(
app=app,
@@ -459,7 +514,9 @@ def process_needuml(app: Sphinx, doctree: nodes.document, fromdocname: str, foun
puml_node["align"] = "center"
puml_node["incdir"] = os.path.dirname(current_needuml["docname"])
- puml_node["filename"] = os.path.split(current_needuml["docname"])[1] # Needed for plantuml >= 0.9
+ puml_node["filename"] = os.path.split(current_needuml["docname"])[
+ 1
+ ] # Needed for plantuml >= 0.9
content.append(puml_node)
diff --git a/sphinx_needs/directives/utils.py b/sphinx_needs/directives/utils.py
index 378342909..abb3b891b 100644
--- a/sphinx_needs/directives/utils.py
+++ b/sphinx_needs/directives/utils.py
@@ -24,17 +24,25 @@ def used_filter_paragraph(current_needfilter: NeedsFilteredBaseType) -> nodes.pa
para = nodes.paragraph()
filter_text = "Used filter:"
filter_text += (
- " status(%s)" % " OR ".join(current_needfilter["status"]) if len(current_needfilter["status"]) > 0 else ""
+ " status(%s)" % " OR ".join(current_needfilter["status"])
+ if len(current_needfilter["status"]) > 0
+ else ""
)
if len(current_needfilter["status"]) > 0 and len(current_needfilter["tags"]) > 0:
filter_text += " AND "
- filter_text += " tags(%s)" % " OR ".join(current_needfilter["tags"]) if len(current_needfilter["tags"]) > 0 else ""
- if (len(current_needfilter["status"]) > 0 or len(current_needfilter["tags"]) > 0) and len(
- current_needfilter["types"]
- ) > 0:
+ filter_text += (
+ " tags(%s)" % " OR ".join(current_needfilter["tags"])
+ if len(current_needfilter["tags"]) > 0
+ else ""
+ )
+ if (
+ len(current_needfilter["status"]) > 0 or len(current_needfilter["tags"]) > 0
+ ) and len(current_needfilter["types"]) > 0:
filter_text += " AND "
filter_text += (
- " types(%s)" % " OR ".join(current_needfilter["types"]) if len(current_needfilter["types"]) > 0 else ""
+ " types(%s)" % " OR ".join(current_needfilter["types"])
+ if len(current_needfilter["types"]) > 0
+ else ""
)
filter_node = nodes.emphasis(filter_text, filter_text)
@@ -91,7 +99,9 @@ def analyse_needs_metrics(env: BuildEnvironment) -> dict[str, Any]:
if i["type"] in needs_types:
needs_types[i["type"]] += 1
- metric_data["needs_types"] = {i[0]: i[1] for i in sorted(needs_types.items(), key=lambda x: x[0])}
+ metric_data["needs_types"] = {
+ i[0]: i[1] for i in sorted(needs_types.items(), key=lambda x: x[0])
+ }
return metric_data
diff --git a/sphinx_needs/environment.py b/sphinx_needs/environment.py
index 61dbf95dc..786ea2666 100644
--- a/sphinx_needs/environment.py
+++ b/sphinx_needs/environment.py
@@ -40,13 +40,21 @@ def safe_add_file(filename: Path, app: Sphinx) -> None:
if pure_path.suffix == ".js":
# Make sure the calculated (posix)-path is not already registered as "web"-path
- if hasattr(builder, "script_files") and str(static_data_file) not in builder.script_files:
+ if (
+ hasattr(builder, "script_files")
+ and str(static_data_file) not in builder.script_files
+ ):
app.add_js_file(str(pure_path))
elif pure_path.suffix == ".css":
- if hasattr(builder, "css_files") and str(static_data_file) not in builder.css_files:
+ if (
+ hasattr(builder, "css_files")
+ and str(static_data_file) not in builder.css_files
+ ):
app.add_css_file(str(pure_path))
else:
- raise NotImplementedError(f"File type {pure_path.suffix} not support by save_add_file")
+ raise NotImplementedError(
+ f"File type {pure_path.suffix} not support by save_add_file"
+ )
def safe_remove_file(filename: Path, app: Sphinx) -> None:
@@ -120,7 +128,10 @@ def _find_css_files() -> Iterable[Path]:
if not source_file_path.exists():
source_file_path = css_root / "blank" / "blank.css"
- logger.warning(f"{source_file_path} not found. Copying sphinx-internal blank.css [needs]", type="needs")
+ logger.warning(
+ f"{source_file_path} not found. Copying sphinx-internal blank.css [needs]",
+ type="needs",
+ )
dest_file = dest_dir / source_file_path.name
dest_dir.mkdir(exist_ok=True)
@@ -215,7 +226,9 @@ def install_permalink_file(app: Sphinx, env: BuildEnvironment) -> None:
return
# load jinja template
- jinja_env = Environment(loader=PackageLoader("sphinx_needs"), autoescape=select_autoescape())
+ jinja_env = Environment(
+ loader=PackageLoader("sphinx_needs"), autoescape=select_autoescape()
+ )
template = jinja_env.get_template("permalink.html")
# save file to build dir
diff --git a/sphinx_needs/external_needs.py b/sphinx_needs/external_needs.py
index fc8003648..331038996 100644
--- a/sphinx_needs/external_needs.py
+++ b/sphinx_needs/external_needs.py
@@ -47,18 +47,28 @@ def load_external_needs(app: Sphinx, env: BuildEnvironment, _docname: str) -> No
)
)
elif not (source.get("json_url", False) or source.get("json_path", False)):
- raise NeedsExternalException("json_path or json_url must be configured to use external_needs.")
+ raise NeedsExternalException(
+ "json_path or json_url must be configured to use external_needs."
+ )
if source.get("json_url", False):
- log.info(clean_log(f"Loading external needs from url {source['json_url']}."))
+ log.info(
+ clean_log(f"Loading external needs from url {source['json_url']}.")
+ )
s = requests.Session()
s.mount("file://", FileAdapter())
try:
response = s.get(source["json_url"])
- needs_json = response.json() # The downloaded file MUST be json. Everything else we do not handle!
+ needs_json = (
+ response.json()
+ ) # The downloaded file MUST be json. Everything else we do not handle!
except Exception as e:
raise NeedsExternalException(
- clean_log("Getting {} didn't work. Reason: {}".format(source["json_url"], e))
+ clean_log(
+ "Getting {} didn't work. Reason: {}".format(
+ source["json_url"], e
+ )
+ )
)
if source.get("json_path", False):
@@ -68,7 +78,9 @@ def load_external_needs(app: Sphinx, env: BuildEnvironment, _docname: str) -> No
json_path = os.path.join(app.srcdir, source["json_path"])
if not os.path.exists(json_path):
- raise NeedsExternalException(f"Given json_path {json_path} does not exist.")
+ raise NeedsExternalException(
+ f"Given json_path {json_path} does not exist."
+ )
with open(json_path) as json_file:
needs_json = json.load(json_file)
@@ -83,7 +95,9 @@ def load_external_needs(app: Sphinx, env: BuildEnvironment, _docname: str) -> No
needs = needs_json["versions"][version]["needs"]
except KeyError:
raise NeedsExternalException(
- clean_log(f"Version {version} not found in json file from {source['json_url']}")
+ clean_log(
+ f"Version {version} not found in json file from {source['json_url']}"
+ )
)
log.debug(f"Loading {len(needs)} needs.")
@@ -98,7 +112,16 @@ def load_external_needs(app: Sphinx, env: BuildEnvironment, _docname: str) -> No
if (
key not in needs_config.extra_options
and key not in extra_links
- and key not in ["title", "type", "id", "description", "tags", "docname", "status"]
+ and key
+ not in [
+ "title",
+ "type",
+ "id",
+ "description",
+ "tags",
+ "docname",
+ "status",
+ ]
):
del need_params[key]
@@ -112,9 +135,9 @@ def load_external_needs(app: Sphinx, env: BuildEnvironment, _docname: str) -> No
cal_target_url = mem_template.render(**{"need": need})
need_params["external_url"] = f'{source["base_url"]}/{cal_target_url}'
else:
- need_params["external_url"] = (
- f'{source["base_url"]}/{need.get("docname", "__error__")}.html#{need["id"]}'
- )
+ need_params[
+ "external_url"
+ ] = f'{source["base_url"]}/{need.get("docname", "__error__")}.html#{need["id"]}'
need_params["content"] = need["description"]
need_params["links"] = need.get("links", [])
diff --git a/sphinx_needs/filter_common.py b/sphinx_needs/filter_common.py
index 896256860..0ba740372 100644
--- a/sphinx_needs/filter_common.py
+++ b/sphinx_needs/filter_common.py
@@ -55,7 +55,11 @@ class FilterBase(SphinxDirective):
def collect_filter_attributes(self) -> FilterAttributesType:
_tags = str(self.options.get("tags", ""))
- tags = [tag.strip() for tag in re.split(";|,", _tags) if len(tag) > 0] if _tags else []
+ tags = (
+ [tag.strip() for tag in re.split(";|,", _tags) if len(tag) > 0]
+ if _tags
+ else []
+ )
status = self.options.get("status")
if status:
@@ -92,7 +96,10 @@ def collect_filter_attributes(self) -> FilterAttributesType:
def process_filters(
- app: Sphinx, all_needs: Iterable[NeedsInfoType], filter_data: NeedsFilteredBaseType, include_external: bool = True
+ app: Sphinx,
+ all_needs: Iterable[NeedsInfoType],
+ filter_data: NeedsFilteredBaseType,
+ include_external: bool = True,
) -> list[NeedsPartsInfoType]:
"""
Filters all needs with given configuration.
@@ -112,7 +119,10 @@ def process_filters(
try:
all_needs = sorted(all_needs, key=lambda node: node[sort_key] or "") # type: ignore[literal-required]
except KeyError as e:
- log.warning(f"Sorting parameter {sort_key} not valid: Error: {e} [needs]", type="needs")
+ log.warning(
+ f"Sorting parameter {sort_key} not valid: Error: {e} [needs]",
+ type="needs",
+ )
# check if include external needs
checked_all_needs: Iterable[NeedsInfoType]
@@ -130,7 +140,9 @@ def process_filters(
all_needs_incl_parts = prepare_need_list(checked_all_needs)
# Check if external filter code is defined
- filter_func, filter_args = check_and_get_external_filter_func(filter_data.get("filter_func"))
+ filter_func, filter_args = check_and_get_external_filter_func(
+ filter_data.get("filter_func")
+ )
filter_code = None
# Get filter_code from
@@ -141,12 +153,19 @@ def process_filters(
if bool(filter_data["status"] or filter_data["tags"] or filter_data["types"]):
for need_info in all_needs_incl_parts:
status_filter_passed = False
- if not filter_data["status"] or need_info["status"] and need_info["status"] in filter_data["status"]:
+ if (
+ not filter_data["status"]
+ or need_info["status"]
+ and need_info["status"] in filter_data["status"]
+ ):
# Filtering for status was not requested or match was found
status_filter_passed = True
tags_filter_passed = False
- if len(set(need_info["tags"]) & set(filter_data["tags"])) > 0 or len(filter_data["tags"]) == 0:
+ if (
+ len(set(need_info["tags"]) & set(filter_data["tags"])) > 0
+ or len(filter_data["tags"]) == 0
+ ):
tags_filter_passed = True
type_filter_passed = False
@@ -160,13 +179,19 @@ def process_filters(
if status_filter_passed and tags_filter_passed and type_filter_passed:
found_needs_by_options.append(need_info)
# Get need by filter string
- found_needs_by_string = filter_needs(all_needs_incl_parts, needs_config, filter_data["filter"])
+ found_needs_by_string = filter_needs(
+ all_needs_incl_parts, needs_config, filter_data["filter"]
+ )
# Make an intersection of both lists
- found_needs = intersection_of_need_results(found_needs_by_options, found_needs_by_string)
+ found_needs = intersection_of_need_results(
+ found_needs_by_options, found_needs_by_string
+ )
else:
# There is no other config as the one for filter string.
# So we only need this result.
- found_needs = filter_needs(all_needs_incl_parts, needs_config, filter_data["filter"])
+ found_needs = filter_needs(
+ all_needs_incl_parts, needs_config, filter_data["filter"]
+ )
else:
# Provides only a copy of needs to avoid data manipulations.
context = {
@@ -185,7 +210,9 @@ def process_filters(
context[f"arg{index+1}"] = arg
# Decorate function to allow time measurments
- filter_func = measure_time_func(filter_func, category="filter_func", source="user")
+ filter_func = measure_time_func(
+ filter_func, category="filter_func", source="user"
+ )
filter_func(**context)
else:
log.warning("Something went wrong running filter [needs]", type="needs")
@@ -237,7 +264,11 @@ def prepare_need_list(need_list: Iterable[NeedsInfoType]) -> list[NeedsPartsInfo
for need in need_list:
for part in need["parts"].values():
id_complete = ".".join([need["id"], part["id"]])
- filter_part: NeedsPartsInfoType = {**need, **part, **{"id_parent": need["id"], "id_complete": id_complete}} # type: ignore[typeddict-item]
+ filter_part: NeedsPartsInfoType = {
+ **need,
+ **part,
+ **{"id_parent": need["id"], "id_complete": id_complete}, # type: ignore[typeddict-item]
+ }
all_needs_incl_parts.append(filter_part)
# Be sure extra attributes, which makes only sense for need_parts, are also available on
@@ -288,12 +319,21 @@ def filter_needs(
for filter_need in needs:
try:
if filter_single_need(
- filter_need, config, filter_string, needs, current_need, filter_compiled=filter_compiled
+ filter_need,
+ config,
+ filter_string,
+ needs,
+ current_need,
+ filter_compiled=filter_compiled,
):
found_needs.append(filter_need)
except Exception as e:
if not error_reported: # Let's report a filter-problem only onces
- location = (current_need["docname"], current_need["lineno"]) if current_need else None
+ location = (
+ (current_need["docname"], current_need["lineno"])
+ if current_need
+ else None
+ )
log.warning(str(e) + " [needs]", type="needs", location=location)
error_reported = True
diff --git a/sphinx_needs/functions/common.py b/sphinx_needs/functions/common.py
index f5d96bf06..467743294 100644
--- a/sphinx_needs/functions/common.py
+++ b/sphinx_needs/functions/common.py
@@ -19,7 +19,13 @@
from sphinx_needs.utils import logger
-def test(app: Sphinx, need: NeedsInfoType, needs: dict[str, NeedsInfoType], *args: Any, **kwargs: Any) -> str:
+def test(
+ app: Sphinx,
+ need: NeedsInfoType,
+ needs: dict[str, NeedsInfoType],
+ *args: Any,
+ **kwargs: Any,
+) -> str:
"""
Test function for dynamic functions in sphinx needs.
@@ -41,7 +47,12 @@ def test(app: Sphinx, need: NeedsInfoType, needs: dict[str, NeedsInfoType], *arg
def echo(
- app: Sphinx, need: NeedsInfoType, needs: dict[str, NeedsInfoType], text: str, *args: Any, **kwargs: Any
+ app: Sphinx,
+ need: NeedsInfoType,
+ needs: dict[str, NeedsInfoType],
+ text: str,
+ *args: Any,
+ **kwargs: Any,
) -> str:
"""
.. versionadded:: 0.6.3
@@ -154,7 +165,9 @@ def copy(
need = needs[need_id]
if filter:
- result = filter_needs(needs.values(), NeedsSphinxConfig(app.config), filter, need)
+ result = filter_needs(
+ needs.values(), NeedsSphinxConfig(app.config), filter, need
+ )
if result:
need = result[0]
@@ -323,7 +336,10 @@ def check_linked_values(
if not filter_single_need(need, needs_config, filter_string):
continue
except Exception as e:
- logger.warning(f"CheckLinkedValues: Filter {filter_string} not valid: Error: {e} [needs]", type="needs")
+ logger.warning(
+ f"CheckLinkedValues: Filter {filter_string} not valid: Error: {e} [needs]",
+ type="needs",
+ )
need_value = need[search_option] # type: ignore[literal-required]
if not one_hit and need_value not in search_value:
@@ -422,7 +438,9 @@ def calc_sum(
:return: A float number
"""
needs_config = NeedsSphinxConfig(app.config)
- check_needs = [needs[link] for link in need["links"]] if links_only else needs.values()
+ check_needs = (
+ [needs[link] for link in need["links"]] if links_only else needs.values()
+ )
calculated_sum = 0.0
@@ -434,7 +452,9 @@ def calc_sum(
except ValueError:
pass
except NeedsInvalidFilter as ex:
- logger.warning(f"Given filter is not valid. Error: {ex} [needs]", type="needs")
+ logger.warning(
+ f"Given filter is not valid. Error: {ex} [needs]", type="needs"
+ )
with contextlib.suppress(ValueError):
calculated_sum += float(check_need[option]) # type: ignore[literal-required]
@@ -514,7 +534,9 @@ def links_from_content(
needs_config = NeedsSphinxConfig(app.config)
filtered_links = []
for link in raw_links:
- if link not in filtered_links and filter_single_need(needs[link], needs_config, filter):
+ if link not in filtered_links and filter_single_need(
+ needs[link], needs_config, filter
+ ):
filtered_links.append(link)
return filtered_links
diff --git a/sphinx_needs/functions/functions.py b/sphinx_needs/functions/functions.py
index 694435d17..32ead7438 100644
--- a/sphinx_needs/functions/functions.py
+++ b/sphinx_needs/functions/functions.py
@@ -29,7 +29,8 @@
# TODO these functions also take optional *args and **kwargs
DynamicFunction = Callable[
- [Sphinx, NeedsInfoType, Dict[str, NeedsInfoType]], Union[str, int, float, List[Union[str, int, float]]]
+ [Sphinx, NeedsInfoType, Dict[str, NeedsInfoType]],
+ Union[str, int, float, List[Union[str, int, float]]],
]
@@ -55,7 +56,9 @@ def register_func(need_function: DynamicFunction, name: str | None = None) -> No
# We can not throw an exception here, as using sphinx-needs in different sphinx-projects with the
# same python interpreter session does not clean NEEDS_FUNCTIONS.
# This is mostly the case during tet runs.
- logger.info(f"sphinx-needs: Function name {func_name} already registered. Ignoring the new one!")
+ logger.info(
+ f"sphinx-needs: Function name {func_name} already registered. Ignoring the new one!"
+ )
NEEDS_FUNCTIONS[func_name] = {"name": func_name, "function": need_function}
@@ -72,10 +75,22 @@ def execute_func(app: Sphinx, need: NeedsInfoType, func_string: str) -> Any:
func_name, func_args, func_kwargs = _analyze_func_string(func_string, need)
if func_name not in NEEDS_FUNCTIONS:
- raise SphinxError("Unknown dynamic sphinx-needs function: {}. Found in need: {}".format(func_name, need["id"]))
+ raise SphinxError(
+ "Unknown dynamic sphinx-needs function: {}. Found in need: {}".format(
+ func_name, need["id"]
+ )
+ )
- func = measure_time_func(NEEDS_FUNCTIONS[func_name]["function"], category="dyn_func", source="user")
- func_return = func(app, need, SphinxNeedsData(app.env).get_or_create_needs(), *func_args, **func_kwargs)
+ func = measure_time_func(
+ NEEDS_FUNCTIONS[func_name]["function"], category="dyn_func", source="user"
+ )
+ func_return = func(
+ app,
+ need,
+ SphinxNeedsData(app.env).get_or_create_needs(),
+ *func_args,
+ **func_kwargs,
+ )
if not isinstance(func_return, (str, int, float, list, unicode)) and func_return:
raise SphinxError(
@@ -97,7 +112,9 @@ def execute_func(app: Sphinx, need: NeedsInfoType, func_string: str) -> Any:
func_pattern = re.compile(r"\[\[(.*?)\]\]") # RegEx to detect function strings
-def find_and_replace_node_content(node: nodes.Node, env: BuildEnvironment, need: NeedsInfoType) -> nodes.Node:
+def find_and_replace_node_content(
+ node: nodes.Node, env: BuildEnvironment, need: NeedsInfoType
+) -> nodes.Node:
"""
Search inside a given node and its children for nodes of type Text,
if found, check if it contains a function string and run/replace it.
@@ -106,7 +123,11 @@ def find_and_replace_node_content(node: nodes.Node, env: BuildEnvironment, need:
:return: None
"""
new_children = []
- if not node.children and isinstance(node, nodes.Text) or isinstance(node, nodes.reference):
+ if (
+ not node.children
+ and isinstance(node, nodes.Text)
+ or isinstance(node, nodes.reference)
+ ):
if isinstance(node, nodes.reference):
try:
new_text = node.attributes["refuri"]
@@ -127,8 +148,8 @@ def find_and_replace_node_content(node: nodes.Node, env: BuildEnvironment, need:
func_string = func_string.replace("”", '"')
func_string = func_string.replace("”", '"')
- func_string = func_string.replace("‘", "'")
- func_string = func_string.replace("’", "'")
+ func_string = func_string.replace("‘", "'") # noqa: RUF001
+ func_string = func_string.replace("’", "'") # noqa: RUF001
func_return = execute_func(env.app, need, func_string)
# This should never happen, but we can not be sure.
@@ -176,20 +197,30 @@ def resolve_dynamic_values(needs: dict[str, NeedsInfoType], app: Sphinx) -> None
"""
for need in needs.values():
for need_option in need:
- if need_option in ["docname", "lineno", "content", "content_node", "content_id"]:
+ if need_option in [
+ "docname",
+ "lineno",
+ "content",
+ "content_node",
+ "content_id",
+ ]:
# dynamic values in this data are not allowed.
continue
if not isinstance(need[need_option], (list, set)):
func_call: str | None = "init"
while func_call:
try:
- func_call, func_return = _detect_and_execute(need[need_option], need, app)
+ func_call, func_return = _detect_and_execute(
+ need[need_option], need, app
+ )
except FunctionParsingException:
raise SphinxError(
"Function definition of {option} in file {file}:{line} has "
"unsupported parameters. "
"supported are str, int/float, list".format(
- option=need_option, file=need["docname"], line=need["lineno"]
+ option=need_option,
+ file=need["docname"],
+ line=need["lineno"],
)
)
@@ -197,9 +228,13 @@ def resolve_dynamic_values(needs: dict[str, NeedsInfoType], app: Sphinx) -> None
continue
# Replace original function string with return value of function call
if func_return is None:
- need[need_option] = need[need_option].replace(f"[[{func_call}]]", "")
+ need[need_option] = need[need_option].replace(
+ f"[[{func_call}]]", ""
+ )
else:
- need[need_option] = need[need_option].replace(f"[[{func_call}]]", str(func_return))
+ need[need_option] = need[need_option].replace(
+ f"[[{func_call}]]", str(func_return)
+ )
if need[need_option] == "":
need[need_option] = None
@@ -213,7 +248,9 @@ def resolve_dynamic_values(needs: dict[str, NeedsInfoType], app: Sphinx) -> None
"Function definition of {option} in file {file}:{line} has "
"unsupported parameters. "
"supported are str, int/float, list".format(
- option=need_option, file=need["docname"], line=need["lineno"]
+ option=need_option,
+ file=need["docname"],
+ line=need["lineno"],
)
)
if func_call is None:
@@ -221,7 +258,9 @@ def resolve_dynamic_values(needs: dict[str, NeedsInfoType], app: Sphinx) -> None
else:
# Replace original function string with return value of function call
if isinstance(need[need_option], (str, int, float)):
- new_values.append(element.replace(f"[[{func_call}]]", str(func_return)))
+ new_values.append(
+ element.replace(f"[[{func_call}]]", str(func_return))
+ )
else:
if isinstance(need[need_option], (list, set)):
if isinstance(func_return, (list, set)):
@@ -233,7 +272,9 @@ def resolve_dynamic_values(needs: dict[str, NeedsInfoType], app: Sphinx) -> None
def resolve_variants_options(
- needs: dict[str, NeedsInfoType], needs_config: NeedsSphinxConfig, tags: dict[str, bool]
+ needs: dict[str, NeedsInfoType],
+ needs_config: NeedsSphinxConfig,
+ tags: dict[str, bool],
) -> None:
"""
Resolve variants options inside need data.
@@ -255,20 +296,28 @@ def resolve_variants_options(
for need in needs.values():
# Data to use as filter context.
need_context: dict[str, Any] = {**need}
- need_context.update(**needs_config.filter_data) # Add needs_filter_data to filter context
+ need_context.update(
+ **needs_config.filter_data
+ ) # Add needs_filter_data to filter context
need_context.update(**tags) # Add sphinx tags to filter context
for var_option in variants_options:
if var_option in need and need[var_option] not in (None, "", []):
if not isinstance(need[var_option], (list, set, tuple)):
option_value: str = need[var_option]
- need[var_option] = match_variants(option_value, need_context, needs_config.variants)
+ need[var_option] = match_variants(
+ option_value, need_context, needs_config.variants
+ )
else:
option_value = need[var_option]
- need[var_option] = match_variants(option_value, need_context, needs_config.variants)
+ need[var_option] = match_variants(
+ option_value, need_context, needs_config.variants
+ )
-def check_and_get_content(content: str, need: NeedsInfoType, env: BuildEnvironment) -> str:
+def check_and_get_content(
+ content: str, need: NeedsInfoType, env: BuildEnvironment
+) -> str:
"""
Checks if the given content is a function call.
If not, content is returned.
@@ -290,14 +339,18 @@ def check_and_get_content(content: str, need: NeedsInfoType, env: BuildEnvironme
return content
func_call = func_match.group(1) # Extract function call
- func_return = execute_func(env.app, need, func_call) # Execute function call and get return value
+ func_return = execute_func(
+ env.app, need, func_call
+ ) # Execute function call and get return value
# Replace the function_call with the calculated value
content = content.replace(f"[[{func_call}]]", func_return)
return content
-def _detect_and_execute(content: Any, need: NeedsInfoType, app: Sphinx) -> tuple[str | None, Any]:
+def _detect_and_execute(
+ content: Any, need: NeedsInfoType, app: Sphinx
+) -> tuple[str | None, Any]:
"""Detects if given content is a function call and executes it."""
try:
content = str(content)
@@ -309,12 +362,16 @@ def _detect_and_execute(content: Any, need: NeedsInfoType, app: Sphinx) -> tuple
return None, None
func_call = func_match.group(1) # Extract function call
- func_return = execute_func(app, need, func_call) # Execute function call and get return value
+ func_return = execute_func(
+ app, need, func_call
+ ) # Execute function call and get return value
return func_call, func_return
-def _analyze_func_string(func_string: str, need: NeedsInfoType | None) -> tuple[str, list[Any], dict[str, Any]]:
+def _analyze_func_string(
+ func_string: str, need: NeedsInfoType | None
+) -> tuple[str, list[Any], dict[str, Any]]:
"""
Analyze given function string and extract:
@@ -331,12 +388,16 @@ def _analyze_func_string(func_string: str, need: NeedsInfoType | None) -> tuple[
func = ast.parse(func_string)
except SyntaxError as e:
need_id = need["id"] if need else "UNKNOWN"
- raise SphinxError(f"Parsing function string failed for need {need_id}: {func_string}. {e}")
+ raise SphinxError(
+ f"Parsing function string failed for need {need_id}: {func_string}. {e}"
+ )
try:
func_call = func.body[0].value # type: ignore
func_name = func_call.func.id
except AttributeError:
- raise SphinxError(f"Given dynamic function string is not a valid python call. Got: {func_string}")
+ raise SphinxError(
+ f"Given dynamic function string is not a valid python call. Got: {func_string}"
+ )
func_args: list[Any] = []
for arg in func_call.args:
@@ -366,8 +427,8 @@ def _analyze_func_string(func_string: str, need: NeedsInfoType | None) -> tuple[
)
else:
raise FunctionParsingException(
- "Unsupported type found in function definition: {}. "
- "Supported are numbers, strings, bool and list".format(func_string)
+ f"Unsupported type found in function definition: {func_string}. "
+ "Supported are numbers, strings, bool and list"
)
func_kargs: dict[str, Any] = {}
for keyword in func_call.keywords:
diff --git a/sphinx_needs/layout.py b/sphinx_needs/layout.py
index d9c573945..e7cc60e35 100644
--- a/sphinx_needs/layout.py
+++ b/sphinx_needs/layout.py
@@ -33,7 +33,11 @@
@measure_time("need")
def create_need(
- need_id: str, app: Sphinx, layout: str | None = None, style: str | None = None, docname: str | None = None
+ need_id: str,
+ app: Sphinx,
+ layout: str | None = None,
+ style: str | None = None,
+ docname: str | None = None,
) -> nodes.container:
"""
Creates a new need-node for a given layout.
@@ -78,7 +82,9 @@ def create_need(
# Overwrite the docname, which must be the original one from the reused need, as all used paths are relative
# to the original location, not to the current document.
- env.temp_data["docname"] = need_data["docname"] # Dirty, as in this phase normally no docname is set anymore in env
+ env.temp_data["docname"] = need_data[
+ "docname"
+ ] # Dirty, as in this phase normally no docname is set anymore in env
ImageCollector().process_doc(app, node_inner) # type: ignore[arg-type]
DownloadFileCollector().process_doc(app, node_inner) # type: ignore[arg-type]
@@ -102,7 +108,9 @@ def create_need(
# set the layout and style for the new need
node_container[0].attributes = node_container.parent.children[0].attributes # type: ignore
- node_container[0].children[0].attributes = node_container.parent.children[0].children[0].attributes # type: ignore
+ node_container[0].children[0].attributes = ( # type: ignore
+ node_container.parent.children[0].children[0].attributes # type: ignore
+ )
node_container.attributes["ids"] = []
@@ -129,7 +137,11 @@ def replace_pending_xref_refdoc(node: nodes.Element, new_refdoc: str) -> None:
@measure_time("need")
def build_need(
- layout: str, node: nodes.Element, app: Sphinx, style: str | None = None, fromdocname: str | None = None
+ layout: str,
+ node: nodes.Element,
+ app: Sphinx,
+ style: str | None = None,
+ fromdocname: str | None = None,
) -> None:
"""
Builds a need based on a given layout for a given need-node.
@@ -221,7 +233,11 @@ def __init__(
self.fromdocname = fromdocname
# For ReadTheDocs Theme we need to add 'rtd-exclude-wy-table'.
- classes = ["need", "needs_grid_" + self.layout["grid"], "needs_layout_" + self.layout_name]
+ classes = [
+ "need",
+ "needs_grid_" + self.layout["grid"],
+ "needs_layout_" + self.layout_name,
+ ]
classes.extend(self.needs_config.table_classes)
self.style = style or self.need["style"] or self.needs_config.default_style
@@ -241,59 +257,121 @@ def __init__(
self.grids = {
"simple": {
"func": self._grid_simple,
- "configs": {"colwidths": [100], "side_left": False, "side_right": False, "footer": False},
+ "configs": {
+ "colwidths": [100],
+ "side_left": False,
+ "side_right": False,
+ "footer": False,
+ },
},
"simple_footer": {
"func": self._grid_simple,
- "configs": {"colwidths": [100], "side_left": False, "side_right": False, "footer": True},
+ "configs": {
+ "colwidths": [100],
+ "side_left": False,
+ "side_right": False,
+ "footer": True,
+ },
},
"simple_side_left": {
"func": self._grid_simple,
- "configs": {"colwidths": [30, 70], "side_left": "full", "side_right": False, "footer": False},
+ "configs": {
+ "colwidths": [30, 70],
+ "side_left": "full",
+ "side_right": False,
+ "footer": False,
+ },
},
"simple_side_right": {
"func": self._grid_simple,
- "configs": {"colwidths": [70, 30], "side_left": False, "side_right": "full", "footer": False},
+ "configs": {
+ "colwidths": [70, 30],
+ "side_left": False,
+ "side_right": "full",
+ "footer": False,
+ },
},
"simple_side_left_partial": {
"func": self._grid_simple,
- "configs": {"colwidths": [20, 80], "side_left": "part", "side_right": False, "footer": False},
+ "configs": {
+ "colwidths": [20, 80],
+ "side_left": "part",
+ "side_right": False,
+ "footer": False,
+ },
},
"simple_side_right_partial": {
"func": self._grid_simple,
- "configs": {"colwidths": [80, 20], "side_left": False, "side_right": "part", "footer": False},
+ "configs": {
+ "colwidths": [80, 20],
+ "side_left": False,
+ "side_right": "part",
+ "footer": False,
+ },
},
"complex": self._grid_complex,
"content": {
"func": self._grid_content,
- "configs": {"colwidths": [100], "side_left": False, "side_right": False, "footer": False},
+ "configs": {
+ "colwidths": [100],
+ "side_left": False,
+ "side_right": False,
+ "footer": False,
+ },
},
"content_footer": {
"func": self._grid_content,
- "configs": {"colwidths": [100], "side_left": False, "side_right": False, "footer": True},
+ "configs": {
+ "colwidths": [100],
+ "side_left": False,
+ "side_right": False,
+ "footer": True,
+ },
},
"content_side_left": {
"func": self._grid_content,
- "configs": {"colwidths": [5, 95], "side_left": True, "side_right": False, "footer": False},
+ "configs": {
+ "colwidths": [5, 95],
+ "side_left": True,
+ "side_right": False,
+ "footer": False,
+ },
},
"content_side_right": {
"func": self._grid_content,
- "configs": {"colwidths": [95, 5], "side_left": False, "side_right": True, "footer": False},
+ "configs": {
+ "colwidths": [95, 5],
+ "side_left": False,
+ "side_right": True,
+ "footer": False,
+ },
},
"content_footer_side_left": {
"func": self._grid_content,
- "configs": {"colwidths": [5, 95], "side_left": True, "side_right": False, "footer": True},
+ "configs": {
+ "colwidths": [5, 95],
+ "side_left": True,
+ "side_right": False,
+ "footer": True,
+ },
},
"content_footer_side_right": {
"func": self._grid_content,
- "configs": {"colwidths": [95, 5], "side_left": False, "side_right": True, "footer": True},
+ "configs": {
+ "colwidths": [95, 5],
+ "side_left": False,
+ "side_right": True,
+ "footer": True,
+ },
},
}
# Dummy Document setup
self.doc_settings, self.inline_parser = _generate_inline_parser()
self.dummy_doc = new_document("dummy", self.doc_settings)
- self.doc_language = languages.get_language(self.dummy_doc.settings.language_code)
+ self.doc_language = languages.get_language(
+ self.dummy_doc.settings.language_code
+ )
self.doc_memo = Struct(
document=self.dummy_doc,
reporter=self.dummy_doc.reporter,
@@ -304,7 +382,9 @@ def __init__(
inliner=None,
)
- self.functions: dict[str, Callable[..., None | nodes.Node | list[nodes.Node]]] = {
+ self.functions: dict[
+ str, Callable[..., None | nodes.Node | list[nodes.Node]]
+ ] = {
"meta": self.meta, # type: ignore[dict-item]
"meta_all": self.meta_all,
"meta_links": self.meta_links,
@@ -334,7 +414,9 @@ def __init__(
def get_need_table(self) -> nodes.table:
if self.layout["grid"] not in self.grids.keys():
raise SphinxNeedLayoutException(
- "Unknown layout-grid: {}. Supported are {}".format(self.layout["grid"], ", ".join(self.grids.keys()))
+ "Unknown layout-grid: {}. Supported are {}".format(
+ self.layout["grid"], ", ".join(self.grids.keys())
+ )
)
func = self.grids[self.layout["grid"]]
@@ -376,7 +458,9 @@ def _parse(self, line: str) -> list[nodes.Node]:
:param line: string to parse
:return: nodes
"""
- result, message = self.inline_parser.parse(line, 0, self.doc_memo, self.dummy_doc) # type: ignore
+ result, message = self.inline_parser.parse( # type: ignore
+ line, 0, self.doc_memo, self.dummy_doc
+ )
if message:
raise SphinxNeedLayoutException(message)
return result # type: ignore[no-any-return]
@@ -420,7 +504,9 @@ def _func_replace(self, section_nodes: list[nodes.Node]) -> list[nodes.Node]:
)
func_def_clean = func_def.replace("<<", "").replace(">>", "")
- func_name, func_args, func_kargs = _analyze_func_string(func_def_clean, None)
+ func_name, func_args, func_kargs = _analyze_func_string(
+ func_def_clean, None
+ )
# Replace place holders
# Looks for {{name}}, where name must be an option of need, and replaces it with the
@@ -552,7 +638,9 @@ def meta(
ref_item = nodes.Text(datum)
data_node += ref_item
- if (name in needs_string_links_option and index + 1 < len(data)) or index + 1 < len([data]):
+ if (
+ name in needs_string_links_option and index + 1 < len(data)
+ ) or index + 1 < len([data]):
data_node += nodes.emphasis("; ", "; ")
data_container.append(data_node)
@@ -703,7 +791,9 @@ def meta_links(self, name: str, incoming: bool = False) -> nodes.inline:
data_container.append(node_links)
return data_container
- def meta_links_all(self, prefix: str = "", postfix: str = "", exclude: list[str] | None = None) -> list[nodes.line]:
+ def meta_links_all(
+ self, prefix: str = "", postfix: str = "", exclude: list[str] | None = None
+ ) -> list[nodes.line]:
"""
Documents all used link types for the current need automatically.
@@ -718,7 +808,9 @@ def meta_links_all(self, prefix: str = "", postfix: str = "", exclude: list[str]
type_key = link_type["option"]
if self.need[type_key] and type_key not in exclude: # type: ignore[literal-required]
outgoing_line = nodes.line()
- outgoing_label = prefix + "{}:".format(link_type["outgoing"]) + postfix + " "
+ outgoing_label = (
+ prefix + "{}:".format(link_type["outgoing"]) + postfix + " "
+ )
outgoing_line += self._parse(outgoing_label)
outgoing_line += self.meta_links(link_type["option"], incoming=False)
data_container.append(outgoing_line)
@@ -726,7 +818,9 @@ def meta_links_all(self, prefix: str = "", postfix: str = "", exclude: list[str]
type_key = link_type["option"] + "_back"
if self.need[type_key] and type_key not in exclude: # type: ignore[literal-required]
incoming_line = nodes.line()
- incoming_label = prefix + "{}:".format(link_type["incoming"]) + postfix + " "
+ incoming_label = (
+ prefix + "{}:".format(link_type["incoming"]) + postfix + " "
+ )
incoming_line += self._parse(incoming_label)
incoming_line += self.meta_links(link_type["option"], incoming=True)
data_container.append(incoming_line)
@@ -791,7 +885,9 @@ def image(
options["align"] = align
if url is None or not isinstance(url, str):
- raise SphinxNeedLayoutException("not valid url given for image function in layout")
+ raise SphinxNeedLayoutException(
+ "not valid url given for image function in layout"
+ )
if url.startswith("icon:"):
if any(x in builder.name.upper() for x in ["PDF", "LATEX"]):
@@ -926,7 +1022,11 @@ def link(
return data_container
def collapse_button(
- self, target: str = "meta", collapsed: str = "Show", visible: str = "Close", initial: bool = False
+ self,
+ target: str = "meta",
+ collapsed: str = "Show",
+ visible: str = "Close",
+ initial: bool = False,
) -> nodes.inline | None:
"""
To show icons instead of text on the button, use collapse_button() like this::
@@ -952,16 +1052,28 @@ def collapse_button(
if collapsed.startswith("image:") or collapsed.startswith("icon:"):
coll_node_collapsed.append(
- self.image(collapsed.replace("image:", ""), width="17px", no_link=True, img_class="sn_collapse_img")
+ self.image(
+ collapsed.replace("image:", ""),
+ width="17px",
+ no_link=True,
+ img_class="sn_collapse_img",
+ )
)
elif collapsed.startswith("Debug view"):
- coll_node_collapsed.append(nodes.container(classes=["debug_on_layout_btn"])) # For debug layout
+ coll_node_collapsed.append(
+ nodes.container(classes=["debug_on_layout_btn"])
+ ) # For debug layout
else:
coll_node_collapsed.append(nodes.Text(collapsed))
if visible.startswith("image:") or visible.startswith("icon:"):
coll_node_visible.append(
- self.image(visible.replace("image:", ""), width="17px", no_link=True, img_class="sn_collapse_img")
+ self.image(
+ visible.replace("image:", ""),
+ width="17px",
+ no_link=True,
+ img_class="sn_collapse_img",
+ )
)
elif visible.startswith("Debug view"):
coll_node_visible.append(nodes.container(classes=["debug_off_layout_btn"]))
@@ -972,7 +1084,9 @@ def collapse_button(
# docutils doesn't allow has to add any html-attributes beside class and id to nodes.
# So we misused "id" for this and use "__" (2x _) as separator for row-target names
- if (not self.need["collapse"]) or (self.need["collapse"] is None and not initial):
+ if (not self.need["collapse"]) or (
+ self.need["collapse"] is None and not initial
+ ):
status = "show"
if (self.need["collapse"]) or (not self.need["collapse"] and initial):
@@ -1034,7 +1148,13 @@ def permalink(
prefix=prefix,
)
- def _grid_simple(self, colwidths: list[int], side_left: bool | str, side_right: bool | str, footer: bool) -> None:
+ def _grid_simple(
+ self,
+ colwidths: list[int],
+ side_left: bool | str,
+ side_right: bool | str,
+ footer: bool,
+ ) -> None:
"""
Creates most "simple" grid layouts.
Side parts and footer can be activated via config.
@@ -1104,7 +1224,9 @@ def _grid_simple(self, colwidths: list[int], side_left: bool | str, side_right:
head_row = nodes.row(classes=["need", "head"])
if side_left:
- side_entry = nodes.entry(classes=["need", "side"], morerows=side_left_morerows)
+ side_entry = nodes.entry(
+ classes=["need", "side"], morerows=side_left_morerows
+ )
side_entry += self.get_section("side")
head_row += side_entry
@@ -1113,7 +1235,9 @@ def _grid_simple(self, colwidths: list[int], side_left: bool | str, side_right:
head_row += head_entry
if side_right:
- side_entry = nodes.entry(classes=["need", "side"], morerows=side_right_morerows)
+ side_entry = nodes.entry(
+ classes=["need", "side"], morerows=side_right_morerows
+ )
side_entry += self.get_section("side")
head_row += side_entry
@@ -1125,14 +1249,18 @@ def _grid_simple(self, colwidths: list[int], side_left: bool | str, side_right:
# CONTENT row
content_row = nodes.row(classes=["need", "content"])
- content_entry = nodes.entry(classes=["need", "content"], morecols=common_more_cols)
+ content_entry = nodes.entry(
+ classes=["need", "content"], morecols=common_more_cols
+ )
content_entry.insert(0, self.node.children)
content_row += content_entry
# FOOTER row
if footer:
footer_row = nodes.row(classes=["need", "footer"])
- footer_entry = nodes.entry(classes=["need", "footer"], morecols=common_more_cols)
+ footer_entry = nodes.entry(
+ classes=["need", "footer"], morecols=common_more_cols
+ )
footer_entry += self.get_section("footer")
footer_row += footer_entry
@@ -1207,7 +1335,9 @@ def _grid_complex(self) -> None:
# Construct table
node_tgroup += self.node_tbody
- def _grid_content(self, colwidths: list[int], side_left: bool, side_right: bool, footer: bool) -> None:
+ def _grid_content(
+ self, colwidths: list[int], side_left: bool, side_right: bool, footer: bool
+ ) -> None:
"""
Creates most "content" based grid layouts.
Side parts and footer can be activated via config.
diff --git a/sphinx_needs/need_constraints.py b/sphinx_needs/need_constraints.py
index e068a6bdf..b1925ab8a 100644
--- a/sphinx_needs/need_constraints.py
+++ b/sphinx_needs/need_constraints.py
@@ -11,7 +11,9 @@
logger = get_logger(__name__)
-def process_constraints(needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig) -> None:
+def process_constraints(
+ needs: dict[str, NeedsInfoType], config: NeedsSphinxConfig
+) -> None:
"""Analyse constraints of all needs,
and set corresponding fields on the need data item:
``constraints_passed`` and ``constraints_results``.
@@ -56,7 +58,9 @@ def process_constraints(needs: dict[str, NeedsInfoType], config: NeedsSphinxConf
if "error_message" in executable_constraints:
msg = str(executable_constraints["error_message"])
- template = error_templates_cache.setdefault(msg, jinja2.Template(msg))
+ template = error_templates_cache.setdefault(
+ msg, jinja2.Template(msg)
+ )
need["constraints_error"] = template.render(**need)
if "severity" not in executable_constraints:
@@ -88,10 +92,14 @@ def process_constraints(needs: dict[str, NeedsInfoType], config: NeedsSphinxConf
# set styles
old_style = need["style"]
if old_style and len(old_style) > 0:
- new_styles = "".join(", " + x for x in failed_options.get("style", []))
+ new_styles = "".join(
+ ", " + x for x in failed_options.get("style", [])
+ )
else:
old_style = ""
- new_styles = "".join(x + "," for x in failed_options.get("style", []))
+ new_styles = "".join(
+ x + "," for x in failed_options.get("style", [])
+ )
if failed_options.get("force_style", False):
need["style"] = new_styles.strip(", ")
diff --git a/sphinx_needs/needs.py b/sphinx_needs/needs.py
index e252dfc7f..4d99df291 100644
--- a/sphinx_needs/needs.py
+++ b/sphinx_needs/needs.py
@@ -109,7 +109,10 @@
__version__ = VERSION = "2.0.0"
NEEDS_FUNCTIONS.clear()
-_NODE_TYPES_T = Dict[Type[nodes.Element], Callable[[Sphinx, nodes.document, str, List[nodes.Element]], None]]
+_NODE_TYPES_T = Dict[
+ Type[nodes.Element],
+ Callable[[Sphinx, nodes.document, str, List[nodes.Element]], None],
+]
NODE_TYPES_PRIO: _NODE_TYPES_T = { # Node types to be checked before most others
Needextract: process_needextract,
@@ -150,7 +153,9 @@ def setup(app: Sphinx) -> dict[str, Any]:
NeedsSphinxConfig.add_config_values(app)
# Define nodes
- app.add_node(Need, html=(html_visit, html_depart), latex=(latex_visit, latex_depart))
+ app.add_node(
+ Need, html=(html_visit, html_depart), latex=(latex_visit, latex_depart)
+ )
app.add_node(
Needfilter,
)
@@ -167,7 +172,11 @@ def setup(app: Sphinx) -> dict[str, Any]:
app.add_node(Needextend)
app.add_node(Needuml)
app.add_node(List2Need)
- app.add_node(NeedPart, html=(visitor_dummy, visitor_dummy), latex=(visitor_dummy, visitor_dummy))
+ app.add_node(
+ NeedPart,
+ html=(visitor_dummy, visitor_dummy),
+ latex=(visitor_dummy, visitor_dummy),
+ )
########################################################################
# DIRECTIVES
@@ -195,23 +204,54 @@ def setup(app: Sphinx) -> dict[str, Any]:
# ROLES
########################################################################
# Provides :need:`ABC_123` for inline links.
- app.add_role("need", NeedsXRefRole(nodeclass=NeedRef, innernodeclass=nodes.emphasis, warn_dangling=True))
+ app.add_role(
+ "need",
+ NeedsXRefRole(
+ nodeclass=NeedRef, innernodeclass=nodes.emphasis, warn_dangling=True
+ ),
+ )
app.add_role(
- "need_incoming", NeedsXRefRole(nodeclass=NeedIncoming, innernodeclass=nodes.emphasis, warn_dangling=True)
+ "need_incoming",
+ NeedsXRefRole(
+ nodeclass=NeedIncoming, innernodeclass=nodes.emphasis, warn_dangling=True
+ ),
)
app.add_role(
- "need_outgoing", NeedsXRefRole(nodeclass=NeedOutgoing, innernodeclass=nodes.emphasis, warn_dangling=True)
+ "need_outgoing",
+ NeedsXRefRole(
+ nodeclass=NeedOutgoing, innernodeclass=nodes.emphasis, warn_dangling=True
+ ),
)
- app.add_role("need_part", NeedsXRefRole(nodeclass=NeedPart, innernodeclass=nodes.inline, warn_dangling=True))
+ app.add_role(
+ "need_part",
+ NeedsXRefRole(
+ nodeclass=NeedPart, innernodeclass=nodes.inline, warn_dangling=True
+ ),
+ )
# Shortcut for need_part
- app.add_role("np", NeedsXRefRole(nodeclass=NeedPart, innernodeclass=nodes.inline, warn_dangling=True))
+ app.add_role(
+ "np",
+ NeedsXRefRole(
+ nodeclass=NeedPart, innernodeclass=nodes.inline, warn_dangling=True
+ ),
+ )
- app.add_role("need_count", NeedsXRefRole(nodeclass=NeedCount, innernodeclass=nodes.inline, warn_dangling=True))
+ app.add_role(
+ "need_count",
+ NeedsXRefRole(
+ nodeclass=NeedCount, innernodeclass=nodes.inline, warn_dangling=True
+ ),
+ )
- app.add_role("need_func", NeedsXRefRole(nodeclass=NeedFunc, innernodeclass=nodes.inline, warn_dangling=True))
+ app.add_role(
+ "need_func",
+ NeedsXRefRole(
+ nodeclass=NeedFunc, innernodeclass=nodes.inline, warn_dangling=True
+ ),
+ )
########################################################################
# EVENTS
@@ -241,7 +281,11 @@ def setup(app: Sphinx) -> dict[str, Any]:
# doctree-read. So manipulating the doctree may result in conflicts, as e.g. images get not
# registered for sphinx. So some sphinx-internal tasks/functions may be called by hand again...
# See also https://github.com/sphinx-doc/sphinx/issues/7054#issuecomment-578019701 for an example
- app.connect("doctree-resolved", process_creator(NODE_TYPES_PRIO, "needextract"), priority=100)
+ app.connect(
+ "doctree-resolved",
+ process_creator(NODE_TYPES_PRIO, "needextract"),
+ priority=100,
+ )
app.connect("doctree-resolved", process_need_nodes)
app.connect("doctree-resolved", process_creator(NODE_TYPES))
@@ -280,7 +324,8 @@ def process_caller(app: Sphinx, doctree: nodes.document, fromdocname: str) -> No
"""
# We only need to analyse docs, which have Sphinx-Needs directives in it.
if (
- fromdocname not in SphinxNeedsData(app.env).get_or_create_docs().get(doc_category, [])
+ fromdocname
+ not in SphinxNeedsData(app.env).get_or_create_docs().get(doc_category, [])
and fromdocname != f"{app.config.root_doc}"
):
return
@@ -297,7 +342,11 @@ def process_caller(app: Sphinx, doctree: nodes.document, fromdocname: str) -> No
# Let's call the handlers
for check_node, check_func in node_list.items():
# Call the handler only, if it defined, and we found some nodes for it
- if check_node in current_nodes and check_func is not None and current_nodes[check_node]:
+ if (
+ check_node in current_nodes
+ and check_func is not None
+ and current_nodes[check_node]
+ ):
check_func(app, doctree, fromdocname, current_nodes[check_node])
return process_caller
@@ -319,7 +368,9 @@ def load_config(app: Sphinx, *_args: Any) -> None:
for option in needs_config.extra_options:
if option in extra_options:
LOGGER.warning(
- f'extra_option "{option}" already registered. [needs.config]', type="needs", subtype="config"
+ f'extra_option "{option}" already registered. [needs.config]',
+ type="needs",
+ subtype="config",
)
NEEDS_CONFIG.extra_options[option] = directives.unchanged
@@ -397,7 +448,9 @@ def load_config(app: Sphinx, *_args: Any) -> None:
NEEDS_CONFIG.warnings[name] = check
else:
LOGGER.warning(
- f"{name!r} in 'needs_warnings' is already registered. [needs.config]", type="needs", subtype="config"
+ f"{name!r} in 'needs_warnings' is already registered. [needs.config]",
+ type="needs",
+ subtype="config",
)
if needs_config.constraints_failed_color:
@@ -441,7 +494,11 @@ def prepare_env(app: Sphinx, env: BuildEnvironment, _docname: str) -> None:
# Register user defined services
for name, service in needs_config.services.items():
- if name not in services.services and "class" in service and "class_init" in service:
+ if (
+ name not in services.services
+ and "class" in service
+ and "class_init" in service
+ ):
# We found a not yet registered service
# But only register, if service-config contains class and class_init.
# Otherwise, the service may get registered later by an external sphinx-needs extension
@@ -456,7 +513,14 @@ def prepare_env(app: Sphinx, env: BuildEnvironment, _docname: str) -> None:
register_func(needs_func)
# Own extra options
- for option in ["hidden", "duration", "completion", "has_dead_links", "has_forbidden_dead_links", "constraints"]:
+ for option in [
+ "hidden",
+ "duration",
+ "completion",
+ "has_dead_links",
+ "has_forbidden_dead_links",
+ "constraints",
+ ]:
# Check if not already set by user
if option not in NEEDS_CONFIG.extra_options:
NEEDS_CONFIG.extra_options[option] = directives.unchanged
@@ -525,25 +589,29 @@ def check_configuration(_app: Sphinx, config: Config) -> None:
# Check if needs external filter and extra option are using the same name
if extern_filter in extra_options:
raise NeedsConfigException(
- "Same name for external filter and extra option: {}." " This is not allowed.".format(extern_filter)
+ f"Same name for external filter and extra option: {extern_filter}."
+ " This is not allowed."
)
# Check for usage of internal names
for internal in INTERNALS:
if internal in extra_options:
raise NeedsConfigException(
- 'Extra option "{}" already used internally. ' " Please use another name.".format(internal)
+ f'Extra option "{internal}" already used internally. '
+ " Please use another name."
)
if internal in link_types:
raise NeedsConfigException(
- 'Link type name "{}" already used internally. ' " Please use another name.".format(internal)
+ f'Link type name "{internal}" already used internally. '
+ " Please use another name."
)
# Check if option and link are using the same name
for link in link_types:
if link in extra_options:
raise NeedsConfigException(
- "Same name for link type and extra option: {}." " This is not allowed.".format(link)
+ f"Same name for link type and extra option: {link}."
+ " This is not allowed."
)
if link + "_back" in extra_options:
raise NeedsConfigException(
@@ -562,7 +630,11 @@ def check_configuration(_app: Sphinx, config: Config) -> None:
for option in external_variant_options:
# Check variant option is added in either extra options or extra links or NEED_DEFAULT_OPTIONS
- if option not in extra_options and option not in link_types and option not in NEED_DEFAULT_OPTIONS.keys():
+ if (
+ option not in extra_options
+ and option not in link_types
+ and option not in NEED_DEFAULT_OPTIONS.keys()
+ ):
raise NeedsConfigException(
"Variant option `{}` is not added in either extra options or extra links. "
"This is not allowed.".format(option)
diff --git a/sphinx_needs/needsfile.py b/sphinx_needs/needsfile.py
index ece2bbd66..cf1184fc8 100644
--- a/sphinx_needs/needsfile.py
+++ b/sphinx_needs/needsfile.py
@@ -94,16 +94,30 @@ def update_or_add_version(self, version: str) -> None:
def add_need(self, version: str, need_info: NeedsInfoType) -> None:
self.update_or_add_version(version)
- writable_needs = {key: need_info[key] for key in need_info if key not in self._exclude_need_keys} # type: ignore[literal-required]
+ writable_needs = {
+ key: need_info[key] # type: ignore[literal-required]
+ for key in need_info
+ if key not in self._exclude_need_keys
+ }
writable_needs["description"] = need_info["content"]
self.needs_list["versions"][version]["needs"][need_info["id"]] = writable_needs
- self.needs_list["versions"][version]["needs_amount"] = len(self.needs_list["versions"][version]["needs"])
+ self.needs_list["versions"][version]["needs_amount"] = len(
+ self.needs_list["versions"][version]["needs"]
+ )
def add_filter(self, version: str, need_filter: NeedsFilterType) -> None:
self.update_or_add_version(version)
- writable_filters = {key: need_filter[key] for key in need_filter if key not in self._exclude_filter_keys} # type: ignore[literal-required]
- self.needs_list["versions"][version]["filters"][need_filter["export_id"].upper()] = writable_filters
- self.needs_list["versions"][version]["filters_amount"] = len(self.needs_list["versions"][version]["filters"])
+ writable_filters = {
+ key: need_filter[key] # type: ignore[literal-required]
+ for key in need_filter
+ if key not in self._exclude_filter_keys
+ }
+ self.needs_list["versions"][version]["filters"][
+ need_filter["export_id"].upper()
+ ] = writable_filters
+ self.needs_list["versions"][version]["filters_amount"] = len(
+ self.needs_list["versions"][version]["filters"]
+ )
def wipe_version(self, version: str) -> None:
if version in self.needs_list["versions"]:
@@ -130,7 +144,9 @@ def load_json(self, file: str) -> None:
file = os.path.join(self.confdir, file)
if not os.path.exists(file):
- self.log.warning(f"Could not load needs json file {file} [needs]", type="needs")
+ self.log.warning(
+ f"Could not load needs json file {file} [needs]", type="needs"
+ )
else:
errors = check_needs_file(file)
# We only care for schema errors here, all other possible errors
@@ -144,7 +160,9 @@ def load_json(self, file: str) -> None:
try:
needs_list = json.load(needs_file)
except json.JSONDecodeError:
- self.log.warning(f"Could not decode json file {file} [needs]", type="needs")
+ self.log.warning(
+ f"Could not decode json file {file} [needs]", type="needs"
+ )
else:
self.needs_list = needs_list
diff --git a/sphinx_needs/roles/need_count.py b/sphinx_needs/roles/need_count.py
index 8ff1299a8..15275a090 100644
--- a/sphinx_needs/roles/need_count.py
+++ b/sphinx_needs/roles/need_count.py
@@ -23,7 +23,10 @@ class NeedCount(nodes.Inline, nodes.Element):
def process_need_count(
- app: Sphinx, doctree: nodes.document, _fromdocname: str, found_nodes: list[nodes.Element]
+ app: Sphinx,
+ doctree: nodes.document,
+ _fromdocname: str,
+ found_nodes: list[nodes.Element],
) -> None:
needs_config = NeedsSphinxConfig(app.config)
for node_need_count in found_nodes:
diff --git a/sphinx_needs/roles/need_func.py b/sphinx_needs/roles/need_func.py
index 23daee4ac..3497f395b 100644
--- a/sphinx_needs/roles/need_func.py
+++ b/sphinx_needs/roles/need_func.py
@@ -18,11 +18,18 @@ class NeedFunc(nodes.Inline, nodes.Element):
def process_need_func(
- app: Sphinx, doctree: nodes.document, _fromdocname: str, found_nodes: list[nodes.Element]
+ app: Sphinx,
+ doctree: nodes.document,
+ _fromdocname: str,
+ found_nodes: list[nodes.Element],
) -> None:
env = app.env
# for node_need_func in doctree.findall(NeedFunc):
for node_need_func in found_nodes:
- result = check_and_get_content(node_need_func.attributes["reftarget"], {"id": "need_func_dummy"}, env) # type: ignore
+ result = check_and_get_content(
+ node_need_func.attributes["reftarget"],
+ {"id": "need_func_dummy"}, # type: ignore
+ env,
+ )
new_node_func = nodes.Text(str(result))
node_need_func.replace_self(new_node_func)
diff --git a/sphinx_needs/roles/need_incoming.py b/sphinx_needs/roles/need_incoming.py
index fcdf5251b..2aa939035 100644
--- a/sphinx_needs/roles/need_incoming.py
+++ b/sphinx_needs/roles/need_incoming.py
@@ -15,7 +15,10 @@ class NeedIncoming(nodes.Inline, nodes.Element):
def process_need_incoming(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
) -> None:
builder = app.builder
env = app.env
@@ -64,8 +67,12 @@ def process_need_incoming(
node_need_backref["reftarget"],
)
else:
- assert target_need["external_url"] is not None, "External URL must not be set"
- new_node_ref = nodes.reference(target_need["id"], target_need["id"])
+ assert (
+ target_need["external_url"] is not None
+ ), "External URL must be set"
+ new_node_ref = nodes.reference(
+ target_need["id"], target_need["id"]
+ )
new_node_ref["refuri"] = check_and_calc_base_url_rel_path(
target_need["external_url"], fromdocname
)
@@ -82,7 +89,10 @@ def process_need_incoming(
pass
else:
- logger.warning(f"need {node_need_backref['reftarget']} not found [needs]", location=node_need_backref)
+ logger.warning(
+ f"need {node_need_backref['reftarget']} not found [needs]",
+ location=node_need_backref,
+ )
if len(node_link_container.children) == 0:
node_link_container += nodes.Text("None")
diff --git a/sphinx_needs/roles/need_outgoing.py b/sphinx_needs/roles/need_outgoing.py
index 1939b7e4e..c6391129d 100644
--- a/sphinx_needs/roles/need_outgoing.py
+++ b/sphinx_needs/roles/need_outgoing.py
@@ -18,7 +18,10 @@ class NeedOutgoing(nodes.Inline, nodes.Element):
def process_need_outgoing(
- app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
) -> None:
builder = app.builder
env = app.env
@@ -55,7 +58,11 @@ def process_need_outgoing(
target_need = needs_all_needs[need_id_main]
if need_id_part and need_id_part in target_need["parts"]:
part_content = target_need["parts"][need_id_part]["content"]
- target_title = part_content if len(part_content) < 30 else part_content[:27] + "..."
+ target_title = (
+ part_content
+ if len(part_content) < 30
+ else part_content[:27] + "..."
+ )
target_id = ".".join([need_id_main, need_id_part])
else:
target_title = target_need["title"]
@@ -84,8 +91,12 @@ def process_need_outgoing(
node_need_ref["reftarget"],
)
else:
- assert target_need["external_url"] is not None, "External URL must be set"
- new_node_ref = nodes.reference(target_need["id"], target_need["id"])
+ assert (
+ target_need["external_url"] is not None
+ ), "External URL must be set"
+ new_node_ref = nodes.reference(
+ target_need["id"], target_need["id"]
+ )
new_node_ref["refuri"] = check_and_calc_base_url_rel_path(
target_need["external_url"], fromdocname
)
diff --git a/sphinx_needs/roles/need_part.py b/sphinx_needs/roles/need_part.py
index 242d197d4..ae2b1eb89 100644
--- a/sphinx_needs/roles/need_part.py
+++ b/sphinx_needs/roles/need_part.py
@@ -27,14 +27,21 @@ class NeedPart(nodes.Inline, nodes.Element):
pass
-def process_need_part(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
+def process_need_part(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
+) -> None:
pass
part_pattern = re.compile(r"\(([\w-]+)\)(.*)")
-def update_need_with_parts(env: BuildEnvironment, need: NeedsInfoType, part_nodes: list[NeedPart]) -> None:
+def update_need_with_parts(
+ env: BuildEnvironment, need: NeedsInfoType, part_nodes: list[NeedPart]
+) -> None:
app = env.app
builder = app.builder
for part_node in part_nodes:
@@ -45,7 +52,9 @@ def update_need_with_parts(env: BuildEnvironment, need: NeedsInfoType, part_node
part_content = result.group(2)
else:
part_content = content
- inline_id = hashlib.sha1(part_content.encode("UTF-8")).hexdigest().upper()[:3]
+ inline_id = (
+ hashlib.sha1(part_content.encode("UTF-8")).hexdigest().upper()[:3]
+ )
if "parts" not in need:
need["parts"] = {}
@@ -78,7 +87,9 @@ def update_need_with_parts(env: BuildEnvironment, need: NeedsInfoType, part_node
from sphinx.util.nodes import make_refnode
- part_ref_node = make_refnode(builder, need["docname"], need["docname"], part_id_ref, part_link_node)
+ part_ref_node = make_refnode(
+ builder, need["docname"], need["docname"], part_id_ref, part_link_node
+ )
part_ref_node["classes"] += ["needs-id"]
part_node.children = []
diff --git a/sphinx_needs/roles/need_ref.py b/sphinx_needs/roles/need_ref.py
index d817b9274..231d3c627 100644
--- a/sphinx_needs/roles/need_ref.py
+++ b/sphinx_needs/roles/need_ref.py
@@ -51,7 +51,12 @@ def transform_need_to_dict(need: NeedsInfoType) -> dict[str, str]:
return dict_need
-def process_need_ref(app: Sphinx, doctree: nodes.document, fromdocname: str, found_nodes: list[nodes.Element]) -> None:
+def process_need_ref(
+ app: Sphinx,
+ doctree: nodes.document,
+ fromdocname: str,
+ found_nodes: list[nodes.Element],
+) -> None:
builder = app.builder
env = app.env
needs_config = NeedsSphinxConfig(env.config)
@@ -78,7 +83,9 @@ def process_need_ref(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
if need_id_main in all_needs:
target_need = all_needs[need_id_main]
- dict_need = transform_need_to_dict(target_need) # Transform a dict in a dict of {str, str}
+ dict_need = transform_need_to_dict(
+ target_need
+ ) # Transform a dict in a dict of {str, str}
# We set the id to the complete id maintained in node_need_ref["reftarget"]
dict_need["id"] = need_id_full
@@ -118,7 +125,8 @@ def process_need_ref(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
link_text = needs_config.role_need_template.format(**dict_need)
except KeyError as e:
link_text = (
- '"the config parameter needs_role_need_template uses not supported placeholders: %s "' % e
+ '"the config parameter needs_role_need_template uses not supported placeholders: %s "'
+ % e
)
log.warning(link_text + " [needs]", type="needs")
@@ -135,9 +143,13 @@ def process_need_ref(app: Sphinx, doctree: nodes.document, fromdocname: str, fou
node_need_ref["reftarget"],
)
else:
- assert target_need["external_url"] is not None, "external_url must be set for external needs"
+ assert (
+ target_need["external_url"] is not None
+ ), "external_url must be set for external needs"
new_node_ref = nodes.reference(target_need["id"], target_need["id"])
- new_node_ref["refuri"] = check_and_calc_base_url_rel_path(target_need["external_url"], fromdocname)
+ new_node_ref["refuri"] = check_and_calc_base_url_rel_path(
+ target_need["external_url"], fromdocname
+ )
new_node_ref["classes"].append(target_need["external_css"])
else:
diff --git a/sphinx_needs/services/config/github.py b/sphinx_needs/services/config/github.py
index ed2c2a6e2..65f40708c 100644
--- a/sphinx_needs/services/config/github.py
+++ b/sphinx_needs/services/config/github.py
@@ -3,9 +3,22 @@
EXTRA_DATA_OPTIONS = ["user", "created_at", "updated_at", "closed_at", "service"]
EXTRA_LINK_OPTIONS = ["url"]
EXTRA_IMAGE_OPTIONS = ["avatar"]
-CONFIG_OPTIONS = ["type", "query", "specific", "max_amount", "max_content_lines", "id_prefix"]
-GITHUB_DATA = ["status", "tags"] + EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS + EXTRA_IMAGE_OPTIONS
-GITHUB_DATA_STR = '"' + '","'.join(EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS + EXTRA_IMAGE_OPTIONS) + '"'
+CONFIG_OPTIONS = [
+ "type",
+ "query",
+ "specific",
+ "max_amount",
+ "max_content_lines",
+ "id_prefix",
+]
+GITHUB_DATA = (
+ ["status", "tags"] + EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS + EXTRA_IMAGE_OPTIONS
+)
+GITHUB_DATA_STR = (
+ '"'
+ + '","'.join(EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS + EXTRA_IMAGE_OPTIONS)
+ + '"'
+)
CONFIG_DATA_STR = '"' + '","'.join(CONFIG_OPTIONS) + '"'
GITHUB_LAYOUT = {
"grid": "complex",
@@ -18,14 +31,20 @@
"head": [
'**< >** ('
+ ", ".join(
- ['< >'.format(value=x) for x in EXTRA_LINK_OPTIONS]
+ [
+ f'< >'
+ for x in EXTRA_LINK_OPTIONS
+ ]
)
+ ")"
],
- "head_right": ['<>', '< >'],
- "meta_left": ['< >'.format(value=x) for x in EXTRA_DATA_OPTIONS]
+ "head_right": [
+ '<>',
+ '< >',
+ ],
+ "meta_left": [f'< >' for x in EXTRA_DATA_OPTIONS]
+ [
- '< >'.format(value=x)
+ f'< >'
for x in EXTRA_LINK_OPTIONS
],
"meta_right": [
diff --git a/sphinx_needs/services/github.py b/sphinx_needs/services/github.py
index b2fc6da72..4e5c54c84 100644
--- a/sphinx_needs/services/github.py
+++ b/sphinx_needs/services/github.py
@@ -25,9 +25,13 @@
class GithubService(BaseService):
- options = CONFIG_OPTIONS + EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS + EXTRA_IMAGE_OPTIONS
+ options = (
+ CONFIG_OPTIONS + EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS + EXTRA_IMAGE_OPTIONS
+ )
- def __init__(self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any) -> None:
+ def __init__(
+ self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any
+ ) -> None:
self.app = app
self.name = name
self.config = config
@@ -50,7 +54,11 @@ def __init__(self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any
layouts["github"] = GITHUB_LAYOUT
self.gh_type_config = {
- "issue": {"url": "search/issues", "query": "is:issue", "need_type": "issue"},
+ "issue": {
+ "url": "search/issues",
+ "query": "is:issue",
+ "need_type": "issue",
+ },
"pr": {"url": "search/issues", "query": "is:pr", "need_type": "pr"},
"commit": {"url": "search/commits", "query": "", "need_type": "commit"},
}
@@ -68,15 +76,21 @@ def __init__(self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any
if self.gh_type not in self.gh_type_config.keys():
raise KeyError(
- 'github type "{}" not supported. Use: {}'.format(self.gh_type, ", ".join(self.gh_type_config.keys()))
+ 'github type "{}" not supported. Use: {}'.format(
+ self.gh_type, ", ".join(self.gh_type_config.keys())
+ )
)
# Set need_type to use by default
- self.need_type = self.config.get("need_type", self.gh_type_config[self.gh_type]["need_type"])
+ self.need_type = self.config.get(
+ "need_type", self.gh_type_config[self.gh_type]["need_type"]
+ )
super().__init__()
- def _send(self, query: str, options: dict[str, Any], specific: bool = False) -> dict[str, Any]:
+ def _send(
+ self, query: str, options: dict[str, Any], specific: bool = False
+ ) -> dict[str, Any]:
headers = {}
if self.gh_type == "commit":
headers["Accept"] = "application/vnd.github.cloak-preview+json"
@@ -93,17 +107,20 @@ def _send(self, query: str, options: dict[str, Any], specific: bool = False) ->
single_type = "pulls"
else:
single_type = "commits"
- url = self.url + "repos/{owner}/{repo}/{single_type}/{number}".format(
- owner=owner, repo=repo, single_type=single_type, number=number
- )
+ url = self.url + f"repos/{owner}/{repo}/{single_type}/{number}"
except IndexError:
- raise NeedGithubServiceException('Single option ot valid, must follow "owner/repo/number"')
+ raise NeedGithubServiceException(
+ 'Single option not valid, must follow "owner/repo/number"'
+ )
params = {}
else:
url = self.url + self.gh_type_config[self.gh_type]["url"]
query = "{} {}".format(query, self.gh_type_config[self.gh_type]["query"])
- params = {"q": query, "per_page": options.get("max_amount", self.max_amount)}
+ params = {
+ "q": query,
+ "per_page": options.get("max_amount", self.max_amount),
+ }
self.log.info(f"Service {self.name} requesting data for query: {query}")
@@ -122,26 +139,30 @@ def _send(self, query: str, options: dict[str, Any], specific: bool = False) ->
if "rate limit" in resp.json()["message"]:
resp_limit = requests.get(self.url + "rate_limit", auth=auth)
extra_info = resp_limit.json()
- self.log.info("GitHub: API rate limit exceeded. We need to wait 60 secs...")
+ self.log.info(
+ "GitHub: API rate limit exceeded. We need to wait 60 secs..."
+ )
self.log.info(extra_info)
time.sleep(61)
resp = requests.get(url, params=params, auth=auth, headers=headers)
if resp.status_code > 299:
if "rate limit" in resp.json()["message"]:
- raise NeedGithubServiceException("GitHub: API rate limit exceeded (twice). Stop here.")
+ raise NeedGithubServiceException(
+ "GitHub: API rate limit exceeded (twice). Stop here."
+ )
else:
raise NeedGithubServiceException(
"Github service error during request.\n"
- "Status code: {}\n"
- "Error: {}\n"
- "{}".format(resp.status_code, resp.text, extra_info)
+ f"Status code: {resp.status_code}\n"
+ f"Error: {resp.text}\n"
+ f"{extra_info}"
)
else:
raise NeedGithubServiceException(
"Github service error during request.\n"
- "Status code: {}\n"
- "Error: {}\n"
- "{}".format(resp.status_code, resp.text, extra_info)
+ f"Status code: {resp.status_code}\n"
+ f"Error: {resp.text}\n"
+ f"{extra_info}"
)
if specific:
@@ -154,9 +175,13 @@ def request(self, options: dict[str, Any] | None = None) -> list[dict[str, Any]]
self.log.debug(f"Requesting data for service {self.name}")
if "query" not in options and "specific" not in options:
- raise NeedGithubServiceException('"query" or "specific" missing as option for github service.')
+ raise NeedGithubServiceException(
+ '"query" or "specific" missing as option for github service.'
+ )
elif "query" in options and "specific" in options:
- raise NeedGithubServiceException('Only "query" or "specific" allowed for github service. Not both!')
+ raise NeedGithubServiceException(
+ 'Only "query" or "specific" allowed for github service. Not both!'
+ )
elif "query" in options:
query = options["query"]
specific = False
@@ -168,7 +193,9 @@ def request(self, options: dict[str, Any] | None = None) -> list[dict[str, Any]]
if "items" not in response:
if "errors" in response:
raise NeedGithubServiceException(
- "GitHub service query error: {}\n" "Used query: {}".format(response["errors"][0]["message"], query)
+ "GitHub service query error: {}\n" "Used query: {}".format(
+ response["errors"][0]["message"], query
+ )
)
else:
raise NeedGithubServiceException("Github service: Unknown error.")
@@ -182,7 +209,9 @@ def request(self, options: dict[str, Any] | None = None) -> list[dict[str, Any]]
return data
- def prepare_issue_data(self, items: list[dict[str, Any]], options: dict[str, Any]) -> list[dict[str, Any]]:
+ def prepare_issue_data(
+ self, items: list[dict[str, Any]], options: dict[str, Any]
+ ) -> list[dict[str, Any]]:
data = []
for item in items:
# ensure that "None" can not reach .splitlines()
@@ -191,7 +220,11 @@ def prepare_issue_data(self, items: list[dict[str, Any]], options: dict[str, Any
# wraps content lines, if they are too long. Respects already existing newlines.
content_lines = [
- "\n ".join(textwrap.wrap(line, 60, break_long_words=True, replace_whitespace=False))
+ "\n ".join(
+ textwrap.wrap(
+ line, 60, break_long_words=True, replace_whitespace=False
+ )
+ )
for line in item["body"].splitlines() # type: ignore
if line.strip()
]
@@ -199,7 +232,9 @@ def prepare_issue_data(self, items: list[dict[str, Any]], options: dict[str, Any
content = "\n\n ".join(content_lines)
# Reduce content length, if requested by config
if self.max_content_lines > 0:
- max_lines = int(options.get("max_content_lines", self.max_content_lines))
+ max_lines = int(
+ options.get("max_content_lines", self.max_content_lines)
+ )
content_lines = content.splitlines()
if len(content_lines) > max_lines:
content_lines = content_lines[0:max_lines]
@@ -242,7 +277,9 @@ def prepare_issue_data(self, items: list[dict[str, Any]], options: dict[str, Any
return data
- def prepare_commit_data(self, items: list[dict[str, Any]], options: dict[str, Any]) -> list[dict[str, Any]]:
+ def prepare_commit_data(
+ self, items: list[dict[str, Any]], options: dict[str, Any]
+ ) -> list[dict[str, Any]]:
data = []
for item in items:
@@ -253,7 +290,9 @@ def prepare_commit_data(self, items: list[dict[str, Any]], options: dict[str, An
"type": options.get("type", self.need_type),
"layout": options.get("layout", self.layout),
"id": self.id_prefix + item["sha"][:6],
- "title": item["commit"]["message"].split("\n")[0][:60], # 1. line, max length 60 chars
+ "title": item["commit"]["message"].split("\n")[0][
+ :60
+ ], # 1. line, max length 60 chars
"content": item["commit"]["message"],
"user": item["author"]["login"],
"url": item["html_url"],
@@ -278,7 +317,9 @@ def _get_avatar(self, avatar_url: str) -> str:
avatar_file_path = os.path.join(path, filename)
# Placeholder avatar, if things go wrong or avatar download is deactivated
- default_avatar_file_path = os.path.join(os.path.dirname(__file__), "../images/avatar.png")
+ default_avatar_file_path = os.path.join(
+ os.path.dirname(__file__), "../images/avatar.png"
+ )
if self.download_avatars:
# Download only, if file not downloaded yet
if not os.path.exists(avatar_file_path):
@@ -294,20 +335,20 @@ def _get_avatar(self, avatar_url: str) -> str:
f.write(response.content)
elif response.status_code == 302:
self.log.warning(
- "GitHub service {} could not download avatar image "
- "from {}.\n"
- " Status code: {}\n"
+ f"GitHub service {self.name} could not download avatar image "
+ f"from {avatar_url}.\n"
+ f" Status code: {response.status_code}\n"
" Reason: Looks like the authentication provider tries to redirect you."
" This is not supported and is a common problem, "
- "if you use GitHub Enterprise. [needs]".format(self.name, avatar_url, response.status_code),
+ "if you use GitHub Enterprise. [needs]",
type="needs",
)
avatar_file_path = default_avatar_file_path
else:
self.log.warning(
- "GitHub service {} could not download avatar image "
- "from {}.\n"
- " Status code: {} [needs]".format(self.name, avatar_url, response.status_code),
+ f"GitHub service {self.name} could not download avatar image "
+ f"from {avatar_url}.\n"
+ f" Status code: {response.status_code} [needs]",
type="needs",
)
avatar_file_path = default_avatar_file_path
@@ -316,7 +357,9 @@ def _get_avatar(self, avatar_url: str) -> str:
return avatar_file_path
- def _add_given_options(self, options: dict[str, Any], element_data: dict[str, Any]) -> None:
+ def _add_given_options(
+ self, options: dict[str, Any], element_data: dict[str, Any]
+ ) -> None:
"""
Add data from options, which was defined by user but is not set by this service
diff --git a/sphinx_needs/services/manager.py b/sphinx_needs/services/manager.py
index ecdacadaf..b95962a22 100644
--- a/sphinx_needs/services/manager.py
+++ b/sphinx_needs/services/manager.py
@@ -23,7 +23,9 @@ def register(self, name: str, klass: type[BaseService], **kwargs: Any) -> None:
try:
config = NeedsSphinxConfig(self.app.config).services[name]
except KeyError:
- self.log.debug(f"No service config found for {name}. Add it in your conf.py to needs_services dictionary.")
+ self.log.debug(
+ f"No service config found for {name}. Add it in your conf.py to needs_services dictionary."
+ )
config = {}
# Register options from service class
@@ -43,7 +45,9 @@ def get(self, name: str) -> BaseService:
return self.services[name]
else:
raise NeedsServiceException(
- "Service {} could not be found. " "Available services are {}".format(name, ", ".join(self.services))
+ "Service {} could not be found. " "Available services are {}".format(
+ name, ", ".join(self.services)
+ )
)
diff --git a/sphinx_needs/services/open_needs.py b/sphinx_needs/services/open_needs.py
index 6a9ea2195..d11e99b30 100644
--- a/sphinx_needs/services/open_needs.py
+++ b/sphinx_needs/services/open_needs.py
@@ -24,7 +24,9 @@
class OpenNeedsService(BaseService):
options = CONFIG_OPTIONS + EXTRA_DATA_OPTIONS + EXTRA_LINK_OPTIONS
- def __init__(self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any) -> None:
+ def __init__(
+ self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any
+ ) -> None:
self.app = app
self.name = name
self.config = config
@@ -41,7 +43,9 @@ def __init__(self, app: Sphinx, name: str, config: dict[str, Any], **kwargs: Any
self.query = self.config.get("query", "")
self.content = self.config.get("content", DEFAULT_CONTENT)
self.mappings: dict[str, Any] = self.config.get("mappings", {})
- self.mapping_replaces = self.config.get("mappings_replaces", MAPPINGS_REPLACES_DEFAULT)
+ self.mapping_replaces = self.config.get(
+ "mappings_replaces", MAPPINGS_REPLACES_DEFAULT
+ )
self.extra_data: dict[str, Any] = self.config.get("extra_data", {})
self.params = self.config.get("params", "skip=0,limit=100")
@@ -59,8 +63,8 @@ def _oauthorization(self) -> None:
if login_resp.status_code != 200:
raise OpenNeedsServiceException(
"ONS service error during request.\n"
- "Status code: {}\n"
- "Error: {}\n".format(login_resp.status_code, login_resp.text)
+ f"Status code: {login_resp.status_code}\n"
+ f"Error: {login_resp.text}\n"
)
oauth_credentials = dict(**login_resp.json())
self.token_type = oauth_credentials.get("token_type")
@@ -72,8 +76,13 @@ def _prepare_request(self, options: Any) -> Any:
url: str = options.get("url", self.url)
url = url + str(self.url_postfix)
- headers: dict[str, str] = {"Authorization": f"{self.token_type} {self.access_token}"}
- params: list[str] = [param.strip() for param in re.split(r";|,", options.get("params", self.params))]
+ headers: dict[str, str] = {
+ "Authorization": f"{self.token_type} {self.access_token}"
+ }
+ params: list[str] = [
+ param.strip()
+ for param in re.split(r";|,", options.get("params", self.params))
+ ]
new_params: str = "&".join(params)
url = f"{url}?{new_params}"
@@ -93,10 +102,14 @@ def _send_request(request: Any) -> Any:
result: Any = requests.get(**request)
if result.status_code >= 300:
- raise OpenNeedsServiceException(f"Problem accessing {result.url}.\nReason: {result.text}")
+ raise OpenNeedsServiceException(
+ f"Problem accessing {result.url}.\nReason: {result.text}"
+ )
return result
- def _extract_data(self, data: list[dict[str, Any]], options: dict[str, Any]) -> list[dict[str, Any]]:
+ def _extract_data(
+ self, data: list[dict[str, Any]], options: dict[str, Any]
+ ) -> list[dict[str, Any]]:
"""
Extract data of a list/dictionary, which was retrieved via send_request.
:param data: list or dict
@@ -176,7 +189,10 @@ def _extract_data(self, data: list[dict[str, Any]], options: dict[str, Any]) ->
if name == "links":
# Add a prefix to the referenced link if it is an ID of a need object in
# the data retrieved from the Open Needs Server or don't add prefix
- value = [(prefix + link if link in ids_of_needs_data else link) for link in value]
+ value = [
+ (prefix + link if link in ids_of_needs_data else link)
+ for link in value
+ ]
value = ";".join(value)
# Ensures mapping option with value == None is not implemented. E.g. the links option
# can't be == None since there will be nothing to link to and that will raise a warning
@@ -184,7 +200,9 @@ def _extract_data(self, data: list[dict[str, Any]], options: dict[str, Any]) ->
need_values[name] = value
for regex, new_str in self.mapping_replaces.items():
- need_values[name] = re.sub(regex, new_str, need_values.get(name, ""))
+ need_values[name] = re.sub(
+ regex, new_str, need_values.get(name, "")
+ )
if name == "id":
need_values[name] = str(prefix) + str(need_values.get(name, ""))
diff --git a/sphinx_needs/utils.py b/sphinx_needs/utils.py
index eb0fcccb7..4476c9bb9 100644
--- a/sphinx_needs/utils.py
+++ b/sphinx_needs/utils.py
@@ -173,8 +173,12 @@ def row_col_maker(
link_string_list = {}
for link_name, link_conf in needs_config.string_links.items():
link_string_list[link_name] = {
- "url_template": Environment(autoescape=True).from_string(link_conf["link_url"]),
- "name_template": Environment(autoescape=True).from_string(link_conf["link_name"]),
+ "url_template": Environment(autoescape=True).from_string(
+ link_conf["link_url"]
+ ),
+ "name_template": Environment(autoescape=True).from_string(
+ link_conf["link_name"]
+ ),
"regex_compiled": re.compile(link_conf["regex"]),
"options": link_conf["options"],
"name": link_name,
@@ -203,22 +207,34 @@ def row_col_maker(
if make_ref:
if need_info["is_external"]:
- assert need_info["external_url"] is not None, "external_url must be set for external needs"
- ref_col["refuri"] = check_and_calc_base_url_rel_path(need_info["external_url"], fromdocname)
+ assert (
+ need_info["external_url"] is not None
+ ), "external_url must be set for external needs"
+ ref_col["refuri"] = check_and_calc_base_url_rel_path(
+ need_info["external_url"], fromdocname
+ )
ref_col["classes"].append(need_info["external_css"])
row_col["classes"].append(need_info["external_css"])
else:
- ref_col["refuri"] = builder.get_relative_uri(fromdocname, need_info["docname"])
+ ref_col["refuri"] = builder.get_relative_uri(
+ fromdocname, need_info["docname"]
+ )
ref_col["refuri"] += "#" + datum
elif ref_lookup:
temp_need = all_needs[link_id]
if temp_need["is_external"]:
- assert temp_need["external_url"] is not None, "external_url must be set for external needs"
- ref_col["refuri"] = check_and_calc_base_url_rel_path(temp_need["external_url"], fromdocname)
+ assert (
+ temp_need["external_url"] is not None
+ ), "external_url must be set for external needs"
+ ref_col["refuri"] = check_and_calc_base_url_rel_path(
+ temp_need["external_url"], fromdocname
+ )
ref_col["classes"].append(temp_need["external_css"])
row_col["classes"].append(temp_need["external_css"])
else:
- ref_col["refuri"] = builder.get_relative_uri(fromdocname, temp_need["docname"])
+ ref_col["refuri"] = builder.get_relative_uri(
+ fromdocname, temp_need["docname"]
+ )
ref_col["refuri"] += "#" + temp_need["id"]
if link_part:
ref_col["refuri"] += "." + link_part
@@ -230,7 +246,11 @@ def row_col_maker(
para_col += ref_col
elif matching_link_confs:
para_col += match_string_link(
- datum_text, datum, need_key, matching_link_confs, render_context=needs_config.render_context
+ datum_text,
+ datum,
+ need_key,
+ matching_link_confs,
+ render_context=needs_config.render_context,
)
else:
para_col += text_col
@@ -258,7 +278,9 @@ def rstjinja(app: Sphinx, docname: str, source: list[str]) -> None:
source[0] = rendered
-def import_prefix_link_edit(needs: dict[str, Any], id_prefix: str, needs_extra_links: list[dict[str, Any]]) -> None:
+def import_prefix_link_edit(
+ needs: dict[str, Any], id_prefix: str, needs_extra_links: list[dict[str, Any]]
+) -> None:
"""
Changes existing links to support given prefix.
Only link-ids get touched, which are part of ``needs`` (so are linking them).
@@ -285,7 +307,9 @@ def import_prefix_link_edit(needs: dict[str, Any], id_prefix: str, needs_extra_l
need[extra_link["option"]][n] = f"{id_prefix}{id}"
# Manipulate descriptions
# ToDo: Use regex for better matches.
- need["description"] = need["description"].replace(id, "".join([id_prefix, id]))
+ need["description"] = need["description"].replace(
+ id, "".join([id_prefix, id])
+ )
FuncT = TypeVar("FuncT")
@@ -332,7 +356,11 @@ def check_and_calc_base_url_rel_path(external_url: str, fromdocname: str) -> str
# get path sep considering plattform dependency, '\' for Windows, '/' fro Unix
curr_path_sep = os.path.sep
# check / or \ to determine the relative path to conf.py directory
- if not parsed_url.scheme and not os.path.isabs(external_url) and curr_path_sep in fromdocname:
+ if (
+ not parsed_url.scheme
+ and not os.path.isabs(external_url)
+ and curr_path_sep in fromdocname
+ ):
sub_level = len(fromdocname.split(curr_path_sep)) - 1
ref_uri = os.path.join(sub_level * (".." + curr_path_sep), external_url)
@@ -350,7 +378,8 @@ def check_and_get_external_filter_func(filter_func_ref: str | None) -> tuple[Any
filter_module, filter_function = filter_func_ref.rsplit(".")
except ValueError:
logger.warning(
- f'Filter function not valid "{filter_func_ref}". Example: my_module:my_func [needs]', type="needs"
+ f'Filter function not valid "{filter_func_ref}". Example: my_module:my_func [needs]',
+ type="needs",
)
return filter_func, filter_args
@@ -364,7 +393,10 @@ def check_and_get_external_filter_func(filter_func_ref: str | None) -> tuple[Any
final_module = importlib.import_module(filter_module)
filter_func = getattr(final_module, filter_function)
except Exception:
- logger.warning(f"Could not import filter function: {filter_func_ref} [needs]", type="needs")
+ logger.warning(
+ f"Could not import filter function: {filter_func_ref} [needs]",
+ type="needs",
+ )
return filter_func, filter_args
return filter_func, filter_args
@@ -385,7 +417,10 @@ def jinja_parse(context: dict[str, Any], jinja_string: str) -> str:
try:
content_template = Template(jinja_string, autoescape=True)
except Exception as e:
- raise ReferenceError(f'There was an error in the jinja statement: "{jinja_string}". ' f"Error Msg: {e}")
+ raise ReferenceError(
+ f'There was an error in the jinja statement: "{jinja_string}". '
+ f"Error Msg: {e}"
+ )
content = content_template.render(**context)
return content
@@ -407,7 +442,9 @@ def import_matplotlib() -> matplotlib | None:
return matplotlib
-def save_matplotlib_figure(app: Sphinx, figure: FigureBase, basename: str, fromdocname: str) -> nodes.image:
+def save_matplotlib_figure(
+ app: Sphinx, figure: FigureBase, basename: str, fromdocname: str
+) -> nodes.image:
builder = app.builder
env = app.env
@@ -464,20 +501,34 @@ def dict_get(root: dict[str, Any], items: Any, default: Any = None) -> Any:
def match_string_link(
- text_item: str, data: str, need_key: str, matching_link_confs: list[dict[str, Any]], render_context: dict[str, Any]
+ text_item: str,
+ data: str,
+ need_key: str,
+ matching_link_confs: list[dict[str, Any]],
+ render_context: dict[str, Any],
) -> Any:
try:
link_name = None
link_url = None
- link_conf = matching_link_confs[0] # We only handle the first matching string_link
+ link_conf = matching_link_confs[
+ 0
+ ] # We only handle the first matching string_link
match = link_conf["regex_compiled"].search(data)
if match:
render_content = match.groupdict()
- link_url = link_conf["url_template"].render(**render_content, **render_context)
- link_name = link_conf["name_template"].render(**render_content, **render_context)
+ link_url = link_conf["url_template"].render(
+ **render_content, **render_context
+ )
+ link_name = link_conf["name_template"].render(
+ **render_content, **render_context
+ )
# if no string_link match was made, we handle it as normal string value
- ref_item = nodes.reference(link_name, link_name, refuri=link_url) if link_name else nodes.Text(text_item)
+ ref_item = (
+ nodes.reference(link_name, link_name, refuri=link_url)
+ if link_name
+ else nodes.Text(text_item)
+ )
except Exception as e:
logger.warning(
@@ -490,7 +541,9 @@ def match_string_link(
def match_variants(
- option_value: str | list[str], keywords: dict[str, Any], needs_variants: dict[str, str]
+ option_value: str | list[str],
+ keywords: dict[str, Any],
+ needs_variants: dict[str, str],
) -> None | str | list[str]:
"""
Function to handle variant option management.
@@ -503,7 +556,9 @@ def match_variants(
"""
def variant_handling(
- variant_definitions: list[str], variant_data: dict[str, Any], variant_pattern: Pattern # type: ignore[type-arg]
+ variant_definitions: list[str],
+ variant_data: dict[str, Any],
+ variant_pattern: Pattern, # type: ignore[type-arg]
) -> str | None:
filter_context = variant_data
# filter_result = []
@@ -515,9 +570,15 @@ def variant_handling(
if check_definition:
variants_in_option = True
# Separate variant definition from value to use for the option
- filter_string, output, _ = re.split(r"(:[\w':.\-\" ]+)$", variant_definition)
+ filter_string, output, _ = re.split(
+ r"(:[\w':.\-\" ]+)$", variant_definition
+ )
filter_string = re.sub(r"^\[|[:\]]$", "", filter_string)
- filter_string = needs_variants[filter_string] if filter_string in needs_variants else filter_string
+ filter_string = (
+ needs_variants[filter_string]
+ if filter_string in needs_variants
+ else filter_string
+ )
try:
# https://docs.python.org/3/library/functions.html?highlight=compile#compile
filter_compiled = compile(filter_string, "", "eval")
@@ -533,7 +594,8 @@ def variant_handling(
return output.lstrip(":")
except Exception as e:
logger.warning(
- f'There was an error in the filter statement: "{filter_string}". ' f"Error Msg: {e} [needs]",
+ f'There was an error in the filter statement: "{filter_string}". '
+ f"Error Msg: {e} [needs]",
type="needs",
)
else:
@@ -557,11 +619,17 @@ def variant_handling(
if isinstance(option_value, str):
multiple_variants: list[str] = variant_splitting.split(rf"""{option_value}""")
multiple_variants = [
- re.sub(r"^([;, ]+)|([;, ]+$)", "", i) for i in multiple_variants if i not in (None, ";", "", " ")
+ re.sub(r"^([;, ]+)|([;, ]+$)", "", i)
+ for i in multiple_variants
+ if i not in (None, ";", "", " ")
]
- if len(multiple_variants) == 1 and not variant_rule_matching.search(multiple_variants[0]):
+ if len(multiple_variants) == 1 and not variant_rule_matching.search(
+ multiple_variants[0]
+ ):
return option_value
- new_option_value = variant_handling(multiple_variants, keywords, variant_rule_matching)
+ new_option_value = variant_handling(
+ multiple_variants, keywords, variant_rule_matching
+ )
if new_option_value is None:
return option_value
return new_option_value
@@ -570,10 +638,14 @@ def variant_handling(
# In case an option value is a list (:tags: open; close), and does not contain any variant definition,
# then return the unmodified value
# options = all([bool(not variant_rule_matching.search(i)) for i in multiple_variants])
- options = all(bool(not variant_rule_matching.search(i)) for i in multiple_variants)
+ options = all(
+ bool(not variant_rule_matching.search(i)) for i in multiple_variants
+ )
if options:
return option_value
- new_option_value = variant_handling(multiple_variants, keywords, variant_rule_matching)
+ new_option_value = variant_handling(
+ multiple_variants, keywords, variant_rule_matching
+ )
return new_option_value
else:
return option_value
@@ -596,7 +668,9 @@ def clean_log(data: str) -> str:
return clean_credentials
-def node_match(node_types: type[nodes.Element] | list[type[nodes.Element]]) -> Callable[[nodes.Node], bool]:
+def node_match(
+ node_types: type[nodes.Element] | list[type[nodes.Element]]
+) -> Callable[[nodes.Node], bool]:
"""
Returns a condition function for doctuils.nodes.findall()
@@ -618,7 +692,9 @@ def node_match(node_types: type[nodes.Element] | list[type[nodes.Element]]) -> C
"""
node_types_list = node_types if isinstance(node_types, list) else [node_types]
- def condition(node: nodes.Node, node_types: list[type[nodes.Element]] = node_types_list) -> bool:
+ def condition(
+ node: nodes.Node, node_types: list[type[nodes.Element]] = node_types_list
+ ) -> bool:
return any(isinstance(node, x) for x in node_types)
return condition
diff --git a/sphinx_needs/warnings.py b/sphinx_needs/warnings.py
index c110e1496..1f9a47c2c 100644
--- a/sphinx_needs/warnings.py
+++ b/sphinx_needs/warnings.py
@@ -61,7 +61,9 @@ def process_warnings(app: Sphinx, exception: Exception | None) -> None:
for warning_name, warning_filter in NEEDS_CONFIG.warnings.items():
if isinstance(warning_filter, str):
# filter string used
- result = filter_needs(checked_needs.values(), needs_config, warning_filter)
+ result = filter_needs(
+ checked_needs.values(), needs_config, warning_filter
+ )
elif callable(warning_filter):
# custom defined filter code used from conf.py
result = []
@@ -69,7 +71,10 @@ def process_warnings(app: Sphinx, exception: Exception | None) -> None:
if warning_filter(need, logger):
result.append(need)
else:
- logger.warning(f"Unknown needs warnings filter {warning_filter}! [needs]", type="needs")
+ logger.warning(
+ f"Unknown needs warnings filter {warning_filter}! [needs]",
+ type="needs",
+ )
if len(result) == 0:
logger.info(f"{warning_name}: passed")
@@ -94,17 +99,26 @@ def process_warnings(app: Sphinx, exception: Exception | None) -> None:
if warnings_always_warn:
logger.warning(
"{}: failed\n\t\tfailed needs: {} ({})\n\t\tused filter: {} [needs]".format(
- warning_name, len(need_ids), ", ".join(need_ids), warning_text
+ warning_name,
+ len(need_ids),
+ ", ".join(need_ids),
+ warning_text,
),
type="needs",
)
else:
logger.info(
"{}: failed\n\t\tfailed needs: {} ({})\n\t\tused filter: {}".format(
- warning_name, len(need_ids), ", ".join(need_ids), warning_text
+ warning_name,
+ len(need_ids),
+ ", ".join(need_ids),
+ warning_text,
)
)
warning_raised = True
if warning_raised:
- logger.warning("warnings were raised. See console / log output for details. [needs]", type="needs")
+ logger.warning(
+ "warnings were raised. See console / log output for details. [needs]",
+ type="needs",
+ )
diff --git a/tests/benchmarks/test_basic.py b/tests/benchmarks/test_basic.py
index 02c329101..a45133136 100644
--- a/tests/benchmarks/test_basic.py
+++ b/tests/benchmarks/test_basic.py
@@ -8,7 +8,9 @@
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_basic"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_basic"}], indirect=True
+)
def test_basic_time(test_app, benchmark):
responses.add_callback(
responses.GET,
@@ -16,7 +18,9 @@ def test_basic_time(test_app, benchmark):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
benchmark.pedantic(app.builder.build_all, rounds=1, iterations=1)
diff --git a/tests/benchmarks/test_official.py b/tests/benchmarks/test_official.py
index b2f3ba6a8..663a3ea86 100644
--- a/tests/benchmarks/test_official.py
+++ b/tests/benchmarks/test_official.py
@@ -9,7 +9,9 @@
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "../docs"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "html", "srcdir": "../docs"}], indirect=True
+)
def test_official_time(test_app, benchmark):
responses.add_callback(
responses.GET,
@@ -17,7 +19,9 @@ def test_official_time(test_app, benchmark):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
benchmark.pedantic(app.builder.build_all, rounds=1, iterations=1)
@@ -29,7 +33,11 @@ def test_official_time(test_app, benchmark):
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "../docs", "parallel": 1}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "../docs", "parallel": 1}],
+ indirect=True,
+)
def test_official_memory(test_app):
responses.add_callback(
responses.GET,
@@ -37,7 +45,9 @@ def test_official_memory(test_app):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
diff --git a/tests/conftest.py b/tests/conftest.py
index c631b27fc..e39cd8524 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -83,7 +83,16 @@ class Starter(ProcessStarter):
pattern = "Serving HTTP on [0-9.]+ port 62343|Address already in use"
timeout = 20
terminate_on_interrupt = True
- args = ["python3", "-m", "http.server", "--directory", sphinx_test_tempdir, "--bind", addr, port]
+ args = [
+ "python3",
+ "-m",
+ "http.server",
+ "--directory",
+ sphinx_test_tempdir,
+ "--bind",
+ addr,
+ port,
+ ]
env = {"PYTHONUNBUFFERED": "1"}
def check_server_connection(log_path: str):
@@ -98,13 +107,18 @@ def check_server_connection(log_path: str):
sock.close()
if result == 0:
with open(str(log_path), "wb", 0) as stdout:
- stdout.write(bytes("Serving HTTP on 127.0.0.1 port 62343 (http://127.0.0.1:62343/) ...\n", "utf8"))
+ stdout.write(
+ bytes(
+ "Serving HTTP on 127.0.0.1 port 62343 (http://127.0.0.1:62343/) ...\n",
+ "utf8",
+ )
+ )
return True
return False
if not check_server_connection(log_path=xprocess.getinfo("http_server").logpath):
# Start the process and ensure it is running
- _, logfile = xprocess.ensure("http_server", Starter, persist_logs=False) # noqa:F841
+ _, logfile = xprocess.ensure("http_server", Starter, persist_logs=False)
http_server_process = xprocess.getinfo("http_server")
server_url = f"http://{addr}:{port}"
@@ -129,7 +143,9 @@ def test_js(self) -> Dict[str, Any]:
"""
cypress_testpath = get_abspath(self.spec_pattern)
- if not cypress_testpath or not (os.path.isabs(cypress_testpath) and os.path.exists(cypress_testpath)):
+ if not cypress_testpath or not (
+ os.path.isabs(cypress_testpath) and os.path.exists(cypress_testpath)
+ ):
return {
"returncode": 1,
"stdout": None,
@@ -179,7 +195,12 @@ def test_js(self) -> Dict[str, Any]:
def pytest_addoption(parser):
- parser.addoption("--sn-build-dir", action="store", default=None, help="Base directory for sphinx-needs builds")
+ parser.addoption(
+ "--sn-build-dir",
+ action="store",
+ default=None,
+ help="Base directory for sphinx-needs builds",
+ )
@pytest.fixture(scope="session")
@@ -195,7 +216,9 @@ def sphinx_test_tempdir(request) -> path:
# We create a temp-folder on our own, as the util-functions from sphinx and pytest make troubles.
# It seems like they reuse certain-temp names
- temp_base = os.path.abspath(request.config.getoption("--sn-build-dir") or tempfile.gettempdir())
+ temp_base = os.path.abspath(
+ request.config.getoption("--sn-build-dir") or tempfile.gettempdir()
+ )
sphinx_test_tempdir = path(temp_base).joinpath("sn_test_build_data")
utils_dir = sphinx_test_tempdir.joinpath("utils")
@@ -235,7 +258,9 @@ def test_app(make_app, sphinx_test_tempdir, request):
if not builder_params.get("no_plantuml", False):
# Since we don't want copy the plantuml.jar file for each test function,
# we need to override the plantuml conf variable and set it to what we have already
- plantuml = "java -Djava.awt.headless=true -jar %s" % os.path.join(sphinx_test_tempdir, "utils", "plantuml.jar")
+ plantuml = "java -Djava.awt.headless=true -jar %s" % os.path.join(
+ sphinx_test_tempdir, "utils", "plantuml.jar"
+ )
sphinx_conf_overrides.update(plantuml=plantuml)
# copy test srcdir to test temporary directory sphinx_test_tempdir
diff --git a/tests/data/service_github.py b/tests/data/service_github.py
index fb24ab4d8..9151feb32 100644
--- a/tests/data/service_github.py
+++ b/tests/data/service_github.py
@@ -108,7 +108,8 @@
GITHUB_SPECIFIC_ISSUE_ANSWER = {
"url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues/141",
"repository_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs",
- "labels_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues/" "141/labels{/name}",
+ "labels_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues/"
+ "141/labels{/name}",
"comments_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues/141/comments",
"events_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues/141/events",
"html_url": "https://github.com/useblocks/sphinxcontrib-needs/issues/141",
@@ -140,7 +141,8 @@
{
"id": 491973814,
"node_id": "MDU6TGFiZWw0OTE5NzM4MTQ=",
- "url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs" "/labels/bug",
+ "url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs"
+ "/labels/bug",
"name": "bug",
"color": "ee0701",
"default": True,
@@ -318,7 +320,8 @@
"/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/teams",
"hooks_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/hooks",
- "issue_events_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues/events{" "/number}",
+ "issue_events_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues/events{"
+ "/number}",
"events_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/events",
"assignees_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/assignees{/user}",
"branches_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/branches{/branch}",
@@ -334,17 +337,22 @@
"subscribers_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/subscribers",
"subscription_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/subscription",
"commits_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/commits{/sha}",
- "git_commits_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/git" "/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/git"
+ "/commits{/sha}",
"comments_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/comments{/number}",
- "issue_comment_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues" "/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues"
+ "/comments{/number}",
"contents_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/contents/{+path}",
- "compare_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/compare/{base}...{" "head}",
+ "compare_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/compare/{base}...{"
+ "head}",
"merges_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/merges",
- "archive_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/{archive_format}{" "/ref}",
+ "archive_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/{archive_format}{"
+ "/ref}",
"downloads_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/downloads",
"issues_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/issues{/number}",
"pulls_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/pulls{/number}",
- "milestones_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/milestones{" "/number}",
+ "milestones_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/milestones{"
+ "/number}",
"notifications_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs"
"/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/useblocks/sphinxcontrib-needs/labels{/name}",
diff --git a/tests/doc_test/api_doc/conf.py b/tests/doc_test/api_doc/conf.py
index 7832f346d..df50838b8 100644
--- a/tests/doc_test/api_doc/conf.py
+++ b/tests/doc_test/api_doc/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs", "dummy_extension.dummy"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/api_doc_awesome/conf.py b/tests/doc_test/api_doc_awesome/conf.py
index 7832f346d..df50838b8 100644
--- a/tests/doc_test/api_doc_awesome/conf.py
+++ b/tests/doc_test/api_doc_awesome/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs", "dummy_extension.dummy"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/arch_doc/conf.py b/tests/doc_test/arch_doc/conf.py
index 1571c763f..70ce823f5 100644
--- a/tests/doc_test/arch_doc/conf.py
+++ b/tests/doc_test/arch_doc/conf.py
@@ -3,8 +3,32 @@
plantuml_output_format = "svg"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/broken_doc/conf.py b/tests/doc_test/broken_doc/conf.py
index 21eab6d52..1b74e151a 100644
--- a/tests/doc_test/broken_doc/conf.py
+++ b/tests/doc_test/broken_doc/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/broken_links/conf.py b/tests/doc_test/broken_links/conf.py
index 21eab6d52..1b74e151a 100644
--- a/tests/doc_test/broken_links/conf.py
+++ b/tests/doc_test/broken_links/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/broken_statuses/conf.py b/tests/doc_test/broken_statuses/conf.py
index 3706ca5ff..6e013eae5 100644
--- a/tests/doc_test/broken_statuses/conf.py
+++ b/tests/doc_test/broken_statuses/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_statuses = [
diff --git a/tests/doc_test/broken_syntax_doc/conf.py b/tests/doc_test/broken_syntax_doc/conf.py
index 21eab6d52..1b74e151a 100644
--- a/tests/doc_test/broken_syntax_doc/conf.py
+++ b/tests/doc_test/broken_syntax_doc/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/broken_tags/conf.py b/tests/doc_test/broken_tags/conf.py
index 7c04d916c..6559d7fe0 100644
--- a/tests/doc_test/broken_tags/conf.py
+++ b/tests/doc_test/broken_tags/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_tags = [
diff --git a/tests/doc_test/broken_tags_2/conf.py b/tests/doc_test/broken_tags_2/conf.py
index 7c04d916c..6559d7fe0 100644
--- a/tests/doc_test/broken_tags_2/conf.py
+++ b/tests/doc_test/broken_tags_2/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_tags = [
diff --git a/tests/doc_test/doc_basic/conf.py b/tests/doc_test/doc_basic/conf.py
index 0a5cb1761..2db347716 100644
--- a/tests/doc_test/doc_basic/conf.py
+++ b/tests/doc_test/doc_basic/conf.py
@@ -6,8 +6,32 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_basic_latex/conf.py b/tests/doc_test/doc_basic_latex/conf.py
index 32f8e8de5..1f9bcc6ed 100644
--- a/tests/doc_test/doc_basic_latex/conf.py
+++ b/tests/doc_test/doc_basic_latex/conf.py
@@ -8,8 +8,32 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_build_latex/conf.py b/tests/doc_test/doc_build_latex/conf.py
index 6a81ad4bb..df70aa270 100644
--- a/tests/doc_test/doc_build_latex/conf.py
+++ b/tests/doc_test/doc_build_latex/conf.py
@@ -13,8 +13,32 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_df_calc_sum/conf.py b/tests/doc_test/doc_df_calc_sum/conf.py
index 2e1bb2cf3..ee4795ac0 100644
--- a/tests/doc_test/doc_df_calc_sum/conf.py
+++ b/tests/doc_test/doc_df_calc_sum/conf.py
@@ -3,10 +3,38 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
-needs_extra_options = {"test_func": directives.unchanged, "hours": directives.unchanged, "amount": directives.unchanged}
+needs_extra_options = {
+ "test_func": directives.unchanged,
+ "hours": directives.unchanged,
+ "amount": directives.unchanged,
+}
diff --git a/tests/doc_test/doc_df_check_linked_values/conf.py b/tests/doc_test/doc_df_check_linked_values/conf.py
index 707f3afad..2a93cda81 100644
--- a/tests/doc_test/doc_df_check_linked_values/conf.py
+++ b/tests/doc_test/doc_df_check_linked_values/conf.py
@@ -3,10 +3,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_options = {"test_func": directives.unchanged, "hours": directives.unchanged}
diff --git a/tests/doc_test/doc_df_user_functions/conf.py b/tests/doc_test/doc_df_user_functions/conf.py
index 07958c0c0..14945940d 100644
--- a/tests/doc_test/doc_df_user_functions/conf.py
+++ b/tests/doc_test/doc_df_user_functions/conf.py
@@ -3,10 +3,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_options = {"test_func": directives.unchanged, "hours": directives.unchanged}
diff --git a/tests/doc_test/doc_dynamic_functions/conf.py b/tests/doc_test/doc_dynamic_functions/conf.py
index 5097fc12d..e474af668 100644
--- a/tests/doc_test/doc_dynamic_functions/conf.py
+++ b/tests/doc_test/doc_dynamic_functions/conf.py
@@ -3,10 +3,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_options = {"test_func": directives.unchanged}
diff --git a/tests/doc_test/doc_export_id/conf.py b/tests/doc_test/doc_export_id/conf.py
index 34095785f..d2a706459 100644
--- a/tests/doc_test/doc_export_id/conf.py
+++ b/tests/doc_test/doc_export_id/conf.py
@@ -6,10 +6,34 @@
plantuml_output_format = "svg"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_links = [
diff --git a/tests/doc_test/doc_extra_links/conf.py b/tests/doc_test/doc_extra_links/conf.py
index ad8a7f24e..863b56681 100644
--- a/tests/doc_test/doc_extra_links/conf.py
+++ b/tests/doc_test/doc_extra_links/conf.py
@@ -6,10 +6,34 @@
plantuml_output_format = "svg"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_links = [
diff --git a/tests/doc_test/doc_github_issue_21/conf.py b/tests/doc_test/doc_github_issue_21/conf.py
index 21eab6d52..1b74e151a 100644
--- a/tests/doc_test/doc_github_issue_21/conf.py
+++ b/tests/doc_test/doc_github_issue_21/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_github_issue_44/conf.py b/tests/doc_test/doc_github_issue_44/conf.py
index df3fe57f8..7556c0c4e 100644
--- a/tests/doc_test/doc_github_issue_44/conf.py
+++ b/tests/doc_test/doc_github_issue_44/conf.py
@@ -3,8 +3,32 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_global_options/conf.py b/tests/doc_test/doc_global_options/conf.py
index 46408adc5..f22187cad 100644
--- a/tests/doc_test/doc_global_options/conf.py
+++ b/tests/doc_test/doc_global_options/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_global_options = {
diff --git a/tests/doc_test/doc_layout/conf.py b/tests/doc_test/doc_layout/conf.py
index 6969f1c23..899d6306c 100644
--- a/tests/doc_test/doc_layout/conf.py
+++ b/tests/doc_test/doc_layout/conf.py
@@ -3,10 +3,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_options = {
@@ -19,7 +43,10 @@
"grid": "simple_side_right_partial",
"layout": {
"head": ['**< >** for *< >*'],
- "meta": ['**status**: < >', '**author**: < >'],
+ "meta": [
+ '**status**: < >',
+ '**author**: < >',
+ ],
"side": ['<>'],
},
},
@@ -27,14 +54,20 @@
"grid": "simple",
"layout": {
"head": ['**< >**'],
- "meta": ['**status**: < >', r'< >'],
+ "meta": [
+ '**status**: < >',
+ r'< >',
+ ],
},
},
"footer_grid": {
"grid": "simple_footer",
"layout": {
"head": ['**< >** for *< >*'],
- "meta": ['**status**: < >', '**author**: < >'],
+ "meta": [
+ '**status**: < >',
+ '**author**: < >',
+ ],
"footer": ['**custom footer for < >**'],
},
},
diff --git a/tests/doc_test/doc_list2need/conf.py b/tests/doc_test/doc_list2need/conf.py
index 009477a1b..43d93e5f4 100644
--- a/tests/doc_test/doc_list2need/conf.py
+++ b/tests/doc_test/doc_list2need/conf.py
@@ -8,10 +8,34 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_links = [
diff --git a/tests/doc_test/doc_measure_time/conf.py b/tests/doc_test/doc_measure_time/conf.py
index 74f164462..a4b1d4e5f 100644
--- a/tests/doc_test/doc_measure_time/conf.py
+++ b/tests/doc_test/doc_measure_time/conf.py
@@ -17,10 +17,34 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_need_count/conf.py b/tests/doc_test/doc_need_count/conf.py
index 21eab6d52..1b74e151a 100644
--- a/tests/doc_test/doc_need_count/conf.py
+++ b/tests/doc_test/doc_need_count/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_need_delete/conf.py b/tests/doc_test/doc_need_delete/conf.py
index 104609b54..3badaf917 100644
--- a/tests/doc_test/doc_need_delete/conf.py
+++ b/tests/doc_test/doc_need_delete/conf.py
@@ -6,8 +6,32 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "R_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "R_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_need_id_from_title/conf.py b/tests/doc_test/doc_need_id_from_title/conf.py
index e3a53e73b..d7e949123 100644
--- a/tests/doc_test/doc_need_id_from_title/conf.py
+++ b/tests/doc_test/doc_need_id_from_title/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_id_length = 20
diff --git a/tests/doc_test/doc_need_jinja_content/conf.py b/tests/doc_test/doc_need_jinja_content/conf.py
index 3f7d3cdd7..6081c7b09 100644
--- a/tests/doc_test/doc_need_jinja_content/conf.py
+++ b/tests/doc_test/doc_need_jinja_content/conf.py
@@ -6,10 +6,34 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "R_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "R_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_need_parts/conf.py b/tests/doc_test/doc_need_parts/conf.py
index 21eab6d52..1b74e151a 100644
--- a/tests/doc_test/doc_need_parts/conf.py
+++ b/tests/doc_test/doc_need_parts/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needarch/conf.py b/tests/doc_test/doc_needarch/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needarch/conf.py
+++ b/tests/doc_test/doc_needarch/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needarch_jinja_func_import/conf.py b/tests/doc_test/doc_needarch_jinja_func_import/conf.py
index dfccf97c2..f20c68b8c 100644
--- a/tests/doc_test/doc_needarch_jinja_func_import/conf.py
+++ b/tests/doc_test/doc_needarch_jinja_func_import/conf.py
@@ -20,7 +20,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -29,10 +36,34 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_links = [
@@ -41,5 +72,10 @@
"incoming": "is used by",
"outgoing": "uses",
},
- {"option": "tests", "incoming": "is tested by", "outgoing": "tests", "style": "#00AA00"},
+ {
+ "option": "tests",
+ "incoming": "is tested by",
+ "outgoing": "tests",
+ "style": "#00AA00",
+ },
]
diff --git a/tests/doc_test/doc_needarch_jinja_func_need/conf.py b/tests/doc_test/doc_needarch_jinja_func_need/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needarch_jinja_func_need/conf.py
+++ b/tests/doc_test/doc_needarch_jinja_func_need/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needarch_negative_tests/conf.py b/tests/doc_test/doc_needarch_negative_tests/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needarch_negative_tests/conf.py
+++ b/tests/doc_test/doc_needarch_negative_tests/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needbar/conf.py b/tests/doc_test/doc_needbar/conf.py
index 0d9b66fe3..c0dc66839 100644
--- a/tests/doc_test/doc_needbar/conf.py
+++ b/tests/doc_test/doc_needbar/conf.py
@@ -3,6 +3,18 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "RQ_", "color": "#FEDCD2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "RQ_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needextend/conf.py b/tests/doc_test/doc_needextend/conf.py
index 08df9747f..4c07658ee 100644
--- a/tests/doc_test/doc_needextend/conf.py
+++ b/tests/doc_test/doc_needextend/conf.py
@@ -3,8 +3,32 @@
needs_build_json = True
needs_id_regex = "^[A-Za-z0-9_]*"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needextend_strict/conf.py b/tests/doc_test/doc_needextend_strict/conf.py
index 6b4b9fa9d..99f842750 100644
--- a/tests/doc_test/doc_needextend_strict/conf.py
+++ b/tests/doc_test/doc_needextend_strict/conf.py
@@ -2,8 +2,32 @@
needs_id_regex = "^[A-Za-z0-9_]*"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needextract/conf.py b/tests/doc_test/doc_needextract/conf.py
index 47badfa7d..87cb4ac0e 100644
--- a/tests/doc_test/doc_needextract/conf.py
+++ b/tests/doc_test/doc_needextract/conf.py
@@ -8,8 +8,32 @@
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needflow/conf.py b/tests/doc_test/doc_needflow/conf.py
index aa6219420..aab24df0d 100644
--- a/tests/doc_test/doc_needflow/conf.py
+++ b/tests/doc_test/doc_needflow/conf.py
@@ -8,8 +8,32 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needflow_incl_child_needs/conf.py b/tests/doc_test/doc_needflow_incl_child_needs/conf.py
index 24b1cbd1b..96753e0bd 100644
--- a/tests/doc_test/doc_needflow_incl_child_needs/conf.py
+++ b/tests/doc_test/doc_needflow_incl_child_needs/conf.py
@@ -8,10 +8,34 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needimport_download_needs_json/conf.py b/tests/doc_test/doc_needimport_download_needs_json/conf.py
index 885bd47d6..814c929f3 100644
--- a/tests/doc_test/doc_needimport_download_needs_json/conf.py
+++ b/tests/doc_test/doc_needimport_download_needs_json/conf.py
@@ -3,8 +3,32 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needimport_download_needs_json_negative/conf.py b/tests/doc_test/doc_needimport_download_needs_json_negative/conf.py
index 885bd47d6..814c929f3 100644
--- a/tests/doc_test/doc_needimport_download_needs_json_negative/conf.py
+++ b/tests/doc_test/doc_needimport_download_needs_json_negative/conf.py
@@ -3,8 +3,32 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needlist/conf.py b/tests/doc_test/doc_needlist/conf.py
index 885bd47d6..814c929f3 100644
--- a/tests/doc_test/doc_needlist/conf.py
+++ b/tests/doc_test/doc_needlist/conf.py
@@ -3,8 +3,32 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needpie/conf.py b/tests/doc_test/doc_needpie/conf.py
index 86ef3ff45..2c635d301 100644
--- a/tests/doc_test/doc_needpie/conf.py
+++ b/tests/doc_test/doc_needpie/conf.py
@@ -8,10 +8,34 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_options = ["author"]
diff --git a/tests/doc_test/doc_needs_builder/conf.py b/tests/doc_test/doc_needs_builder/conf.py
index ce1cbf2cf..0e12a0867 100644
--- a/tests/doc_test/doc_needs_builder/conf.py
+++ b/tests/doc_test/doc_needs_builder/conf.py
@@ -5,10 +5,34 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_file = "custom_needs_test.json"
diff --git a/tests/doc_test/doc_needs_builder_negative_tests/conf.py b/tests/doc_test/doc_needs_builder_negative_tests/conf.py
index 885bd47d6..814c929f3 100644
--- a/tests/doc_test/doc_needs_builder_negative_tests/conf.py
+++ b/tests/doc_test/doc_needs_builder_negative_tests/conf.py
@@ -3,8 +3,32 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needs_builder_parallel/conf.py b/tests/doc_test/doc_needs_builder_parallel/conf.py
index 87abb2f42..dabfbcfcf 100644
--- a/tests/doc_test/doc_needs_builder_parallel/conf.py
+++ b/tests/doc_test/doc_needs_builder_parallel/conf.py
@@ -7,10 +7,34 @@
needs_build_json = True
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_file = "custom_needs_test.json"
diff --git a/tests/doc_test/doc_needs_external_needs/conf.py b/tests/doc_test/doc_needs_external_needs/conf.py
index 95e5cee80..2d787751d 100644
--- a/tests/doc_test/doc_needs_external_needs/conf.py
+++ b/tests/doc_test/doc_needs_external_needs/conf.py
@@ -6,13 +6,45 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_external_needs = [
- {"base_url": "http://my_company.com/docs/v1/", "json_path": "needs_test_small.json", "id_prefix": "ext_"},
- {"base_url": "../../_build/html", "json_path": "needs_test_small.json", "id_prefix": "ext_rel_path_"},
+ {
+ "base_url": "http://my_company.com/docs/v1/",
+ "json_path": "needs_test_small.json",
+ "id_prefix": "ext_",
+ },
+ {
+ "base_url": "../../_build/html",
+ "json_path": "needs_test_small.json",
+ "id_prefix": "ext_rel_path_",
+ },
]
diff --git a/tests/doc_test/doc_needs_external_needs_remote/conf.py b/tests/doc_test/doc_needs_external_needs_remote/conf.py
index b63ea602c..460476c0b 100644
--- a/tests/doc_test/doc_needs_external_needs_remote/conf.py
+++ b/tests/doc_test/doc_needs_external_needs_remote/conf.py
@@ -6,10 +6,34 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_external_needs = [
diff --git a/tests/doc_test/doc_needs_external_needs_with_target_url/conf.py b/tests/doc_test/doc_needs_external_needs_with_target_url/conf.py
index d4c198d2d..6a45a59f8 100644
--- a/tests/doc_test/doc_needs_external_needs_with_target_url/conf.py
+++ b/tests/doc_test/doc_needs_external_needs_with_target_url/conf.py
@@ -6,10 +6,34 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_external_needs = [
@@ -31,5 +55,9 @@
"json_path": "needs_test_small.json",
"id_prefix": "ext_need_type_",
},
- {"base_url": "http://my_company.com/docs/v1/", "json_path": "needs_test_small.json", "id_prefix": "ext_default_"},
+ {
+ "base_url": "http://my_company.com/docs/v1/",
+ "json_path": "needs_test_small.json",
+ "id_prefix": "ext_default_",
+ },
]
diff --git a/tests/doc_test/doc_needs_filter_data/conf.py b/tests/doc_test/doc_needs_filter_data/conf.py
index 23aa68579..8c62e9c97 100644
--- a/tests/doc_test/doc_needs_filter_data/conf.py
+++ b/tests/doc_test/doc_needs_filter_data/conf.py
@@ -12,10 +12,34 @@
needs_id_regex = "^[A-Za-z0-9_]*"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needs_filter_func_allow_dirty_filter/conf.py b/tests/doc_test/doc_needs_filter_func_allow_dirty_filter/conf.py
index 6d7baed7d..3361ea436 100644
--- a/tests/doc_test/doc_needs_filter_func_allow_dirty_filter/conf.py
+++ b/tests/doc_test/doc_needs_filter_func_allow_dirty_filter/conf.py
@@ -8,8 +8,20 @@
needs_id_regex = "^[A-Za-z0-9_]*"
needs_types = [
- {"directive": "feature", "title": "Feature", "prefix": "FE_", "color": "#FEDCD2", "style": "node"},
- {"directive": "usecase", "title": "Use Case", "prefix": "USE_", "color": "#DF744A", "style": "node"},
+ {
+ "directive": "feature",
+ "title": "Feature",
+ "prefix": "FE_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "usecase",
+ "title": "Use Case",
+ "prefix": "USE_",
+ "color": "#DF744A",
+ "style": "node",
+ },
]
needs_extra_options = ["ti", "tcl"]
diff --git a/tests/doc_test/doc_needs_warnings/conf.py b/tests/doc_test/doc_needs_warnings/conf.py
index f6b991773..cfc630c88 100644
--- a/tests/doc_test/doc_needs_warnings/conf.py
+++ b/tests/doc_test/doc_needs_warnings/conf.py
@@ -3,14 +3,42 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_external_needs = [
- {"base_url": "http://my_company.com/docs/v1/", "json_path": "needs_test_small.json", "id_prefix": "ext_"}
+ {
+ "base_url": "http://my_company.com/docs/v1/",
+ "json_path": "needs_test_small.json",
+ "id_prefix": "ext_",
+ }
]
@@ -38,7 +66,11 @@ def setup(app):
add_warning(app, "api_warning_filter", filter_string="status == 'example_2'")
add_warning(app, "api_warning_func", custom_warning_func)
- add_warning(app, "invalid_status", "status not in ['open', 'closed', 'done', 'example_2', 'example_3']")
+ add_warning(
+ app,
+ "invalid_status",
+ "status not in ['open', 'closed', 'done', 'example_2', 'example_3']",
+ )
# Needs option to set True or False to raise sphinx-warning for each not passed warning check
diff --git a/tests/doc_test/doc_needs_warnings_return_status_code/conf.py b/tests/doc_test/doc_needs_warnings_return_status_code/conf.py
index 490cb7534..47d7db034 100644
--- a/tests/doc_test/doc_needs_warnings_return_status_code/conf.py
+++ b/tests/doc_test/doc_needs_warnings_return_status_code/conf.py
@@ -3,10 +3,34 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needsfile/conf.py b/tests/doc_test/doc_needsfile/conf.py
index e607e1f34..457adbf2a 100644
--- a/tests/doc_test/doc_needsfile/conf.py
+++ b/tests/doc_test/doc_needsfile/conf.py
@@ -3,8 +3,32 @@
needs_file = "needs_errors.json"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needtable/conf.py b/tests/doc_test/doc_needtable/conf.py
index 73fd61077..0aef9a7d0 100644
--- a/tests/doc_test/doc_needtable/conf.py
+++ b/tests/doc_test/doc_needtable/conf.py
@@ -5,10 +5,34 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_options = [
diff --git a/tests/doc_test/doc_needuml/conf.py b/tests/doc_test/doc_needuml/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml/conf.py
+++ b/tests/doc_test/doc_needuml/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needuml_diagram_allowmixing/conf.py b/tests/doc_test/doc_needuml_diagram_allowmixing/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml_diagram_allowmixing/conf.py
+++ b/tests/doc_test/doc_needuml_diagram_allowmixing/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needuml_duplicate_key/conf.py b/tests/doc_test/doc_needuml_duplicate_key/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml_duplicate_key/conf.py
+++ b/tests/doc_test/doc_needuml_duplicate_key/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needuml_filter/conf.py b/tests/doc_test/doc_needuml_filter/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml_filter/conf.py
+++ b/tests/doc_test/doc_needuml_filter/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needuml_jinja_func_flow/conf.py b/tests/doc_test/doc_needuml_jinja_func_flow/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml_jinja_func_flow/conf.py
+++ b/tests/doc_test/doc_needuml_jinja_func_flow/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needuml_jinja_func_import_negative_tests/conf.py b/tests/doc_test/doc_needuml_jinja_func_import_negative_tests/conf.py
index dfccf97c2..f20c68b8c 100644
--- a/tests/doc_test/doc_needuml_jinja_func_import_negative_tests/conf.py
+++ b/tests/doc_test/doc_needuml_jinja_func_import_negative_tests/conf.py
@@ -20,7 +20,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -29,10 +36,34 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_links = [
@@ -41,5 +72,10 @@
"incoming": "is used by",
"outgoing": "uses",
},
- {"option": "tests", "incoming": "is tested by", "outgoing": "tests", "style": "#00AA00"},
+ {
+ "option": "tests",
+ "incoming": "is tested by",
+ "outgoing": "tests",
+ "style": "#00AA00",
+ },
]
diff --git a/tests/doc_test/doc_needuml_jinja_func_need_removed/conf.py b/tests/doc_test/doc_needuml_jinja_func_need_removed/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml_jinja_func_need_removed/conf.py
+++ b/tests/doc_test/doc_needuml_jinja_func_need_removed/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needuml_jinja_func_ref/conf.py b/tests/doc_test/doc_needuml_jinja_func_ref/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml_jinja_func_ref/conf.py
+++ b/tests/doc_test/doc_needuml_jinja_func_ref/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needuml_key_name_diagram/conf.py b/tests/doc_test/doc_needuml_key_name_diagram/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml_key_name_diagram/conf.py
+++ b/tests/doc_test/doc_needuml_key_name_diagram/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_needuml_save/conf.py b/tests/doc_test/doc_needuml_save/conf.py
index e4c6437fd..ad06ccaad 100644
--- a/tests/doc_test/doc_needuml_save/conf.py
+++ b/tests/doc_test/doc_needuml_save/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,10 +38,34 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_build_needumls = "my_needumls"
diff --git a/tests/doc_test/doc_needuml_save_with_abs_path/conf.py b/tests/doc_test/doc_needuml_save_with_abs_path/conf.py
index 1666acffc..208a58691 100644
--- a/tests/doc_test/doc_needuml_save_with_abs_path/conf.py
+++ b/tests/doc_test/doc_needuml_save_with_abs_path/conf.py
@@ -22,7 +22,14 @@
"color": "#BFD8D2",
"style": "card",
},
- {"directive": "sys", "content": "plantuml", "title": "System", "prefix": "S_", "color": "#FF68D2", "style": "node"},
+ {
+ "directive": "sys",
+ "content": "plantuml",
+ "title": "System",
+ "prefix": "S_",
+ "color": "#FF68D2",
+ "style": "node",
+ },
{
"directive": "prod",
"content": "plantuml",
@@ -31,8 +38,32 @@
"color": "#FF68D2",
"style": "node",
},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/doc_open_needs_service/conf.py b/tests/doc_test/doc_open_needs_service/conf.py
index be86e0012..50d1c74d3 100644
--- a/tests/doc_test/doc_open_needs_service/conf.py
+++ b/tests/doc_test/doc_open_needs_service/conf.py
@@ -3,10 +3,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "R_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "task", "title": "Task", "prefix": "T_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "R_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "task",
+ "title": "Task",
+ "prefix": "T_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_services = {
diff --git a/tests/doc_test/doc_report_dead_links_false/conf.py b/tests/doc_test/doc_report_dead_links_false/conf.py
index 813f445a4..ae1448dee 100644
--- a/tests/doc_test/doc_report_dead_links_false/conf.py
+++ b/tests/doc_test/doc_report_dead_links_false/conf.py
@@ -4,10 +4,34 @@
plantuml_output_format = "svg"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
suppress_warnings = ["needs.link_outgoing"]
diff --git a/tests/doc_test/doc_report_dead_links_true/conf.py b/tests/doc_test/doc_report_dead_links_true/conf.py
index 91467f963..e114d9518 100644
--- a/tests/doc_test/doc_report_dead_links_true/conf.py
+++ b/tests/doc_test/doc_report_dead_links_true/conf.py
@@ -4,10 +4,34 @@
plantuml_output_format = "svg"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_extra_links = [
diff --git a/tests/doc_test/doc_role_need_max_title_length/conf.py b/tests/doc_test/doc_role_need_max_title_length/conf.py
index 46b0b1685..11f882b66 100644
--- a/tests/doc_test/doc_role_need_max_title_length/conf.py
+++ b/tests/doc_test/doc_role_need_max_title_length/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_role_need_template = "[{id}] {title} ({status}) {type_name}/{type} - {tags} - {links} - {links_back} - {content}"
diff --git a/tests/doc_test/doc_role_need_max_title_length_unlimited/conf.py b/tests/doc_test/doc_role_need_max_title_length_unlimited/conf.py
index 23dec017c..0a85db025 100644
--- a/tests/doc_test/doc_role_need_max_title_length_unlimited/conf.py
+++ b/tests/doc_test/doc_role_need_max_title_length_unlimited/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_role_need_template = "[{id}] {title} ({status}) {type_name}/{type} - {tags} - {links} - {links_back} - {content}"
diff --git a/tests/doc_test/doc_role_need_template/conf.py b/tests/doc_test/doc_role_need_template/conf.py
index 4d15bdb0d..fa86a8027 100644
--- a/tests/doc_test/doc_role_need_template/conf.py
+++ b/tests/doc_test/doc_role_need_template/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_role_need_template = "[{id}] {title} ({status}) {type_name}/{type} - {tags} - {links} - {links_back} - {content}"
diff --git a/tests/doc_test/doc_style_blank/conf.py b/tests/doc_test/doc_style_blank/conf.py
index ef8a46842..959f7f07b 100644
--- a/tests/doc_test/doc_style_blank/conf.py
+++ b/tests/doc_test/doc_style_blank/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_css = "blank.css"
diff --git a/tests/doc_test/doc_style_custom/conf.py b/tests/doc_test/doc_style_custom/conf.py
index 80485dc17..ed2820a9b 100644
--- a/tests/doc_test/doc_style_custom/conf.py
+++ b/tests/doc_test/doc_style_custom/conf.py
@@ -3,10 +3,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_css = os.path.join(os.path.dirname(__file__), "my_custom.css")
diff --git a/tests/doc_test/doc_style_modern/conf.py b/tests/doc_test/doc_style_modern/conf.py
index 68f00c54c..51fd8c41c 100644
--- a/tests/doc_test/doc_style_modern/conf.py
+++ b/tests/doc_test/doc_style_modern/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_css = "modern.css"
diff --git a/tests/doc_test/doc_style_unknown/conf.py b/tests/doc_test/doc_style_unknown/conf.py
index ec0cfafe9..8ee6fe502 100644
--- a/tests/doc_test/doc_style_unknown/conf.py
+++ b/tests/doc_test/doc_style_unknown/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_css = "UNKNOWN.css"
diff --git a/tests/doc_test/external_doc/conf.py b/tests/doc_test/external_doc/conf.py
index e38367471..c884b37f9 100644
--- a/tests/doc_test/external_doc/conf.py
+++ b/tests/doc_test/external_doc/conf.py
@@ -10,11 +10,41 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "RE_", "color": "#BFD8D2", "style": "node"},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "RE_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
test_dir = os.path.dirname(__file__)
@@ -23,7 +53,11 @@
# needs_external_needs = [{"base_url": f"file://{test_dir}", "json_url": f"file://{test_json}", "id_prefix": "ext_"}]
needs_external_needs = [
- {"base_url": "http://my_company.com/docs/v1/", "json_path": "needs_test_small.json", "id_prefix": "EXT_"}
+ {
+ "base_url": "http://my_company.com/docs/v1/",
+ "json_path": "needs_test_small.json",
+ "id_prefix": "EXT_",
+ }
]
# Needed to export really ALL needs. The default entry would filter out all needs coming from external
diff --git a/tests/doc_test/filter_doc/conf.py b/tests/doc_test/filter_doc/conf.py
index 717640d49..787367095 100644
--- a/tests/doc_test/filter_doc/conf.py
+++ b/tests/doc_test/filter_doc/conf.py
@@ -8,13 +8,55 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "RE_", "color": "#BFD8D2", "style": "node"},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
- {"directive": "user", "title": "User", "prefix": "U_", "color": "#777777", "style": "node"},
- {"directive": "action", "title": "Action", "prefix": "A_", "color": "#FFCC00", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "RE_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
+ {
+ "directive": "user",
+ "title": "User",
+ "prefix": "U_",
+ "color": "#777777",
+ "style": "node",
+ },
+ {
+ "directive": "action",
+ "title": "Action",
+ "prefix": "A_",
+ "color": "#FFCC00",
+ "style": "node",
+ },
]
needs_extra_links = [
diff --git a/tests/doc_test/generic_doc/conf.py b/tests/doc_test/generic_doc/conf.py
index 21eab6d52..1b74e151a 100644
--- a/tests/doc_test/generic_doc/conf.py
+++ b/tests/doc_test/generic_doc/conf.py
@@ -1,8 +1,32 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/import_doc/conf.py b/tests/doc_test/import_doc/conf.py
index fcbde4dd9..5ddaeb329 100644
--- a/tests/doc_test/import_doc/conf.py
+++ b/tests/doc_test/import_doc/conf.py
@@ -5,11 +5,41 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "RE_", "color": "#BFD8D2", "style": "node"},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "RE_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_template = """
diff --git a/tests/doc_test/import_doc_empty/conf.py b/tests/doc_test/import_doc_empty/conf.py
index 45270f660..7e12482c6 100644
--- a/tests/doc_test/import_doc_empty/conf.py
+++ b/tests/doc_test/import_doc_empty/conf.py
@@ -3,11 +3,41 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "RE_", "color": "#BFD8D2", "style": "node"},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "RE_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_template = """
diff --git a/tests/doc_test/import_doc_invalid/conf.py b/tests/doc_test/import_doc_invalid/conf.py
index 45270f660..7e12482c6 100644
--- a/tests/doc_test/import_doc_invalid/conf.py
+++ b/tests/doc_test/import_doc_invalid/conf.py
@@ -3,11 +3,41 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "RE_", "color": "#BFD8D2", "style": "node"},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "RE_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_template = """
diff --git a/tests/doc_test/need_constraints/conf.py b/tests/doc_test/need_constraints/conf.py
index b33483a78..ab7f10295 100644
--- a/tests/doc_test/need_constraints/conf.py
+++ b/tests/doc_test/need_constraints/conf.py
@@ -4,14 +4,42 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_external_needs = [
- {"base_url": "http://my_company.com/docs/v1/", "json_path": "needs_test_small.json", "id_prefix": "ext_"}
+ {
+ "base_url": "http://my_company.com/docs/v1/",
+ "json_path": "needs_test_small.json",
+ "id_prefix": "ext_",
+ }
]
@@ -39,7 +67,11 @@ def setup(app):
add_warning(app, "api_warning_filter", filter_string="status == 'example_2'")
add_warning(app, "api_warning_func", custom_warning_func)
- add_warning(app, "invalid_status", "status not in ['open', 'closed', 'done', 'example_2', 'example_3']")
+ add_warning(
+ app,
+ "invalid_status",
+ "status not in ['open', 'closed', 'done', 'example_2', 'example_3']",
+ )
# Needs option to set True or False to raise sphinx-warning for each not passed warning check
diff --git a/tests/doc_test/need_constraints_failed/conf.py b/tests/doc_test/need_constraints_failed/conf.py
index 31228134f..7430e40f2 100644
--- a/tests/doc_test/need_constraints_failed/conf.py
+++ b/tests/doc_test/need_constraints_failed/conf.py
@@ -3,14 +3,42 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_external_needs = [
- {"base_url": "http://my_company.com/docs/v1/", "json_path": "needs_test_small.json", "id_prefix": "ext_"}
+ {
+ "base_url": "http://my_company.com/docs/v1/",
+ "json_path": "needs_test_small.json",
+ "id_prefix": "ext_",
+ }
]
@@ -38,7 +66,11 @@ def setup(app):
add_warning(app, "api_warning_filter", filter_string="status == 'example_2'")
add_warning(app, "api_warning_func", custom_warning_func)
- add_warning(app, "invalid_status", "status not in ['open', 'closed', 'done', 'example_2', 'example_3']")
+ add_warning(
+ app,
+ "invalid_status",
+ "status not in ['open', 'closed', 'done', 'example_2', 'example_3']",
+ )
# Needs option to set True or False to raise sphinx-warning for each not passed warning check
diff --git a/tests/doc_test/needextract_with_nested_needs/conf.py b/tests/doc_test/needextract_with_nested_needs/conf.py
index df3fe57f8..7556c0c4e 100644
--- a/tests/doc_test/needextract_with_nested_needs/conf.py
+++ b/tests/doc_test/needextract_with_nested_needs/conf.py
@@ -3,8 +3,32 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/needpie_with_zero_needs/conf.py b/tests/doc_test/needpie_with_zero_needs/conf.py
index df3fe57f8..7556c0c4e 100644
--- a/tests/doc_test/needpie_with_zero_needs/conf.py
+++ b/tests/doc_test/needpie_with_zero_needs/conf.py
@@ -3,8 +3,32 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/non_exists_file_import/conf.py b/tests/doc_test/non_exists_file_import/conf.py
index 45270f660..7e12482c6 100644
--- a/tests/doc_test/non_exists_file_import/conf.py
+++ b/tests/doc_test/non_exists_file_import/conf.py
@@ -3,11 +3,41 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "RE_", "color": "#BFD8D2", "style": "node"},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "RE_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_template = """
diff --git a/tests/doc_test/parallel_doc/conf.py b/tests/doc_test/parallel_doc/conf.py
index 03f708826..f6f03d869 100644
--- a/tests/doc_test/parallel_doc/conf.py
+++ b/tests/doc_test/parallel_doc/conf.py
@@ -1,10 +1,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_variants = {"change_author": "assignee == 'Randy Duodu'"}
needs_variant_options = ["status", "author"]
diff --git a/tests/doc_test/role_need_doc/conf.py b/tests/doc_test/role_need_doc/conf.py
index a3d0a4e67..b64a2a601 100644
--- a/tests/doc_test/role_need_doc/conf.py
+++ b/tests/doc_test/role_need_doc/conf.py
@@ -8,11 +8,41 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "req", "title": "Requirement", "prefix": "RE_", "color": "#BFD8D2", "style": "node"},
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "req",
+ "title": "Requirement",
+ "prefix": "RE_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
test_dir = os.path.dirname(__file__)
@@ -21,7 +51,11 @@
# needs_external_needs = [{"base_url": f"file://{test_dir}", "json_url": f"file://{test_json}", "id_prefix": "ext_"}]
needs_external_needs = [
- {"base_url": "http://my_company.com/docs/v1/", "json_path": "needs_test_small.json", "id_prefix": "EXT_"}
+ {
+ "base_url": "http://my_company.com/docs/v1/",
+ "json_path": "needs_test_small.json",
+ "id_prefix": "EXT_",
+ }
]
# Needed to export really ALL needs. The default entry would filter out all needs coming from external
diff --git a/tests/doc_test/service_doc/conf.py b/tests/doc_test/service_doc/conf.py
index b2a23705d..b931c94b1 100644
--- a/tests/doc_test/service_doc/conf.py
+++ b/tests/doc_test/service_doc/conf.py
@@ -3,10 +3,34 @@
extensions = ["sphinx_needs"]
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/unicode_support/conf.py b/tests/doc_test/unicode_support/conf.py
index 885bd47d6..814c929f3 100644
--- a/tests/doc_test/unicode_support/conf.py
+++ b/tests/doc_test/unicode_support/conf.py
@@ -3,8 +3,32 @@
needs_table_style = "TABLE"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
diff --git a/tests/doc_test/variant_doc/conf.py b/tests/doc_test/variant_doc/conf.py
index d94dd50ed..d78cd7913 100644
--- a/tests/doc_test/variant_doc/conf.py
+++ b/tests/doc_test/variant_doc/conf.py
@@ -8,10 +8,34 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_variants = {"change_author": "assignee == 'Randy Duodu'"}
needs_variant_options = ["status", "author", "links"]
diff --git a/tests/doc_test/variant_options/conf.py b/tests/doc_test/variant_options/conf.py
index 42108d4a1..1902a3923 100644
--- a/tests/doc_test/variant_options/conf.py
+++ b/tests/doc_test/variant_options/conf.py
@@ -8,10 +8,34 @@
needs_id_regex = "^[A-Za-z0-9_]"
needs_types = [
- {"directive": "story", "title": "User Story", "prefix": "US_", "color": "#BFD8D2", "style": "node"},
- {"directive": "spec", "title": "Specification", "prefix": "SP_", "color": "#FEDCD2", "style": "node"},
- {"directive": "impl", "title": "Implementation", "prefix": "IM_", "color": "#DF744A", "style": "node"},
- {"directive": "test", "title": "Test Case", "prefix": "TC_", "color": "#DCB239", "style": "node"},
+ {
+ "directive": "story",
+ "title": "User Story",
+ "prefix": "US_",
+ "color": "#BFD8D2",
+ "style": "node",
+ },
+ {
+ "directive": "spec",
+ "title": "Specification",
+ "prefix": "SP_",
+ "color": "#FEDCD2",
+ "style": "node",
+ },
+ {
+ "directive": "impl",
+ "title": "Implementation",
+ "prefix": "IM_",
+ "color": "#DF744A",
+ "style": "node",
+ },
+ {
+ "directive": "test",
+ "title": "Test Case",
+ "prefix": "TC_",
+ "color": "#DCB239",
+ "style": "node",
+ },
]
needs_variants = {"change_author": "assignee == 'Randy Duodu'"}
needs_variant_options = []
diff --git a/tests/no_mpl_tests.py b/tests/no_mpl_tests.py
index 95aaf0747..dfaeb30ef 100644
--- a/tests/no_mpl_tests.py
+++ b/tests/no_mpl_tests.py
@@ -3,7 +3,11 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_needbar"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_needbar"}],
+ indirect=True,
+)
def test_needbar(test_app):
"""Test the build fails correctly, if matplotlib is not installed."""
test_app.build()
@@ -11,7 +15,11 @@ def test_needbar(test_app):
assert expected in test_app._warning.getvalue()
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_needpie"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_needpie"}],
+ indirect=True,
+)
def test_needpie(test_app):
"""Test the build fails correctly, if matplotlib is not installed."""
test_app.build()
diff --git a/tests/test_add_sections.py b/tests/test_add_sections.py
index 77028bf84..4658beef8 100644
--- a/tests/test_add_sections.py
+++ b/tests/test_add_sections.py
@@ -4,7 +4,11 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/add_sections"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/add_sections"}],
+ indirect=True,
+)
def test_section_is_usable_in_filters(test_app):
app = test_app
app.builder.build_all()
diff --git a/tests/test_api_configuration.py b/tests/test_api_configuration.py
index 556c98887..c031f8e6e 100644
--- a/tests/test_api_configuration.py
+++ b/tests/test_api_configuration.py
@@ -14,7 +14,9 @@ def setup(app):
sys.modules["dummy_extension.dummy"] = dummy_extension
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/api_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "html", "srcdir": "doc_test/api_doc"}], indirect=True
+)
def test_api_get_types(test_app):
from sphinx_needs.api import get_need_types
@@ -24,7 +26,11 @@ def test_api_get_types(test_app):
assert set(need_types) == {"story", "spec", "impl", "test"}
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/api_doc_awesome"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/api_doc_awesome"}],
+ indirect=True,
+)
def test_api_add_type(test_app, snapshot):
from sphinx_needs.api import add_need_type
diff --git a/tests/test_api_usage_in_extension.py b/tests/test_api_usage_in_extension.py
index 1ff0dd801..2a434e8df 100644
--- a/tests/test_api_usage_in_extension.py
+++ b/tests/test_api_usage_in_extension.py
@@ -22,7 +22,9 @@ def after_config(app, config):
sys.modules["dummy_extension.dummy"] = dummy_extension
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/api_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "html", "srcdir": "doc_test/api_doc"}], indirect=True
+)
def test_api_configuration(test_app):
app = test_app
diff --git a/tests/test_arch.py b/tests/test_arch.py
index f17ec8887..e58389a30 100644
--- a/tests/test_arch.py
+++ b/tests/test_arch.py
@@ -3,7 +3,9 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/arch_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "html", "srcdir": "doc_test/arch_doc"}], indirect=True
+)
def test_doc_build_html(test_app):
app = test_app
app.build()
diff --git a/tests/test_basic_doc.py b/tests/test_basic_doc.py
index 3c12b8ac7..806d29147 100644
--- a/tests/test_basic_doc.py
+++ b/tests/test_basic_doc.py
@@ -31,7 +31,9 @@ def random_data_callback(request):
if re.match(r"/search/issues", request.path_url):
data = GITHUB_ISSUE_SEARCH_ANSWER
data["items"][0]["number"] = randrange(10000)
- elif re.match(r"/.+/issue/.+", request.path_url) or re.match(r"/.+/pulls/.+", request.path_url):
+ elif re.match(r"/.+/issue/.+", request.path_url) or re.match(
+ r"/.+/pulls/.+", request.path_url
+ ):
data = GITHUB_SPECIFIC_ISSUE_ANSWER
data["number"] = randrange(10000)
elif re.match(r"/search/commits", request.path_url):
@@ -49,7 +51,9 @@ def random_data_callback(request):
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_basic"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_basic"}], indirect=True
+)
def test_build_html(test_app):
responses.add_callback(
responses.GET,
@@ -57,7 +61,9 @@ def test_build_html(test_app):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
app.builder.build_all()
@@ -71,7 +77,11 @@ def test_build_html(test_app):
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/generic_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/generic_doc"}],
+ indirect=True,
+)
def test_build_html_parallel(test_app: Sphinx, snapshot_doctree):
responses.add_callback(
responses.GET,
@@ -79,7 +89,9 @@ def test_build_html_parallel(test_app: Sphinx, snapshot_doctree):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
app.builder.build_all()
@@ -94,8 +106,14 @@ def test_build_html_parallel(test_app: Sphinx, snapshot_doctree):
assert app.env.get_doctree("index") == snapshot_doctree
-@pytest.mark.skipif(sys.platform == "win32", reason="assert fails on windows, need to fix later.")
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/generic_doc"}], indirect=True)
+@pytest.mark.skipif(
+ sys.platform == "win32", reason="assert fails on windows, need to fix later."
+)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/generic_doc"}],
+ indirect=True,
+)
def test_html_head_files(test_app):
app = test_app
app.builder.build_all()
@@ -120,7 +138,11 @@ def test_html_head_files(test_app):
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "singlehtml", "srcdir": "doc_test/doc_basic"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "singlehtml", "srcdir": "doc_test/doc_basic"}],
+ indirect=True,
+)
def test_build_singlehtml(test_app):
responses.add_callback(
responses.GET,
@@ -128,14 +150,20 @@ def test_build_singlehtml(test_app):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
app.builder.build_all()
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "latex", "srcdir": "doc_test/doc_basic"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "latex", "srcdir": "doc_test/doc_basic"}],
+ indirect=True,
+)
def test_build_latex(test_app):
responses.add_callback(
responses.GET,
@@ -143,14 +171,18 @@ def test_build_latex(test_app):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
app.builder.build_all()
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "epub", "srcdir": "doc_test/doc_basic"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "epub", "srcdir": "doc_test/doc_basic"}], indirect=True
+)
def test_build_epub(test_app):
responses.add_callback(
responses.GET,
@@ -158,14 +190,18 @@ def test_build_epub(test_app):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
app.builder.build_all()
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "json", "srcdir": "doc_test/doc_basic"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app", [{"buildername": "json", "srcdir": "doc_test/doc_basic"}], indirect=True
+)
def test_build_json(test_app):
responses.add_callback(
responses.GET,
@@ -173,14 +209,20 @@ def test_build_json(test_app):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
app.builder.build_all()
@responses.activate
-@pytest.mark.parametrize("test_app", [{"buildername": "needs", "srcdir": "doc_test/doc_basic"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "needs", "srcdir": "doc_test/doc_basic"}],
+ indirect=True,
+)
def test_build_needs(test_app, snapshot):
responses.add_callback(
responses.GET,
@@ -188,7 +230,9 @@ def test_build_needs(test_app, snapshot):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
app = test_app
app.builder.build_all()
@@ -202,7 +246,13 @@ def test_build_needs(test_app, snapshot):
@responses.activate
@pytest.mark.parametrize(
"test_app",
- [{"buildername": "html", "srcdir": "doc_test/doc_basic", "confoverrides": {"needs_id_required": True}}],
+ [
+ {
+ "buildername": "html",
+ "srcdir": "doc_test/doc_basic",
+ "confoverrides": {"needs_id_required": True},
+ }
+ ],
indirect=True,
)
def test_id_required_build_html(test_app):
@@ -213,7 +263,11 @@ def test_id_required_build_html(test_app):
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET,
+ re.compile(r"https://avatars.githubusercontent.com/.*"),
+ body="",
+ )
app = test_app
app.builder.build_all()
@@ -231,7 +285,9 @@ def test_sphinx_api_build():
callback=random_data_callback,
content_type="application/json",
)
- responses.add(responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body="")
+ responses.add(
+ responses.GET, re.compile(r"https://avatars.githubusercontent.com/.*"), body=""
+ )
temp_dir = tempfile.mkdtemp()
src_dir = os.path.join(os.path.dirname(__file__), "doc_test", "doc_basic")
diff --git a/tests/test_broken_doc.py b/tests/test_broken_doc.py
index 4458d69ce..d203541e0 100644
--- a/tests/test_broken_doc.py
+++ b/tests/test_broken_doc.py
@@ -3,7 +3,11 @@
from sphinx_needs.api.need import NeedsDuplicatedId
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/broken_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/broken_doc"}],
+ indirect=True,
+)
def test_doc_build_html(test_app):
with pytest.raises(NeedsDuplicatedId):
app = test_app
diff --git a/tests/test_broken_links.py b/tests/test_broken_links.py
index 0bc1613b9..37944db51 100644
--- a/tests/test_broken_links.py
+++ b/tests/test_broken_links.py
@@ -3,7 +3,9 @@
@pytest.mark.parametrize(
- "test_app", [{"buildername": "html", "srcdir": "doc_test/broken_links", "no_plantuml": True}], indirect=True
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/broken_links", "no_plantuml": True}],
+ indirect=True,
)
def test_doc_build_html(test_app):
app = test_app
diff --git a/tests/test_broken_statuses.py b/tests/test_broken_statuses.py
index dcb09237d..4efc7abff 100644
--- a/tests/test_broken_statuses.py
+++ b/tests/test_broken_statuses.py
@@ -3,7 +3,11 @@
from sphinx_needs.api.need import NeedsStatusNotAllowed
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/broken_statuses"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/broken_statuses"}],
+ indirect=True,
+)
def test_doc_build_html(test_app):
with pytest.raises(NeedsStatusNotAllowed):
app = test_app
diff --git a/tests/test_broken_syntax_doc.py b/tests/test_broken_syntax_doc.py
index 162da581a..33552793f 100644
--- a/tests/test_broken_syntax_doc.py
+++ b/tests/test_broken_syntax_doc.py
@@ -3,7 +3,11 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/broken_syntax_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/broken_syntax_doc"}],
+ indirect=True,
+)
def test_doc_broken_syntax(test_app):
app = test_app
diff --git a/tests/test_broken_tags.py b/tests/test_broken_tags.py
index 8a1d6896f..631c0b623 100644
--- a/tests/test_broken_tags.py
+++ b/tests/test_broken_tags.py
@@ -5,7 +5,11 @@
from sphinx_needs.api.need import NeedsTagNotAllowed
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/broken_tags"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/broken_tags"}],
+ indirect=True,
+)
def test_doc_build_html(test_app):
with pytest.raises(NeedsTagNotAllowed):
app = test_app
@@ -15,7 +19,11 @@ def test_doc_build_html(test_app):
assert "SP_TOO_003" in html
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/broken_tags_2"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/broken_tags_2"}],
+ indirect=True,
+)
def test_doc_build_html_unneeded_chars(test_app):
"""
Test for https://github.com/useblocks/sphinxcontrib-needs/issues/36
diff --git a/tests/test_clean_log.py b/tests/test_clean_log.py
index 327e8adcf..af6f25036 100644
--- a/tests/test_clean_log.py
+++ b/tests/test_clean_log.py
@@ -5,9 +5,13 @@
class CleanLogTestCase(unittest.TestCase):
def test_external_needs_clean_log(self):
- self.assertEqual(clean_log("http://user:password@host.url/"), "http://****:****@host.url/")
self.assertEqual(
- clean_log("Downloading file from https://daniel:my_password@server.com now"),
+ clean_log("http://user:password@host.url/"), "http://****:****@host.url/"
+ )
+ self.assertEqual(
+ clean_log(
+ "Downloading file from https://daniel:my_password@server.com now"
+ ),
"Downloading file from https://****:****@server.com now",
)
self.assertEqual(
diff --git a/tests/test_complex_builders.py b/tests/test_complex_builders.py
index 3c913b2c7..c937df6d4 100644
--- a/tests/test_complex_builders.py
+++ b/tests/test_complex_builders.py
@@ -10,7 +10,14 @@
@pytest.mark.parametrize(
"test_app",
- [{"buildername": "latex", "srcdir": "doc_test/doc_basic_latex", "warning": True, "parallel": 2}],
+ [
+ {
+ "buildername": "latex",
+ "srcdir": "doc_test/doc_basic_latex",
+ "warning": True,
+ "parallel": 2,
+ }
+ ],
indirect=True,
)
def test_doc_complex_latex(test_app):
@@ -23,7 +30,14 @@ def test_doc_complex_latex(test_app):
@pytest.mark.parametrize(
"test_app",
- [{"buildername": "singlehtml", "srcdir": "doc_test/doc_basic_latex", "warning": True, "parallel": 2}],
+ [
+ {
+ "buildername": "singlehtml",
+ "srcdir": "doc_test/doc_basic_latex",
+ "warning": True,
+ "parallel": 2,
+ }
+ ],
indirect=True,
)
def test_doc_complex_singlehtml(test_app):
diff --git a/tests/test_doc_build_latex.py b/tests/test_doc_build_latex.py
index a8e763ee0..11b3a89e8 100644
--- a/tests/test_doc_build_latex.py
+++ b/tests/test_doc_build_latex.py
@@ -3,7 +3,11 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "latex", "srcdir": "doc_test/doc_build_latex"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "latex", "srcdir": "doc_test/doc_build_latex"}],
+ indirect=True,
+)
def test_doc_build_latex(test_app):
app = test_app
diff --git a/tests/test_dynamic_functions.py b/tests/test_dynamic_functions.py
index 181cd2256..59d0a7978 100644
--- a/tests/test_dynamic_functions.py
+++ b/tests/test_dynamic_functions.py
@@ -5,7 +5,9 @@
@pytest.mark.parametrize(
- "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_dynamic_functions"}], indirect=True
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_dynamic_functions"}],
+ indirect=True,
)
def test_doc_dynamic_functions(test_app):
app = test_app
@@ -13,22 +15,42 @@ def test_doc_dynamic_functions(test_app):
html = Path(app.outdir, "index.html").read_text()
assert "This is id SP_TOO_001" in html
- assert sum(1 for _ in re.finditer('test2 ', html)) == 2
- assert sum(1 for _ in re.finditer('test ', html)) == 2
- assert sum(1 for _ in re.finditer('my_tag ', html)) == 1
+ assert (
+ sum(1 for _ in re.finditer('test2 ', html)) == 2
+ )
+ assert (
+ sum(1 for _ in re.finditer('test ', html)) == 2
+ )
+ assert (
+ sum(1 for _ in re.finditer('my_tag ', html)) == 1
+ )
- assert sum(1 for _ in re.finditer('test_4a ', html)) == 1
- assert sum(1 for _ in re.finditer('test_4b ', html)) == 1
- assert sum(1 for _ in re.finditer('TEST_4 ', html)) == 2
+ assert (
+ sum(1 for _ in re.finditer('test_4a ', html))
+ == 1
+ )
+ assert (
+ sum(1 for _ in re.finditer('test_4b ', html))
+ == 1
+ )
+ assert (
+ sum(1 for _ in re.finditer('TEST_4 ', html)) == 2
+ )
- assert sum(1 for _ in re.finditer('TEST_5 ', html)) == 2
+ assert (
+ sum(1 for _ in re.finditer('TEST_5 ', html)) == 2
+ )
assert "Test output of need TEST_3. args:" in html
assert 'link ' in html
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_df_calc_sum"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_df_calc_sum"}],
+ indirect=True,
+)
def test_doc_df_calc_sum(test_app):
app = test_app
app.build()
@@ -39,7 +61,9 @@ def test_doc_df_calc_sum(test_app):
@pytest.mark.parametrize(
- "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_df_check_linked_values"}], indirect=True
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_df_check_linked_values"}],
+ indirect=True,
)
def test_doc_df_linked_values(test_app):
app = test_app
@@ -51,7 +75,9 @@ def test_doc_df_linked_values(test_app):
@pytest.mark.parametrize(
- "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_df_user_functions"}], indirect=True
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_df_user_functions"}],
+ indirect=True,
)
def test_doc_df_user_functions(test_app):
app = test_app
diff --git a/tests/test_export_id.py b/tests/test_export_id.py
index dcdb7152e..19f4beccc 100644
--- a/tests/test_export_id.py
+++ b/tests/test_export_id.py
@@ -6,7 +6,11 @@
from syrupy.filters import props
-@pytest.mark.parametrize("test_app", [{"buildername": "needs", "srcdir": "doc_test/doc_export_id"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "needs", "srcdir": "doc_test/doc_export_id"}],
+ indirect=True,
+)
def test_export_id(test_app, snapshot):
app = test_app
app.build()
@@ -14,7 +18,11 @@ def test_export_id(test_app, snapshot):
assert needs_data == snapshot(exclude=props("created"))
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_export_id"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_export_id"}],
+ indirect=True,
+)
def test_export_id_html(test_app):
app = test_app
app.build()
diff --git a/tests/test_external.py b/tests/test_external.py
index 9169cf431..de705d8e2 100644
--- a/tests/test_external.py
+++ b/tests/test_external.py
@@ -5,7 +5,11 @@
from syrupy.filters import props
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/external_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/external_doc"}],
+ indirect=True,
+)
def test_external_html(test_app):
app = test_app
app.build()
@@ -17,11 +21,16 @@ def test_external_html(test_app):
assert (
'Test need ref: EXT_TEST_01
' in html
+ ' href="http://my_company.com/docs/v1/index.html#TEST_01">EXT_TEST_01
'
+ in html
)
-@pytest.mark.parametrize("test_app", [{"buildername": "needs", "srcdir": "doc_test/external_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "needs", "srcdir": "doc_test/external_doc"}],
+ indirect=True,
+)
def test_external_json(test_app, snapshot):
app = test_app
app.build()
@@ -30,7 +39,11 @@ def test_external_json(test_app, snapshot):
assert needs == snapshot(exclude=props("created"))
-@pytest.mark.parametrize("test_app", [{"buildername": "needs", "srcdir": "doc_test/external_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "needs", "srcdir": "doc_test/external_doc"}],
+ indirect=True,
+)
def test_external_needs_warnings(test_app):
import os
import subprocess
@@ -40,8 +53,11 @@ def test_external_needs_warnings(test_app):
srcdir = Path(app.srcdir)
out_dir = os.path.join(srcdir, "_build")
- out = subprocess.run(["sphinx-build", "-b", "html", srcdir, out_dir], capture_output=True)
+ out = subprocess.run(
+ ["sphinx-build", "-b", "html", srcdir, out_dir], capture_output=True
+ )
assert (
"WARNING: Couldn't create need EXT_TEST_03. Reason: The need-type (i.e. `ask`) is not"
- " set in the project's 'need_types' configuration in conf.py." in out.stderr.decode("utf-8")
+ " set in the project's 'need_types' configuration in conf.py."
+ in out.stderr.decode("utf-8")
)
diff --git a/tests/test_extra_links.py b/tests/test_extra_links.py
index b57088989..e089d38f0 100644
--- a/tests/test_extra_links.py
+++ b/tests/test_extra_links.py
@@ -3,7 +3,11 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_extra_links"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_extra_links"}],
+ indirect=True,
+)
def test_extra_links_html(test_app):
app = test_app
app.build()
@@ -16,11 +20,17 @@ def test_extra_links_html(test_app):
# Check for correct dead_links handling
assert 'DEAD_LINK_ALLOWED ' in html
- assert 'DEAD_LINK_NOT_ALLOWED ' in html
+ assert (
+ 'DEAD_LINK_NOT_ALLOWED ' in html
+ )
assert 'REQ_005.invalid ' in html
-@pytest.mark.parametrize("test_app", [{"buildername": "latex", "srcdir": "doc_test/doc_extra_links"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "latex", "srcdir": "doc_test/doc_extra_links"}],
+ indirect=True,
+)
def test_extra_links_latex(test_app):
app = test_app
app.build()
diff --git a/tests/test_extra_options.py b/tests/test_extra_options.py
index 4d00b6ecc..4e6921887 100644
--- a/tests/test_extra_options.py
+++ b/tests/test_extra_options.py
@@ -4,7 +4,11 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/extra_options"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/extra_options"}],
+ indirect=True,
+)
def test_custom_attributes_appear(test_app):
app = test_app
app.build()
@@ -37,6 +41,8 @@ def test_custom_attributes_appear(test_app):
assert "R_12346" not in tables[3]
# Need list should only have component B requirements
- items = re.findall('(' in html_5
+ 'href="#CHILD_1_STORY" title="STORY_PARENT">CHILD_1_STORY'
+ in html_5
)
assert (
'' in html_5
+ 'href="#CHILD_1_STORY" title="CHILD_2_STORY">CHILD_1_STORY'
+ in html_5
)
html_6 = Path(app.outdir, "filter_no_needs.html").read_text()
diff --git a/tests/test_github_issues.py b/tests/test_github_issues.py
index 717f37e5f..4c8862136 100644
--- a/tests/test_github_issues.py
+++ b/tests/test_github_issues.py
@@ -5,7 +5,11 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_github_issue_44"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_github_issue_44"}],
+ indirect=True,
+)
def test_doc_github_44(test_app):
"""
https://github.com/useblocks/sphinxcontrib-needs/issues/44
@@ -17,7 +21,9 @@ def test_doc_github_44(test_app):
app = test_app
output = subprocess.run(
- ["sphinx-build", "-a", "-E", "-b", "html", app.srcdir, app.outdir], check=True, capture_output=True
+ ["sphinx-build", "-a", "-E", "-b", "html", app.srcdir, app.outdir],
+ check=True,
+ capture_output=True,
)
# app.build() Uncomment, if build should stop on breakpoints
@@ -34,7 +40,11 @@ def test_doc_github_44(test_app):
]
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_github_issue_61"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_github_issue_61"}],
+ indirect=True,
+)
def test_doc_github_61(test_app):
"""
Test for https://github.com/useblocks/sphinxcontrib-needs/issues/61
@@ -58,7 +68,9 @@ def test_doc_github_61(test_app):
@pytest.mark.parametrize(
- "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_github_issue_160"}], indirect=True
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_github_issue_160"}],
+ indirect=True,
)
def test_doc_github_160(test_app):
app = test_app
diff --git a/tests/test_global_options.py b/tests/test_global_options.py
index cf45c5ab8..9829f1a7c 100644
--- a/tests/test_global_options.py
+++ b/tests/test_global_options.py
@@ -3,7 +3,11 @@
import pytest
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_global_options"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_global_options"}],
+ indirect=True,
+)
def test_doc_global_option(test_app):
app = test_app
app.build()
diff --git a/tests/test_import.py b/tests/test_import.py
index e5bd1b760..0fb73e9ad 100644
--- a/tests/test_import.py
+++ b/tests/test_import.py
@@ -6,7 +6,11 @@
from syrupy.filters import props
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/import_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/import_doc"}],
+ indirect=True,
+)
def test_import_json(test_app):
app = test_app
app.build()
@@ -40,7 +44,9 @@ def test_import_json(test_app):
assert "small_rel_path_TEST_01" in rel_path_import_html
# Check deprecated relative path import based on conf.py
- deprec_rel_path_import_html = Path(app.outdir, "subdoc/deprecated_rel_path_import.html").read_text()
+ deprec_rel_path_import_html = Path(
+ app.outdir, "subdoc/deprecated_rel_path_import.html"
+ ).read_text()
assert "small_depr_rel_path_TEST_01" in deprec_rel_path_import_html
warning = app._warning
@@ -48,7 +54,11 @@ def test_import_json(test_app):
assert "Deprecation warning:" in warnings
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/import_doc_invalid"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/import_doc_invalid"}],
+ indirect=True,
+)
def test_json_schema_console_check(test_app):
"""Checks the console output for hints about json schema validation errors"""
import os
@@ -58,12 +68,18 @@ def test_json_schema_console_check(test_app):
srcdir = Path(app.srcdir)
out_dir = os.path.join(srcdir, "_build")
- out = subprocess.run(["sphinx-build", "-b", "html", srcdir, out_dir], capture_output=True)
+ out = subprocess.run(
+ ["sphinx-build", "-b", "html", srcdir, out_dir], capture_output=True
+ )
assert "Schema validation errors detected" in str(out.stdout)
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/import_doc_invalid"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/import_doc_invalid"}],
+ indirect=True,
+)
def test_json_schema_file_check(test_app):
"""Checks that an invalid json-file gets normally still imported and is used as normal (if possible)"""
app = test_app
@@ -74,7 +90,11 @@ def test_json_schema_file_check(test_app):
assert "new_tag" in html
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/import_doc_empty"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/import_doc_empty"}],
+ indirect=True,
+)
def test_empty_file_check(test_app):
"""Checks that an empty needs.json throws an exception"""
app = test_app
@@ -85,7 +105,9 @@ def test_empty_file_check(test_app):
@pytest.mark.parametrize(
- "test_app", [{"buildername": "html", "srcdir": "doc_test/non_exists_file_import"}], indirect=True
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/non_exists_file_import"}],
+ indirect=True,
)
def test_import_non_exists_json(test_app):
# Check non exists file import
@@ -97,7 +119,11 @@ def test_import_non_exists_json(test_app):
assert "non_exists_file_import" in err.args[0]
-@pytest.mark.parametrize("test_app", [{"buildername": "needs", "srcdir": "doc_test/import_doc"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "needs", "srcdir": "doc_test/import_doc"}],
+ indirect=True,
+)
def test_import_builder(test_app, snapshot):
app = test_app
app.build()
@@ -107,7 +133,9 @@ def test_import_builder(test_app, snapshot):
@pytest.mark.parametrize(
- "test_app", [{"buildername": "needs", "srcdir": "doc_test/doc_needimport_download_needs_json"}], indirect=True
+ "test_app",
+ [{"buildername": "needs", "srcdir": "doc_test/doc_needimport_download_needs_json"}],
+ indirect=True,
)
def test_needimport_needs_json_download(test_app, snapshot):
app = test_app
@@ -175,7 +203,12 @@ def test_needimport_needs_json_download(test_app, snapshot):
@pytest.mark.parametrize(
"test_app",
- [{"buildername": "needs", "srcdir": "doc_test/doc_needimport_download_needs_json_negative"}],
+ [
+ {
+ "buildername": "needs",
+ "srcdir": "doc_test/doc_needimport_download_needs_json_negative",
+ }
+ ],
indirect=True,
)
def test_needimport_needs_json_download_negative(test_app):
@@ -210,11 +243,16 @@ def test_needimport_needs_json_download_negative(test_app):
with requests_mock.Mocker() as m:
# test with invalid url
- m.get("http://my_wrong_name_company.com/docs/v1/remote-needs.json", json=remote_json)
+ m.get(
+ "http://my_wrong_name_company.com/docs/v1/remote-needs.json",
+ json=remote_json,
+ )
src_dir = Path(app.srcdir)
out_dir = Path(app.outdir)
- output = subprocess.run(["sphinx-build", "-M", "html", src_dir, out_dir], capture_output=True)
+ output = subprocess.run(
+ ["sphinx-build", "-M", "html", src_dir, out_dir], capture_output=True
+ )
assert (
"NeedimportException: Getting http://my_wrong_name_company.com/docs/v1/remote-needs.json didn't work."
in output.stderr.decode("utf-8")
diff --git a/tests/test_jinja_content_option.py b/tests/test_jinja_content_option.py
index f9cd5f508..dbc03913e 100644
--- a/tests/test_jinja_content_option.py
+++ b/tests/test_jinja_content_option.py
@@ -4,7 +4,9 @@
@pytest.mark.parametrize(
- "test_app", [{"buildername": "html", "srcdir": "doc_test/doc_need_jinja_content"}], indirect=True
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_need_jinja_content"}],
+ indirect=True,
)
def test_doc_need_jinja_content(test_app):
app = test_app
diff --git a/tests/test_layouts.py b/tests/test_layouts.py
index 89097855f..bed530dca 100644
--- a/tests/test_layouts.py
+++ b/tests/test_layouts.py
@@ -3,7 +3,11 @@
from tests.util import extract_needs_from_html
-@pytest.mark.parametrize("test_app", [{"buildername": "html", "srcdir": "doc_test/doc_layout"}], indirect=True)
+@pytest.mark.parametrize(
+ "test_app",
+ [{"buildername": "html", "srcdir": "doc_test/doc_layout"}],
+ indirect=True,
+)
def test_doc_build_html(test_app):
app = test_app
app.build()
@@ -17,7 +21,8 @@ def test_doc_build_html(test_app):
assert len(needs) == 6
assert (
- 'author : some author ' in html
+ 'author : some author '
+ in html
)
assert '