diff --git a/sphinx_needs/api/need.py b/sphinx_needs/api/need.py
index 3518bdb88..9de22f526 100644
--- a/sphinx_needs/api/need.py
+++ b/sphinx_needs/api/need.py
@@ -353,7 +353,6 @@ def run():
         "doctype": doctype,
         "target_id": need_id,
         "content": "\n".join(content) if isinstance(content, StringList) else content,
-        "content_node": None,
         "type": need_type,
         "type_name": type_name,
         "type_prefix": type_prefix,
@@ -579,8 +578,7 @@ def _create_need_node(
     need_parts = find_parts(node_need)
     update_need_with_parts(env, data, need_parts)

-    # Create a copy of the content
-    data["content_node"] = node_need.deepcopy()
+    SphinxNeedsData(env).set_need_node(data["id"], node_need)

     return_nodes.append(node_need)
@@ -605,10 +603,11 @@ def del_need(app: Sphinx, need_id: str) -> None:
     :param app: Sphinx application object.
     :param need_id: Sphinx need id.
     """
-    env = app.env
-    needs = SphinxNeedsData(env).get_or_create_needs()
+    data = SphinxNeedsData(app.env)
+    needs = data.get_or_create_needs()
     if need_id in needs:
         del needs[need_id]
+        data.remove_need_node(need_id)
     else:
         log_warning(logger, f"Given need id {need_id} not exists!", None, None)
diff --git a/sphinx_needs/data.py b/sphinx_needs/data.py
index 8f2bbbebd..5a0ee79a2 100644
--- a/sphinx_needs/data.py
+++ b/sphinx_needs/data.py
@@ -11,11 +11,12 @@
 from sphinx_needs.logging import log_warning

 if TYPE_CHECKING:
-    from docutils.nodes import Element, Text
+    from docutils.nodes import Text
     from sphinx.application import Sphinx
     from sphinx.environment import BuildEnvironment
     from typing_extensions import NotRequired, Required

+    from sphinx_needs.nodes import Need
     from sphinx_needs.services.manager import ServiceManager
@@ -236,11 +237,6 @@ class CoreFieldParameters(TypedDict):
         "description": "Post-content of the need.",
         "schema": {"type": "string", "default": ""},
     },
-    "content_node": {
-        "description": "Deep copy of the content node.",
-        "schema": {},
-        "exclude_json": True,
-    },
     "has_dead_links": {
         "description": "True if any links reference need ids that are not found in the need list.",
         "schema": {"type": "boolean", "default": False},
     },
@@ -374,8 +370,6 @@ class NeedsInfoType(TypedDict, total=False):
     content: Required[str]
     pre_content: str
     post_content: str
-    content_node: Required[None | Element]
-    """Deep copy of the content node."""

     # these default to False and are updated in check_links post-process
     has_dead_links: Required[bool]
@@ -781,6 +775,32 @@ def get_or_create_umls(self) -> dict[str, NeedsUmlType]:
             self.env.needs_all_needumls = {}
         return self.env.needs_all_needumls

+    @property
+    def _needs_all_nodes(self) -> dict[str, Need]:
+        try:
+            return self.env.needs_all_nodes
+        except AttributeError:
+            self.env.needs_all_nodes = {}
+        return self.env.needs_all_nodes
+
+    def set_need_node(self, need_id: str, node: Need) -> None:
+        """Set a need node in the cache."""
+        self._needs_all_nodes[need_id] = node.deepcopy()
+
+    def remove_need_node(self, need_id: str) -> None:
+        """Remove a need node from the cache, if it exists."""
+        if need_id in self._needs_all_nodes:
+            del self._needs_all_nodes[need_id]
+
+    def get_need_node(self, need_id: str) -> Need | None:
+        """Get a need node from the cache, if it exists."""
+        if need_id in self._needs_all_nodes:
+            # We must create a copy of the node, as it may be reused several times
+            # (multiple needextract for the same need) and the Sphinx ImageTransformator adds
+            # location-specific URIs to some nodes, which are not valid for all locations.
+            return self._needs_all_nodes[need_id].deepcopy()
+        return None
+


 def merge_data(
     _app: Sphinx, env: BuildEnvironment, docnames: list[str], other: BuildEnvironment
@@ -852,5 +872,6 @@ def _merge(name: str, is_complex_dict: bool = False) -> None:
         )

     _merge("needs_all_docs", is_complex_dict=True)
+    _merge("needs_all_nodes")
     _merge("need_all_needextend")
     _merge("needs_all_needumls")
diff --git a/sphinx_needs/directives/need.py b/sphinx_needs/directives/need.py
index f69444fdb..c18c86655 100644
--- a/sphinx_needs/directives/need.py
+++ b/sphinx_needs/directives/need.py
@@ -284,10 +284,12 @@ def purge_needs(app: Sphinx, env: BuildEnvironment, docname: str) -> None:
     Gets executed, if a doc file needs to be purged/ read in again.
     So this code delete all found needs for the given docname.
     """
-    needs = SphinxNeedsData(env).get_or_create_needs()
+    data = SphinxNeedsData(env)
+    needs = data.get_or_create_needs()
     for need_id in list(needs):
         if needs[need_id]["docname"] == docname:
             del needs[need_id]
+            data.remove_need_node(need_id)


 def analyse_need_locations(app: Sphinx, doctree: nodes.document) -> None:
diff --git a/sphinx_needs/functions/functions.py b/sphinx_needs/functions/functions.py
index 2295d4cd5..9d2c89511 100644
--- a/sphinx_needs/functions/functions.py
+++ b/sphinx_needs/functions/functions.py
@@ -199,7 +199,6 @@ def resolve_dynamic_values(needs: dict[str, NeedsInfoType], app: Sphinx) -> None
             "docname",
             "lineno",
             "content",
-            "content_node",
         ]:
             # dynamic values in this data are not allowed.
             continue
diff --git a/sphinx_needs/layout.py b/sphinx_needs/layout.py
index 3f696614e..9018efcb6 100644
--- a/sphinx_needs/layout.py
+++ b/sphinx_needs/layout.py
@@ -76,12 +76,8 @@ def create_need(
     node_container = nodes.container()
     # node_container += needs[need_id]["need_node"].children

-    # We must create a standalone copy of the content_node, as it may be reused several time
-    # (multiple needextract for the same need) and the Sphinx ImageTransformator add location specific
-    # uri to some nodes, which are not valid for all locations.
-    content_node = needs[need_id]["content_node"]
-    assert content_node is not None, f"Need {need_id} has no content node."
-    node_inner = content_node.deepcopy()
+    node_inner = SphinxNeedsData(env).get_need_node(need_id)
+    assert node_inner is not None, f"Need {need_id} has no content node."

     # Rerun some important Sphinx collectors for need-content coming from "needsexternal".
     # This is needed, as Sphinx needs to know images and download paths.
diff --git a/tests/test_import.py b/tests/test_import.py
index f393a21f2..90be273c4 100644
--- a/tests/test_import.py
+++ b/tests/test_import.py
@@ -199,7 +199,7 @@ def test_needimport_needs_json_download(test_app, snapshot):
     app.build()

     needs_all_needs = app.env.needs_all_needs
-    assert needs_all_needs == snapshot(exclude=props("content_node"))
+    assert needs_all_needs == snapshot()


 @pytest.mark.parametrize(
diff --git a/tests/test_needuml.py b/tests/test_needuml.py
index 8f6130aba..c04a4fe34 100644
--- a/tests/test_needuml.py
+++ b/tests/test_needuml.py
@@ -2,7 +2,6 @@
 from pathlib import Path

 import pytest
-from syrupy.filters import props


 @pytest.mark.parametrize(
@@ -17,7 +16,7 @@ def test_doc_build_html(test_app, snapshot):
     assert Path(app.outdir, "index.html").read_text(encoding="utf8")

     all_needs = app.env.needs_all_needs
-    assert all_needs == snapshot(exclude=props("content_node"))
+    assert all_needs == snapshot()

     all_needumls = app.env.needs_all_needumls
     assert all_needumls == snapshot
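
Reviewer note: a minimal usage sketch of the node cache introduced in sphinx_needs/data.py, based only on the methods added above. The helper names fetch_standalone_node and drop_need are illustrative placeholders and do not exist in the code base.

# Illustrative sketch only; not part of the patch above.
from sphinx.environment import BuildEnvironment

from sphinx_needs.data import SphinxNeedsData


def fetch_standalone_node(env: BuildEnvironment, need_id: str):
    """Hypothetical helper: return a standalone copy of a need's content node, or None."""
    data = SphinxNeedsData(env)
    # get_need_node() returns a deep copy of the cached node, so the caller
    # (e.g. a needextract rendering) can mutate it without affecting the cache.
    return data.get_need_node(need_id)


def drop_need(env: BuildEnvironment, need_id: str) -> None:
    """Hypothetical helper mirroring what del_need()/purge_needs() now do."""
    data = SphinxNeedsData(env)
    data.get_or_create_needs().pop(need_id, None)
    # Keep the node cache in sync with the needs dictionary.
    data.remove_need_node(need_id)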