diff --git a/sphinx_needs/builder.py b/sphinx_needs/builder.py
index 454cc95b9..5674d2323 100644
--- a/sphinx_needs/builder.py
+++ b/sphinx_needs/builder.py
@@ -1,5 +1,5 @@
 import os
-from typing import Iterable, List, Optional, Set
+from typing import Iterable, List, Optional, Sequence, Set
 
 from docutils import nodes
 from sphinx import version_info
@@ -8,28 +8,58 @@
 
 from sphinx_needs.config import NeedsSphinxConfig
 from sphinx_needs.data import NeedsInfoType, SphinxNeedsData
+from sphinx_needs.directives.need import post_process_needs_data
 from sphinx_needs.logging import get_logger
 from sphinx_needs.needsfile import NeedsList
 
-log = get_logger(__name__)
+LOGGER = get_logger(__name__)
 
 
 class NeedsBuilder(Builder):
+    """Output the needs data as a JSON file,
+    filtering by the ``needs_builder_filter`` config option if set,
+    and writing to ``needs.json`` (or the ``needs_file`` config option if set)
+    in the output folder.
+
+    Note this builder normally skips the write phase entirely,
+    where all documents are post-transformed, to improve performance.
+    It is assumed all need data is already read in the read phase,
+    and the post-processing of the data is done in the finish phase.
+
+    However, if the ``export_id`` option is set for any directives,
+    the write phase is still run, since this filter data is currently only added there.
+    A warning is issued in this case.
+    """
+
     name = "needs"
     format = "needs"
     file_suffix = ".txt"
     links_suffix = None
 
-    def write_doc(self, docname: str, doctree: nodes.document) -> None:
-        pass
+    def get_outdated_docs(self) -> Iterable[str]:
+        return []
+
+    def write(
+        self,
+        build_docnames: Iterable[str],
+        updated_docnames: Sequence[str],
+        method: str = "update",
+    ) -> None:
+        if not SphinxNeedsData(self.env).has_export_filters:
+            return
+        LOGGER.warning(
+            "At least one use of the `export_id` directive option requires a slower build", type="needs", subtype="build"
+        )
+        return super().write(build_docnames, updated_docnames, method)
 
     def finish(self) -> None:
-        env = self.env
-        data = SphinxNeedsData(env)
+        post_process_needs_data(self.app)
+
+        data = SphinxNeedsData(self.env)
+        needs_config = NeedsSphinxConfig(self.env.config)
         filters = data.get_or_create_filters()
-        version = getattr(env.config, "version", "unset")
-        needs_list = NeedsList(env.config, self.outdir, self.srcdir)
-        needs_config = NeedsSphinxConfig(env.config)
+        version = getattr(self.env.config, "version", "unset")
+        needs_list = NeedsList(self.env.config, self.outdir, self.srcdir)
 
         if needs_config.file:
             needs_file = needs_config.file
@@ -38,7 +68,7 @@ def finish(self) -> None:
             # check if needs.json file exists in conf.py directory
             needs_json = os.path.join(self.srcdir, "needs.json")
             if os.path.exists(needs_json):
-                log.info("needs.json found, but will not be used because needs_file not configured.")
+                LOGGER.info("needs.json found, but will not be used because needs_file not configured.")
 
         # Clean needs_list from already stored needs of the current version.
         # This is needed as needs could have been removed from documentation and if this is the case,
@@ -62,25 +92,30 @@ def finish(self) -> None:
         try:
             needs_list.write_json()
         except Exception as e:
-            log.error(f"Error during writing json file: {e}")
+            LOGGER.error(f"Error during writing json file: {e}")
         else:
-            log.info("Needs successfully exported")
+            LOGGER.info("Needs successfully exported")
 
-    def get_outdated_docs(self) -> Iterable[str]:
-        return []
+    def get_target_uri(self, _docname: str, _typ: Optional[str] = None) -> str:
+        # only needed if the write phase is run
+        return ""
 
     def prepare_writing(self, _docnames: Set[str]) -> None:
+        # only needed if the write phase is run
+        pass
+
+    def write_doc(self, docname: str, doctree: nodes.document) -> None:
+        # only needed if the write phase is run
         pass
 
     def write_doc_serialized(self, _docname: str, _doctree: nodes.document) -> None:
+        # only needed if the write phase is run
         pass
 
     def cleanup(self) -> None:
+        # only needed if the write phase is run
         pass
 
-    def get_target_uri(self, _docname: str, _typ: Optional[str] = None) -> str:
-        return ""
-
 
 def build_needs_json(app: Sphinx, _exception: Exception) -> None:
     env = app.env
@@ -101,7 +136,87 @@ def build_needs_json(app: Sphinx, _exception: Exception) -> None:
     needs_builder.finish()
 
 
+class NeedsIdBuilder(Builder):
+    """Output the needs data as multiple JSON files, one per need,
+    filtering by the ``needs_builder_filter`` config option if set,
+    and writing to the ``needs_id`` folder (or the ``build_json_per_id_path`` config option if set)
+    in the output folder.
+
+    Note this builder completely skips the write phase,
+    where all documents are post-transformed, to improve performance.
+    It is assumed all need data is already read in the read phase,
+    and the post-processing of the data is done in the finish phase.
+    """
+
+    name = "needs_id"
+    format = "needs"
+    file_suffix = ".txt"
+    links_suffix = None
+
+    def get_outdated_docs(self) -> Iterable[str]:
+        return []
+
+    def write(
+        self,
+        build_docnames: Iterable[str],
+        updated_docnames: Sequence[str],
+        method: str = "update",
+    ) -> None:
+        pass
+
+    def finish(self) -> None:
+        post_process_needs_data(self.app)
+
+        data = SphinxNeedsData(self.env)
+        needs = data.get_or_create_needs().values()  # We need a list of needs for later filter checks
+        version = getattr(self.env.config, "version", "unset")
+        needs_config = NeedsSphinxConfig(self.env.config)
+        filter_string = needs_config.builder_filter
+        from sphinx_needs.filter_common import filter_needs
+
+        filtered_needs = filter_needs(needs, needs_config, filter_string)
+        needs_build_json_per_id_path = needs_config.build_json_per_id_path
+        needs_dir = os.path.join(self.outdir, needs_build_json_per_id_path)
+        if not os.path.exists(needs_dir):
+            os.makedirs(needs_dir, exist_ok=True)
+        for need in filtered_needs:
+            needs_list = NeedsList(self.env.config, self.outdir, self.srcdir)
+            needs_list.wipe_version(version)
+            needs_list.add_need(version, need)
+            id = need["id"]
+            try:
+                file_name = f"{id}.json"
+                needs_list.write_json(file_name, needs_dir)
+            except Exception as e:
+                LOGGER.error(f"Needs-ID Builder {id} error: {e}")
+        LOGGER.info("Needs_id successfully exported")
+
+
+def build_needs_id_json(app: Sphinx, _exception: Exception) -> None:
+    env = app.env
+
+    if not NeedsSphinxConfig(env.config).build_json_per_id:
+        return
+
+    # Do not create an additional needs_json for every needs_id, if builder is already "needs_id".
+    if isinstance(app.builder, NeedsIdBuilder):
+        return
+    try:
+        needs_id_builder = NeedsIdBuilder(app, env)
+    except TypeError:
+        needs_id_builder = NeedsIdBuilder(app)
+        needs_id_builder.set_environment(env)
+
+    needs_id_builder.finish()
+
+
 class NeedumlsBuilder(Builder):
+    """Write the PlantUML input files generated by need directives
+    to the output dir,
+    if they have a ``save`` field set,
+    denoting the path relative to the output folder.
+    """
+
     name = "needumls"
 
     def write_doc(self, docname: str, doctree: nodes.document) -> None:
@@ -120,7 +235,7 @@ def finish(self) -> None:
                     if not os.path.exists(save_dir):
                         os.makedirs(save_dir, exist_ok=True)
 
-                    log.info(f"Storing needuml data to file {save_path}.")
+                    LOGGER.info(f"Storing needuml data to file {save_path}.")
                     with open(save_path, "w") as f:
                         f.write(puml_content)
 
@@ -161,74 +276,3 @@ def build_needumls_pumls(app: Sphinx, _exception: Exception) -> None:
         needs_builder.set_environment(env)
 
     needs_builder.finish()
-
-
-class NeedsIdBuilder(Builder):
-    """Json builder for needs, which creates separate json-files per need"""
-
-    name = "needs_id"
-    format = "needs"
-    file_suffix = ".txt"
-    links_suffix = None
-
-    def write_doc(self, docname: str, doctree: nodes.document) -> None:
-        pass
-
-    def finish(self) -> None:
-        env = self.env
-        data = SphinxNeedsData(env)
-        needs = data.get_or_create_needs().values()  # We need a list of needs for later filter checks
-        version = getattr(env.config, "version", "unset")
-        needs_config = NeedsSphinxConfig(env.config)
-        filter_string = needs_config.builder_filter
-        from sphinx_needs.filter_common import filter_needs
-
-        filtered_needs = filter_needs(needs, needs_config, filter_string)
-        needs_build_json_per_id_path = needs_config.build_json_per_id_path
-        needs_dir = os.path.join(self.outdir, needs_build_json_per_id_path)
-        if not os.path.exists(needs_dir):
-            os.makedirs(needs_dir, exist_ok=True)
-        for need in filtered_needs:
-            needs_list = NeedsList(env.config, self.outdir, self.srcdir)
-            needs_list.wipe_version(version)
-            needs_list.add_need(version, need)
-            id = need["id"]
-            try:
-                file_name = f"{id}.json"
-                needs_list.write_json(file_name, needs_dir)
-            except Exception as e:
-                log.error(f"Needs-ID Builder {id} error: {e}")
-        log.info("Needs_id successfully exported")
-
-    def get_outdated_docs(self) -> Iterable[str]:
-        return []
-
-    def prepare_writing(self, _docnames: Set[str]) -> None:
-        pass
-
-    def write_doc_serialized(self, _docname: str, _doctree: nodes.document) -> None:
-        pass
-
-    def cleanup(self) -> None:
-        pass
-
-    def get_target_uri(self, _docname: str, _typ: Optional[str] = None) -> str:
-        return ""
-
-
-def build_needs_id_json(app: Sphinx, _exception: Exception) -> None:
-    env = app.env
-
-    if not NeedsSphinxConfig(env.config).build_json_per_id:
-        return
-
-    # Do not create an additional needs_json for every needs_id, if builder is already "needs_id".
-    if isinstance(app.builder, NeedsIdBuilder):
-        return
-    try:
-        needs_id_builder = NeedsIdBuilder(app, env)
-    except TypeError:
-        needs_id_builder = NeedsIdBuilder(app)
-        needs_id_builder.set_environment(env)
-
-    needs_id_builder.finish()
diff --git a/sphinx_needs/data.py b/sphinx_needs/data.py
index 9fa8d44f2..182324ae2 100644
--- a/sphinx_needs/data.py
+++ b/sphinx_needs/data.py
@@ -25,9 +25,10 @@ class NeedsFilterType(TypedDict):
     status: list[str]
     tags: list[str]
     types: list[str]
-    export_id: str
     result: list[str]
     amount: int
+    export_id: str
+    """If set, the filter is exported with this ID in the needs.json file."""
 
 
 class NeedsBaseDataType(TypedDict):
@@ -271,6 +272,7 @@ class NeedsFilteredBaseType(NeedsBaseDataType):
     filter_code: list[str]
     filter_func: None | str
     export_id: str
+    """If set, the filter is exported with this ID in the needs.json file."""
 
 
 class NeedsFilteredDiagramBaseType(NeedsFilteredBaseType):
@@ -409,6 +411,18 @@ def get_or_create_needs(self) -> dict[str, NeedsInfoType]:
             self.env.needs_all_needs = {}
         return self.env.needs_all_needs
 
+    @property
+    def has_export_filters(self) -> bool:
+        """Whether any filters have export IDs."""
+        try:
+            return self.env.needs_filters_export_id
+        except AttributeError:
+            return False
+
+    @has_export_filters.setter
+    def has_export_filters(self, value: bool) -> None:
+        self.env.needs_filters_export_id = value
+
     def get_or_create_filters(self) -> dict[str, NeedsFilterType]:
         """Get all filters, mapped by ID.
 
@@ -593,6 +607,8 @@ def merge_data(_app: Sphinx, env: BuildEnvironment, _docnames: list[str], other:
     needs = SphinxNeedsData(env).get_or_create_needs()
     other_needs = SphinxNeedsData(other).get_or_create_needs()
     needs.update(other_needs)
+    if SphinxNeedsData(other).has_export_filters:
+        SphinxNeedsData(env).has_export_filters = True
 
     def _merge(name: str, is_complex_dict: bool = False) -> None:
         # Update global needs dict
diff --git a/sphinx_needs/directives/need.py b/sphinx_needs/directives/need.py
index e21e78742..45e2bc1de 100644
--- a/sphinx_needs/directives/need.py
+++ b/sphinx_needs/directives/need.py
@@ -360,8 +360,32 @@ def previous_sibling(node: nodes.Node) -> Optional[nodes.Node]:
     return node.parent[i - 1] if i > 0 else None  # type: ignore
 
 
-@profile("NEED_PROCESS")
-@measure_time("need")
+@profile("NEEDS_POST_PROCESS")
+@measure_time("need_post_process")
+def post_process_needs_data(app: Sphinx) -> None:
+    """In-place post-processing of needs data.
+
+    This should be called after all needs (and extend) data has been collected.
+
+    This function is idempotent;
+    the processing runs only on the first call, and subsequent calls are no-ops.
+
+    After this function has been run, one should assume that the needs data is finalised,
+    and so in principle should be treated as read-only.
+    """
+    needs_config = NeedsSphinxConfig(app.config)
+    needs_data = SphinxNeedsData(app.env)
+    needs = needs_data.get_or_create_needs()
+    if needs and not needs_data.needs_is_post_processed:
+        extend_needs_data(needs, needs_data.get_or_create_extends(), needs_config)
+        resolve_dynamic_values(needs, app)
+        resolve_variants_options(needs, needs_config, app.builder.tags.tags)
+        check_links(needs, needs_config)
+        create_back_links(needs, needs_config)
+        process_constraints(needs, needs_config)
+        needs_data.needs_is_post_processed = True
+
+
 def process_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str) -> None:
     """
     Event handler to add title meta data (status, tags, links, ...) information to the Need node.
 
     Also processes
@@ -374,39 +398,23 @@ def process_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str) -
             node.parent.remove(node)  # type: ignore
         return
 
-    env = app.env
-    needs_data = SphinxNeedsData(env)
-    needs = needs_data.get_or_create_needs()
+    needs_data = SphinxNeedsData(app.env)
 
     # If no needs were defined, we do not need to do anything
-    if not needs:
+    if not needs_data.get_or_create_needs():
         return
 
-    if not needs_data.needs_is_post_processed:
-        extend_needs_data(needs, needs_data.get_or_create_extends(), needs_config)
-        resolve_dynamic_values(needs, app)
-        resolve_variants_options(needs, needs_config, app.builder.tags.tags)
-        check_links(needs, needs_config)
-        create_back_links(needs, needs_config)
-        process_constraints(needs, needs_config)
-        needs_data.needs_is_post_processed = True
+    post_process_needs_data(app)
 
     for extend_node in doctree.findall(Needextend):
         remove_needextend_node(extend_node)
 
-    print_need_nodes(app, doctree, fromdocname, list(doctree.findall(Need)))
+    format_need_nodes(app, doctree, fromdocname, list(doctree.findall(Need)))
 
 
-@profile("NEED_PRINT")
-def print_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, found_needs_nodes: List[Need]) -> None:
-    """
-    Finally creates the need-node in the docutils node-tree.
-
-    :param app:
-    :param doctree:
-    :param fromdocname:
-    :return:
-    """
+@profile("NEED_FORMAT")
+def format_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, found_needs_nodes: List[Need]) -> None:
+    """Replace need nodes in the document with node trees suitable for output"""
     env = app.env
     needs = SphinxNeedsData(env).get_or_create_needs()
diff --git a/sphinx_needs/directives/needextract.py b/sphinx_needs/directives/needextract.py
index dc397a5a5..d77da0ce6 100644
--- a/sphinx_needs/directives/needextract.py
+++ b/sphinx_needs/directives/needextract.py
@@ -56,7 +56,6 @@ def run(self) -> Sequence[nodes.Node]:
             "docname": env.docname,
             "lineno": self.lineno,
             "target_id": targetid,
-            "export_id": self.options.get("export_id", ""),
             "layout": self.options.get("layout"),
             "style": self.options.get("style"),
             "show_filters": "show_filters" in self.options,
diff --git a/sphinx_needs/directives/needfilter.py b/sphinx_needs/directives/needfilter.py
index d1d6326f0..2278b5adf 100644
--- a/sphinx_needs/directives/needfilter.py
+++ b/sphinx_needs/directives/needfilter.py
@@ -60,7 +60,6 @@ def run(self) -> Sequence[nodes.Node]:
             "show_filters": "show_filters" in self.options,
             "show_legend": "show_legend" in self.options,
             "layout": self.options.get("layout", "list"),
-            "export_id": self.options.get("export_id", ""),
             **self.collect_filter_attributes(),
         }
diff --git a/sphinx_needs/directives/needlist.py b/sphinx_needs/directives/needlist.py
index ea8979560..53526cb10 100644
--- a/sphinx_needs/directives/needlist.py
+++ b/sphinx_needs/directives/needlist.py
@@ -50,7 +50,6 @@ def run(self) -> Sequence[nodes.Node]:
             "show_tags": "show_tags" in self.options,
             "show_status": "show_status" in self.options,
             "show_filters": "show_filters" in self.options,
-            "export_id": self.options.get("export_id", ""),
             **self.collect_filter_attributes(),
         }
diff --git a/sphinx_needs/filter_common.py b/sphinx_needs/filter_common.py
index e5e9bed04..8d4ed357f 100644
--- a/sphinx_needs/filter_common.py
+++ b/sphinx_needs/filter_common.py
@@ -40,6 +40,7 @@ class FilterAttributesType(TypedDict):
     filter_code: list[str]
     filter_func: str
     export_id: str
+    """If set, the filter is exported with this ID in the needs.json file."""
 
 
 class FilterBase(SphinxDirective):
@@ -74,6 +75,10 @@ def collect_filter_attributes(self) -> FilterAttributesType:
         if isinstance(types, str):
             types = [typ.strip() for typ in re.split(";|,", types)]
 
+        if self.options.get("export_id", ""):
+            # this is used by needs builders
+            SphinxNeedsData(self.env).has_export_filters = True
+
         # Add the need and all needed information
         collected_filter_options: FilterAttributesType = {
             "status": status,
diff --git a/tests/__snapshots__/test_needs_id_builder.ambr b/tests/__snapshots__/test_needs_id_builder.ambr
new file mode 100644
index 000000000..9e45b666d
--- /dev/null
+++ b/tests/__snapshots__/test_needs_id_builder.ambr
@@ -0,0 +1,252 @@
+# serializer version: 1
+# name: test_doc_needs_id_builder[test_app0]
+  dict({
+    'TC_001.json': dict({
+      'current_version': '1.0',
+      'project': 'Python',
+      'versions': dict({
+        '1.0': dict({
+          'filters': dict({
+          }),
+          'filters_amount': 0,
+          'needs': dict({
+            'TC_001': dict({
+              'arch': dict({
+              }),
+              'avatar': '',
+              'closed_at': '',
+              'completion': '',
+              'constraints': list([
+              ]),
+              'constraints_passed': True,
+              'constraints_results': dict({
+              }),
+              'content_id': 'TC_001',
+              'created_at': '',
+              'delete': None,
+              'description': '',
+              'docname': 'index',
+              'doctype': '.rst',
+              'duration': '',
+              'external_css': 'external_link',
+              'external_url': None,
+              'full_title': 'Test example',
+              'has_dead_links': '',
+              'has_forbidden_dead_links': '',
+              'hidden': '',
+              'id': 'TC_001',
+              'id_prefix': '',
+              'is_external': False,
+              'is_modified': False,
+              'is_need': True,
+              'is_part': False,
+              'jinja_content': None,
+              'layout': '',
+              'links': list([
+              ]),
+              'max_amount': '',
+              'max_content_lines': '',
+              'modifications': 0,
+              'params': '',
+              'parent_need': '',
+              'parent_needs': list([
+              ]),
+              'parts': dict({
+              }),
+              'post_template': None,
+              'pre_template': None,
+              'prefix': '',
+              'query': '',
+              'section_name': 'TEST DOCUMENT NEEDS Builder',
+              'sections': list([
+                'TEST DOCUMENT NEEDS Builder',
+              ]),
+              'service': '',
+              'signature': '',
+              'specific': '',
+              'status': 'open',
+              'style': None,
+              'tags': list([
+              ]),
+              'target_id': 'TC_001',
+              'template': None,
+              'title': 'Test example',
+              'type': 'test',
+              'type_name': 'Test Case',
+              'updated_at': '',
+              'url': '',
+              'url_postfix': '',
+              'user': '',
+            }),
+          }),
+          'needs_amount': 1,
+        }),
+      }),
+    }),
+    'TC_NEG_001.json': dict({
+      'current_version': '1.0',
+      'project': 'Python',
+      'versions': dict({
+        '1.0': dict({
+          'filters': dict({
+          }),
+          'filters_amount': 0,
+          'needs': dict({
+            'TC_NEG_001': dict({
+              'arch': dict({
+              }),
+              'avatar': '',
+              'closed_at': '',
+              'completion': '',
+              'constraints': list([
+              ]),
+              'constraints_passed': True,
+              'constraints_results': dict({
+              }),
+              'content_id': 'TC_NEG_001',
+              'created_at': '',
+              'delete': None,
+              'description': '',
+              'docname': 'index',
+              'doctype': '.rst',
+              'duration': '',
+              'external_css': 'external_link',
+              'external_url': None,
+              'full_title': 'Negative test example',
+              'has_dead_links': '',
+              'has_forbidden_dead_links': '',
+              'hidden': '',
+              'id': 'TC_NEG_001',
+              'id_prefix': '',
+              'is_external': False,
+              'is_modified': False,
+              'is_need': True,
+              'is_part': False,
+              'jinja_content': None,
+              'layout': '',
+              'links': list([
+              ]),
+              'max_amount': '',
+              'max_content_lines': '',
+              'modifications': 0,
+              'params': '',
+              'parent_need': '',
+              'parent_needs': list([
+              ]),
+              'parts': dict({
+              }),
+              'post_template': None,
+              'pre_template': None,
+              'prefix': '',
+              'query': '',
+              'section_name': 'TEST DOCUMENT NEEDS Builder',
+              'sections': list([
+                'TEST DOCUMENT NEEDS Builder',
+              ]),
+              'service': '',
+              'signature': '',
+              'specific': '',
+              'status': 'closed',
+              'style': None,
+              'tags': list([
+              ]),
+              'target_id': 'TC_NEG_001',
+              'template': None,
+              'title': 'Negative test example',
+              'type': 'test',
+              'type_name': 'Test Case',
+              'updated_at': '',
+              'url': '',
+              'url_postfix': '',
+              'user': '',
+            }),
+          }),
+          'needs_amount': 1,
+        }),
+      }),
+    }),
+    'US_63252.json': dict({
+      'current_version': '1.0',
+      'project': 'Python',
+      'versions': dict({
+        '1.0': dict({
+          'filters': dict({
+          }),
+          'filters_amount': 0,
+          'needs': dict({
+            'US_63252': dict({
+              'arch': dict({
+              }),
+              'avatar': '',
+              'closed_at': '',
+              'completion': '',
+              'constraints': list([
+              ]),
+              'constraints_passed': True,
+              'constraints_results': dict({
+              }),
+              'content_id': 'US_63252',
+              'created_at': '',
+              'delete': None,
+              'description': '',
+              'docname': 'index',
+              'doctype': '.rst',
+              'duration': '',
+              'external_css': 'external_link',
+              'external_url': None,
+              'full_title': 'A story',
+              'has_dead_links': '',
+              'has_forbidden_dead_links': '',
+              'hidden': '',
+              'id': 'US_63252',
+              'id_prefix': '',
+              'is_external': False,
+              'is_modified': False,
+              'is_need': True,
+              'is_part': False,
+              'jinja_content': None,
+              'layout': '',
+              'links': list([
+              ]),
+              'max_amount': '',
+              'max_content_lines': '',
+              'modifications': 0,
+              'params': '',
+              'parent_need': '',
+              'parent_needs': list([
+              ]),
+              'parts': dict({
+              }),
+              'post_template': None,
+              'pre_template': None,
+              'prefix': '',
+              'query': '',
+              'section_name': 'TEST DOCUMENT NEEDS Builder',
+              'sections': list([
+                'TEST DOCUMENT NEEDS Builder',
+              ]),
+              'service': '',
+              'signature': '',
+              'specific': '',
+              'status': 'in progress',
+              'style': None,
+              'tags': list([
+                '1',
+              ]),
+              'target_id': 'US_63252',
+              'template': None,
+              'title': 'A story',
+              'type': 'story',
+              'type_name': 'User Story',
+              'updated_at': '',
+              'url': '',
+              'url_postfix': '',
+              'user': '',
+            }),
+          }),
+          'needs_amount': 1,
+        }),
+      }),
+    }),
+  })
+# ---
diff --git a/tests/test_needs_id_builder.py b/tests/test_needs_id_builder.py
index cbf86ad46..6c647242b 100644
--- a/tests/test_needs_id_builder.py
+++ b/tests/test_needs_id_builder.py
@@ -1,8 +1,8 @@
 import json
-import os
 from pathlib import Path
 
 import pytest
+from syrupy.filters import props
 
 from sphinx_needs.config import NeedsSphinxConfig
 from sphinx_needs.data import SphinxNeedsData
@@ -11,23 +11,11 @@
 @pytest.mark.parametrize(
     "test_app", [{"buildername": "needs_id", "srcdir": "doc_test/doc_needs_builder"}], indirect=True
 )
-def test_doc_needs_id_builder(test_app):
+def test_doc_needs_id_builder(test_app, snapshot):
     app = test_app
     app.build()
-    out_dir = app.outdir
-    env = app.env
-    data = SphinxNeedsData(env)
-    needs_config = NeedsSphinxConfig(env.config)
-    needs = data.get_or_create_needs().values()  # We need a list of needs for later filter checks
-    needs_build_json_per_id_path = needs_config.build_json_per_id_path
-    needs_id_path = os.path.join(out_dir, needs_build_json_per_id_path)
-    assert os.path.exists(needs_id_path)
-    for need in needs:
-        need_id = need["id"]
-        need_file_name = f"{need_id}.json"
-        needs_json = Path(needs_id_path, need_file_name)
-        assert os.path.exists(needs_json)
-        with open(needs_json) as needs_file:
-            needs_file_content = needs_file.read()
-            needs_list = json.loads(needs_file_content)
-        assert needs_list["versions"]["1.0"]["needs"][need_id]["docname"]
+    data = SphinxNeedsData(app.env)
+    needs_config = NeedsSphinxConfig(app.config)
+    needs_id_path = Path(app.outdir, needs_config.build_json_per_id_path)
+    data = {path.name: json.loads(path.read_text()) for path in needs_id_path.glob("*.json")}
+    assert data == snapshot(exclude=props("created"))
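
For context, a minimal sketch of how these builders are driven from ``conf.py``. The option names are the existing sphinx-needs options referenced in the builder docstrings above; the values shown are illustrative assumptions, not part of this change:

# conf.py -- illustrative values only
extensions = ["sphinx_needs"]

# Used by both JSON builders: export only needs matching this filter string.
needs_builder_filter = 'status == "open"'

# NeedsBuilder ("needs"): name of the single exported JSON file,
# written to the output folder (defaults to needs.json when unset).
needs_file = "my_needs.json"

# NeedsIdBuilder ("needs_id"): write one JSON file per need into this
# sub-folder of the output folder.
needs_build_json_per_id = True
needs_build_json_per_id_path = "needs_id"

The builders can be invoked directly, e.g. ``sphinx-build -b needs docs docs/_build``; in addition, the ``(app, exception)`` signatures of ``build_needs_json`` and ``build_needs_id_json`` match Sphinx ``build-finished`` callbacks, so with ``needs_build_json_per_id = True`` the per-ID export also runs after other builders complete.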