diff --git a/dodo.py b/dodo.py index a97f5b7..a512315 100644 --- a/dodo.py +++ b/dodo.py @@ -29,7 +29,7 @@ def task_generate_meta_schemas(): name = Path(example.root_directory).name yield { "name": name, - "file_dep": [schema.path for schema in example.schemas] + "file_dep": [schema.file_path for schema in example.schemas] + [BASE_META_SCHEMA_PATH, CORE_SCHEMA_PATH, Path(SOURCE_PATH, "meta_schema.py")], "targets": [schema.meta_schema_path for schema in example.schemas], "actions": [(example.generate_meta_schemas, [])], @@ -44,7 +44,7 @@ def task_validate_schemas(): yield { "name": name, "task_dep": [f"generate_meta_schemas:{name}"], - "file_dep": [schema.path for schema in example.schemas] + "file_dep": [schema.file_path for schema in example.schemas] + [schema.meta_schema_path for schema in example.schemas] + [BASE_META_SCHEMA_PATH, CORE_SCHEMA_PATH, Path(SOURCE_PATH, "meta_schema.py")], "actions": [(example.validate_schemas, [])], @@ -58,7 +58,7 @@ def task_generate_json_schemas(): yield { "name": name, "task_dep": [f"validate_schemas:{name}"], - "file_dep": [schema.path for schema in example.schemas] + "file_dep": [schema.file_path for schema in example.schemas] + [schema.meta_schema_path for schema in example.schemas] + [CORE_SCHEMA_PATH, BASE_META_SCHEMA_PATH, Path(SOURCE_PATH, "schema_to_json.py")], "targets": [schema.json_schema_path for schema in example.schemas], @@ -88,7 +88,7 @@ def task_generate_markdown(): yield { "name": name, "targets": [template.markdown_output_path for template in example.doc_templates], - "file_dep": [schema.path for schema in example.schemas] + "file_dep": [schema.file_path for schema in example.schemas] + [template.path for template in example.doc_templates] + [Path(SOURCE_PATH, "docs", "grid_table.py")], "task_dep": [f"validate_schemas:{name}"], @@ -104,7 +104,7 @@ def task_generate_cpp_code(): yield { "name": name, "task_dep": [f"validate_schemas:{name}"], - "file_dep": [schema.path for schema in example.cpp_schemas] + 
"file_dep": [schema.file_path for schema in example.cpp_schemas] + [schema.meta_schema_path for schema in example.schemas] + [CORE_SCHEMA_PATH, BASE_META_SCHEMA_PATH, Path(SOURCE_PATH, "header_entries.py")], "targets": [schema.cpp_header_path for schema in example.cpp_schemas] @@ -122,7 +122,7 @@ def task_generate_web_docs(): yield { "name": name, "task_dep": [f"validate_schemas:{name}", f"generate_json_schemas:{name}", f"validate_example_files:{name}"], - "file_dep": [schema.path for schema in example.schemas] + "file_dep": [schema.file_path for schema in example.schemas] + [template.path for template in example.doc_templates] + [Path(SOURCE_PATH, "docs", "mkdocs_web.py")], "targets": [Path(example.web_docs_directory_path, "public")], diff --git a/examples/fan_spec/examples/Fan-Continuous.RS0003.a205.json b/examples/fan_spec/examples/Fan-Continuous.RS0003.a205.json index e6ca139..279695a 100644 --- a/examples/fan_spec/examples/Fan-Continuous.RS0003.a205.json +++ b/examples/fan_spec/examples/Fan-Continuous.RS0003.a205.json @@ -1,12 +1,13 @@ { "metadata": { "schema_author": "ASHRAE_205", - "schema": "RS0003", + "schema_name": "RS0003", "schema_version": "0.2.0", + "author": "SSPC 205 Working Group", "description": "Continuous Fan", "id": "123e4567-e89b-12d3-a456-426614174000", - "timestamp": "2020-05-11T00:00Z", - "version": 1, + "time_of_creation": "2020-05-11T00:00Z", + "version": "1.0.0", "disclaimer": "Example data not to be used for simulation" }, "description": { diff --git a/examples/fan_spec/schema/ASHRAE205.schema.yaml b/examples/fan_spec/schema/ASHRAE205.schema.yaml index 4f7ec2a..2fdb026 100644 --- a/examples/fan_spec/schema/ASHRAE205.schema.yaml +++ b/examples/fan_spec/schema/ASHRAE205.schema.yaml @@ -25,15 +25,15 @@ PerformanceMapTemplate: GridVariablesTemplate: Object Type: "Data Group Template" Required Data Types: - - "[Numeric][1..]" - - "[Integer][1..]" + - "[Numeric]" + - "[Integer]" Data Elements Required: True LookupVariablesTemplate: Object 
Type: "Data Group Template" Required Data Types: - - "[Numeric][1..]" - - "[Integer][1..]" + - "[Numeric]" + - "[Integer]" Data Elements Required: True RepresentationSpecificationTemplate: diff --git a/examples/fan_spec/schema/RS0003.schema.yaml b/examples/fan_spec/schema/RS0003.schema.yaml index cb0ac41..9d5c273 100644 --- a/examples/fan_spec/schema/RS0003.schema.yaml +++ b/examples/fan_spec/schema/RS0003.schema.yaml @@ -217,14 +217,18 @@ SystemCurve: Data Elements: standard_air_volumetric_flow_rate: Description: "Volumetric air flow rate through an air distribution system at standard air conditions" - Data Type: "[Numeric][2..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[2..]" Units: "m3/s" Required: True static_pressure_difference: Description: "Static pressure difference of an air distribution system" - Data Type: "[Numeric][2..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[2..]" Units: "Pa" Required: True @@ -247,14 +251,18 @@ GridVariablesContinuous: Data Elements: standard_air_volumetric_flow_rate: Description: "Volumetric air flow rate through fan assembly at standard air conditions" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "m3/s" Required: True static_pressure_difference: Description: "External static pressure across fan assembly at dry coil conditions" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "Pa" Notes: "Any static pressure deduction (or addition) for wet coil is specified by `wet_pressure_difference` in 'assembly_components' data group" Required: True @@ -265,14 +273,18 @@ LookupVariablesContinuous: Data Elements: impeller_rotational_speed: Description: "Rotational speed of fan impeller" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "rev/s" 
Required: True shaft_power: Description: "Mechanical shaft power input to fan assembly" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "W" Notes: "Does not include the mechanical efficiency of any mechanical drive used to modify rotational speed between the motor and impeller" Required: True @@ -296,15 +308,19 @@ GridVariablesDiscrete: Data Elements: speed_number: Description: "Number indicating discrete speed of fan impeller in rank order (with 1 being the lowest speed)" - Data Type: "[Integer][1..]" - Constraints: ">=0" + Data Type: "[Integer]" + Constraints: + - ">=0.0" + - "[1..]" Units: "-" Notes: "Data shall be provided for all allowable discrete speeds or settings" Required: True static_pressure_difference: Description: "External static pressure across fan assembly at dry coil conditions" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "Pa" Notes: "Any static pressure deduction (or addition) for wet coil is specified by `wet_pressure_difference` in 'assembly_components' data group" Required: True @@ -315,20 +331,26 @@ LookupVariablesDiscrete: Data Elements: standard_air_volumetric_flow_rate: Description: "Volumetric air flow rate through fan assembly at standard air conditions" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "m3/s" Required: True shaft_power: Description: "Mechanical shaft power input to fan assembly" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "W" Notes: "Does not include the mechanical efficiency of any mechanical drive used to modify rotational speed between the motor and impeller" Required: True impeller_rotational_speed: Description: "Rotational speed of fan impeller" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: 
"[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "rev/s" Required: True diff --git a/examples/fan_spec/schema/RS0005.schema.yaml b/examples/fan_spec/schema/RS0005.schema.yaml index 4c58fce..9e2157d 100644 --- a/examples/fan_spec/schema/RS0005.schema.yaml +++ b/examples/fan_spec/schema/RS0005.schema.yaml @@ -103,14 +103,18 @@ GridVariables: Data Elements: shaft_power: Description: "Delivered rotational shaft power" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "W" Required: True shaft_rotational_speed: Description: "Rotational speed of shaft" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "rev/s" Required: True @@ -120,14 +124,20 @@ LookupVariables: Data Elements: efficiency: Description: "Efficiency of motor" - Data Type: "[Numeric][1..]" - Constraints: [">=0.0", "<=1.0"] + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "<=1.0" + - "[1..]" Units: "-" Notes: "Defined as the ratio of mechanical shaft power to electrical input power of the motor" Required: True power_factor: Description: "Power factor of the motor" - Data Type: "[Numeric][1..]" - Constraints: [">=0.0", "<=1.0"] + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "<=1.0" + - "[1..]" Units: "-" Required: True diff --git a/examples/fan_spec/schema/RS0006.schema.yaml b/examples/fan_spec/schema/RS0006.schema.yaml index 623b3e0..14896c6 100644 --- a/examples/fan_spec/schema/RS0006.schema.yaml +++ b/examples/fan_spec/schema/RS0006.schema.yaml @@ -105,14 +105,18 @@ GridVariables: Data Elements: output_power: Description: "Power delivered to the motor" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "W" Required: True output_frequency: Description: "Frequency delivered to the motor" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + 
Constraints: + - ">=0.0" + - "[1..]" Units: "Hz" Required: True @@ -122,8 +126,11 @@ LookupVariables: Data Elements: efficiency: Description: "Efficiency of drive" - Data Type: "[Numeric][1..]" - Constraints: [">=0.0","<=1.0"] + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "<=1.0" + - "[1..]" Units: "-" Notes: ["Defined as the ratio of electrical output power (to the motor) to electrical input power (to the drive)", "Input power shall include any power required to provide active air cooling for the drive"] diff --git a/examples/fan_spec/schema/RS0007.schema.yaml b/examples/fan_spec/schema/RS0007.schema.yaml index 252fdba..0ac786a 100644 --- a/examples/fan_spec/schema/RS0007.schema.yaml +++ b/examples/fan_spec/schema/RS0007.schema.yaml @@ -99,8 +99,10 @@ GridVariables: Data Elements: output_power: Description: "Output shaft power" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "W" Required: True @@ -110,8 +112,11 @@ LookupVariables: Data Elements: efficiency: Description: "Efficiency of drive" - Data Type: "[Numeric][1..]" - Constraints: [">=0.0","<=1.0"] + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "<=1.0" + - "[1..]" Units: "-" Notes: "Defined as the ratio of output shaft power to input shaft power" Required: True diff --git a/examples/lookup_table/schema/LookupTable.schema.yaml b/examples/lookup_table/schema/LookupTable.schema.yaml index 6bda6b3..262d68a 100644 --- a/examples/lookup_table/schema/LookupTable.schema.yaml +++ b/examples/lookup_table/schema/LookupTable.schema.yaml @@ -24,15 +24,15 @@ LookupTableTemplate: GridVariablesTemplate: Object Type: "Data Group Template" Required Data Types: - - "[Numeric][1..]" - - "[Integer][1..]" + - "[Numeric]" + - "[Integer]" Data Elements Required: True LookupVariablesTemplate: Object Type: "Data Group Template" Required Data Types: - - "[Numeric][1..]" - - "[Integer][1..]" + - "[Numeric]" + - "[Integer]" Data Elements 
Required: True LookupTable: @@ -54,8 +54,10 @@ GridVariables: Data Elements: output_power: Description: "Output shaft power" - Data Type: "[Numeric][1..]" - Constraints: ">=0.0" + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "[1..]" Units: "W" Required: True @@ -65,8 +67,11 @@ LookupVariables: Data Elements: efficiency: Description: "Efficiency of drive" - Data Type: "[Numeric][1..]" - Constraints: [">=0.0","<=1.0"] + Data Type: "[Numeric]" + Constraints: + - ">=0.0" + - "<=1.0" + - "[1..]" Units: "-" Notes: "Defined as the ratio of output shaft power to input shaft power" Required: True diff --git a/examples/time_series/schema/TimeSeries.schema.yaml b/examples/time_series/schema/TimeSeries.schema.yaml index 900b55d..97474df 100644 --- a/examples/time_series/schema/TimeSeries.schema.yaml +++ b/examples/time_series/schema/TimeSeries.schema.yaml @@ -31,9 +31,9 @@ TimeIntervals: Data Elements: id: Description: Reference identification - Data Type: ID - Constraints: ":TimeIntervals:" + Data Type: String Required: true + ID: true starting_time: Description: Timestamp indicating the beginning of the data Data Type: Timestamp @@ -47,15 +47,21 @@ TimeIntervals: Required: "if !timestamps" timestamps: Description: Array of timestamps - Data Type: "[Timestamp][1..]" + Data Type: "[Timestamp]" + Constraints: + - "[1..]" Required: "if !regular_interval" labels: Description: Informal labels describing each time interval - Data Type: "[String][1..]" + Data Type: "[String]" + Constraints: + - "[1..]" Notes: "e.g., month names for monthly intervals" notes: Description: Notes about each time interval - Data Type: "[String][1..]" + Data Type: "[String]" + Constraints: + - "[1..]" TimeSeries: Object Type: "Data Group" @@ -74,10 +80,11 @@ TimeSeries: Required: true value_time_intervals: Description: Reference to a `TimeInterval` data group associated with this time series - Data Type: Reference - Constraints: ":TimeIntervals:" + Data Type: ":TimeIntervals:" Required: true 
values: Description: Time series data values - Data Type: "[Numeric][1..]" + Data Type: "[Numeric]" + Constraints: + - "[1..]" Required: true diff --git a/lattice/core.schema.yaml b/lattice/core.schema.yaml index 4fb805a..7239e85 100644 --- a/lattice/core.schema.yaml +++ b/lattice/core.schema.yaml @@ -48,47 +48,37 @@ Pattern: Examples: - "CA225FB.[1-9]" -ID: - Object Type: "Data Type" - Description: "A string used to identify an instance of a data group." - JSON Schema Type: string - Examples: - - "Lobby Zone" - -Reference: - Object Type: "Data Type" - Description: "A string used to reference an identified instance of a data group." - JSON Schema Type: string - Examples: - - "Lobby Zone" - # Special String Data Types UUID: Object Type: "String Type" Description: "An effectively unique character string conforming to ITU-T Recommendation X.667 (ITU-T 2012)." - JSON Schema Pattern: "^[0-9,a-f,A-F]{8}-[0-9,a-f,A-F]{4}-[0-9,a-f,A-F]{4}-[0-9,a-f,A-F]{4}-[0-9,a-f,A-F]{12}$" + Regular Expression Pattern: "^[0-9,a-f,A-F]{8}-[0-9,a-f,A-F]{4}-[0-9,a-f,A-F]{4}-[0-9,a-f,A-F]{4}-[0-9,a-f,A-F]{12}$" Examples: - "123e4567-e89b-12d3-a456-426655440000" Date: Object Type: "String Type" Description: "A calendar date formatted per ISO 8601 (ISO 2004)" - JSON Schema Pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + Regular Expression Pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" Examples: - "2015-04-29" Timestamp: Object Type: "String Type" Description: "Date with UTC time formatted per ISO 8601 (ISO 2004)" - JSON Schema Pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}Z$" + Regular Expression Pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}Z$" Examples: - "2016-06-29T14:35Z" +# TODO: GenericTimestamp + +# TODO: TimeDuration + Version: Object Type: "String Type" Description: "Version identifier in the form major.minor.patch as defined by Semver 2016." 
- JSON Schema Pattern: "^(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)(?:-((?:0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" + Regular Expression Pattern: "^(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)\\.(0|[1-9][0-9]*)(?:-((?:0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$" Examples: - "1.1.3" - "1.2.0-beta-92" @@ -99,41 +89,52 @@ Metadata: Object Type: "Data Group" Data Elements: schema_author: - Description: "Name of the schema author" + Description: "Name of the organization that published the schema" Data Type: "String" Required: True - Notes: "Identifies the data model where the schema is defined" - schema: + Notes: "Identifies the organization that defined the schema" + schema_name: Description: "Schema name or identifier" - Data Type: "" + Data Type: "String" Required: True Notes: "Identifies the schema used to define the data content" schema_version: - Description: "Version of the root schema this data complies with" + Description: "The version of the schema the data complies with" Data Type: "Version" Required: True - description: - Description: "Description of data content (suitable for display)" + schema_url: + Description: "The Uniform Resource Locator (url) for the schema definition and/or documentation" + Data Type: "String" + author: + Description: "Name of the entity creating the serialization" Data Type: "String" Required: True - timestamp: - Description: "Date of data publication" - Data Type: "Timestamp" - Required: True - Notes: "Date/time of publication of this data." + Notes: "Identifies the organization that created the file" id: Description: "Unique data set identifier" Data Type: "UUID" Notes: "Assigned by *data publisher* to identify this data. `id` shall remain unchanged for revisions of the same data." 
+ description: + Description: "Description of data content (suitable for display)" + Data Type: "String" + Required: True + time_of_creation: + Description: "Timestamp indicating when the serialization was created" + Data Type: "Timestamp" + Required: True + Notes: "Updated anytime any data content is modified" version: Description: "Integer version identifier for the data" - Data Type: "Integer" - Constraints: ">=1" - Notes: "Used by *data publisher* to track revisions of the data. `data_version` shall be incremented for each data revision." - data_source: + Data Type: "Version" + Notes: + - "Used by data publisher to track revisions of the data" + - "Shall be incremented for each data revision" + source: Description: "Source(s) of the data" Data Type: "String" - Notes: "Used by *data publisher* to document methods (e.g. software and version) used to generate data. **Informative note:** `data_source` may be different from ratings source(s) included elsewhere." + Notes: + - "Used by data publisher to document methods (e.g., software and version) used to generate data" + - "**Informative note:** `source` may be different from other data source(s) included elsewhere within the data" disclaimer: Description: "Characterization of accuracy, limitations, and applicability of this data" Data Type: "String" diff --git a/lattice/docs/schema_table.py b/lattice/docs/schema_table.py index 0a8a23b..0d59323 100644 --- a/lattice/docs/schema_table.py +++ b/lattice/docs/schema_table.py @@ -29,9 +29,12 @@ def process_string_types(string_types): for str_typ in string_types: new_item = deepcopy(str_typ) if "Is Regex" in new_item and new_item["Is Regex"]: - new_item["JSON Schema Pattern"] = "(Not applicable)" - new_item["JSON Schema Pattern"] = ( - new_item["JSON Schema Pattern"].replace("*", r"\*").replace(r"(?", "\n" r"(?").replace(r"-[", "\n" r"-[") + new_item["Regular Expression Pattern"] = "(Not applicable)" + new_item["Regular Expression Pattern"] = ( + new_item["Regular 
Expression Pattern"] + .replace("*", r"\*") + .replace(r"(?", "\n" r"(?") + .replace(r"-[", "\n" r"-[") ) new_list.append(new_item) return new_list @@ -215,7 +218,7 @@ def string_types_table(string_types, caption=None, add_training_ws=True): RETURN: string, the table in Pandoc markdown grid table format """ return create_table_from_list( - columns=["String Type", "Description", "JSON Schema Pattern", "Examples"], + columns=["String Type", "Description", "Regular Expression Pattern", "Examples"], data_list=string_types, caption=caption, add_training_ws=add_training_ws, diff --git a/lattice/header_entries.py b/lattice/header_entries.py index 5bc144f..1fdb11c 100644 --- a/lattice/header_entries.py +++ b/lattice/header_entries.py @@ -252,7 +252,7 @@ def _get_simple_type(self, type_str): First, attempt to capture enum, definition, or special string type as references; then default to fundamental types with simple key "type". """ - enum_or_def = r"(\{|\<)(.*)(\}|\>)" + enum_or_def = r"(\{|\<|:)(.*)(\}|\>|:)" internal_type = None nested_type = None m = re.match(enum_or_def, type_str) @@ -284,11 +284,7 @@ def _get_simple_type(self, type_str): return simple_type try: - if "/" in type_str: - # e.g. 
"Numeric/Null" - simple_type = self._datatypes[type_str.split("/")[0]] - else: - simple_type = self._datatypes[type_str] + simple_type = self._datatypes[type_str] except KeyError: print("Type not processed:", type_str) return simple_type diff --git a/lattice/lattice.py b/lattice/lattice.py index 9dc49be..e0fa990 100644 --- a/lattice/lattice.py +++ b/lattice/lattice.py @@ -15,108 +15,7 @@ from .header_entries import HeaderTranslator from .cpp_entries import CPPTranslator from lattice.cpp.generate_support_headers import generate_support_headers, support_header_pathnames - - -class SchemaFile: # pylint:disable=R0902 - """Parse the components of a schema file.""" - - def __init__(self, path: Path) -> None: - """Open and parse source schema""" - - self.path = Path(path).absolute() - self.file_base_name = get_base_stem(self.path) - self.schema_type = self.file_base_name # Overwritten if it is actually specified - - self._content: dict = load(self.path) - self._meta_schema_path: Path = None - self._json_schema_path: Path = None - self._root_data_group: str = None - - # Check for required content - if "Schema" not in self._content: - raise Exception(f'Required "Schema" object not found in schema file, "{self.path}".') - - self.schema_author = None - if "Root Data Group" in self._content["Schema"]: - self._root_data_group = self._content["Schema"]["Root Data Group"] - self.schema_type = self._root_data_group - if self._root_data_group in self._content: - # Get metadata - if "Data Elements" not in self._content[self._root_data_group]: - raise Exception(f'Root Data Group, "{self._root_data_group}" ' 'does not contain "Data Elements".') - if "metadata" in self._content[self._root_data_group]["Data Elements"]: - self._get_schema_constraints() - else: - pass # Warning? - else: - raise Exception( - f'Root Data Group, "{self._root_data_group}", ' f'not found in schema file, "{self.path}"' - ) - - # TODO: Version? 
# pylint: disable=fixme - - def _get_schema_constraints(self): - """Populate instance variables from schema constraints""" - - constraints = self._content[self._root_data_group]["Data Elements"]["metadata"].get("Constraints") - data_element_pattern = "([a-z]+)(_([a-z]|[0-9])+)*" - enumerator_pattern = "([A-Z]([A-Z]|[0-9])*)(_([A-Z]|[0-9])+)*" - constraint_pattern = re.compile( - f"^(?P{data_element_pattern})=(?P{enumerator_pattern})$" - ) - if not isinstance(constraints, list): - constraints = [constraints] - for constraint in [c for c in constraints if c]: - match = constraint_pattern.match(constraint) - if match: - if match.group(1) == "schema_author": - self.schema_author = match.group(5) - else: - pass # Warning? - - if match.group(1) == "schema": - self.schema_type = match.group(5) - else: - pass # Warning? - - if match.group("data_element") == "schema": - self.schema_type = match.group("enumerator") - else: - pass # Warning? - - @property - def meta_schema_path(self) -> Path: - """Path to this SchemaFile's validating metaschema""" - return self._meta_schema_path - - @meta_schema_path.setter - def meta_schema_path(self, meta_schema_path): - self._meta_schema_path = Path(meta_schema_path).absolute() - - @property - def json_schema_path(self) -> Path: - """Path to this SchemaFile as translated JSON""" - return self._json_schema_path - - @json_schema_path.setter - def json_schema_path(self, json_schema_path): - self._json_schema_path = Path(json_schema_path).absolute() - - @property - def cpp_header_path(self): # pylint:disable=C0116 - return self._cpp_header_path - - @cpp_header_path.setter - def cpp_header_path(self, value): - self._cpp_header_path = Path(value).absolute() - - @property - def cpp_source_path(self): # pylint:disable=C0116 - return self._cpp_source_path - - @cpp_source_path.setter - def cpp_source_path(self, value): - self._cpp_source_path = Path(value).absolute() +from lattice.schema import Schema class Lattice: # pylint:disable=R0902 @@ -128,7 
+27,7 @@ def __init__( self, root_directory: Path = Path.cwd(), build_directory: Union[Path, None] = None, - build_output_directory_name: Path = Path(".lattice"), + build_output_directory_name: Union[Path, None] = Path(".lattice"), build_validation: bool = True, ) -> None: """Set up file structure""" @@ -182,10 +81,10 @@ def collect_schemas(self): self.schema_directory_path = self.root_directory # Collect list of schema files - self.schemas: List[SchemaFile] = [] + self.schemas: List[Schema] = [] for file_name in sorted(list(self.schema_directory_path.iterdir())): if fnmatch(file_name, "*.schema.yaml") or fnmatch(file_name, "*.schema.yml"): - self.schemas.append(SchemaFile(file_name)) + self.schemas.append(Schema(file_name)) if len(self.schemas) == 0: raise Exception(f'No schemas found in "{self.schema_directory_path}".') @@ -196,34 +95,34 @@ def setup_meta_schemas(self): self.meta_schema_directory = Path(self.build_directory) / "meta_schema" make_dir(self.meta_schema_directory) for schema in self.schemas: - meta_schema_path = self.meta_schema_directory / f"{schema.file_base_name}.meta.schema.json" + meta_schema_path = self.meta_schema_directory / f"{schema.name}.meta.schema.json" schema.meta_schema_path = meta_schema_path def generate_meta_schemas(self): """Generate metaschemas""" for schema in self.schemas: - generate_meta_schema(Path(schema.meta_schema_path), Path(schema.path)) + generate_meta_schema(Path(schema.meta_schema_path), Path(schema.file_path)) def validate_schemas(self): """Validate source schema using metaschema file""" for schema in self.schemas: - meta_validate_file(Path(schema.path), Path(schema.meta_schema_path)) + meta_validate_file(Path(schema.file_path), Path(schema.meta_schema_path)) def setup_json_schemas(self): """Set up json_schema subdirectory""" self.json_schema_directory = Path(self.build_directory) / "json_schema" make_dir(self.json_schema_directory) for schema in self.schemas: - json_schema_path = self.json_schema_directory / 
f"{schema.file_base_name}.schema.json" + json_schema_path = self.json_schema_directory / f"{schema.name}.schema.json" schema.json_schema_path = json_schema_path def generate_json_schemas(self): """Generate JSON schemas""" for schema in self.schemas: - generate_json_schema(schema.path, schema.json_schema_path) + generate_json_schema(schema.file_path, schema.json_schema_path) def validate_file(self, input_path, schema_type=None): """ @@ -235,8 +134,8 @@ def validate_file(self, input_path, schema_type=None): instance = load(input_path) if schema_type is None: if "metadata" in instance: - if "schema" in instance["metadata"]: - schema_type = instance["metadata"]["schema"] + if "schema_name" in instance["metadata"]: + schema_type = instance["metadata"]["schema_name"] if schema_type is None: if len(self.schemas) > 1: @@ -249,7 +148,7 @@ def validate_file(self, input_path, schema_type=None): else: # Find corresponding schema for schema in self.schemas: - if schema.schema_type == schema_type: + if schema.schema_name == schema_type: try: validate_file(input_path, schema.json_schema_path) postvalidate_file(input_path, schema.json_schema_path) @@ -322,15 +221,15 @@ def generate_web_documentation(self): def collect_cpp_schemas(self): """Collect source schemas into list of SchemaFiles""" - self.cpp_schemas = self.schemas + [SchemaFile(Path(__file__).with_name("core.schema.yaml"))] + self.cpp_schemas = self.schemas + [Schema(Path(__file__).with_name("core.schema.yaml"))] def setup_cpp_source_files(self): """Create directories for generated CPP source""" self.cpp_output_dir = Path(self.build_directory) / "cpp" make_dir(self.cpp_output_dir) for schema in self.cpp_schemas: - schema.cpp_header_path = self.cpp_output_dir / f"{schema.file_base_name.lower()}.h" - schema.cpp_source_path = self.cpp_output_dir / f"{schema.file_base_name.lower()}.cpp" + schema.cpp_header_path = self.cpp_output_dir / f"{schema.name.lower()}.h" + schema.cpp_source_path = self.cpp_output_dir / 
f"{schema.name.lower()}.cpp" def cpp_support_headers(self) -> list[Path]: return support_header_pathnames(self.cpp_output_dir) @@ -341,7 +240,7 @@ def generate_cpp_headers(self): c = CPPTranslator() root_groups = [] for schema in self.cpp_schemas: - h.translate(schema.path, self.root_directory.name, self.schema_directory_path) + h.translate(schema.file_path, self.root_directory.name, self.schema_directory_path) if h._root_data_group is not None: root_groups.append(h._root_data_group) dump(str(h), schema.cpp_header_path) diff --git a/lattice/meta.schema.yaml b/lattice/meta.schema.yaml index 0bbbb9d..6720be3 100644 --- a/lattice/meta.schema.yaml +++ b/lattice/meta.schema.yaml @@ -72,7 +72,7 @@ definitions: const: String Type Description: type: string - JSON Schema Pattern: + Regular Expression Pattern: type: string Examples: type: array @@ -167,7 +167,7 @@ definitions: pattern: "**GENERATED**" DataElementAttributes: type: object - properties: + properties: # TODO: Need to allow custom attributes Description: type: string Data Type: @@ -181,6 +181,8 @@ definitions: "$ref": meta.schema.json#/definitions/Required Notes: "$ref": meta.schema.json#/definitions/Notes + ID: + type: boolean DataGroup: type: object properties: diff --git a/lattice/meta_schema.py b/lattice/meta_schema.py index 7ea00f2..66b1f68 100644 --- a/lattice/meta_schema.py +++ b/lattice/meta_schema.py @@ -11,7 +11,7 @@ class MetaSchema: def __init__(self, schema_path): - with open(schema_path) as meta_schema_file: + with open(schema_path, encoding="utf-8") as meta_schema_file: uri_path = os.path.abspath(os.path.dirname(schema_path)) if os.sep != posixpath.sep: uri_path = posixpath.sep + uri_path @@ -20,7 +20,7 @@ def __init__(self, schema_path): self.validator = jsonschema.Draft7Validator(json.load(meta_schema_file), resolver=resolver) def validate(self, instance_path): - with open(os.path.join(instance_path), "r") as input_file: + with open(os.path.join(instance_path), "r", encoding="utf-8") as 
input_file: instance = yaml.load(input_file, Loader=yaml.FullLoader) errors = sorted(self.validator.iter_errors(instance), key=lambda e: e.path) file_name = os.path.basename(instance_path) @@ -129,12 +129,11 @@ def generate_meta_schema(output_path, schema=None): meta_schema["patternProperties"][schema_patterns.type_base_names.anchored()]["allOf"].append( { "if": {"properties": {"Object Type": {"const": "Data Group"}, "Data Group Template": True}}, - "then": {"$ref": f"meta.schema.json#/definitions/DataGroup"}, + "then": {"$ref": "meta.schema.json#/definitions/DataGroup"}, } ) - for data_group_template_name in data_group_templates: - data_group_template = data_group_templates[data_group_template_name] + for data_group_template_name, data_group_template in data_group_templates.items(): meta_schema["definitions"][f"{data_group_template_name}DataElementAttributes"] = copy.deepcopy( meta_schema["definitions"]["DataElementAttributes"] ) @@ -245,9 +244,9 @@ def generate_meta_schema(output_path, schema=None): dump(meta_schema, output_path) - with open(output_path, "r") as file: + with open(output_path, "r", encoding="utf-8") as file: content = file.read() - with open(output_path, "w") as file: + with open(output_path, "w", encoding="utf-8") as file: file.writelines(content.replace("meta.schema.json", meta_schema_file_name)) return schema_patterns diff --git a/lattice/schema.py b/lattice/schema.py index 0880112..e58f019 100644 --- a/lattice/schema.py +++ b/lattice/schema.py @@ -1,7 +1,7 @@ from __future__ import ( annotations, ) # Needed for type hinting classes that are not yet fully defined -from typing import List +from typing import List, Union, Type, Dict, Any import pathlib import re @@ -18,14 +18,14 @@ def __init__(self, pattern_string: str) -> None: def __str__(self): return self.pattern.pattern - def match(self, test_string: str, anchored: bool = False): + def match(self, test_string: str, anchored: bool = False) -> Union[re.Match[str], None]: return 
self.pattern.match(test_string) if not anchored else self.anchored_pattern.match(test_string) def anchored(self): return self.anchored_pattern.pattern @staticmethod - def anchor(pattern_text: str): + def anchor(pattern_text: str) -> str: return f"^{pattern_text}$" @@ -37,7 +37,10 @@ def anchor(pattern_text: str): class DataType: - def __init__(self, text, parent_data_element: DataElement): + pattern: RegularExpressionPattern + value_pattern: RegularExpressionPattern + + def __init__(self, text: str, parent_data_element: DataElement): self.text = text self.parent_data_element = parent_data_element @@ -78,16 +81,14 @@ class PatternType(DataType): value_pattern = RegularExpressionPattern('".*"') -class ArrayType(DataType): - pattern = RegularExpressionPattern(r"\[(\S+)]") - - class DataGroupType(DataType): pattern = RegularExpressionPattern(rf"\{{({_type_base_names})\}}") def __init__(self, text, parent_data_element): super().__init__(text, parent_data_element) - self.data_group_name = self.pattern.match(text).group(1) + match = self.pattern.match(text) + assert match is not None + self.data_group_name = match.group(1) self.data_group = None # only valid once resolve() is called def resolve(self): @@ -103,6 +104,14 @@ class AlternativeType(DataType): pattern = RegularExpressionPattern(r"\(([^\s,]+)((, ?([^\s,]+))+)\)") +class ReferenceType(DataType): + pattern = RegularExpressionPattern(rf":{_type_base_names}:") + + +class ArrayType(DataType): + pattern = RegularExpressionPattern(rf"\[({_type_base_names}|{DataGroupType.pattern}|{EnumerationType.pattern})\]") + + _value_pattern = RegularExpressionPattern( f"(({NumericType.value_pattern})|" f"({StringType.value_pattern})|" @@ -113,6 +122,8 @@ class AlternativeType(DataType): # Constraints class Constraint: + pattern: RegularExpressionPattern + def __init__(self, text: str, parent_data_element: DataElement): self.text = text self.parent_data_element = parent_data_element @@ -154,6 +165,7 @@ def __init__(self, text: str, 
parent_data_element: DataElement): super().__init__(text, parent_data_element) self.pattern = parent_data_element.parent_data_group.parent_schema.schema_patterns.data_element_value_constraint match = self.pattern.match(self.text) + assert match is not None self.data_element_name = match.group(1) # TODO: Named groups? self.data_element_value = match.group(5) # TODO: Named groups? @@ -162,7 +174,7 @@ class ArrayLengthLimitsConstraint(Constraint): pattern = RegularExpressionPattern(r"\[(\d*)\.\.(\d*)\]") -_constraint_list: List[Constraint] = [ +_constraint_list: List[Type[Constraint]] = [ RangeConstraint, MultipleConstraint, SetConstraint, @@ -173,19 +185,18 @@ class ArrayLengthLimitsConstraint(Constraint): ] -def _constraint_factory(input: str, parent_data_element: DataElement) -> Constraint: +def _constraint_factory(text: str, parent_data_element: DataElement) -> Constraint: number_of_matches = 0 - match_type = None for constraint in _constraint_list: - if constraint.pattern.match(input): + if constraint.pattern.match(text): match_type = constraint number_of_matches += 1 if number_of_matches == 1: - return match_type(input, parent_data_element) + return match_type(text, parent_data_element) if number_of_matches == 0: - raise Exception(f"No matching constraint for {input}.") - raise Exception(f"Multiple matches found for constraint, {input}") + raise Exception(f"No matching constraint for {text}.") + raise Exception(f"Multiple matches found for constraint, {text}") # Required @@ -198,7 +209,8 @@ def __init__(self, name: str, data_element_dictionary: dict, parent_data_group: self.name = name self.dictionary = data_element_dictionary self.parent_data_group = parent_data_group - self.constraints = [] + self.constraints: List[Constraint] = [] + self.is_id = False for attribute in self.dictionary: if attribute == "Description": self.description = self.dictionary[attribute] @@ -214,6 +226,16 @@ def __init__(self, name: str, data_element_dictionary: dict, parent_data_group: 
self.required = self.dictionary[attribute] elif attribute == "Notes": self.notes = self.dictionary[attribute] + elif attribute == "ID": + self.is_id = self.dictionary[attribute] + if self.is_id: + if self.parent_data_group.id_data_element is None: + self.parent_data_group.id_data_element = self + else: + raise RuntimeError( + f"Multiple ID data elements found for Data Group '{self.parent_data_group.name}': '{self.parent_data_group.id_data_element.name}' and '{self.name}'" + ) + else: raise Exception( f'Unrecognized attribute, "{attribute}".' @@ -222,7 +244,7 @@ def __init__(self, name: str, data_element_dictionary: dict, parent_data_group: f"Data Element={self.name}" ) - def set_constraints(self, constraints_input): + def set_constraints(self, constraints_input: Union[str, List[str]]) -> None: if not isinstance(constraints_input, list): constraints_input = [constraints_input] @@ -245,7 +267,7 @@ def __init__(self, name: str, string_type_dictionary: dict, parent_schema: Schem self.name = name self.dictionary = string_type_dictionary self.parent_schema = parent_schema - self.value_pattern = self.dictionary["JSON Schema Pattern"] + self.value_pattern = self.dictionary["Regular Expression Pattern"] # Make new DataType class def init_method(self, text, parent_data_element): @@ -265,11 +287,12 @@ def init_method(self, text, parent_data_element): class DataGroup: - def __init__(self, name: str, data_group_dictionary, parent_schema: Schema): + def __init__(self, name: str, data_group_dictionary: dict, parent_schema: Schema): self.name = name self.dictionary = data_group_dictionary self.parent_schema = parent_schema self.data_elements = {} + self.id_data_element: Union[DataElement, None] = None # data element containing unique id for this data group for data_element in self.dictionary["Data Elements"]: self.data_elements[data_element] = DataElement( data_element, self.dictionary["Data Elements"][data_element], self @@ -283,14 +306,14 @@ def resolve(self): class Enumerator: 
pattern = EnumerationType.value_pattern - def __init__(self, name, enumerator_dictionary, parent_enumeration: Enumeration) -> None: + def __init__(self, name: str, enumerator_dictionary: dict, parent_enumeration: Enumeration): self.name = name self.dictionary = enumerator_dictionary self.parent_enumeration = parent_enumeration class Enumeration: - def __init__(self, name, enumeration_dictionary, parent_schema: Schema): + def __init__(self, name: str, enumeration_dictionary: dict, parent_schema: Schema): self.name = name self.dictionary = enumeration_dictionary self.parent_schema = parent_schema @@ -329,8 +352,8 @@ def __init__(self, schema=None): regex_base_types = core_types["Data Type"] - base_types = "|".join(regex_base_types) - base_types = RegularExpressionPattern(f"({base_types})") + base_types_string = "|".join(regex_base_types) + base_types = RegularExpressionPattern(f"({base_types_string})") string_types = core_types["String Type"] if schema: @@ -339,39 +362,41 @@ def __init__(self, schema=None): if "String Type" in schema_types: string_types += ["String Type"] - re_string_types = "|".join(string_types) - re_string_types = RegularExpressionPattern(f"({re_string_types})") + re_string_types_string = "|".join(string_types) + re_string_types = RegularExpressionPattern(f"({re_string_types_string})") self.data_group_types = DataGroupType.pattern self.enumeration_types = EnumerationType.pattern - single_type = rf"({base_types}|{re_string_types}|{self.data_group_types}|{self.enumeration_types})" + references = ReferenceType.pattern + single_type = rf"({base_types}|{re_string_types}|{self.data_group_types}|{self.enumeration_types}|{references})" alternatives = rf"\(({single_type})(,\s*{single_type})+\)" - arrays = rf"\[({single_type})\](\[\d*\.*\d*\])?" 
+ arrays = ArrayType.pattern self.data_types = RegularExpressionPattern(f"({single_type})|({alternatives})|({arrays})") # Values self.values = RegularExpressionPattern( - f"(({self.numeric})|" f"({self.string})|" f"({self.enumerator})|" f"({self.boolean}))" + f"(({self.numeric})|({self.string})|({self.enumerator})|({self.boolean}))" ) # Constraints - alpha_array = "([A-Z]{[1-9]+})" - numeric_array = "([0-9]{[1-9]+})" self.range_constraint = RangeConstraint.pattern self.multiple_constraint = MultipleConstraint.pattern self.data_element_value_constraint = DataElementValueConstraint.pattern sets = SetConstraint.pattern reference_scope = f":{_type_base_names}:" self.selector_constraint = SelectorConstraint.pattern + array_limits = ArrayLengthLimitsConstraint.pattern + string_patterns = StringPatternConstraint.pattern self.constraints = RegularExpressionPattern( - f"({alpha_array}|{numeric_array}|{self.range_constraint})|" # pylint:disable=C0301 + f"({self.range_constraint})|" # pylint:disable=C0301 f"({self.multiple_constraint})|" f"({sets})|" f"({self.data_element_value_constraint})|" f"({reference_scope})|" f"({self.selector_constraint})|" - f"({StringPatternConstraint.pattern})" + f"({array_limits})|" + f"({string_patterns})" ) # Conditional Requirements @@ -395,7 +420,7 @@ def __init__(self, file_path: pathlib.Path, parent_schema: Schema | None = None) self.data_groups = {} self.data_group_templates = {} - self._data_type_list = [ + self._data_type_list: List[Type[DataType]] = [ IntegerType, NumericType, BooleanType, @@ -405,6 +430,7 @@ def __init__(self, file_path: pathlib.Path, parent_schema: Schema | None = None) DataGroupType, EnumerationType, AlternativeType, + ReferenceType, ] self.schema_patterns = SchemaPatterns(self.source_dictionary) @@ -445,10 +471,10 @@ def __init__(self, file_path: pathlib.Path, parent_schema: Schema | None = None) self.root_data_group = None self.metadata = None self.schema_author = None - self.schema_type = self.name + 
self.schema_name = self.name if self.root_data_group_name is not None: - self.schema_type = self.root_data_group_name + self.schema_name = self.root_data_group_name self.root_data_group = self.get_data_group(self.root_data_group_name) self.metadata = ( self.root_data_group.data_elements["metadata"] @@ -461,7 +487,7 @@ def __init__(self, file_path: pathlib.Path, parent_schema: Schema | None = None) if constraint.data_element_name == "schema_author": self.schema_author = constraint.data_element_value elif constraint.data_element_name == "schema": - self.schema_type = constraint.data_element_value + self.schema_name = constraint.data_element_value for data_group in self.data_groups.values(): data_group.resolve() @@ -488,7 +514,7 @@ def set_reference_schema(self, schema_name, schema_path): else: self.reference_schemas[schema_name] = Schema(schema_path, self) - def get_reference_schema(self, schema_name) -> Schema | None: + def get_reference_schema(self, schema_name: str) -> Schema | None: # TODO: verify schema has the same path too? # Search this schema first if schema_name in self.reference_schemas: @@ -500,7 +526,7 @@ def get_reference_schema(self, schema_name) -> Schema | None: return None - def get_data_group(self, data_group_name: str): + def get_data_group(self, data_group_name: str) -> DataGroup: matching_schemas = [] # 1. 
Search this schema first if data_group_name in self.data_groups: @@ -514,22 +540,21 @@ def get_data_group(self, data_group_name: str): return matching_schemas[0].data_groups[data_group_name] - def data_type_factory(self, input: str, parent_data_element: DataElement) -> DataType: + def data_type_factory(self, text: str, parent_data_element: DataElement) -> DataType: number_of_matches = 0 - match_type = None for data_type in self._data_type_list: - if data_type.pattern.match(input): + if data_type.pattern.match(text): match_type = data_type number_of_matches += 1 if number_of_matches == 1: - return match_type(input, parent_data_element) + return match_type(text, parent_data_element) if number_of_matches == 0: - raise Exception(f"No matching data type for {input}.") + raise Exception(f"No matching data type for {text}.") else: - raise Exception(f"Multiple matches found for data type, {input}") + raise Exception(f"Multiple matches found for data type, {text}") - def add_data_type(self, data_type: DataType): + def add_data_type(self, data_type: Type[DataType]) -> None: if data_type not in self._data_type_list: self._data_type_list.append(data_type) @@ -539,9 +564,9 @@ def add_data_type(self, data_type: DataType): def get_types(schema): """For each Object Type in a schema, map a list of Objects matching that type.""" - types = {} - for object in schema: - if schema[object]["Object Type"] not in types: - types[schema[object]["Object Type"]] = [] - types[schema[object]["Object Type"]].append(object) + types: Dict[str, Any] = {} + for object_name in schema: + if schema[object_name]["Object Type"] not in types: + types[schema[object_name]["Object Type"]] = [] + types[schema[object_name]["Object Type"]].append(object_name) return types diff --git a/lattice/schema_to_json.py b/lattice/schema_to_json.py index 01a7a16..f78c60f 100644 --- a/lattice/schema_to_json.py +++ b/lattice/schema_to_json.py @@ -25,7 +25,7 @@ class DataGroup: # pylint: disable=R0903 # Parse ellipsis 
range-notation e.g. '[1..]' minmax_range_type = r"(?P<min>[0-9]*)(?P<ellipsis>\.*)(?P<max>[0-9]*)" - enum_or_def = r"(\{|\<)(.*)(\}|\>)" + enum_or_def = r"(\{|\<|:)(.*)(\}|\>|:)" numeric_type = r"[+-]?[0-9]*\.?[0-9]+|[0-9]+" # Any optionally signed, floating point number scope_constraint = r"^:(?P<scope>.*):" # Lattice scope constraint for ID/Reference ranged_array_type = rf"{array_type}(\[{minmax_range_type}\])?" @@ -179,11 +179,7 @@ def _get_simple_type(self, type_str, target_dict_to_append): target_dict_to_append["$ref"] = internal_type return try: - if "/" in type_str: - # e.g. "Numeric/Null" becomes a list of 'type's - target_dict_to_append["type"] = [self._types[t] for t in type_str.split("/")] - else: - target_dict_to_append["type"] = self._types[type_str] + target_dict_to_append["type"] = self._types[type_str] except KeyError: raise KeyError( f"Unknown type: {type_str} does not appear in referenced schema " @@ -428,7 +424,7 @@ def __init__(self, input_file_path: Path, forward_declaration_dir: Optional[Path { tag: { "type": "string", - "pattern": entry["JSON Schema Pattern"], + "pattern": entry["Regular Expression Pattern"], } } ) diff --git a/test/test_schema_patterns.py b/test/test_schema_patterns.py index 2aa5d01..2a0ece4 100644 --- a/test/test_schema_patterns.py +++ b/test/test_schema_patterns.py @@ -47,9 +47,8 @@ def test_data_type_pattern(): "Numeric", "[Numeric]", "{DataGroup}", - "[String][1..]", ], - invalid_examples=["Wrong"], + invalid_examples=["Wrong", "[String][1..]", "ID"], )