import arcpy
import os
import logging
import random

from env_setup import environment_setup
from custom_tools import custom_arcpy
from env_setup import setup_directory_structure
from file_manager.n100.file_manager_buildings import Building_N100


class PartitionIterator:
    """Run a processing pipeline over cartographic partitions of input datasets.

    The iterator creates partitions covering all inputs, then for each
    partition selects the features belonging to it (plus nearby context
    features), optionally applies custom processing functions, and appends
    the partition results into one final feature class.
    """

    def __init__(
        self,
        inputs,
        root_file_partition_iterator,
        scale,
        output_feature_class,
        custom_functions=None,
        feature_count="15000",
        partition_method="FEATURES",
    ):
        """
        Initialize the PartitionIterator with input datasets for partitioning and processing.

        :param inputs: A dictionary of input feature class paths keyed by alias.
        :param root_file_partition_iterator: Base path for in-progress outputs.
        :param scale: Scale for the partitions.
        :param output_feature_class: The output feature class for final results.
        :param custom_functions: Optional list of callables applied per partition.
        :param feature_count: Feature count for cartographic partitioning.
        :param partition_method: Method used for creating cartographic partitions.
        """
        self.inputs = inputs
        self.root_file_partition_iterator = root_file_partition_iterator
        self.scale = scale
        self.output_feature_class = output_feature_class
        self.feature_count = feature_count
        self.partition_method = partition_method
        self.partition_feature = (
            f"{root_file_partition_iterator}_partition_feature_{scale}"
        )
        self.custom_functions = custom_functions or []
        self.file_mapping = None
        self.alias = list(self.inputs.keys())
        self.original_input_path = list(self.inputs.values())
        # BUGFIX: the original interpolated the alias *list* into the path,
        # producing an invalid dataset name like "..._['a', 'b']_final...".
        self.final_append_feature = (
            f"{root_file_partition_iterator}_"
            f"{'_'.join(self.alias)}_final_append_feature_{scale}"
        )

    def setup_arcpy_environment(self):
        """Apply the project's shared ArcPy environment settings."""
        environment_setup.general_setup()

    def create_cartographic_partitions(self):
        """
        Create cartographic partitions covering all the input feature classes.
        """
        arcpy.cartography.CreateCartographicPartitions(
            in_features=self.original_input_path,
            out_features=self.partition_feature,
            feature_count=self.feature_count,
            partition_method=self.partition_method,
        )
        print(f"Created partitions in {self.partition_feature}")

    def pre_iteration(self):
        """
        Determine the maximum OBJECTID of the partition feature class and
        delete a stale final append target left over from a previous run.

        :return: The maximum OBJECTID (int) found in the partition feature.
        :raises Exception: Re-raises any error from the OBJECTID lookup so the
            caller does not continue with an undefined partition count.
        """
        try:
            # Descending ORDER BY means the first row holds the maximum.
            with arcpy.da.SearchCursor(
                self.partition_feature,
                ["OBJECTID"],
                sql_clause=(None, "ORDER BY OBJECTID DESC"),
            ) as cursor:
                max_object_id = next(cursor)[0]
            print(f"Maximum OBJECTID found: {max_object_id}")
        except Exception as e:
            # BUGFIX: the original printed the error and implicitly returned
            # None, which would crash later in range(); re-raise instead.
            print(f"Error in finding max OBJECTID: {e}")
            raise

        # BUGFIX: the original looped over aliases but returned from inside
        # the loop after the first iteration, and returned a tuple that
        # run() never unpacked. One existence check is sufficient here.
        if arcpy.Exists(self.final_append_feature):
            arcpy.management.Delete(self.final_append_feature)
        return max_object_id

    def prepare_input_data(self):
        """
        Copy every input feature class and add the bookkeeping fields used
        during iteration, then register each copy in the file mapping.
        """
        for alias, input_feature in zip(self.alias, self.original_input_path):
            # Copy the input feature class so the source data is never mutated.
            input_data_copy = f"{self.root_file_partition_iterator}_{alias}_input_copy"
            arcpy.management.Copy(in_data=input_feature, out_data=input_data_copy)
            print(f"Copied {input_data_copy}")

            # Flag field: 1 = feature belongs to the current partition,
            # 0 = nearby context feature only.
            partition_field = "partition_select"
            arcpy.AddField_management(
                in_table=input_data_copy, field_name=partition_field, field_type="LONG"
            )
            print(f"Added field {partition_field}")

            # Add a unique ID field, making sure the name does not collide
            # with an existing field on the copy.
            existing_field_names = [
                field.name for field in arcpy.ListFields(input_data_copy)
            ]
            orig_id_field = "id_field"
            while orig_id_field in existing_field_names:
                orig_id_field = f"{orig_id_field}_{random.randint(0, 9)}"
            arcpy.AddField_management(
                in_table=input_data_copy, field_name=orig_id_field, field_type="LONG"
            )
            print(f"Added field {orig_id_field}")

            # Track the working copy as the alias's current output.
            self.file_mapping[alias] = {"current_output": input_data_copy}

    @staticmethod
    def custom_function(inputs):
        """Placeholder for partition-level processing; returns its outputs.

        BUGFIX: declared as a static method — the original definition lacked
        ``self`` and could not be called on an instance.
        """
        outputs = []
        return outputs

    def partition_iteration(
        self,
        input_data_copy,
        partition_feature,
        max_object_id,
        root_file_partition_iterator,
        scale,
        partition_field,
        orig_id_field,
        final_append_feature,
    ):
        """
        Process every partition: for each alias, select center and context
        features, run the custom functions, and append tagged results to the
        final output.

        :param input_data_copy: Unused; kept for signature compatibility. The
            per-alias working copy is resolved from ``self.file_mapping``.
        :param partition_feature: Partition feature class to iterate over.
        :param max_object_id: Highest partition OBJECTID (inclusive).
        :param root_file_partition_iterator: Base path for iteration outputs.
        :param scale: Scale suffix used in output names.
        :param partition_field: Field flagging center (1) vs context (0) rows.
        :param orig_id_field: Unique ID field added in prepare_input_data.
        :param final_append_feature: Target feature class for final results.
        """
        for object_id in range(1, max_object_id + 1):
            iteration_partition = f"{partition_feature}_{object_id}"

            custom_arcpy.select_attribute_and_make_permanent_feature(
                input_layer=partition_feature,
                expression=f"OBJECTID = {object_id}",
                output_name=iteration_partition,
            )

            for alias in self.alias:
                # BUGFIX: the original referenced the nonexistent attribute
                # self.input_data_copy and interpolated the alias *list*
                # (self.alias) into output names; use the per-alias working
                # copy from the file mapping and the loop variable instead.
                alias_input = self.file_mapping[alias]["current_output"]

                iteration_append_feature = (
                    f"{root_file_partition_iterator}_{alias}"
                    f"_iteration_append_feature_{scale}"
                )
                if arcpy.Exists(iteration_append_feature):
                    arcpy.management.Delete(iteration_append_feature)

                arcpy.management.CreateFeatureclass(
                    out_path=os.path.dirname(iteration_append_feature),
                    out_name=os.path.basename(iteration_append_feature),
                    template=alias_input,
                )
                print(f"Created {iteration_append_feature}")

                base_partition_selection = (
                    f"{root_file_partition_iterator}_{alias}"
                    f"_partition_base_select_{scale}"
                )
                # Features whose center falls inside the current partition.
                custom_arcpy.select_location_and_make_feature_layer(
                    input_layer=alias_input,
                    overlap_type=custom_arcpy.OverlapType.HAVE_THEIR_CENTER_IN.value,
                    select_features=iteration_partition,
                    output_name=base_partition_selection,
                )

                count_points = int(
                    arcpy.management.GetCount(base_partition_selection).getOutput(0)
                )
                # BUGFIX: the original compared the still-False boolean flag
                # (feature_present_in_partition > 0) instead of the count,
                # so the branch below never executed.
                feature_present_in_partition = count_points > 0
                if feature_present_in_partition:
                    print(
                        f"iteration partition {object_id} has "
                        f"{count_points} features for {alias}"
                    )

                    # Tag center features with 1 and collect them.
                    arcpy.CalculateField_management(
                        in_table=base_partition_selection,
                        field=partition_field,
                        expression="1",
                    )
                    arcpy.management.Append(
                        inputs=base_partition_selection,
                        target=iteration_append_feature,
                        schema_type="NO_TEST",
                    )

                    # Context features within 500 m of the partition,
                    # excluding those already selected as center features.
                    base_partition_selection_2 = (
                        f"{root_file_partition_iterator}_{alias}"
                        f"_partition_base_select_2_{scale}"
                    )
                    # BUGFIX: WITHIN_A_DISTANCE was missing .value, unlike
                    # every other OverlapType usage in this class.
                    custom_arcpy.select_location_and_make_feature_layer(
                        input_layer=alias_input,
                        overlap_type=custom_arcpy.OverlapType.WITHIN_A_DISTANCE.value,
                        select_features=iteration_partition,
                        output_name=base_partition_selection_2,
                        selection_type=custom_arcpy.SelectionType.NEW_SELECTION.value,
                        search_distance="500 Meters",
                    )

                    arcpy.management.SelectLayerByLocation(
                        in_layer=base_partition_selection_2,
                        overlap_type="HAVE_THEIR_CENTER_IN",
                        select_features=iteration_partition,
                        selection_type="REMOVE_FROM_SELECTION",
                    )

                    # Tag context features with 0.
                    arcpy.CalculateField_management(
                        in_table=base_partition_selection_2,
                        field=partition_field,
                        expression="0",
                    )
                    arcpy.management.Append(
                        inputs=base_partition_selection_2,
                        target=iteration_append_feature,
                        schema_type="NO_TEST",
                    )

                    # Apply the custom processing functions, threading each
                    # alias's current output through the chain.
                    # BUGFIX: the original iterated the nonexistent attribute
                    # self.input_feature_classes.
                    for func in self.custom_functions:
                        func_inputs = [
                            self.file_mapping[fc]["current_output"] or fc
                            for fc in self.alias
                        ]
                        outputs = func(func_inputs)
                        for fc, output in zip(self.alias, outputs):
                            self.file_mapping[fc]["current_output"] = output

                    if not arcpy.Exists(final_append_feature):
                        # Create the final output using the schema of the
                        # first iteration result.
                        arcpy.management.CreateFeatureclass(
                            out_path=os.path.dirname(final_append_feature),
                            out_name=os.path.basename(final_append_feature),
                            template=iteration_append_feature,
                        )
                        print(f"Created {final_append_feature}")

                    selected_features_from_partition = (
                        f"{root_file_partition_iterator}_{alias}"
                        f"_iteration_select_feature_from_partition_{scale}"
                    )
                    # Only center features (partition_field = 1) belong in
                    # the final result; context features were just support.
                    # BUGFIX: the original referenced the misspelled
                    # self.iteration_append_featur and other self.* names
                    # that were local variables.
                    custom_arcpy.select_attribute_and_make_feature_layer(
                        input_layer=iteration_append_feature,
                        expression=f"{partition_field} = 1",
                        output_name=selected_features_from_partition,
                    )

                    arcpy.management.Append(
                        inputs=selected_features_from_partition,
                        target=final_append_feature,
                        schema_type="NO_TEST",
                    )

    def run(self):
        """Execute the full partition-iteration workflow."""
        self.setup_arcpy_environment()
        self.create_cartographic_partitions()

        # BUGFIX: pre_iteration() now returns only the max OBJECTID; the
        # original assigned a (max_object_id, path) tuple to this variable,
        # which would have broken range() below.
        max_object_id = self.pre_iteration()

        # Initialize the file mapping for each alias.
        self.file_mapping = {alias: {"current_output": None} for alias in self.alias}

        self.prepare_input_data()

        # BUGFIX: partition_iteration already loops over every alias
        # internally, so it must be called once — the original called it
        # once per alias, duplicating all work.
        self.partition_iteration(
            None,  # per-alias inputs are resolved from self.file_mapping
            self.partition_feature,
            max_object_id,
            self.root_file_partition_iterator,
            self.scale,
            "partition_select",  # partition flag field added in prepare_input_data
            "id_field",  # original ID field added in prepare_input_data
            self.final_append_feature,
        )


if __name__ == "__main__":
    # Define the input feature classes and their aliases.
    inputs = {
        "building_points": f"{Building_N100.table_management__bygningspunkt_pre_resolve_building_conflicts__n100.value}",
        "building_polygons": f"{Building_N100.simplify_building_polygons__simplified_grunnriss__n100.value}",
    }

    # Instantiate PartitionIterator with necessary parameters.
    partition_iterator = PartitionIterator(
        inputs=inputs,
        root_file_partition_iterator=Building_N100.iteration__partition_iterator__n100.value,
        scale=setup_directory_structure.scale_n100,
        output_feature_class=Building_N100.iteration__partition_iterator_final_output__n100.value,
        # Add other parameters like custom_functions if you have any.
    )

    # Run the partition iterator.
    partition_iterator.run()
description="partition_iterator", + scale=scale, + ) + + iteration__partition_iterator_final_output__n100 = generate_file_name_gdb( + function_name=iteration, + description="partition_iterator_final_output", + scale=scale, + ) + iteration__iteration_partition__n100 = generate_file_name_gdb( function_name=iteration, description="iteration_partition", diff --git a/generalization/n100/building/create_cartographic_partitions.py b/generalization/n100/building/create_cartographic_partitions.py index 7424f11b..0bb12ddf 100644 --- a/generalization/n100/building/create_cartographic_partitions.py +++ b/generalization/n100/building/create_cartographic_partitions.py @@ -1,12 +1,9 @@ import arcpy import os -import gc -import config from env_setup import environment_setup from custom_tools import custom_arcpy from file_manager.n100.file_manager_buildings import Building_N100 -from input_data import input_n100 def main(): diff --git a/generalization/n100/building/iteration.py b/generalization/n100/building/iteration.py index 8ff43acc..88cdfa49 100644 --- a/generalization/n100/building/iteration.py +++ b/generalization/n100/building/iteration.py @@ -249,25 +249,8 @@ def iteration_partition( schema_type="NO_TEST", ) - selected_points_from_partition = f"{iteration_append_point_feature}_temp" - - custom_arcpy.select_attribute_and_make_feature_layer( - input_layer=iteration_append_point_feature, - expression=f"{partition_field} = 1", - output_name=selected_points_from_partition, - ) - - arcpy.management.Append( - inputs=selected_points_from_partition, - target=append_feature_building_points, - schema_type="NO_TEST", - ) - arcpy.management.Delete(selected_points_from_partition) - - print(f"appended selected points to {append_feature_building_points}") - building_polygon_base_partition_selection = f"{Building_N100.iteration__building_polygon_base_partition_selection__n100.value}_{object_id}" - building_polygon_present_partition = 
f"{Building_N100.iteration__building_points_base_partition_selection__n100.value}_{object_id}" + building_polygon_present_partition = f"{Building_N100.iteration__building_polygon_present_partition__n100.value}_{object_id}" custom_arcpy.select_location_and_make_feature_layer( input_layer=input_building_polygon, @@ -326,6 +309,27 @@ def iteration_partition( schema_type="NO_TEST", ) + selected_points_from_partition = f"{iteration_append_point_feature}_temp" + + ####################################### + # HERE I WOULD PUT LOGIC TO PROCESS ON building_points_base_partition_selection and building_polygon_base_partition_selection + ####################################### + + custom_arcpy.select_attribute_and_make_feature_layer( + input_layer=iteration_append_point_feature, + expression=f"{partition_field} = 1", + output_name=selected_points_from_partition, + ) + + arcpy.management.Append( + inputs=selected_points_from_partition, + target=append_feature_building_points, + schema_type="NO_TEST", + ) + arcpy.management.Delete(selected_points_from_partition) + + print(f"appended selected points to {append_feature_building_points}") + selected_polygon_from_partition = f"{iteration_append_polygon_feature}_temp" custom_arcpy.select_attribute_and_make_feature_layer( diff --git a/generalization/n100/building/points_to_polygon.py b/generalization/n100/building/points_to_polygon.py index 9342c17e..ef9ea206 100644 --- a/generalization/n100/building/points_to_polygon.py +++ b/generalization/n100/building/points_to_polygon.py @@ -1,26 +1,29 @@ from file_manager.n100.file_manager_buildings import Building_N100 from custom_tools.polygon_processor import PolygonProcessor -building_symbol_dimensions = { - 1: (145, 145), - 2: (145, 145), - 3: (195, 145), - 4: (40, 40), - 5: (80, 80), - 6: (30, 30), - 7: (45, 45), - 8: (45, 45), - 9: (53, 45), -} -polygon_processor = PolygonProcessor( - 
input_building_points=Building_N100.building_point_buffer_displacement__displaced_building_points__n100.value, - output_polygon_feature_class=Building_N100.points_to_polygon__transform_points_to_square_polygons__n100.value, - building_symbol_dimensions=building_symbol_dimensions, - symbol_field_name="symbol_val", - index_field_name="OBJECTID", -) +def main(): + building_symbol_dimensions = { + 1: (145, 145), + 2: (145, 145), + 3: (195, 145), + 4: (40, 40), + 5: (80, 80), + 6: (30, 30), + 7: (45, 45), + 8: (45, 45), + 9: (53, 45), + } + + polygon_processor = PolygonProcessor( + input_building_points=Building_N100.building_point_buffer_displacement__displaced_building_points__n100.value, + output_polygon_feature_class=Building_N100.points_to_polygon__transform_points_to_square_polygons__n100.value, + building_symbol_dimensions=building_symbol_dimensions, + symbol_field_name="symbol_val", + index_field_name="OBJECTID", + ) + polygon_processor.run() if __name__ == "__main__": - polygon_processor.run() + main()