diff --git a/python/Cargo.lock b/python/Cargo.lock
index fcc29183..d26e4803 100644
--- a/python/Cargo.lock
+++ b/python/Cargo.lock
@@ -830,7 +830,7 @@ dependencies = [
 [[package]]
 name = "flatgeobuf"
 version = "4.5.0"
-source = "git+https://github.com/kylebarron/flatgeobuf?rev=06e987d6d3d73edb95124a14cdaab9ee8e6e57ac#06e987d6d3d73edb95124a14cdaab9ee8e6e57ac"
+source = "git+https://github.com/flatgeobuf/flatgeobuf?rev=f7563617549f8ab0c111e83ee423996f100ddb0c#f7563617549f8ab0c111e83ee423996f100ddb0c"
 dependencies = [
  "byteorder",
  "bytes",
@@ -1390,8 +1390,9 @@ dependencies = [
 
 [[package]]
 name = "http-range-client"
-version = "0.8.0"
-source = "git+https://github.com/pka/http-range-client?rev=5699e32fafc416ce683bfbf1d179f80b0b6549a3#5699e32fafc416ce683bfbf1d179f80b0b6549a3"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3b0cb8b2a6444be75e1bb3bfa79911cae70865df20a36d7c70945273b13b641"
 dependencies = [
  "async-trait",
  "byteorder",
diff --git a/python/geoarrow-io/src/io/shapefile.rs b/python/geoarrow-io/src/io/shapefile.rs
index 45b1c753..a83efd96 100644
--- a/python/geoarrow-io/src/io/shapefile.rs
+++ b/python/geoarrow-io/src/io/shapefile.rs
@@ -4,7 +4,9 @@ use std::path::PathBuf;
 
 use crate::error::PyGeoArrowResult;
 use crate::util::to_arro3_table;
-use geoarrow::io::shapefile::{read_shapefile as _read_shapefile, ShapefileReaderOptions};
+use arrow::array::RecordBatchReader;
+use geoarrow::io::shapefile::{ShapefileReaderBuilder, ShapefileReaderOptions};
+use geoarrow::table::Table;
 use pyo3::prelude::*;
 use pyo3_arrow::export::Arro3Table;
 use pyo3_geoarrow::PyCoordType;
@@ -47,6 +49,17 @@ pub fn read_shapefile(
     let shp_file = BufReader::new(File::open(shp_path)?);
     let dbf_file = BufReader::new(File::open(dbf_path)?);
 
-    let table = _read_shapefile(shp_file, dbf_file, options)?;
+    let mut builder = ShapefileReaderBuilder::try_new(shp_file, dbf_file, options)?;
+    let reader = builder.read()?;
+
+    // Note: this fails because it's trying to cast to `'static` when passing to
+    // PyRecordBatchReader. We need to remove the `'a` lifetime in the core shapefile reader
+    // implementation, but to do that we need to change the iterators in the `shapefile` crate to
+    // be owning instead of borrowing.
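The note above refers to the lifetime of the reader returned by `builder.read()` later in this diff: it borrows from the builder, while an exported Python stream effectively needs an owned reader with no borrows. A minimal sketch of that constraint follows; `export_reader` is a hypothetical stand-in for illustration, not pyo3_arrow's actual API:

```rust
use arrow_array::RecordBatchReader;

// Hypothetical stand-in for an FFI export such as PyRecordBatchReader: it
// requires an owned reader with no borrowed data, i.e. a `'static` bound.
fn export_reader<R: RecordBatchReader + Send + 'static>(reader: R) {
    let _ = reader; // hand the stream off across the FFI boundary
}

// `builder.read()` returns `Box<dyn RecordBatchReader + 'a>`, which borrows the
// builder and so cannot satisfy a bound like the one above; hence the batches
// are collected eagerly in the workaround that follows.
```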
+    // Ok(PyRecordBatchReader::new(reader).into())
+
+    let schema = reader.schema();
+    let batches = reader.collect::<Result<Vec<_>, _>>()?;
+    let table = Table::try_new(batches, schema).unwrap();
     Ok(to_arro3_table(table))
 }
diff --git a/rust/geoarrow/src/io/shapefile/mod.rs b/rust/geoarrow/src/io/shapefile/mod.rs
index 868124c5..70bba2f0 100644
--- a/rust/geoarrow/src/io/shapefile/mod.rs
+++ b/rust/geoarrow/src/io/shapefile/mod.rs
@@ -5,4 +5,4 @@ mod reader;
 mod scalar;
 
-pub use reader::{read_shapefile, ShapefileReaderOptions};
+pub use reader::{ShapefileReaderBuilder, ShapefileReaderOptions};
diff --git a/rust/geoarrow/src/io/shapefile/reader.rs b/rust/geoarrow/src/io/shapefile/reader.rs
index 53f20aab..7651349b 100644
--- a/rust/geoarrow/src/io/shapefile/reader.rs
+++ b/rust/geoarrow/src/io/shapefile/reader.rs
@@ -1,21 +1,22 @@
 use std::io::{Read, Seek};
 use std::sync::Arc;
 
-use arrow_schema::{DataType, Field, Schema, SchemaRef};
+use arrow_array::{RecordBatch, RecordBatchReader};
+use arrow_schema::{ArrowError, DataType, Field, FieldRef, Schema, SchemaRef};
 use dbase::{FieldInfo, FieldType, FieldValue, Record};
 use geozero::FeatureProcessor;
+use shapefile::reader::ShapeRecordIterator;
 use shapefile::{Reader, ShapeReader, ShapeType};
 
 use crate::array::metadata::ArrayMetadata;
 use crate::array::{
     CoordType, MultiLineStringBuilder, MultiPointBuilder, MultiPolygonBuilder, PointBuilder,
 };
-use crate::datatypes::Dimension;
+use crate::datatypes::{Dimension, NativeType};
 use crate::error::{GeoArrowError, Result};
 use crate::io::geozero::table::builder::anyvalue::AnyBuilder;
 use crate::io::geozero::table::builder::properties::PropertiesBatchBuilder;
 use crate::io::geozero::table::{GeoTableBuilder, GeoTableBuilderOptions};
-use crate::table::Table;
 
 /// Options for the Shapefile reader
 #[derive(Debug, Clone, Default)]
@@ -31,274 +32,692 @@ pub struct ShapefileReaderOptions {
     pub crs: Option<String>,
 }
 
-// TODO:
-// stretch goal: return a record batch reader.
-/// Read a Shapefile into a [Table].
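Before the removed `read_shapefile` and its streaming replacement below, here is a sketch of how a Rust caller might drive the new API, mirroring the Python binding above: build a `ShapefileReaderBuilder`, call `read()` to get a `RecordBatchReader` that emits one `RecordBatch` per batch-size chunk, and drain it while the builder is alive. The file paths, the boxed error type, and the `arrow_array` import path are assumptions for illustration (the Python binding imports the trait as `arrow::array::RecordBatchReader`); only `ShapefileReaderBuilder`, `ShapefileReaderOptions`, and `Table` come from this diff:

```rust
use std::error::Error;
use std::fs::File;
use std::io::BufReader;

use arrow_array::RecordBatchReader;
use geoarrow::io::shapefile::{ShapefileReaderBuilder, ShapefileReaderOptions};
use geoarrow::table::Table;

fn read_to_table(shp_path: &str, dbf_path: &str) -> Result<Table, Box<dyn Error>> {
    let shp = BufReader::new(File::open(shp_path)?);
    let dbf = BufReader::new(File::open(dbf_path)?);

    // Options with default values; coord type, batch size, and CRS can be set here.
    let options = ShapefileReaderOptions::default();
    let mut builder = ShapefileReaderBuilder::try_new(shp, dbf, options)?;

    // The reader borrows `builder`, so drain it while the builder is still alive.
    let reader = builder.read()?;
    let schema = reader.schema();
    let batches = reader.collect::<Result<Vec<_>, _>>()?;
    Ok(Table::try_new(batches, schema)?)
}
```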
-pub fn read_shapefile( - shp_reader: T, - dbf_reader: T, - options: ShapefileReaderOptions, -) -> Result { - let dbf_reader = dbase::Reader::new(dbf_reader).unwrap(); - let shp_reader = ShapeReader::new(shp_reader).unwrap(); - - let header = shp_reader.header(); - - let dbf_fields = dbf_reader.fields().to_vec(); - let schema = infer_schema(&dbf_fields); - let geometry_type = header.shape_type; - - let features_count = dbf_reader.header().num_records as usize; - let features_count = if features_count > 0 { - Some(features_count) - } else { - None - }; - - let array_metadata = options - .crs - .map(ArrayMetadata::from_unknown_crs_type) - .unwrap_or_default(); +/// A builder for [ShapefileReader] +pub struct ShapefileReaderBuilder { + dbf_fields: Vec, + options: GeoTableBuilderOptions, + reader: Reader, + properties_schema: SchemaRef, + shape_type: ShapeType, +} - let table_builder_options = GeoTableBuilderOptions::new( - options.coord_type, - true, - options.batch_size, - Some(schema), - features_count, - Arc::new(array_metadata), - ); +impl ShapefileReaderBuilder { + pub fn try_new(shp_reader: T, dbf_reader: T, options: ShapefileReaderOptions) -> Result { + let dbf_reader = dbase::Reader::new(dbf_reader).unwrap(); + let shp_reader = ShapeReader::new(shp_reader).unwrap(); - let mut reader = Reader::new(shp_reader, dbf_reader); + let header = shp_reader.header(); - // TODO: these might work in a macro + let dbf_fields = dbf_reader.fields().to_vec(); + let properties_schema = infer_schema(&dbf_fields); + let shape_type = header.shape_type; - match geometry_type { - ShapeType::Point => { - let mut builder = GeoTableBuilder::::new_with_options( - Dimension::XY, - table_builder_options, - ); + let features_count = dbf_reader.header().num_records as usize; + let features_count = if features_count > 0 { + Some(features_count) + } else { + None + }; - for geom_and_record in - reader.iter_shapes_and_records_as::() - { - let (geom, record) = geom_and_record.unwrap(); + let array_metadata = options + .crs + .map(ArrayMetadata::from_unknown_crs_type) + .unwrap_or_default(); + + let table_builder_options = GeoTableBuilderOptions::new( + options.coord_type, + true, + options.batch_size, + Some(properties_schema.clone()), + features_count, + Arc::new(array_metadata), + ); + + let reader = Reader::new(shp_reader, dbf_reader); + + Ok(Self { + dbf_fields, + options: table_builder_options, + reader, + properties_schema, + shape_type, + }) + } - // Process properties - let prop_builder = builder.properties_builder_mut(); - prop_builder.add_record(record, &dbf_fields)?; + fn geometry_type(&self) -> Result { + let coord_type = self.options.coord_type; + match self.shape_type { + ShapeType::Point => Ok(NativeType::Point(coord_type, Dimension::XY)), + ShapeType::PointZ => Ok(NativeType::Point(coord_type, Dimension::XYZ)), + ShapeType::Multipoint => Ok(NativeType::MultiPoint(coord_type, Dimension::XY)), + ShapeType::MultipointZ => Ok(NativeType::MultiPoint(coord_type, Dimension::XYZ)), + ShapeType::Polyline => Ok(NativeType::MultiLineString(coord_type, Dimension::XY)), + ShapeType::PolylineZ => Ok(NativeType::MultiLineString(coord_type, Dimension::XYZ)), + ShapeType::Polygon => Ok(NativeType::MultiPolygon(coord_type, Dimension::XY)), + ShapeType::PolygonZ => Ok(NativeType::MultiPolygon(coord_type, Dimension::XYZ)), + t => Err(GeoArrowError::General(format!( + "Unsupported shapefile geometry type: {}", + t + ))), + } + } - // Hack to advance internal row number - builder.properties_end()?; + fn geometry_field(&self) 
-> Result { + Ok(Arc::new(self.geometry_type()?.to_field_with_metadata( + "geometry", + true, + &self.options.metadata, + ))) + } - let geom = super::scalar::Point::new(&geom); - builder.geom_builder().push_point(Some(&geom)); + fn schema(&self) -> Result { + let mut fields = self.properties_schema.fields().to_vec(); + fields.push(self.geometry_field()?); + Ok(Arc::new(Schema::new_with_metadata( + fields, + self.properties_schema.metadata().clone(), + ))) + } - // Hack to advance internal row number - builder.feature_end(0)?; + /// Create a [`RecordBatchReader`] from this Shapefile + pub fn read<'a>(&'a mut self) -> Result> { + let schema = self.schema()?; + let reader: Box = match self.shape_type { + ShapeType::Point => Box::new(PointReader { + iter: self + .reader + .iter_shapes_and_records_as::(), + options: self.options.clone(), + schema, + dbf_fields: &self.dbf_fields, + }), + ShapeType::PointZ => Box::new(PointZReader { + iter: self + .reader + .iter_shapes_and_records_as::(), + options: self.options.clone(), + schema, + dbf_fields: &self.dbf_fields, + }), + ShapeType::Multipoint => Box::new(MultipointReader { + iter: self + .reader + .iter_shapes_and_records_as::(), + options: self.options.clone(), + schema, + dbf_fields: &self.dbf_fields, + }), + ShapeType::MultipointZ => Box::new(MultipointZReader { + iter: self + .reader + .iter_shapes_and_records_as::(), + options: self.options.clone(), + schema, + dbf_fields: &self.dbf_fields, + }), + ShapeType::Polyline => Box::new(PolylineReader { + iter: self + .reader + .iter_shapes_and_records_as::(), + options: self.options.clone(), + schema, + dbf_fields: &self.dbf_fields, + }), + ShapeType::PolylineZ => Box::new(PolylineZReader { + iter: self + .reader + .iter_shapes_and_records_as::(), + options: self.options.clone(), + schema, + dbf_fields: &self.dbf_fields, + }), + ShapeType::Polygon => Box::new(PolygonReader { + iter: self + .reader + .iter_shapes_and_records_as::(), + options: self.options.clone(), + schema, + dbf_fields: &self.dbf_fields, + }), + ShapeType::PolygonZ => Box::new(PolygonZReader { + iter: self + .reader + .iter_shapes_and_records_as::(), + options: self.options.clone(), + schema, + dbf_fields: &self.dbf_fields, + }), + t => { + return Err(GeoArrowError::General(format!( + "Unsupported shapefile geometry type: {}", + t + ))) } - builder.finish() + }; + Ok(reader) + } +} + +/// Point Reader is infallible when pushing points with `push_point` +macro_rules! 
impl_point_reader { + ($reader_name:ident, $shapefile_ty:ty, $builder:ty, $dim:expr, $scalar_ty:ty, $push_func:ident) => { + struct $reader_name<'a, T: Read + Seek> { + iter: ShapeRecordIterator<'a, T, T, $shapefile_ty, Record>, + schema: SchemaRef, + options: GeoTableBuilderOptions, + dbf_fields: &'a [FieldInfo], } - ShapeType::PointZ => { - let mut builder = GeoTableBuilder::::new_with_options( - Dimension::XYZ, - table_builder_options, - ); - for geom_and_record in - reader.iter_shapes_and_records_as::() - { - let (geom, record) = geom_and_record.unwrap(); + impl $reader_name<'_, T> { + fn next_batch(&mut self) -> Result> { + let mut builder = + GeoTableBuilder::<$builder>::new_with_options($dim, self.options.clone()); - // Process properties - let prop_builder = builder.properties_builder_mut(); - prop_builder.add_record(record, &dbf_fields)?; + let mut row_count = 0; + loop { + if row_count >= self.options.batch_size { + let (batches, _schema) = builder.finish()?.into_inner(); + assert_eq!(batches.len(), 1); + return Ok(Some(batches.into_iter().next().unwrap())); + } - // Hack to advance internal row number - builder.properties_end()?; + if let Some(feature) = self.iter.next() { + let (geom, record) = feature.unwrap(); - let geom = super::scalar::PointZ::new(&geom); - builder.geom_builder().push_point(Some(&geom)); + // Process properties + let prop_builder = builder.properties_builder_mut(); + prop_builder.add_record(record, self.dbf_fields)?; - // Hack to advance internal row number - builder.feature_end(0)?; - } - builder.finish() - } - ShapeType::Multipoint => { - let mut builder = GeoTableBuilder::::new_with_options( - Dimension::XY, - table_builder_options, - ); + // Hack to advance internal row number + builder.properties_end()?; - for geom_and_record in - reader.iter_shapes_and_records_as::() - { - let (geom, record) = geom_and_record.unwrap(); + let geom = <$scalar_ty>::new(&geom); + builder.geom_builder().$push_func(Some(&geom)); - // Process properties - let prop_builder = builder.properties_builder_mut(); - prop_builder.add_record(record, &dbf_fields)?; + // Hack to advance internal row number + builder.feature_end(0)?; - // Hack to advance internal row number - builder.properties_end()?; + row_count += 1; + } else { + return Ok(None); + } + } + } + } - let geom = super::scalar::MultiPoint::new(&geom); - builder.geom_builder().push_multi_point(Some(&geom))?; + impl Iterator for $reader_name<'_, T> { + type Item = std::result::Result; - // Hack to advance internal row number - builder.feature_end(0)?; + fn next(&mut self) -> Option { + self.next_batch() + .map_err(|err| ArrowError::ExternalError(Box::new(err))) + .transpose() } - builder.finish() } - ShapeType::MultipointZ => { - let mut builder = GeoTableBuilder::::new_with_options( - Dimension::XYZ, - table_builder_options, - ); - for geom_and_record in - reader.iter_shapes_and_records_as::() - { - let (geom, record) = geom_and_record.unwrap(); + impl RecordBatchReader for $reader_name<'_, T> { + fn schema(&self) -> SchemaRef { + self.schema.clone() + } + } + }; +} - // Process properties - let prop_builder = builder.properties_builder_mut(); - prop_builder.add_record(record, &dbf_fields)?; +impl_point_reader!( + PointReader, + shapefile::Point, + PointBuilder, + Dimension::XY, + super::scalar::Point, + push_point +); +impl_point_reader!( + PointZReader, + shapefile::PointZ, + PointBuilder, + Dimension::XYZ, + super::scalar::PointZ, + push_point +); + +macro_rules! 
impl_multipoint_polyline_reader { + ($reader_name:ident, $shapefile_ty:ty, $builder:ty, $dim:expr, $scalar_ty:ty, $push_func:ident) => { + struct $reader_name<'a, T: Read + Seek> { + iter: ShapeRecordIterator<'a, T, T, $shapefile_ty, Record>, + schema: SchemaRef, + options: GeoTableBuilderOptions, + dbf_fields: &'a [FieldInfo], + } - // Hack to advance internal row number - builder.properties_end()?; + impl $reader_name<'_, T> { + fn next_batch(&mut self) -> Result> { + let mut builder = + GeoTableBuilder::<$builder>::new_with_options($dim, self.options.clone()); - let geom = super::scalar::MultiPointZ::new(&geom); - builder.geom_builder().push_multi_point(Some(&geom))?; + let mut row_count = 0; + loop { + if row_count >= self.options.batch_size { + let (batches, _schema) = builder.finish()?.into_inner(); + assert_eq!(batches.len(), 1); + return Ok(Some(batches.into_iter().next().unwrap())); + } - // Hack to advance internal row number - builder.feature_end(0)?; - } - builder.finish() - } - ShapeType::Polyline => { - let mut builder = GeoTableBuilder::::new_with_options( - Dimension::XY, - table_builder_options, - ); + if let Some(feature) = self.iter.next() { + let (geom, record) = feature.unwrap(); - for geom_and_record in - reader.iter_shapes_and_records_as::() - { - let (geom, record) = geom_and_record.unwrap(); + // Process properties + let prop_builder = builder.properties_builder_mut(); + prop_builder.add_record(record, self.dbf_fields)?; - // Process properties - let prop_builder = builder.properties_builder_mut(); - prop_builder.add_record(record, &dbf_fields)?; + // Hack to advance internal row number + builder.properties_end()?; - // Hack to advance internal row number - builder.properties_end()?; + let geom = <$scalar_ty>::new(&geom); + builder.geom_builder().$push_func(Some(&geom))?; - let geom = super::scalar::Polyline::new(&geom); - builder.geom_builder().push_multi_line_string(Some(&geom))?; + // Hack to advance internal row number + builder.feature_end(0)?; - // Hack to advance internal row number - builder.feature_end(0)?; + row_count += 1; + } else { + return Ok(None); + } + } } - builder.finish() } - ShapeType::PolylineZ => { - let mut builder = GeoTableBuilder::::new_with_options( - Dimension::XYZ, - table_builder_options, - ); - - for geom_and_record in - reader.iter_shapes_and_records_as::() - { - let (geom, record) = geom_and_record.unwrap(); - - // Process properties - let prop_builder = builder.properties_builder_mut(); - prop_builder.add_record(record, &dbf_fields)?; - - // Hack to advance internal row number - builder.properties_end()?; - let geom = super::scalar::PolylineZ::new(&geom); - builder.geom_builder().push_multi_line_string(Some(&geom))?; + impl Iterator for $reader_name<'_, T> { + type Item = std::result::Result; - // Hack to advance internal row number - builder.feature_end(0)?; + fn next(&mut self) -> Option { + self.next_batch() + .map_err(|err| ArrowError::ExternalError(Box::new(err))) + .transpose() } - builder.finish() } - ShapeType::Polygon => { - let mut builder = GeoTableBuilder::::new_with_options( - Dimension::XY, - table_builder_options, - ); - for geom_and_record in - reader.iter_shapes_and_records_as::() - { - let (geom, record) = geom_and_record.unwrap(); + impl RecordBatchReader for $reader_name<'_, T> { + fn schema(&self) -> SchemaRef { + self.schema.clone() + } + } + }; +} - // Process properties - let prop_builder = builder.properties_builder_mut(); - prop_builder.add_record(record, &dbf_fields)?; +impl_multipoint_polyline_reader!( 
+ MultipointReader, + shapefile::Multipoint, + MultiPointBuilder, + Dimension::XY, + super::scalar::MultiPoint, + push_multi_point +); +impl_multipoint_polyline_reader!( + MultipointZReader, + shapefile::MultipointZ, + MultiPointBuilder, + Dimension::XYZ, + super::scalar::MultiPointZ, + push_multi_point +); +impl_multipoint_polyline_reader!( + PolylineReader, + shapefile::Polyline, + MultiLineStringBuilder, + Dimension::XY, + super::scalar::Polyline, + push_multi_line_string +); +impl_multipoint_polyline_reader!( + PolylineZReader, + shapefile::PolylineZ, + MultiLineStringBuilder, + Dimension::XYZ, + super::scalar::PolylineZ, + push_multi_line_string +); + +/// Polygon Reader takes `geom` by value in +/// `super::scalar::MultiPolygon::new` +macro_rules! impl_polygon_reader { + ($reader_name:ident, $shapefile_ty:ty, $builder:ty, $dim:expr, $scalar_ty:ty, $push_func:ident) => { + struct $reader_name<'a, T: Read + Seek> { + iter: ShapeRecordIterator<'a, T, T, $shapefile_ty, Record>, + schema: SchemaRef, + options: GeoTableBuilderOptions, + dbf_fields: &'a [FieldInfo], + } - // Hack to advance internal row number - builder.properties_end()?; + impl $reader_name<'_, T> { + fn next_batch(&mut self) -> Result> { + let mut builder = + GeoTableBuilder::<$builder>::new_with_options($dim, self.options.clone()); - let geom = super::scalar::MultiPolygon::new(geom); - builder.geom_builder().push_multi_polygon(Some(&geom))?; + let mut row_count = 0; + loop { + if row_count >= self.options.batch_size { + let (batches, _schema) = builder.finish()?.into_inner(); + assert_eq!(batches.len(), 1); + return Ok(Some(batches.into_iter().next().unwrap())); + } - // Hack to advance internal row number - builder.feature_end(0)?; - } - builder.finish() - } - ShapeType::PolygonZ => { - let mut builder = GeoTableBuilder::::new_with_options( - Dimension::XYZ, - table_builder_options, - ); + if let Some(feature) = self.iter.next() { + let (geom, record) = feature.unwrap(); - for geom_and_record in - reader.iter_shapes_and_records_as::() - { - let (geom, record) = geom_and_record.unwrap(); + // Process properties + let prop_builder = builder.properties_builder_mut(); + prop_builder.add_record(record, self.dbf_fields)?; - // Process properties - let prop_builder = builder.properties_builder_mut(); - prop_builder.add_record(record, &dbf_fields)?; + // Hack to advance internal row number + builder.properties_end()?; - // Hack to advance internal row number - builder.properties_end()?; + let geom = <$scalar_ty>::new(geom); + builder.geom_builder().$push_func(Some(&geom))?; - let geom = super::scalar::MultiPolygonZ::new(geom); - builder.geom_builder().push_multi_polygon(Some(&geom))?; + // Hack to advance internal row number + builder.feature_end(0)?; - // Hack to advance internal row number - builder.feature_end(0)?; + row_count += 1; + } else { + return Ok(None); + } + } } - builder.finish() } - t => Err(GeoArrowError::General(format!( - "Unsupported shapefile geometry type: {}", - t - ))), - } - // ?; - // // Assign CRS onto the table - // let geom_col_idx = table.default_geometry_column_idx()?; - // let col = table.geometry_column(Some(geom_col_idx))?; - // let field = col.data_type().to_field_with_metadata("geometry", true, &array_metadata); + impl Iterator for $reader_name<'_, T> { + type Item = std::result::Result; - // table.remove_column(geom_col_idx); - // table.append_column(field.into(), col.array_refs())?; - // Ok(table) + fn next(&mut self) -> Option { + self.next_batch() + .map_err(|err| 
ArrowError::ExternalError(Box::new(err))) + .transpose() + } + } + + impl RecordBatchReader for $reader_name<'_, T> { + fn schema(&self) -> SchemaRef { + self.schema.clone() + } + } + }; } +impl_polygon_reader!( + PolygonReader, + shapefile::Polygon, + MultiPolygonBuilder, + Dimension::XY, + super::scalar::MultiPolygon, + push_multi_polygon +); +impl_polygon_reader!( + PolygonZReader, + shapefile::PolygonZ, + MultiPolygonBuilder, + Dimension::XYZ, + super::scalar::MultiPolygonZ, + push_multi_polygon +); + +// // TODO: +// // stretch goal: return a record batch reader. +// /// Read a Shapefile into a [Table]. +// pub fn read_shapefile( +// shp_reader: T, +// dbf_reader: T, +// options: ShapefileReaderOptions, +// ) -> Result
{ +// let dbf_reader = dbase::Reader::new(dbf_reader).unwrap(); +// let shp_reader = ShapeReader::new(shp_reader).unwrap(); + +// let header = shp_reader.header(); + +// let dbf_fields = dbf_reader.fields().to_vec(); +// let schema = infer_schema(&dbf_fields); +// let geometry_type = header.shape_type; + +// let features_count = dbf_reader.header().num_records as usize; +// let features_count = if features_count > 0 { +// Some(features_count) +// } else { +// None +// }; + +// let array_metadata = options +// .crs +// .map(ArrayMetadata::from_unknown_crs_type) +// .unwrap_or_default(); + +// let table_builder_options = GeoTableBuilderOptions::new( +// options.coord_type, +// true, +// options.batch_size, +// Some(schema), +// features_count, +// Arc::new(array_metadata), +// ); + +// let mut reader = Reader::new(shp_reader, dbf_reader); + +// // TODO: these might work in a macro + +// match geometry_type { +// ShapeType::Point => { +// let mut builder = GeoTableBuilder::::new_with_options( +// Dimension::XY, +// table_builder_options, +// ); + +// for geom_and_record in +// reader.iter_shapes_and_records_as::() +// { +// let (geom, record) = geom_and_record.unwrap(); + +// // Process properties +// let prop_builder = builder.properties_builder_mut(); +// prop_builder.add_record(record, &dbf_fields)?; + +// // Hack to advance internal row number +// builder.properties_end()?; + +// let geom = super::scalar::Point::new(&geom); +// builder.geom_builder().push_point(Some(&geom)); + +// // Hack to advance internal row number +// builder.feature_end(0)?; +// } +// builder.finish() +// } +// ShapeType::PointZ => { +// let mut builder = GeoTableBuilder::::new_with_options( +// Dimension::XYZ, +// table_builder_options, +// ); + +// for geom_and_record in +// reader.iter_shapes_and_records_as::() +// { +// let (geom, record) = geom_and_record.unwrap(); + +// // Process properties +// let prop_builder = builder.properties_builder_mut(); +// prop_builder.add_record(record, &dbf_fields)?; + +// // Hack to advance internal row number +// builder.properties_end()?; + +// let geom = super::scalar::PointZ::new(&geom); +// builder.geom_builder().push_point(Some(&geom)); + +// // Hack to advance internal row number +// builder.feature_end(0)?; +// } +// builder.finish() +// } +// ShapeType::Multipoint => { +// let mut builder = GeoTableBuilder::::new_with_options( +// Dimension::XY, +// table_builder_options, +// ); + +// for geom_and_record in +// reader.iter_shapes_and_records_as::() +// { +// let (geom, record) = geom_and_record.unwrap(); + +// // Process properties +// let prop_builder = builder.properties_builder_mut(); +// prop_builder.add_record(record, &dbf_fields)?; + +// // Hack to advance internal row number +// builder.properties_end()?; + +// let geom = super::scalar::MultiPoint::new(&geom); +// builder.geom_builder().push_multi_point(Some(&geom))?; + +// // Hack to advance internal row number +// builder.feature_end(0)?; +// } +// builder.finish() +// } +// ShapeType::MultipointZ => { +// let mut builder = GeoTableBuilder::::new_with_options( +// Dimension::XYZ, +// table_builder_options, +// ); + +// for geom_and_record in +// reader.iter_shapes_and_records_as::() +// { +// let (geom, record) = geom_and_record.unwrap(); + +// // Process properties +// let prop_builder = builder.properties_builder_mut(); +// prop_builder.add_record(record, &dbf_fields)?; + +// // Hack to advance internal row number +// builder.properties_end()?; + +// let geom = super::scalar::MultiPointZ::new(&geom); +// 
builder.geom_builder().push_multi_point(Some(&geom))?; + +// // Hack to advance internal row number +// builder.feature_end(0)?; +// } +// builder.finish() +// } +// ShapeType::Polyline => { +// let mut builder = GeoTableBuilder::::new_with_options( +// Dimension::XY, +// table_builder_options, +// ); + +// for geom_and_record in +// reader.iter_shapes_and_records_as::() +// { +// let (geom, record) = geom_and_record.unwrap(); + +// // Process properties +// let prop_builder = builder.properties_builder_mut(); +// prop_builder.add_record(record, &dbf_fields)?; + +// // Hack to advance internal row number +// builder.properties_end()?; + +// let geom = super::scalar::Polyline::new(&geom); +// builder.geom_builder().push_multi_line_string(Some(&geom))?; + +// // Hack to advance internal row number +// builder.feature_end(0)?; +// } +// builder.finish() +// } +// ShapeType::PolylineZ => { +// let mut builder = GeoTableBuilder::::new_with_options( +// Dimension::XYZ, +// table_builder_options, +// ); + +// for geom_and_record in +// reader.iter_shapes_and_records_as::() +// { +// let (geom, record) = geom_and_record.unwrap(); + +// // Process properties +// let prop_builder = builder.properties_builder_mut(); +// prop_builder.add_record(record, &dbf_fields)?; + +// // Hack to advance internal row number +// builder.properties_end()?; + +// let geom = super::scalar::PolylineZ::new(&geom); +// builder.geom_builder().push_multi_line_string(Some(&geom))?; + +// // Hack to advance internal row number +// builder.feature_end(0)?; +// } +// builder.finish() +// } +// ShapeType::Polygon => { +// let mut builder = GeoTableBuilder::::new_with_options( +// Dimension::XY, +// table_builder_options, +// ); + +// for geom_and_record in +// reader.iter_shapes_and_records_as::() +// { +// let (geom, record) = geom_and_record.unwrap(); + +// // Process properties +// let prop_builder = builder.properties_builder_mut(); +// prop_builder.add_record(record, &dbf_fields)?; + +// // Hack to advance internal row number +// builder.properties_end()?; + +// let geom = super::scalar::MultiPolygon::new(geom); +// builder.geom_builder().push_multi_polygon(Some(&geom))?; + +// // Hack to advance internal row number +// builder.feature_end(0)?; +// } +// builder.finish() +// } +// ShapeType::PolygonZ => { +// let mut builder = GeoTableBuilder::::new_with_options( +// Dimension::XYZ, +// table_builder_options, +// ); + +// for geom_and_record in +// reader.iter_shapes_and_records_as::() +// { +// let (geom, record) = geom_and_record.unwrap(); + +// // Process properties +// let prop_builder = builder.properties_builder_mut(); +// prop_builder.add_record(record, &dbf_fields)?; + +// // Hack to advance internal row number +// builder.properties_end()?; + +// let geom = super::scalar::MultiPolygonZ::new(geom); +// builder.geom_builder().push_multi_polygon(Some(&geom))?; + +// // Hack to advance internal row number +// builder.feature_end(0)?; +// } +// builder.finish() +// } +// t => Err(GeoArrowError::General(format!( +// "Unsupported shapefile geometry type: {}", +// t +// ))), +// } +// } + impl PropertiesBatchBuilder { fn add_record(&mut self, record: Record, fields: &[FieldInfo]) -> Result<()> { for field_info in fields {