From ab7a639707f53b4eee427b8d9ef209e0759adc40 Mon Sep 17 00:00:00 2001 From: steve <82342664+stevegerrits@users.noreply.github.com> Date: Tue, 17 Dec 2024 18:31:15 +0100 Subject: [PATCH 01/12] dec release 2 (#275) --- .../ObservationDetailsComponent.vue | 54 ++--- .../observations/tasks/observation_mapper.py | 35 ++- .../observations/tasks/observation_sync.py | 2 +- vespadb/observations/views.py | 214 ++++++++++-------- 4 files changed, 169 insertions(+), 136 deletions(-) diff --git a/src/components/ObservationDetailsComponent.vue b/src/components/ObservationDetailsComponent.vue index 8321eb1..f860cb5 100644 --- a/src/components/ObservationDetailsComponent.vue +++ b/src/components/ObservationDetailsComponent.vue @@ -205,53 +205,42 @@ Volgende -
+              {{ selectedObservation.nest_type ? nestTypeEnum[selectedObservation.nest_type] : 'Geen' }}
+              {{ selectedObservation.nest_location ? nestLocationEnum[selectedObservation.nest_location] : 'Geen' }}
+              {{ selectedObservation.nest_size ? nestSizeEnum[selectedObservation.nest_size] : 'Geen' }}
+              {{ selectedObservation.nest_height ? nestHeightEnum[selectedObservation.nest_height] : 'Geen' }}
@@ -322,7 +311,8 @@
             {{ selectedObservation.observer_phone_number }}
@@ -498,10 +488,6 @@ export default { }; const editableFields = [ - "nest_height", - "nest_size", - "nest_location", - "nest_type", "observation_datetime", "eradication_date", "admin_notes", diff --git a/vespadb/observations/tasks/observation_mapper.py b/vespadb/observations/tasks/observation_mapper.py index d815c6f..6121c04 100644 --- a/vespadb/observations/tasks/observation_mapper.py +++ b/vespadb/observations/tasks/observation_mapper.py @@ -2,7 +2,6 @@ import logging from datetime import datetime -from difflib import get_close_matches from typing import Any, cast import pytz @@ -28,8 +27,10 @@ mapping_dict: dict[int, dict[str, str]] = { 329: { + "hoger_dan_4_meter": "hoger_dan_4_meter", "Hoger dan 4 meter": "hoger_dan_4_meter", "Higher than 4 meters": "hoger_dan_4_meter", + "lager_dan_4_meter": "lager_dan_4_meter", "Lager dan 4 meter": "lager_dan_4_meter", "Lower than 4 meters": "lager_dan_4_meter", }, @@ -39,7 +40,7 @@ "Larger than 25cm": "groter_dan_25_cm", "Smaller than 25cm": "kleiner_dan_25_cm", }, - 331 : { + 331: { "Binnen, in gebouw of constructie": "binnen_in_gebouw_of_constructie", "Buiten, maar overdekt door constructie": "buiten_maar_overdekt_door_constructie", "Buiten, natuurlijk overdekt": "buiten_natuurlijk_overdekt", @@ -50,7 +51,28 @@ "Outside, natural cover": "buiten_natuurlijk_overdekt", "Outside, uncovered in a tree or bush": "buiten_onbedekt_in_boom_of_struik", "Outside, uncovered on building": "buiten_onbedekt_op_gebouw", - } + }, + 368: { + "Actief embryonaal nest": "actief_embryonaal_nest", + "Actief embryonaal nest (van maart tot eind juni, nest met enkel koningin)": "actief_embryonaal_nest", + "Actief primair nest": "actief_primair_nest", + "Actief primair nest (van juni tot eind november, nest met werksters op lage hoogte (tot 6 meter))": "actief_primair_nest", + "Actief secundair nest": "actief_secundair_nest", + "Actief secundair nest (van augustus tot eind november, nest met werksters op grote hoogte (tot 30m))": "actief_secundair_nest", + "Active embryonic nest": "actief_embryonaal_nest", + "Active embryonic nest (from march to the end of june, nest with queen only)": "actief_embryonaal_nest", + "Active primary nest": "actief_primair_nest", + "Active primary nest (from june to the end of november, nest with workers at low altitude (up to 6m))": "actief_primair_nest", + "Active secondary nest": "actief_secundair_nest", + "Active secondary nest (from aug. 
to the end of nov., nest with workers at high altitude (up to 30m))": "actief_secundair_nest", + "Inactief/leeg nest (typisch tijdens wintermaanden, een leeg nest hoog in een boom)": "inactief_leeg_nest", + "Inactief/leeg nest (typisch tijdens wintermaanden, een leeg netst oog in een boom)": "inactief_leeg_nest", + "Inactive/empty nest": "inactief_leeg_nest", + "Inactive/empty nest (typically during the winter months, an empty nest high in a tree)": "inactief_leeg_nest", + "Potential nest": None, + "Potentieel nest": None, + "Potentieel nest (onzeker van de soort)": None, + }, } ENUMS_MAPPING: dict[str, type[TextChoices]] = { @@ -67,6 +89,7 @@ 329: "nest_height", 330: "nest_size", 331: "nest_location", + 368: "nest_type", } # Literal mapping functions def map_nest_height_attribute_to_enum(value: str) -> Any | None: @@ -81,6 +104,10 @@ def map_nest_location_attribute_to_enum(value: str) -> str | None: """Maps Nest location values to enums based on literal mapping.""" return mapping_dict[331].get(value.strip()) +def map_nest_type_attribute_to_enum(value: str) -> str | None: + """Maps Nest location values to enums based on literal mapping.""" + return mapping_dict[368].get(value.strip()) + def map_attribute_to_enum(attribute_id: int, value: str) -> str | None: """ Maps a single attribute value to an enum using literal mapping functions. @@ -91,6 +118,8 @@ def map_attribute_to_enum(attribute_id: int, value: str) -> str | None: return map_nest_size_attribute_to_enum(value) elif attribute_id == 331: return map_nest_location_attribute_to_enum(value) + elif attribute_id == 368: + return map_nest_type_attribute_to_enum(value) else: return None diff --git a/vespadb/observations/tasks/observation_sync.py b/vespadb/observations/tasks/observation_sync.py index e234ea2..332e632 100644 --- a/vespadb/observations/tasks/observation_sync.py +++ b/vespadb/observations/tasks/observation_sync.py @@ -325,4 +325,4 @@ def fetch_and_update_observations(self: Task, since_week: int | None = None, dat logger.info("Finished processing observations") manage_observations_visibility(token) - logger.info("Finished managing observations visibility") + logger.info("Finished managing observations visibility") \ No newline at end of file diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index 41b95d6..7246067 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -9,6 +9,9 @@ import csv import json from typing import TYPE_CHECKING, Any, Generator, Any, Union +from django.http import FileResponse +import os +import tempfile from django.contrib.gis.db.models.functions import Transform from django.contrib.gis.geos import GEOSGeometry @@ -62,13 +65,14 @@ BBOX_LENGTH = 4 GEOJSON_REDIS_CACHE_EXPIRATION = 900 # 15 minutes GET_REDIS_CACHE_EXPIRATION = 86400 # 1 day -BATCH_SIZE = 150 CSV_HEADERS = [ "id", "created_datetime", "modified_datetime", "latitude", "longitude", "source", "source_id", "nest_height", "nest_size", "nest_location", "nest_type", "observation_datetime", "province", "eradication_date", "municipality", "images", "anb_domain", "notes", "eradication_result", "wn_id", "wn_validation_status", "nest_status" ] +BATCH_SIZE = 1000 + class ObservationsViewSet(ModelViewSet): # noqa: PLR0904 """ViewSet for the Observation model.""" @@ -642,127 +646,141 @@ def save_observations(self, valid_data: list[dict[str, Any]]) -> Response: ) @method_decorator(ratelimit(key="ip", rate="60/m", method="GET", block=True)) @action(detail=False, methods=["get"], permission_classes=[AllowAny]) 
- def export(self, request: HttpRequest) -> Union[StreamingHttpResponse, JsonResponse]: + def export(self, request: HttpRequest) -> FileResponse: """ - Export observations as CSV with dynamically controlled fields based on user permissions. - - Observations from municipalities the user has access to will display full details; - others will show limited fields as per public access. + Export observations as CSV using batch processing. """ - if request.query_params.get("export_format", "csv").lower() != "csv": - return JsonResponse({"error": "Only CSV export is supported"}, status=400) - - # Determine user permissions - if request.user.is_authenticated: - user_municipality_ids = set(request.user.municipalities.values_list("id", flat=True)) - is_admin = request.user.is_superuser - else: - user_municipality_ids = set() - is_admin = False - - # Set CSV headers directly from CSV_HEADERS as a base - dynamic_csv_headers = CSV_HEADERS - - # Prepare response - queryset = self.filter_queryset(self.get_queryset()) - response = StreamingHttpResponse( - self.generate_csv_rows(queryset, dynamic_csv_headers, user_municipality_ids, is_admin), - content_type="text/csv" - ) - response["Content-Disposition"] = 'attachment; filename="observations_export.csv"' - return response - - def generate_csv_rows( - self, queryset: QuerySet, headers: list[str], user_municipality_ids: set, is_admin: bool - ) -> Generator[bytes, None, None]: - """Generate CSV rows with headers and filtered data according to user permissions.""" - # Yield headers - yield self._csv_line(headers) - - for observation in queryset.iterator(chunk_size=500): - # Determine fields to include based on user permissions for each observation - if is_admin or (observation.municipality_id in user_municipality_ids): - # Full access for admins and assigned municipalities - allowed_fields = user_read_fields + try: + # Validate export format + if request.query_params.get("export_format", "csv").lower() != "csv": + return JsonResponse({"error": "Only CSV export is supported"}, status=400) + + # Get user permissions + if request.user.is_authenticated: + user_municipality_ids = set(request.user.municipalities.values_list("id", flat=True)) + is_admin = request.user.is_superuser else: - # Restricted access for other municipalities - allowed_fields = public_read_fields - - # Add essential fields for export - allowed_fields.extend(["source_id", "latitude", "longitude", "anb_domain", "nest_status"]) + user_municipality_ids = set() + is_admin = False - # Serialize the observation with restricted fields as needed - row = self.serialize_observation(observation, headers, allowed_fields) - yield self._csv_line(row) + # Get filtered queryset + queryset = self.filter_queryset(self.get_queryset()) - def parse_location(self, srid_str: str) -> tuple[float, float]: + # Create temporary file + with tempfile.NamedTemporaryFile(mode='w+', newline='', delete=False, suffix='.csv') as temp_file: + writer = csv.writer(temp_file) + + # Write headers + writer.writerow(CSV_HEADERS) + + # Process in batches + total_processed = 0 + while True: + # Get batch of observations + batch = queryset[total_processed:total_processed + BATCH_SIZE] + if not batch: + break + + # Process each observation in the batch + for observation in batch: + row_data = self._prepare_row_data( + observation, + is_admin, + user_municipality_ids + ) + writer.writerow(row_data) + + total_processed += len(batch) + logger.info(f"Processed {total_processed} observations") + + # Create response with the temporary file + 
response = FileResponse( + open(temp_file.name, 'rb'), + content_type='text/csv', + as_attachment=True, + filename=f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" + ) + + # Delete the temporary file after it's sent + response.close = lambda: os.unlink(temp_file.name) + + return response + + except Exception as e: + logger.exception("Export failed") + return JsonResponse( + {"error": "Export failed. Please try again or contact support."}, + status=500 + ) + def _prepare_row_data( + self, + observation: Observation, + is_admin: bool, + user_municipality_ids: set[str] + ) -> list[str]: """ - Parse SRID string to extract latitude and longitude. + Prepare a single row of data for the CSV export. """ - # Convert the SRID location string to GEOSGeometry - geom = GEOSGeometry(srid_str) + # Determine allowed fields based on permissions + if is_admin or (observation.municipality_id in user_municipality_ids): + allowed_fields = user_read_fields + else: + allowed_fields = public_read_fields + + # Add essential fields + allowed_fields.extend(["source_id", "latitude", "longitude", "anb_domain", "nest_status"]) - # Extract latitude and longitude - longitude = geom.x - latitude = geom.y - return latitude, longitude - - def serialize_observation(self, obj: Observation, headers: list[str], allowed_fields: list[str]) -> list[str]: - """Serialize an observation for CSV export with specified fields.""" - data = [] - for field in headers: + row_data = [] + for field in CSV_HEADERS: if field not in allowed_fields: - data.append("") # Add empty string for restricted fields + row_data.append("") continue - # Handle custom formatting for certain fields - if field == "latitude" or field == "longitude": - if obj.location: - srid_location_str = f"SRID=4326;POINT ({obj.location.x} {obj.location.y})" - latitude, longitude = self.parse_location(srid_location_str) - logger.info('Latitude: %s, Longitude: %s', latitude, longitude) - if field == "latitude": - data.append(str(latitude)) - elif field == "longitude": - data.append(str(longitude)) - else: - data.append("") + # Handle special fields + if field == "latitude": + row_data.append(str(observation.location.y) if observation.location else "") + elif field == "longitude": + row_data.append(str(observation.location.x) if observation.location else "") elif field in ["created_datetime", "modified_datetime", "observation_datetime"]: - datetime_val = getattr(obj, field, None) + datetime_val = getattr(observation, field, None) if datetime_val: - # Remove milliseconds and ensure ISO format with 'Z' datetime_val = datetime_val.replace(microsecond=0) - # Convert to ISO format and replace +00:00 with Z if present - iso_datetime = datetime_val.isoformat() - if iso_datetime.endswith('+00:00'): - iso_datetime = iso_datetime[:-6] + 'Z' - elif not iso_datetime.endswith('Z'): - iso_datetime += 'Z' - data.append(iso_datetime) + row_data.append(datetime_val.isoformat() + "Z") else: - data.append("") + row_data.append("") elif field == "province": - data.append(obj.province.name if obj.province else "") + row_data.append(observation.province.name if observation.province else "") elif field == "municipality": - data.append(obj.municipality.name if obj.municipality else "") + row_data.append(observation.municipality.name if observation.municipality else "") elif field == "anb_domain": - data.append(str(obj.anb)) - elif field == "eradication_result": - data.append(obj.eradication_result if obj.eradication_result else "") + row_data.append(str(observation.anb)) 
elif field == "nest_status": - logger.info("Getting status for observation %s", obj.eradication_result) - data.append(self.get_status(obj)) + row_data.append(self.get_status(observation)) elif field == "source_id": - data.append(str(obj.source_id) if obj.source_id is not None else "") + row_data.append(str(observation.source_id) if observation.source_id is not None else "") else: - value = getattr(obj, field, "") - data.append(str(value) if value is not None else "") - return data + value = getattr(observation, field, "") + row_data.append(str(value) if value is not None else "") + + return row_data + + + def parse_location(self, srid_str: str) -> tuple[float, float]: + """ + Parse SRID string to extract latitude and longitude. + """ + # Convert the SRID location string to GEOSGeometry + geom = GEOSGeometry(srid_str) + + # Extract latitude and longitude + longitude = geom.x + latitude = geom.y + return latitude, longitude def get_status(self, observation: Observation) -> str: """Determine observation status based on eradication data.""" - logger.info("Getting status for observation %s", observation.eradication_result) - if observation.eradication_result == EradicationResultEnum.SUCCESSFUL: + logger.debug("Getting status for observation %s", observation.eradication_result) + if observation.eradication_result: return "eradicated" if observation.reserved_by: return "reserved" From 392801996eda2d3fc0e4c0de98c71cc8236ce39e Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Tue, 17 Dec 2024 18:46:38 +0000 Subject: [PATCH 02/12] dec-r3 --- .../observations/tasks/observation_sync.py | 2 +- vespadb/observations/views.py | 302 ++++++++++-------- 2 files changed, 175 insertions(+), 129 deletions(-) diff --git a/vespadb/observations/tasks/observation_sync.py b/vespadb/observations/tasks/observation_sync.py index 332e632..e234ea2 100644 --- a/vespadb/observations/tasks/observation_sync.py +++ b/vespadb/observations/tasks/observation_sync.py @@ -325,4 +325,4 @@ def fetch_and_update_observations(self: Task, since_week: int | None = None, dat logger.info("Finished processing observations") manage_observations_visibility(token) - logger.info("Finished managing observations visibility") \ No newline at end of file + logger.info("Finished managing observations visibility") diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index 7246067..a0d5cf7 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -1,14 +1,18 @@ """Views for the observations app.""" - -import csv import datetime import io import json -import time import logging -import csv import json -from typing import TYPE_CHECKING, Any, Generator, Any, Union +import csv +from typing import TYPE_CHECKING, Any, Any, Union, TextIO, Union, List, Set, Optional +from _csv import _writer +import datetime +import tempfile +import os +import logging +from tenacity import retry, stop_after_attempt, wait_exponential + from django.http import FileResponse import os import tempfile @@ -631,25 +635,99 @@ def save_observations(self, valid_data: list[dict[str, Any]]) -> Response: {"error": f"An error occurred during bulk import: {e!s}"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR ) - @swagger_auto_schema( - method="get", - manual_parameters=[ - openapi.Parameter( - "export_format", - in_=openapi.IN_QUERY, - description="Format of the exported data", - type=openapi.TYPE_STRING, - enum=["csv"], - default="csv", - ), - ], + @retry( + stop=stop_after_attempt(3), + wait=wait_exponential(multiplier=1, min=4, max=10), + 
retry_error_callback=lambda retry_state: None ) + def write_batch_to_file( + self, + writer: '_writer', # Explicitly type the csv writer + batch: List[Observation], + is_admin: bool, + user_municipality_ids: Set[str] + ) -> int: + """ + Write a batch of observations to the CSV file with retry logic. + Returns number of successfully written records. + """ + successful_writes = 0 + for observation in batch: + try: + row_data = self._prepare_row_data(observation, is_admin, user_municipality_ids) + writer.writerow(row_data) + successful_writes += 1 + except Exception as e: + logger.error(f"Error processing observation {observation.id}: {str(e)}") + continue + return successful_writes + + def _prepare_row_data( + self, + observation: Observation, + is_admin: bool, + user_municipality_ids: set[str] + ) -> list[str]: + """ + Prepare a single row of data for the CSV export with error handling. + """ + try: + # Determine allowed fields based on permissions + if is_admin or (observation.municipality_id in user_municipality_ids): + allowed_fields = user_read_fields + else: + allowed_fields = public_read_fields + + allowed_fields.extend(["source_id", "latitude", "longitude", "anb_domain", "nest_status"]) + + row_data = [] + for field in CSV_HEADERS: + try: + if field not in allowed_fields: + row_data.append("") + continue + + if field == "latitude": + row_data.append(str(observation.location.y) if observation.location else "") + elif field == "longitude": + row_data.append(str(observation.location.x) if observation.location else "") + elif field in ["created_datetime", "modified_datetime", "observation_datetime"]: + datetime_val = getattr(observation, field, None) + if datetime_val: + datetime_val = datetime_val.replace(microsecond=0) + row_data.append(datetime_val.isoformat() + "Z") + else: + row_data.append("") + elif field == "province": + row_data.append(observation.province.name if observation.province else "") + elif field == "municipality": + row_data.append(observation.municipality.name if observation.municipality else "") + elif field == "anb_domain": + row_data.append(str(observation.anb)) + elif field == "nest_status": + row_data.append(self.get_status(observation)) + elif field == "source_id": + row_data.append(str(observation.source_id) if observation.source_id is not None else "") + else: + value = getattr(observation, field, "") + row_data.append(str(value) if value is not None else "") + except Exception as e: + logger.warning(f"Error processing field {field} for observation {observation.id}: {str(e)}") + row_data.append("") + + return row_data + except Exception as e: + logger.error(f"Error preparing row data for observation {observation.id}: {str(e)}") + return [""] * len(CSV_HEADERS) # Return empty row in case of error + + @method_decorator(ratelimit(key="ip", rate="60/m", method="GET", block=True)) @action(detail=False, methods=["get"], permission_classes=[AllowAny]) - def export(self, request: HttpRequest) -> FileResponse: + def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: """ - Export observations as CSV using batch processing. + Export observations as CSV using batch processing with improved error handling. 
""" + temp_file = None try: # Validate export format if request.query_params.get("export_format", "csv").lower() != "csv": @@ -663,119 +741,94 @@ def export(self, request: HttpRequest) -> FileResponse: user_municipality_ids = set() is_admin = False - # Get filtered queryset - queryset = self.filter_queryset(self.get_queryset()) - # Create temporary file - with tempfile.NamedTemporaryFile(mode='w+', newline='', delete=False, suffix='.csv') as temp_file: - writer = csv.writer(temp_file) - - # Write headers - writer.writerow(CSV_HEADERS) - - # Process in batches - total_processed = 0 - while True: - # Get batch of observations - batch = queryset[total_processed:total_processed + BATCH_SIZE] - if not batch: - break + temp_file = tempfile.NamedTemporaryFile(mode='w+', newline='', delete=False, suffix='.csv') + writer = csv.writer(temp_file) + writer.writerow(CSV_HEADERS) + + # Get filtered queryset with timeout protection + total_count = None + try: + with transaction.atomic(), connection.cursor() as cursor: + cursor.execute('SET statement_timeout TO 30000') # 30 seconds timeout + queryset = self.filter_queryset(self.get_queryset()) + total_count = queryset.count() + except Exception as e: + logger.error(f"Error getting total count: {str(e)}") + # Continue with None total_count + + # Process in batches with progress tracking + total_processed = 0 + successful_records = 0 + offset = 0 + batch_size = 1000 + + while True: + try: + # Get batch with timeout protection + with transaction.atomic(), connection.cursor() as cursor: + cursor.execute('SET statement_timeout TO 30000') + batch = list(queryset[offset:offset + batch_size]) + if not batch: # No more records + break - # Process each observation in the batch - for observation in batch: - row_data = self._prepare_row_data( - observation, - is_admin, - user_municipality_ids - ) - writer.writerow(row_data) + # Process batch with retry logic + successful_writes = self.write_batch_to_file(writer, batch, is_admin, user_municipality_ids) + successful_records += successful_writes - total_processed += len(batch) - logger.info(f"Processed {total_processed} observations") - - # Create response with the temporary file - response = FileResponse( - open(temp_file.name, 'rb'), - content_type='text/csv', - as_attachment=True, - filename=f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" - ) - - # Delete the temporary file after it's sent - response.close = lambda: os.unlink(temp_file.name) + batch_count = len(batch) + total_processed += batch_count + offset += batch_size + + # Log progress if we know the total + if total_count: + progress = (total_processed / total_count) * 100 + logger.info(f"Export progress: {progress:.1f}% ({total_processed}/{total_count})") + else: + logger.info(f"Processed {total_processed} records") + + except Exception as e: + logger.error(f"Error processing batch at offset {offset}: {str(e)}") + offset += batch_size # Skip problematic batch + continue + + # Ensure all data is written + temp_file.flush() - return response + # Create response + try: + response = FileResponse( + open(temp_file.name, 'rb'), + content_type='text/csv', + as_attachment=True, + filename=f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" + ) + + # Log export statistics + if total_count: + logger.info(f"Export completed: {successful_records} successful records out of {total_count} total") + else: + logger.info(f"Export completed: {successful_records} successful records") + + # Clean up temp file after sending 
+ response.close = lambda: os.unlink(temp_file.name) + return response + + except Exception as e: + logger.exception("Error creating response") + if temp_file and os.path.exists(temp_file.name): + os.unlink(temp_file.name) + return JsonResponse({"error": "Error creating export file"}, status=500) except Exception as e: logger.exception("Export failed") + # Clean up temp file in case of error + if temp_file and os.path.exists(temp_file.name): + os.unlink(temp_file.name) return JsonResponse( {"error": "Export failed. Please try again or contact support."}, status=500 ) - def _prepare_row_data( - self, - observation: Observation, - is_admin: bool, - user_municipality_ids: set[str] - ) -> list[str]: - """ - Prepare a single row of data for the CSV export. - """ - # Determine allowed fields based on permissions - if is_admin or (observation.municipality_id in user_municipality_ids): - allowed_fields = user_read_fields - else: - allowed_fields = public_read_fields - - # Add essential fields - allowed_fields.extend(["source_id", "latitude", "longitude", "anb_domain", "nest_status"]) - - row_data = [] - for field in CSV_HEADERS: - if field not in allowed_fields: - row_data.append("") - continue - - # Handle special fields - if field == "latitude": - row_data.append(str(observation.location.y) if observation.location else "") - elif field == "longitude": - row_data.append(str(observation.location.x) if observation.location else "") - elif field in ["created_datetime", "modified_datetime", "observation_datetime"]: - datetime_val = getattr(observation, field, None) - if datetime_val: - datetime_val = datetime_val.replace(microsecond=0) - row_data.append(datetime_val.isoformat() + "Z") - else: - row_data.append("") - elif field == "province": - row_data.append(observation.province.name if observation.province else "") - elif field == "municipality": - row_data.append(observation.municipality.name if observation.municipality else "") - elif field == "anb_domain": - row_data.append(str(observation.anb)) - elif field == "nest_status": - row_data.append(self.get_status(observation)) - elif field == "source_id": - row_data.append(str(observation.source_id) if observation.source_id is not None else "") - else: - value = getattr(observation, field, "") - row_data.append(str(value) if value is not None else "") - - return row_data - - - def parse_location(self, srid_str: str) -> tuple[float, float]: - """ - Parse SRID string to extract latitude and longitude. 
- """ - # Convert the SRID location string to GEOSGeometry - geom = GEOSGeometry(srid_str) - - # Extract latitude and longitude - longitude = geom.x - latitude = geom.y - return latitude, longitude def get_status(self, observation: Observation) -> str: """Determine observation status based on eradication data.""" @@ -786,13 +839,6 @@ def get_status(self, observation: Observation) -> str: return "reserved" return "untreated" - def _csv_line(self, row: list[str]) -> bytes: - """Convert a list of strings to a CSV-compatible line in bytes.""" - buffer = io.StringIO() - writer = csv.writer(buffer) - writer.writerow(row) - return buffer.getvalue().encode("utf-8") - @require_GET def search_address(request: Request) -> JsonResponse: """ From f279fec03880e60f41b312e6edccdb2f7711b10c Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Tue, 17 Dec 2024 19:15:34 +0000 Subject: [PATCH 03/12] dec-r3 --- poetry.lock | 769 ++++++++++++++++++---------------- pyproject.toml | 1 + vespadb/observations/views.py | 3 +- 3 files changed, 400 insertions(+), 373 deletions(-) diff --git a/poetry.lock b/poetry.lock index ae63ff6..f403b7d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "amqp" -version = "5.2.0" +version = "5.3.1" description = "Low-level AMQP client for Python (fork of amqplib)." optional = false python-versions = ">=3.6" files = [ - {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, - {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, + {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"}, + {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"}, ] [package.dependencies] @@ -41,37 +41,34 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "asttokens" -version = "2.4.1" +version = "3.0.0" description = "Annotate AST trees with source code positions" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, + {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"}, + {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"}, ] -[package.dependencies] -six = ">=1.12.0" - [package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] +astroid = ["astroid (>=2,<4)"] +test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "attrs" -version = "24.2.0" +version = "24.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308"}, + {file = "attrs-24.3.0.tar.gz", hash = 
"sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] @@ -103,17 +100,17 @@ files = [ [[package]] name = "boto3" -version = "1.35.47" +version = "1.35.82" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.47-py3-none-any.whl", hash = "sha256:0b307f685875e9c7857ce21c0d3050d8d4f3778455a6852d5f98ac75194b400e"}, - {file = "boto3-1.35.47.tar.gz", hash = "sha256:65b808e4cf1af8c2f405382d53656a0d92eee8f85c7388c43d64c7a5571b065f"}, + {file = "boto3-1.35.82-py3-none-any.whl", hash = "sha256:c422b68ae76959b9e23b77eb79e41c3483332f7e1de918d2b083c456d8cf234c"}, + {file = "boto3-1.35.82.tar.gz", hash = "sha256:2bbaf1551b1ed55770cb437d7040f1abe6742601103695057b30ce6328eef286"}, ] [package.dependencies] -botocore = ">=1.35.47,<1.36.0" +botocore = ">=1.35.82,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -122,13 +119,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.47" +version = "1.35.82" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.47-py3-none-any.whl", hash = "sha256:05f4493119a96799ff84d43e78691efac3177e1aec8840cca99511de940e342a"}, - {file = "botocore-1.35.47.tar.gz", hash = "sha256:f8f703463d3cd8b6abe2bedc443a7ab29f0e2ff1588a2e83164b108748645547"}, + {file = "botocore-1.35.82-py3-none-any.whl", hash = "sha256:e43b97d8cbf19d35ce3a177f144bd97cc370f0a67d0984c7d7cf105ac198748f"}, + {file = "botocore-1.35.82.tar.gz", hash = "sha256:78dd7bf8f49616d00073698d7bbaf5a115208fe730b7b7afae4456adddb3552e"}, ] [package.dependencies] @@ -197,13 +194,13 @@ zstd = ["zstandard (==0.22.0)"] [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -503,73 +500,73 @@ files = [ [[package]] name = "coverage" -version = "7.6.4" +version = "7.6.9" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, - {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, - {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, - {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, - {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, - {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, - {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, - {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, - {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, - {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, - {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, - {file = 
"coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, - {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, - {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, - {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, - {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, - {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, - {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, - {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, - {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, - {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, - {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, - {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, - {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, - {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, - {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = 
"sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, - {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, - {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, - {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, - {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, - {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, - {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, - {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, - {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, - {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, - {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, - {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, - {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, - {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, - {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, - {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, - {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", 
hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, + {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, + {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, + {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, + {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, + {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, + {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, + {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, + {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, + {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = "sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, + {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, + {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, + {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, + {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, + {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, + {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, + {file = "coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, + {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, + {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, + {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, + {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, + {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, + {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, + {file = 
"coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, + {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, + {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, + {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, + {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, + {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, + {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, + {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, + {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, + {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, + {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, ] [package.extras] @@ -591,51 +588,51 @@ dev = ["polib"] [[package]] name = "cryptography" -version = "43.0.3" +version = "44.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, - {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, - {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, - {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, - {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, - {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, - {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, - {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, - {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, - {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, - {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, - {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, - {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, - {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, - {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +files = [ + {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"}, + {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"}, + {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f"}, + {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, + {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, + {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, + {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, + {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, + {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, + {file = "cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd"}, + {file = "cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591"}, + {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7"}, + {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc"}, + {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, + {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, + {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, + {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, + {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, + {file = 
"cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, + {file = "cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede"}, + {file = "cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731"}, + {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4"}, + {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756"}, + {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c"}, + {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa"}, + {file = "cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c"}, + {file = "cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02"}, ] [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.0)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] @@ -662,13 +659,13 @@ files = [ [[package]] name = "django" -version = "5.1.2" +version = "5.1.4" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false python-versions = ">=3.10" files = [ - {file = "Django-5.1.2-py3-none-any.whl", hash = "sha256:f11aa87ad8d5617171e3f77e1d5d16f004b79a2cf5d2e1d2b97a6a1f8e9ba5ed"}, - {file = "Django-5.1.2.tar.gz", hash = "sha256:bd7376f90c99f96b643722eee676498706c9fd7dc759f55ebfaf2c08ebcdf4f0"}, + {file = "Django-5.1.4-py3-none-any.whl", hash = "sha256:236e023f021f5ce7dee5779de7b286565fdea5f4ab86bae5338e3f7b69896cf0"}, + {file = "Django-5.1.4.tar.gz", hash = "sha256:de450c09e91879fa5a307f696e57c851955c910a438a35e6b4c895e86bedc82a"}, ] [package.dependencies] @@ -716,18 +713,18 @@ Django = ">=3.2.18" [[package]] name = "django-cors-headers" -version = "4.5.0" +version = "4.6.0" description = "django-cors-headers is a Django application for handling the server headers required for Cross-Origin Resource Sharing (CORS)." 
optional = false python-versions = ">=3.9" files = [ - {file = "django_cors_headers-4.5.0-py3-none-any.whl", hash = "sha256:28c1ded847aa70208798de3e42422a782f427b8b720e8d7319d34b654b5978e6"}, - {file = "django_cors_headers-4.5.0.tar.gz", hash = "sha256:6c01a85cf1ec779a7bde621db853aa3ce5c065a5ba8e27df7a9f9e8dac310f4f"}, + {file = "django_cors_headers-4.6.0-py3-none-any.whl", hash = "sha256:8edbc0497e611c24d5150e0055d3b178c6534b8ed826fb6f53b21c63f5d48ba3"}, + {file = "django_cors_headers-4.6.0.tar.gz", hash = "sha256:14d76b4b4c8d39375baeddd89e4f08899051eeaf177cb02a29bd6eae8cf63aa8"}, ] [package.dependencies] asgiref = ">=3.6" -django = ">=3.2" +django = ">=4.2" [[package]] name = "django-filter" @@ -774,13 +771,13 @@ hiredis = ["redis[hiredis] (>=3,!=4.0.0,!=4.0.1)"] [[package]] name = "django-ses" -version = "4.2.0" +version = "4.3.1" description = "A Django email backend for Amazon's Simple Email Service (SES)" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "django_ses-4.2.0-py3-none-any.whl", hash = "sha256:2344f2650b699417c0f2b6197800580f5022aaccf0083c2a1b640b7ff7366067"}, - {file = "django_ses-4.2.0.tar.gz", hash = "sha256:6167401bd3fc2b29a33b35e4f6f124dc738f6612e3a926ff5533143e75c71ff1"}, + {file = "django_ses-4.3.1-py3-none-any.whl", hash = "sha256:1b66e7958c74b5109aa5179aa975b71599c4db15d5072d5787a2c2ff53e50903"}, + {file = "django_ses-4.3.1.tar.gz", hash = "sha256:3db5b442d76f38679a09099c8e3bfd60690823e4bd583b15fe5187822bf8389d"}, ] [package.dependencies] @@ -874,20 +871,20 @@ test = ["cryptography", "freezegun", "pytest", "pytest-cov", "pytest-django", "p [[package]] name = "dparse" -version = "0.6.4b0" +version = "0.6.4" description = "A parser for Python dependency files" optional = false python-versions = ">=3.7" files = [ - {file = "dparse-0.6.4b0-py3-none-any.whl", hash = "sha256:592ff183348b8a5ea0a18442a7965e29445d3a26063654ec2c7e8ef42cd5753c"}, - {file = "dparse-0.6.4b0.tar.gz", hash = "sha256:f8d49b41a527f3d16a269f854e6665245b325e50e41d2c213810cb984553e5c8"}, + {file = "dparse-0.6.4-py3-none-any.whl", hash = "sha256:fbab4d50d54d0e739fbb4dedfc3d92771003a5b9aa8545ca7a7045e3b174af57"}, + {file = "dparse-0.6.4.tar.gz", hash = "sha256:90b29c39e3edc36c6284c82c4132648eaf28a01863eb3c231c2512196132201a"}, ] [package.dependencies] packaging = "*" [package.extras] -all = ["dparse[conda]", "dparse[pipenv]", "dparse[poetry]"] +all = ["pipenv", "poetry", "pyyaml"] conda = ["pyyaml"] pipenv = ["pipenv"] poetry = ["poetry"] @@ -950,13 +947,13 @@ doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] [[package]] name = "faker" -version = "30.8.0" +version = "33.1.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" files = [ - {file = "Faker-30.8.0-py3-none-any.whl", hash = "sha256:4cd0c5ea4bc1e4c902967f6e662f5f5da69f1674d9a94f54e516d27f3c2a6a16"}, - {file = "faker-30.8.0.tar.gz", hash = "sha256:3608c7fcac2acde0eaa6da28dae97628f18f14d54eaa2a92b96ae006f1621bd7"}, + {file = "Faker-33.1.0-py3-none-any.whl", hash = "sha256:d30c5f0e2796b8970de68978365247657486eb0311c5abe88d0b895b68dff05d"}, + {file = "faker-33.1.0.tar.gz", hash = "sha256:1c925fc0e86a51fc46648b504078c88d0cd48da1da2595c4e712841cab43a1e4"}, ] [package.dependencies] @@ -1102,13 +1099,13 @@ tornado = ["tornado (>=0.2)"] [[package]] name = "identify" -version = "2.6.1" +version = "2.6.3" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, - {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, + {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, + {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, ] [package.extras] @@ -1152,13 +1149,13 @@ files = [ [[package]] name = "ipython" -version = "8.28.0" +version = "8.30.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" files = [ - {file = "ipython-8.28.0-py3-none-any.whl", hash = "sha256:530ef1e7bb693724d3cdc37287c80b07ad9b25986c007a53aa1857272dac3f35"}, - {file = "ipython-8.28.0.tar.gz", hash = "sha256:0d0d15ca1e01faeb868ef56bc7ee5a0de5bd66885735682e8a322ae289a13d1a"}, + {file = "ipython-8.30.0-py3-none-any.whl", hash = "sha256:85ec56a7e20f6c38fce7727dcca699ae4ffc85985aa7b23635a8008f918ae321"}, + {file = "ipython-8.30.0.tar.gz", hash = "sha256:cb0a405a306d2995a5cbb9901894d240784a9f341394c6ba3f4fe8c6eb89ff6e"}, ] [package.dependencies] @@ -1167,16 +1164,16 @@ decorator = "*" jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} -prompt-toolkit = ">=3.0.41,<3.1.0" +prompt_toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" -stack-data = "*" +stack_data = "*" traitlets = ">=5.13.0" -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} +typing_extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} [package.extras] all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing_extensions"] kernel = ["ipykernel"] matplotlib = ["matplotlib"] nbconvert = ["nbconvert"] @@ -1189,22 +1186,22 @@ test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "num [[package]] name = "jedi" -version = "0.19.1" +version = "0.19.2" description = "An autocompletion tool for Python that can be used for text editors." 
optional = false python-versions = ">=3.6" files = [ - {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, - {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, ] [package.dependencies] -parso = ">=0.8.3,<0.9.0" +parso = ">=0.8.4,<0.9.0" [package.extras] docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] -testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] [[package]] name = "jinja2" @@ -1363,13 +1360,13 @@ files = [ [[package]] name = "marshmallow" -version = "3.23.0" +version = "3.23.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.9" files = [ - {file = "marshmallow-3.23.0-py3-none-any.whl", hash = "sha256:82f20a2397834fe6d9611b241f2f7e7b680ed89c49f84728a1ad937be6b4bdf4"}, - {file = "marshmallow-3.23.0.tar.gz", hash = "sha256:98d8827a9f10c03d44ead298d2e99c6aea8197df18ccfad360dae7f89a50da2e"}, + {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, + {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, ] [package.dependencies] @@ -1377,7 +1374,7 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] tests = ["pytest", "simplejson"] [[package]] @@ -1475,75 +1472,77 @@ files = [ [[package]] name = "numpy" -version = "2.1.2" +version = "2.2.0" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" files = [ - {file = "numpy-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d"}, - {file = 
"numpy-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86"}, - {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7"}, - {file = "numpy-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03"}, - {file = "numpy-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466"}, - {file = "numpy-2.1.2-cp310-cp310-win32.whl", hash = "sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb"}, - {file = "numpy-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4"}, - {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a"}, - {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1"}, - {file = "numpy-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2"}, - {file = "numpy-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146"}, - {file = "numpy-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c"}, - {file = "numpy-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426"}, - {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0"}, - {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df"}, - {file = "numpy-2.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366"}, - {file = "numpy-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142"}, - {file = 
"numpy-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550"}, - {file = "numpy-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3"}, - {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8"}, - {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a"}, - {file = "numpy-2.1.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98"}, - {file = "numpy-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe"}, - {file = "numpy-2.1.2-cp313-cp313-win32.whl", hash = "sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a"}, - {file = "numpy-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6"}, - {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8"}, - {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35"}, - {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62"}, - {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e"}, - {file = 
"numpy-2.1.2.tar.gz", hash = "sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1e25507d85da11ff5066269d0bd25d06e0a0f2e908415534f3e603d2a78e4ffa"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a62eb442011776e4036af5c8b1a00b706c5bc02dc15eb5344b0c750428c94219"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:b606b1aaf802e6468c2608c65ff7ece53eae1a6874b3765f69b8ceb20c5fa78e"}, + {file = "numpy-2.2.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:36b2b43146f646642b425dd2027730f99bac962618ec2052932157e213a040e9"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fe8f3583e0607ad4e43a954e35c1748b553bfe9fdac8635c02058023277d1b3"}, + {file = "numpy-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:122fd2fcfafdefc889c64ad99c228d5a1f9692c3a83f56c292618a59aa60ae83"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3f2f5cddeaa4424a0a118924b988746db6ffa8565e5829b1841a8a3bd73eb59a"}, + {file = "numpy-2.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fe4bb0695fe986a9e4deec3b6857003b4cfe5c5e4aac0b95f6a658c14635e31"}, + {file = "numpy-2.2.0-cp310-cp310-win32.whl", hash = "sha256:b30042fe92dbd79f1ba7f6898fada10bdaad1847c44f2dff9a16147e00a93661"}, + {file = "numpy-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dc1d6d66f8d37843ed281773c7174f03bf7ad826523f73435deb88ba60d2d4"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9874bc2ff574c40ab7a5cbb7464bf9b045d617e36754a7bc93f933d52bd9ffc6"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0da8495970f6b101ddd0c38ace92edea30e7e12b9a926b57f5fabb1ecc25bb90"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:0557eebc699c1c34cccdd8c3778c9294e8196df27d713706895edc6f57d29608"}, + {file = "numpy-2.2.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:3579eaeb5e07f3ded59298ce22b65f877a86ba8e9fe701f5576c99bb17c283da"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40deb10198bbaa531509aad0cd2f9fadb26c8b94070831e2208e7df543562b74"}, + {file = "numpy-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2aed8fcf8abc3020d6a9ccb31dbc9e7d7819c56a348cc88fd44be269b37427e"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a222d764352c773aa5ebde02dd84dba3279c81c6db2e482d62a3fa54e5ece69b"}, + {file = "numpy-2.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4e58666988605e251d42c2818c7d3d8991555381be26399303053b58a5bbf30d"}, + {file = "numpy-2.2.0-cp311-cp311-win32.whl", hash = "sha256:4723a50e1523e1de4fccd1b9a6dcea750c2102461e9a02b2ac55ffeae09a4410"}, + {file = "numpy-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:16757cf28621e43e252c560d25b15f18a2f11da94fea344bf26c599b9cf54b73"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cff210198bb4cae3f3c100444c5eaa573a823f05c253e7188e1362a5555235b3"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58b92a5828bd4d9aa0952492b7de803135038de47343b2aa3cc23f3b71a3dc4e"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:ebe5e59545401fbb1b24da76f006ab19734ae71e703cdb4a8b347e84a0cece67"}, + {file = "numpy-2.2.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = 
"sha256:e2b8cd48a9942ed3f85b95ca4105c45758438c7ed28fff1e4ce3e57c3b589d8e"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57fcc997ffc0bef234b8875a54d4058afa92b0b0c4223fc1f62f24b3b5e86038"}, + {file = "numpy-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ad7d11b309bd132d74397fcf2920933c9d1dc865487128f5c03d580f2c3d03"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cb24cca1968b21355cc6f3da1a20cd1cebd8a023e3c5b09b432444617949085a"}, + {file = "numpy-2.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0798b138c291d792f8ea40fe3768610f3c7dd2574389e37c3f26573757c8f7ef"}, + {file = "numpy-2.2.0-cp312-cp312-win32.whl", hash = "sha256:afe8fb968743d40435c3827632fd36c5fbde633b0423da7692e426529b1759b1"}, + {file = "numpy-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4199f519e57d517ebd48cb76b36c82da0360781c6a0353e64c0cac30ecaad3"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f8c8b141ef9699ae777c6278b52c706b653bf15d135d302754f6b2e90eb30367"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f0986e917aca18f7a567b812ef7ca9391288e2acb7a4308aa9d265bd724bdae"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:1c92113619f7b272838b8d6702a7f8ebe5edea0df48166c47929611d0b4dea69"}, + {file = "numpy-2.2.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5a145e956b374e72ad1dff82779177d4a3c62bc8248f41b80cb5122e68f22d13"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18142b497d70a34b01642b9feabb70156311b326fdddd875a9981f34a369b671"}, + {file = "numpy-2.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7d41d1612c1a82b64697e894b75db6758d4f21c3ec069d841e60ebe54b5b571"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a98f6f20465e7618c83252c02041517bd2f7ea29be5378f09667a8f654a5918d"}, + {file = "numpy-2.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e09d40edfdb4e260cb1567d8ae770ccf3b8b7e9f0d9b5c2a9992696b30ce2742"}, + {file = "numpy-2.2.0-cp313-cp313-win32.whl", hash = "sha256:3905a5fffcc23e597ee4d9fb3fcd209bd658c352657548db7316e810ca80458e"}, + {file = "numpy-2.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:a184288538e6ad699cbe6b24859206e38ce5fba28f3bcfa51c90d0502c1582b2"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7832f9e8eb00be32f15fdfb9a981d6955ea9adc8574c521d48710171b6c55e95"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0dd071b95bbca244f4cb7f70b77d2ff3aaaba7fa16dc41f58d14854a6204e6c"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:b0b227dcff8cdc3efbce66d4e50891f04d0a387cce282fe1e66199146a6a8fca"}, + {file = "numpy-2.2.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:6ab153263a7c5ccaf6dfe7e53447b74f77789f28ecb278c3b5d49db7ece10d6d"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e500aba968a48e9019e42c0c199b7ec0696a97fa69037bea163b55398e390529"}, + {file = "numpy-2.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440cfb3db4c5029775803794f8638fbdbf71ec702caf32735f53b008e1eaece3"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a55dc7a7f0b6198b07ec0cd445fbb98b05234e8b00c5ac4874a63372ba98d4ab"}, + {file = "numpy-2.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", 
hash = "sha256:4bddbaa30d78c86329b26bd6aaaea06b1e47444da99eddac7bf1e2fab717bd72"}, + {file = "numpy-2.2.0-cp313-cp313t-win32.whl", hash = "sha256:30bf971c12e4365153afb31fc73f441d4da157153f3400b82db32d04de1e4066"}, + {file = "numpy-2.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d35717333b39d1b6bb8433fa758a55f1081543de527171543a2b710551d40881"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e12c6c1ce84628c52d6367863773f7c8c8241be554e8b79686e91a43f1733773"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:b6207dc8fb3c8cb5668e885cef9ec7f70189bec4e276f0ff70d5aa078d32c88e"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a50aeff71d0f97b6450d33940c7181b08be1441c6c193e678211bff11aa725e7"}, + {file = "numpy-2.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:df12a1f99b99f569a7c2ae59aa2d31724e8d835fc7f33e14f4792e3071d11221"}, + {file = "numpy-2.2.0.tar.gz", hash = "sha256:140dd80ff8981a583a60980be1a655068f8adebf7a45a06a6858c873fcdcd4a0"}, ] [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -1599,8 +1598,8 @@ files = [ [package.dependencies] numpy = [ - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -1704,13 +1703,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "poethepoet" -version = "0.29.0" +version = "0.31.1" description = "A task runner that works well with poetry." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "poethepoet-0.29.0-py3-none-any.whl", hash = "sha256:f8dfe55006dcfb5cf31bcb1904e1262e1c642a4502fee3688cbf1bddfe5c7601"}, - {file = "poethepoet-0.29.0.tar.gz", hash = "sha256:676842302f2304a86b31ac56398dd672fae8471128d2086896393384dbafc095"}, + {file = "poethepoet-0.31.1-py3-none-any.whl", hash = "sha256:7fdfa0ac6074be9936723e7231b5bfaad2923e96c674a9857e81d326cf8ccdc2"}, + {file = "poethepoet-0.31.1.tar.gz", hash = "sha256:d6b66074edf85daf115bb916eae0afd6387d19e1562e1c9ef7d61d5c585696aa"}, ] [package.dependencies] @@ -1848,22 +1847,19 @@ files = [ [[package]] name = "pydantic" -version = "2.9.2" +version = "2.10.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, + {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, + {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.4" -typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, -] +pydantic-core = "2.27.1" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -1871,100 +1867,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, - {file = 
"pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, - {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, - {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = 
"pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = 
"pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = 
"pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [package.dependencies] @@ -2039,13 +2046,13 @@ certifi = "*" [[package]] name = "pytest" -version = "8.3.3" +version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, - {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] @@ -2210,13 +2217,13 @@ files = [ [[package]] name = "redis" -version = "5.1.1" +version = "5.2.1" description = "Python client for Redis database and key-value store" optional = false python-versions = ">=3.8" files = [ - {file = "redis-5.1.1-py3-none-any.whl", hash = "sha256:f8ea06b7482a668c6475ae202ed8d9bcaa409f6e87fb77ed1043d912afd62e24"}, - {file = "redis-5.1.1.tar.gz", hash = "sha256:f6c997521fedbae53387307c5d0bf784d9acc28d9f1d058abeac566ec4dbed72"}, + {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, + {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, ] [package.extras] @@ -2246,13 +2253,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.9.3" +version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" files = [ - {file = "rich-13.9.3-py3-none-any.whl", hash = "sha256:9836f5096eb2172c9e77df411c1b009bace4193d6a481d534fea75ebba758283"}, - {file = "rich-13.9.3.tar.gz", hash = "sha256:bc1e01b899537598cf02579d2b9f4a415104d3fc439313a7a2c165d76557a08e"}, + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] @@ -2293,6 +2300,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, @@ -2301,6 +2309,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, @@ -2309,6 +2318,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, @@ -2317,6 +2327,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, + {file = 
"ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, @@ -2325,6 +2336,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, @@ -2358,13 +2370,13 @@ files = [ [[package]] name = "s3transfer" -version = "0.10.3" +version = "0.10.4" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">=3.8" files = [ - {file = "s3transfer-0.10.3-py3-none-any.whl", hash = "sha256:263ed587a5803c6c708d3ce44dc4dfedaab4c1a32e8329bab818933d79ddcf5d"}, - {file = "s3transfer-0.10.3.tar.gz", hash = "sha256:4f50ed74ab84d474ce614475e0b8d5047ff080810aac5d01ea25231cfc944b0c"}, + {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, + {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, ] [package.dependencies] @@ -2375,13 +2387,13 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "safety" -version = "3.2.8" +version = "3.2.9" description = "Checks installed dependencies for known vulnerabilities and licenses." 
optional = false python-versions = ">=3.7" files = [ - {file = "safety-3.2.8-py3-none-any.whl", hash = "sha256:af6b8c0bb5305b27e00a24b91d3c691d980d43c9becc919d16e5264a78a0437e"}, - {file = "safety-3.2.8.tar.gz", hash = "sha256:4385ea2b321fda163910d1a96ecaaca89d518e28ea5a88c1bb3f53171b45bf94"}, + {file = "safety-3.2.9-py3-none-any.whl", hash = "sha256:5e199c057550dc6146c081084274279dfb98c17735193b028db09a55ea508f1a"}, + {file = "safety-3.2.9.tar.gz", hash = "sha256:494bea752366161ac9e0742033d2a82e4dc51d7c788be42e0ecf5f3ef36b8071"}, ] [package.dependencies] @@ -2428,23 +2440,23 @@ typing-extensions = ">=4.7.1" [[package]] name = "setuptools" -version = "75.2.0" +version = "75.6.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, - {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, + {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, + {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] 
[[package]] name = "shapely" @@ -2531,24 +2543,24 @@ files = [ [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] name = "sqlparse" -version = "0.5.1" +version = "0.5.3" description = "A non-validating SQL parser." optional = false python-versions = ">=3.8" files = [ - {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, - {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, + {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, + {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, ] [package.extras] @@ -2574,6 +2586,21 @@ pure-eval = "*" [package.extras] tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] +[[package]] +name = "tenacity" +version = "9.0.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, + {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + [[package]] name = "traitlets" version = "5.14.3" @@ -2591,13 +2618,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "typeguard" -version = "4.3.0" +version = "4.4.1" description = "Run-time type checker for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "typeguard-4.3.0-py3-none-any.whl", hash = "sha256:4d24c5b39a117f8a895b9da7a9b3114f04eb63bade45a4492de49b175b6f7dfa"}, - {file = "typeguard-4.3.0.tar.gz", hash = "sha256:92ee6a0aec9135181eae6067ebd617fd9de8d75d714fb548728a4933b1dea651"}, + {file = "typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21"}, + {file = "typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b"}, ] [package.dependencies] @@ -2609,13 +2636,13 @@ test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] [[package]] name = "typer" -version = "0.12.5" +version = "0.15.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false python-versions = ">=3.7" files = [ - {file = "typer-0.12.5-py3-none-any.whl", hash = "sha256:62fe4e471711b147e3365034133904df3e235698399bc4de2b36c8579298d52b"}, - {file = "typer-0.12.5.tar.gz", hash = "sha256:f592f089bedcc8ec1b974125d64851029c3b1af145f04aca64d69410f0c9b722"}, + {file = "typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847"}, + {file = "typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a"}, ] [package.dependencies] @@ -2626,13 +2653,13 @@ typing-extensions = ">=3.7.4.3" [[package]] name = "types-python-dateutil" -version = "2.9.0.20241003" +version = "2.9.0.20241206" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"}, - {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"}, + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, ] [[package]] @@ -2723,13 +2750,13 @@ files = [ [[package]] name = "virtualenv" -version = "20.27.0" +version = "20.28.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, - {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, + {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, + {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, ] [package.dependencies] @@ -2754,13 +2781,13 @@ files = [ [[package]] name = "whitenoise" -version = "6.7.0" +version = "6.8.2" description = "Radically simplified static file serving for WSGI applications" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "whitenoise-6.7.0-py3-none-any.whl", hash = "sha256:a1ae85e01fdc9815d12fa33f17765bc132ed2c54fa76daf9e39e879dd93566f6"}, - {file = "whitenoise-6.7.0.tar.gz", hash = "sha256:58c7a6cd811e275a6c91af22e96e87da0b1109e9a53bb7464116ef4c963bf636"}, + {file = "whitenoise-6.8.2-py3-none-any.whl", hash = "sha256:df12dce147a043d1956d81d288c6f0044147c6d2ab9726e5772ac50fb45d2280"}, + {file = "whitenoise-6.8.2.tar.gz", hash = "sha256:486bd7267a375fa9650b136daaec156ac572971acc8bf99add90817a530dd1d4"}, ] [package.extras] @@ -2769,4 +2796,4 @@ brotli = ["brotli"] [metadata] lock-version = "2.0" python-versions = ">=3.11.6,<4.0" -content-hash = "7ae099a1d0ffc1ac9fc8692445fa86897b0d74953883b938b3e64af392f108e1" +content-hash = "55104070732a4b70d487543187b0ef0936fbc4f1ac660a7886bd4508ec4260d4" diff --git a/pyproject.toml b/pyproject.toml index 02ba336..362ed32 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ types-python-dateutil = "^2.9.0.20240316" whitenoise = "^6.6.0" django-ses = "^4.2.0" +tenacity = "^9.0.0" [tool.poetry.group.dev.dependencies] # 
https://python-poetry.org/docs/master/managing-dependencies/ coverage = { extras = ["toml"], version = ">=7.4.1" } ipython = ">=8.20.0" diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index a0d5cf7..c1e587c 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -6,7 +6,6 @@ import json import csv from typing import TYPE_CHECKING, Any, Any, Union, TextIO, Union, List, Set, Optional -from _csv import _writer import datetime import tempfile import os @@ -642,7 +641,7 @@ def save_observations(self, valid_data: list[dict[str, Any]]) -> Response: ) def write_batch_to_file( self, - writer: '_writer', # Explicitly type the csv writer + writer: Any, batch: List[Observation], is_admin: bool, user_municipality_ids: Set[str] From 07ae962817c2a96f1a5e36dd40dc39522411502f Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Wed, 18 Dec 2024 10:42:03 +0000 Subject: [PATCH 04/12] dec r4-export --- nginx.conf | 28 +++- vespadb/observations/views.py | 260 ++++++++++++++++++++++------------ 2 files changed, 198 insertions(+), 90 deletions(-) diff --git a/nginx.conf b/nginx.conf index 48ac6e7..641d390 100644 --- a/nginx.conf +++ b/nginx.conf @@ -8,13 +8,28 @@ http { include mime.types; default_type application/octet-stream; + # Global timeout settings + proxy_connect_timeout 300; + proxy_send_timeout 300; + proxy_read_timeout 300; + send_timeout 300; + sendfile on; keepalive_timeout 65; + # Buffering settings for large responses + proxy_buffering on; + proxy_buffer_size 16k; + proxy_buffers 8 16k; + proxy_busy_buffers_size 32k; + server { listen 80; server_name uat.vespadb.be; + # Increase client body size limit if needed + client_max_body_size 20M; + location /static/ { alias /workspaces/vespadb/collected_static/; } @@ -29,6 +44,10 @@ http { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; + + proxy_connect_timeout 300s; + proxy_send_timeout 300s; + proxy_read_timeout 300s; } } @@ -36,6 +55,9 @@ http { listen 80; server_name data.vespawatch.be; + # Increase client body size limit if needed + client_max_body_size 20M; + location /static/ { alias /workspaces/vespadb/collected_static/; } @@ -50,6 +72,10 @@ http { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; + + proxy_connect_timeout 300s; + proxy_send_timeout 300s; + proxy_read_timeout 300s; } } -} +} \ No newline at end of file diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index c1e587c..8dcd2db 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -11,7 +11,18 @@ import os import logging from tenacity import retry, stop_after_attempt, wait_exponential - +from tenacity import ( + retry, + stop_after_attempt, + wait_exponential, + retry_if_exception_type, + before_log, + after_log, +) +from typing import Generator, Optional +from django.db import OperationalError, connection, transaction +from django.core.exceptions import ValidationError +import psycopg2 from django.http import FileResponse import os import tempfile @@ -75,6 +86,13 @@ "notes", "eradication_result", "wn_id", "wn_validation_status", "nest_status" ] BATCH_SIZE = 1000 +class ExportError(Exception): + """Custom exception for export-related errors.""" + pass + +class QueryTimeoutError(Exception): + """Custom exception for query timeout errors.""" + pass class ObservationsViewSet(ModelViewSet): # 
noqa: PLR0904 """ViewSet for the Observation model.""" @@ -719,17 +737,131 @@ def _prepare_row_data( logger.error(f"Error preparing row data for observation {observation.id}: {str(e)}") return [""] * len(CSV_HEADERS) # Return empty row in case of error + @retry( + stop=stop_after_attempt(3), + wait=wait_exponential(multiplier=1, min=4, max=10), + retry=retry_if_exception_type((OperationalError, psycopg2.OperationalError)), + before=before_log(logger, logging.INFO), + after=before_log(logger, logging.INFO) + ) + def get_queryset_count(self, queryset: QuerySet) -> int: + """Get queryset count with retry logic.""" + try: + with transaction.atomic(), connection.cursor() as cursor: + cursor.execute('SET statement_timeout TO 30000') # 30 seconds timeout + return int(queryset.count()) + except (OperationalError, psycopg2.OperationalError) as e: + logger.error(f"Error getting queryset count: {str(e)}") + raise QueryTimeoutError("Query timed out while getting count") from e + + def get_chunk_with_retries( + self, + queryset: QuerySet, + start: int, + batch_size: int, + max_retries: int = 3 + ) -> Optional[List[Observation]]: + """Get a chunk of data with retries and error handling.""" + for attempt in range(max_retries): + try: + with transaction.atomic(), connection.cursor() as cursor: + cursor.execute('SET statement_timeout TO 30000') + chunk = list( + queryset.select_related( + 'province', + 'municipality', + 'reserved_by' + )[start:start + batch_size] + ) + return chunk + except (OperationalError, psycopg2.OperationalError) as e: + if attempt == max_retries - 1: + logger.error(f"Failed to get chunk after {max_retries} attempts: {str(e)}") + return None + wait_time = (2 ** attempt) * 1 # Exponential backoff + logger.warning(f"Retry {attempt + 1}/{max_retries} after {wait_time}s") + time.sleep(wait_time) + return None + + def create_csv_generator( + self, + queryset: QuerySet, + is_admin: bool, + user_municipality_ids: Set[str], + batch_size: int = BATCH_SIZE + ) -> Generator[str, None, None]: + """Create a generator for CSV streaming with improved error handling.""" + buffer = io.StringIO() + writer = csv.writer(buffer) + + # Write headers + writer.writerow(CSV_HEADERS) + yield buffer.getvalue() + buffer.seek(0) + buffer.truncate(0) + + total_processed = 0 + successful_writes = 0 + error_count = 0 + + try: + total_count = self.get_queryset_count(queryset) + + # Process in chunks + start = 0 + while True: + chunk = self.get_chunk_with_retries(queryset, start, batch_size) + if not chunk: + break + + for observation in chunk: + try: + row_data = self._prepare_row_data( + observation, + is_admin, + user_municipality_ids + ) + writer.writerow(row_data) + successful_writes += 1 + except Exception as e: + error_count += 1 + logger.error(f"Error processing observation {observation.id}: {str(e)}") + if error_count > total_count * 0.1: # If more than 10% errors + raise ExportError("Too many errors during export") + continue + + data = buffer.getvalue() + yield data + buffer.seek(0) + buffer.truncate(0) + + total_processed += len(chunk) + progress = (total_processed / total_count) * 100 if total_count else 0 + logger.info( + f"Export progress: {progress:.1f}% ({total_processed}/{total_count}). 
" + f"Successful: {successful_writes}, Errors: {error_count}" + ) + + start += batch_size + + except Exception as e: + logger.exception("Error in CSV generator") + raise ExportError(f"Export failed: {str(e)}") from e + finally: + buffer.close() @method_decorator(ratelimit(key="ip", rate="60/m", method="GET", block=True)) @action(detail=False, methods=["get"], permission_classes=[AllowAny]) - def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: + def export(self, request: HttpRequest) -> StreamingHttpResponse: """ - Export observations as CSV using batch processing with improved error handling. + Export observations as CSV using streaming response with improved error handling + and performance optimizations. """ - temp_file = None + export_format = request.query_params.get("export_format", "csv").lower() + try: - # Validate export format - if request.query_params.get("export_format", "csv").lower() != "csv": + # Input validation + if export_format != "csv": return JsonResponse({"error": "Only CSV export is supported"}, status=400) # Get user permissions @@ -740,95 +872,45 @@ def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: user_municipality_ids = set() is_admin = False - # Create temporary file - temp_file = tempfile.NamedTemporaryFile(mode='w+', newline='', delete=False, suffix='.csv') - writer = csv.writer(temp_file) - writer.writerow(CSV_HEADERS) - - # Get filtered queryset with timeout protection - total_count = None - try: - with transaction.atomic(), connection.cursor() as cursor: - cursor.execute('SET statement_timeout TO 30000') # 30 seconds timeout - queryset = self.filter_queryset(self.get_queryset()) - total_count = queryset.count() - except Exception as e: - logger.error(f"Error getting total count: {str(e)}") - # Continue with None total_count - - # Process in batches with progress tracking - total_processed = 0 - successful_records = 0 - offset = 0 - batch_size = 1000 - - while True: - try: - # Get batch with timeout protection - with transaction.atomic(), connection.cursor() as cursor: - cursor.execute('SET statement_timeout TO 30000') - batch = list(queryset[offset:offset + batch_size]) - if not batch: # No more records - break - - # Process batch with retry logic - successful_writes = self.write_batch_to_file(writer, batch, is_admin, user_municipality_ids) - successful_records += successful_writes - - batch_count = len(batch) - total_processed += batch_count - offset += batch_size - - # Log progress if we know the total - if total_count: - progress = (total_processed / total_count) * 100 - logger.info(f"Export progress: {progress:.1f}% ({total_processed}/{total_count})") - else: - logger.info(f"Processed {total_processed} records") - - except Exception as e: - logger.error(f"Error processing batch at offset {offset}: {str(e)}") - offset += batch_size # Skip problematic batch - continue - - # Ensure all data is written - temp_file.flush() + # Get filtered queryset + queryset = self.filter_queryset(self.get_queryset()) - # Create response - try: - response = FileResponse( - open(temp_file.name, 'rb'), - content_type='text/csv', - as_attachment=True, - filename=f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" - ) - - # Log export statistics - if total_count: - logger.info(f"Export completed: {successful_records} successful records out of {total_count} total") - else: - logger.info(f"Export completed: {successful_records} successful records") - - # Clean up temp file after sending - response.close 
= lambda: os.unlink(temp_file.name) - return response - - except Exception as e: - logger.exception("Error creating response") - if temp_file and os.path.exists(temp_file.name): - os.unlink(temp_file.name) - return JsonResponse({"error": "Error creating export file"}, status=500) + # Create streaming response + response = StreamingHttpResponse( + streaming_content=self.create_csv_generator( + queryset=queryset, + is_admin=is_admin, + user_municipality_ids=user_municipality_ids + ), + content_type='text/csv' + ) + + # Set headers + filename = f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" + response['Content-Disposition'] = f'attachment; filename="{filename}"' + response['X-Accel-Buffering'] = 'no' # Disable nginx buffering + + return response + except QueryTimeoutError: + logger.exception("Query timeout during export") + return JsonResponse( + {"error": "Export timed out. Please try with a smaller date range or fewer filters."}, + status=503 + ) + except ExportError as e: + logger.exception("Export error") + return JsonResponse( + {"error": f"Export failed: {str(e)}. Please try again or contact support."}, + status=500 + ) except Exception as e: - logger.exception("Export failed") - # Clean up temp file in case of error - if temp_file and os.path.exists(temp_file.name): - os.unlink(temp_file.name) + logger.exception("Unexpected error during export") return JsonResponse( - {"error": "Export failed. Please try again or contact support."}, + {"error": "An unexpected error occurred. Please try again or contact support."}, status=500 ) - + def get_status(self, observation: Observation) -> str: """Determine observation status based on eradication data.""" logger.debug("Getting status for observation %s", observation.eradication_result) From e97fc970a8d1f9a6e4ec25ba065cf64bdd1be657 Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Wed, 18 Dec 2024 12:48:31 +0000 Subject: [PATCH 05/12] fix export uat --- nginx.conf | 31 +++++++++++-- vespadb/observations/views.py | 82 ++++++++++++++++++++++++----------- 2 files changed, 85 insertions(+), 28 deletions(-) diff --git a/nginx.conf b/nginx.conf index 641d390..61e9de4 100644 --- a/nginx.conf +++ b/nginx.conf @@ -8,6 +8,12 @@ http { include mime.types; default_type application/octet-stream; + # HTTP/2 specific settings + http2_max_field_size 16k; + http2_max_header_size 32k; + http2_max_requests 1000; + http2_idle_timeout 5m; + # Global timeout settings proxy_connect_timeout 300; proxy_send_timeout 300; @@ -18,13 +24,14 @@ http { keepalive_timeout 65; # Buffering settings for large responses - proxy_buffering on; + proxy_buffering off; + proxy_request_buffering off; proxy_buffer_size 16k; proxy_buffers 8 16k; proxy_busy_buffers_size 32k; server { - listen 80; + listen 80 http2; # Added http2 server_name uat.vespadb.be; # Increase client body size limit if needed @@ -45,14 +52,23 @@ http { proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; + # HTTP/2 specific + proxy_http_version 1.1; + proxy_set_header Connection ""; + + # Timeouts proxy_connect_timeout 300s; proxy_send_timeout 300s; proxy_read_timeout 300s; + + # Buffer settings + proxy_buffering off; + proxy_request_buffering off; } } server { - listen 80; + listen 80 http2; # Added http2 server_name data.vespawatch.be; # Increase client body size limit if needed @@ -73,9 +89,18 @@ http { proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; + # HTTP/2 specific + 
proxy_http_version 1.1; + proxy_set_header Connection ""; + + # Timeouts proxy_connect_timeout 300s; proxy_send_timeout 300s; proxy_read_timeout 300s; + + # Buffer settings + proxy_buffering off; + proxy_request_buffering off; } } } \ No newline at end of file diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index 8dcd2db..2fbca5c 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -19,6 +19,7 @@ before_log, after_log, ) +import time from typing import Generator, Optional from django.db import OperationalError, connection, transaction from django.core.exceptions import ValidationError @@ -783,6 +784,52 @@ def get_chunk_with_retries( time.sleep(wait_time) return None + def _generate_csv_content( + self, + queryset: QuerySet, + is_admin: bool, + user_municipality_ids: Set[str] + ) -> Generator[str, None, None]: + """Generate CSV content in smaller chunks.""" + buffer = io.StringIO() + writer = csv.writer(buffer) + + # Write headers first + writer.writerow(CSV_HEADERS) + data = buffer.getvalue() + buffer.seek(0) + buffer.truncate() + yield data + + # Process in smaller chunks + chunk_size = 100 # Kleinere chunk size + total = queryset.count() + + for start in range(0, total, chunk_size): + chunk = queryset.select_related( + 'province', + 'municipality', + 'reserved_by' + )[start:start + chunk_size] + + for observation in chunk: + try: + row_data = self._prepare_row_data( + observation, + is_admin, + user_municipality_ids + ) + writer.writerow(row_data) + data = buffer.getvalue() + buffer.seek(0) + buffer.truncate() + yield data + except Exception as e: + logger.error(f"Error processing observation {observation.id}: {str(e)}") + continue + + buffer.close() + def create_csv_generator( self, queryset: QuerySet, @@ -857,11 +904,9 @@ def export(self, request: HttpRequest) -> StreamingHttpResponse: Export observations as CSV using streaming response with improved error handling and performance optimizations. """ - export_format = request.query_params.get("export_format", "csv").lower() - try: - # Input validation - if export_format != "csv": + # Validate export format + if request.query_params.get("export_format", "csv").lower() != "csv": return JsonResponse({"error": "Only CSV export is supported"}, status=400) # Get user permissions @@ -875,39 +920,26 @@ def export(self, request: HttpRequest) -> StreamingHttpResponse: # Get filtered queryset queryset = self.filter_queryset(self.get_queryset()) - # Create streaming response + # Create the StreamingHttpResponse response = StreamingHttpResponse( - streaming_content=self.create_csv_generator( - queryset=queryset, - is_admin=is_admin, - user_municipality_ids=user_municipality_ids + streaming_content=self._generate_csv_content( + queryset, is_admin, user_municipality_ids ), content_type='text/csv' ) - # Set headers + # Important headers filename = f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" response['Content-Disposition'] = f'attachment; filename="{filename}"' - response['X-Accel-Buffering'] = 'no' # Disable nginx buffering + response['X-Accel-Buffering'] = 'no' + response['Cache-Control'] = 'no-cache' return response - except QueryTimeoutError: - logger.exception("Query timeout during export") - return JsonResponse( - {"error": "Export timed out. Please try with a smaller date range or fewer filters."}, - status=503 - ) - except ExportError as e: - logger.exception("Export error") - return JsonResponse( - {"error": f"Export failed: {str(e)}. 
Please try again or contact support."}, - status=500 - ) except Exception as e: - logger.exception("Unexpected error during export") + logger.exception("Export failed") return JsonResponse( - {"error": "An unexpected error occurred. Please try again or contact support."}, + {"error": "Export failed. Please try again or contact support."}, status=500 ) From 0479ff92dcb2e851c73065829ce07fb0200f0943 Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Wed, 18 Dec 2024 13:51:23 +0000 Subject: [PATCH 06/12] export uat --- entrypoint.sh | 6 ++- nginx.conf | 74 ++++++++++++++------------ vespadb/observations/views.py | 97 +++++++++++++++++++++++++++-------- 3 files changed, 122 insertions(+), 55 deletions(-) diff --git a/entrypoint.sh b/entrypoint.sh index ee8cf3b..6841668 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -33,7 +33,11 @@ echo "Load waarnemingen observation data via: python manage.py load_waarnemingen # Start Gunicorn echo "Starting Gunicorn..." -gunicorn --workers 3 --bind 0.0.0.0:8000 vespadb.wsgi:application & +gunicorn --workers 3 \ + --timeout 300 \ + --keep-alive 65 \ + --bind 0.0.0.0:8000 \ + vespadb.wsgi:application & # Wait for Gunicorn to start sleep 5 diff --git a/nginx.conf b/nginx.conf index 61e9de4..c6b55f5 100644 --- a/nginx.conf +++ b/nginx.conf @@ -1,7 +1,9 @@ -worker_processes 1; +worker_processes auto; events { - worker_connections 1024; + worker_connections 4096; + multi_accept on; + use epoll; } http { @@ -15,27 +17,29 @@ http { http2_idle_timeout 5m; # Global timeout settings - proxy_connect_timeout 300; - proxy_send_timeout 300; - proxy_read_timeout 300; - send_timeout 300; + proxy_connect_timeout 600; + proxy_send_timeout 600; + proxy_read_timeout 600; + send_timeout 600; + keepalive_timeout 650; sendfile on; - keepalive_timeout 65; + tcp_nopush on; + tcp_nodelay on; # Buffering settings for large responses - proxy_buffering off; - proxy_request_buffering off; - proxy_buffer_size 16k; - proxy_buffers 8 16k; - proxy_busy_buffers_size 32k; + proxy_buffer_size 128k; + proxy_buffers 8 256k; + proxy_busy_buffers_size 256k; + proxy_temp_file_write_size 256k; + proxy_max_temp_file_size 0; server { - listen 80 http2; # Added http2 + listen 80 http2; server_name uat.vespadb.be; - # Increase client body size limit if needed - client_max_body_size 20M; + # Increase client body size limit + client_max_body_size 0; # Disabled limit for large files location /static/ { alias /workspaces/vespadb/collected_static/; @@ -57,22 +61,25 @@ http { proxy_set_header Connection ""; # Timeouts - proxy_connect_timeout 300s; - proxy_send_timeout 300s; - proxy_read_timeout 300s; - - # Buffer settings - proxy_buffering off; - proxy_request_buffering off; + proxy_connect_timeout 600s; + proxy_send_timeout 600s; + proxy_read_timeout 600s; + + # Buffer settings for large files + proxy_buffering on; + proxy_request_buffering on; + proxy_buffer_size 128k; + proxy_buffers 8 256k; + proxy_busy_buffers_size 256k; } } server { - listen 80 http2; # Added http2 + listen 80 http2; server_name data.vespawatch.be; - # Increase client body size limit if needed - client_max_body_size 20M; + # Increase client body size limit + client_max_body_size 0; # Disabled limit for large files location /static/ { alias /workspaces/vespadb/collected_static/; @@ -94,13 +101,16 @@ http { proxy_set_header Connection ""; # Timeouts - proxy_connect_timeout 300s; - proxy_send_timeout 300s; - proxy_read_timeout 300s; - - # Buffer settings - proxy_buffering off; - proxy_request_buffering off; + proxy_connect_timeout 600s; + 
proxy_send_timeout 600s; + proxy_read_timeout 600s; + + # Buffer settings for large files + proxy_buffering on; + proxy_request_buffering on; + proxy_buffer_size 128k; + proxy_buffers 8 256k; + proxy_busy_buffers_size 256k; } } } \ No newline at end of file diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index 2fbca5c..3b0e571 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -899,15 +899,18 @@ def create_csv_generator( @method_decorator(ratelimit(key="ip", rate="60/m", method="GET", block=True)) @action(detail=False, methods=["get"], permission_classes=[AllowAny]) - def export(self, request: HttpRequest) -> StreamingHttpResponse: - """ - Export observations as CSV using streaming response with improved error handling - and performance optimizations. - """ + def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: + """Export observations as CSV using temporary file approach.""" + temp_file = None + temp_file_path = None + try: - # Validate export format - if request.query_params.get("export_format", "csv").lower() != "csv": - return JsonResponse({"error": "Only CSV export is supported"}, status=400) + # Create temporary file + temp_file = tempfile.NamedTemporaryFile(mode='w+', delete=False) + temp_file_path = temp_file.name + + writer = csv.writer(temp_file) + writer.writerow(CSV_HEADERS) # Get user permissions if request.user.is_authenticated: @@ -917,32 +920,82 @@ def export(self, request: HttpRequest) -> StreamingHttpResponse: user_municipality_ids = set() is_admin = False - # Get filtered queryset - queryset = self.filter_queryset(self.get_queryset()) - - # Create the StreamingHttpResponse - response = StreamingHttpResponse( - streaming_content=self._generate_csv_content( - queryset, is_admin, user_municipality_ids - ), - content_type='text/csv' + # Get filtered queryset with optimizations + queryset = self.filter_queryset( + self.get_queryset().select_related('province', 'municipality', 'reserved_by') ) + + # Set a smaller chunk size for better memory management + chunk_size = 500 + total_count = queryset.count() + processed = 0 + + # Process in chunks + for start in range(0, total_count, chunk_size): + chunk = queryset[start:start + chunk_size] + + for observation in chunk: + try: + row_data = self._prepare_row_data( + observation, + is_admin, + user_municipality_ids + ) + writer.writerow(row_data) + except Exception as e: + logger.error(f"Error processing observation {observation.id}: {str(e)}") + continue + + processed += len(chunk) + logger.info(f"Export progress: {(processed/total_count)*100:.1f}%") + + # Make sure all data is written and file is closed + temp_file.flush() + os.fsync(temp_file.fileno()) + temp_file.close() - # Important headers - filename = f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" - response['Content-Disposition'] = f'attachment; filename="{filename}"' - response['X-Accel-Buffering'] = 'no' + # Open the file for reading and create response + response = FileResponse( + open(temp_file_path, 'rb'), + content_type='text/csv', + as_attachment=True, + filename=f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" + ) + # Set headers more explicitly + response['Content-Disposition'] = f'attachment; filename="{filename}"; filename*=UTF-8\'\'{filename}' + response['Content-Type'] = 'text/csv; charset=utf-8' + response['Content-Length'] = os.path.getsize(temp_file_path) response['Cache-Control'] = 'no-cache' + 
response['X-Accel-Buffering'] = 'no' + + # Schedule file cleanup after response is sent + def cleanup_temp_file(response: FileResponse) -> Any: + """.""" + try: + os.unlink(temp_file_path) + except: + pass + return response + + response.close = cleanup_temp_file.__get__(response, FileResponse) return response except Exception as e: logger.exception("Export failed") + # Cleanup in case of error + if temp_file: + temp_file.close() + if temp_file_path and os.path.exists(temp_file_path): + try: + os.unlink(temp_file_path) + except: + pass return JsonResponse( {"error": "Export failed. Please try again or contact support."}, status=500 ) - + def get_status(self, observation: Observation) -> str: """Determine observation status based on eradication data.""" logger.debug("Getting status for observation %s", observation.eradication_result) From 418153e403d2884d69d65f3153dff84b2187cef8 Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Wed, 18 Dec 2024 13:53:05 +0000 Subject: [PATCH 07/12] export uat --- vespadb/observations/views.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index 3b0e571..e7808bc 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -955,11 +955,13 @@ def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: temp_file.close() # Open the file for reading and create response + filename=f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" + response = FileResponse( open(temp_file_path, 'rb'), content_type='text/csv', as_attachment=True, - filename=f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" + filename=filename ) # Set headers more explicitly response['Content-Disposition'] = f'attachment; filename="{filename}"; filename*=UTF-8\'\'{filename}' From f4e8b8bfcdba21db17d9ded703e80b8bd8735d86 Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Wed, 18 Dec 2024 14:07:02 +0000 Subject: [PATCH 08/12] fix-export-timeout --- entrypoint.sh | 4 ++-- nginx.conf | 8 ++++---- vespadb/observations/views.py | 31 +++++++++++++++++-------------- 3 files changed, 23 insertions(+), 20 deletions(-) diff --git a/entrypoint.sh b/entrypoint.sh index 6841668..a5b935a 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -34,11 +34,11 @@ echo "Load waarnemingen observation data via: python manage.py load_waarnemingen # Start Gunicorn echo "Starting Gunicorn..." 
gunicorn --workers 3 \ - --timeout 300 \ + --timeout 1800 \ --keep-alive 65 \ --bind 0.0.0.0:8000 \ vespadb.wsgi:application & - + # Wait for Gunicorn to start sleep 5 diff --git a/nginx.conf b/nginx.conf index c6b55f5..1946f62 100644 --- a/nginx.conf +++ b/nginx.conf @@ -1,4 +1,4 @@ -worker_processes auto; +worker_processes auto; # Changed from 1 to auto for better performance events { worker_connections 4096; @@ -19,7 +19,7 @@ http { # Global timeout settings proxy_connect_timeout 600; proxy_send_timeout 600; - proxy_read_timeout 600; + proxy_read_timeout 1800; send_timeout 600; keepalive_timeout 650; @@ -63,7 +63,7 @@ http { # Timeouts proxy_connect_timeout 600s; proxy_send_timeout 600s; - proxy_read_timeout 600s; + proxy_read_timeout 1800s; # Buffer settings for large files proxy_buffering on; @@ -103,7 +103,7 @@ http { # Timeouts proxy_connect_timeout 600s; proxy_send_timeout 600s; - proxy_read_timeout 600s; + proxy_read_timeout 1800s; # Buffer settings for large files proxy_buffering on; diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index e7808bc..9e5f00a 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -906,7 +906,7 @@ def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: try: # Create temporary file - temp_file = tempfile.NamedTemporaryFile(mode='w+', delete=False) + temp_file = tempfile.NamedTemporaryFile(mode='w+', delete=False, encoding='utf-8-sig') temp_file_path = temp_file.name writer = csv.writer(temp_file) @@ -925,12 +925,12 @@ def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: self.get_queryset().select_related('province', 'municipality', 'reserved_by') ) - # Set a smaller chunk size for better memory management - chunk_size = 500 + # Use much smaller chunk size + chunk_size = 100 total_count = queryset.count() processed = 0 - # Process in chunks + # Process in chunks with periodic flushes for start in range(0, total_count, chunk_size): chunk = queryset[start:start + chunk_size] @@ -946,6 +946,10 @@ def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: logger.error(f"Error processing observation {observation.id}: {str(e)}") continue + # Flush after each chunk + temp_file.flush() + os.fsync(temp_file.fileno()) + processed += len(chunk) logger.info(f"Export progress: {(processed/total_count)*100:.1f}%") @@ -955,24 +959,23 @@ def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: temp_file.close() # Open the file for reading and create response - filename=f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" - response = FileResponse( open(temp_file_path, 'rb'), - content_type='text/csv', - as_attachment=True, - filename=filename + content_type='text/csv' ) - # Set headers more explicitly + + # Set explicit headers + filename = f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" response['Content-Disposition'] = f'attachment; filename="{filename}"; filename*=UTF-8\'\'{filename}' response['Content-Type'] = 'text/csv; charset=utf-8' response['Content-Length'] = os.path.getsize(temp_file_path) - response['Cache-Control'] = 'no-cache' + response['Cache-Control'] = 'no-cache, no-store, must-revalidate' + response['Pragma'] = 'no-cache' + response['Expires'] = '0' response['X-Accel-Buffering'] = 'no' # Schedule file cleanup after response is sent - def cleanup_temp_file(response: FileResponse) -> Any: - """.""" + def cleanup_temp_file(response): try: os.unlink(temp_file_path) 
except: @@ -994,7 +997,7 @@ def cleanup_temp_file(response: FileResponse) -> Any: except: pass return JsonResponse( - {"error": "Export failed. Please try again or contact support."}, + {"error": f"Export failed: {str(e)}. Please try again or contact support."}, status=500 ) From b7b9e68f366ea222217f33ebd3be57fa2fdd45a9 Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Thu, 19 Dec 2024 19:41:18 +0000 Subject: [PATCH 09/12] export --- entrypoint.sh | 6 + nginx.conf | 2 +- poetry.lock | 16 +- pyproject.toml | 1 + src/components/MapPage.vue | 5 + src/components/NavbarComponent.vue | 25 +- src/stores/vespaStore.js | 59 +- .../observations/migrations/0033_export.py | 31 ++ vespadb/observations/models.py | 26 + vespadb/observations/serializers.py | 8 +- vespadb/observations/tasks/generate_export.py | 230 ++++++++ vespadb/observations/views.py | 522 +++++------------- vespadb/settings.py | 4 + 13 files changed, 533 insertions(+), 402 deletions(-) create mode 100644 vespadb/observations/migrations/0033_export.py create mode 100644 vespadb/observations/tasks/generate_export.py diff --git a/entrypoint.sh b/entrypoint.sh index a5b935a..63db785 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -34,8 +34,14 @@ echo "Load waarnemingen observation data via: python manage.py load_waarnemingen # Start Gunicorn echo "Starting Gunicorn..." gunicorn --workers 3 \ + --worker-class gthread \ + --threads 4 \ + --worker-connections 1000 \ --timeout 1800 \ + --graceful-timeout 300 \ --keep-alive 65 \ + --max-requests 1000 \ + --max-requests-jitter 50 \ --bind 0.0.0.0:8000 \ vespadb.wsgi:application & diff --git a/nginx.conf b/nginx.conf index 1946f62..5bd3146 100644 --- a/nginx.conf +++ b/nginx.conf @@ -113,4 +113,4 @@ http { proxy_busy_buffers_size 256k; } } -} \ No newline at end of file +} diff --git a/poetry.lock b/poetry.lock index f403b7d..c30e111 100644 --- a/poetry.lock +++ b/poetry.lock @@ -726,6 +726,20 @@ files = [ asgiref = ">=3.6" django = ">=4.2" +[[package]] +name = "django-extensions" +version = "3.2.3" +description = "Extensions for Django" +optional = false +python-versions = ">=3.6" +files = [ + {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, + {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, +] + +[package.dependencies] +Django = ">=3.2" + [[package]] name = "django-filter" version = "23.5" @@ -2796,4 +2810,4 @@ brotli = ["brotli"] [metadata] lock-version = "2.0" python-versions = ">=3.11.6,<4.0" -content-hash = "55104070732a4b70d487543187b0ef0936fbc4f1ac660a7886bd4508ec4260d4" +content-hash = "550711048bca44df4e5917c00b7b05b0016617e390d57297ef3285cd214347b0" diff --git a/pyproject.toml b/pyproject.toml index 362ed32..9aa4a70 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,6 +46,7 @@ types-python-dateutil = "^2.9.0.20240316" whitenoise = "^6.6.0" django-ses = "^4.2.0" tenacity = "^9.0.0" +django-extensions = "^3.2.3" [tool.poetry.group.dev.dependencies] # https://python-poetry.org/docs/master/managing-dependencies/ coverage = { extras = ["toml"], version = ">=7.4.1" } ipython = ">=8.20.0" diff --git a/src/components/MapPage.vue b/src/components/MapPage.vue index 5d76b27..1f2c61d 100644 --- a/src/components/MapPage.vue +++ b/src/components/MapPage.vue @@ -15,6 +15,9 @@
Observaties laden...
+
+ Exporteren... +
Gerapporteerd nest @@ -61,6 +64,7 @@ export default { setup() { const vespaStore = useVespaStore(); const searchQuery = ref(''); + const isExporting = computed(() => vespaStore.isExporting); const router = useRouter(); const selectedObservation = computed(() => vespaStore.selectedObservation); const isEditing = computed(() => vespaStore.isEditing); @@ -327,6 +331,7 @@ export default { updateMarkerColor, searchQuery, searchAddress, + isExporting, }; }, }; diff --git a/src/components/NavbarComponent.vue b/src/components/NavbarComponent.vue index f1f4130..b11420c 100644 --- a/src/components/NavbarComponent.vue +++ b/src/components/NavbarComponent.vue @@ -19,7 +19,15 @@ Export
@@ -67,6 +75,7 @@ export default { const isModalVisible = ref(false); const modalTitle = ref(''); const modalMessage = ref(''); + const isExporting = computed(() => vespaStore.isExporting); watch(() => vespaStore.error, (newError) => { if (newError) { @@ -94,10 +103,18 @@ export default { }; const exportData = async (format) => { - await vespaStore.exportData(format); - }; + try { + if (vespaStore.isExporting) return; + + await vespaStore.exportData(format); + } catch (error) { + modalTitle.value = 'Error'; + modalMessage.value = 'Er is een fout opgetreden tijdens het exporteren.'; + isModalVisible.value = true; + } + }; - return { isLoggedIn, loadingAuth, username, logout, navigateToChangePassword, exportData, fileInput, isModalVisible, modalTitle, modalMessage }; + return { isLoggedIn, loadingAuth, username, logout, navigateToChangePassword, exportData, fileInput, isModalVisible, modalTitle, modalMessage, isExporting }; }, mounted() { var dropdownElementList = [].slice.call(document.querySelectorAll('.dropdown-toggle')); diff --git a/src/stores/vespaStore.js b/src/stores/vespaStore.js index d132687..26d3205 100644 --- a/src/stores/vespaStore.js +++ b/src/stores/vespaStore.js @@ -27,6 +27,7 @@ export const useVespaStore = defineStore('vespaStore', { isEditing: false, map: null, viewMode: 'map', + isExporting: false, filters: { municipalities: [], provinces: [], @@ -307,21 +308,55 @@ export const useVespaStore = defineStore('vespaStore', { } }, async exportData(format) { - const filterQuery = this.createFilterQuery(); - const url = `/observations/export?export_format=${format}&${filterQuery}`; - try { - const response = await ApiService.get(url, { responseType: 'blob' }); - const blob = new Blob([response.data], { type: response.headers['content-type'] }); - const downloadUrl = window.URL.createObjectURL(blob); - const link = document.createElement('a'); - link.href = downloadUrl; - link.setAttribute('download', `export.${format}`); - document.body.appendChild(link); - link.click(); - link.remove(); + this.isExporting = true; // Start loading indicator + const response = await ApiService.get( + `/observations/export?${this.createFilterQuery()}` + ); + + if (response.status === 200) { + const { export_id } = response.data; + + const checkStatus = async () => { + const statusResponse = await ApiService.get( + `/observations/export_status?export_id=${export_id}` + ); + + if (statusResponse.data.status === 'completed') { + const downloadResponse = await ApiService.get( + `/observations/download_export/?export_id=${export_id}`, + { responseType: 'blob' } + ); + + const blob = new Blob([downloadResponse.data], { type: 'text/csv' }); + const url = window.URL.createObjectURL(blob); + const link = document.createElement('a'); + link.href = url; + link.setAttribute('download', `observations_export_${new Date().getTime()}.csv`); + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + window.URL.revokeObjectURL(url); + this.isExporting = false; // Stop loading indicator + return true; + } else if (statusResponse.data.status === 'failed') { + this.isExporting = false; // Stop loading indicator on error + throw new Error(statusResponse.data.error || 'Export failed'); + } + + return new Promise(resolve => { + setTimeout(async () => { + resolve(await checkStatus()); + }, 2000); + }); + }; + + await checkStatus(); + } } catch (error) { + this.isExporting = false; // Stop loading indicator on error console.error('Error exporting data:', error); + throw error; } }, async 
fetchMunicipalitiesByProvinces(provinceIds) { diff --git a/vespadb/observations/migrations/0033_export.py b/vespadb/observations/migrations/0033_export.py new file mode 100644 index 0000000..b3b0306 --- /dev/null +++ b/vespadb/observations/migrations/0033_export.py @@ -0,0 +1,31 @@ +# Generated by Django 5.1.4 on 2024-12-18 16:03 + +import django.db.models.deletion +from django.conf import settings +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('observations', '0032_rename_wn_notes_observation_notes'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='Export', + fields=[ + ('id', models.AutoField(primary_key=True, serialize=False)), + ('filters', models.JSONField(default=dict, help_text='Filters applied to the export')), + ('status', models.CharField(choices=[('pending', 'Pending'), ('processing', 'Processing'), ('completed', 'Completed'), ('failed', 'Failed')], default='pending', help_text='Status of the export', max_length=20)), + ('progress', models.IntegerField(default=0, help_text='Progress percentage of the export')), + ('file_path', models.CharField(blank=True, help_text='Path to the exported file', max_length=255, null=True)), + ('created_at', models.DateTimeField(auto_now_add=True, help_text='Datetime when the export was created')), + ('completed_at', models.DateTimeField(blank=True, help_text='Datetime when the export was completed', null=True)), + ('error_message', models.TextField(blank=True, help_text='Error message if the export failed', null=True)), + ('task_id', models.CharField(blank=True, help_text='Celery task ID for the export', max_length=255, null=True)), + ('user', models.ForeignKey(blank=True, help_text='User who requested the export', null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)), + ], + ), + ] diff --git a/vespadb/observations/models.py b/vespadb/observations/models.py index a6c9684..55ab414 100644 --- a/vespadb/observations/models.py +++ b/vespadb/observations/models.py @@ -389,3 +389,29 @@ def save(self, *args: Any, **kwargs: Any) -> None: self.province = municipality.province if municipality else None super().save(*args, **kwargs) + +class Export(models.Model): + """Model for tracking observation exports.""" + STATUS_CHOICES = ( + ('pending', 'Pending'), + ('processing', 'Processing'), + ('completed', 'Completed'), + ('failed', 'Failed'), + ) + + id = models.AutoField(primary_key=True) + user = models.ForeignKey( + settings.AUTH_USER_MODEL, + on_delete=models.SET_NULL, + null=True, + blank=True, + help_text="User who requested the export", + ) + filters = models.JSONField(default=dict, help_text="Filters applied to the export") + status = models.CharField(max_length=20, choices=STATUS_CHOICES, default='pending', help_text="Status of the export") + progress = models.IntegerField(default=0, help_text="Progress percentage of the export") + file_path = models.CharField(max_length=255, blank=True, null=True, help_text="Path to the exported file") + created_at = models.DateTimeField(auto_now_add=True, help_text="Datetime when the export was created") + completed_at = models.DateTimeField(blank=True, null=True, help_text="Datetime when the export was completed") + error_message = models.TextField(blank=True, null=True, help_text="Error message if the export failed") + task_id = models.CharField(max_length=255, blank=True, null=True, help_text="Celery task ID for the export") diff --git 
a/vespadb/observations/serializers.py b/vespadb/observations/serializers.py index 5f0cedb..71332aa 100644 --- a/vespadb/observations/serializers.py +++ b/vespadb/observations/serializers.py @@ -13,7 +13,7 @@ from rest_framework.request import Request from vespadb.observations.helpers import parse_and_convert_to_cet, parse_and_convert_to_utc -from vespadb.observations.models import EradicationResultEnum, Municipality, Observation, Province +from vespadb.observations.models import EradicationResultEnum, Municipality, Observation, Province, Export from vespadb.observations.utils import get_municipality_from_coordinates from vespadb.users.models import VespaUser @@ -484,3 +484,9 @@ class Meta: model = Province fields = ["id", "name"] + + +class ExportSerializer(serializers.ModelSerializer): + class Meta: + model = Export + fields = '__all__' diff --git a/vespadb/observations/tasks/generate_export.py b/vespadb/observations/tasks/generate_export.py new file mode 100644 index 0000000..21aef1f --- /dev/null +++ b/vespadb/observations/tasks/generate_export.py @@ -0,0 +1,230 @@ +import csv +import logging +from datetime import datetime, timedelta +from typing import Optional, Dict, Any, List, Set, Iterator +from django.core.cache import cache +from django.db import models, transaction +from django.utils import timezone +from celery import shared_task +from vespadb.observations.models import Observation, Export +from vespadb.users.models import VespaUser as User +from vespadb.observations.serializers import user_read_fields, public_read_fields + +logger = logging.getLogger(__name__) + +CSV_HEADERS = [ + "id", "created_datetime", "modified_datetime", "latitude", "longitude", + "source", "source_id", "nest_height", "nest_size", "nest_location", + "nest_type", "observation_datetime", "province", "eradication_date", + "municipality", "images", "anb_domain", "notes", "eradication_result", + "wn_id", "wn_validation_status", "nest_status" +] + +class Echo: + """An object that implements just the write method of the file-like interface.""" + def write(self, value): + """Write the value by returning it, instead of storing in a buffer.""" + return value + +def get_status(observation: Observation) -> str: + """Get observation status string.""" + if observation.eradication_result: + return "eradicated" + if observation.reserved_by: + return "reserved" + return "untreated" + +def _prepare_row_data( + observation: Observation, + is_admin: bool, + user_municipality_ids: Set[str] +) -> List[str]: + """ + Prepare a single row of data for the CSV export with error handling. 
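+    Fields outside the requesting user's permission set are written as empty strings,
+    so the CSV column layout stays identical for every permission level.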
+ """ + try: + # Determine allowed fields based on permissions + if is_admin or (observation.municipality_id in user_municipality_ids): + allowed_fields = user_read_fields + else: + allowed_fields = public_read_fields + + allowed_fields.extend(["source_id", "latitude", "longitude", "anb_domain", "nest_status"]) + + row_data = [] + for field in CSV_HEADERS: + try: + if field not in allowed_fields: + row_data.append("") + continue + + if field == "latitude": + row_data.append(str(observation.location.y) if observation.location else "") + elif field == "longitude": + row_data.append(str(observation.location.x) if observation.location else "") + elif field in ["created_datetime", "modified_datetime", "observation_datetime"]: + datetime_val = getattr(observation, field, None) + if datetime_val: + datetime_val = datetime_val.replace(microsecond=0) + row_data.append(datetime_val.isoformat() + "Z") + else: + row_data.append("") + elif field == "province": + row_data.append(observation.province.name if observation.province else "") + elif field == "municipality": + row_data.append(observation.municipality.name if observation.municipality else "") + elif field == "anb_domain": + row_data.append(str(observation.anb)) + elif field == "nest_status": + row_data.append(get_status(observation)) + elif field == "source_id": + row_data.append(str(observation.source_id) if observation.source_id is not None else "") + else: + value = getattr(observation, field, "") + row_data.append(str(value) if value is not None else "") + except Exception as e: + logger.warning(f"Error processing field {field} for observation {observation.id}: {str(e)}") + row_data.append("") + + return row_data + except Exception as e: + logger.error(f"Error preparing row data for observation {observation.id}: {str(e)}") + return [""] * len(CSV_HEADERS) + +def parse_boolean(value: str) -> bool: + """ + Convert a string value to a boolean. + """ + if isinstance(value, bool): + return value + if isinstance(value, str): + value_lower = value.lower() + if value_lower in {"true", "1"}: + return True + elif value_lower in {"false", "0"}: + return False + raise ValueError(f"Invalid boolean value: {value}") + +def generate_rows(queryset, is_admin: bool, user_municipality_ids: set) -> Iterator[List[str]]: + """Generate rows for CSV streaming.""" + # First yield the headers + yield CSV_HEADERS + + # Then yield the data rows + for observation in queryset: + try: + row = _prepare_row_data(observation, is_admin, user_municipality_ids) + yield row + except Exception as e: + logger.error(f"Error processing observation {observation.id}: {str(e)}") + continue + +@shared_task( + name="generate_export", + max_retries=3, + default_retry_delay=60, + soft_time_limit=1700, + time_limit=1800, + acks_late=True +) +def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[int] = None) -> Dict[str, Any]: + """ + Generate CSV export of observations based on filters. 
+ + Args: + export_id: ID of the Export record + filters: Dictionary of filters to apply to the queryset + user_id: Optional ID of the user requesting the export + + Returns: + Dictionary containing export status and details + """ + logger.info(f"Starting export {export_id} for user {user_id}") + export = Export.objects.get(id=export_id) + + try: + # Update export status + export.status = 'processing' + export.save() + logger.info(f"Export {export_id} status set to processing") + + # Validate and preprocess filters + valid_fields = {field.name: field for field in Observation._meta.get_fields()} + processed_filters = {} + for key, value in filters.items(): + if key in valid_fields: + field = valid_fields[key] + if isinstance(field, models.BooleanField): + try: + processed_filters[key] = parse_boolean(value) + except ValueError: + logger.error(f"Invalid boolean value for filter {key}: {value}") + continue + else: + processed_filters[key] = value + + # Prepare queryset with optimizations + queryset = (Observation.objects + .filter(**processed_filters) + .select_related('province', 'municipality', 'reserved_by') + .order_by('id')) + + total = queryset.count() + processed = 0 + + is_admin = False + user_municipality_ids = set() + if user_id: + try: + user = User.objects.get(id=user_id) + is_admin = user.is_superuser + user_municipality_ids = set(user.municipalities.values_list('id', flat=True)) + except User.DoesNotExist: + pass + + logger.info(f"Processing {total} observations for export {export_id}") + + # Generate CSV data + rows = list(generate_rows(queryset, is_admin, user_municipality_ids)) + + # Store in cache + cache_key = f'export_{export_id}_data' + cache.set(cache_key, rows, timeout=3600) # Store for 1 hour + + # Update export record + with transaction.atomic(): + export.status = 'completed' + export.completed_at = timezone.now() + export.progress = 100 + export.save() + + logger.info(f"Export {export_id} completed successfully") + + return { + 'status': 'completed', + 'cache_key': cache_key, + 'total_processed': total + } + + except Exception as e: + logger.exception(f"Export {export_id} failed: {str(e)}") + export.status = 'failed' + export.error_message = str(e) + export.save() + raise + +@shared_task +def cleanup_old_exports() -> None: + """Clean up exports older than 24 hours.""" + logger.info("Starting cleanup of old exports") + cutoff = timezone.now() - timedelta(days=1) + old_exports = Export.objects.filter(created_at__lt=cutoff) + + for export in old_exports: + # Remove from cache if exists + cache_key = f'export_{export.id}_data' + cache.delete(cache_key) + + # Delete the export record + export.delete() + logger.info(f"Cleaned up export {export.id}") diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index 9e5f00a..28b6b9b 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -1,50 +1,36 @@ """Views for the observations app.""" + +import csv import datetime import io import json +import time import logging -import json import csv -from typing import TYPE_CHECKING, Any, Any, Union, TextIO, Union, List, Set, Optional -import datetime -import tempfile +import json +from typing import TYPE_CHECKING, Any, Union, Optional +from django.conf import settings +from django.http import FileResponse, HttpResponseNotFound import os -import logging -from tenacity import retry, stop_after_attempt, wait_exponential -from tenacity import ( - retry, - stop_after_attempt, - wait_exponential, - retry_if_exception_type, - before_log, - 
after_log, -) -import time -from typing import Generator, Optional -from django.db import OperationalError, connection, transaction -from django.core.exceptions import ValidationError -import psycopg2 -from django.http import FileResponse -import os -import tempfile +from django.conf import settings from django.contrib.gis.db.models.functions import Transform from django.contrib.gis.geos import GEOSGeometry from django.core.cache import cache from django.core.exceptions import PermissionDenied, ValidationError from django.core.files.uploadedfile import InMemoryUploadedFile -from django.core.paginator import Paginator from django.db import transaction from django.db.models import CharField, OuterRef, QuerySet, Subquery, Value from django.db.models.functions import Coalesce from django.db.utils import IntegrityError -from django.http import HttpResponse, JsonResponse, StreamingHttpResponse, HttpRequest +from django.http import HttpResponse, JsonResponse, HttpRequest from django.db import connection from django.utils.decorators import method_decorator from django.utils.timezone import now from django.views.decorators.http import require_GET from django_filters.rest_framework import DjangoFilterBackend from django_ratelimit.decorators import ratelimit +from django.http import StreamingHttpResponse, HttpResponseBadRequest, HttpResponseNotFound, HttpResponseServerError from drf_yasg import openapi from drf_yasg.utils import swagger_auto_schema from geopy.exc import GeocoderServiceError, GeocoderTimedOut @@ -59,17 +45,20 @@ from rest_framework.serializers import BaseSerializer from rest_framework.viewsets import ModelViewSet, ReadOnlyModelViewSet from rest_framework_gis.filters import DistanceToPointFilter -from vespadb.observations.serializers import user_read_fields, public_read_fields from vespadb.observations.cache import invalidate_geojson_cache, invalidate_observation_cache from vespadb.observations.filters import ObservationFilter from vespadb.observations.helpers import parse_and_convert_to_utc -from vespadb.observations.models import Municipality, Observation, Province, EradicationResultEnum -from vespadb.observations.serializers import ( - MunicipalitySerializer, - ObservationSerializer, - ProvinceSerializer, -) +from vespadb.observations.models import Municipality, Observation, Province, Export +from vespadb.observations.models import Export +from vespadb.observations.tasks.generate_export import generate_export +from vespadb.observations.serializers import ObservationSerializer, MunicipalitySerializer, ProvinceSerializer + +from django.utils.decorators import method_decorator +from django_ratelimit.decorators import ratelimit +from rest_framework.decorators import action +from rest_framework.permissions import AllowAny +from django.shortcuts import get_object_or_404 if TYPE_CHECKING: from geopy.location import Location @@ -77,24 +66,23 @@ logger = logging.getLogger(__name__) logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s") +class Echo: + """An object that implements just the write method of the file-like interface.""" + def write(self, value): + """Write the value by returning it, instead of storing in a buffer.""" + return value + + BBOX_LENGTH = 4 GEOJSON_REDIS_CACHE_EXPIRATION = 900 # 15 minutes GET_REDIS_CACHE_EXPIRATION = 86400 # 1 day +BATCH_SIZE = 150 CSV_HEADERS = [ "id", "created_datetime", "modified_datetime", "latitude", "longitude", "source", "source_id", "nest_height", "nest_size", "nest_location", "nest_type", 
"observation_datetime", "province", "eradication_date", "municipality", "images", "anb_domain", "notes", "eradication_result", "wn_id", "wn_validation_status", "nest_status" ] -BATCH_SIZE = 1000 -class ExportError(Exception): - """Custom exception for export-related errors.""" - pass - -class QueryTimeoutError(Exception): - """Custom exception for query timeout errors.""" - pass - class ObservationsViewSet(ModelViewSet): # noqa: PLR0904 """ViewSet for the Observation model.""" @@ -653,362 +641,130 @@ def save_observations(self, valid_data: list[dict[str, Any]]) -> Response: {"error": f"An error occurred during bulk import: {e!s}"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR ) - @retry( - stop=stop_after_attempt(3), - wait=wait_exponential(multiplier=1, min=4, max=10), - retry_error_callback=lambda retry_state: None - ) - def write_batch_to_file( - self, - writer: Any, - batch: List[Observation], - is_admin: bool, - user_municipality_ids: Set[str] - ) -> int: - """ - Write a batch of observations to the CSV file with retry logic. - Returns number of successfully written records. - """ - successful_writes = 0 - for observation in batch: - try: - row_data = self._prepare_row_data(observation, is_admin, user_municipality_ids) - writer.writerow(row_data) - successful_writes += 1 - except Exception as e: - logger.error(f"Error processing observation {observation.id}: {str(e)}") - continue - return successful_writes - - def _prepare_row_data( - self, - observation: Observation, - is_admin: bool, - user_municipality_ids: set[str] - ) -> list[str]: - """ - Prepare a single row of data for the CSV export with error handling. - """ - try: - # Determine allowed fields based on permissions - if is_admin or (observation.municipality_id in user_municipality_ids): - allowed_fields = user_read_fields - else: - allowed_fields = public_read_fields - - allowed_fields.extend(["source_id", "latitude", "longitude", "anb_domain", "nest_status"]) - - row_data = [] - for field in CSV_HEADERS: - try: - if field not in allowed_fields: - row_data.append("") - continue - - if field == "latitude": - row_data.append(str(observation.location.y) if observation.location else "") - elif field == "longitude": - row_data.append(str(observation.location.x) if observation.location else "") - elif field in ["created_datetime", "modified_datetime", "observation_datetime"]: - datetime_val = getattr(observation, field, None) - if datetime_val: - datetime_val = datetime_val.replace(microsecond=0) - row_data.append(datetime_val.isoformat() + "Z") - else: - row_data.append("") - elif field == "province": - row_data.append(observation.province.name if observation.province else "") - elif field == "municipality": - row_data.append(observation.municipality.name if observation.municipality else "") - elif field == "anb_domain": - row_data.append(str(observation.anb)) - elif field == "nest_status": - row_data.append(self.get_status(observation)) - elif field == "source_id": - row_data.append(str(observation.source_id) if observation.source_id is not None else "") - else: - value = getattr(observation, field, "") - row_data.append(str(value) if value is not None else "") - except Exception as e: - logger.warning(f"Error processing field {field} for observation {observation.id}: {str(e)}") - row_data.append("") - - return row_data - except Exception as e: - logger.error(f"Error preparing row data for observation {observation.id}: {str(e)}") - return [""] * len(CSV_HEADERS) # Return empty row in case of error - - @retry( - 
stop=stop_after_attempt(3), - wait=wait_exponential(multiplier=1, min=4, max=10), - retry=retry_if_exception_type((OperationalError, psycopg2.OperationalError)), - before=before_log(logger, logging.INFO), - after=before_log(logger, logging.INFO) - ) - def get_queryset_count(self, queryset: QuerySet) -> int: - """Get queryset count with retry logic.""" - try: - with transaction.atomic(), connection.cursor() as cursor: - cursor.execute('SET statement_timeout TO 30000') # 30 seconds timeout - return int(queryset.count()) - except (OperationalError, psycopg2.OperationalError) as e: - logger.error(f"Error getting queryset count: {str(e)}") - raise QueryTimeoutError("Query timed out while getting count") from e - - def get_chunk_with_retries( - self, - queryset: QuerySet, - start: int, - batch_size: int, - max_retries: int = 3 - ) -> Optional[List[Observation]]: - """Get a chunk of data with retries and error handling.""" - for attempt in range(max_retries): - try: - with transaction.atomic(), connection.cursor() as cursor: - cursor.execute('SET statement_timeout TO 30000') - chunk = list( - queryset.select_related( - 'province', - 'municipality', - 'reserved_by' - )[start:start + batch_size] - ) - return chunk - except (OperationalError, psycopg2.OperationalError) as e: - if attempt == max_retries - 1: - logger.error(f"Failed to get chunk after {max_retries} attempts: {str(e)}") - return None - wait_time = (2 ** attempt) * 1 # Exponential backoff - logger.warning(f"Retry {attempt + 1}/{max_retries} after {wait_time}s") - time.sleep(wait_time) - return None - - def _generate_csv_content( - self, - queryset: QuerySet, - is_admin: bool, - user_municipality_ids: Set[str] - ) -> Generator[str, None, None]: - """Generate CSV content in smaller chunks.""" - buffer = io.StringIO() - writer = csv.writer(buffer) - - # Write headers first - writer.writerow(CSV_HEADERS) - data = buffer.getvalue() - buffer.seek(0) - buffer.truncate() - yield data - - # Process in smaller chunks - chunk_size = 100 # Kleinere chunk size - total = queryset.count() - - for start in range(0, total, chunk_size): - chunk = queryset.select_related( - 'province', - 'municipality', - 'reserved_by' - )[start:start + chunk_size] - - for observation in chunk: - try: - row_data = self._prepare_row_data( - observation, - is_admin, - user_municipality_ids - ) - writer.writerow(row_data) - data = buffer.getvalue() - buffer.seek(0) - buffer.truncate() - yield data - except Exception as e: - logger.error(f"Error processing observation {observation.id}: {str(e)}") - continue - - buffer.close() - - def create_csv_generator( - self, - queryset: QuerySet, - is_admin: bool, - user_municipality_ids: Set[str], - batch_size: int = BATCH_SIZE - ) -> Generator[str, None, None]: - """Create a generator for CSV streaming with improved error handling.""" - buffer = io.StringIO() - writer = csv.writer(buffer) - - # Write headers - writer.writerow(CSV_HEADERS) - yield buffer.getvalue() - buffer.seek(0) - buffer.truncate(0) - - total_processed = 0 - successful_writes = 0 - error_count = 0 - - try: - total_count = self.get_queryset_count(queryset) - - # Process in chunks - start = 0 - while True: - chunk = self.get_chunk_with_retries(queryset, start, batch_size) - if not chunk: - break - - for observation in chunk: - try: - row_data = self._prepare_row_data( - observation, - is_admin, - user_municipality_ids - ) - writer.writerow(row_data) - successful_writes += 1 - except Exception as e: - error_count += 1 - logger.error(f"Error processing observation 
{observation.id}: {str(e)}") - if error_count > total_count * 0.1: # If more than 10% errors - raise ExportError("Too many errors during export") - continue - - data = buffer.getvalue() - yield data - buffer.seek(0) - buffer.truncate(0) - - total_processed += len(chunk) - progress = (total_processed / total_count) * 100 if total_count else 0 - logger.info( - f"Export progress: {progress:.1f}% ({total_processed}/{total_count}). " - f"Successful: {successful_writes}, Errors: {error_count}" - ) - - start += batch_size - - except Exception as e: - logger.exception("Error in CSV generator") - raise ExportError(f"Export failed: {str(e)}") from e - finally: - buffer.close() - @method_decorator(ratelimit(key="ip", rate="60/m", method="GET", block=True)) @action(detail=False, methods=["get"], permission_classes=[AllowAny]) - def export(self, request: HttpRequest) -> Union[FileResponse, JsonResponse]: - """Export observations as CSV using temporary file approach.""" - temp_file = None - temp_file_path = None - - try: - # Create temporary file - temp_file = tempfile.NamedTemporaryFile(mode='w+', delete=False, encoding='utf-8-sig') - temp_file_path = temp_file.name - - writer = csv.writer(temp_file) - writer.writerow(CSV_HEADERS) + def export(self, request: HttpRequest) -> JsonResponse: + """Initiate the export of observations and trigger a Celery task.""" + # Initialize the filterset + filterset = self.filterset_class(data=request.GET, queryset=self.get_queryset()) + + # Validate the filterset + if not filterset.is_valid(): + return JsonResponse({"error": filterset.errors}, status=400) + + # Prepare the filter parameters + filters = {key: value for key, value in request.GET.items()} + + # Create an Export record + export = Export.objects.create( + user=request.user if request.user.is_authenticated else None, + filters=filters, + status='pending', + ) - # Get user permissions - if request.user.is_authenticated: - user_municipality_ids = set(request.user.municipalities.values_list("id", flat=True)) - is_admin = request.user.is_superuser - else: - user_municipality_ids = set() - is_admin = False + # Trigger the Celery task + task = generate_export.delay( + export.id, + filters, + user_id=request.user.id if request.user.is_authenticated else None + ) + + # Update the Export record with the task ID + export.task_id = task.id + export.save() + + return JsonResponse({ + 'export_id': export.id, + 'task_id': task.id, + }) - # Get filtered queryset with optimizations - queryset = self.filter_queryset( - self.get_queryset().select_related('province', 'municipality', 'reserved_by') + @swagger_auto_schema( + operation_description="Check the status of an export.", + manual_parameters=[ + openapi.Parameter( + 'export_id', + openapi.IN_QUERY, + description="The ID of the export to check the status of.", + type=openapi.TYPE_INTEGER, + required=True, ) + ], + responses={ + 200: openapi.Schema( + type=openapi.TYPE_OBJECT, + properties={ + 'status': openapi.Schema(type=openapi.TYPE_STRING), + 'progress': openapi.Schema(type=openapi.TYPE_INTEGER), + 'error': openapi.Schema(type=openapi.TYPE_STRING, nullable=True), + 'download_url': openapi.Schema(type=openapi.TYPE_STRING, nullable=True), + }, + ), + 400: "Bad Request", + 404: "Export not found", + }, + ) + @action(detail=False, methods=["get"]) + def export_status(self, request: HttpRequest) -> JsonResponse: + """Check export status.""" + export_id = request.GET.get('export_id') + if not export_id: + logger.error("Export ID not provided") + return JsonResponse({"error": 
"Export ID is required"}, status=400) + + try: + export = get_object_or_404(Export, id=export_id) + except Exception as e: + logger.exception(f"Export ID {export_id} not found or invalid: {str(e)}") + return JsonResponse({"error": f"Export ID {export_id} not found"}, status=404) + + if export.status == 'completed': + download_url = request.build_absolute_uri(f'/observations/download_export/?export_id={export_id}') + return JsonResponse({ + 'status': 'completed', + 'download_url': download_url + }) + + return JsonResponse({ + 'status': export.status, + 'progress': export.progress, + 'error': export.error_message + }) - # Use much smaller chunk size - chunk_size = 100 - total_count = queryset.count() - processed = 0 + @action(detail=False, methods=["get"]) + def download_export(self, request: HttpRequest) -> Union[StreamingHttpResponse, HttpResponse]: + """Stream the export directly to the user.""" + export_id = request.GET.get('export_id') + if not export_id: + return HttpResponseBadRequest("Export ID is required") - # Process in chunks with periodic flushes - for start in range(0, total_count, chunk_size): - chunk = queryset[start:start + chunk_size] - - for observation in chunk: - try: - row_data = self._prepare_row_data( - observation, - is_admin, - user_municipality_ids - ) - writer.writerow(row_data) - except Exception as e: - logger.error(f"Error processing observation {observation.id}: {str(e)}") - continue - - # Flush after each chunk - temp_file.flush() - os.fsync(temp_file.fileno()) - - processed += len(chunk) - logger.info(f"Export progress: {(processed/total_count)*100:.1f}%") - - # Make sure all data is written and file is closed - temp_file.flush() - os.fsync(temp_file.fileno()) - temp_file.close() - - # Open the file for reading and create response - response = FileResponse( - open(temp_file_path, 'rb'), + try: + export = Export.objects.get(id=export_id) + if export.status != 'completed': + return HttpResponseBadRequest("Export is not ready") + + # Get the data iterator from cache + cache_key = f'export_{export_id}_data' + rows = cache.get(cache_key) + if not rows: + return HttpResponseNotFound("Export data not found or expired") + + # Create the streaming response + pseudo_buffer = Echo() + writer = csv.writer(pseudo_buffer) + response = StreamingHttpResponse( + (writer.writerow(row) for row in rows), content_type='text/csv' ) - # Set explicit headers - filename = f"observations_export_{datetime.datetime.now().strftime('%Y%m%d_%H%M%S')}.csv" - response['Content-Disposition'] = f'attachment; filename="{filename}"; filename*=UTF-8\'\'{filename}' - response['Content-Type'] = 'text/csv; charset=utf-8' - response['Content-Length'] = os.path.getsize(temp_file_path) - response['Cache-Control'] = 'no-cache, no-store, must-revalidate' - response['Pragma'] = 'no-cache' - response['Expires'] = '0' - response['X-Accel-Buffering'] = 'no' - - # Schedule file cleanup after response is sent - def cleanup_temp_file(response): - try: - os.unlink(temp_file_path) - except: - pass - return response - - response.close = cleanup_temp_file.__get__(response, FileResponse) - + timestamp = datetime.datetime.now().strftime('%Y%m%d_%H%M%S') + response['Content-Disposition'] = f'attachment; filename="observations_export_{timestamp}.csv"' return response + except Export.DoesNotExist: + return HttpResponseNotFound("Export not found") except Exception as e: - logger.exception("Export failed") - # Cleanup in case of error - if temp_file: - temp_file.close() - if temp_file_path and 
os.path.exists(temp_file_path): - try: - os.unlink(temp_file_path) - except: - pass - return JsonResponse( - {"error": f"Export failed: {str(e)}. Please try again or contact support."}, - status=500 - ) - - def get_status(self, observation: Observation) -> str: - """Determine observation status based on eradication data.""" - logger.debug("Getting status for observation %s", observation.eradication_result) - if observation.eradication_result: - return "eradicated" - if observation.reserved_by: - return "reserved" - return "untreated" + logger.error(f"Error streaming export: {str(e)}") + return HttpResponseServerError("Error generating export") @require_GET def search_address(request: Request) -> JsonResponse: diff --git a/vespadb/settings.py b/vespadb/settings.py index 6519d11..7682211 100644 --- a/vespadb/settings.py +++ b/vespadb/settings.py @@ -57,6 +57,7 @@ "django.contrib.messages", "django.contrib.staticfiles", "django_filters", + 'django_extensions', "django_celery_beat", "django_celery_results", "rest_framework", @@ -269,3 +270,6 @@ AWS_SES_REGION_ENDPOINT = "email.eu-west-1.amazonaws.com" DEFAULT_FROM_EMAIL = secrets["DEFAULT_FROM_EMAIL"] SERVER_EMAIL = secrets["DEFAULT_FROM_EMAIL"] +MEDIA_URL = '/media/' +MEDIA_ROOT = os.path.join(BASE_DIR, 'media') +EXPORTS_DIR = os.path.join(MEDIA_ROOT, 'exports') From 0fe70aca470007a4b5cdc1e90f2721f0ccb07277 Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Sun, 22 Dec 2024 15:30:57 +0000 Subject: [PATCH 10/12] addd controlled values to mapper --- .../observations/tasks/observation_mapper.py | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/vespadb/observations/tasks/observation_mapper.py b/vespadb/observations/tasks/observation_mapper.py index 6121c04..a56f09d 100644 --- a/vespadb/observations/tasks/observation_mapper.py +++ b/vespadb/observations/tasks/observation_mapper.py @@ -27,20 +27,33 @@ mapping_dict: dict[int, dict[str, str]] = { 329: { + # Controlled values "hoger_dan_4_meter": "hoger_dan_4_meter", + "lager_dan_4_meter": "lager_dan_4_meter", + # others "Hoger dan 4 meter": "hoger_dan_4_meter", "Higher than 4 meters": "hoger_dan_4_meter", - "lager_dan_4_meter": "lager_dan_4_meter", "Lager dan 4 meter": "lager_dan_4_meter", "Lower than 4 meters": "lager_dan_4_meter", }, 330: { + # Controlled values + "groter_dan_25_cm": "groter_dan_25_cm", + "kleiner_dan_25_cm": "kleiner_dan_25_cm", + # others "Groter dan 25 cm": "groter_dan_25_cm", "Kleiner dan 25 cm": "kleiner_dan_25_cm", "Larger than 25cm": "groter_dan_25_cm", "Smaller than 25cm": "kleiner_dan_25_cm", }, 331: { + # Controlled values + "buiten_onbedekt_op_gebouw": "buiten_onbedekt_op_gebouw", + "buiten_onbedekt_in_boom_of_struik": "buiten_onbedekt_in_boom_of_struik", + "buiten_natuurlijk_overdekt": "buiten_natuurlijk_overdekt", + "buiten_maar_overdekt_door_constructie": "buiten_maar_overdekt_door_constructie", + "binnen_in_gebouw_of_constructie": "binnen_in_gebouw_of_constructie", + # others "Binnen, in gebouw of constructie": "binnen_in_gebouw_of_constructie", "Buiten, maar overdekt door constructie": "buiten_maar_overdekt_door_constructie", "Buiten, natuurlijk overdekt": "buiten_natuurlijk_overdekt", @@ -53,6 +66,12 @@ "Outside, uncovered on building": "buiten_onbedekt_op_gebouw", }, 368: { + # Controlled values + "actief_embryonaal_nest": "actief_embryonaal_nest", + "actief_primair_nest": "actief_primair_nest", + "actief_secundair_nest": "actief_secundair_nest", + "inactief_leeg_nest": "inactief_leeg_nest", + # others "Actief embryonaal nest": 
"actief_embryonaal_nest", "Actief embryonaal nest (van maart tot eind juni, nest met enkel koningin)": "actief_embryonaal_nest", "Actief primair nest": "actief_primair_nest", From 30f9038e11a641f2f44f2ccbf315edfc6b3573f5 Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Fri, 3 Jan 2025 17:26:53 +0000 Subject: [PATCH 11/12] export and filter --- src/components/FilterComponent.vue | 10 +- vespadb/observations/tasks/generate_export.py | 93 ++++++++++++------- vespadb/observations/views.py | 17 +++- 3 files changed, 77 insertions(+), 43 deletions(-) diff --git a/src/components/FilterComponent.vue b/src/components/FilterComponent.vue index 1903c45..8b4761a 100644 --- a/src/components/FilterComponent.vue +++ b/src/components/FilterComponent.vue @@ -98,7 +98,7 @@ export default { { name: 'Zichtbaar', value: true }, { name: 'Niet zichtbaar', value: false } ]); - const minDate = ref(new Date(new Date().getFullYear(), 3, 1)); + const minDate = ref(new Date(2024, 3, 1)); const maxDate = ref(null); const selectedObservationStart = ref(false); const selectedObservationEnd = ref(false); @@ -121,7 +121,7 @@ export default { max_observation_date: maxDateCET, visible: visibleActief.value }); - + }, 300); const toggleMenu1 = () => { @@ -156,11 +156,11 @@ export default { watch([selectedMunicipalities, selectedProvinces, selectedNestType, selectedNestStatus, anbAreasActief, selectedObservationStart, selectedObservationEnd, visibleActief], () => { emitFilterUpdate(); - }, { deep: true}); - + }, { deep: true }); + watch(() => vespaStore.filters, (newFilters, oldFilters) => { const hasChanged = JSON.stringify(newFilters) !== JSON.stringify(oldFilters); - + if (hasChanged) { selectedMunicipalities.value = newFilters.municipalities || []; selectedProvinces.value = newFilters.provinces || []; diff --git a/vespadb/observations/tasks/generate_export.py b/vespadb/observations/tasks/generate_export.py index 21aef1f..02ce2d3 100644 --- a/vespadb/observations/tasks/generate_export.py +++ b/vespadb/observations/tasks/generate_export.py @@ -128,49 +128,54 @@ def generate_rows(queryset, is_admin: bool, user_municipality_ids: set) -> Itera acks_late=True ) def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[int] = None) -> Dict[str, Any]: - """ - Generate CSV export of observations based on filters. 
- - Args: - export_id: ID of the Export record - filters: Dictionary of filters to apply to the queryset - user_id: Optional ID of the user requesting the export - - Returns: - Dictionary containing export status and details - """ - logger.info(f"Starting export {export_id} for user {user_id}") + """Generate CSV export of observations based on filters.""" + logger.info(f"Starting export {export_id} for user {user_id} with filters: {filters}") export = Export.objects.get(id=export_id) try: # Update export status export.status = 'processing' export.save() - logger.info(f"Export {export_id} status set to processing") - # Validate and preprocess filters + # Clean and validate filters before applying valid_fields = {field.name: field for field in Observation._meta.get_fields()} processed_filters = {} + + # Log the incoming filters + logger.info(f"Processing filters: {filters}") + for key, value in filters.items(): + # Skip pagination and ordering parameters + if key in ['page', 'page_size', 'ordering']: + continue + if key in valid_fields: field = valid_fields[key] - if isinstance(field, models.BooleanField): - try: + try: + if isinstance(field, models.BooleanField): processed_filters[key] = parse_boolean(value) - except ValueError: - logger.error(f"Invalid boolean value for filter {key}: {value}") - continue - else: - processed_filters[key] = value + elif value: # Only add non-empty values + processed_filters[key] = value + except ValueError as e: + logger.warning(f"Skipping invalid filter {key}: {value}, error: {e}") + continue + + logger.info(f"Processed filters: {processed_filters}") - # Prepare queryset with optimizations - queryset = (Observation.objects - .filter(**processed_filters) + # Apply filters and get initial count + queryset = Observation.objects.filter(**processed_filters) + initial_count = queryset.count() + logger.info(f"Initial queryset count: {initial_count}") + + # Add optimizations + queryset = (queryset .select_related('province', 'municipality', 'reserved_by') .order_by('id')) - total = queryset.count() + # Process in batches + batch_size = 1000 processed = 0 + rows = [CSV_HEADERS] # Start with headers is_admin = False user_municipality_ids = set() @@ -180,16 +185,35 @@ def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[i is_admin = user.is_superuser user_municipality_ids = set(user.municipalities.values_list('id', flat=True)) except User.DoesNotExist: - pass - - logger.info(f"Processing {total} observations for export {export_id}") - - # Generate CSV data - rows = list(generate_rows(queryset, is_admin, user_municipality_ids)) + logger.warning(f"User {user_id} not found") + + # Process in batches to reduce memory usage + for i in range(0, initial_count, batch_size): + batch = queryset[i:i + batch_size] + batch_rows = [] + + for observation in batch: + try: + row = _prepare_row_data(observation, is_admin, user_municipality_ids) + batch_rows.append(row) + processed += 1 + + if processed % 100 == 0: + progress = int((processed / initial_count) * 100) + export.progress = progress + export.save() + logger.info(f"Processed {processed}/{initial_count} records") + except Exception as e: + logger.error(f"Error processing observation {observation.id}: {e}") + continue + + # Add batch to rows and clear batch data + rows.extend(batch_rows) + batch_rows = [] # Store in cache cache_key = f'export_{export_id}_data' - cache.set(cache_key, rows, timeout=3600) # Store for 1 hour + cache.set(cache_key, rows, timeout=3600) # Update export record with 
transaction.atomic(): @@ -199,11 +223,10 @@ def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[i export.save() logger.info(f"Export {export_id} completed successfully") - return { 'status': 'completed', 'cache_key': cache_key, - 'total_processed': total + 'total_processed': processed } except Exception as e: @@ -212,7 +235,7 @@ def generate_export(export_id: int, filters: Dict[str, Any], user_id: Optional[i export.error_message = str(e) export.save() raise - + @shared_task def cleanup_old_exports() -> None: """Clean up exports older than 24 hours.""" diff --git a/vespadb/observations/views.py b/vespadb/observations/views.py index 28b6b9b..ede1019 100644 --- a/vespadb/observations/views.py +++ b/vespadb/observations/views.py @@ -652,8 +652,18 @@ def export(self, request: HttpRequest) -> JsonResponse: if not filterset.is_valid(): return JsonResponse({"error": filterset.errors}, status=400) - # Prepare the filter parameters - filters = {key: value for key, value in request.GET.items()} + # Get the filtered queryset count first + filtered_count = filterset.qs.count() + if filtered_count > 10000: + return JsonResponse({ + "error": f"Export too large. Found {filtered_count} records, maximum allowed is 10,000" + }, status=400) + + # Prepare the filter parameters - only include valid filters + filters = {} + for key, value in request.GET.items(): + if key in filterset.filters and value: + filters[key] = value # Create an Export record export = Export.objects.create( @@ -676,8 +686,9 @@ def export(self, request: HttpRequest) -> JsonResponse: return JsonResponse({ 'export_id': export.id, 'task_id': task.id, + 'total_records': filtered_count }) - + @swagger_auto_schema( operation_description="Check the status of an export.", manual_parameters=[ From 0fec002fe563d230fa83408b082f1b0f1cdbf3e4 Mon Sep 17 00:00:00 2001 From: Steven Gerrits Date: Fri, 3 Jan 2025 17:43:44 +0000 Subject: [PATCH 12/12] filter fix --- src/components/FilterComponent.vue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/components/FilterComponent.vue b/src/components/FilterComponent.vue index 8b4761a..be01e7d 100644 --- a/src/components/FilterComponent.vue +++ b/src/components/FilterComponent.vue @@ -178,7 +178,7 @@ export default { anbAreasActief.value = vespaStore.filters.anbAreasActief; selectedNestType.value = vespaStore.filters.nestType || []; selectedNestStatus.value = vespaStore.filters.nestStatus || []; - minDate.value = vespaStore.filters.min_observation_date ? new Date(vespaStore.filters.min_observation_date) : new Date(new Date().getFullYear(), 3, 1); + minDate.value = vespaStore.filters.min_observation_date ? new Date(vespaStore.filters.min_observation_date) : new Date(2024, 3, 1); maxDate.value = vespaStore.filters.max_observation_date ? new Date(vespaStore.filters.max_observation_date) : null; if (!vespaStore.municipalitiesFetched) await vespaStore.fetchMunicipalities(); if (!vespaStore.provincesFetched) await vespaStore.fetchProvinces();