From c171e0419ad0b55936df5abb8871288c74e12870 Mon Sep 17 00:00:00 2001
From: Herculino Trotta
Date: Thu, 16 Jan 2025 14:09:33 -0300
Subject: [PATCH 01/45] feat: add import app boilerplate

---
 app/WYGIWYH/settings.py                | 1 +
 app/apps/import/__init__.py            | 0
 app/apps/import/admin.py               | 3 +++
 app/apps/import/apps.py                | 6 ++++++
 app/apps/import/migrations/__init__.py | 0
 app/apps/import/models.py              | 3 +++
 app/apps/import/tests.py               | 3 +++
 app/apps/import/views.py               | 3 +++
 8 files changed, 19 insertions(+)
 create mode 100644 app/apps/import/__init__.py
 create mode 100644 app/apps/import/admin.py
 create mode 100644 app/apps/import/apps.py
 create mode 100644 app/apps/import/migrations/__init__.py
 create mode 100644 app/apps/import/models.py
 create mode 100644 app/apps/import/tests.py
 create mode 100644 app/apps/import/views.py

diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py
index d10dddd..e4e7c73 100644
--- a/app/WYGIWYH/settings.py
+++ b/app/WYGIWYH/settings.py
@@ -64,6 +64,7 @@
     "apps.accounts.apps.AccountsConfig",
     "apps.common.apps.CommonConfig",
     "apps.net_worth.apps.NetWorthConfig",
+    "apps.import.apps.ImportConfig",
     "apps.api.apps.ApiConfig",
     "cachalot",
     "rest_framework",
diff --git a/app/apps/import/__init__.py b/app/apps/import/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/apps/import/admin.py b/app/apps/import/admin.py
new file mode 100644
index 0000000..8c38f3f
--- /dev/null
+++ b/app/apps/import/admin.py
@@ -0,0 +1,3 @@
+from django.contrib import admin
+
+# Register your models here.
diff --git a/app/apps/import/apps.py b/app/apps/import/apps.py
new file mode 100644
index 0000000..fdfa08d
--- /dev/null
+++ b/app/apps/import/apps.py
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+
+
+class ImportConfig(AppConfig):
+    default_auto_field = "django.db.models.BigAutoField"
+    name = "apps.import"
diff --git a/app/apps/import/migrations/__init__.py b/app/apps/import/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/app/apps/import/models.py b/app/apps/import/models.py
new file mode 100644
index 0000000..71a8362
--- /dev/null
+++ b/app/apps/import/models.py
@@ -0,0 +1,3 @@
+from django.db import models
+
+# Create your models here.
diff --git a/app/apps/import/tests.py b/app/apps/import/tests.py
new file mode 100644
index 0000000..7ce503c
--- /dev/null
+++ b/app/apps/import/tests.py
@@ -0,0 +1,3 @@
+from django.test import TestCase
+
+# Create your tests here.
diff --git a/app/apps/import/views.py b/app/apps/import/views.py
new file mode 100644
index 0000000..91ea44a
--- /dev/null
+++ b/app/apps/import/views.py
@@ -0,0 +1,3 @@
+from django.shortcuts import render
+
+# Create your views here.
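A note on the app name chosen in the patch above: `import` is a reserved keyword in Python, so a package at `apps/import/` cannot be referenced by an ordinary import statement (`from apps.import import models` is a syntax error at parse time), which is why the following patch renames the package to `apps.import_app`. A quick, standalone way to check a candidate app name, shown here only as an illustration and not part of the patch series:

```python
import keyword

# "import" is a Python keyword, so `from apps.import import models` cannot even
# be parsed; "import_app" is an ordinary identifier and works everywhere.
print(keyword.iskeyword("import"))      # True  -> unusable in import statements
print(keyword.iskeyword("import_app"))  # False -> safe module/package name
```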
From fbb26b8442c744438388fb6544a7477256fdd187 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Fri, 17 Jan 2025 17:40:51 -0300 Subject: [PATCH 02/45] feat: rename app, some work on schema --- app/WYGIWYH/settings.py | 2 +- app/WYGIWYH/urls.py | 1 + app/apps/import/admin.py | 3 - app/apps/import/models.py | 3 - app/apps/import/views.py | 3 - app/apps/{import => import_app}/__init__.py | 0 app/apps/import_app/admin.py | 6 + app/apps/{import => import_app}/apps.py | 2 +- .../migrations/__init__.py | 0 app/apps/import_app/models.py | 74 ++++++ app/apps/import_app/schemas.py | 0 app/apps/import_app/schemas/__init__.py | 8 + app/apps/import_app/schemas/v1.py | 104 ++++++++ app/apps/import_app/services.py | 0 app/apps/import_app/services/__init__.py | 1 + app/apps/import_app/services/v1.py | 237 ++++++++++++++++++ app/apps/import_app/tasks.py | 18 ++ app/apps/{import => import_app}/tests.py | 0 app/apps/import_app/urls.py | 6 + app/apps/import_app/views.py | 26 ++ app/apps/transactions/models.py | 1 + requirements.txt | 2 + 22 files changed, 486 insertions(+), 11 deletions(-) delete mode 100644 app/apps/import/admin.py delete mode 100644 app/apps/import/models.py delete mode 100644 app/apps/import/views.py rename app/apps/{import => import_app}/__init__.py (100%) create mode 100644 app/apps/import_app/admin.py rename app/apps/{import => import_app}/apps.py (81%) rename app/apps/{import => import_app}/migrations/__init__.py (100%) create mode 100644 app/apps/import_app/models.py create mode 100644 app/apps/import_app/schemas.py create mode 100644 app/apps/import_app/schemas/__init__.py create mode 100644 app/apps/import_app/schemas/v1.py create mode 100644 app/apps/import_app/services.py create mode 100644 app/apps/import_app/services/__init__.py create mode 100644 app/apps/import_app/services/v1.py create mode 100644 app/apps/import_app/tasks.py rename app/apps/{import => import_app}/tests.py (100%) create mode 100644 app/apps/import_app/urls.py create mode 100644 app/apps/import_app/views.py diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index e4e7c73..8243c91 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -64,7 +64,7 @@ "apps.accounts.apps.AccountsConfig", "apps.common.apps.CommonConfig", "apps.net_worth.apps.NetWorthConfig", - "apps.import.apps.ImportConfig", + "apps.import_app.apps.ImportConfig", "apps.api.apps.ApiConfig", "cachalot", "rest_framework", diff --git a/app/WYGIWYH/urls.py b/app/WYGIWYH/urls.py index 5a465a5..eb4357d 100644 --- a/app/WYGIWYH/urls.py +++ b/app/WYGIWYH/urls.py @@ -47,4 +47,5 @@ path("", include("apps.calendar_view.urls")), path("", include("apps.dca.urls")), path("", include("apps.mini_tools.urls")), + path("", include("apps.import_app.urls")), ] diff --git a/app/apps/import/admin.py b/app/apps/import/admin.py deleted file mode 100644 index 8c38f3f..0000000 --- a/app/apps/import/admin.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.contrib import admin - -# Register your models here. diff --git a/app/apps/import/models.py b/app/apps/import/models.py deleted file mode 100644 index 71a8362..0000000 --- a/app/apps/import/models.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.db import models - -# Create your models here. diff --git a/app/apps/import/views.py b/app/apps/import/views.py deleted file mode 100644 index 91ea44a..0000000 --- a/app/apps/import/views.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.shortcuts import render - -# Create your views here. 
diff --git a/app/apps/import/__init__.py b/app/apps/import_app/__init__.py similarity index 100% rename from app/apps/import/__init__.py rename to app/apps/import_app/__init__.py diff --git a/app/apps/import_app/admin.py b/app/apps/import_app/admin.py new file mode 100644 index 0000000..cbccf2b --- /dev/null +++ b/app/apps/import_app/admin.py @@ -0,0 +1,6 @@ +from django.contrib import admin +from apps.import_app import models + +# Register your models here. +admin.site.register(models.ImportRun) +admin.site.register(models.ImportProfile) diff --git a/app/apps/import/apps.py b/app/apps/import_app/apps.py similarity index 81% rename from app/apps/import/apps.py rename to app/apps/import_app/apps.py index fdfa08d..4dbe90c 100644 --- a/app/apps/import/apps.py +++ b/app/apps/import_app/apps.py @@ -3,4 +3,4 @@ class ImportConfig(AppConfig): default_auto_field = "django.db.models.BigAutoField" - name = "apps.import" + name = "apps.import_app" diff --git a/app/apps/import/migrations/__init__.py b/app/apps/import_app/migrations/__init__.py similarity index 100% rename from app/apps/import/migrations/__init__.py rename to app/apps/import_app/migrations/__init__.py diff --git a/app/apps/import_app/models.py b/app/apps/import_app/models.py new file mode 100644 index 0000000..aca04e3 --- /dev/null +++ b/app/apps/import_app/models.py @@ -0,0 +1,74 @@ +from django.db import models +from django.utils.translation import gettext_lazy as _ + + +class ImportProfile(models.Model): + class Versions(models.IntegerChoices): + VERSION_1 = 1, _("Version 1") + + name = models.CharField(max_length=100) + yaml_config = models.TextField(help_text=_("YAML configuration")) + version = models.IntegerField( + choices=Versions, + default=Versions.VERSION_1, + verbose_name=_("Version"), + ) + + def __str__(self): + return self.name + + class Meta: + ordering = ["name"] + + +class ImportRun(models.Model): + class Status(models.TextChoices): + QUEUED = "QUEUED", _("Queued") + PROCESSING = "PROCESSING", _("Processing") + FAILED = "FAILED", _("Failed") + FINISHED = "FINISHED", _("Finished") + + status = models.CharField( + max_length=10, + choices=Status, + default=Status.QUEUED, + verbose_name=_("Status"), + ) + profile = models.ForeignKey( + ImportProfile, + on_delete=models.CASCADE, + ) + file_name = models.CharField( + max_length=10000, + help_text=_("File name"), + ) + transactions = models.ManyToManyField( + "transactions.Transaction", related_name="import_runs" + ) + tags = models.ManyToManyField( + "transactions.TransactionTag", related_name="import_runs" + ) + categories = models.ManyToManyField( + "transactions.TransactionCategory", related_name="import_runs" + ) + entities = models.ManyToManyField( + "transactions.TransactionEntity", related_name="import_runs" + ) + currencies = models.ManyToManyField( + "currencies.Currency", related_name="import_runs" + ) + + logs = models.TextField(blank=True) + processed_rows = models.IntegerField(default=0) + total_rows = models.IntegerField(default=0) + successful_rows = models.IntegerField(default=0) + skipped_rows = models.IntegerField(default=0) + failed_rows = models.IntegerField(default=0) + started_at = models.DateTimeField(null=True) + finished_at = models.DateTimeField(null=True) + + @property + def progress(self): + if self.total_rows == 0: + return 0 + return (self.processed_rows / self.total_rows) * 100 diff --git a/app/apps/import_app/schemas.py b/app/apps/import_app/schemas.py new file mode 100644 index 0000000..e69de29 diff --git 
a/app/apps/import_app/schemas/__init__.py b/app/apps/import_app/schemas/__init__.py new file mode 100644 index 0000000..f68ce79 --- /dev/null +++ b/app/apps/import_app/schemas/__init__.py @@ -0,0 +1,8 @@ +from apps.import_app.schemas.v1 import ( + ImportProfileSchema as SchemaV1, + ColumnMapping as ColumnMappingV1, + # TransformationRule as TransformationRuleV1, + ImportSettings as SettingsV1, + HashTransformationRule as HashTransformationRuleV1, + CompareDeduplicationRule as CompareDeduplicationRuleV1, +) diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py new file mode 100644 index 0000000..1cc7dc5 --- /dev/null +++ b/app/apps/import_app/schemas/v1.py @@ -0,0 +1,104 @@ +from typing import Dict, List, Optional, Literal +from pydantic import BaseModel, Field + + +class CompareDeduplicationRule(BaseModel): + type: Literal["compare"] + fields: Dict = Field( + ..., description="Match header and fields to compare for deduplication" + ) + match_type: Literal["lax", "strict"] + + +class ReplaceTransformationRule(BaseModel): + field: str + type: Literal["replace", "regex"] = Field( + ..., description="Type of transformation: replace or regex" + ) + pattern: str = Field(..., description="Pattern to match") + replacement: str = Field(..., description="Value to replace with") + + +class DateFormatTransformationRule(BaseModel): + field: str + type: Literal["date_format"] = Field( + ..., description="Type of transformation: replace or regex" + ) + original_format: str = Field(..., description="Original date format") + new_format: str = Field(..., description="New date format to use") + + +class HashTransformationRule(BaseModel): + fields: List[str] + type: Literal["hash"] + + +class MergeTransformationRule(BaseModel): + fields: List[str] + type: Literal["merge"] + separator: str = Field(default=" ", description="Separator to use when merging") + + +class SplitTransformationRule(BaseModel): + fields: List[str] + type: Literal["split"] + separator: str = Field(default=",", description="Separator to use when splitting") + index: int | None = Field( + default=0, description="Index to return as value. Empty to return all." + ) + + +class ImportSettings(BaseModel): + skip_errors: bool = Field( + default=False, + description="If True, errors during import will be logged and skipped", + ) + file_type: Literal["csv"] = "csv" + delimiter: str = Field(default=",", description="CSV delimiter character") + encoding: str = Field(default="utf-8", description="File encoding") + skip_rows: int = Field( + default=0, description="Number of rows to skip at the beginning of the file" + ) + importing: Literal[ + "transactions", "accounts", "currencies", "categories", "tags", "entities" + ] + + +class ColumnMapping(BaseModel): + source: Optional[str] = Field( + default=None, + description="CSV column header. 
If None, the field will be generated from transformations", + ) + target: Literal[ + "account", + "type", + "is_paid", + "date", + "reference_date", + "amount", + "notes", + "category", + "tags", + "entities", + "internal_note", + ] = Field(..., description="Transaction field to map to") + default_value: Optional[str] = None + required: bool = False + transformations: Optional[ + List[ + ReplaceTransformationRule + | DateFormatTransformationRule + | HashTransformationRule + | MergeTransformationRule + | SplitTransformationRule + ] + ] = Field(default_factory=list) + + +class ImportProfileSchema(BaseModel): + settings: ImportSettings + column_mapping: Dict[str, ColumnMapping] + deduplication: List[CompareDeduplicationRule] = Field( + default_factory=list, + description="Rules for deduplicating records during import", + ) diff --git a/app/apps/import_app/services.py b/app/apps/import_app/services.py new file mode 100644 index 0000000..e69de29 diff --git a/app/apps/import_app/services/__init__.py b/app/apps/import_app/services/__init__.py new file mode 100644 index 0000000..6001902 --- /dev/null +++ b/app/apps/import_app/services/__init__.py @@ -0,0 +1 @@ +from apps.import_app.services.v1 import ImportService as ImportServiceV1 diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py new file mode 100644 index 0000000..333eb6e --- /dev/null +++ b/app/apps/import_app/services/v1.py @@ -0,0 +1,237 @@ +import csv +import hashlib +import re +from datetime import datetime +from typing import Dict, Any, Literal + +import yaml + +from django.db import transaction +from django.core.files.storage import default_storage +from django.utils import timezone + +from apps.import_app.models import ImportRun, ImportProfile +from apps.import_app.schemas import ( + SchemaV1, + ColumnMappingV1, + SettingsV1, + HashTransformationRuleV1, + CompareDeduplicationRuleV1, +) +from apps.transactions.models import Transaction + + +class ImportService: + def __init__(self, import_run: ImportRun): + self.import_run: ImportRun = import_run + self.profile: ImportProfile = import_run.profile + self.config: SchemaV1 = self._load_config() + self.settings: SettingsV1 = self.config.settings + self.deduplication: list[CompareDeduplicationRuleV1] = self.config.deduplication + self.mapping: Dict[str, ColumnMappingV1] = self.config.column_mapping + + def _load_config(self) -> SchemaV1: + yaml_data = yaml.safe_load(self.profile.yaml_config) + + if self.profile.version == ImportProfile.Versions.VERSION_1: + return SchemaV1(**yaml_data) + + raise ValueError(f"Unsupported version: {self.profile.version}") + + def _log(self, level: str, message: str, **kwargs) -> None: + """Add a log entry to the import run logs""" + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + # Format additional context if present + context = "" + if kwargs: + context = " - " + ", ".join(f"{k}={v}" for k, v in kwargs.items()) + + log_line = f"[{timestamp}] {level.upper()}: {message}{context}\n" + + # Append to existing logs + self.import_run.logs += log_line + self.import_run.save(update_fields=["logs"]) + + def _update_status( + self, new_status: Literal["PROCESSING", "FAILED", "FINISHED"] + ) -> None: + if new_status == "PROCESSING": + self.import_run.status = ImportRun.Status.PROCESSING + elif new_status == "FAILED": + self.import_run.status = ImportRun.Status.FAILED + elif new_status == "FINISHED": + self.import_run.status = ImportRun.Status.FINISHED + + self.import_run.save(update_fields=["status"]) + + @staticmethod + 
def _transform_value( + value: str, mapping: ColumnMappingV1, row: Dict[str, str] = None + ) -> Any: + transformed = value + + for transform in mapping.transformations: + if transform.type == "hash": + if not isinstance(transform, HashTransformationRuleV1): + continue + + # Collect all values to be hashed + values_to_hash = [] + for field in transform.fields: + if field in row: + values_to_hash.append(str(row[field])) + + # Create hash from concatenated values + if values_to_hash: + concatenated = "|".join(values_to_hash) + transformed = hashlib.sha256(concatenated.encode()).hexdigest() + + elif transform.type == "replace": + transformed = transformed.replace( + transform.pattern, transform.replacement + ) + elif transform.type == "regex": + transformed = re.sub( + transform.pattern, transform.replacement, transformed + ) + elif transform.type == "date_format": + transformed = datetime.strptime( + transformed, transform.pattern + ).strftime(transform.replacement) + + return transformed + + def _map_row_to_transaction(self, row: Dict[str, str]) -> Dict[str, Any]: + transaction_data = {} + + for field, mapping in self.mapping.items(): + # If source is None, use None as the initial value + value = row.get(mapping.source) if mapping.source else None + + # Use default_value if value is None + if value is None: + value = mapping.default_value + + if mapping.required and value is None and not mapping.transformations: + raise ValueError(f"Required field {field} is missing") + + # Apply transformations even if initial value is None + if mapping.transformations: + value = self._transform_value(value, mapping, row) + + if value is not None: + transaction_data[field] = value + + return transaction_data + + def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool: + for rule in self.deduplication: + if rule.type == "compare": + query = Transaction.objects.all() + + # Build query conditions for each field in the rule + for field, header in rule.fields.items(): + if field in transaction_data: + if rule.match_type == "strict": + query = query.filter(**{field: transaction_data[field]}) + else: # lax matching + query = query.filter( + **{f"{field}__iexact": transaction_data[field]} + ) + + # If we found any matching transaction, it's a duplicate + if query.exists(): + return True + + return False + + def _process_csv(self, file_path): + with open(file_path, "r", encoding=self.settings.encoding) as csv_file: + reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + + # Count total rows + self.import_run.total_rows = sum(1 for _ in reader) + csv_file.seek(0) + reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + + self._log("info", f"Starting import with {self.import_run.total_rows} rows") + + # Skip specified number of rows + for _ in range(self.settings.skip_rows): + next(reader) + + if self.settings.skip_rows: + self._log("info", f"Skipped {self.settings.skip_rows} initial rows") + + for row_number, row in enumerate(reader, start=1): + try: + transaction_data = self._map_row_to_transaction(row) + + if transaction_data: + if self.deduplication and self._check_duplicate_transaction( + transaction_data + ): + self.import_run.skipped_rows += 1 + self._log("info", f"Skipped duplicate row {row_number}") + continue + + self.import_run.transactions.add(transaction_data) + self.import_run.successful_rows += 1 + self._log("debug", f"Successfully processed row {row_number}") + + self.import_run.processed_rows += 1 + self.import_run.save( + update_fields=[ + 
"processed_rows", + "successful_rows", + "skipped_rows", + ] + ) + + except Exception as e: + if not self.settings.skip_errors: + self._log( + "error", + f"Fatal error processing row {row_number}: {str(e)}", + ) + self._update_status("FAILED") + raise + else: + self._log( + "warning", f"Error processing row {row_number}: {str(e)}" + ) + self.import_run.failed_rows += 1 + self.import_run.save(update_fields=["failed_rows"]) + + def process_file(self, file_path: str): + self._update_status("PROCESSING") + self.import_run.started_at = timezone.now() + self.import_run.save(update_fields=["started_at"]) + + self._log("info", "Starting import process") + + try: + if self.settings.file_type == "csv": + self._process_csv(file_path) + + if self.import_run.processed_rows == self.import_run.total_rows: + self._update_status("FINISHED") + self._log( + "info", + f"Import completed successfully. " + f"Successful: {self.import_run.successful_rows}, " + f"Failed: {self.import_run.failed_rows}, " + f"Skipped: {self.import_run.skipped_rows}", + ) + + except Exception as e: + self._update_status("FAILED") + self._log("error", f"Import failed: {str(e)}") + raise Exception("Import failed") + + finally: + self._log("info", "Cleaning up temporary files") + default_storage.delete(file_path) + self.import_run.finished_at = timezone.now() + self.import_run.save(update_fields=["finished_at"]) diff --git a/app/apps/import_app/tasks.py b/app/apps/import_app/tasks.py new file mode 100644 index 0000000..25efcbc --- /dev/null +++ b/app/apps/import_app/tasks.py @@ -0,0 +1,18 @@ +import logging + +from procrastinate.contrib.django import app + +from apps.import_app.models import ImportRun +from apps.import_app.services import ImportServiceV1 + +logger = logging.getLogger(__name__) + + +@app.task(queue="imports") +def process_import(import_run_id: int, file_path: str): + try: + import_run = ImportRun.objects.get(id=import_run_id) + import_service = ImportServiceV1(import_run) + import_service.process_file(file_path) + except ImportRun.DoesNotExist: + raise ValueError(f"ImportRun with id {import_run_id} not found") diff --git a/app/apps/import/tests.py b/app/apps/import_app/tests.py similarity index 100% rename from app/apps/import/tests.py rename to app/apps/import_app/tests.py diff --git a/app/apps/import_app/urls.py b/app/apps/import_app/urls.py new file mode 100644 index 0000000..aea8670 --- /dev/null +++ b/app/apps/import_app/urls.py @@ -0,0 +1,6 @@ +from django.urls import path +import apps.import_app.views as views + +urlpatterns = [ + path("import/", views.ImportRunCreateView.as_view(), name="import"), +] diff --git a/app/apps/import_app/views.py b/app/apps/import_app/views.py new file mode 100644 index 0000000..d5b1d94 --- /dev/null +++ b/app/apps/import_app/views.py @@ -0,0 +1,26 @@ +from django.views.generic import CreateView +from apps.import_app.models import ImportRun +from apps.import_app.services import ImportServiceV1 + + +class ImportRunCreateView(CreateView): + model = ImportRun + fields = ["profile"] + + def form_valid(self, form): + response = super().form_valid(form) + + import_run = form.instance + file = self.request.FILES["file"] + + # Save uploaded file temporarily + temp_file_path = f"/tmp/import_{import_run.id}.csv" + with open(temp_file_path, "wb+") as destination: + for chunk in file.chunks(): + destination.write(chunk) + + # Process the import + import_service = ImportServiceV1(import_run) + import_service.process_file(temp_file_path) + + return response diff --git 
a/app/apps/transactions/models.py b/app/apps/transactions/models.py index 70bbc94..f131518 100644 --- a/app/apps/transactions/models.py +++ b/app/apps/transactions/models.py @@ -141,6 +141,7 @@ class Type(models.TextChoices): related_name="transactions", verbose_name=_("Recurring Transaction"), ) + internal_note = models.TextField(blank=True, verbose_name=_("Internal Note")) class Meta: verbose_name = _("Transaction") diff --git a/requirements.txt b/requirements.txt index b4e4f02..af9d39b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,3 +24,5 @@ requests~=2.32.3 pytz~=2024.2 python-dateutil~=2.9.0.post0 simpleeval~=1.0.0 +pydantic~=2.10.5 +PyYAML~=6.0.2 From 86dac632c4bc9edd949aef294844563fb207fa46 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 11:27:14 -0300 Subject: [PATCH 03/45] feat(import): improve schema definition --- app/apps/import_app/schemas/__init__.py | 9 +- app/apps/import_app/schemas/v1.py | 330 ++++++++++++++- app/apps/import_app/services/v1.py | 512 +++++++++++++++++++----- 3 files changed, 720 insertions(+), 131 deletions(-) diff --git a/app/apps/import_app/schemas/__init__.py b/app/apps/import_app/schemas/__init__.py index f68ce79..530268d 100644 --- a/app/apps/import_app/schemas/__init__.py +++ b/app/apps/import_app/schemas/__init__.py @@ -1,8 +1 @@ -from apps.import_app.schemas.v1 import ( - ImportProfileSchema as SchemaV1, - ColumnMapping as ColumnMappingV1, - # TransformationRule as TransformationRuleV1, - ImportSettings as SettingsV1, - HashTransformationRule as HashTransformationRuleV1, - CompareDeduplicationRule as CompareDeduplicationRuleV1, -) +import apps.import_app.schemas.v1 as version_1 diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py index 1cc7dc5..043f2a9 100644 --- a/app/apps/import_app/schemas/v1.py +++ b/app/apps/import_app/schemas/v1.py @@ -1,5 +1,5 @@ from typing import Dict, List, Optional, Literal -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, model_validator, field_validator class CompareDeduplicationRule(BaseModel): @@ -9,6 +9,12 @@ class CompareDeduplicationRule(BaseModel): ) match_type: Literal["lax", "strict"] + @field_validator("fields", mode="before") + def coerce_fields_to_dict(cls, v): + if isinstance(v, list): + return {k: v for d in v for k, v in d.items()} + return v + class ReplaceTransformationRule(BaseModel): field: str @@ -17,6 +23,10 @@ class ReplaceTransformationRule(BaseModel): ) pattern: str = Field(..., description="Pattern to match") replacement: str = Field(..., description="Value to replace with") + exclusive: bool = Field( + default=False, + description="If it should match against the last transformation or the original value", + ) class DateFormatTransformationRule(BaseModel): @@ -48,7 +58,7 @@ class SplitTransformationRule(BaseModel): ) -class ImportSettings(BaseModel): +class CSVImportSettings(BaseModel): skip_errors: bool = Field( default=False, description="If True, errors during import will be logged and skipped", @@ -56,7 +66,7 @@ class ImportSettings(BaseModel): file_type: Literal["csv"] = "csv" delimiter: str = Field(default=",", description="CSV delimiter character") encoding: str = Field(default="utf-8", description="File encoding") - skip_rows: int = Field( + skip_lines: int = Field( default=0, description="Number of rows to skip at the beginning of the file" ) importing: Literal[ @@ -69,20 +79,7 @@ class ColumnMapping(BaseModel): default=None, description="CSV column header. 
If None, the field will be generated from transformations", ) - target: Literal[ - "account", - "type", - "is_paid", - "date", - "reference_date", - "amount", - "notes", - "category", - "tags", - "entities", - "internal_note", - ] = Field(..., description="Transaction field to map to") - default_value: Optional[str] = None + default: Optional[str] = None required: bool = False transformations: Optional[ List[ @@ -95,10 +92,305 @@ class ColumnMapping(BaseModel): ] = Field(default_factory=list) +class TransactionAccountMapping(ColumnMapping): + target: Literal["account"] = Field(..., description="Transaction field to map to") + type: Literal["id", "name"] = "name" + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class TransactionTypeMapping(ColumnMapping): + target: Literal["type"] = Field(..., description="Transaction field to map to") + detection_method: Literal["sign", "always_income", "always_expense"] = "sign" + coerce_to: Literal["transaction_type"] = Field("transaction_type", frozen=True) + + +class TransactionIsPaidMapping(ColumnMapping): + target: Literal["is_paid"] = Field(..., description="Transaction field to map to") + detection_method: Literal["sign", "boolean", "always_paid", "always_unpaid"] + coerce_to: Literal["is_paid"] = Field("is_paid", frozen=True) + + +class TransactionDateMapping(ColumnMapping): + target: Literal["date"] = Field(..., description="Transaction field to map to") + format: List[str] | str + coerce_to: Literal["date"] = Field("date", frozen=True) + + +class TransactionReferenceDateMapping(ColumnMapping): + target: Literal["reference_date"] = Field( + ..., description="Transaction field to map to" + ) + format: List[str] | str + coerce_to: Literal["date"] = Field("date", frozen=True) + + +class TransactionAmountMapping(ColumnMapping): + target: Literal["amount"] = Field(..., description="Transaction field to map to") + coerce_to: Literal["positive_decimal"] = Field("positive_decimal", frozen=True) + + +class TransactionDescriptionMapping(ColumnMapping): + target: Literal["description"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TransactionNotesMapping(ColumnMapping): + target: Literal["notes"] = Field(..., description="Transaction field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TransactionTagsMapping(ColumnMapping): + target: Literal["tags"] = Field(..., description="Transaction field to map to") + create: bool = Field( + default=True, description="Create new tags if they doesn't exist" + ) + coerce_to: Literal["list"] = Field("list", frozen=True) + + +class TransactionEntitiesMapping(ColumnMapping): + target: Literal["entities"] = Field(..., description="Transaction field to map to") + create: bool = Field( + default=True, description="Create new entities if they doesn't exist" + ) + coerce_to: Literal["list"] = Field("list", frozen=True) + + +class TransactionCategoryMapping(ColumnMapping): + target: Literal["category"] = Field(..., description="Transaction field to map to") + create: bool = Field( + default=True, description="Create category if it doesn't exist" + ) + type: Literal["id", "name"] = "name" + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class TransactionInternalMapping(ColumnMapping): + target: Literal["internal_note"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class 
CategoryNameMapping(ColumnMapping): + target: Literal["category_name"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CategoryMuteMapping(ColumnMapping): + target: Literal["category_mute"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class CategoryActiveMapping(ColumnMapping): + target: Literal["category_active"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class TagNameMapping(ColumnMapping): + target: Literal["tag_name"] = Field(..., description="Tag field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TagActiveMapping(ColumnMapping): + target: Literal["tag_active"] = Field(..., description="Tag field to map to") + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class EntityNameMapping(ColumnMapping): + target: Literal["entity_name"] = Field(..., description="Entity field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class EntityActiveMapping(ColumnMapping): + target: Literal["entitiy_active"] = Field(..., description="Entity field to map to") + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class AccountNameMapping(ColumnMapping): + target: Literal["account_name"] = Field(..., description="Account field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class AccountGroupMapping(ColumnMapping): + target: Literal["account_group"] = Field(..., description="Account field to map to") + type: Literal["id", "name"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountCurrencyMapping(ColumnMapping): + target: Literal["account_currency"] = Field( + ..., description="Account field to map to" + ) + type: Literal["id", "name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountExchangeCurrencyMapping(ColumnMapping): + target: Literal["account_exchange_currency"] = Field( + ..., description="Account field to map to" + ) + type: Literal["id", "name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountIsAssetMapping(ColumnMapping): + target: Literal["account_is_asset"] = Field( + ..., description="Account field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class AccountIsArchivedMapping(ColumnMapping): + target: Literal["account_is_archived"] = Field( + ..., description="Account field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class CurrencyCodeMapping(ColumnMapping): + target: Literal["currency_code"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyNameMapping(ColumnMapping): + target: Literal["currency_name"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyDecimalPlacesMapping(ColumnMapping): + target: Literal["currency_decimal_places"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["int"] = Field("int", frozen=True) + + +class CurrencyPrefixMapping(ColumnMapping): + target: Literal["currency_prefix"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencySuffixMapping(ColumnMapping): + target: 
Literal["currency_suffix"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyExchangeMapping(ColumnMapping): + target: Literal["currency_exchange"] = Field( + ..., description="Currency field to map to" + ) + type: Literal["id", "name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + class ImportProfileSchema(BaseModel): - settings: ImportSettings - column_mapping: Dict[str, ColumnMapping] + settings: CSVImportSettings + mapping: Dict[ + str, + TransactionAccountMapping + | TransactionTypeMapping + | TransactionIsPaidMapping + | TransactionDateMapping + | TransactionReferenceDateMapping + | TransactionAmountMapping + | TransactionDescriptionMapping + | TransactionNotesMapping + | TransactionTagsMapping + | TransactionEntitiesMapping + | TransactionCategoryMapping + | TransactionInternalMapping + | CategoryNameMapping + | CategoryMuteMapping + | CategoryActiveMapping + | TagNameMapping + | TagActiveMapping + | EntityNameMapping + | EntityActiveMapping + | AccountNameMapping + | AccountGroupMapping + | AccountCurrencyMapping + | AccountExchangeCurrencyMapping + | AccountIsAssetMapping + | AccountIsArchivedMapping + | CurrencyCodeMapping + | CurrencyNameMapping + | CurrencyDecimalPlacesMapping + | CurrencyPrefixMapping + | CurrencySuffixMapping + | CurrencyExchangeMapping, + ] deduplication: List[CompareDeduplicationRule] = Field( default_factory=list, description="Rules for deduplicating records during import", ) + + @model_validator(mode="after") + def validate_mappings(self) -> "ImportProfileSchema": + import_type = self.settings.importing + + # Define allowed mapping types for each import type + allowed_mappings = { + "transactions": ( + TransactionAccountMapping, + TransactionTypeMapping, + TransactionIsPaidMapping, + TransactionDateMapping, + TransactionReferenceDateMapping, + TransactionAmountMapping, + TransactionDescriptionMapping, + TransactionNotesMapping, + TransactionTagsMapping, + TransactionEntitiesMapping, + TransactionCategoryMapping, + TransactionInternalMapping, + ), + "accounts": ( + AccountNameMapping, + AccountGroupMapping, + AccountCurrencyMapping, + AccountExchangeCurrencyMapping, + AccountIsAssetMapping, + AccountIsArchivedMapping, + ), + "currencies": ( + CurrencyCodeMapping, + CurrencyNameMapping, + CurrencyDecimalPlacesMapping, + CurrencyPrefixMapping, + CurrencySuffixMapping, + CurrencyExchangeMapping, + ), + "categories": ( + CategoryNameMapping, + CategoryMuteMapping, + CategoryActiveMapping, + ), + "tags": (TagNameMapping, TagActiveMapping), + "entities": (EntityNameMapping, EntityActiveMapping), + } + + allowed_types = allowed_mappings[import_type] + + for field_name, mapping in self.mapping.items(): + if not isinstance(mapping, allowed_types): + raise ValueError( + f"Mapping type '{type(mapping).__name__}' is not allowed when importing {import_type}. 
" + f"Allowed types are: {', '.join(t.__name__ for t in allowed_types)}" + ) + + return self diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py index 333eb6e..069115b 100644 --- a/app/apps/import_app/services/v1.py +++ b/app/apps/import_app/services/v1.py @@ -1,42 +1,56 @@ import csv import hashlib +import logging +import os import re from datetime import datetime -from typing import Dict, Any, Literal +from decimal import Decimal +from typing import Dict, Any, Literal, Union import yaml - from django.db import transaction -from django.core.files.storage import default_storage from django.utils import timezone +from apps.accounts.models import Account, AccountGroup +from apps.currencies.models import Currency from apps.import_app.models import ImportRun, ImportProfile -from apps.import_app.schemas import ( - SchemaV1, - ColumnMappingV1, - SettingsV1, - HashTransformationRuleV1, - CompareDeduplicationRuleV1, +from apps.import_app.schemas import version_1 +from apps.transactions.models import ( + Transaction, + TransactionCategory, + TransactionTag, + TransactionEntity, ) -from apps.transactions.models import Transaction + +logger = logging.getLogger(__name__) class ImportService: + TEMP_DIR = "/usr/src/app/temp" + def __init__(self, import_run: ImportRun): self.import_run: ImportRun = import_run self.profile: ImportProfile = import_run.profile - self.config: SchemaV1 = self._load_config() - self.settings: SettingsV1 = self.config.settings - self.deduplication: list[CompareDeduplicationRuleV1] = self.config.deduplication - self.mapping: Dict[str, ColumnMappingV1] = self.config.column_mapping + self.config: version_1.ImportProfileSchema = self._load_config() + self.settings: version_1.CSVImportSettings = self.config.settings + self.deduplication: list[version_1.CompareDeduplicationRule] = ( + self.config.deduplication + ) + self.mapping: Dict[str, version_1.ColumnMapping] = self.config.mapping - def _load_config(self) -> SchemaV1: - yaml_data = yaml.safe_load(self.profile.yaml_config) - - if self.profile.version == ImportProfile.Versions.VERSION_1: - return SchemaV1(**yaml_data) + # Ensure temp directory exists + os.makedirs(self.TEMP_DIR, exist_ok=True) - raise ValueError(f"Unsupported version: {self.profile.version}") + def _load_config(self) -> version_1.ImportProfileSchema: + yaml_data = yaml.safe_load(self.profile.yaml_config) + try: + config = version_1.ImportProfileSchema(**yaml_data) + except Exception as e: + self._log("error", f"Fatal error processing YAML config: {str(e)}") + self._update_status("FAILED") + raise e + else: + return config def _log(self, level: str, message: str, **kwargs) -> None: """Add a log entry to the import run logs""" @@ -53,6 +67,48 @@ def _log(self, level: str, message: str, **kwargs) -> None: self.import_run.logs += log_line self.import_run.save(update_fields=["logs"]) + def _update_totals( + self, + field: Literal["total", "processed", "successful", "skipped", "failed"], + value: int, + ) -> None: + if field == "total": + self.import_run.total_rows = value + self.import_run.save(update_fields=["total_rows"]) + elif field == "processed": + self.import_run.processed_rows = value + self.import_run.save(update_fields=["processed_rows"]) + elif field == "successful": + self.import_run.successful_rows = value + self.import_run.save(update_fields=["successful_rows"]) + elif field == "skipped": + self.import_run.skipped_rows = value + self.import_run.save(update_fields=["skipped_rows"]) + elif field == "failed": + 
self.import_run.failed_rows = value + self.import_run.save(update_fields=["failed_rows"]) + + def _increment_totals( + self, + field: Literal["total", "processed", "successful", "skipped", "failed"], + value: int, + ) -> None: + if field == "total": + self.import_run.total_rows = self.import_run.total_rows + value + self.import_run.save(update_fields=["total_rows"]) + elif field == "processed": + self.import_run.processed_rows = self.import_run.processed_rows + value + self.import_run.save(update_fields=["processed_rows"]) + elif field == "successful": + self.import_run.successful_rows = self.import_run.successful_rows + value + self.import_run.save(update_fields=["successful_rows"]) + elif field == "skipped": + self.import_run.skipped_rows = self.import_run.skipped_rows + value + self.import_run.save(update_fields=["skipped_rows"]) + elif field == "failed": + self.import_run.failed_rows = self.import_run.failed_rows + value + self.import_run.save(update_fields=["failed_rows"]) + def _update_status( self, new_status: Literal["PROCESSING", "FAILED", "FINISHED"] ) -> None: @@ -67,15 +123,12 @@ def _update_status( @staticmethod def _transform_value( - value: str, mapping: ColumnMappingV1, row: Dict[str, str] = None + value: str, mapping: version_1.ColumnMapping, row: Dict[str, str] = None ) -> Any: transformed = value for transform in mapping.transformations: if transform.type == "hash": - if not isinstance(transform, HashTransformationRuleV1): - continue - # Collect all values to be hashed values_to_hash = [] for field in transform.fields: @@ -88,47 +141,143 @@ def _transform_value( transformed = hashlib.sha256(concatenated.encode()).hexdigest() elif transform.type == "replace": - transformed = transformed.replace( - transform.pattern, transform.replacement - ) + if transform.exclusive: + transformed = value.replace( + transform.pattern, transform.replacement + ) + else: + transformed = transformed.replace( + transform.pattern, transform.replacement + ) elif transform.type == "regex": - transformed = re.sub( - transform.pattern, transform.replacement, transformed - ) + if transform.exclusive: + transformed = re.sub( + transform.pattern, transform.replacement, value + ) + else: + transformed = re.sub( + transform.pattern, transform.replacement, transformed + ) elif transform.type == "date_format": transformed = datetime.strptime( - transformed, transform.pattern - ).strftime(transform.replacement) + transformed, transform.original_format + ).strftime(transform.new_format) + elif transform.type == "merge": + values_to_merge = [] + for field in transform.fields: + if field in row: + values_to_merge.append(str(row[field])) + transformed = transform.separator.join(values_to_merge) + elif transform.type == "split": + parts = transformed.split(transform.separator) + if transform.index is not None: + transformed = parts[transform.index] if parts else "" + else: + transformed = parts return transformed - def _map_row_to_transaction(self, row: Dict[str, str]) -> Dict[str, Any]: - transaction_data = {} - - for field, mapping in self.mapping.items(): - # If source is None, use None as the initial value - value = row.get(mapping.source) if mapping.source else None - - # Use default_value if value is None - if value is None: - value = mapping.default_value - - if mapping.required and value is None and not mapping.transformations: - raise ValueError(f"Required field {field} is missing") - - # Apply transformations even if initial value is None - if mapping.transformations: - value = 
self._transform_value(value, mapping, row) - - if value is not None: - transaction_data[field] = value - - return transaction_data + def _create_transaction(self, data: Dict[str, Any]) -> Transaction: + tags = [] + entities = [] + # Handle related objects first + if "category" in data: + category_name = data.pop("category") + category, _ = TransactionCategory.objects.get_or_create(name=category_name) + data["category"] = category + self.import_run.categories.add(category) + + if "account" in data: + account_id = data.pop("account") + account = None + if isinstance(account_id, str): + account = Account.objects.get(name=account_id) + elif isinstance(account_id, int): + account = Account.objects.get(id=account_id) + data["account"] = account + # self.import_run.acc.add(category) + + if "tags" in data: + tag_names = data.pop("tags").split(",") + for tag_name in tag_names: + tag, _ = TransactionTag.objects.get_or_create(name=tag_name.strip()) + tags.append(tag) + self.import_run.tags.add(tag) + + if "entities" in data: + entity_names = data.pop("entities").split(",") + for entity_name in entity_names: + entity, _ = TransactionEntity.objects.get_or_create( + name=entity_name.strip() + ) + entities.append(entity) + self.import_run.entities.add(entity) + + if "amount" in data: + amount = data.pop("amount") + data["amount"] = abs(Decimal(amount)) + + # Create the transaction + new_transaction = Transaction.objects.create(**data) + self.import_run.transactions.add(new_transaction) + + # Add many-to-many relationships + if tags: + new_transaction.tags.set(tags) + if entities: + new_transaction.entities.set(entities) + + return new_transaction + + def _create_account(self, data: Dict[str, Any]) -> Account: + if "group" in data: + group_name = data.pop("group") + group, _ = AccountGroup.objects.get_or_create(name=group_name) + data["group"] = group + + # Handle currency references + if "currency" in data: + currency = Currency.objects.get(code=data["currency"]) + data["currency"] = currency + self.import_run.currencies.add(currency) + + if "exchange_currency" in data: + exchange_currency = Currency.objects.get(code=data["exchange_currency"]) + data["exchange_currency"] = exchange_currency + self.import_run.currencies.add(exchange_currency) + + return Account.objects.create(**data) + + def _create_currency(self, data: Dict[str, Any]) -> Currency: + # Handle exchange currency reference + if "exchange_currency" in data: + exchange_currency = Currency.objects.get(code=data["exchange_currency"]) + data["exchange_currency"] = exchange_currency + self.import_run.currencies.add(exchange_currency) + + currency = Currency.objects.create(**data) + self.import_run.currencies.add(currency) + return currency + + def _create_category(self, data: Dict[str, Any]) -> TransactionCategory: + category = TransactionCategory.objects.create(**data) + self.import_run.categories.add(category) + return category + + def _create_tag(self, data: Dict[str, Any]) -> TransactionTag: + tag = TransactionTag.objects.create(**data) + self.import_run.tags.add(tag) + return tag + + def _create_entity(self, data: Dict[str, Any]) -> TransactionEntity: + entity = TransactionEntity.objects.create(**data) + self.import_run.entities.add(entity) + return entity def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool: for rule in self.deduplication: if rule.type == "compare": - query = Transaction.objects.all() + query = Transaction.objects.all().values("id") # Build query conditions for each field in the rule for field, 
header in rule.fields.items(): @@ -146,65 +295,214 @@ def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool return False + def _coerce_type( + self, value: str, mapping: version_1.ColumnMapping + ) -> Union[str, int, bool, Decimal, datetime, list]: + if not value: + return None + + coerce_to = mapping.coerce_to + + if "|" in coerce_to: + types = coerce_to.split("|") + for t in types: + try: + return self._coerce_single_type(value, t, mapping) + except ValueError: + continue + raise ValueError( + f"Could not coerce '{value}' to any of the types: {coerce_to}" + ) + else: + return self._coerce_single_type(value, coerce_to, mapping) + + def _coerce_single_type( + self, value: str, coerce_to: str, mapping: version_1.ColumnMapping + ) -> Union[str, int, bool, Decimal, datetime.date, list]: + if coerce_to == "str": + return str(value) + elif coerce_to == "int": + if hasattr(mapping, "type") and mapping.type == "id": + return int(value) + elif hasattr(mapping, "type") and mapping.type in ["name", "code"]: + return str(value) + else: + return int(value) + elif coerce_to == "bool": + return value.lower() in ["true", "1", "yes", "y", "on"] + elif coerce_to == "positive_decimal": + return abs(Decimal(value)) + elif coerce_to == "date": + if isinstance( + mapping, + ( + version_1.TransactionDateMapping, + version_1.TransactionReferenceDateMapping, + ), + ): + formats = ( + mapping.format + if isinstance(mapping.format, list) + else [mapping.format] + ) + for fmt in formats: + try: + return datetime.strptime(value, fmt).date() + except ValueError: + continue + raise ValueError( + f"Could not parse date '{value}' with any of the provided formats" + ) + else: + raise ValueError( + "Date coercion is only supported for TransactionDateMapping and TransactionReferenceDateMapping" + ) + elif coerce_to == "list": + return ( + value + if isinstance(value, list) + else [item.strip() for item in value.split(",") if item.strip()] + ) + elif coerce_to == "transaction_type": + if isinstance(mapping, version_1.TransactionTypeMapping): + if mapping.detection_method == "sign": + return ( + Transaction.Type.EXPENSE + if value.startswith("-") + else Transaction.Type.INCOME + ) + elif mapping.detection_method == "always_income": + return Transaction.Type.INCOME + elif mapping.detection_method == "always_expense": + return Transaction.Type.EXPENSE + raise ValueError("Invalid transaction type detection method") + elif coerce_to == "is_paid": + if isinstance(mapping, version_1.TransactionIsPaidMapping): + if mapping.detection_method == "sign": + return not value.startswith("-") + elif mapping.detection_method == "boolean": + return value.lower() in ["true", "1", "yes", "y", "on"] + elif mapping.detection_method == "always_paid": + return True + elif mapping.detection_method == "always_unpaid": + return False + raise ValueError("Invalid is_paid detection method") + else: + raise ValueError(f"Unsupported coercion type: {coerce_to}") + + def _map_row(self, row: Dict[str, str]) -> Dict[str, Any]: + mapped_data = {} + + for field, mapping in self.mapping.items(): + # If source is None, use None as the initial value + value = row.get(mapping.source) if mapping.source else None + + # Use default_value if value is None + if value is None: + value = mapping.default + + if mapping.required and value is None and not mapping.transformations: + raise ValueError(f"Required field {field} is missing") + + # Apply transformations + if mapping.transformations: + value = self._transform_value(value, mapping, row) + + 
value = self._coerce_type(value, mapping) + + if value is not None: + # Remove the prefix from the target field + target = mapping.target + if self.settings.importing == "transactions": + mapped_data[target] = value + else: + # Remove the model prefix (e.g., "account_" from "account_name") + field_name = target.split("_", 1)[1] + mapped_data[field_name] = value + + return mapped_data + + def _process_row(self, row: Dict[str, str], row_number: int) -> None: + try: + mapped_data = self._map_row(row) + + if mapped_data: + # Handle different import types + if self.settings.importing == "transactions": + if self.deduplication and self._check_duplicate_transaction( + mapped_data + ): + self._increment_totals("skipped", 1) + self._log("info", f"Skipped duplicate row {row_number}") + return + self._create_transaction(mapped_data) + elif self.settings.importing == "accounts": + self._create_account(mapped_data) + elif self.settings.importing == "currencies": + self._create_currency(mapped_data) + elif self.settings.importing == "categories": + self._create_category(mapped_data) + elif self.settings.importing == "tags": + self._create_tag(mapped_data) + elif self.settings.importing == "entities": + self._create_entity(mapped_data) + + self._increment_totals("successful", value=1) + self._log("info", f"Successfully processed row {row_number}") + + self._increment_totals("processed", value=1) + + except Exception as e: + if not self.settings.skip_errors: + self._log("error", f"Fatal error processing row {row_number}: {str(e)}") + self._update_status("FAILED") + raise + else: + self._log("warning", f"Error processing row {row_number}: {str(e)}") + self._increment_totals("failed", value=1) + + logger.error(f"Fatal error processing row {row_number}", exc_info=e) + def _process_csv(self, file_path): + # First pass: count rows with open(file_path, "r", encoding=self.settings.encoding) as csv_file: - reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + # Skip specified number of rows + for _ in range(self.settings.skip_lines): + next(csv_file) - # Count total rows - self.import_run.total_rows = sum(1 for _ in reader) - csv_file.seek(0) reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + self._update_totals("total", value=sum(1 for _ in reader)) - self._log("info", f"Starting import with {self.import_run.total_rows} rows") - + with open(file_path, "r", encoding=self.settings.encoding) as csv_file: # Skip specified number of rows - for _ in range(self.settings.skip_rows): - next(reader) + for _ in range(self.settings.skip_lines): + next(csv_file) + if self.settings.skip_lines: + self._log("info", f"Skipped {self.settings.skip_lines} initial lines") - if self.settings.skip_rows: - self._log("info", f"Skipped {self.settings.skip_rows} initial rows") + reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) - for row_number, row in enumerate(reader, start=1): - try: - transaction_data = self._map_row_to_transaction(row) - - if transaction_data: - if self.deduplication and self._check_duplicate_transaction( - transaction_data - ): - self.import_run.skipped_rows += 1 - self._log("info", f"Skipped duplicate row {row_number}") - continue - - self.import_run.transactions.add(transaction_data) - self.import_run.successful_rows += 1 - self._log("debug", f"Successfully processed row {row_number}") - - self.import_run.processed_rows += 1 - self.import_run.save( - update_fields=[ - "processed_rows", - "successful_rows", - "skipped_rows", - ] - ) + self._log("info", 
f"Starting import with {self.import_run.total_rows} rows") - except Exception as e: - if not self.settings.skip_errors: - self._log( - "error", - f"Fatal error processing row {row_number}: {str(e)}", - ) - self._update_status("FAILED") - raise - else: - self._log( - "warning", f"Error processing row {row_number}: {str(e)}" - ) - self.import_run.failed_rows += 1 - self.import_run.save(update_fields=["failed_rows"]) + with transaction.atomic(): + for row_number, row in enumerate(reader, start=1): + self._process_row(row, row_number) + self._increment_totals("processed", value=1) + + def _validate_file_path(self, file_path: str) -> str: + """ + Validates that the file path is within the allowed temporary directory. + Returns the absolute path. + """ + abs_path = os.path.abspath(file_path) + if not abs_path.startswith(self.TEMP_DIR): + raise ValueError(f"Invalid file path. File must be in {self.TEMP_DIR}") + return abs_path def process_file(self, file_path: str): + # Validate and get absolute path + file_path = self._validate_file_path(file_path) + self._update_status("PROCESSING") self.import_run.started_at = timezone.now() self.import_run.save(update_fields=["started_at"]) @@ -232,6 +530,12 @@ def process_file(self, file_path: str): finally: self._log("info", "Cleaning up temporary files") - default_storage.delete(file_path) + try: + if os.path.exists(file_path): + os.remove(file_path) + self._log("info", f"Deleted temporary file: {file_path}") + except OSError as e: + self._log("warning", f"Failed to delete temporary file: {str(e)}") + self.import_run.finished_at = timezone.now() self.import_run.save(update_fields=["finished_at"]) From a94e0b4904fbd2d6f8cbc54a6939a816469783e5 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 11:45:06 -0300 Subject: [PATCH 04/45] docs(requirements): add django_ace --- app/WYGIWYH/settings.py | 1 + requirements.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index 8243c91..e219d6f 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -70,6 +70,7 @@ "rest_framework", "drf_spectacular", "django_cotton", + "django_ace", "apps.rules.apps.RulesConfig", "apps.calendar_view.apps.CalendarViewConfig", "apps.dca.apps.DcaConfig", diff --git a/requirements.txt b/requirements.txt index af9d39b..8c24038 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,6 +9,7 @@ django-filter==24.3 django-debug-toolbar==4.3.0 django-cachalot~=2.6.3 django-cotton~=1.2.1 +django_ace~=1.36.2 djangorestframework~=3.15.2 drf-spectacular~=0.27.2 From 238f205513344f50f5d7568a810597a91d4f6922 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 11:47:33 -0300 Subject: [PATCH 05/45] docker: add temp volume --- docker-compose.dev.yml | 3 ++- docker-compose.prod.yml | 7 +++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index c06c0fd..133d522 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,6 +1,6 @@ volumes: wygiwyh_dev_postgres_data: {} - temp: + wygiwyh_temp: services: web: &django @@ -13,6 +13,7 @@ services: volumes: - ./app/:/usr/src/app/:z - ./frontend/:/usr/src/frontend:z + - wygiwyh_temp:/usr/src/app/temp/ ports: - "${OUTBOUND_PORT}:8000" env_file: diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index a12b4ed..b840e46 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -9,6 +9,8 @@ services: - .env depends_on: - db + volumes: + - 
wygiwyh_temp:/usr/src/app/temp/ restart: unless-stopped db: @@ -29,5 +31,10 @@ services: - db env_file: - .env + volumes: + - wygiwyh_temp:/usr/src/app/temp/ command: /start-procrastinate restart: unless-stopped + +volumes: + wygiwyh_temp: From 3ccb0e19eb3070a47b760e2a297a666d9558178f Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 13:55:17 -0300 Subject: [PATCH 06/45] feat(transactions): soft delete --- app/apps/transactions/admin.py | 18 +++++ .../0028_transaction_internal_note.py | 18 +++++ .../0029_alter_transaction_options.py | 17 ++++ ...nsaction_deleted_transaction_deleted_at.py | 23 ++++++ .../0031_alter_transaction_deleted.py | 18 +++++ ...ction_created_at_transaction_updated_at.py | 25 ++++++ app/apps/transactions/models.py | 77 +++++++++++++++++++ 7 files changed, 196 insertions(+) create mode 100644 app/apps/transactions/migrations/0028_transaction_internal_note.py create mode 100644 app/apps/transactions/migrations/0029_alter_transaction_options.py create mode 100644 app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py create mode 100644 app/apps/transactions/migrations/0031_alter_transaction_deleted.py create mode 100644 app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py diff --git a/app/apps/transactions/admin.py b/app/apps/transactions/admin.py index 5a4ef15..df4d1c8 100644 --- a/app/apps/transactions/admin.py +++ b/app/apps/transactions/admin.py @@ -12,7 +12,14 @@ @admin.register(Transaction) class TransactionModelAdmin(admin.ModelAdmin): + def get_queryset(self, request): + # Use the all_objects manager to show all transactions, including deleted ones + return self.model.all_objects.all() + + list_filter = ["deleted", "type", "is_paid", "date", "account"] + list_display = [ + "deleted", "description", "type", "account__name", @@ -22,6 +29,17 @@ class TransactionModelAdmin(admin.ModelAdmin): "reference_date", ] + actions = ["hard_delete_selected"] + + def hard_delete_selected(self, request, queryset): + for obj in queryset: + obj.hard_delete() + self.message_user( + request, f"Successfully hard deleted {queryset.count()} transactions." 
+ ) + + hard_delete_selected.short_description = "Hard delete selected transactions" + class TransactionInline(admin.TabularInline): model = Transaction diff --git a/app/apps/transactions/migrations/0028_transaction_internal_note.py b/app/apps/transactions/migrations/0028_transaction_internal_note.py new file mode 100644 index 0000000..c88c11d --- /dev/null +++ b/app/apps/transactions/migrations/0028_transaction_internal_note.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-01-19 00:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0027_alter_transaction_description'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='internal_note', + field=models.TextField(blank=True, verbose_name='Internal Note'), + ), + ] diff --git a/app/apps/transactions/migrations/0029_alter_transaction_options.py b/app/apps/transactions/migrations/0029_alter_transaction_options.py new file mode 100644 index 0000000..c06b7cd --- /dev/null +++ b/app/apps/transactions/migrations/0029_alter_transaction_options.py @@ -0,0 +1,17 @@ +# Generated by Django 5.1.5 on 2025-01-19 14:59 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0028_transaction_internal_note'), + ] + + operations = [ + migrations.AlterModelOptions( + name='transaction', + options={'default_manager_name': 'objects', 'verbose_name': 'Transaction', 'verbose_name_plural': 'Transactions'}, + ), + ] diff --git a/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py b/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py new file mode 100644 index 0000000..35f4c91 --- /dev/null +++ b/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.5 on 2025-01-19 14:59 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0029_alter_transaction_options'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='deleted', + field=models.BooleanField(default=False, verbose_name='Deleted'), + ), + migrations.AddField( + model_name='transaction', + name='deleted_at', + field=models.DateTimeField(blank=True, null=True, verbose_name='Deleted At'), + ), + ] diff --git a/app/apps/transactions/migrations/0031_alter_transaction_deleted.py b/app/apps/transactions/migrations/0031_alter_transaction_deleted.py new file mode 100644 index 0000000..b5d2dc4 --- /dev/null +++ b/app/apps/transactions/migrations/0031_alter_transaction_deleted.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-01-19 15:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0030_transaction_deleted_transaction_deleted_at'), + ] + + operations = [ + migrations.AlterField( + model_name='transaction', + name='deleted', + field=models.BooleanField(db_index=True, default=False, verbose_name='Deleted'), + ), + ] diff --git a/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py b/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py new file mode 100644 index 0000000..46e76ae --- /dev/null +++ b/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py @@ -0,0 +1,25 @@ +# Generated by Django 5.1.5 on 2025-01-19 16:48 + 
+import django.utils.timezone +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0031_alter_transaction_deleted'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='created_at', + field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), + preserve_default=False, + ), + migrations.AddField( + model_name='transaction', + name='updated_at', + field=models.DateTimeField(auto_now=True), + ), + ] diff --git a/app/apps/transactions/models.py b/app/apps/transactions/models.py index f131518..2bd2a68 100644 --- a/app/apps/transactions/models.py +++ b/app/apps/transactions/models.py @@ -6,6 +6,7 @@ from django.db.models import Q from django.utils import timezone from django.utils.translation import gettext_lazy as _ +from django.conf import settings from apps.common.fields.month_year import MonthYearModelField from apps.common.functions.decimals import truncate_decimal @@ -15,6 +16,53 @@ logger = logging.getLogger() +class SoftDeleteQuerySet(models.QuerySet): + def delete(self): + if not settings.ENABLE_SOFT_DELETION: + # If soft deletion is disabled, perform a normal delete + return super().delete() + + # Separate the queryset into already deleted and not deleted objects + already_deleted = self.filter(deleted=True) + not_deleted = self.filter(deleted=False) + + # Use a transaction to ensure atomicity + with transaction.atomic(): + # Perform hard delete on already deleted objects + hard_deleted_count = already_deleted._raw_delete(already_deleted.db) + + # Perform soft delete on not deleted objects + soft_deleted_count = not_deleted.update( + deleted=True, deleted_at=timezone.now() + ) + + # Return a tuple of counts as expected by Django's delete method + return ( + hard_deleted_count + soft_deleted_count, + {"Transaction": hard_deleted_count + soft_deleted_count}, + ) + + def hard_delete(self): + return super().delete() + + +class SoftDeleteManager(models.Manager): + def get_queryset(self): + qs = SoftDeleteQuerySet(self.model, using=self._db) + return qs if not settings.ENABLE_SOFT_DELETION else qs.filter(deleted=False) + + +class AllObjectsManager(models.Manager): + def get_queryset(self): + return SoftDeleteQuerySet(self.model, using=self._db) + + +class DeletedObjectsManager(models.Manager): + def get_queryset(self): + qs = SoftDeleteQuerySet(self.model, using=self._db) + return qs if not settings.ENABLE_SOFT_DELETION else qs.filter(deleted=True) + + class TransactionCategory(models.Model): name = models.CharField(max_length=255, verbose_name=_("Name"), unique=True) mute = models.BooleanField(default=False, verbose_name=_("Mute")) @@ -143,10 +191,24 @@ class Type(models.TextChoices): ) internal_note = models.TextField(blank=True, verbose_name=_("Internal Note")) + deleted = models.BooleanField( + default=False, verbose_name=_("Deleted"), db_index=True + ) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + deleted_at = models.DateTimeField( + null=True, blank=True, verbose_name=_("Deleted At") + ) + + objects = SoftDeleteManager.from_queryset(SoftDeleteQuerySet)() + all_objects = AllObjectsManager.from_queryset(SoftDeleteQuerySet)() + deleted_objects = DeletedObjectsManager.from_queryset(SoftDeleteQuerySet)() + class Meta: verbose_name = _("Transaction") verbose_name_plural = _("Transactions") db_table = "transactions" + default_manager_name = "objects" def save(self, *args, **kwargs): self.amount = 
truncate_decimal( @@ -161,6 +223,17 @@ def save(self, *args, **kwargs): self.full_clean() super().save(*args, **kwargs) + def delete(self, *args, **kwargs): + if settings.ENABLE_SOFT_DELETION: + self.deleted = True + self.deleted_at = timezone.now() + self.save() + else: + super().delete(*args, **kwargs) + + def hard_delete(self, *args, **kwargs): + super().delete(*args, **kwargs) + def exchanged_amount(self): if self.account.exchange_currency: converted_amount, prefix, suffix, decimal_places = convert( @@ -179,6 +252,10 @@ def exchanged_amount(self): return None + def __str__(self): + type_display = self.get_type_display() + return f"{self.description} - {type_display} - {self.account} - {self.date}" + class InstallmentPlan(models.Model): class Recurrence(models.TextChoices): From f96d8d286298902791263d15973b5699c261596d Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 13:55:25 -0300 Subject: [PATCH 07/45] feat(transactions): soft delete --- app/WYGIWYH/settings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index e219d6f..155408d 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -336,3 +336,5 @@ } CACHALOT_UNCACHABLE_TABLES = ("django_migrations", "procrastinate_jobs") + +ENABLE_SOFT_DELETION = os.environ.get("ENABLE_SOFT_DELETION", "False").lower() == "true" From 2d8864773ce13d6cc18abd3a73e89a88c9fe2d03 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 13:56:13 -0300 Subject: [PATCH 08/45] feat(import): disable cache when running --- app/apps/import_app/services.py | 0 app/apps/import_app/services/v1.py | 84 ++++++++++++++++-------------- 2 files changed, 44 insertions(+), 40 deletions(-) delete mode 100644 app/apps/import_app/services.py diff --git a/app/apps/import_app/services.py b/app/apps/import_app/services.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py index 069115b..7735342 100644 --- a/app/apps/import_app/services/v1.py +++ b/app/apps/import_app/services/v1.py @@ -7,8 +7,9 @@ from decimal import Decimal from typing import Dict, Any, Literal, Union +import cachalot.api import yaml -from django.db import transaction +from cachalot.api import cachalot_disabled from django.utils import timezone from apps.accounts.models import Account, AccountGroup @@ -277,7 +278,7 @@ def _create_entity(self, data: Dict[str, Any]) -> TransactionEntity: def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool: for rule in self.deduplication: if rule.type == "compare": - query = Transaction.objects.all().values("id") + query = Transaction.all_objects.all().values("id") # Build query conditions for each field in the rule for field, header in rule.fields.items(): @@ -484,10 +485,9 @@ def _process_csv(self, file_path): self._log("info", f"Starting import with {self.import_run.total_rows} rows") - with transaction.atomic(): - for row_number, row in enumerate(reader, start=1): - self._process_row(row, row_number) - self._increment_totals("processed", value=1) + for row_number, row in enumerate(reader, start=1): + self._process_row(row, row_number) + self._increment_totals("processed", value=1) def _validate_file_path(self, file_path: str) -> str: """ @@ -500,42 +500,46 @@ def _validate_file_path(self, file_path: str) -> str: return abs_path def process_file(self, file_path: str): - # Validate and get absolute path - file_path = self._validate_file_path(file_path) + with 
cachalot_disabled(): + # Validate and get absolute path + file_path = self._validate_file_path(file_path) - self._update_status("PROCESSING") - self.import_run.started_at = timezone.now() - self.import_run.save(update_fields=["started_at"]) + self._update_status("PROCESSING") + self.import_run.started_at = timezone.now() + self.import_run.save(update_fields=["started_at"]) - self._log("info", "Starting import process") + self._log("info", "Starting import process") - try: - if self.settings.file_type == "csv": - self._process_csv(file_path) - - if self.import_run.processed_rows == self.import_run.total_rows: - self._update_status("FINISHED") - self._log( - "info", - f"Import completed successfully. " - f"Successful: {self.import_run.successful_rows}, " - f"Failed: {self.import_run.failed_rows}, " - f"Skipped: {self.import_run.skipped_rows}", - ) + try: + if self.settings.file_type == "csv": + self._process_csv(file_path) + + if self.import_run.processed_rows == self.import_run.total_rows: + self._update_status("FINISHED") + self._log( + "info", + f"Import completed successfully. " + f"Successful: {self.import_run.successful_rows}, " + f"Failed: {self.import_run.failed_rows}, " + f"Skipped: {self.import_run.skipped_rows}", + ) - except Exception as e: - self._update_status("FAILED") - self._log("error", f"Import failed: {str(e)}") - raise Exception("Import failed") + except Exception as e: + self._update_status("FAILED") + self._log("error", f"Import failed: {str(e)}") + raise Exception("Import failed") - finally: - self._log("info", "Cleaning up temporary files") - try: - if os.path.exists(file_path): - os.remove(file_path) - self._log("info", f"Deleted temporary file: {file_path}") - except OSError as e: - self._log("warning", f"Failed to delete temporary file: {str(e)}") - - self.import_run.finished_at = timezone.now() - self.import_run.save(update_fields=["finished_at"]) + finally: + self._log("info", "Cleaning up temporary files") + try: + if os.path.exists(file_path): + os.remove(file_path) + self._log("info", f"Deleted temporary file: {file_path}") + except OSError as e: + self._log("warning", f"Failed to delete temporary file: {str(e)}") + + self.import_run.finished_at = timezone.now() + self.import_run.save(update_fields=["finished_at"]) + + if self.import_run.successful_rows >= 1: + cachalot.api.invalidate() From ba0c54767c405806f93f1a0eb7526948529435cd Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 13:56:29 -0300 Subject: [PATCH 09/45] feat(import): add migrations --- .../import_app/migrations/0001_initial.py | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 app/apps/import_app/migrations/0001_initial.py diff --git a/app/apps/import_app/migrations/0001_initial.py b/app/apps/import_app/migrations/0001_initial.py new file mode 100644 index 0000000..bcce0fe --- /dev/null +++ b/app/apps/import_app/migrations/0001_initial.py @@ -0,0 +1,51 @@ +# Generated by Django 5.1.5 on 2025-01-19 00:44 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('currencies', '0006_currency_exchange_currency'), + ('transactions', '0028_transaction_internal_note'), + ] + + operations = [ + migrations.CreateModel( + name='ImportProfile', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=100)), + ('yaml_config', models.TextField(help_text='YAML 
configuration')), + ('version', models.IntegerField(choices=[(1, 'Version 1')], default=1, verbose_name='Version')), + ], + options={ + 'ordering': ['name'], + }, + ), + migrations.CreateModel( + name='ImportRun', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('status', models.CharField(choices=[('QUEUED', 'Queued'), ('PROCESSING', 'Processing'), ('FAILED', 'Failed'), ('FINISHED', 'Finished')], default='QUEUED', max_length=10, verbose_name='Status')), + ('file_name', models.CharField(help_text='File name', max_length=10000)), + ('logs', models.TextField(blank=True)), + ('processed_rows', models.IntegerField(default=0)), + ('total_rows', models.IntegerField(default=0)), + ('successful_rows', models.IntegerField(default=0)), + ('skipped_rows', models.IntegerField(default=0)), + ('failed_rows', models.IntegerField(default=0)), + ('started_at', models.DateTimeField(null=True)), + ('finished_at', models.DateTimeField(null=True)), + ('categories', models.ManyToManyField(related_name='import_runs', to='transactions.transactioncategory')), + ('currencies', models.ManyToManyField(related_name='import_runs', to='currencies.currency')), + ('entities', models.ManyToManyField(related_name='import_runs', to='transactions.transactionentity')), + ('profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='import_app.importprofile')), + ('tags', models.ManyToManyField(related_name='import_runs', to='transactions.transactiontag')), + ('transactions', models.ManyToManyField(related_name='import_runs', to='transactions.transaction')), + ], + ), + ] From 3ef6b0ac5ce1702394ad23eddde17cf63e0d7129 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 15:16:47 -0300 Subject: [PATCH 10/45] feat(settings): add KEEP_DELETED_TRANSACTIONS_FOR variable --- app/WYGIWYH/settings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index 155408d..83950f2 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -337,4 +337,5 @@ CACHALOT_UNCACHABLE_TABLES = ("django_migrations", "procrastinate_jobs") -ENABLE_SOFT_DELETION = os.environ.get("ENABLE_SOFT_DELETION", "False").lower() == "true" +ENABLE_SOFT_DELETION = os.getenv("ENABLE_SOFT_DELETION", "True").lower() == "true" +KEEP_DELETED_TRANSACTIONS_FOR = int(os.getenv("KEEP_DELETED_ENTRIES_FOR", "365")) From ae91c5196795f9956685bf61819022146f71a2ca Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 15:17:18 -0300 Subject: [PATCH 11/45] feat(transactions:tasks): add old deleted transactions cleanup task --- app/apps/transactions/tasks.py | 39 ++++++++++++++++++++++++++++++++-- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/app/apps/transactions/tasks.py b/app/apps/transactions/tasks.py index e0bfafc..5f1c42f 100644 --- a/app/apps/transactions/tasks.py +++ b/app/apps/transactions/tasks.py @@ -1,9 +1,13 @@ import logging +from datetime import timedelta -from procrastinate.contrib.django import app +from cachalot.api import cachalot_disabled, invalidate +from django.utils import timezone +from django.conf import settings -from apps.transactions.models import RecurringTransaction +from procrastinate.contrib.django import app +from apps.transactions.models import RecurringTransaction, Transaction logger = logging.getLogger(__name__) @@ -19,3 +23,34 @@ def generate_recurring_transactions(timestamp=None): exc_info=True, ) raise e + + +@app.periodic(cron="10 1 * * 
*") +@app.task +def cleanup_deleted_transactions(): + with cachalot_disabled(): + if ( + settings.ENABLE_SOFT_DELETION + and settings.KEEP_DELETED_TRANSACTIONS_FOR == 0 + ): + return "KEEP_DELETED_TRANSACTIONS_FOR is 0, no cleanup performed." + + if not settings.ENABLE_SOFT_DELETION: + # Hard delete all soft-deleted transactions + deleted_count, _ = Transaction.deleted_objects.all().hard_delete() + return ( + f"Hard deleted {deleted_count} transactions (soft deletion disabled)." + ) + + # Calculate the cutoff date + cutoff_date = timezone.now() - timedelta( + days=settings.KEEP_DELETED_TRANSACTIONS_FOR + ) + + invalidate("transactions.Transaction") + + # Hard delete soft-deleted transactions older than the cutoff date + old_transactions = Transaction.deleted_objects.filter(deleted_at__lt=cutoff_date) + deleted_count, _ = old_transactions.hard_delete() + + return f"Hard deleted {deleted_count} objects older than {settings.KEEP_DELETED_TRANSACTIONS_FOR} days." From e73e1dfc2592d09724061b2363e39576bd5335c3 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 15:20:25 -0300 Subject: [PATCH 12/45] feat(import:v1:schema): add option for triggering rules --- app/apps/import_app/schemas/v1.py | 1 + app/apps/import_app/services/v1.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py index 043f2a9..74e37a1 100644 --- a/app/apps/import_app/schemas/v1.py +++ b/app/apps/import_app/schemas/v1.py @@ -69,6 +69,7 @@ class CSVImportSettings(BaseModel): skip_lines: int = Field( default=0, description="Number of rows to skip at the beginning of the file" ) + trigger_transaction_rules: bool = True importing: Literal[ "transactions", "accounts", "currencies", "categories", "tags", "entities" ] diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py index 7735342..0416caf 100644 --- a/app/apps/import_app/services/v1.py +++ b/app/apps/import_app/services/v1.py @@ -22,6 +22,7 @@ TransactionTag, TransactionEntity, ) +from apps.rules.signals import transaction_created logger = logging.getLogger(__name__) @@ -228,6 +229,9 @@ def _create_transaction(self, data: Dict[str, Any]) -> Transaction: if entities: new_transaction.entities.set(entities) + if self.settings.trigger_transaction_rules: + transaction_created.send(sender=new_transaction) + return new_transaction def _create_account(self, data: Dict[str, Any]) -> Account: From 8db13b082b17518c926f55efa2b69af36177055d Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 14:30:17 -0300 Subject: [PATCH 13/45] feat(import): some layouts --- app/apps/import_app/forms.py | 58 +++++++++++++++++ .../import_app/fragments/profiles/add.html | 11 ++++ .../import_app/fragments/profiles/edit.html | 11 ++++ .../import_app/fragments/profiles/list.html | 65 +++++++++++++++++++ .../import_app/fragments/runs/add.html | 11 ++++ .../import_app/fragments/runs/list.html | 9 +++ 6 files changed, 165 insertions(+) create mode 100644 app/apps/import_app/forms.py create mode 100644 app/templates/import_app/fragments/profiles/add.html create mode 100644 app/templates/import_app/fragments/profiles/edit.html create mode 100644 app/templates/import_app/fragments/profiles/list.html create mode 100644 app/templates/import_app/fragments/runs/add.html create mode 100644 app/templates/import_app/fragments/runs/list.html diff --git a/app/apps/import_app/forms.py b/app/apps/import_app/forms.py new file mode 100644 index 0000000..78ee3d7 --- /dev/null +++ 
b/app/apps/import_app/forms.py @@ -0,0 +1,58 @@ +from crispy_forms.bootstrap import FormActions +from crispy_forms.helper import FormHelper +from crispy_forms.layout import ( + Layout, +) +from django import forms +from django.utils.translation import gettext_lazy as _ +from django_ace import AceWidget + +from apps.import_app.models import ImportProfile +from apps.common.widgets.crispy.submit import NoClassSubmit + + +class ImportProfileForm(forms.ModelForm): + class Meta: + model = ImportProfile + fields = [ + "name", + "version", + "yaml_config", + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.helper = FormHelper() + self.helper.form_tag = False + self.helper.form_method = "post" + self.helper.layout = Layout("name", "version", "yaml_config") + + if self.instance and self.instance.pk: + self.helper.layout.append( + FormActions( + NoClassSubmit( + "submit", _("Update"), css_class="btn btn-outline-primary w-100" + ), + ), + ) + else: + self.helper.layout.append( + FormActions( + NoClassSubmit( + "submit", _("Add"), css_class="btn btn-outline-primary w-100" + ), + ), + ) + + +class ImportRunFileUploadForm(forms.Form): + file = forms.FileField(label=_("Select a file")) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.helper = FormHelper() + self.helper.form_tag = False + self.helper.form_method = "post" + self.helper.layout = Layout("file") diff --git a/app/templates/import_app/fragments/profiles/add.html b/app/templates/import_app/fragments/profiles/add.html new file mode 100644 index 0000000..beda873 --- /dev/null +++ b/app/templates/import_app/fragments/profiles/add.html @@ -0,0 +1,11 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Add new import profile' %}{% endblock %} + +{% block body %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/profiles/edit.html b/app/templates/import_app/fragments/profiles/edit.html new file mode 100644 index 0000000..fa94bef --- /dev/null +++ b/app/templates/import_app/fragments/profiles/edit.html @@ -0,0 +1,11 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Edit import profile' %}{% endblock %} + +{% block body %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/profiles/list.html b/app/templates/import_app/fragments/profiles/list.html new file mode 100644 index 0000000..f1f34d2 --- /dev/null +++ b/app/templates/import_app/fragments/profiles/list.html @@ -0,0 +1,65 @@ +{% load i18n %} +
+
+ {% spaceless %} +
{% translate 'Import Profiles' %} + + +
+ {% endspaceless %} +
+ +
+
+ {% if profiles %} + + + + + + + + + + + {% for profile in profiles %} + + + + + + {% endfor %} + +
{% translate 'Name' %}{% translate 'Version' %}
+
+ + +{# #} +{#
#} +
{{ profile.name }}{{ profile.get_version_display }}
+ {% else %} + + {% endif %} +
+
+
diff --git a/app/templates/import_app/fragments/runs/add.html b/app/templates/import_app/fragments/runs/add.html new file mode 100644 index 0000000..d5a5b89 --- /dev/null +++ b/app/templates/import_app/fragments/runs/add.html @@ -0,0 +1,11 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Import file' %}{% endblock %} + +{% block body %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/runs/list.html b/app/templates/import_app/fragments/runs/list.html new file mode 100644 index 0000000..0697d26 --- /dev/null +++ b/app/templates/import_app/fragments/runs/list.html @@ -0,0 +1,9 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Runs for ' %}{{ profile.name }}{% endblock %} + +{% block body %} + +{% endblock %} From 4cc32e3f579a82ae1f11bbe4255b9a8289968729 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 14:30:40 -0300 Subject: [PATCH 14/45] feat(import): test yaml_config before saving --- app/apps/import_app/models.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/app/apps/import_app/models.py b/app/apps/import_app/models.py index aca04e3..b489c43 100644 --- a/app/apps/import_app/models.py +++ b/app/apps/import_app/models.py @@ -1,13 +1,18 @@ +import yaml + +from django.core.exceptions import ValidationError from django.db import models from django.utils.translation import gettext_lazy as _ +from apps.import_app.schemas import version_1 + class ImportProfile(models.Model): class Versions(models.IntegerChoices): VERSION_1 = 1, _("Version 1") - name = models.CharField(max_length=100) - yaml_config = models.TextField(help_text=_("YAML configuration")) + name = models.CharField(max_length=100, verbose_name=_("Name")) + yaml_config = models.TextField(verbose_name=_("YAML Configuration")) version = models.IntegerField( choices=Versions, default=Versions.VERSION_1, @@ -20,6 +25,14 @@ def __str__(self): class Meta: ordering = ["name"] + def clean(self): + if self.version and self.version == self.Versions.VERSION_1: + try: + yaml_data = yaml.safe_load(self.yaml_config) + version_1.ImportProfileSchema(**yaml_data) + except Exception as e: + raise ValidationError({"yaml_config": _("Invalid YAML Configuration")}) + class ImportRun(models.Model): class Status(models.TextChoices): From b9810ce06296035e58eaeaee3cb51286e6c0addd Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 14:30:59 -0300 Subject: [PATCH 15/45] feat(import): some layouts --- app/templates/import_app/pages/profiles_index.html | 8 ++++++++ app/templates/import_app/pages/runs_index.html | 8 ++++++++ 2 files changed, 16 insertions(+) create mode 100644 app/templates/import_app/pages/profiles_index.html create mode 100644 app/templates/import_app/pages/runs_index.html diff --git a/app/templates/import_app/pages/profiles_index.html b/app/templates/import_app/pages/profiles_index.html new file mode 100644 index 0000000..a5c59ee --- /dev/null +++ b/app/templates/import_app/pages/profiles_index.html @@ -0,0 +1,8 @@ +{% extends "layouts/base.html" %} +{% load i18n %} + +{% block title %}{% translate 'Import Profiles' %}{% endblock %} + +{% block content %} +
+{% endblock %} diff --git a/app/templates/import_app/pages/runs_index.html b/app/templates/import_app/pages/runs_index.html new file mode 100644 index 0000000..38a48a6 --- /dev/null +++ b/app/templates/import_app/pages/runs_index.html @@ -0,0 +1,8 @@ +{% extends "layouts/base.html" %} +{% load i18n %} + +{% block title %}{% translate 'Import Runs' %}{% endblock %} + +{% block content %} +
+{% endblock %} From 0fccdbe573c057a917688457c3cc25a056a8e59f Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 14:31:12 -0300 Subject: [PATCH 16/45] feat(import): some views and urls --- app/apps/import_app/tasks.py | 2 +- app/apps/import_app/urls.py | 37 +++++++- app/apps/import_app/views.py | 168 +++++++++++++++++++++++++++++++---- 3 files changed, 186 insertions(+), 21 deletions(-) diff --git a/app/apps/import_app/tasks.py b/app/apps/import_app/tasks.py index 25efcbc..cf6f3a7 100644 --- a/app/apps/import_app/tasks.py +++ b/app/apps/import_app/tasks.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -@app.task(queue="imports") +@app.task def process_import(import_run_id: int, file_path: str): try: import_run = ImportRun.objects.get(id=import_run_id) diff --git a/app/apps/import_app/urls.py b/app/apps/import_app/urls.py index aea8670..c2608a3 100644 --- a/app/apps/import_app/urls.py +++ b/app/apps/import_app/urls.py @@ -2,5 +2,40 @@ import apps.import_app.views as views urlpatterns = [ - path("import/", views.ImportRunCreateView.as_view(), name="import"), + path("import/", views.import_view, name="import"), + path( + "import/profiles/", + views.import_profile_index, + name="import_profiles_index", + ), + path( + "import/profiles/list/", + views.import_profile_list, + name="import_profiles_list", + ), + path( + "import/profiles/add/", + views.import_profile_add, + name="import_profiles_add", + ), + path( + "import/profiles//edit/", + views.import_profile_edit, + name="import_profile_edit", + ), + path( + "import/profiles//runs/", + views.import_run_add, + name="import_profile_runs_index", + ), + path( + "import/profiles//runs/list/", + views.import_run_add, + name="import_profile_runs_list", + ), + path( + "import/profiles//runs/add/", + views.import_run_add, + name="import_run_add", + ), ] diff --git a/app/apps/import_app/views.py b/app/apps/import_app/views.py index d5b1d94..ce65a2b 100644 --- a/app/apps/import_app/views.py +++ b/app/apps/import_app/views.py @@ -1,26 +1,156 @@ -from django.views.generic import CreateView -from apps.import_app.models import ImportRun -from apps.import_app.services import ImportServiceV1 +import shutil +from django.contrib import messages +from django.contrib.auth.decorators import login_required +from django.core.files.storage import FileSystemStorage +from django.http import HttpResponse +from django.shortcuts import render, get_object_or_404 +from django.views.decorators.http import require_http_methods +from django.utils.translation import gettext_lazy as _ -class ImportRunCreateView(CreateView): - model = ImportRun - fields = ["profile"] +from apps.common.decorators.htmx import only_htmx +from apps.import_app.forms import ImportRunFileUploadForm, ImportProfileForm +from apps.import_app.models import ImportRun, ImportProfile +from apps.import_app.tasks import process_import - def form_valid(self, form): - response = super().form_valid(form) - import_run = form.instance - file = self.request.FILES["file"] +def import_view(request): + import_profile = ImportProfile.objects.get(id=2) + shutil.copyfile( + "/usr/src/app/apps/import_app/teste2.csv", "/usr/src/app/temp/teste2.csv" + ) + ir = ImportRun.objects.create(profile=import_profile, file_name="teste.csv") + process_import.defer( + import_run_id=ir.id, + file_path="/usr/src/app/temp/teste2.csv", + ) + return HttpResponse("Hello, world. 
You're at the polls page.") - # Save uploaded file temporarily - temp_file_path = f"/tmp/import_{import_run.id}.csv" - with open(temp_file_path, "wb+") as destination: - for chunk in file.chunks(): - destination.write(chunk) - # Process the import - import_service = ImportServiceV1(import_run) - import_service.process_file(temp_file_path) +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_index(request): + return render( + request, + "import_app/pages/profiles_index.html", + ) - return response + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_list(request): + profiles = ImportProfile.objects.all() + + return render( + request, + "import_app/fragments/profiles/list.html", + {"profiles": profiles}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_add(request): + if request.method == "POST": + form = ImportProfileForm(request.POST) + + if form.is_valid(): + form.save() + messages.success(request, _("Import Profile added successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = ImportProfileForm() + + return render( + request, + "import_app/fragments/profiles/add.html", + {"form": form}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_edit(request, profile_id): + profile = get_object_or_404(ImportProfile, id=profile_id) + + if request.method == "POST": + form = ImportProfileForm(request.POST, instance=profile) + + if form.is_valid(): + form.save() + messages.success(request, _("Import Profile update successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = ImportProfileForm(instance=profile) + + return render( + request, + "import_app/fragments/profiles/edit.html", + {"form": form, "profile": profile}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_run_list(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + runs = ImportRun.objects.filter(profile=profile).order_by("id") + + return render( + request, + "import_app/fragments/runs/list.html", + {"profile": profile, "runs": runs}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_run_add(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + if request.method == "POST": + form = ImportRunFileUploadForm(request.POST, request.FILES) + + if form.is_valid(): + uploaded_file = request.FILES["file"] + fs = FileSystemStorage(location="/usr/src/app/temp") + filename = fs.save(uploaded_file.name, uploaded_file) + file_path = fs.path(filename) + + import_run = ImportRun.objects.create(profile=profile, file_name=filename) + + # Defer the procrastinate task + process_import.defer(import_run_id=import_run.id, file_path=file_path) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = ImportRunFileUploadForm() + + return render( + request, + "import_app/fragments/runs/add.html", + {"form": form, "profile": profile}, + ) From 02adfd828a8f2f15f69b2f04651b1a1ddf8905a7 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 23:09:49 -0300 Subject: [PATCH 17/45] feat(transactions): add internal_id field to transactions --- app/apps/import_app/schemas/v1.py | 15 +++++++++++--- .../0033_transaction_internal_id.py | 20 +++++++++++++++++++ 
app/apps/transactions/models.py | 3 +++ 3 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 app/apps/transactions/migrations/0033_transaction_internal_id.py diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py index 74e37a1..22df7c2 100644 --- a/app/apps/import_app/schemas/v1.py +++ b/app/apps/import_app/schemas/v1.py @@ -167,13 +167,20 @@ class TransactionCategoryMapping(ColumnMapping): coerce_to: Literal["str|int"] = Field("str|int", frozen=True) -class TransactionInternalMapping(ColumnMapping): +class TransactionInternalNoteMapping(ColumnMapping): target: Literal["internal_note"] = Field( ..., description="Transaction field to map to" ) coerce_to: Literal["str"] = Field("str", frozen=True) +class TransactionInternalIDMapping(ColumnMapping): + target: Literal["internal_id"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + class CategoryNameMapping(ColumnMapping): target: Literal["category_name"] = Field( ..., description="Category field to map to" @@ -314,7 +321,8 @@ class ImportProfileSchema(BaseModel): | TransactionTagsMapping | TransactionEntitiesMapping | TransactionCategoryMapping - | TransactionInternalMapping + | TransactionInternalNoteMapping + | TransactionInternalIDMapping | CategoryNameMapping | CategoryMuteMapping | CategoryActiveMapping @@ -358,7 +366,8 @@ def validate_mappings(self) -> "ImportProfileSchema": TransactionTagsMapping, TransactionEntitiesMapping, TransactionCategoryMapping, - TransactionInternalMapping, + TransactionInternalNoteMapping, + TransactionInternalIDMapping, ), "accounts": ( AccountNameMapping, diff --git a/app/apps/transactions/migrations/0033_transaction_internal_id.py b/app/apps/transactions/migrations/0033_transaction_internal_id.py new file mode 100644 index 0000000..b7d578c --- /dev/null +++ b/app/apps/transactions/migrations/0033_transaction_internal_id.py @@ -0,0 +1,20 @@ +# Generated by Django 5.1.5 on 2025-01-21 01:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("transactions", "0032_transaction_created_at_transaction_updated_at"), + ] + + operations = [ + migrations.AddField( + model_name="transaction", + name="internal_id", + field=models.TextField( + blank=True, null=True, unique=True, verbose_name="Internal ID" + ), + ), + ] diff --git a/app/apps/transactions/models.py b/app/apps/transactions/models.py index 2bd2a68..85ff53a 100644 --- a/app/apps/transactions/models.py +++ b/app/apps/transactions/models.py @@ -190,6 +190,9 @@ class Type(models.TextChoices): verbose_name=_("Recurring Transaction"), ) internal_note = models.TextField(blank=True, verbose_name=_("Internal Note")) + internal_id = models.TextField( + blank=True, null=True, unique=True, verbose_name=_("Internal ID") + ) deleted = models.BooleanField( default=False, verbose_name=_("Deleted"), db_index=True From 32b5864736a7a02b4eeacbc2c078cdac1138696f Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 23:10:11 -0300 Subject: [PATCH 18/45] feat(transactions): make deleted_at readonly on admin --- app/apps/transactions/admin.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/app/apps/transactions/admin.py b/app/apps/transactions/admin.py index df4d1c8..8f37317 100644 --- a/app/apps/transactions/admin.py +++ b/app/apps/transactions/admin.py @@ -19,15 +19,16 @@ def get_queryset(self, request): list_filter = ["deleted", "type", "is_paid", "date", "account"] 
list_display = [ - "deleted", + "date", "description", "type", "account__name", "amount", "account__currency__code", - "date", "reference_date", + "deleted", ] + readonly_fields = ["deleted_at"] actions = ["hard_delete_selected"] From d96787cfebe4c8e50762612a9c441575c3331f15 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Wed, 22 Jan 2025 01:41:17 -0300 Subject: [PATCH 19/45] feat(import): more UI and endpoints --- app/apps/import_app/forms.py | 9 +- app/apps/import_app/services/v1.py | 18 ++- app/apps/import_app/urls.py | 22 +++- app/apps/import_app/views.py | 58 ++++++++- .../import_app/fragments/profiles/list.html | 38 ++++-- .../import_app/fragments/runs/add.html | 4 +- .../import_app/fragments/runs/list.html | 111 ++++++++++++++++++ 7 files changed, 227 insertions(+), 33 deletions(-) diff --git a/app/apps/import_app/forms.py b/app/apps/import_app/forms.py index 78ee3d7..f300721 100644 --- a/app/apps/import_app/forms.py +++ b/app/apps/import_app/forms.py @@ -55,4 +55,11 @@ def __init__(self, *args, **kwargs): self.helper = FormHelper() self.helper.form_tag = False self.helper.form_method = "post" - self.helper.layout = Layout("file") + self.helper.layout = Layout( + "file", + FormActions( + NoClassSubmit( + "submit", _("Import"), css_class="btn btn-outline-primary w-100" + ), + ), + ) diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py index 0416caf..abda751 100644 --- a/app/apps/import_app/services/v1.py +++ b/app/apps/import_app/services/v1.py @@ -491,7 +491,6 @@ def _process_csv(self, file_path): for row_number, row in enumerate(reader, start=1): self._process_row(row, row_number) - self._increment_totals("processed", value=1) def _validate_file_path(self, file_path: str) -> str: """ @@ -518,15 +517,14 @@ def process_file(self, file_path: str): if self.settings.file_type == "csv": self._process_csv(file_path) - if self.import_run.processed_rows == self.import_run.total_rows: - self._update_status("FINISHED") - self._log( - "info", - f"Import completed successfully. " - f"Successful: {self.import_run.successful_rows}, " - f"Failed: {self.import_run.failed_rows}, " - f"Skipped: {self.import_run.skipped_rows}", - ) + self._update_status("FINISHED") + self._log( + "info", + f"Import completed successfully. 
" + f"Successful: {self.import_run.successful_rows}, " + f"Failed: {self.import_run.failed_rows}, " + f"Skipped: {self.import_run.skipped_rows}", + ) except Exception as e: self._update_status("FAILED") diff --git a/app/apps/import_app/urls.py b/app/apps/import_app/urls.py index c2608a3..beb65ba 100644 --- a/app/apps/import_app/urls.py +++ b/app/apps/import_app/urls.py @@ -13,6 +13,11 @@ views.import_profile_list, name="import_profiles_list", ), + path( + "import/profiles//delete/", + views.import_profile_delete, + name="import_profile_delete", + ), path( "import/profiles/add/", views.import_profile_add, @@ -23,16 +28,21 @@ views.import_profile_edit, name="import_profile_edit", ), - path( - "import/profiles//runs/", - views.import_run_add, - name="import_profile_runs_index", - ), path( "import/profiles//runs/list/", - views.import_run_add, + views.import_runs_list, name="import_profile_runs_list", ), + path( + "import/profiles//runs//log/", + views.import_run_log, + name="import_run_log", + ), + path( + "import/profiles//runs//delete/", + views.import_run_delete, + name="import_run_delete", + ), path( "import/profiles//runs/add/", views.import_run_add, diff --git a/app/apps/import_app/views.py b/app/apps/import_app/views.py index ce65a2b..6b869fd 100644 --- a/app/apps/import_app/views.py +++ b/app/apps/import_app/views.py @@ -5,6 +5,7 @@ from django.core.files.storage import FileSystemStorage from django.http import HttpResponse from django.shortcuts import render, get_object_or_404 +from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_http_methods from django.utils.translation import gettext_lazy as _ @@ -105,13 +106,32 @@ def import_profile_edit(request, profile_id): ) +@only_htmx +@login_required +@csrf_exempt +@require_http_methods(["DELETE"]) +def import_profile_delete(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + profile.delete() + + messages.success(request, _("Import Profile deleted successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated", + }, + ) + + @only_htmx @login_required @require_http_methods(["GET", "POST"]) -def import_run_list(request, profile_id): +def import_runs_list(request, profile_id): profile = ImportProfile.objects.get(id=profile_id) - runs = ImportRun.objects.filter(profile=profile).order_by("id") + runs = ImportRun.objects.filter(profile=profile).order_by("-id") return render( request, @@ -120,6 +140,19 @@ def import_run_list(request, profile_id): ) +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_run_log(request, profile_id, run_id): + run = ImportRun.objects.get(profile__id=profile_id, id=run_id) + + return render( + request, + "import_app/fragments/runs/log.html", + {"run": run}, + ) + + @only_htmx @login_required @require_http_methods(["GET", "POST"]) @@ -140,6 +173,8 @@ def import_run_add(request, profile_id): # Defer the procrastinate task process_import.defer(import_run_id=import_run.id, file_path=file_path) + messages.success(request, _("Import Run queued successfully")) + return HttpResponse( status=204, headers={ @@ -154,3 +189,22 @@ def import_run_add(request, profile_id): "import_app/fragments/runs/add.html", {"form": form, "profile": profile}, ) + + +@only_htmx +@login_required +@csrf_exempt +@require_http_methods(["DELETE"]) +def import_run_delete(request, profile_id, run_id): + run = ImportRun.objects.get(profile__id=profile_id, id=run_id) + + run.delete() + + messages.success(request, _("Run 
deleted successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated", + }, + ) diff --git a/app/templates/import_app/fragments/profiles/list.html b/app/templates/import_app/fragments/profiles/list.html index f1f34d2..2872897 100644 --- a/app/templates/import_app/fragments/profiles/list.html +++ b/app/templates/import_app/fragments/profiles/list.html @@ -38,18 +38,32 @@ hx-get="{% url 'import_profile_edit' profile_id=profile.id %}" hx-target="#generic-offcanvas"> -{# #} -{# #} + + + + + + {{ profile.name }} {{ profile.get_version_display }} diff --git a/app/templates/import_app/fragments/runs/add.html b/app/templates/import_app/fragments/runs/add.html index d5a5b89..9997044 100644 --- a/app/templates/import_app/fragments/runs/add.html +++ b/app/templates/import_app/fragments/runs/add.html @@ -2,10 +2,10 @@ {% load i18n %} {% load crispy_forms_tags %} -{% block title %}{% translate 'Import file' %}{% endblock %} +{% block title %}{% translate 'Import file with profile' %} {{ profile.name }}{% endblock %} {% block body %} -
+ {% crispy form %}
{% endblock %} diff --git a/app/templates/import_app/fragments/runs/list.html b/app/templates/import_app/fragments/runs/list.html index 0697d26..f67054c 100644 --- a/app/templates/import_app/fragments/runs/list.html +++ b/app/templates/import_app/fragments/runs/list.html @@ -5,5 +5,116 @@ {% block title %}{% translate 'Runs for ' %}{{ profile.name }}{% endblock %} {% block body %} +
+ {% if runs %} +
+ {% for run in runs %} +
+
+
+ {{ run.get_status_display }} +
+
+
{{ run.id }}({{ run.file_name }})
+
+
+
+
+
+
+ {% trans 'Total Items' %} +
+
+ {{ run.total_rows }} +
+
+
+
+
+
+
+
+ {% trans 'Processed Items' %} +
+
+ {{ run.processed_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Skipped Items' %} +
+
+ {{ run.skipped_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Failed Items' %} +
+
+ {{ run.failed_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Successful Items' %} +
+
+ {{ run.successful_rows }} +
+
+
+
+ +
+
+ +
+
+ {% endfor %} + {% else %} + + {% endif %} +
+
{% endblock %} From 8992cd98b55ebf65a54965ba078e67b5830711ca Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Thu, 16 Jan 2025 14:09:33 -0300 Subject: [PATCH 20/45] feat: add import app boilerplate --- app/WYGIWYH/settings.py | 1 + app/apps/import/__init__.py | 0 app/apps/import/admin.py | 3 +++ app/apps/import/apps.py | 6 ++++++ app/apps/import/migrations/__init__.py | 0 app/apps/import/models.py | 3 +++ app/apps/import/tests.py | 3 +++ app/apps/import/views.py | 3 +++ 8 files changed, 19 insertions(+) create mode 100644 app/apps/import/__init__.py create mode 100644 app/apps/import/admin.py create mode 100644 app/apps/import/apps.py create mode 100644 app/apps/import/migrations/__init__.py create mode 100644 app/apps/import/models.py create mode 100644 app/apps/import/tests.py create mode 100644 app/apps/import/views.py diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index b2eeba1..a2336cb 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -64,6 +64,7 @@ "apps.accounts.apps.AccountsConfig", "apps.common.apps.CommonConfig", "apps.net_worth.apps.NetWorthConfig", + "apps.import.apps.ImportConfig", "apps.api.apps.ApiConfig", "cachalot", "rest_framework", diff --git a/app/apps/import/__init__.py b/app/apps/import/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/apps/import/admin.py b/app/apps/import/admin.py new file mode 100644 index 0000000..8c38f3f --- /dev/null +++ b/app/apps/import/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. diff --git a/app/apps/import/apps.py b/app/apps/import/apps.py new file mode 100644 index 0000000..fdfa08d --- /dev/null +++ b/app/apps/import/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class ImportConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "apps.import" diff --git a/app/apps/import/migrations/__init__.py b/app/apps/import/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/app/apps/import/models.py b/app/apps/import/models.py new file mode 100644 index 0000000..71a8362 --- /dev/null +++ b/app/apps/import/models.py @@ -0,0 +1,3 @@ +from django.db import models + +# Create your models here. diff --git a/app/apps/import/tests.py b/app/apps/import/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/app/apps/import/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/app/apps/import/views.py b/app/apps/import/views.py new file mode 100644 index 0000000..91ea44a --- /dev/null +++ b/app/apps/import/views.py @@ -0,0 +1,3 @@ +from django.shortcuts import render + +# Create your views here. 
From 493bf268bb3607c875699211c709878e173806fc Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Fri, 17 Jan 2025 17:40:51 -0300 Subject: [PATCH 21/45] feat: rename app, some work on schema --- app/WYGIWYH/settings.py | 2 +- app/WYGIWYH/urls.py | 1 + app/apps/import/admin.py | 3 - app/apps/import/models.py | 3 - app/apps/import/views.py | 3 - app/apps/{import => import_app}/__init__.py | 0 app/apps/import_app/admin.py | 6 + app/apps/{import => import_app}/apps.py | 2 +- .../migrations/__init__.py | 0 app/apps/import_app/models.py | 74 ++++++ app/apps/import_app/schemas.py | 0 app/apps/import_app/schemas/__init__.py | 8 + app/apps/import_app/schemas/v1.py | 104 ++++++++ app/apps/import_app/services.py | 0 app/apps/import_app/services/__init__.py | 1 + app/apps/import_app/services/v1.py | 237 ++++++++++++++++++ app/apps/import_app/tasks.py | 18 ++ app/apps/{import => import_app}/tests.py | 0 app/apps/import_app/urls.py | 6 + app/apps/import_app/views.py | 26 ++ app/apps/transactions/models.py | 1 + requirements.txt | 2 + 22 files changed, 486 insertions(+), 11 deletions(-) delete mode 100644 app/apps/import/admin.py delete mode 100644 app/apps/import/models.py delete mode 100644 app/apps/import/views.py rename app/apps/{import => import_app}/__init__.py (100%) create mode 100644 app/apps/import_app/admin.py rename app/apps/{import => import_app}/apps.py (81%) rename app/apps/{import => import_app}/migrations/__init__.py (100%) create mode 100644 app/apps/import_app/models.py create mode 100644 app/apps/import_app/schemas.py create mode 100644 app/apps/import_app/schemas/__init__.py create mode 100644 app/apps/import_app/schemas/v1.py create mode 100644 app/apps/import_app/services.py create mode 100644 app/apps/import_app/services/__init__.py create mode 100644 app/apps/import_app/services/v1.py create mode 100644 app/apps/import_app/tasks.py rename app/apps/{import => import_app}/tests.py (100%) create mode 100644 app/apps/import_app/urls.py create mode 100644 app/apps/import_app/views.py diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index a2336cb..d597d5d 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -64,7 +64,7 @@ "apps.accounts.apps.AccountsConfig", "apps.common.apps.CommonConfig", "apps.net_worth.apps.NetWorthConfig", - "apps.import.apps.ImportConfig", + "apps.import_app.apps.ImportConfig", "apps.api.apps.ApiConfig", "cachalot", "rest_framework", diff --git a/app/WYGIWYH/urls.py b/app/WYGIWYH/urls.py index 5a465a5..eb4357d 100644 --- a/app/WYGIWYH/urls.py +++ b/app/WYGIWYH/urls.py @@ -47,4 +47,5 @@ path("", include("apps.calendar_view.urls")), path("", include("apps.dca.urls")), path("", include("apps.mini_tools.urls")), + path("", include("apps.import_app.urls")), ] diff --git a/app/apps/import/admin.py b/app/apps/import/admin.py deleted file mode 100644 index 8c38f3f..0000000 --- a/app/apps/import/admin.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.contrib import admin - -# Register your models here. diff --git a/app/apps/import/models.py b/app/apps/import/models.py deleted file mode 100644 index 71a8362..0000000 --- a/app/apps/import/models.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.db import models - -# Create your models here. diff --git a/app/apps/import/views.py b/app/apps/import/views.py deleted file mode 100644 index 91ea44a..0000000 --- a/app/apps/import/views.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.shortcuts import render - -# Create your views here. 
diff --git a/app/apps/import/__init__.py b/app/apps/import_app/__init__.py similarity index 100% rename from app/apps/import/__init__.py rename to app/apps/import_app/__init__.py diff --git a/app/apps/import_app/admin.py b/app/apps/import_app/admin.py new file mode 100644 index 0000000..cbccf2b --- /dev/null +++ b/app/apps/import_app/admin.py @@ -0,0 +1,6 @@ +from django.contrib import admin +from apps.import_app import models + +# Register your models here. +admin.site.register(models.ImportRun) +admin.site.register(models.ImportProfile) diff --git a/app/apps/import/apps.py b/app/apps/import_app/apps.py similarity index 81% rename from app/apps/import/apps.py rename to app/apps/import_app/apps.py index fdfa08d..4dbe90c 100644 --- a/app/apps/import/apps.py +++ b/app/apps/import_app/apps.py @@ -3,4 +3,4 @@ class ImportConfig(AppConfig): default_auto_field = "django.db.models.BigAutoField" - name = "apps.import" + name = "apps.import_app" diff --git a/app/apps/import/migrations/__init__.py b/app/apps/import_app/migrations/__init__.py similarity index 100% rename from app/apps/import/migrations/__init__.py rename to app/apps/import_app/migrations/__init__.py diff --git a/app/apps/import_app/models.py b/app/apps/import_app/models.py new file mode 100644 index 0000000..aca04e3 --- /dev/null +++ b/app/apps/import_app/models.py @@ -0,0 +1,74 @@ +from django.db import models +from django.utils.translation import gettext_lazy as _ + + +class ImportProfile(models.Model): + class Versions(models.IntegerChoices): + VERSION_1 = 1, _("Version 1") + + name = models.CharField(max_length=100) + yaml_config = models.TextField(help_text=_("YAML configuration")) + version = models.IntegerField( + choices=Versions, + default=Versions.VERSION_1, + verbose_name=_("Version"), + ) + + def __str__(self): + return self.name + + class Meta: + ordering = ["name"] + + +class ImportRun(models.Model): + class Status(models.TextChoices): + QUEUED = "QUEUED", _("Queued") + PROCESSING = "PROCESSING", _("Processing") + FAILED = "FAILED", _("Failed") + FINISHED = "FINISHED", _("Finished") + + status = models.CharField( + max_length=10, + choices=Status, + default=Status.QUEUED, + verbose_name=_("Status"), + ) + profile = models.ForeignKey( + ImportProfile, + on_delete=models.CASCADE, + ) + file_name = models.CharField( + max_length=10000, + help_text=_("File name"), + ) + transactions = models.ManyToManyField( + "transactions.Transaction", related_name="import_runs" + ) + tags = models.ManyToManyField( + "transactions.TransactionTag", related_name="import_runs" + ) + categories = models.ManyToManyField( + "transactions.TransactionCategory", related_name="import_runs" + ) + entities = models.ManyToManyField( + "transactions.TransactionEntity", related_name="import_runs" + ) + currencies = models.ManyToManyField( + "currencies.Currency", related_name="import_runs" + ) + + logs = models.TextField(blank=True) + processed_rows = models.IntegerField(default=0) + total_rows = models.IntegerField(default=0) + successful_rows = models.IntegerField(default=0) + skipped_rows = models.IntegerField(default=0) + failed_rows = models.IntegerField(default=0) + started_at = models.DateTimeField(null=True) + finished_at = models.DateTimeField(null=True) + + @property + def progress(self): + if self.total_rows == 0: + return 0 + return (self.processed_rows / self.total_rows) * 100 diff --git a/app/apps/import_app/schemas.py b/app/apps/import_app/schemas.py new file mode 100644 index 0000000..e69de29 diff --git 
a/app/apps/import_app/schemas/__init__.py b/app/apps/import_app/schemas/__init__.py new file mode 100644 index 0000000..f68ce79 --- /dev/null +++ b/app/apps/import_app/schemas/__init__.py @@ -0,0 +1,8 @@ +from apps.import_app.schemas.v1 import ( + ImportProfileSchema as SchemaV1, + ColumnMapping as ColumnMappingV1, + # TransformationRule as TransformationRuleV1, + ImportSettings as SettingsV1, + HashTransformationRule as HashTransformationRuleV1, + CompareDeduplicationRule as CompareDeduplicationRuleV1, +) diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py new file mode 100644 index 0000000..1cc7dc5 --- /dev/null +++ b/app/apps/import_app/schemas/v1.py @@ -0,0 +1,104 @@ +from typing import Dict, List, Optional, Literal +from pydantic import BaseModel, Field + + +class CompareDeduplicationRule(BaseModel): + type: Literal["compare"] + fields: Dict = Field( + ..., description="Match header and fields to compare for deduplication" + ) + match_type: Literal["lax", "strict"] + + +class ReplaceTransformationRule(BaseModel): + field: str + type: Literal["replace", "regex"] = Field( + ..., description="Type of transformation: replace or regex" + ) + pattern: str = Field(..., description="Pattern to match") + replacement: str = Field(..., description="Value to replace with") + + +class DateFormatTransformationRule(BaseModel): + field: str + type: Literal["date_format"] = Field( + ..., description="Type of transformation: replace or regex" + ) + original_format: str = Field(..., description="Original date format") + new_format: str = Field(..., description="New date format to use") + + +class HashTransformationRule(BaseModel): + fields: List[str] + type: Literal["hash"] + + +class MergeTransformationRule(BaseModel): + fields: List[str] + type: Literal["merge"] + separator: str = Field(default=" ", description="Separator to use when merging") + + +class SplitTransformationRule(BaseModel): + fields: List[str] + type: Literal["split"] + separator: str = Field(default=",", description="Separator to use when splitting") + index: int | None = Field( + default=0, description="Index to return as value. Empty to return all." + ) + + +class ImportSettings(BaseModel): + skip_errors: bool = Field( + default=False, + description="If True, errors during import will be logged and skipped", + ) + file_type: Literal["csv"] = "csv" + delimiter: str = Field(default=",", description="CSV delimiter character") + encoding: str = Field(default="utf-8", description="File encoding") + skip_rows: int = Field( + default=0, description="Number of rows to skip at the beginning of the file" + ) + importing: Literal[ + "transactions", "accounts", "currencies", "categories", "tags", "entities" + ] + + +class ColumnMapping(BaseModel): + source: Optional[str] = Field( + default=None, + description="CSV column header. 
If None, the field will be generated from transformations", + ) + target: Literal[ + "account", + "type", + "is_paid", + "date", + "reference_date", + "amount", + "notes", + "category", + "tags", + "entities", + "internal_note", + ] = Field(..., description="Transaction field to map to") + default_value: Optional[str] = None + required: bool = False + transformations: Optional[ + List[ + ReplaceTransformationRule + | DateFormatTransformationRule + | HashTransformationRule + | MergeTransformationRule + | SplitTransformationRule + ] + ] = Field(default_factory=list) + + +class ImportProfileSchema(BaseModel): + settings: ImportSettings + column_mapping: Dict[str, ColumnMapping] + deduplication: List[CompareDeduplicationRule] = Field( + default_factory=list, + description="Rules for deduplicating records during import", + ) diff --git a/app/apps/import_app/services.py b/app/apps/import_app/services.py new file mode 100644 index 0000000..e69de29 diff --git a/app/apps/import_app/services/__init__.py b/app/apps/import_app/services/__init__.py new file mode 100644 index 0000000..6001902 --- /dev/null +++ b/app/apps/import_app/services/__init__.py @@ -0,0 +1 @@ +from apps.import_app.services.v1 import ImportService as ImportServiceV1 diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py new file mode 100644 index 0000000..333eb6e --- /dev/null +++ b/app/apps/import_app/services/v1.py @@ -0,0 +1,237 @@ +import csv +import hashlib +import re +from datetime import datetime +from typing import Dict, Any, Literal + +import yaml + +from django.db import transaction +from django.core.files.storage import default_storage +from django.utils import timezone + +from apps.import_app.models import ImportRun, ImportProfile +from apps.import_app.schemas import ( + SchemaV1, + ColumnMappingV1, + SettingsV1, + HashTransformationRuleV1, + CompareDeduplicationRuleV1, +) +from apps.transactions.models import Transaction + + +class ImportService: + def __init__(self, import_run: ImportRun): + self.import_run: ImportRun = import_run + self.profile: ImportProfile = import_run.profile + self.config: SchemaV1 = self._load_config() + self.settings: SettingsV1 = self.config.settings + self.deduplication: list[CompareDeduplicationRuleV1] = self.config.deduplication + self.mapping: Dict[str, ColumnMappingV1] = self.config.column_mapping + + def _load_config(self) -> SchemaV1: + yaml_data = yaml.safe_load(self.profile.yaml_config) + + if self.profile.version == ImportProfile.Versions.VERSION_1: + return SchemaV1(**yaml_data) + + raise ValueError(f"Unsupported version: {self.profile.version}") + + def _log(self, level: str, message: str, **kwargs) -> None: + """Add a log entry to the import run logs""" + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + # Format additional context if present + context = "" + if kwargs: + context = " - " + ", ".join(f"{k}={v}" for k, v in kwargs.items()) + + log_line = f"[{timestamp}] {level.upper()}: {message}{context}\n" + + # Append to existing logs + self.import_run.logs += log_line + self.import_run.save(update_fields=["logs"]) + + def _update_status( + self, new_status: Literal["PROCESSING", "FAILED", "FINISHED"] + ) -> None: + if new_status == "PROCESSING": + self.import_run.status = ImportRun.Status.PROCESSING + elif new_status == "FAILED": + self.import_run.status = ImportRun.Status.FAILED + elif new_status == "FINISHED": + self.import_run.status = ImportRun.Status.FINISHED + + self.import_run.save(update_fields=["status"]) + + @staticmethod + 
def _transform_value( + value: str, mapping: ColumnMappingV1, row: Dict[str, str] = None + ) -> Any: + transformed = value + + for transform in mapping.transformations: + if transform.type == "hash": + if not isinstance(transform, HashTransformationRuleV1): + continue + + # Collect all values to be hashed + values_to_hash = [] + for field in transform.fields: + if field in row: + values_to_hash.append(str(row[field])) + + # Create hash from concatenated values + if values_to_hash: + concatenated = "|".join(values_to_hash) + transformed = hashlib.sha256(concatenated.encode()).hexdigest() + + elif transform.type == "replace": + transformed = transformed.replace( + transform.pattern, transform.replacement + ) + elif transform.type == "regex": + transformed = re.sub( + transform.pattern, transform.replacement, transformed + ) + elif transform.type == "date_format": + transformed = datetime.strptime( + transformed, transform.pattern + ).strftime(transform.replacement) + + return transformed + + def _map_row_to_transaction(self, row: Dict[str, str]) -> Dict[str, Any]: + transaction_data = {} + + for field, mapping in self.mapping.items(): + # If source is None, use None as the initial value + value = row.get(mapping.source) if mapping.source else None + + # Use default_value if value is None + if value is None: + value = mapping.default_value + + if mapping.required and value is None and not mapping.transformations: + raise ValueError(f"Required field {field} is missing") + + # Apply transformations even if initial value is None + if mapping.transformations: + value = self._transform_value(value, mapping, row) + + if value is not None: + transaction_data[field] = value + + return transaction_data + + def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool: + for rule in self.deduplication: + if rule.type == "compare": + query = Transaction.objects.all() + + # Build query conditions for each field in the rule + for field, header in rule.fields.items(): + if field in transaction_data: + if rule.match_type == "strict": + query = query.filter(**{field: transaction_data[field]}) + else: # lax matching + query = query.filter( + **{f"{field}__iexact": transaction_data[field]} + ) + + # If we found any matching transaction, it's a duplicate + if query.exists(): + return True + + return False + + def _process_csv(self, file_path): + with open(file_path, "r", encoding=self.settings.encoding) as csv_file: + reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + + # Count total rows + self.import_run.total_rows = sum(1 for _ in reader) + csv_file.seek(0) + reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + + self._log("info", f"Starting import with {self.import_run.total_rows} rows") + + # Skip specified number of rows + for _ in range(self.settings.skip_rows): + next(reader) + + if self.settings.skip_rows: + self._log("info", f"Skipped {self.settings.skip_rows} initial rows") + + for row_number, row in enumerate(reader, start=1): + try: + transaction_data = self._map_row_to_transaction(row) + + if transaction_data: + if self.deduplication and self._check_duplicate_transaction( + transaction_data + ): + self.import_run.skipped_rows += 1 + self._log("info", f"Skipped duplicate row {row_number}") + continue + + self.import_run.transactions.add(transaction_data) + self.import_run.successful_rows += 1 + self._log("debug", f"Successfully processed row {row_number}") + + self.import_run.processed_rows += 1 + self.import_run.save( + update_fields=[ + 
"processed_rows", + "successful_rows", + "skipped_rows", + ] + ) + + except Exception as e: + if not self.settings.skip_errors: + self._log( + "error", + f"Fatal error processing row {row_number}: {str(e)}", + ) + self._update_status("FAILED") + raise + else: + self._log( + "warning", f"Error processing row {row_number}: {str(e)}" + ) + self.import_run.failed_rows += 1 + self.import_run.save(update_fields=["failed_rows"]) + + def process_file(self, file_path: str): + self._update_status("PROCESSING") + self.import_run.started_at = timezone.now() + self.import_run.save(update_fields=["started_at"]) + + self._log("info", "Starting import process") + + try: + if self.settings.file_type == "csv": + self._process_csv(file_path) + + if self.import_run.processed_rows == self.import_run.total_rows: + self._update_status("FINISHED") + self._log( + "info", + f"Import completed successfully. " + f"Successful: {self.import_run.successful_rows}, " + f"Failed: {self.import_run.failed_rows}, " + f"Skipped: {self.import_run.skipped_rows}", + ) + + except Exception as e: + self._update_status("FAILED") + self._log("error", f"Import failed: {str(e)}") + raise Exception("Import failed") + + finally: + self._log("info", "Cleaning up temporary files") + default_storage.delete(file_path) + self.import_run.finished_at = timezone.now() + self.import_run.save(update_fields=["finished_at"]) diff --git a/app/apps/import_app/tasks.py b/app/apps/import_app/tasks.py new file mode 100644 index 0000000..25efcbc --- /dev/null +++ b/app/apps/import_app/tasks.py @@ -0,0 +1,18 @@ +import logging + +from procrastinate.contrib.django import app + +from apps.import_app.models import ImportRun +from apps.import_app.services import ImportServiceV1 + +logger = logging.getLogger(__name__) + + +@app.task(queue="imports") +def process_import(import_run_id: int, file_path: str): + try: + import_run = ImportRun.objects.get(id=import_run_id) + import_service = ImportServiceV1(import_run) + import_service.process_file(file_path) + except ImportRun.DoesNotExist: + raise ValueError(f"ImportRun with id {import_run_id} not found") diff --git a/app/apps/import/tests.py b/app/apps/import_app/tests.py similarity index 100% rename from app/apps/import/tests.py rename to app/apps/import_app/tests.py diff --git a/app/apps/import_app/urls.py b/app/apps/import_app/urls.py new file mode 100644 index 0000000..aea8670 --- /dev/null +++ b/app/apps/import_app/urls.py @@ -0,0 +1,6 @@ +from django.urls import path +import apps.import_app.views as views + +urlpatterns = [ + path("import/", views.ImportRunCreateView.as_view(), name="import"), +] diff --git a/app/apps/import_app/views.py b/app/apps/import_app/views.py new file mode 100644 index 0000000..d5b1d94 --- /dev/null +++ b/app/apps/import_app/views.py @@ -0,0 +1,26 @@ +from django.views.generic import CreateView +from apps.import_app.models import ImportRun +from apps.import_app.services import ImportServiceV1 + + +class ImportRunCreateView(CreateView): + model = ImportRun + fields = ["profile"] + + def form_valid(self, form): + response = super().form_valid(form) + + import_run = form.instance + file = self.request.FILES["file"] + + # Save uploaded file temporarily + temp_file_path = f"/tmp/import_{import_run.id}.csv" + with open(temp_file_path, "wb+") as destination: + for chunk in file.chunks(): + destination.write(chunk) + + # Process the import + import_service = ImportServiceV1(import_run) + import_service.process_file(temp_file_path) + + return response diff --git 
a/app/apps/transactions/models.py b/app/apps/transactions/models.py index 70bbc94..f131518 100644 --- a/app/apps/transactions/models.py +++ b/app/apps/transactions/models.py @@ -141,6 +141,7 @@ class Type(models.TextChoices): related_name="transactions", verbose_name=_("Recurring Transaction"), ) + internal_note = models.TextField(blank=True, verbose_name=_("Internal Note")) class Meta: verbose_name = _("Transaction") diff --git a/requirements.txt b/requirements.txt index b4e4f02..af9d39b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -24,3 +24,5 @@ requests~=2.32.3 pytz~=2024.2 python-dateutil~=2.9.0.post0 simpleeval~=1.0.0 +pydantic~=2.10.5 +PyYAML~=6.0.2 From 50efc51f878971010654b46a768163b918c2e881 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 11:27:14 -0300 Subject: [PATCH 22/45] feat(import): improve schema definition --- app/apps/import_app/schemas/__init__.py | 9 +- app/apps/import_app/schemas/v1.py | 330 ++++++++++++++- app/apps/import_app/services/v1.py | 512 +++++++++++++++++++----- 3 files changed, 720 insertions(+), 131 deletions(-) diff --git a/app/apps/import_app/schemas/__init__.py b/app/apps/import_app/schemas/__init__.py index f68ce79..530268d 100644 --- a/app/apps/import_app/schemas/__init__.py +++ b/app/apps/import_app/schemas/__init__.py @@ -1,8 +1 @@ -from apps.import_app.schemas.v1 import ( - ImportProfileSchema as SchemaV1, - ColumnMapping as ColumnMappingV1, - # TransformationRule as TransformationRuleV1, - ImportSettings as SettingsV1, - HashTransformationRule as HashTransformationRuleV1, - CompareDeduplicationRule as CompareDeduplicationRuleV1, -) +import apps.import_app.schemas.v1 as version_1 diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py index 1cc7dc5..043f2a9 100644 --- a/app/apps/import_app/schemas/v1.py +++ b/app/apps/import_app/schemas/v1.py @@ -1,5 +1,5 @@ from typing import Dict, List, Optional, Literal -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, model_validator, field_validator class CompareDeduplicationRule(BaseModel): @@ -9,6 +9,12 @@ class CompareDeduplicationRule(BaseModel): ) match_type: Literal["lax", "strict"] + @field_validator("fields", mode="before") + def coerce_fields_to_dict(cls, v): + if isinstance(v, list): + return {k: v for d in v for k, v in d.items()} + return v + class ReplaceTransformationRule(BaseModel): field: str @@ -17,6 +23,10 @@ class ReplaceTransformationRule(BaseModel): ) pattern: str = Field(..., description="Pattern to match") replacement: str = Field(..., description="Value to replace with") + exclusive: bool = Field( + default=False, + description="If it should match against the last transformation or the original value", + ) class DateFormatTransformationRule(BaseModel): @@ -48,7 +58,7 @@ class SplitTransformationRule(BaseModel): ) -class ImportSettings(BaseModel): +class CSVImportSettings(BaseModel): skip_errors: bool = Field( default=False, description="If True, errors during import will be logged and skipped", @@ -56,7 +66,7 @@ class ImportSettings(BaseModel): file_type: Literal["csv"] = "csv" delimiter: str = Field(default=",", description="CSV delimiter character") encoding: str = Field(default="utf-8", description="File encoding") - skip_rows: int = Field( + skip_lines: int = Field( default=0, description="Number of rows to skip at the beginning of the file" ) importing: Literal[ @@ -69,20 +79,7 @@ class ColumnMapping(BaseModel): default=None, description="CSV column header. 
If None, the field will be generated from transformations", ) - target: Literal[ - "account", - "type", - "is_paid", - "date", - "reference_date", - "amount", - "notes", - "category", - "tags", - "entities", - "internal_note", - ] = Field(..., description="Transaction field to map to") - default_value: Optional[str] = None + default: Optional[str] = None required: bool = False transformations: Optional[ List[ @@ -95,10 +92,305 @@ class ColumnMapping(BaseModel): ] = Field(default_factory=list) +class TransactionAccountMapping(ColumnMapping): + target: Literal["account"] = Field(..., description="Transaction field to map to") + type: Literal["id", "name"] = "name" + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class TransactionTypeMapping(ColumnMapping): + target: Literal["type"] = Field(..., description="Transaction field to map to") + detection_method: Literal["sign", "always_income", "always_expense"] = "sign" + coerce_to: Literal["transaction_type"] = Field("transaction_type", frozen=True) + + +class TransactionIsPaidMapping(ColumnMapping): + target: Literal["is_paid"] = Field(..., description="Transaction field to map to") + detection_method: Literal["sign", "boolean", "always_paid", "always_unpaid"] + coerce_to: Literal["is_paid"] = Field("is_paid", frozen=True) + + +class TransactionDateMapping(ColumnMapping): + target: Literal["date"] = Field(..., description="Transaction field to map to") + format: List[str] | str + coerce_to: Literal["date"] = Field("date", frozen=True) + + +class TransactionReferenceDateMapping(ColumnMapping): + target: Literal["reference_date"] = Field( + ..., description="Transaction field to map to" + ) + format: List[str] | str + coerce_to: Literal["date"] = Field("date", frozen=True) + + +class TransactionAmountMapping(ColumnMapping): + target: Literal["amount"] = Field(..., description="Transaction field to map to") + coerce_to: Literal["positive_decimal"] = Field("positive_decimal", frozen=True) + + +class TransactionDescriptionMapping(ColumnMapping): + target: Literal["description"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TransactionNotesMapping(ColumnMapping): + target: Literal["notes"] = Field(..., description="Transaction field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TransactionTagsMapping(ColumnMapping): + target: Literal["tags"] = Field(..., description="Transaction field to map to") + create: bool = Field( + default=True, description="Create new tags if they doesn't exist" + ) + coerce_to: Literal["list"] = Field("list", frozen=True) + + +class TransactionEntitiesMapping(ColumnMapping): + target: Literal["entities"] = Field(..., description="Transaction field to map to") + create: bool = Field( + default=True, description="Create new entities if they doesn't exist" + ) + coerce_to: Literal["list"] = Field("list", frozen=True) + + +class TransactionCategoryMapping(ColumnMapping): + target: Literal["category"] = Field(..., description="Transaction field to map to") + create: bool = Field( + default=True, description="Create category if it doesn't exist" + ) + type: Literal["id", "name"] = "name" + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class TransactionInternalMapping(ColumnMapping): + target: Literal["internal_note"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class 
CategoryNameMapping(ColumnMapping): + target: Literal["category_name"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CategoryMuteMapping(ColumnMapping): + target: Literal["category_mute"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class CategoryActiveMapping(ColumnMapping): + target: Literal["category_active"] = Field( + ..., description="Category field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class TagNameMapping(ColumnMapping): + target: Literal["tag_name"] = Field(..., description="Tag field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class TagActiveMapping(ColumnMapping): + target: Literal["tag_active"] = Field(..., description="Tag field to map to") + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class EntityNameMapping(ColumnMapping): + target: Literal["entity_name"] = Field(..., description="Entity field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class EntityActiveMapping(ColumnMapping): + target: Literal["entitiy_active"] = Field(..., description="Entity field to map to") + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class AccountNameMapping(ColumnMapping): + target: Literal["account_name"] = Field(..., description="Account field to map to") + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class AccountGroupMapping(ColumnMapping): + target: Literal["account_group"] = Field(..., description="Account field to map to") + type: Literal["id", "name"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountCurrencyMapping(ColumnMapping): + target: Literal["account_currency"] = Field( + ..., description="Account field to map to" + ) + type: Literal["id", "name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountExchangeCurrencyMapping(ColumnMapping): + target: Literal["account_exchange_currency"] = Field( + ..., description="Account field to map to" + ) + type: Literal["id", "name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + +class AccountIsAssetMapping(ColumnMapping): + target: Literal["account_is_asset"] = Field( + ..., description="Account field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class AccountIsArchivedMapping(ColumnMapping): + target: Literal["account_is_archived"] = Field( + ..., description="Account field to map to" + ) + coerce_to: Literal["bool"] = Field("bool", frozen=True) + + +class CurrencyCodeMapping(ColumnMapping): + target: Literal["currency_code"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyNameMapping(ColumnMapping): + target: Literal["currency_name"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyDecimalPlacesMapping(ColumnMapping): + target: Literal["currency_decimal_places"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["int"] = Field("int", frozen=True) + + +class CurrencyPrefixMapping(ColumnMapping): + target: Literal["currency_prefix"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencySuffixMapping(ColumnMapping): + target: 
Literal["currency_suffix"] = Field( + ..., description="Currency field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + +class CurrencyExchangeMapping(ColumnMapping): + target: Literal["currency_exchange"] = Field( + ..., description="Currency field to map to" + ) + type: Literal["id", "name", "code"] + coerce_to: Literal["str|int"] = Field("str|int", frozen=True) + + class ImportProfileSchema(BaseModel): - settings: ImportSettings - column_mapping: Dict[str, ColumnMapping] + settings: CSVImportSettings + mapping: Dict[ + str, + TransactionAccountMapping + | TransactionTypeMapping + | TransactionIsPaidMapping + | TransactionDateMapping + | TransactionReferenceDateMapping + | TransactionAmountMapping + | TransactionDescriptionMapping + | TransactionNotesMapping + | TransactionTagsMapping + | TransactionEntitiesMapping + | TransactionCategoryMapping + | TransactionInternalMapping + | CategoryNameMapping + | CategoryMuteMapping + | CategoryActiveMapping + | TagNameMapping + | TagActiveMapping + | EntityNameMapping + | EntityActiveMapping + | AccountNameMapping + | AccountGroupMapping + | AccountCurrencyMapping + | AccountExchangeCurrencyMapping + | AccountIsAssetMapping + | AccountIsArchivedMapping + | CurrencyCodeMapping + | CurrencyNameMapping + | CurrencyDecimalPlacesMapping + | CurrencyPrefixMapping + | CurrencySuffixMapping + | CurrencyExchangeMapping, + ] deduplication: List[CompareDeduplicationRule] = Field( default_factory=list, description="Rules for deduplicating records during import", ) + + @model_validator(mode="after") + def validate_mappings(self) -> "ImportProfileSchema": + import_type = self.settings.importing + + # Define allowed mapping types for each import type + allowed_mappings = { + "transactions": ( + TransactionAccountMapping, + TransactionTypeMapping, + TransactionIsPaidMapping, + TransactionDateMapping, + TransactionReferenceDateMapping, + TransactionAmountMapping, + TransactionDescriptionMapping, + TransactionNotesMapping, + TransactionTagsMapping, + TransactionEntitiesMapping, + TransactionCategoryMapping, + TransactionInternalMapping, + ), + "accounts": ( + AccountNameMapping, + AccountGroupMapping, + AccountCurrencyMapping, + AccountExchangeCurrencyMapping, + AccountIsAssetMapping, + AccountIsArchivedMapping, + ), + "currencies": ( + CurrencyCodeMapping, + CurrencyNameMapping, + CurrencyDecimalPlacesMapping, + CurrencyPrefixMapping, + CurrencySuffixMapping, + CurrencyExchangeMapping, + ), + "categories": ( + CategoryNameMapping, + CategoryMuteMapping, + CategoryActiveMapping, + ), + "tags": (TagNameMapping, TagActiveMapping), + "entities": (EntityNameMapping, EntityActiveMapping), + } + + allowed_types = allowed_mappings[import_type] + + for field_name, mapping in self.mapping.items(): + if not isinstance(mapping, allowed_types): + raise ValueError( + f"Mapping type '{type(mapping).__name__}' is not allowed when importing {import_type}. 
" + f"Allowed types are: {', '.join(t.__name__ for t in allowed_types)}" + ) + + return self diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py index 333eb6e..069115b 100644 --- a/app/apps/import_app/services/v1.py +++ b/app/apps/import_app/services/v1.py @@ -1,42 +1,56 @@ import csv import hashlib +import logging +import os import re from datetime import datetime -from typing import Dict, Any, Literal +from decimal import Decimal +from typing import Dict, Any, Literal, Union import yaml - from django.db import transaction -from django.core.files.storage import default_storage from django.utils import timezone +from apps.accounts.models import Account, AccountGroup +from apps.currencies.models import Currency from apps.import_app.models import ImportRun, ImportProfile -from apps.import_app.schemas import ( - SchemaV1, - ColumnMappingV1, - SettingsV1, - HashTransformationRuleV1, - CompareDeduplicationRuleV1, +from apps.import_app.schemas import version_1 +from apps.transactions.models import ( + Transaction, + TransactionCategory, + TransactionTag, + TransactionEntity, ) -from apps.transactions.models import Transaction + +logger = logging.getLogger(__name__) class ImportService: + TEMP_DIR = "/usr/src/app/temp" + def __init__(self, import_run: ImportRun): self.import_run: ImportRun = import_run self.profile: ImportProfile = import_run.profile - self.config: SchemaV1 = self._load_config() - self.settings: SettingsV1 = self.config.settings - self.deduplication: list[CompareDeduplicationRuleV1] = self.config.deduplication - self.mapping: Dict[str, ColumnMappingV1] = self.config.column_mapping + self.config: version_1.ImportProfileSchema = self._load_config() + self.settings: version_1.CSVImportSettings = self.config.settings + self.deduplication: list[version_1.CompareDeduplicationRule] = ( + self.config.deduplication + ) + self.mapping: Dict[str, version_1.ColumnMapping] = self.config.mapping - def _load_config(self) -> SchemaV1: - yaml_data = yaml.safe_load(self.profile.yaml_config) - - if self.profile.version == ImportProfile.Versions.VERSION_1: - return SchemaV1(**yaml_data) + # Ensure temp directory exists + os.makedirs(self.TEMP_DIR, exist_ok=True) - raise ValueError(f"Unsupported version: {self.profile.version}") + def _load_config(self) -> version_1.ImportProfileSchema: + yaml_data = yaml.safe_load(self.profile.yaml_config) + try: + config = version_1.ImportProfileSchema(**yaml_data) + except Exception as e: + self._log("error", f"Fatal error processing YAML config: {str(e)}") + self._update_status("FAILED") + raise e + else: + return config def _log(self, level: str, message: str, **kwargs) -> None: """Add a log entry to the import run logs""" @@ -53,6 +67,48 @@ def _log(self, level: str, message: str, **kwargs) -> None: self.import_run.logs += log_line self.import_run.save(update_fields=["logs"]) + def _update_totals( + self, + field: Literal["total", "processed", "successful", "skipped", "failed"], + value: int, + ) -> None: + if field == "total": + self.import_run.total_rows = value + self.import_run.save(update_fields=["total_rows"]) + elif field == "processed": + self.import_run.processed_rows = value + self.import_run.save(update_fields=["processed_rows"]) + elif field == "successful": + self.import_run.successful_rows = value + self.import_run.save(update_fields=["successful_rows"]) + elif field == "skipped": + self.import_run.skipped_rows = value + self.import_run.save(update_fields=["skipped_rows"]) + elif field == "failed": + 
self.import_run.failed_rows = value + self.import_run.save(update_fields=["failed_rows"]) + + def _increment_totals( + self, + field: Literal["total", "processed", "successful", "skipped", "failed"], + value: int, + ) -> None: + if field == "total": + self.import_run.total_rows = self.import_run.total_rows + value + self.import_run.save(update_fields=["total_rows"]) + elif field == "processed": + self.import_run.processed_rows = self.import_run.processed_rows + value + self.import_run.save(update_fields=["processed_rows"]) + elif field == "successful": + self.import_run.successful_rows = self.import_run.successful_rows + value + self.import_run.save(update_fields=["successful_rows"]) + elif field == "skipped": + self.import_run.skipped_rows = self.import_run.skipped_rows + value + self.import_run.save(update_fields=["skipped_rows"]) + elif field == "failed": + self.import_run.failed_rows = self.import_run.failed_rows + value + self.import_run.save(update_fields=["failed_rows"]) + def _update_status( self, new_status: Literal["PROCESSING", "FAILED", "FINISHED"] ) -> None: @@ -67,15 +123,12 @@ def _update_status( @staticmethod def _transform_value( - value: str, mapping: ColumnMappingV1, row: Dict[str, str] = None + value: str, mapping: version_1.ColumnMapping, row: Dict[str, str] = None ) -> Any: transformed = value for transform in mapping.transformations: if transform.type == "hash": - if not isinstance(transform, HashTransformationRuleV1): - continue - # Collect all values to be hashed values_to_hash = [] for field in transform.fields: @@ -88,47 +141,143 @@ def _transform_value( transformed = hashlib.sha256(concatenated.encode()).hexdigest() elif transform.type == "replace": - transformed = transformed.replace( - transform.pattern, transform.replacement - ) + if transform.exclusive: + transformed = value.replace( + transform.pattern, transform.replacement + ) + else: + transformed = transformed.replace( + transform.pattern, transform.replacement + ) elif transform.type == "regex": - transformed = re.sub( - transform.pattern, transform.replacement, transformed - ) + if transform.exclusive: + transformed = re.sub( + transform.pattern, transform.replacement, value + ) + else: + transformed = re.sub( + transform.pattern, transform.replacement, transformed + ) elif transform.type == "date_format": transformed = datetime.strptime( - transformed, transform.pattern - ).strftime(transform.replacement) + transformed, transform.original_format + ).strftime(transform.new_format) + elif transform.type == "merge": + values_to_merge = [] + for field in transform.fields: + if field in row: + values_to_merge.append(str(row[field])) + transformed = transform.separator.join(values_to_merge) + elif transform.type == "split": + parts = transformed.split(transform.separator) + if transform.index is not None: + transformed = parts[transform.index] if parts else "" + else: + transformed = parts return transformed - def _map_row_to_transaction(self, row: Dict[str, str]) -> Dict[str, Any]: - transaction_data = {} - - for field, mapping in self.mapping.items(): - # If source is None, use None as the initial value - value = row.get(mapping.source) if mapping.source else None - - # Use default_value if value is None - if value is None: - value = mapping.default_value - - if mapping.required and value is None and not mapping.transformations: - raise ValueError(f"Required field {field} is missing") - - # Apply transformations even if initial value is None - if mapping.transformations: - value = 
self._transform_value(value, mapping, row) - - if value is not None: - transaction_data[field] = value - - return transaction_data + def _create_transaction(self, data: Dict[str, Any]) -> Transaction: + tags = [] + entities = [] + # Handle related objects first + if "category" in data: + category_name = data.pop("category") + category, _ = TransactionCategory.objects.get_or_create(name=category_name) + data["category"] = category + self.import_run.categories.add(category) + + if "account" in data: + account_id = data.pop("account") + account = None + if isinstance(account_id, str): + account = Account.objects.get(name=account_id) + elif isinstance(account_id, int): + account = Account.objects.get(id=account_id) + data["account"] = account + # self.import_run.acc.add(category) + + if "tags" in data: + tag_names = data.pop("tags").split(",") + for tag_name in tag_names: + tag, _ = TransactionTag.objects.get_or_create(name=tag_name.strip()) + tags.append(tag) + self.import_run.tags.add(tag) + + if "entities" in data: + entity_names = data.pop("entities").split(",") + for entity_name in entity_names: + entity, _ = TransactionEntity.objects.get_or_create( + name=entity_name.strip() + ) + entities.append(entity) + self.import_run.entities.add(entity) + + if "amount" in data: + amount = data.pop("amount") + data["amount"] = abs(Decimal(amount)) + + # Create the transaction + new_transaction = Transaction.objects.create(**data) + self.import_run.transactions.add(new_transaction) + + # Add many-to-many relationships + if tags: + new_transaction.tags.set(tags) + if entities: + new_transaction.entities.set(entities) + + return new_transaction + + def _create_account(self, data: Dict[str, Any]) -> Account: + if "group" in data: + group_name = data.pop("group") + group, _ = AccountGroup.objects.get_or_create(name=group_name) + data["group"] = group + + # Handle currency references + if "currency" in data: + currency = Currency.objects.get(code=data["currency"]) + data["currency"] = currency + self.import_run.currencies.add(currency) + + if "exchange_currency" in data: + exchange_currency = Currency.objects.get(code=data["exchange_currency"]) + data["exchange_currency"] = exchange_currency + self.import_run.currencies.add(exchange_currency) + + return Account.objects.create(**data) + + def _create_currency(self, data: Dict[str, Any]) -> Currency: + # Handle exchange currency reference + if "exchange_currency" in data: + exchange_currency = Currency.objects.get(code=data["exchange_currency"]) + data["exchange_currency"] = exchange_currency + self.import_run.currencies.add(exchange_currency) + + currency = Currency.objects.create(**data) + self.import_run.currencies.add(currency) + return currency + + def _create_category(self, data: Dict[str, Any]) -> TransactionCategory: + category = TransactionCategory.objects.create(**data) + self.import_run.categories.add(category) + return category + + def _create_tag(self, data: Dict[str, Any]) -> TransactionTag: + tag = TransactionTag.objects.create(**data) + self.import_run.tags.add(tag) + return tag + + def _create_entity(self, data: Dict[str, Any]) -> TransactionEntity: + entity = TransactionEntity.objects.create(**data) + self.import_run.entities.add(entity) + return entity def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool: for rule in self.deduplication: if rule.type == "compare": - query = Transaction.objects.all() + query = Transaction.objects.all().values("id") # Build query conditions for each field in the rule for field, 
header in rule.fields.items(): @@ -146,65 +295,214 @@ def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool return False + def _coerce_type( + self, value: str, mapping: version_1.ColumnMapping + ) -> Union[str, int, bool, Decimal, datetime, list]: + if not value: + return None + + coerce_to = mapping.coerce_to + + if "|" in coerce_to: + types = coerce_to.split("|") + for t in types: + try: + return self._coerce_single_type(value, t, mapping) + except ValueError: + continue + raise ValueError( + f"Could not coerce '{value}' to any of the types: {coerce_to}" + ) + else: + return self._coerce_single_type(value, coerce_to, mapping) + + def _coerce_single_type( + self, value: str, coerce_to: str, mapping: version_1.ColumnMapping + ) -> Union[str, int, bool, Decimal, datetime.date, list]: + if coerce_to == "str": + return str(value) + elif coerce_to == "int": + if hasattr(mapping, "type") and mapping.type == "id": + return int(value) + elif hasattr(mapping, "type") and mapping.type in ["name", "code"]: + return str(value) + else: + return int(value) + elif coerce_to == "bool": + return value.lower() in ["true", "1", "yes", "y", "on"] + elif coerce_to == "positive_decimal": + return abs(Decimal(value)) + elif coerce_to == "date": + if isinstance( + mapping, + ( + version_1.TransactionDateMapping, + version_1.TransactionReferenceDateMapping, + ), + ): + formats = ( + mapping.format + if isinstance(mapping.format, list) + else [mapping.format] + ) + for fmt in formats: + try: + return datetime.strptime(value, fmt).date() + except ValueError: + continue + raise ValueError( + f"Could not parse date '{value}' with any of the provided formats" + ) + else: + raise ValueError( + "Date coercion is only supported for TransactionDateMapping and TransactionReferenceDateMapping" + ) + elif coerce_to == "list": + return ( + value + if isinstance(value, list) + else [item.strip() for item in value.split(",") if item.strip()] + ) + elif coerce_to == "transaction_type": + if isinstance(mapping, version_1.TransactionTypeMapping): + if mapping.detection_method == "sign": + return ( + Transaction.Type.EXPENSE + if value.startswith("-") + else Transaction.Type.INCOME + ) + elif mapping.detection_method == "always_income": + return Transaction.Type.INCOME + elif mapping.detection_method == "always_expense": + return Transaction.Type.EXPENSE + raise ValueError("Invalid transaction type detection method") + elif coerce_to == "is_paid": + if isinstance(mapping, version_1.TransactionIsPaidMapping): + if mapping.detection_method == "sign": + return not value.startswith("-") + elif mapping.detection_method == "boolean": + return value.lower() in ["true", "1", "yes", "y", "on"] + elif mapping.detection_method == "always_paid": + return True + elif mapping.detection_method == "always_unpaid": + return False + raise ValueError("Invalid is_paid detection method") + else: + raise ValueError(f"Unsupported coercion type: {coerce_to}") + + def _map_row(self, row: Dict[str, str]) -> Dict[str, Any]: + mapped_data = {} + + for field, mapping in self.mapping.items(): + # If source is None, use None as the initial value + value = row.get(mapping.source) if mapping.source else None + + # Use default_value if value is None + if value is None: + value = mapping.default + + if mapping.required and value is None and not mapping.transformations: + raise ValueError(f"Required field {field} is missing") + + # Apply transformations + if mapping.transformations: + value = self._transform_value(value, mapping, row) + + 
value = self._coerce_type(value, mapping) + + if value is not None: + # Remove the prefix from the target field + target = mapping.target + if self.settings.importing == "transactions": + mapped_data[target] = value + else: + # Remove the model prefix (e.g., "account_" from "account_name") + field_name = target.split("_", 1)[1] + mapped_data[field_name] = value + + return mapped_data + + def _process_row(self, row: Dict[str, str], row_number: int) -> None: + try: + mapped_data = self._map_row(row) + + if mapped_data: + # Handle different import types + if self.settings.importing == "transactions": + if self.deduplication and self._check_duplicate_transaction( + mapped_data + ): + self._increment_totals("skipped", 1) + self._log("info", f"Skipped duplicate row {row_number}") + return + self._create_transaction(mapped_data) + elif self.settings.importing == "accounts": + self._create_account(mapped_data) + elif self.settings.importing == "currencies": + self._create_currency(mapped_data) + elif self.settings.importing == "categories": + self._create_category(mapped_data) + elif self.settings.importing == "tags": + self._create_tag(mapped_data) + elif self.settings.importing == "entities": + self._create_entity(mapped_data) + + self._increment_totals("successful", value=1) + self._log("info", f"Successfully processed row {row_number}") + + self._increment_totals("processed", value=1) + + except Exception as e: + if not self.settings.skip_errors: + self._log("error", f"Fatal error processing row {row_number}: {str(e)}") + self._update_status("FAILED") + raise + else: + self._log("warning", f"Error processing row {row_number}: {str(e)}") + self._increment_totals("failed", value=1) + + logger.error(f"Fatal error processing row {row_number}", exc_info=e) + def _process_csv(self, file_path): + # First pass: count rows with open(file_path, "r", encoding=self.settings.encoding) as csv_file: - reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + # Skip specified number of rows + for _ in range(self.settings.skip_lines): + next(csv_file) - # Count total rows - self.import_run.total_rows = sum(1 for _ in reader) - csv_file.seek(0) reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) + self._update_totals("total", value=sum(1 for _ in reader)) - self._log("info", f"Starting import with {self.import_run.total_rows} rows") - + with open(file_path, "r", encoding=self.settings.encoding) as csv_file: # Skip specified number of rows - for _ in range(self.settings.skip_rows): - next(reader) + for _ in range(self.settings.skip_lines): + next(csv_file) + if self.settings.skip_lines: + self._log("info", f"Skipped {self.settings.skip_lines} initial lines") - if self.settings.skip_rows: - self._log("info", f"Skipped {self.settings.skip_rows} initial rows") + reader = csv.DictReader(csv_file, delimiter=self.settings.delimiter) - for row_number, row in enumerate(reader, start=1): - try: - transaction_data = self._map_row_to_transaction(row) - - if transaction_data: - if self.deduplication and self._check_duplicate_transaction( - transaction_data - ): - self.import_run.skipped_rows += 1 - self._log("info", f"Skipped duplicate row {row_number}") - continue - - self.import_run.transactions.add(transaction_data) - self.import_run.successful_rows += 1 - self._log("debug", f"Successfully processed row {row_number}") - - self.import_run.processed_rows += 1 - self.import_run.save( - update_fields=[ - "processed_rows", - "successful_rows", - "skipped_rows", - ] - ) + self._log("info", 
f"Starting import with {self.import_run.total_rows} rows") - except Exception as e: - if not self.settings.skip_errors: - self._log( - "error", - f"Fatal error processing row {row_number}: {str(e)}", - ) - self._update_status("FAILED") - raise - else: - self._log( - "warning", f"Error processing row {row_number}: {str(e)}" - ) - self.import_run.failed_rows += 1 - self.import_run.save(update_fields=["failed_rows"]) + with transaction.atomic(): + for row_number, row in enumerate(reader, start=1): + self._process_row(row, row_number) + self._increment_totals("processed", value=1) + + def _validate_file_path(self, file_path: str) -> str: + """ + Validates that the file path is within the allowed temporary directory. + Returns the absolute path. + """ + abs_path = os.path.abspath(file_path) + if not abs_path.startswith(self.TEMP_DIR): + raise ValueError(f"Invalid file path. File must be in {self.TEMP_DIR}") + return abs_path def process_file(self, file_path: str): + # Validate and get absolute path + file_path = self._validate_file_path(file_path) + self._update_status("PROCESSING") self.import_run.started_at = timezone.now() self.import_run.save(update_fields=["started_at"]) @@ -232,6 +530,12 @@ def process_file(self, file_path: str): finally: self._log("info", "Cleaning up temporary files") - default_storage.delete(file_path) + try: + if os.path.exists(file_path): + os.remove(file_path) + self._log("info", f"Deleted temporary file: {file_path}") + except OSError as e: + self._log("warning", f"Failed to delete temporary file: {str(e)}") + self.import_run.finished_at = timezone.now() self.import_run.save(update_fields=["finished_at"]) From 87345cf235bb1422f8ede4485182332b069fb72a Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 11:45:06 -0300 Subject: [PATCH 23/45] docs(requirements): add django_ace --- app/WYGIWYH/settings.py | 1 + requirements.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index d597d5d..36cea84 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -70,6 +70,7 @@ "rest_framework", "drf_spectacular", "django_cotton", + "django_ace", "apps.rules.apps.RulesConfig", "apps.calendar_view.apps.CalendarViewConfig", "apps.dca.apps.DcaConfig", diff --git a/requirements.txt b/requirements.txt index af9d39b..8c24038 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,6 +9,7 @@ django-filter==24.3 django-debug-toolbar==4.3.0 django-cachalot~=2.6.3 django-cotton~=1.2.1 +django_ace~=1.36.2 djangorestframework~=3.15.2 drf-spectacular~=0.27.2 From 3440d4405e13250b319c21074f76441270e5b4dd Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 11:47:33 -0300 Subject: [PATCH 24/45] docker: add temp volume --- docker-compose.dev.yml | 3 ++- docker-compose.prod.yml | 7 +++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index c06c0fd..133d522 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,6 +1,6 @@ volumes: wygiwyh_dev_postgres_data: {} - temp: + wygiwyh_temp: services: web: &django @@ -13,6 +13,7 @@ services: volumes: - ./app/:/usr/src/app/:z - ./frontend/:/usr/src/frontend:z + - wygiwyh_temp:/usr/src/app/temp/ ports: - "${OUTBOUND_PORT}:8000" env_file: diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index a12b4ed..b840e46 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -9,6 +9,8 @@ services: - .env depends_on: - db + volumes: + - 
wygiwyh_temp:/usr/src/app/temp/ restart: unless-stopped db: @@ -29,5 +31,10 @@ services: - db env_file: - .env + volumes: + - wygiwyh_temp:/usr/src/app/temp/ command: /start-procrastinate restart: unless-stopped + +volumes: + wygiwyh_temp: From a52f682c4fdcc50b3c7a1fbccafd0043cf8370c3 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 13:55:17 -0300 Subject: [PATCH 25/45] feat(transactions): soft delete --- app/apps/transactions/admin.py | 18 +++++ .../0028_transaction_internal_note.py | 18 +++++ .../0029_alter_transaction_options.py | 17 ++++ ...nsaction_deleted_transaction_deleted_at.py | 23 ++++++ .../0031_alter_transaction_deleted.py | 18 +++++ ...ction_created_at_transaction_updated_at.py | 25 ++++++ app/apps/transactions/models.py | 77 +++++++++++++++++++ 7 files changed, 196 insertions(+) create mode 100644 app/apps/transactions/migrations/0028_transaction_internal_note.py create mode 100644 app/apps/transactions/migrations/0029_alter_transaction_options.py create mode 100644 app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py create mode 100644 app/apps/transactions/migrations/0031_alter_transaction_deleted.py create mode 100644 app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py diff --git a/app/apps/transactions/admin.py b/app/apps/transactions/admin.py index 5a4ef15..df4d1c8 100644 --- a/app/apps/transactions/admin.py +++ b/app/apps/transactions/admin.py @@ -12,7 +12,14 @@ @admin.register(Transaction) class TransactionModelAdmin(admin.ModelAdmin): + def get_queryset(self, request): + # Use the all_objects manager to show all transactions, including deleted ones + return self.model.all_objects.all() + + list_filter = ["deleted", "type", "is_paid", "date", "account"] + list_display = [ + "deleted", "description", "type", "account__name", @@ -22,6 +29,17 @@ class TransactionModelAdmin(admin.ModelAdmin): "reference_date", ] + actions = ["hard_delete_selected"] + + def hard_delete_selected(self, request, queryset): + for obj in queryset: + obj.hard_delete() + self.message_user( + request, f"Successfully hard deleted {queryset.count()} transactions." 
+ ) + + hard_delete_selected.short_description = "Hard delete selected transactions" + class TransactionInline(admin.TabularInline): model = Transaction diff --git a/app/apps/transactions/migrations/0028_transaction_internal_note.py b/app/apps/transactions/migrations/0028_transaction_internal_note.py new file mode 100644 index 0000000..c88c11d --- /dev/null +++ b/app/apps/transactions/migrations/0028_transaction_internal_note.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-01-19 00:44 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0027_alter_transaction_description'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='internal_note', + field=models.TextField(blank=True, verbose_name='Internal Note'), + ), + ] diff --git a/app/apps/transactions/migrations/0029_alter_transaction_options.py b/app/apps/transactions/migrations/0029_alter_transaction_options.py new file mode 100644 index 0000000..c06b7cd --- /dev/null +++ b/app/apps/transactions/migrations/0029_alter_transaction_options.py @@ -0,0 +1,17 @@ +# Generated by Django 5.1.5 on 2025-01-19 14:59 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0028_transaction_internal_note'), + ] + + operations = [ + migrations.AlterModelOptions( + name='transaction', + options={'default_manager_name': 'objects', 'verbose_name': 'Transaction', 'verbose_name_plural': 'Transactions'}, + ), + ] diff --git a/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py b/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py new file mode 100644 index 0000000..35f4c91 --- /dev/null +++ b/app/apps/transactions/migrations/0030_transaction_deleted_transaction_deleted_at.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.5 on 2025-01-19 14:59 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0029_alter_transaction_options'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='deleted', + field=models.BooleanField(default=False, verbose_name='Deleted'), + ), + migrations.AddField( + model_name='transaction', + name='deleted_at', + field=models.DateTimeField(blank=True, null=True, verbose_name='Deleted At'), + ), + ] diff --git a/app/apps/transactions/migrations/0031_alter_transaction_deleted.py b/app/apps/transactions/migrations/0031_alter_transaction_deleted.py new file mode 100644 index 0000000..b5d2dc4 --- /dev/null +++ b/app/apps/transactions/migrations/0031_alter_transaction_deleted.py @@ -0,0 +1,18 @@ +# Generated by Django 5.1.5 on 2025-01-19 15:14 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0030_transaction_deleted_transaction_deleted_at'), + ] + + operations = [ + migrations.AlterField( + model_name='transaction', + name='deleted', + field=models.BooleanField(db_index=True, default=False, verbose_name='Deleted'), + ), + ] diff --git a/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py b/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py new file mode 100644 index 0000000..46e76ae --- /dev/null +++ b/app/apps/transactions/migrations/0032_transaction_created_at_transaction_updated_at.py @@ -0,0 +1,25 @@ +# Generated by Django 5.1.5 on 2025-01-19 16:48 + 
+import django.utils.timezone +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('transactions', '0031_alter_transaction_deleted'), + ] + + operations = [ + migrations.AddField( + model_name='transaction', + name='created_at', + field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now), + preserve_default=False, + ), + migrations.AddField( + model_name='transaction', + name='updated_at', + field=models.DateTimeField(auto_now=True), + ), + ] diff --git a/app/apps/transactions/models.py b/app/apps/transactions/models.py index f131518..2bd2a68 100644 --- a/app/apps/transactions/models.py +++ b/app/apps/transactions/models.py @@ -6,6 +6,7 @@ from django.db.models import Q from django.utils import timezone from django.utils.translation import gettext_lazy as _ +from django.conf import settings from apps.common.fields.month_year import MonthYearModelField from apps.common.functions.decimals import truncate_decimal @@ -15,6 +16,53 @@ logger = logging.getLogger() +class SoftDeleteQuerySet(models.QuerySet): + def delete(self): + if not settings.ENABLE_SOFT_DELETION: + # If soft deletion is disabled, perform a normal delete + return super().delete() + + # Separate the queryset into already deleted and not deleted objects + already_deleted = self.filter(deleted=True) + not_deleted = self.filter(deleted=False) + + # Use a transaction to ensure atomicity + with transaction.atomic(): + # Perform hard delete on already deleted objects + hard_deleted_count = already_deleted._raw_delete(already_deleted.db) + + # Perform soft delete on not deleted objects + soft_deleted_count = not_deleted.update( + deleted=True, deleted_at=timezone.now() + ) + + # Return a tuple of counts as expected by Django's delete method + return ( + hard_deleted_count + soft_deleted_count, + {"Transaction": hard_deleted_count + soft_deleted_count}, + ) + + def hard_delete(self): + return super().delete() + + +class SoftDeleteManager(models.Manager): + def get_queryset(self): + qs = SoftDeleteQuerySet(self.model, using=self._db) + return qs if not settings.ENABLE_SOFT_DELETION else qs.filter(deleted=False) + + +class AllObjectsManager(models.Manager): + def get_queryset(self): + return SoftDeleteQuerySet(self.model, using=self._db) + + +class DeletedObjectsManager(models.Manager): + def get_queryset(self): + qs = SoftDeleteQuerySet(self.model, using=self._db) + return qs if not settings.ENABLE_SOFT_DELETION else qs.filter(deleted=True) + + class TransactionCategory(models.Model): name = models.CharField(max_length=255, verbose_name=_("Name"), unique=True) mute = models.BooleanField(default=False, verbose_name=_("Mute")) @@ -143,10 +191,24 @@ class Type(models.TextChoices): ) internal_note = models.TextField(blank=True, verbose_name=_("Internal Note")) + deleted = models.BooleanField( + default=False, verbose_name=_("Deleted"), db_index=True + ) + created_at = models.DateTimeField(auto_now_add=True) + updated_at = models.DateTimeField(auto_now=True) + deleted_at = models.DateTimeField( + null=True, blank=True, verbose_name=_("Deleted At") + ) + + objects = SoftDeleteManager.from_queryset(SoftDeleteQuerySet)() + all_objects = AllObjectsManager.from_queryset(SoftDeleteQuerySet)() + deleted_objects = DeletedObjectsManager.from_queryset(SoftDeleteQuerySet)() + class Meta: verbose_name = _("Transaction") verbose_name_plural = _("Transactions") db_table = "transactions" + default_manager_name = "objects" def save(self, *args, **kwargs): self.amount = 
truncate_decimal( @@ -161,6 +223,17 @@ def save(self, *args, **kwargs): self.full_clean() super().save(*args, **kwargs) + def delete(self, *args, **kwargs): + if settings.ENABLE_SOFT_DELETION: + self.deleted = True + self.deleted_at = timezone.now() + self.save() + else: + super().delete(*args, **kwargs) + + def hard_delete(self, *args, **kwargs): + super().delete(*args, **kwargs) + def exchanged_amount(self): if self.account.exchange_currency: converted_amount, prefix, suffix, decimal_places = convert( @@ -179,6 +252,10 @@ def exchanged_amount(self): return None + def __str__(self): + type_display = self.get_type_display() + return f"{self.description} - {type_display} - {self.account} - {self.date}" + class InstallmentPlan(models.Model): class Recurrence(models.TextChoices): From 8a127a9f4ff545591138126a80c1ebba0cc0c331 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 13:55:25 -0300 Subject: [PATCH 26/45] feat(transactions): soft delete --- app/WYGIWYH/settings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index 36cea84..f663074 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -337,3 +337,5 @@ } CACHALOT_UNCACHABLE_TABLES = ("django_migrations", "procrastinate_jobs") + +ENABLE_SOFT_DELETION = os.environ.get("ENABLE_SOFT_DELETION", "False").lower() == "true" From 2ff33526aeb09cb2faebdfcaf489c995bab0c738 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 13:56:13 -0300 Subject: [PATCH 27/45] feat(import): disable cache when running --- app/apps/import_app/services.py | 0 app/apps/import_app/services/v1.py | 84 ++++++++++++++++-------------- 2 files changed, 44 insertions(+), 40 deletions(-) delete mode 100644 app/apps/import_app/services.py diff --git a/app/apps/import_app/services.py b/app/apps/import_app/services.py deleted file mode 100644 index e69de29..0000000 diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py index 069115b..7735342 100644 --- a/app/apps/import_app/services/v1.py +++ b/app/apps/import_app/services/v1.py @@ -7,8 +7,9 @@ from decimal import Decimal from typing import Dict, Any, Literal, Union +import cachalot.api import yaml -from django.db import transaction +from cachalot.api import cachalot_disabled from django.utils import timezone from apps.accounts.models import Account, AccountGroup @@ -277,7 +278,7 @@ def _create_entity(self, data: Dict[str, Any]) -> TransactionEntity: def _check_duplicate_transaction(self, transaction_data: Dict[str, Any]) -> bool: for rule in self.deduplication: if rule.type == "compare": - query = Transaction.objects.all().values("id") + query = Transaction.all_objects.all().values("id") # Build query conditions for each field in the rule for field, header in rule.fields.items(): @@ -484,10 +485,9 @@ def _process_csv(self, file_path): self._log("info", f"Starting import with {self.import_run.total_rows} rows") - with transaction.atomic(): - for row_number, row in enumerate(reader, start=1): - self._process_row(row, row_number) - self._increment_totals("processed", value=1) + for row_number, row in enumerate(reader, start=1): + self._process_row(row, row_number) + self._increment_totals("processed", value=1) def _validate_file_path(self, file_path: str) -> str: """ @@ -500,42 +500,46 @@ def _validate_file_path(self, file_path: str) -> str: return abs_path def process_file(self, file_path: str): - # Validate and get absolute path - file_path = self._validate_file_path(file_path) + with 
cachalot_disabled(): + # Validate and get absolute path + file_path = self._validate_file_path(file_path) - self._update_status("PROCESSING") - self.import_run.started_at = timezone.now() - self.import_run.save(update_fields=["started_at"]) + self._update_status("PROCESSING") + self.import_run.started_at = timezone.now() + self.import_run.save(update_fields=["started_at"]) - self._log("info", "Starting import process") + self._log("info", "Starting import process") - try: - if self.settings.file_type == "csv": - self._process_csv(file_path) - - if self.import_run.processed_rows == self.import_run.total_rows: - self._update_status("FINISHED") - self._log( - "info", - f"Import completed successfully. " - f"Successful: {self.import_run.successful_rows}, " - f"Failed: {self.import_run.failed_rows}, " - f"Skipped: {self.import_run.skipped_rows}", - ) + try: + if self.settings.file_type == "csv": + self._process_csv(file_path) + + if self.import_run.processed_rows == self.import_run.total_rows: + self._update_status("FINISHED") + self._log( + "info", + f"Import completed successfully. " + f"Successful: {self.import_run.successful_rows}, " + f"Failed: {self.import_run.failed_rows}, " + f"Skipped: {self.import_run.skipped_rows}", + ) - except Exception as e: - self._update_status("FAILED") - self._log("error", f"Import failed: {str(e)}") - raise Exception("Import failed") + except Exception as e: + self._update_status("FAILED") + self._log("error", f"Import failed: {str(e)}") + raise Exception("Import failed") - finally: - self._log("info", "Cleaning up temporary files") - try: - if os.path.exists(file_path): - os.remove(file_path) - self._log("info", f"Deleted temporary file: {file_path}") - except OSError as e: - self._log("warning", f"Failed to delete temporary file: {str(e)}") - - self.import_run.finished_at = timezone.now() - self.import_run.save(update_fields=["finished_at"]) + finally: + self._log("info", "Cleaning up temporary files") + try: + if os.path.exists(file_path): + os.remove(file_path) + self._log("info", f"Deleted temporary file: {file_path}") + except OSError as e: + self._log("warning", f"Failed to delete temporary file: {str(e)}") + + self.import_run.finished_at = timezone.now() + self.import_run.save(update_fields=["finished_at"]) + + if self.import_run.successful_rows >= 1: + cachalot.api.invalidate() From 18d8e8ed1aad13ba8b33045f073dcf9f6b7d5868 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 13:56:29 -0300 Subject: [PATCH 28/45] feat(import): add migrations --- .../import_app/migrations/0001_initial.py | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 app/apps/import_app/migrations/0001_initial.py diff --git a/app/apps/import_app/migrations/0001_initial.py b/app/apps/import_app/migrations/0001_initial.py new file mode 100644 index 0000000..bcce0fe --- /dev/null +++ b/app/apps/import_app/migrations/0001_initial.py @@ -0,0 +1,51 @@ +# Generated by Django 5.1.5 on 2025-01-19 00:44 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('currencies', '0006_currency_exchange_currency'), + ('transactions', '0028_transaction_internal_note'), + ] + + operations = [ + migrations.CreateModel( + name='ImportProfile', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=100)), + ('yaml_config', models.TextField(help_text='YAML 
configuration')), + ('version', models.IntegerField(choices=[(1, 'Version 1')], default=1, verbose_name='Version')), + ], + options={ + 'ordering': ['name'], + }, + ), + migrations.CreateModel( + name='ImportRun', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('status', models.CharField(choices=[('QUEUED', 'Queued'), ('PROCESSING', 'Processing'), ('FAILED', 'Failed'), ('FINISHED', 'Finished')], default='QUEUED', max_length=10, verbose_name='Status')), + ('file_name', models.CharField(help_text='File name', max_length=10000)), + ('logs', models.TextField(blank=True)), + ('processed_rows', models.IntegerField(default=0)), + ('total_rows', models.IntegerField(default=0)), + ('successful_rows', models.IntegerField(default=0)), + ('skipped_rows', models.IntegerField(default=0)), + ('failed_rows', models.IntegerField(default=0)), + ('started_at', models.DateTimeField(null=True)), + ('finished_at', models.DateTimeField(null=True)), + ('categories', models.ManyToManyField(related_name='import_runs', to='transactions.transactioncategory')), + ('currencies', models.ManyToManyField(related_name='import_runs', to='currencies.currency')), + ('entities', models.ManyToManyField(related_name='import_runs', to='transactions.transactionentity')), + ('profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='import_app.importprofile')), + ('tags', models.ManyToManyField(related_name='import_runs', to='transactions.transactiontag')), + ('transactions', models.ManyToManyField(related_name='import_runs', to='transactions.transaction')), + ], + ), + ] From f2cc0705053165d064a22a2a5a087c04e93ecbbb Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 15:16:47 -0300 Subject: [PATCH 29/45] feat(settings): add KEEP_DELETED_TRANSACTIONS_FOR variable --- app/WYGIWYH/settings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index f663074..960b0ec 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -338,4 +338,5 @@ CACHALOT_UNCACHABLE_TABLES = ("django_migrations", "procrastinate_jobs") -ENABLE_SOFT_DELETION = os.environ.get("ENABLE_SOFT_DELETION", "False").lower() == "true" +ENABLE_SOFT_DELETION = os.getenv("ENABLE_SOFT_DELETION", "True").lower() == "true" +KEEP_DELETED_TRANSACTIONS_FOR = int(os.getenv("KEEP_DELETED_ENTRIES_FOR", "365")) From 34e6914d41f01647174557667da2f60458bce8e6 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 15:17:18 -0300 Subject: [PATCH 30/45] feat(transactions:tasks): add old deleted transactions cleanup task --- app/apps/transactions/tasks.py | 39 ++++++++++++++++++++++++++++++++-- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/app/apps/transactions/tasks.py b/app/apps/transactions/tasks.py index e0bfafc..5f1c42f 100644 --- a/app/apps/transactions/tasks.py +++ b/app/apps/transactions/tasks.py @@ -1,9 +1,13 @@ import logging +from datetime import timedelta -from procrastinate.contrib.django import app +from cachalot.api import cachalot_disabled, invalidate +from django.utils import timezone +from django.conf import settings -from apps.transactions.models import RecurringTransaction +from procrastinate.contrib.django import app +from apps.transactions.models import RecurringTransaction, Transaction logger = logging.getLogger(__name__) @@ -19,3 +23,34 @@ def generate_recurring_transactions(timestamp=None): exc_info=True, ) raise e + + +@app.periodic(cron="10 1 * * 
*") +@app.task +def cleanup_deleted_transactions(): + with cachalot_disabled(): + if ( + settings.ENABLE_SOFT_DELETION + and settings.KEEP_DELETED_TRANSACTIONS_FOR == 0 + ): + return "KEEP_DELETED_TRANSACTIONS_FOR is 0, no cleanup performed." + + if not settings.ENABLE_SOFT_DELETION: + # Hard delete all soft-deleted transactions + deleted_count, _ = Transaction.deleted_objects.all().hard_delete() + return ( + f"Hard deleted {deleted_count} transactions (soft deletion disabled)." + ) + + # Calculate the cutoff date + cutoff_date = timezone.now() - timedelta( + days=settings.KEEP_DELETED_TRANSACTIONS_FOR + ) + + invalidate("transactions.Transaction") + + # Hard delete soft-deleted transactions older than the cutoff date + old_transactions = Transaction.deleted_objects.filter(deleted_at__lt=cutoff_date) + deleted_count, _ = old_transactions.hard_delete() + + return f"Hard deleted {deleted_count} objects older than {settings.KEEP_DELETED_TRANSACTIONS_FOR} days." From 76df16e48999670cf04b36f59eba8e4f10c1a804 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Sun, 19 Jan 2025 15:20:25 -0300 Subject: [PATCH 31/45] feat(import:v1:schema): add option for triggering rules --- app/apps/import_app/schemas/v1.py | 1 + app/apps/import_app/services/v1.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py index 043f2a9..74e37a1 100644 --- a/app/apps/import_app/schemas/v1.py +++ b/app/apps/import_app/schemas/v1.py @@ -69,6 +69,7 @@ class CSVImportSettings(BaseModel): skip_lines: int = Field( default=0, description="Number of rows to skip at the beginning of the file" ) + trigger_transaction_rules: bool = True importing: Literal[ "transactions", "accounts", "currencies", "categories", "tags", "entities" ] diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py index 7735342..0416caf 100644 --- a/app/apps/import_app/services/v1.py +++ b/app/apps/import_app/services/v1.py @@ -22,6 +22,7 @@ TransactionTag, TransactionEntity, ) +from apps.rules.signals import transaction_created logger = logging.getLogger(__name__) @@ -228,6 +229,9 @@ def _create_transaction(self, data: Dict[str, Any]) -> Transaction: if entities: new_transaction.entities.set(entities) + if self.settings.trigger_transaction_rules: + transaction_created.send(sender=new_transaction) + return new_transaction def _create_account(self, data: Dict[str, Any]) -> Account: From 61d5aba67ce785c9fafc0081c2dd2034fe43988e Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 14:30:17 -0300 Subject: [PATCH 32/45] feat(import): some layouts --- app/apps/import_app/forms.py | 58 +++++++++++++++++ .../import_app/fragments/profiles/add.html | 11 ++++ .../import_app/fragments/profiles/edit.html | 11 ++++ .../import_app/fragments/profiles/list.html | 65 +++++++++++++++++++ .../import_app/fragments/runs/add.html | 11 ++++ .../import_app/fragments/runs/list.html | 9 +++ 6 files changed, 165 insertions(+) create mode 100644 app/apps/import_app/forms.py create mode 100644 app/templates/import_app/fragments/profiles/add.html create mode 100644 app/templates/import_app/fragments/profiles/edit.html create mode 100644 app/templates/import_app/fragments/profiles/list.html create mode 100644 app/templates/import_app/fragments/runs/add.html create mode 100644 app/templates/import_app/fragments/runs/list.html diff --git a/app/apps/import_app/forms.py b/app/apps/import_app/forms.py new file mode 100644 index 0000000..78ee3d7 --- /dev/null +++ 
b/app/apps/import_app/forms.py @@ -0,0 +1,58 @@ +from crispy_forms.bootstrap import FormActions +from crispy_forms.helper import FormHelper +from crispy_forms.layout import ( + Layout, +) +from django import forms +from django.utils.translation import gettext_lazy as _ +from django_ace import AceWidget + +from apps.import_app.models import ImportProfile +from apps.common.widgets.crispy.submit import NoClassSubmit + + +class ImportProfileForm(forms.ModelForm): + class Meta: + model = ImportProfile + fields = [ + "name", + "version", + "yaml_config", + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.helper = FormHelper() + self.helper.form_tag = False + self.helper.form_method = "post" + self.helper.layout = Layout("name", "version", "yaml_config") + + if self.instance and self.instance.pk: + self.helper.layout.append( + FormActions( + NoClassSubmit( + "submit", _("Update"), css_class="btn btn-outline-primary w-100" + ), + ), + ) + else: + self.helper.layout.append( + FormActions( + NoClassSubmit( + "submit", _("Add"), css_class="btn btn-outline-primary w-100" + ), + ), + ) + + +class ImportRunFileUploadForm(forms.Form): + file = forms.FileField(label=_("Select a file")) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + self.helper = FormHelper() + self.helper.form_tag = False + self.helper.form_method = "post" + self.helper.layout = Layout("file") diff --git a/app/templates/import_app/fragments/profiles/add.html b/app/templates/import_app/fragments/profiles/add.html new file mode 100644 index 0000000..beda873 --- /dev/null +++ b/app/templates/import_app/fragments/profiles/add.html @@ -0,0 +1,11 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Add new import profile' %}{% endblock %} + +{% block body %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/profiles/edit.html b/app/templates/import_app/fragments/profiles/edit.html new file mode 100644 index 0000000..fa94bef --- /dev/null +++ b/app/templates/import_app/fragments/profiles/edit.html @@ -0,0 +1,11 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Edit import profile' %}{% endblock %} + +{% block body %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/profiles/list.html b/app/templates/import_app/fragments/profiles/list.html new file mode 100644 index 0000000..f1f34d2 --- /dev/null +++ b/app/templates/import_app/fragments/profiles/list.html @@ -0,0 +1,65 @@ +{% load i18n %} +
+
+ {% spaceless %} +
{% translate 'Import Profiles' %} + + +
+ {% endspaceless %} +
+ +
+
+ {% if profiles %} + + + + + + + + + + + {% for profile in profiles %} + + + + + + {% endfor %} + +
{% translate 'Name' %}{% translate 'Version' %}
+
+ + +{# #} +{#
#} +
{{ profile.name }}{{ profile.get_version_display }}
+ {% else %} + + {% endif %} +
+
+
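(Illustrative sketch, not part of the patch: how the forms and fragments above are expected to feed the import pipeline. The profile name, YAML body and file paths are placeholders; ImportProfile, ImportRun, the Versions choices and process_import.defer() are the pieces defined earlier in this series.)

    from apps.import_app.models import ImportProfile, ImportRun
    from apps.import_app.tasks import process_import

    # A saved ImportProfileForm boils down to an ImportProfile row; a real
    # yaml_config has to match the v1 schema in apps/import_app/schemas/v1.py.
    profile = ImportProfile.objects.create(
        name="Bank CSV",                           # hypothetical profile name
        version=ImportProfile.Versions.VERSION_1,
        yaml_config="...",                         # placeholder configuration
    )

    # An upload from ImportRunFileUploadForm becomes an ImportRun that the
    # procrastinate worker processes in the background.
    run = ImportRun.objects.create(profile=profile, file_name="statement.csv")
    process_import.defer(
        import_run_id=run.id,
        file_path="/usr/src/app/temp/statement.csv",  # hypothetical temp location
    )
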
diff --git a/app/templates/import_app/fragments/runs/add.html b/app/templates/import_app/fragments/runs/add.html new file mode 100644 index 0000000..d5a5b89 --- /dev/null +++ b/app/templates/import_app/fragments/runs/add.html @@ -0,0 +1,11 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Import file' %}{% endblock %} + +{% block body %} +
+ {% crispy form %} +
+{% endblock %} diff --git a/app/templates/import_app/fragments/runs/list.html b/app/templates/import_app/fragments/runs/list.html new file mode 100644 index 0000000..0697d26 --- /dev/null +++ b/app/templates/import_app/fragments/runs/list.html @@ -0,0 +1,9 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Runs for ' %}{{ profile.name }}{% endblock %} + +{% block body %} + +{% endblock %} From 0f14fd0c6275fcdbb91231088b35fa0e7511a471 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 14:30:40 -0300 Subject: [PATCH 33/45] feat(import): test yaml_config before saving --- app/apps/import_app/models.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/app/apps/import_app/models.py b/app/apps/import_app/models.py index aca04e3..b489c43 100644 --- a/app/apps/import_app/models.py +++ b/app/apps/import_app/models.py @@ -1,13 +1,18 @@ +import yaml + +from django.core.exceptions import ValidationError from django.db import models from django.utils.translation import gettext_lazy as _ +from apps.import_app.schemas import version_1 + class ImportProfile(models.Model): class Versions(models.IntegerChoices): VERSION_1 = 1, _("Version 1") - name = models.CharField(max_length=100) - yaml_config = models.TextField(help_text=_("YAML configuration")) + name = models.CharField(max_length=100, verbose_name=_("Name")) + yaml_config = models.TextField(verbose_name=_("YAML Configuration")) version = models.IntegerField( choices=Versions, default=Versions.VERSION_1, @@ -20,6 +25,14 @@ def __str__(self): class Meta: ordering = ["name"] + def clean(self): + if self.version and self.version == self.Versions.VERSION_1: + try: + yaml_data = yaml.safe_load(self.yaml_config) + version_1.ImportProfileSchema(**yaml_data) + except Exception as e: + raise ValidationError({"yaml_config": _("Invalid YAML Configuration")}) + class ImportRun(models.Model): class Status(models.TextChoices): From 07fcbe1f458bf08b13cd6529037085f45ccff87f Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 14:30:59 -0300 Subject: [PATCH 34/45] feat(import): some layouts --- app/templates/import_app/pages/profiles_index.html | 8 ++++++++ app/templates/import_app/pages/runs_index.html | 8 ++++++++ 2 files changed, 16 insertions(+) create mode 100644 app/templates/import_app/pages/profiles_index.html create mode 100644 app/templates/import_app/pages/runs_index.html diff --git a/app/templates/import_app/pages/profiles_index.html b/app/templates/import_app/pages/profiles_index.html new file mode 100644 index 0000000..a5c59ee --- /dev/null +++ b/app/templates/import_app/pages/profiles_index.html @@ -0,0 +1,8 @@ +{% extends "layouts/base.html" %} +{% load i18n %} + +{% block title %}{% translate 'Import Profiles' %}{% endblock %} + +{% block content %} +
+{% endblock %} diff --git a/app/templates/import_app/pages/runs_index.html b/app/templates/import_app/pages/runs_index.html new file mode 100644 index 0000000..38a48a6 --- /dev/null +++ b/app/templates/import_app/pages/runs_index.html @@ -0,0 +1,8 @@ +{% extends "layouts/base.html" %} +{% load i18n %} + +{% block title %}{% translate 'Import Runs' %}{% endblock %} + +{% block content %} +
+{% endblock %} From 6f096fd3ffc9b4ce8cea484fa65433fb6eef9a24 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 14:31:12 -0300 Subject: [PATCH 35/45] feat(import): some views and urls --- app/apps/import_app/tasks.py | 2 +- app/apps/import_app/urls.py | 37 +++++++- app/apps/import_app/views.py | 168 +++++++++++++++++++++++++++++++---- 3 files changed, 186 insertions(+), 21 deletions(-) diff --git a/app/apps/import_app/tasks.py b/app/apps/import_app/tasks.py index 25efcbc..cf6f3a7 100644 --- a/app/apps/import_app/tasks.py +++ b/app/apps/import_app/tasks.py @@ -8,7 +8,7 @@ logger = logging.getLogger(__name__) -@app.task(queue="imports") +@app.task def process_import(import_run_id: int, file_path: str): try: import_run = ImportRun.objects.get(id=import_run_id) diff --git a/app/apps/import_app/urls.py b/app/apps/import_app/urls.py index aea8670..c2608a3 100644 --- a/app/apps/import_app/urls.py +++ b/app/apps/import_app/urls.py @@ -2,5 +2,40 @@ import apps.import_app.views as views urlpatterns = [ - path("import/", views.ImportRunCreateView.as_view(), name="import"), + path("import/", views.import_view, name="import"), + path( + "import/profiles/", + views.import_profile_index, + name="import_profiles_index", + ), + path( + "import/profiles/list/", + views.import_profile_list, + name="import_profiles_list", + ), + path( + "import/profiles/add/", + views.import_profile_add, + name="import_profiles_add", + ), + path( + "import/profiles//edit/", + views.import_profile_edit, + name="import_profile_edit", + ), + path( + "import/profiles//runs/", + views.import_run_add, + name="import_profile_runs_index", + ), + path( + "import/profiles//runs/list/", + views.import_run_add, + name="import_profile_runs_list", + ), + path( + "import/profiles//runs/add/", + views.import_run_add, + name="import_run_add", + ), ] diff --git a/app/apps/import_app/views.py b/app/apps/import_app/views.py index d5b1d94..ce65a2b 100644 --- a/app/apps/import_app/views.py +++ b/app/apps/import_app/views.py @@ -1,26 +1,156 @@ -from django.views.generic import CreateView -from apps.import_app.models import ImportRun -from apps.import_app.services import ImportServiceV1 +import shutil +from django.contrib import messages +from django.contrib.auth.decorators import login_required +from django.core.files.storage import FileSystemStorage +from django.http import HttpResponse +from django.shortcuts import render, get_object_or_404 +from django.views.decorators.http import require_http_methods +from django.utils.translation import gettext_lazy as _ -class ImportRunCreateView(CreateView): - model = ImportRun - fields = ["profile"] +from apps.common.decorators.htmx import only_htmx +from apps.import_app.forms import ImportRunFileUploadForm, ImportProfileForm +from apps.import_app.models import ImportRun, ImportProfile +from apps.import_app.tasks import process_import - def form_valid(self, form): - response = super().form_valid(form) - import_run = form.instance - file = self.request.FILES["file"] +def import_view(request): + import_profile = ImportProfile.objects.get(id=2) + shutil.copyfile( + "/usr/src/app/apps/import_app/teste2.csv", "/usr/src/app/temp/teste2.csv" + ) + ir = ImportRun.objects.create(profile=import_profile, file_name="teste.csv") + process_import.defer( + import_run_id=ir.id, + file_path="/usr/src/app/temp/teste2.csv", + ) + return HttpResponse("Hello, world. 
You're at the polls page.") - # Save uploaded file temporarily - temp_file_path = f"/tmp/import_{import_run.id}.csv" - with open(temp_file_path, "wb+") as destination: - for chunk in file.chunks(): - destination.write(chunk) - # Process the import - import_service = ImportServiceV1(import_run) - import_service.process_file(temp_file_path) +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_index(request): + return render( + request, + "import_app/pages/profiles_index.html", + ) - return response + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_list(request): + profiles = ImportProfile.objects.all() + + return render( + request, + "import_app/fragments/profiles/list.html", + {"profiles": profiles}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_add(request): + if request.method == "POST": + form = ImportProfileForm(request.POST) + + if form.is_valid(): + form.save() + messages.success(request, _("Import Profile added successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = ImportProfileForm() + + return render( + request, + "import_app/fragments/profiles/add.html", + {"form": form}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_profile_edit(request, profile_id): + profile = get_object_or_404(ImportProfile, id=profile_id) + + if request.method == "POST": + form = ImportProfileForm(request.POST, instance=profile) + + if form.is_valid(): + form.save() + messages.success(request, _("Import Profile update successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = ImportProfileForm(instance=profile) + + return render( + request, + "import_app/fragments/profiles/edit.html", + {"form": form, "profile": profile}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_run_list(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + runs = ImportRun.objects.filter(profile=profile).order_by("id") + + return render( + request, + "import_app/fragments/runs/list.html", + {"profile": profile, "runs": runs}, + ) + + +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_run_add(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + if request.method == "POST": + form = ImportRunFileUploadForm(request.POST, request.FILES) + + if form.is_valid(): + uploaded_file = request.FILES["file"] + fs = FileSystemStorage(location="/usr/src/app/temp") + filename = fs.save(uploaded_file.name, uploaded_file) + file_path = fs.path(filename) + + import_run = ImportRun.objects.create(profile=profile, file_name=filename) + + # Defer the procrastinate task + process_import.defer(import_run_id=import_run.id, file_path=file_path) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated, hide_offcanvas", + }, + ) + else: + form = ImportRunFileUploadForm() + + return render( + request, + "import_app/fragments/runs/add.html", + {"form": form, "profile": profile}, + ) From 00b8727664bfe3ee7a27c7167a100fa0618d1058 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 23:09:49 -0300 Subject: [PATCH 36/45] feat(transactions): add internal_id field to transactions --- app/apps/import_app/schemas/v1.py | 15 +++++++++++--- .../0033_transaction_internal_id.py | 20 +++++++++++++++++++ 
app/apps/transactions/models.py | 3 +++ 3 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 app/apps/transactions/migrations/0033_transaction_internal_id.py diff --git a/app/apps/import_app/schemas/v1.py b/app/apps/import_app/schemas/v1.py index 74e37a1..22df7c2 100644 --- a/app/apps/import_app/schemas/v1.py +++ b/app/apps/import_app/schemas/v1.py @@ -167,13 +167,20 @@ class TransactionCategoryMapping(ColumnMapping): coerce_to: Literal["str|int"] = Field("str|int", frozen=True) -class TransactionInternalMapping(ColumnMapping): +class TransactionInternalNoteMapping(ColumnMapping): target: Literal["internal_note"] = Field( ..., description="Transaction field to map to" ) coerce_to: Literal["str"] = Field("str", frozen=True) +class TransactionInternalIDMapping(ColumnMapping): + target: Literal["internal_id"] = Field( + ..., description="Transaction field to map to" + ) + coerce_to: Literal["str"] = Field("str", frozen=True) + + class CategoryNameMapping(ColumnMapping): target: Literal["category_name"] = Field( ..., description="Category field to map to" @@ -314,7 +321,8 @@ class ImportProfileSchema(BaseModel): | TransactionTagsMapping | TransactionEntitiesMapping | TransactionCategoryMapping - | TransactionInternalMapping + | TransactionInternalNoteMapping + | TransactionInternalIDMapping | CategoryNameMapping | CategoryMuteMapping | CategoryActiveMapping @@ -358,7 +366,8 @@ def validate_mappings(self) -> "ImportProfileSchema": TransactionTagsMapping, TransactionEntitiesMapping, TransactionCategoryMapping, - TransactionInternalMapping, + TransactionInternalNoteMapping, + TransactionInternalIDMapping, ), "accounts": ( AccountNameMapping, diff --git a/app/apps/transactions/migrations/0033_transaction_internal_id.py b/app/apps/transactions/migrations/0033_transaction_internal_id.py new file mode 100644 index 0000000..b7d578c --- /dev/null +++ b/app/apps/transactions/migrations/0033_transaction_internal_id.py @@ -0,0 +1,20 @@ +# Generated by Django 5.1.5 on 2025-01-21 01:56 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("transactions", "0032_transaction_created_at_transaction_updated_at"), + ] + + operations = [ + migrations.AddField( + model_name="transaction", + name="internal_id", + field=models.TextField( + blank=True, null=True, unique=True, verbose_name="Internal ID" + ), + ), + ] diff --git a/app/apps/transactions/models.py b/app/apps/transactions/models.py index 2bd2a68..85ff53a 100644 --- a/app/apps/transactions/models.py +++ b/app/apps/transactions/models.py @@ -190,6 +190,9 @@ class Type(models.TextChoices): verbose_name=_("Recurring Transaction"), ) internal_note = models.TextField(blank=True, verbose_name=_("Internal Note")) + internal_id = models.TextField( + blank=True, null=True, unique=True, verbose_name=_("Internal ID") + ) deleted = models.BooleanField( default=False, verbose_name=_("Deleted"), db_index=True From a415e285ee261264282a9d93c175e4172def92a2 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Mon, 20 Jan 2025 23:10:11 -0300 Subject: [PATCH 37/45] feat(transactions): make deleted_at readonly on admin --- app/apps/transactions/admin.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/app/apps/transactions/admin.py b/app/apps/transactions/admin.py index df4d1c8..8f37317 100644 --- a/app/apps/transactions/admin.py +++ b/app/apps/transactions/admin.py @@ -19,15 +19,16 @@ def get_queryset(self, request): list_filter = ["deleted", "type", "is_paid", "date", "account"] 
list_display = [ - "deleted", + "date", "description", "type", "account__name", "amount", "account__currency__code", - "date", "reference_date", + "deleted", ] + readonly_fields = ["deleted_at"] actions = ["hard_delete_selected"] From ece44f27265d2beb7bc186ba3563c143fff2937f Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Wed, 22 Jan 2025 01:41:17 -0300 Subject: [PATCH 38/45] feat(import): more UI and endpoints --- app/apps/import_app/forms.py | 9 +- app/apps/import_app/services/v1.py | 18 ++- app/apps/import_app/urls.py | 22 +++- app/apps/import_app/views.py | 58 ++++++++- .../import_app/fragments/profiles/list.html | 38 ++++-- .../import_app/fragments/runs/add.html | 4 +- .../import_app/fragments/runs/list.html | 111 ++++++++++++++++++ 7 files changed, 227 insertions(+), 33 deletions(-) diff --git a/app/apps/import_app/forms.py b/app/apps/import_app/forms.py index 78ee3d7..f300721 100644 --- a/app/apps/import_app/forms.py +++ b/app/apps/import_app/forms.py @@ -55,4 +55,11 @@ def __init__(self, *args, **kwargs): self.helper = FormHelper() self.helper.form_tag = False self.helper.form_method = "post" - self.helper.layout = Layout("file") + self.helper.layout = Layout( + "file", + FormActions( + NoClassSubmit( + "submit", _("Import"), css_class="btn btn-outline-primary w-100" + ), + ), + ) diff --git a/app/apps/import_app/services/v1.py b/app/apps/import_app/services/v1.py index 0416caf..abda751 100644 --- a/app/apps/import_app/services/v1.py +++ b/app/apps/import_app/services/v1.py @@ -491,7 +491,6 @@ def _process_csv(self, file_path): for row_number, row in enumerate(reader, start=1): self._process_row(row, row_number) - self._increment_totals("processed", value=1) def _validate_file_path(self, file_path: str) -> str: """ @@ -518,15 +517,14 @@ def process_file(self, file_path: str): if self.settings.file_type == "csv": self._process_csv(file_path) - if self.import_run.processed_rows == self.import_run.total_rows: - self._update_status("FINISHED") - self._log( - "info", - f"Import completed successfully. " - f"Successful: {self.import_run.successful_rows}, " - f"Failed: {self.import_run.failed_rows}, " - f"Skipped: {self.import_run.skipped_rows}", - ) + self._update_status("FINISHED") + self._log( + "info", + f"Import completed successfully. 
" + f"Successful: {self.import_run.successful_rows}, " + f"Failed: {self.import_run.failed_rows}, " + f"Skipped: {self.import_run.skipped_rows}", + ) except Exception as e: self._update_status("FAILED") diff --git a/app/apps/import_app/urls.py b/app/apps/import_app/urls.py index c2608a3..beb65ba 100644 --- a/app/apps/import_app/urls.py +++ b/app/apps/import_app/urls.py @@ -13,6 +13,11 @@ views.import_profile_list, name="import_profiles_list", ), + path( + "import/profiles//delete/", + views.import_profile_delete, + name="import_profile_delete", + ), path( "import/profiles/add/", views.import_profile_add, @@ -23,16 +28,21 @@ views.import_profile_edit, name="import_profile_edit", ), - path( - "import/profiles//runs/", - views.import_run_add, - name="import_profile_runs_index", - ), path( "import/profiles//runs/list/", - views.import_run_add, + views.import_runs_list, name="import_profile_runs_list", ), + path( + "import/profiles//runs//log/", + views.import_run_log, + name="import_run_log", + ), + path( + "import/profiles//runs//delete/", + views.import_run_delete, + name="import_run_delete", + ), path( "import/profiles//runs/add/", views.import_run_add, diff --git a/app/apps/import_app/views.py b/app/apps/import_app/views.py index ce65a2b..6b869fd 100644 --- a/app/apps/import_app/views.py +++ b/app/apps/import_app/views.py @@ -5,6 +5,7 @@ from django.core.files.storage import FileSystemStorage from django.http import HttpResponse from django.shortcuts import render, get_object_or_404 +from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_http_methods from django.utils.translation import gettext_lazy as _ @@ -105,13 +106,32 @@ def import_profile_edit(request, profile_id): ) +@only_htmx +@login_required +@csrf_exempt +@require_http_methods(["DELETE"]) +def import_profile_delete(request, profile_id): + profile = ImportProfile.objects.get(id=profile_id) + + profile.delete() + + messages.success(request, _("Import Profile deleted successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated", + }, + ) + + @only_htmx @login_required @require_http_methods(["GET", "POST"]) -def import_run_list(request, profile_id): +def import_runs_list(request, profile_id): profile = ImportProfile.objects.get(id=profile_id) - runs = ImportRun.objects.filter(profile=profile).order_by("id") + runs = ImportRun.objects.filter(profile=profile).order_by("-id") return render( request, @@ -120,6 +140,19 @@ def import_run_list(request, profile_id): ) +@only_htmx +@login_required +@require_http_methods(["GET", "POST"]) +def import_run_log(request, profile_id, run_id): + run = ImportRun.objects.get(profile__id=profile_id, id=run_id) + + return render( + request, + "import_app/fragments/runs/log.html", + {"run": run}, + ) + + @only_htmx @login_required @require_http_methods(["GET", "POST"]) @@ -140,6 +173,8 @@ def import_run_add(request, profile_id): # Defer the procrastinate task process_import.defer(import_run_id=import_run.id, file_path=file_path) + messages.success(request, _("Import Run queued successfully")) + return HttpResponse( status=204, headers={ @@ -154,3 +189,22 @@ def import_run_add(request, profile_id): "import_app/fragments/runs/add.html", {"form": form, "profile": profile}, ) + + +@only_htmx +@login_required +@csrf_exempt +@require_http_methods(["DELETE"]) +def import_run_delete(request, profile_id, run_id): + run = ImportRun.objects.get(profile__id=profile_id, id=run_id) + + run.delete() + + messages.success(request, _("Run 
deleted successfully")) + + return HttpResponse( + status=204, + headers={ + "HX-Trigger": "updated", + }, + ) diff --git a/app/templates/import_app/fragments/profiles/list.html b/app/templates/import_app/fragments/profiles/list.html index f1f34d2..2872897 100644 --- a/app/templates/import_app/fragments/profiles/list.html +++ b/app/templates/import_app/fragments/profiles/list.html @@ -38,18 +38,32 @@ hx-get="{% url 'import_profile_edit' profile_id=profile.id %}" hx-target="#generic-offcanvas"> -{# #} -{# #} + + + + + + {{ profile.name }} {{ profile.get_version_display }} diff --git a/app/templates/import_app/fragments/runs/add.html b/app/templates/import_app/fragments/runs/add.html index d5a5b89..9997044 100644 --- a/app/templates/import_app/fragments/runs/add.html +++ b/app/templates/import_app/fragments/runs/add.html @@ -2,10 +2,10 @@ {% load i18n %} {% load crispy_forms_tags %} -{% block title %}{% translate 'Import file' %}{% endblock %} +{% block title %}{% translate 'Import file with profile' %} {{ profile.name }}{% endblock %} {% block body %} -
+ {% crispy form %}
{% endblock %} diff --git a/app/templates/import_app/fragments/runs/list.html b/app/templates/import_app/fragments/runs/list.html index 0697d26..f67054c 100644 --- a/app/templates/import_app/fragments/runs/list.html +++ b/app/templates/import_app/fragments/runs/list.html @@ -5,5 +5,116 @@ {% block title %}{% translate 'Runs for ' %}{{ profile.name }}{% endblock %} {% block body %} +
+ {% if runs %} +
+ {% for run in runs %} +
+
+
+ {{ run.get_status_display }} +
+
+
{{ run.id }}({{ run.file_name }})
+
+
+
+
+
+
+ {% trans 'Total Items' %} +
+
+ {{ run.total_rows }} +
+
+
+
+
+
+
+
+ {% trans 'Processed Items' %} +
+
+ {{ run.processed_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Skipped Items' %} +
+
+ {{ run.skipped_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Failed Items' %} +
+
+ {{ run.failed_rows }} +
+
+
+
+ +
+
+
+
+ {% trans 'Successful Items' %} +
+
+ {{ run.successful_rows }} +
+
+
+
+ +
+
+ +
+
+ {% endfor %} + {% else %} + + {% endif %} +
+
{% endblock %} From cabd03e7e65b9640bcf5f0c1f3d64e05794e234a Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Thu, 23 Jan 2025 11:43:35 -0300 Subject: [PATCH 39/45] feat: presets --- app/apps/common/templatetags/json.py | 11 +++++ .../0002_alter_importprofile_name_and_more.py | 23 ++++++++++ app/apps/import_app/models.py | 4 +- app/apps/import_app/services/__init__.py | 2 + app/apps/import_app/services/presets.py | 45 +++++++++++++++++++ app/apps/import_app/urls.py | 5 +++ app/apps/import_app/views.py | 26 ++++++++++- ...alter_usersettings_date_format_and_more.py | 28 ++++++++++++ .../import_app/fragments/profiles/add.html | 10 ++++- .../import_app/fragments/profiles/list.html | 25 ++++++++--- .../fragments/profiles/list_presets.html | 43 ++++++++++++++++++ .../import_app/fragments/runs/log.html | 13 ++++++ 12 files changed, 223 insertions(+), 12 deletions(-) create mode 100644 app/apps/common/templatetags/json.py create mode 100644 app/apps/import_app/migrations/0002_alter_importprofile_name_and_more.py create mode 100644 app/apps/import_app/services/presets.py create mode 100644 app/apps/users/migrations/0014_alter_usersettings_date_format_and_more.py create mode 100644 app/templates/import_app/fragments/profiles/list_presets.html create mode 100644 app/templates/import_app/fragments/runs/log.html diff --git a/app/apps/common/templatetags/json.py b/app/apps/common/templatetags/json.py new file mode 100644 index 0000000..8fb45e2 --- /dev/null +++ b/app/apps/common/templatetags/json.py @@ -0,0 +1,11 @@ +import json + +from django import template + + +register = template.Library() + + +@register.filter("json") +def convert_to_json(value): + return json.dumps(value) diff --git a/app/apps/import_app/migrations/0002_alter_importprofile_name_and_more.py b/app/apps/import_app/migrations/0002_alter_importprofile_name_and_more.py new file mode 100644 index 0000000..efa1ee3 --- /dev/null +++ b/app/apps/import_app/migrations/0002_alter_importprofile_name_and_more.py @@ -0,0 +1,23 @@ +# Generated by Django 5.1.5 on 2025-01-23 03:03 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('import_app', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='importprofile', + name='name', + field=models.CharField(max_length=100, unique=True, verbose_name='Name'), + ), + migrations.AlterField( + model_name='importprofile', + name='yaml_config', + field=models.TextField(verbose_name='YAML Configuration'), + ), + ] diff --git a/app/apps/import_app/models.py b/app/apps/import_app/models.py index b489c43..c0224d8 100644 --- a/app/apps/import_app/models.py +++ b/app/apps/import_app/models.py @@ -9,9 +9,9 @@ class ImportProfile(models.Model): class Versions(models.IntegerChoices): - VERSION_1 = 1, _("Version 1") + VERSION_1 = 1, _("Version") + " 1" - name = models.CharField(max_length=100, verbose_name=_("Name")) + name = models.CharField(max_length=100, verbose_name=_("Name"), unique=True) yaml_config = models.TextField(verbose_name=_("YAML Configuration")) version = models.IntegerField( choices=Versions, diff --git a/app/apps/import_app/services/__init__.py b/app/apps/import_app/services/__init__.py index 6001902..88aa4e8 100644 --- a/app/apps/import_app/services/__init__.py +++ b/app/apps/import_app/services/__init__.py @@ -1 +1,3 @@ from apps.import_app.services.v1 import ImportService as ImportServiceV1 + +from apps.import_app.services.presets import PresetService diff --git a/app/apps/import_app/services/presets.py 
b/app/apps/import_app/services/presets.py new file mode 100644 index 0000000..15e7ac1 --- /dev/null +++ b/app/apps/import_app/services/presets.py @@ -0,0 +1,45 @@ +import json +from pathlib import Path + +from apps.import_app.models import ImportProfile + + +class PresetService: + PRESET_PATH = "/usr/src/app/import_presets" + + @classmethod + def get_all_presets(cls): + presets = [] + + for folder in Path(cls.PRESET_PATH).iterdir(): + if folder.is_dir(): + manifest_path = folder / "manifest.json" + config_path = folder / "config.yml" + + if manifest_path.exists() and config_path.exists(): + with open(manifest_path) as f: + manifest = json.load(f) + + with open(config_path) as f: + config = json.dumps(f.read()) + + try: + preset = { + "name": manifest.get("name", folder.name), + "description": manifest.get("description", ""), + "message": json.dumps(manifest.get("message", "")), + "authors": manifest.get("author", "").split(","), + "schema_version": (int(manifest.get("schema_version", 1))), + "folder_name": folder.name, + "config": config, + } + + ImportProfile.Versions( + preset["schema_version"] + ) # Check if schema version is valid + except Exception as e: + print(e) + else: + presets.append(preset) + + return presets diff --git a/app/apps/import_app/urls.py b/app/apps/import_app/urls.py index beb65ba..eae9851 100644 --- a/app/apps/import_app/urls.py +++ b/app/apps/import_app/urls.py @@ -3,6 +3,11 @@ urlpatterns = [ path("import/", views.import_view, name="import"), + path( + "import/presets/", + views.import_presets_list, + name="import_presets_list", + ), path( "import/profiles/", views.import_profile_index, diff --git a/app/apps/import_app/views.py b/app/apps/import_app/views.py index 6b869fd..720a5e1 100644 --- a/app/apps/import_app/views.py +++ b/app/apps/import_app/views.py @@ -13,6 +13,7 @@ from apps.import_app.forms import ImportRunFileUploadForm, ImportProfileForm from apps.import_app.models import ImportRun, ImportProfile from apps.import_app.tasks import process_import +from apps.import_app.services import PresetService def import_view(request): @@ -28,6 +29,18 @@ def import_view(request): return HttpResponse("Hello, world. 
You're at the polls page.") +@login_required +@require_http_methods(["GET"]) +def import_presets_list(request): + presets = PresetService.get_all_presets() + print(presets) + return render( + request, + "import_app/fragments/profiles/list_presets.html", + {"presets": presets}, + ) + + @login_required @require_http_methods(["GET", "POST"]) def import_profile_index(request): @@ -54,6 +67,8 @@ def import_profile_list(request): @login_required @require_http_methods(["GET", "POST"]) def import_profile_add(request): + message = request.GET.get("message", None) or request.POST.get("message", None) + if request.method == "POST": form = ImportProfileForm(request.POST) @@ -68,12 +83,19 @@ def import_profile_add(request): }, ) else: - form = ImportProfileForm() + print(int(request.GET.get("version", 1))) + form = ImportProfileForm( + initial={ + "name": request.GET.get("name"), + "version": int(request.GET.get("version", 1)), + "yaml_config": request.GET.get("yaml_config"), + } + ) return render( request, "import_app/fragments/profiles/add.html", - {"form": form}, + {"form": form, "message": message}, ) diff --git a/app/apps/users/migrations/0014_alter_usersettings_date_format_and_more.py b/app/apps/users/migrations/0014_alter_usersettings_date_format_and_more.py new file mode 100644 index 0000000..e38b096 --- /dev/null +++ b/app/apps/users/migrations/0014_alter_usersettings_date_format_and_more.py @@ -0,0 +1,28 @@ +# Generated by Django 5.1.5 on 2025-01-23 03:05 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0013_usersettings_date_format_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='usersettings', + name='date_format', + field=models.CharField(default='SHORT_DATE_FORMAT', max_length=100, verbose_name='Date Format'), + ), + migrations.AlterField( + model_name='usersettings', + name='datetime_format', + field=models.CharField(default='SHORT_DATETIME_FORMAT', max_length=100, verbose_name='Datetime Format'), + ), + migrations.AlterField( + model_name='usersettings', + name='language', + field=models.CharField(choices=[('auto', 'Auto'), ('en', 'English'), ('nl', 'Nederlands'), ('pt-br', 'Português (Brasil)')], default='auto', max_length=10, verbose_name='Language'), + ), + ] diff --git a/app/templates/import_app/fragments/profiles/add.html b/app/templates/import_app/fragments/profiles/add.html index beda873..03eb9a5 100644 --- a/app/templates/import_app/fragments/profiles/add.html +++ b/app/templates/import_app/fragments/profiles/add.html @@ -1,11 +1,19 @@ {% extends 'extends/offcanvas.html' %} +{% load json %} {% load i18n %} {% load crispy_forms_tags %} {% block title %}{% translate 'Add new import profile' %}{% endblock %} {% block body %} -
+{% if message %} + +{% endif %} + {% crispy form %}
{% endblock %} diff --git a/app/templates/import_app/fragments/profiles/list.html b/app/templates/import_app/fragments/profiles/list.html index 2872897..cdc9a83 100644 --- a/app/templates/import_app/fragments/profiles/list.html +++ b/app/templates/import_app/fragments/profiles/list.html @@ -3,13 +3,24 @@
{% spaceless %}
{% translate 'Import Profiles' %} - - + + + +
{% endspaceless %}
diff --git a/app/templates/import_app/fragments/profiles/list_presets.html b/app/templates/import_app/fragments/profiles/list_presets.html new file mode 100644 index 0000000..0b64342 --- /dev/null +++ b/app/templates/import_app/fragments/profiles/list_presets.html @@ -0,0 +1,43 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Import Presets' %}{% endblock %} + +{% block body %} + {% if presets %} + + +{% endblock %} diff --git a/app/templates/import_app/fragments/runs/log.html b/app/templates/import_app/fragments/runs/log.html new file mode 100644 index 0000000..a7445a4 --- /dev/null +++ b/app/templates/import_app/fragments/runs/log.html @@ -0,0 +1,13 @@ +{% extends 'extends/offcanvas.html' %} +{% load i18n %} +{% load crispy_forms_tags %} + +{% block title %}{% translate 'Logs for' %} #{{ run.id }}{% endblock %} + +{% block body %} +
+
+ {{ run.logs|linebreaks }} +
+
+{% endblock %} From aaee602b713c51a0528fe19b90338b8c1c53caf2 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Thu, 23 Jan 2025 12:54:26 -0300 Subject: [PATCH 40/45] refactor: remove django-ace for now --- app/WYGIWYH/settings.py | 1 - requirements.txt | 1 - 2 files changed, 2 deletions(-) diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index 960b0ec..9a5a496 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -70,7 +70,6 @@ "rest_framework", "drf_spectacular", "django_cotton", - "django_ace", "apps.rules.apps.RulesConfig", "apps.calendar_view.apps.CalendarViewConfig", "apps.dca.apps.DcaConfig", diff --git a/requirements.txt b/requirements.txt index 8c24038..af9d39b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,7 +9,6 @@ django-filter==24.3 django-debug-toolbar==4.3.0 django-cachalot~=2.6.3 django-cotton~=1.2.1 -django_ace~=1.36.2 djangorestframework~=3.15.2 drf-spectacular~=0.27.2 From a805880e9b52746dc9258b5b71d11410879820b5 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Thu, 23 Jan 2025 12:55:01 -0300 Subject: [PATCH 41/45] git: keep import_presets folder --- app/import_presets/.gitkeep | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 app/import_presets/.gitkeep diff --git a/app/import_presets/.gitkeep b/app/import_presets/.gitkeep new file mode 100644 index 0000000..e69de29 From d7de6c17a9e80d8a9c562e27b037ab4ddb64dfb0 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Thu, 23 Jan 2025 14:04:40 -0300 Subject: [PATCH 42/45] refactor: remove django-ace for now --- app/apps/import_app/forms.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app/apps/import_app/forms.py b/app/apps/import_app/forms.py index f300721..83eb6c4 100644 --- a/app/apps/import_app/forms.py +++ b/app/apps/import_app/forms.py @@ -5,7 +5,6 @@ ) from django import forms from django.utils.translation import gettext_lazy as _ -from django_ace import AceWidget from apps.import_app.models import ImportProfile from apps.common.widgets.crispy.submit import NoClassSubmit From 962a8efa269e7f4c65227f96cb32a687331d9c41 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Thu, 23 Jan 2025 14:04:58 -0300 Subject: [PATCH 43/45] feat(navbar): add import to management menu --- app/templates/includes/navbar.html | 2 ++ 1 file changed, 2 insertions(+) diff --git a/app/templates/includes/navbar.html b/app/templates/includes/navbar.html index 66aafa4..f781d05 100644 --- a/app/templates/includes/navbar.html +++ b/app/templates/includes/navbar.html @@ -120,6 +120,8 @@
  • {% translate 'Rules' %}
  • +
  • {% translate 'Import' %} beta
  • From 4ef4609a96aa3f4551966f8fa9475e5ce89cce65 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Thu, 23 Jan 2025 14:24:31 -0300 Subject: [PATCH 44/45] fix(navbar): wrong active link for navbar import item --- app/templates/includes/navbar.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/templates/includes/navbar.html b/app/templates/includes/navbar.html index f781d05..a4ea8cc 100644 --- a/app/templates/includes/navbar.html +++ b/app/templates/includes/navbar.html @@ -120,7 +120,7 @@
  • {% translate 'Rules' %}
  • -
  • {% translate 'Import' %} beta
  • From e3d3a7cf91c4b1d0eae4dd90af88176b221984e1 Mon Sep 17 00:00:00 2001 From: Herculino Trotta Date: Thu, 23 Jan 2025 14:30:59 -0300 Subject: [PATCH 45/45] feat: add new envs --- .env.example | 6 ++++++ app/WYGIWYH/settings.py | 2 +- app/apps/transactions/models.py | 8 ++++---- app/apps/transactions/tasks.py | 7 ++----- 4 files changed, 13 insertions(+), 10 deletions(-) diff --git a/.env.example b/.env.example index d7b1933..376d5dd 100644 --- a/.env.example +++ b/.env.example @@ -18,3 +18,9 @@ SQL_PORT=5432 # Gunicorn WEB_CONCURRENCY=4 + +# App Configs +# Enable this if you want to keep deleted transactions in the database +ENABLE_SOFT_DELETE=false +# If ENABLE_SOFT_DELETE is true, transactions deleted for more than KEEP_DELETED_TRANSACTIONS_FOR days will be truly deleted. Set to 0 to keep all. +KEEP_DELETED_TRANSACTIONS_FOR=365 diff --git a/app/WYGIWYH/settings.py b/app/WYGIWYH/settings.py index 9a5a496..4067a62 100644 --- a/app/WYGIWYH/settings.py +++ b/app/WYGIWYH/settings.py @@ -337,5 +337,5 @@ CACHALOT_UNCACHABLE_TABLES = ("django_migrations", "procrastinate_jobs") -ENABLE_SOFT_DELETION = os.getenv("ENABLE_SOFT_DELETION", "True").lower() == "true" +ENABLE_SOFT_DELETE = os.getenv("ENABLE_SOFT_DELETION", "false").lower() == "true" KEEP_DELETED_TRANSACTIONS_FOR = int(os.getenv("KEEP_DELETED_ENTRIES_FOR", "365")) diff --git a/app/apps/transactions/models.py b/app/apps/transactions/models.py index 85ff53a..4b21019 100644 --- a/app/apps/transactions/models.py +++ b/app/apps/transactions/models.py @@ -18,7 +18,7 @@ class SoftDeleteQuerySet(models.QuerySet): def delete(self): - if not settings.ENABLE_SOFT_DELETION: + if not settings.ENABLE_SOFT_DELETE: # If soft deletion is disabled, perform a normal delete return super().delete() @@ -49,7 +49,7 @@ def hard_delete(self): class SoftDeleteManager(models.Manager): def get_queryset(self): qs = SoftDeleteQuerySet(self.model, using=self._db) - return qs if not settings.ENABLE_SOFT_DELETION else qs.filter(deleted=False) + return qs if not settings.ENABLE_SOFT_DELETE else qs.filter(deleted=False) class AllObjectsManager(models.Manager): @@ -60,7 +60,7 @@ def get_queryset(self): class DeletedObjectsManager(models.Manager): def get_queryset(self): qs = SoftDeleteQuerySet(self.model, using=self._db) - return qs if not settings.ENABLE_SOFT_DELETION else qs.filter(deleted=True) + return qs if not settings.ENABLE_SOFT_DELETE else qs.filter(deleted=True) class TransactionCategory(models.Model): @@ -227,7 +227,7 @@ def save(self, *args, **kwargs): super().save(*args, **kwargs) def delete(self, *args, **kwargs): - if settings.ENABLE_SOFT_DELETION: + if settings.ENABLE_SOFT_DELETE: self.deleted = True self.deleted_at = timezone.now() self.save() diff --git a/app/apps/transactions/tasks.py b/app/apps/transactions/tasks.py index 5f1c42f..0833f4e 100644 --- a/app/apps/transactions/tasks.py +++ b/app/apps/transactions/tasks.py @@ -29,13 +29,10 @@ def generate_recurring_transactions(timestamp=None): @app.task def cleanup_deleted_transactions(): with cachalot_disabled(): - if ( - settings.ENABLE_SOFT_DELETION - and settings.KEEP_DELETED_TRANSACTIONS_FOR == 0 - ): + if settings.ENABLE_SOFT_DELETE and settings.KEEP_DELETED_TRANSACTIONS_FOR == 0: return "KEEP_DELETED_TRANSACTIONS_FOR is 0, no cleanup performed." - if not settings.ENABLE_SOFT_DELETION: + if not settings.ENABLE_SOFT_DELETE: # Hard delete all soft-deleted transactions deleted_count, _ = Transaction.deleted_objects.all().hard_delete() return (
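
(Illustrative sketch, not part of the patches: the behaviour the soft-delete managers, Transaction.delete()/hard_delete() and the cleanup task added in this series are expected to have once ENABLE_SOFT_DELETE is enabled. The transaction instance is a placeholder.)

    from apps.transactions.models import Transaction

    tx = Transaction.objects.first()   # placeholder; assumes at least one row exists
    tx.delete()                        # flags deleted=True, sets deleted_at, the row stays in the DB

    Transaction.objects.filter(pk=tx.pk).exists()           # False: the default manager hides soft-deleted rows
    Transaction.all_objects.filter(pk=tx.pk).exists()        # True: the unfiltered manager still sees them
    Transaction.deleted_objects.filter(pk=tx.pk).exists()    # True: only soft-deleted rows

    # delete() on a queryset of already-flagged rows hard-deletes them, and
    # hard_delete() always bypasses the soft-delete path.
    Transaction.deleted_objects.filter(pk=tx.pk).hard_delete()

    # cleanup_deleted_transactions() later purges soft-deleted rows whose deleted_at
    # is older than KEEP_DELETED_TRANSACTIONS_FOR days (0 keeps them indefinitely).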