feat(cli): Dump all tables as JSON (#259)
* feat(cli): Dump all tables as JSON

* feat: all tables

* test(dump_tables): not so working test

* chore: fix ci

* test: dump-table

* chore: ci fixes

* test: fix

* feat: change default output directory.

* fix: coverage

* fix: updated ignore

---------

Co-authored-by: Alc-Alc <alc@localhost>
Co-authored-by: Cody Fincher <[email protected]>
3 people authored Oct 24, 2024
1 parent e9b1355 commit d1e08bb
Showing 4 changed files with 147 additions and 7 deletions.
43 changes: 41 additions & 2 deletions advanced_alchemy/alembic/utils.py
@@ -1,13 +1,17 @@
from __future__ import annotations

-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, ContextManager

from litestar.cli._utils import console
from sqlalchemy import Engine, MetaData, Table
from typing_extensions import TypeIs

if TYPE_CHECKING:
-    from sqlalchemy.ext.asyncio import AsyncEngine
+    from pathlib import Path
+    from typing import AsyncContextManager
+
+    from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession
+    from sqlalchemy.orm import DeclarativeBase, Session


async def drop_all(engine: AsyncEngine | Engine, version_table_name: str, metadata: MetaData) -> None:
@@ -35,3 +39,38 @@ async def _drop_tables_async(engine: AsyncEngine) -> None:
    if _is_sync(engine):
        return _drop_tables_sync(engine)
    return await _drop_tables_async(engine)
+
+
+async def dump_tables(dump_dir: Path, session: ContextManager[Session] | AsyncContextManager[AsyncSession], models: list[type[DeclarativeBase]]) -> None:
+    from types import new_class
+
+    from advanced_alchemy._serialization import encode_json
+
+    def _is_sync(
+        session: AsyncContextManager[AsyncSession] | ContextManager[Session],
+    ) -> TypeIs[ContextManager[Session]]:
+        return isinstance(session, ContextManager)
+
+    def _dump_table_sync(session: ContextManager[Session]) -> None:
+        from advanced_alchemy.repository import SQLAlchemySyncRepository
+        with session as _session:
+            for model in models:
+                json_path = dump_dir / f"{model.__tablename__}.json"
+                console.rule(f"[yellow bold]Dumping table '{json_path.stem}' to '{json_path}'", style="yellow", align="left")
+                repo = new_class("repo", (SQLAlchemySyncRepository,), exec_body=lambda ns, model=model: ns.setdefault("model_type", model))  # type: ignore[misc]
+                json_path.write_text(encode_json([row.to_dict() for row in repo(session=_session).list()]))
+
+    async def _dump_table_async(session: AsyncContextManager[AsyncSession]) -> None:
+        from advanced_alchemy.repository import SQLAlchemyAsyncRepository
+        async with session as _session:
+            for model in models:
+                json_path = dump_dir / f"{model.__tablename__}.json"
+                console.rule(f"[yellow bold]Dumping table '{json_path.stem}' to '{json_path}'", style="yellow", align="left")
+                repo = new_class("repo", (SQLAlchemyAsyncRepository,), exec_body=lambda ns, model=model: ns.setdefault("model_type", model))  # type: ignore[misc]
+                json_path.write_text(encode_json([row.to_dict() for row in await repo(session=_session).list()]))
+
+    dump_dir.mkdir(exist_ok=True)
+
+    if _is_sync(session):
+        return _dump_table_sync(session)
+    return await _dump_table_async(session)
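
Note on the helper above: the new_class("repo", (SQLAlchemySyncRepository,), ...) call simply builds, at runtime, the equivalent of a one-off `class repo(SQLAlchemySyncRepository): model_type = model` for each model. Below is a minimal sketch of driving dump_tables directly over the synchronous path; the SampleModel class, SQLite URL, and "fixtures" output directory are illustrative assumptions, not part of this commit.

# Hypothetical standalone usage of dump_tables (sync path); SampleModel and
# the sqlite URL are made up for illustration.
import asyncio
from pathlib import Path

from sqlalchemy import String, create_engine
from sqlalchemy.orm import Mapped, mapped_column, sessionmaker

from advanced_alchemy.alembic.utils import dump_tables
from advanced_alchemy.base import UUIDBase


class SampleModel(UUIDBase):
    name: Mapped[str] = mapped_column(String(50))


engine = create_engine("sqlite:///example.db")
UUIDBase.metadata.create_all(engine)

# Session objects are context managers, so calling the factory produces the
# ContextManager[Session] that the _is_sync branch of dump_tables expects.
session_factory = sessionmaker(engine)
asyncio.run(dump_tables(Path("fixtures"), session_factory(), [SampleModel]))
# Result: fixtures/sample_model.json containing a JSON array of row dicts.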
58 changes: 58 additions & 0 deletions advanced_alchemy/extensions/litestar/cli.py
@@ -1,8 +1,10 @@
from __future__ import annotations

+from pathlib import Path
from typing import TYPE_CHECKING, Sequence, cast

from anyio import run
+from click import Path as ClickPath
from click import argument, group, option
from litestar.cli._utils import LitestarGroup, console

@@ -356,3 +358,59 @@ async def _drop_all(
        _drop_all,
        config,
    )
+
+@database_group.command(name="dump-data", help="Dump specified tables from the database to JSON files.")
+@option(
+    "--table",
+    "table_names",
+    help="Name of the table to dump. Multiple tables can be specified. Use '*' to dump all tables.",
+    type=str,
+    required=True,
+    multiple=True,
+)
+@option(
+    "--dir",
"fixtures",
help="Directory to save the JSON files. Defaults to WORKDIR/fixtures",
type=ClickPath(path_type=Path), # pyright: ignore[reportCallIssue, reportUntypedFunctionDecorator, reportArgumentType]
default=Path.cwd() / "fixtures",
required=False,
)
def dump_table_data(app: Litestar, table_names: tuple[str, ...], dump_dir: Path) -> None:

from rich.prompt import Confirm

all_tables = "*" in table_names

if all_tables and not Confirm.ask("[yellow bold]You have specified '*'. Are you sure you want to dump all tables from the database?"):
# user has decided not to dump all tables
return console.rule("[red bold]No data was dumped.", style="red", align="left")

from advanced_alchemy.alembic.utils import dump_tables

# _TODO: Find a way to read from different registries
from advanced_alchemy.base import orm_registry
from advanced_alchemy.extensions.litestar.alembic import get_database_migration_plugin

configs = get_database_migration_plugin(app).config

if not isinstance(configs, Sequence):
configs = [configs]

async def _dump_tables() -> None:
for config in configs:
target_tables = set(config.alembic_config.target_metadata.tables)

if not all_tables:
# only consider tables specified by user
for table_name in set(table_names) - target_tables:
console.rule(f"[red bold]Skipping table '{table_name}' because it is not available in the default registry", style="red", align="left")
target_tables.intersection_update(table_names)
else:
console.rule("[yellow bold]Dumping all tables", style="yellow", align="left")

models = [mapper.class_ for mapper in orm_registry.mappers if mapper.class_.__table__.name in target_tables]
await dump_tables(dump_dir, config.get_session(), models)
console.rule("[green bold]Data dump complete", align="left")

return run(_dump_tables)
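
The --table handling above is plain set arithmetic: requested names that are not in the config's metadata are reported and skipped, and the remainder is intersected with the known tables, with '*' short-circuiting to everything. A small standalone illustration follows, with made-up table names; the command itself would typically be invoked as `litestar database dump-data --table '*'`, assuming the plugin registers its CLI group under `database`.

# Standalone illustration of the filtering logic in dump_table_data; the
# table names here are hypothetical.
table_names = ("author", "book", "missing_table")
target_tables = {"author", "book", "publisher"}

# Requested names the metadata does not know about are reported and dropped.
for table_name in set(table_names) - target_tables:
    print(f"Skipping table '{table_name}' because it is not available in the default registry")

# Keep only the requested tables that actually exist in the metadata.
target_tables.intersection_update(table_names)
print(sorted(target_tables))  # ['author', 'book']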
5 changes: 3 additions & 2 deletions sonar-project.properties
@@ -10,7 +10,8 @@ sonar.coverage.exclusions=\
examples/*.py, \
tests/*.py, \
tests/**/*.py, \
-advanced_alchemy.filters.py, \
+advanced_alchemy/extensions/litestar/cli.py, \
+advanced_alchemy/filters.py, \
advanced_alchemy/service/typing.py, \
advanced_alchemy/service/_util.py, \
advanced_alchemy/alembic/templates/asyncio/env.py, \
@@ -24,7 +25,7 @@ sonar.coverage.exclusions=\
sonar.cpd.exclusions=\
advanced_alchemy/repository/memory/_sync.py, \
advanced_alchemy/repository/memory/_async.py, \
-advanced_alchemy.filters.py, \
+advanced_alchemy/filters.py, \
advanced_alchemy/repository/_sync.py, \
advanced_alchemy/repository/_async.py, \
advanced_alchemy/service/_sync.py, \
48 changes: 45 additions & 3 deletions tests/integration/test_alembic_commands.py
@@ -2,18 +2,19 @@

from pathlib import Path
from typing import Type, cast
+from uuid import UUID

import pytest
from _pytest.monkeypatch import MonkeyPatch
from pytest import CaptureFixture, FixtureRequest
from pytest_lazyfixture import lazy_fixture
-from sqlalchemy import Engine
+from sqlalchemy import Engine, ForeignKey, String
from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker
-from sqlalchemy.orm import sessionmaker
+from sqlalchemy.orm import Mapped, mapped_column, relationship, sessionmaker

from advanced_alchemy import base
from advanced_alchemy.alembic import commands
-from advanced_alchemy.alembic.utils import drop_all
+from advanced_alchemy.alembic.utils import drop_all, dump_tables
from advanced_alchemy.extensions.litestar import SQLAlchemyAsyncConfig, SQLAlchemySyncConfig
from alembic.util.exc import CommandError
from tests.fixtures.uuid import models as models_uuid
@@ -254,6 +255,47 @@ async def test_drop_all(
assert "Successfully dropped all objects" in result.out


async def test_dump_tables(
any_config: SQLAlchemySyncConfig | SQLAlchemyAsyncConfig,
capsys: CaptureFixture[str],
tmp_project_dir: Path,
) -> None:

from advanced_alchemy.base import (
CommonTableAttributes,
DeclarativeBase,
UUIDPrimaryKey,
create_registry,
)
class _UUIDAuditBase(CommonTableAttributes, UUIDPrimaryKey, DeclarativeBase):
registry = create_registry()

class TestAuthorModel(_UUIDAuditBase):
name: Mapped[str] = mapped_column(String(10))

class TestBookModel(_UUIDAuditBase):
title: Mapped[str] = mapped_column(String(10))
author_id: Mapped[UUID] = mapped_column(ForeignKey("test_author_model.id"))

TestBookModel.author = relationship(TestAuthorModel, lazy="joined", innerjoin=True, viewonly=True)
TestAuthorModel.books = relationship(TestBookModel, back_populates="author", lazy="noload", uselist=True)

if isinstance(any_config, SQLAlchemySyncConfig):
TestBookModel.metadata.create_all(any_config.get_engine())
else:
async with any_config.get_engine().begin() as conn:
await conn.run_sync(TestBookModel.metadata.create_all)

await dump_tables(
tmp_project_dir,
any_config.get_session(),
[TestAuthorModel, TestBookModel],
)
result = capsys.readouterr()
assert "Dumping table 'test_author_model'" in result.out
assert "Dumping table 'test_book_model" in result.out


"""
async def test_alembic_revision(alembic_commands: commands.AlembicCommands, tmp_project_dir: Path) -> None:
    alembic_commands.init(directory=f"{tmp_project_dir}/migrations/")
