Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Sphinx + Phoenix - Sarah K., Charday N., & Priyanka K. #66

Open
wants to merge 16 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
16 commits
Select commit Hold shift + click to select a range
034283a
Passes Wave 1. Created virtual environment, installed dependencies, d…
skoch-neat Oct 18, 2024
11b0eab
Complete wave 02. Add new get endpoint to planets blueprint
chardayneal Oct 21, 2024
70f0a3a
Correct code for wave1 and wave2 from PR feedback
PriyankaKaramchandani Oct 25, 2024
0e7c2a9
Removed hardcoded data. Created project database. Connected the datab…
PriyankaKaramchandani Oct 25, 2024
db19c66
Initialize migration, generate migration and apply migration to database
PriyankaKaramchandani Oct 25, 2024
2198141
Complete wave 3 implementation.
PriyankaKaramchandani Oct 25, 2024
7ac8d4d
Implement validate_planet function to use in get_one_planet endpoint.
PriyankaKaramchandani Oct 25, 2024
517a1e2
Updated code to comply with PEP8 standards. Changed import statements…
skoch-neat Oct 28, 2024
9c4e80d
Merge pull request #1 from skoch-neat/pep8-cleanup
skoch-neat Oct 29, 2024
babd837
Updated validation to access database. Added functionality to update …
skoch-neat Oct 29, 2024
458915d
Merge branch 'main' of https://github.com/skoch-neat/solar-system-api
skoch-neat Oct 29, 2024
ac86dfe
Refactor code. Update get_all_planets route to consider query params.…
chardayneal Oct 30, 2024
7b738e0
"test_planet_routes"
PriyankaKaramchandani Oct 31, 2024
dd74ea2
Add app/__init__.py test_database configuration.
PriyankaKaramchandani Oct 31, 2024
60bea74
Added constants file for storing and importing strings used repeatedl…
skoch-neat Nov 1, 2024
cf0a16b
added gunicorn to requirements.txt
skoch-neat Nov 6, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 17 additions & 3 deletions app/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,21 @@
from flask import Flask
from app.db import db, migrate
from app.routes.planet_routes import planets_bp
from app.models import planet
import os


def create_app(config=None):
    """Application factory for the Solar System API.

    Args:
        config: Optional mapping of settings that override the defaults
            (used by tests to point at a test database).

    Returns:
        A configured Flask app with the database, migrations, and the
        planets blueprint registered.
    """
    app = Flask(__name__)

    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    # Connection string comes from the environment; None here means it was not set.
    app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get("SQLALCHEMY_DATABASE_URI")

    if config:
        # Test/override settings win over the defaults above.
        app.config.update(config)

    db.init_app(app)
    migrate.init_app(app, db)

    app.register_blueprint(planets_bp)

    return app
6 changes: 6 additions & 0 deletions app/db.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from app.models.base import Base

# Shared SQLAlchemy handle; models subclass db.Model, which maps onto Base.
db = SQLAlchemy(model_class=Base)
# Flask-Migrate wrapper; both are bound to the app later via init_app().
migrate = Migrate()
Empty file added app/models/__init__.py
Empty file.
4 changes: 4 additions & 0 deletions app/models/base.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
from sqlalchemy.orm import DeclarativeBase

class Base(DeclarativeBase):
    """Declarative base class shared by all ORM models in this app."""
    pass
17 changes: 17 additions & 0 deletions app/models/planet.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
from sqlalchemy.orm import Mapped, mapped_column
from app.db import db
from constants import ID, NAME, DESCRIPTION, NUMBER_OF_MOONS

class Planet(db.Model):
    """ORM model for one planet row, with a dict serializer for JSON responses."""

    # Surrogate primary key, assigned by the database.
    id: Mapped[int] = mapped_column(primary_key=True, autoincrement=True)
    name: Mapped[str]
    description: Mapped[str]
    number_of_moons: Mapped[int]

    def to_dict(self):
        """Return this planet's columns keyed by the shared constant names."""
        record = {ID: self.id, NAME: self.name}
        record[DESCRIPTION] = self.description
        record[NUMBER_OF_MOONS] = self.number_of_moons
        return record
2 changes: 0 additions & 2 deletions app/routes.py

This file was deleted.

Empty file added app/routes/__init__.py
Empty file.
117 changes: 117 additions & 0 deletions app/routes/planet_routes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
from flask import Blueprint, abort, make_response, request, Response
from app.db import db
from app.models.planet import Planet
from constants import ID, NAME, DESCRIPTION, NUMBER_OF_MOONS, ORDER_BY, MESSAGE, MIMETYPE_JSON, INVALID, NOT_FOUND

planets_bp = Blueprint("planets_bp", __name__, url_prefix="/planets")

@planets_bp.post("")
def create_planet():
    """Create a planet from the JSON request body.

    Expects the keys name, description, and number_of_moons. Returns the
    created record as a dict with status 201, or aborts with 400 if the
    body is missing or lacks a required key (previously this surfaced as
    an unhandled KeyError → 500).
    """
    request_body = request.get_json()

    try:
        new_planet = Planet(
            name=request_body[NAME],
            description=request_body[DESCRIPTION],
            number_of_moons=request_body[NUMBER_OF_MOONS]
        )
    except (KeyError, TypeError) as error:
        # Missing key (KeyError) or non-dict body (TypeError): client error.
        abort(make_response({MESSAGE: f"request body {INVALID}"}, 400))

    db.session.add(new_planet)
    db.session.commit()

    return new_planet.to_dict(), 201

@planets_bp.get("")
def get_all_planets():
    """List planets, applying any filter and order_by query params."""
    params = get_query_params()

    statement = filter_query(db.select(Planet), params)
    statement = get_order_by_param(statement)

    return [record.to_dict() for record in db.session.scalars(statement)]

@planets_bp.get("/<planet_id>")
def get_one_planet(planet_id):
    """Return one planet by id; validate_planet aborts 400/404 on bad ids."""
    planet = validate_planet(planet_id)
    return planet.to_dict()

@planets_bp.put("/<planet_id>")
def update_planet(planet_id):
    """Replace a planet's fields from the JSON body; respond 204 on success."""
    planet = validate_planet(planet_id)
    data = request.get_json()

    # Full replacement (PUT semantics): every field comes from the body.
    planet.name, planet.description, planet.number_of_moons = (
        data[NAME],
        data[DESCRIPTION],
        data[NUMBER_OF_MOONS],
    )

    db.session.commit()

    return Response(status=204, mimetype=MIMETYPE_JSON)

@planets_bp.delete("/<planet_id>")
def delete_planet(planet_id):
    """Delete the planet with *planet_id*; respond 204 on success."""
    target = validate_planet(planet_id)

    db.session.delete(target)
    db.session.commit()

    return Response(status=204, mimetype=MIMETYPE_JSON)

def filter_query(query, params):
    """Narrow a Planet select with any filter params present in *params*.

    Supported keys: id and number_of_moons (exact match, cast to int via
    validate_cast_type, which aborts 400 on a non-numeric value), name
    (exact match), and description (case-insensitive substring match).
    Falsy values (absent or empty params) are ignored.

    NOTE(review): this span was corrupted by review-comment text injected
    into the diff; reconstructed here. Filtering by id duplicates the
    GET /planets/<id> endpoint and could be dropped per PR feedback.
    """
    if params[ID]:
        query = query.where(Planet.id == validate_cast_type(params[ID], int, ID))

    if params[NAME]:
        query = query.where(Planet.name == params[NAME])

    if params[DESCRIPTION]:
        query = query.where(Planet.description.ilike(f"%{params[DESCRIPTION]}%"))

    if params[NUMBER_OF_MOONS]:
        query = query.where(Planet.number_of_moons == validate_cast_type(
            params[NUMBER_OF_MOONS], int, NUMBER_OF_MOONS))

    return query

def get_query_params():
    """Collect the supported filter params from the request query string.

    Absent params map to None, matching request.args.get's default.
    """
    filter_keys = (ID, NAME, DESCRIPTION, NUMBER_OF_MOONS)
    return {key: request.args.get(key) for key in filter_keys}

def get_order_by_param(query):
    """Order by the order_by query param when given, else by id for stability."""
    requested = request.args.get(ORDER_BY)

    if not requested:
        return query.order_by(Planet.id)

    return validate_order_by_param(query, requested)

def validate_cast_type(value, target_type, param_name):
    """Cast *value* to *target_type*; abort 400 with a message on failure."""
    try:
        cast_value = target_type(value)
    except (ValueError, TypeError):
        abort(make_response({MESSAGE: f"{param_name} '{value}' {INVALID}"}, 400))
    return cast_value

def validate_order_by_param(query, order_by_param):
    """Apply ordering by the named Planet attribute; abort 400 if unknown."""
    try:
        order_column = getattr(Planet, order_by_param)
    except AttributeError:
        abort(make_response({MESSAGE: f"{ORDER_BY} '{order_by_param}' {INVALID}"}, 400))
    return query.order_by(order_column)

def validate_planet(planet_id):
    """Return the Planet for *planet_id*, aborting 400 (bad id) or 404 (missing)."""
    try:
        planet_id = int(planet_id)
    except ValueError:
        abort(make_response({MESSAGE: f"{ID} {planet_id} {INVALID}"}, 400))

    planet = db.session.scalar(db.select(Planet).where(Planet.id == planet_id))

    if planet is None:
        abort(make_response({MESSAGE: f"{ID} {planet_id} {NOT_FOUND}"}, 404))

    return planet
19 changes: 19 additions & 0 deletions constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
'''
Constants for the Solar System API

Shared string keys, MIME types, and error-message fragments so that routes,
models, and tests all reference the same literals instead of repeating them.
'''

# Keys used in JSON bodies and query params
MESSAGE = "message"
ID = "id"
NAME = "name"
DESCRIPTION = "description"
NUMBER_OF_MOONS = "number_of_moons"
FIELDS = "fields"
ORDER_BY = "order_by"

# MIME Types
MIMETYPE_JSON = "application/json"

# Error messages (appended to the offending key/value in responses)
NOT_FOUND = "not found"
INVALID = "invalid"
1 change: 1 addition & 0 deletions migrations/README
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Single-database configuration for Flask.
50 changes: 50 additions & 0 deletions migrations/alembic.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# A generic, single database configuration.

[alembic]
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic,flask_migrate

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[logger_flask_migrate]
level = INFO
handlers =
qualname = flask_migrate

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
113 changes: 113 additions & 0 deletions migrations/env.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
import logging
from logging.config import fileConfig

from flask import current_app

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
logger = logging.getLogger('alembic.env')


def get_engine():
    """Return the SQLAlchemy engine from the Flask-Migrate extension."""
    try:
        # this works with Flask-SQLAlchemy<3 and Alchemical
        return current_app.extensions['migrate'].db.get_engine()
    except (TypeError, AttributeError):
        # this works with Flask-SQLAlchemy>=3
        return current_app.extensions['migrate'].db.engine


def get_engine_url():
    """Return the database URL as a string, with '%' escaped for configparser."""
    try:
        return get_engine().url.render_as_string(hide_password=False).replace(
            '%', '%%')
    except AttributeError:
        # older SQLAlchemy: URL has no render_as_string(); fall back to str()
        return str(get_engine().url).replace('%', '%%')


# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
config.set_main_option('sqlalchemy.url', get_engine_url())
target_db = current_app.extensions['migrate'].db

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def get_metadata():
    """Return the MetaData object Alembic should autogenerate against."""
    if hasattr(target_db, 'metadatas'):
        # Flask-SQLAlchemy 3 exposes a dict of metadatas; None is the default bind
        return target_db.metadatas[None]
    return target_db.metadata


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL and not an Engine, though an
    Engine is acceptable here as well. By skipping the Engine creation we
    don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the script
    output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=get_metadata(), literal_binds=True
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a connection
    with the context.
    """

    # this callback is used to prevent an auto-migration from being generated
    # when there are no changes to the schema
    # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html
    def process_revision_directives(context, revision, directives):
        if getattr(config.cmd_opts, 'autogenerate', False):
            script = directives[0]
            if script.upgrade_ops.is_empty():
                # drop the generated (empty) revision instead of writing it
                directives[:] = []
                logger.info('No changes in schema detected.')

    conf_args = current_app.extensions['migrate'].configure_args
    # only install our callback if the app didn't supply its own
    if conf_args.get("process_revision_directives") is None:
        conf_args["process_revision_directives"] = process_revision_directives

    connectable = get_engine()

    with connectable.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=get_metadata(),
            **conf_args
        )

        with context.begin_transaction():
            context.run_migrations()


# Entry point: Alembic selects offline mode when invoked with --sql.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24 changes: 24 additions & 0 deletions migrations/script.py.mako
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
${upgrades if upgrades else "pass"}


def downgrade():
${downgrades if downgrades else "pass"}
Loading