feat: add redis caching service to container factory #216

Draft · wants to merge 6 commits into base: main
2 changes: 2 additions & 0 deletions backend/bloom/app.py
@@ -7,12 +7,14 @@
from bloom.routers.v1.ports import router as router_ports_v1
from bloom.routers.v1.zones import router as router_zones_v1
from starlette.middleware.cors import CORSMiddleware
from fastapi.middleware.gzip import GZipMiddleware

from bloom.config import settings

API_PREFIX_V1='/api/v1'

app = FastAPI()
app.add_middleware(GZipMiddleware, minimum_size=1000, compresslevel=5)


@app.get("/", include_in_schema=False)
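As a quick sanity check for the GZip middleware added above, a minimal client-side sketch (the local URL and endpoint path are assumptions, not part of this PR):

```python
# Responses of at least `minimum_size` (1000) bytes should come back compressed
# once GZipMiddleware is registered; smaller bodies are passed through untouched.
import requests

resp = requests.get(
    "http://localhost:8000/api/v1/vessels",   # hypothetical local deployment and endpoint
    headers={"Accept-Encoding": "gzip"},
)
print(resp.headers.get("Content-Encoding"))   # expected "gzip" for large enough payloads
```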
9 changes: 9 additions & 0 deletions backend/bloom/container.py
@@ -13,6 +13,7 @@
from bloom.services.metrics import MetricsService
from bloom.usecase.GenerateAlerts import GenerateAlerts
from dependency_injector import containers, providers
import redis


class UseCases(containers.DeclarativeContainer):
@@ -23,6 +24,14 @@ class UseCases(containers.DeclarativeContainer):
db_url=db_url,
)

cache_service = providers.Factory(
redis.Redis,
host=settings.redis_host,
port=settings.redis_port,
password=settings.redis_password,
db=0
)

vessel_repository = providers.Factory(
VesselRepository,
session_factory=db.provided.session,
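A minimal sketch of how the new cache_service factory can be resolved outside the container (the key and value below are purely illustrative):

```python
from bloom.container import UseCases

# providers.Factory builds a fresh redis.Redis client per call, configured with the
# redis_host / redis_port / redis_password settings wired above.
cache = UseCases().cache_service()
cache.set("example-key", "example-value")    # illustrative key/value
cache.expire("example-key", 60)              # drop the key after 60 seconds
print(cache.get("example-key"))              # b"example-value" until expiry
```

Since this is a providers.Factory, each resolution creates a new client; a providers.Singleton would instead share one client (and its connection pool) across callers, which may be worth considering for request-heavy endpoints.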
114 changes: 42 additions & 72 deletions backend/bloom/dependencies.py
@@ -1,84 +1,54 @@
from fastapi import Request, HTTPException
from fastapi import Request, HTTPException, Depends
from bloom.config import settings
from fastapi.security import APIKeyHeader
from pydantic import BaseModel, ConfigDict, Field,conint
from pydantic.generics import GenericModel
from datetime import datetime, timedelta
from typing import Generic,TypeVar, List
from enum import Enum

from pydantic import BaseModel
from functools import wraps
import time
import json
from bloom.logger import logger
from bloom.container import UseCases

## Reference for pagination design
## https://jayhawk24.hashnode.dev/how-to-implement-pagination-in-fastapi-feat-sqlalchemy
X_API_KEY_HEADER=APIKeyHeader(name="x-key")

class CachedRequest(BaseModel):
nocache:bool=False

## FastAPI endpoint decorator to manage Redis caching
# Endpoints decorated with @cache must declare request: Request and nocache: bool
# parameters, as in the example below:
# Example:
# @router.get('/my/endpoint')
# @cache
# def my_endpoint_function(request: Request, # needed by @cache
# ...
# nocache:bool = False, # needed by @cache
# ):
# ...
def cache(func):
@wraps(func)
async def wrapper(*args, **kwargs):
start = time.time()
request=kwargs['request']
cache_service=UseCases().cache_service()
nocache = request.query_params.get('nocache', '').lower() == 'true'
cache_key=f"{request.url.path}/{request.query_params}"
incache=False
#incache= cache_service.get(cache_key)
if incache and not nocache:
logger.debug(f"{cache_key} cached ({settings.redis_cache_expiration})s")
payload=json.loads(incache)
else:
payload=await func(*args, **kwargs)
#cache_service.set(cache_key, json.dumps(payload))
#cache_service.expire(cache_key,settings.redis_cache_expiration)
logger.debug(f"{cache_key} elapsed Time: {time.time()-start}")
return payload
return wrapper


def check_apikey(key:str):
if key != settings.api_key :
raise HTTPException(status_code=401, detail="Unauthorized")
return True

def check_cache(request:Request):
cache_service = UseCases().cache_service()
return cache_service.get(request.url.path)


class DatetimeRangeRequest(BaseModel):
start_at: datetime = Field(default=datetime.now()-timedelta(days=7))
end_at: datetime = datetime.now()


class OrderByEnum(str, Enum):
ascending = "ASC"
descending = "DESC"

class OrderByRequest(BaseModel):
order: OrderByEnum = OrderByEnum.ascending

class PaginatedRequest(BaseModel):
offset: int|None = 0
limit: int|None = 100
order_by: OrderByRequest = OrderByEnum.ascending


class PageParams(BaseModel):
""" Request query params for paginated API. """
offset: conint(ge=0) = 0
limit: conint(ge=1, le=100000) = 100

T = TypeVar("T")

class PagedResponseSchema(GenericModel,Generic[T]):
total: int
limit: int
offset: int
next: str|None
previous: str|None
results: List[T]

def paginate(request: Request, page_params: PageParams, query, ResponseSchema: BaseModel) -> PagedResponseSchema[T]:
"""Paginate the query."""

print(f"{request.url.scheme}://{request.client}/{request.url.path}")
paginated_query = query.offset((page_params.offset) * page_params.limit).limit(page_params.limit).all()

return PagedResponseSchema(
total=query.count(),
offset=page_params.offset,
limit=page_params.limit,
next="",
previous="",
results=[ResponseSchema.from_orm(item) for item in paginated_query],
)

class TotalTimeActivityTypeEnum(str, Enum):
total_time_at_sea: str = "Total Time at Sea"
total_time_in_amp: str = "Total Time in AMP"
total_time_in_territorial_waters: str = "Total Time in Territorial Waters"
total_time_in_zones_with_no_fishing_rights: str = "Total Time in zones with no fishing rights"
total_time_fishing: str = "Total Time Fishing"
total_time_fishing_in_amp: str = "Total Time Fishing in AMP"
total_time_fishing_in_territorial_waters: str = "Total Time Fishing in Territorial Waters"
total_time_fishing_in_zones_with_no_fishing_rights: str = "Total Time Fishing in zones with no fishing rights"
total_time_fishing_in_extincting_amp: str = "Total Time in Extincting AMP"
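For reference, a hedged usage sketch of the @cache decorator defined earlier in this file (the route path and payload are hypothetical; the request and nocache parameters are the ones its comment requires):

```python
from fastapi import APIRouter, Request
from bloom.dependencies import cache

router = APIRouter()

@router.get("/vessels/summary")                    # hypothetical endpoint
@cache                                             # must sit below the router decorator
async def vessels_summary(request: Request,        # read by @cache to build the cache key
                          nocache: bool = False):  # lets callers bypass the cache
    # The decorator builds its cache key as f"{request.url.path}/{request.query_params}";
    # the actual Redis get/set calls are still commented out in this draft.
    return {"vessels": []}
```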
13 changes: 0 additions & 13 deletions backend/bloom/domain/api.py

This file was deleted.

12 changes: 11 additions & 1 deletion backend/bloom/domain/metrics.py
@@ -4,7 +4,17 @@
from datetime import datetime, timedelta
from enum import Enum
from bloom.domain.vessel import Vessel
from bloom.dependencies import TotalTimeActivityTypeEnum

class TotalTimeActivityTypeEnum(str, Enum):
total_time_at_sea: str = "Total Time at Sea"
total_time_in_amp: str = "Total Time in AMP"
total_time_in_territorial_waters: str = "Total Time in Territorial Waters"
total_time_in_zones_with_no_fishing_rights: str = "Total Time in zones with no fishing rights"
total_time_fishing: str = "Total Time Fishing"
total_time_fishing_in_amp: str = "Total Time Fishing in AMP"
total_time_fishing_in_territorial_waters: str = "Total Time Fishing in Territorial Waters"
total_time_fishing_in_zones_with_no_fishing_rights: str = "Total Time Fishing in zones with no fishing rights"
total_time_fishing_in_extincting_amp: str = "Total Time in Extincting AMP"

class ResponseMetricsVesselInActivitySchema(BaseModel):
model_config = ConfigDict(from_attributes=True)
4 changes: 2 additions & 2 deletions backend/bloom/infra/repositories/repository_excursion.py
@@ -37,7 +37,7 @@ def get_param_from_last_excursion(self, session: Session, vessel_id: int) -> Uni
def get_excursions_by_vessel_id(self, session: Session, vessel_id: int) -> List[Excursion]:
"""Recheche l'excursion en cours d'un bateau, c'est-à-dire l'excursion qui n'a pas de date d'arrivée"""
stmt = select(sql_model.Excursion).where(sql_model.Excursion.vessel_id == vessel_id)
result = session.execute(stmt).scalars()
result = session.execute(stmt).scalars().all()
if not result:
return []
return [ExcursionRepository.map_to_domain(r) for r in result]
@@ -46,7 +46,7 @@ def get_vessel_excursion_by_id(self, session: Session, vessel_id: int, excursion
"""Recheche l'excursion en cours d'un bateau, c'est-à-dire l'excursion qui n'a pas de date d'arrivée"""
stmt = select(sql_model.Excursion).where((sql_model.Excursion.vessel_id == vessel_id)
& (sql_model.Excursion.id == excursion_id))
result = session.execute(stmt).scalar()
result = session.execute(stmt).scalar_one_or_none()
if not result:
return None
return ExcursionRepository.map_to_domain(result)
10 changes: 5 additions & 5 deletions backend/bloom/infra/repositories/repository_segment.py
@@ -112,9 +112,9 @@ def list_vessel_excursion_segments(self,session,vessel_id:int,excursions_id: int
sql_model.Segment.excursion_id == sql_model.Excursion.id
).where( sql_model.Segment.excursion_id == excursions_id,
sql_model.Excursion.vessel_id == vessel_id)
result = session.execute(stmt)
if result is not None :
return [ SegmentRepository.map_to_domain(record) for record in result.scalars()]
result = session.execute(stmt).scalars().all()
if result:
return [ SegmentRepository.map_to_domain(record) for record in result]
else:
return []

@@ -127,7 +127,7 @@ def get_vessel_excursion_segment_by_id(self,session,vessel_id:int,excursions_id:
).where( sql_model.Segment.excursion_id == excursions_id,
sql_model.Excursion.vessel_id == vessel_id,
sql_model.Segment.id == segment_id)
result = session.execute(stmt)
result = session.execute(stmt).scalar_one_or_none()
if result is not None :
return SegmentRepository.map_to_domain(result)
else:
@@ -154,7 +154,7 @@ def get_last_vessel_id_segments(self, session: Session) -> pd.DataFrame:
).filter(
sql_model.Segment.last_vessel_segment == True
)
q = session.execute(stmt)
q = session.execute(stmt).all()
if not q:
return None
df = pd.DataFrame(q, columns=["vessel_id", "excursion_id", "end_position", "timestamp_end", 'heading_at_end',
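For context, the SQLAlchemy 2.x result-handling idioms these repository hunks converge on, as a standalone sketch (the sql_model import path is assumed from the repositories above):

```python
from sqlalchemy import select
from sqlalchemy.orm import Session
from bloom.infra.database import sql_model  # assumed import path

def result_handling_examples(session: Session, excursion_id: int, segment_id: int):
    # Many entity rows: .scalars().all() returns a plain list of Segment objects,
    # which also makes the "if not result" emptiness checks above behave as intended.
    segments = session.execute(
        select(sql_model.Segment).where(sql_model.Segment.excursion_id == excursion_id)
    ).scalars().all()

    # At most one row: .scalar_one_or_none() returns a Segment or None, and raises
    # if the query unexpectedly matches several rows.
    segment = session.execute(
        select(sql_model.Segment).where(sql_model.Segment.id == segment_id)
    ).scalar_one_or_none()

    return segments, segment
```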
58 changes: 58 additions & 0 deletions backend/bloom/routers/requests.py
@@ -0,0 +1,58 @@
from pydantic import BaseModel, Field,conint
from fastapi import Request
from datetime import datetime, timedelta
from enum import Enum
from typing import Generic,TypeVar, List


class CachedRequest(BaseModel):
nocache:bool=False


class OrderByEnum(str, Enum):
ascending = "ASC"
descending = "DESC"

class DatetimeRangeRequest(BaseModel):
start_at: datetime = Field(default_factory=lambda: datetime.now() - timedelta(days=7))
end_at: datetime = Field(default_factory=datetime.now)

class OrderByRequest(BaseModel):
order: OrderByEnum = OrderByEnum.ascending

class PaginatedRequest(BaseModel):
offset: int|None = 0
limit: int|None = 100
order_by: OrderByEnum = OrderByEnum.ascending


class PageParams(BaseModel):
""" Request query params for paginated API. """
offset: conint(ge=0) = 0
limit: conint(ge=1, le=100000) = 100

T = TypeVar("T")

class PagedResponseSchema(BaseModel,Generic[T]):
total: int
limit: int
offset: int
next: str|None
previous: str|None
results: List[T]


def paginate(request: Request, page_params: PageParams, query, ResponseSchema: BaseModel) -> PagedResponseSchema[T]:
"""Paginate the query."""

print(f"{request.url.scheme}://{request.client}/{request.url.path}")
paginated_query = query.offset((page_params.offset) * page_params.limit).limit(page_params.limit).all()

return PagedResponseSchema(
total=query.count(),
offset=page_params.offset,
limit=page_params.limit,
next="",
previous="",
results=[ResponseSchema.from_orm(item) for item in paginated_query],
)
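A minimal sketch of how PageParams, paginate, and PagedResponseSchema from this new module would be wired into a route (the route path, response schema, and query helper are hypothetical):

```python
from fastapi import APIRouter, Depends, Request
from pydantic import BaseModel, ConfigDict
from bloom.routers.requests import PageParams, PagedResponseSchema, paginate

router = APIRouter()

class VesselListItem(BaseModel):            # hypothetical response schema
    model_config = ConfigDict(from_attributes=True)
    id: int
    name: str

@router.get("/vessels", response_model=PagedResponseSchema[VesselListItem])
def list_vessels(request: Request, page_params: PageParams = Depends()):
    # `query` must be a SQLAlchemy Query exposing .offset(), .limit(), .all() and
    # .count(), which is what paginate() calls; building it is left to a repository.
    query = build_vessel_query()            # hypothetical helper
    return paginate(request, page_params, query, VesselListItem)
```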
5 changes: 2 additions & 3 deletions backend/bloom/routers/v1/cache.py
@@ -2,14 +2,13 @@
from bloom.config import settings
import redis
from bloom.config import settings
from bloom.logger import logger
from bloom.dependencies import (X_API_KEY_HEADER,check_apikey)
from bloom.container import UseCases

router = APIRouter()
rd = redis.Redis(host=settings.redis_host, port=settings.redis_port, db=0, password=settings.redis_password)

@router.get("/cache/all/flush")
async def cache_all_flush(request:Request,key: str = Depends(X_API_KEY_HEADER)):
check_apikey(key)
rd.flushall()
UseCases().cache_service().flushall()
return {"code":0}
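For completeness, an illustrative client call to this flush endpoint (the local URL and API key value are assumptions; the x-key header name comes from X_API_KEY_HEADER):

```python
import requests

resp = requests.get(
    "http://localhost:8000/api/v1/cache/all/flush",  # assumes the router is mounted under API_PREFIX_V1
    headers={"x-key": "my-api-key"},                 # placeholder; must match settings.api_key
)
print(resp.json())                                   # {"code": 0} once the cache has been flushed
```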