Initial work on release manager
vinhowe committed Jul 27, 2021
1 parent 437d943 commit 533e455
Showing 4 changed files with 160 additions and 5 deletions.
49 changes: 46 additions & 3 deletions footron_controller/api.py
@@ -1,12 +1,15 @@
import asyncio
import atexit
import dataclasses
import hashlib
import tarfile
from typing import Optional

from fastapi import FastAPI, HTTPException
from fastapi import FastAPI, HTTPException, UploadFile, File
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

from .releases import ReleaseManager
from .placard import PlacardData
from .experiences import BaseExperience
from .collection import Collection
@@ -22,6 +25,7 @@
allow_headers=["*"],
)

_releases: ReleaseManager
_controller: Controller


@@ -34,6 +38,10 @@ class UpdateCurrentExperienceBody(BaseModel):
end_time: Optional[int]


class SetExperienceReleaseBody(BaseModel):
hash: str


def experience_response(experience: BaseExperience):
data = {
"id": experience.id,
@@ -146,8 +154,43 @@ async def update_placard(body: PlacardData):

@fastapi_app.on_event("startup")
def on_startup():
global _controller
_controller = Controller()
global _releases, _controller
_releases = ReleaseManager()
_controller = Controller(_releases)


@fastapi_app.get("/releases")
def releases():
return _releases.data


@fastapi_app.post("/releases/{id}")
async def add_release(id: str, file: UploadFile = File(...)):
# TODO: Is this too slow? Will it freeze the controller?
uncompressed_file = tarfile.open(fileobj=file.file, mode="r:gz")
tar_fileobj = uncompressed_file.fileobj

hash = hashlib.sha256()
while True:
chunk = tar_fileobj.read(4096)
if not chunk:
break
hash.update(chunk)

# Produces a hash different than sha256sum on the same file locally--this might not
# be a problem but we should be aware that it could be. Might be an encoding thing,
# not sure.
hash = bytes.hex(hash.digest())

uncompressed_file.extractall(_releases.create_release(id, hash))

return {"hash": hash}


@fastapi_app.put("/releases/{id}")
async def set_release(id: str, body: SetExperienceReleaseBody):
_releases.set_release(id, body.hash)
return {"status": "ok"}


@atexit.register
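Taken together, the new routes define a simple release workflow: POST a gzipped tarball of an experience build to /releases/{id} to register it under its content hash, then PUT that hash back to /releases/{id} to make it the current release. An illustrative client-side sketch, assuming the controller listens on localhost:8000 and using the requests library (neither assumption comes from this commit):

import requests

CONTROLLER = "http://localhost:8000"  # assumed address, not defined in this commit

# Upload a new build for the "my-experience" app; the response carries its hash
with open("my-experience.tar.gz", "rb") as archive:
    response = requests.post(
        f"{CONTROLLER}/releases/my-experience",
        files={"file": archive},
    )
release_hash = response.json()["hash"]

# Activate the uploaded release by pointing the app's symlink at it
requests.put(
    f"{CONTROLLER}/releases/my-experience",
    json={"hash": release_hash},
)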
8 changes: 6 additions & 2 deletions footron_controller/controller.py
@@ -4,6 +4,7 @@

from .experiences import load_experiences_fs, BaseExperience
from .placard import PlacardApi, PlacardData
from .releases import ReleaseManager
from .collection import load_collections_from_fs, Collection


@@ -14,12 +15,14 @@ class Controller:
end_time: Optional[int]
last_update: datetime.datetime
placard: PlacardApi
releases: ReleaseManager

def __init__(self):
def __init__(self, releases: ReleaseManager):
self.current_experience = None
self.end_time = None

self.placard = PlacardApi()
self.releases = releases

self.load_from_fs()

@@ -30,7 +33,8 @@ def load_from_fs(self):

def load_experiences(self):
self.experiences = {
experience.id: experience for experience in load_experiences_fs()
experience.id: experience
for experience in load_experiences_fs(self.releases.path)
}

def load_collections(self):
107 changes: 107 additions & 0 deletions footron_controller/releases.py
@@ -0,0 +1,107 @@
import json
from datetime import datetime
from typing import Dict, Optional

import pydantic
import pydantic.json
from pydantic import BaseModel

from .constants import BASE_DATA_PATH, EXPERIENCES_PATH

_RELEASES_DIR_NAME = "releases"
_DATA_FILENAME = "data.json"


class _Release(BaseModel):
hash: str
created: datetime


class _ExperienceReleases(BaseModel):
current: Optional[str]
releases: Dict[str, _Release]


class ReleaseManager:
_release_data: Dict[str, _ExperienceReleases]

# TODO: Figure out how we limit # of older releases kept per app--maybe by size?
def __init__(self):
self._releases_path = BASE_DATA_PATH / _RELEASES_DIR_NAME
self._linked_path = EXPERIENCES_PATH / "apps"
self._data_path = self._releases_path / _DATA_FILENAME

self._load_releases_data()

@property
def path(self):
return self._linked_path

@property
def data(self):
return self._release_data

def _load_releases_data(self):
if not self._data_path.exists():
self._release_data = {}
return

with open(self._data_path) as data_file:
self._release_data = {
id: _ExperienceReleases.parse_obj(data)
for id, data in json.load(data_file).items()
}

def _save_release_data(self):
with open(self._data_path, "w") as data_file:
json.dump(
{id: data for id, data in self._release_data.items()},
data_file,
default=pydantic.json.pydantic_encoder,
)

def create_release(self, id: str, hash: str):
# TODO: This is probably the right place to clean up old releases
if id not in self._release_data:
self._release_data[id] = _ExperienceReleases(current=None, releases={})

# TODO: We don't check if there's an existing release here--might be a good
# idea?
self._release_data[id].releases[hash] = _Release(
hash=hash, created=datetime.now()
)
self._save_release_data()

release_path = self.path_for_release(id, hash)
if not release_path.exists():
release_path.mkdir(parents=True)

return release_path

def set_release(self, id: str, hash: str):
if not self.release_exists(id, hash):
raise FileNotFoundError(f"Release path does not exist: {id}/{hash}")

if not self._linked_path.exists():
self._linked_path.mkdir(parents=True)

linked_path = self._linked_path / id
if linked_path.exists():
linked_path.unlink()

linked_path.symlink_to(
self.path_for_release(id, hash), target_is_directory=True
)

self._release_data[id].current = hash
self._save_release_data()

def release_exists(self, id, hash) -> bool:
return (
id in self._release_data
and hash in self._release_data[id].releases
and self.path_for_release(id, hash).exists()
)

def path_for_release(self, id, hash):
return self._releases_path / id / hash
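For reference, the state ReleaseManager keeps on disk amounts to a releases directory of extracted builds keyed by hash, a data.json index built from the models above, and a per-app symlink marking the current release. A rough illustration with hypothetical values (BASE_DATA_PATH and EXPERIENCES_PATH come from .constants and are not shown in this commit):

# <BASE_DATA_PATH>/releases/data.json            serialized release index
# <BASE_DATA_PATH>/releases/<id>/<hash>/         extracted files for one release
# <EXPERIENCES_PATH>/apps/<id> -> <BASE_DATA_PATH>/releases/<id>/<hash>
#
# data.json, as written by _save_release_data(), looks roughly like:
example_index = {
    "my-experience": {
        "current": "9f2c1d...",  # hash of the active release, or null when none is set
        "releases": {
            "9f2c1d...": {"hash": "9f2c1d...", "created": "2021-07-27T12:34:56"}
        },
    },
}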
1 change: 1 addition & 0 deletions setup.cfg
@@ -14,4 +14,5 @@ install_requires =
fastapi
uvicorn[standard]
aiohttp
python-multipart
docker
