
Commit

Merge branch 'main' into task/bui-raster-calc
brettedw authored Oct 28, 2024
2 parents 50ff7f2 + 06c42d9 commit 6f26705
Showing 41 changed files with 761 additions and 1,244 deletions.
52 changes: 41 additions & 11 deletions .devcontainer/devcontainer.json
@@ -5,15 +5,45 @@
// Sets the run context to one level up instead of the .devcontainer folder.
"context": "..",
// Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
"dockerFile": "../Dockerfile.vscode",
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Uncomment the next line to run commands after the container is created - for example installing curl.
// "postCreateCommand": "apt-get update && apt-get install -y curl",
// Uncomment when using a ptrace-based debugger like C++, Go, and Rust
// "runArgs": [ "--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined" ],
// Uncomment to use the Docker CLI from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker.
// "mounts": [ "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" ],
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode"
"dockerFile": "../Dockerfile",
// Make sure you don't have a local .venv in wps/api, or the bind mount won't work.
// Note: we don't use a workspace mount because then you'd only see the files in /api, when
// you probably want to see everything. This means you must cd from /workspace to /app in
// order to actually run things; the following mount at least makes the /app folder in the
// container track changes to files in api.
"mounts": [
"source=${localWorkspaceFolder}/api,target=/app,type=bind"
],
"remoteUser": "worker",
"containerUser": "worker",
"updateRemoteUserUID": false, // without this you wind up as ubuntu user
// This lets the container access the database container. On Linux I had to set the db
// host to localhost in the .env file in api/app for it to work.
// Tested with the database running from the docker-compose file via `docker compose up db`.
"forwardPorts": [
8080
],
"runArgs": [
"--network=host",
"--platform",
"linux/amd64",
"--hostname=wps-dev",
],
// Sets us in the correct location and installs dev requirements like pytest.
"postCreateCommand": "echo 'cd /app' >> ~/.bashrc && cd /app && poetry install",
// By default the Dockerfile sets the shell to /bin/sh. You can use whatever you like here,
// but if you use bash, the terminal will automatically start in the /app folder rather than
// the workspace when it connects.
"customizations": {
"vscode": {
"settings": {
"terminal.integrated.defaultProfile.linux": "bash",
"terminal.integrated.profiles.linux": {
"bash": {
"path": "/bin/bash"
}
}
}
}
}
}
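Once the container is up, the bind mount plus the postCreateCommand mean day-to-day work happens in /app. A minimal sketch of a first terminal session, assuming the poetry install above succeeded; the uvicorn entry point `app.main:app` is an assumption, not something taken from this diff:

```bash
# Hypothetical first session inside the dev container.
cd /app                     # bash does this automatically via the ~/.bashrc line above
poetry run pytest           # dev requirements such as pytest were installed by postCreateCommand
poetry run uvicorn app.main:app --reload --port 8080   # assumed entry point; port matches forwardPorts
```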
2 changes: 1 addition & 1 deletion .github/workflows/renovate.yml
@@ -10,7 +10,7 @@ jobs:
LOG_LEVEL: debug
steps:
- name: Checkout
uses: actions/checkout@v4.2.1
uses: actions/checkout@v4.2.2
- name: Self-hosted Renovate
uses: renovatebot/[email protected]
with:
6 changes: 3 additions & 3 deletions Dockerfile
@@ -1,4 +1,4 @@
ARG DOCKER_IMAGE=image-registry.openshift-image-registry.svc:5000/e1e498-tools/wps-api-base:02-10-2024
ARG DOCKER_IMAGE=ghcr.io/bcgov/wps/wps-api-base:10-21-2024
# To build locally, point to a local base image you've already built (see openshift/wps-api-base)
# e.g. : docker build --build-arg DOCKER_IMAGE=wps-api-base:my-tag .

@@ -70,8 +70,8 @@ COPY ./api/alembic.ini /app
COPY ./api/prestart.sh /app
COPY ./api/start.sh /app

# Copy installed Python packages
COPY --from=builder /home/worker/.cache/pypoetry/virtualenvs /home/worker/.cache/pypoetry/virtualenvs
# Copy installed Python packages (the chown lets us install the dev packages later without root if we want)
COPY --from=builder --chown=$USERNAME:$USER_GID /home/worker/.cache/pypoetry/virtualenvs /home/worker/.cache/pypoetry/virtualenvs

# The fastapi docker image defaults to port 80, but openshift doesn't allow non-root users port 80.
EXPOSE 8080
88 changes: 0 additions & 88 deletions Dockerfile.vscode

This file was deleted.

25 changes: 17 additions & 8 deletions Dockerfile.web
@@ -6,7 +6,7 @@
ARG NODE_OPTIONS="--v8-pool-size=4"

# PHASE 1 - build frontend.
FROM node:20-alpine AS static
FROM docker.io/library/node:20-alpine AS static
# Switch to root user for package installs
USER 0
WORKDIR /app
@@ -21,18 +21,27 @@ RUN corepack enable \
COPY web .
RUN yarn run build:prod

# Switch back to default user
USER 1001

# PHASE 2 - prepare hosting.
# https://catalog.redhat.com/software/containers/ubi8/nginx-120/6156abfac739c0a4123a86fd
FROM registry.access.redhat.com/ubi8/nginx-120
FROM docker.io/library/nginx:stable-alpine

# Add application sources
ADD ./openshift/nginx.conf "${NGINX_CONF_PATH}"
# Copy application sources
COPY ./openshift/nginx.conf /etc/nginx/nginx.conf

# Copy the static content:
COPY --from=static /app/build .

RUN mkdir -p /tmp/nginx/client_temp
RUN chmod -R 755 /tmp/nginx

RUN mkdir -p /var/cache/nginx/proxy_temp
RUN mkdir -p /var/cache/nginx/fastcgi_temp
RUN mkdir -p /var/cache/nginx/uwsgi_temp
RUN mkdir -p /var/cache/nginx/scgi_temp
RUN chmod -R 755 /var/cache/nginx/
RUN chmod -R 755 /var/run

# Switch back to default user
USER 1001

EXPOSE 3000
CMD nginx -g "daemon off;"
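The switch from the UBI nginx image to plain nginx:stable-alpine is why the Dockerfile now creates and chmods the temp and cache directories: the container runs as USER 1001, and stock nginx wants to write temp files under paths that a non-root user can't touch by default. As a rough illustration only (the repository's real config is openshift/nginx.conf, which may differ), the kind of directives these directories pair with looks like this:

```nginx
# Hypothetical excerpt only; the repository's real file is openshift/nginx.conf.
http {
    # Point nginx's temp/cache paths at the directories created in the Dockerfile,
    # so they are writable when running as the non-root USER 1001.
    client_body_temp_path /tmp/nginx/client_temp;
    proxy_temp_path       /var/cache/nginx/proxy_temp;
    fastcgi_temp_path     /var/cache/nginx/fastcgi_temp;
    uwsgi_temp_path       /var/cache/nginx/uwsgi_temp;
    scgi_temp_path        /var/cache/nginx/scgi_temp;

    server {
        listen 3000;   # matches EXPOSE 3000 above
    }
}
```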
6 changes: 3 additions & 3 deletions api/app/jobs/common_model_fetchers.py
@@ -23,7 +23,7 @@
from app import config, configure_logging
import app.utils.time as time_utils
from app.utils.redis import create_redis
from app.stations import get_stations_synchronously, StationSourceEnum
from app.stations import get_stations_synchronously
from app.db.models.weather_models import (ProcessedModelRunUrl, PredictionModelRunTimestamp,
WeatherStationModelPrediction, ModelRunPrediction)
import app.db.database
@@ -187,10 +187,10 @@ class ModelValueProcessor:
""" Iterate through model runs that have completed, and calculate the interpolated weather predictions.
"""

def __init__(self, session, station_source: StationSourceEnum = StationSourceEnum.UNSPECIFIED):
def __init__(self, session):
""" Prepare variables we're going to use throughout """
self.session = session
self.stations = get_stations_synchronously(station_source)
self.stations = get_stations_synchronously()
self.station_count = len(self.stations)

def _process_model_run(self, model_run: PredictionModelRunTimestamp, model_type: ModelEnum):
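With the StationSourceEnum parameter gone, ModelValueProcessor always resolves stations the same way and callers only pass a session. A minimal usage sketch, mirroring how the jobs further down in this diff construct it; ModelEnum.GDPS is an assumed member of the enum:

```python
# Minimal usage sketch; mirrors the jobs further down in this diff.
import app.db.database
from app.weather_models import ModelEnum
from app.jobs.common_model_fetchers import ModelValueProcessor

with app.db.database.get_write_session_scope() as session:
    processor = ModelValueProcessor(session)   # no station_source argument any more
    processor.process(ModelEnum.GDPS)          # GDPS is an assumed ModelEnum member
```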
11 changes: 5 additions & 6 deletions api/app/jobs/env_canada.py
@@ -22,7 +22,6 @@
import app.utils.time as time_utils
from app.weather_models.process_grib import GribFileProcessor, ModelRunInfo
import app.db.database
from app.stations import StationSourceEnum
from app.rocketchat_notifications import send_rocketchat_notification
from app.jobs.env_canada_utils import adjust_model_day, get_model_run_urls

@@ -160,14 +159,14 @@ class EnvCanada():
Canada.
"""

def __init__(self, session: Session, model_type: ModelEnum, station_source: StationSourceEnum = StationSourceEnum.UNSPECIFIED):
def __init__(self, session: Session, model_type: ModelEnum):
""" Prep variables """
self.files_downloaded = 0
self.files_processed = 0
self.exception_count = 0
# We always work in UTC:
self.now = time_utils.get_utc_now()
self.grib_processor = GribFileProcessor(station_source)
self.grib_processor = GribFileProcessor()
self.model_type: ModelEnum = model_type
self.session = session
# set projection based on model_type
@@ -246,7 +245,7 @@ def process(self):
self.model_type, hour, exc_info=exception)


def process_models(station_source: StationSourceEnum = StationSourceEnum.UNSPECIFIED):
def process_models():
""" downloading and processing models """

# set the model type requested based on arg passed via command line
@@ -257,11 +256,11 @@ def process_models(station_source: StationSourceEnum = StationSourceEnum.UNSPECIFIED):
start_time = datetime.datetime.now()

with app.db.database.get_write_session_scope() as session:
env_canada = EnvCanada(session, model_type, station_source)
env_canada = EnvCanada(session, model_type)
env_canada.process()

# interpolate and machine learn everything that needs interpolating.
model_value_processor = ModelValueProcessor(session, station_source)
model_value_processor = ModelValueProcessor(session)
model_value_processor.process(model_type)

# calculate the execution time.
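The job entry point still takes the model type from the command line; only the station_source plumbing is gone. A minimal invocation sketch under those assumptions ("GDPS" is an assumed ModelEnum value, and calling process_models() directly of course still needs a reachable database); the same shape applies to the NOAA job in the next file:

```python
# Hypothetical invocation sketch; the module reads the model type from sys.argv.
import sys
from app.jobs import env_canada

sys.argv = ["env_canada.py", "GDPS"]   # "GDPS" is an assumed ModelEnum member
env_canada.process_models()            # downloads and processes grib files, then interpolates
```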
11 changes: 5 additions & 6 deletions api/app/jobs/noaa.py
@@ -23,7 +23,6 @@
from app.weather_models import ModelEnum, ProjectionEnum
from app.weather_models.process_grib import GribFileProcessor, ModelRunInfo
import app.db.database
from app.stations import StationSourceEnum
from app.rocketchat_notifications import send_rocketchat_notification

# If running as its own process, configure logging appropriately.
@@ -261,14 +260,14 @@ class NOAA():
""" Class that orchestrates downloading and processing of GFS weather model grib files from NOAA.
"""

def __init__(self, session: Session, model_type: ModelEnum, station_source: StationSourceEnum = StationSourceEnum.UNSPECIFIED):
def __init__(self, session: Session, model_type: ModelEnum):
""" Prep variables """
self.files_downloaded = 0
self.files_processed = 0
self.exception_count = 0
# We always work in UTC:
self.now = time_utils.get_utc_now()
self.grib_processor = GribFileProcessor(station_source)
self.grib_processor = GribFileProcessor()
self.model_type: ModelEnum = model_type
self.session = session
# projection depends on model type
@@ -346,7 +345,7 @@ def process(self):
self.model_type, hour, exc_info=exception)


def process_models(station_source: StationSourceEnum = StationSourceEnum.UNSPECIFIED):
def process_models():
""" downloading and processing models """
# set the model type requested based on arg passed via command line
model_type = ModelEnum(sys.argv[1])
@@ -356,11 +355,11 @@ def process_models(station_source: StationSourceEnum = StationSourceEnum.UNSPECIFIED):
start_time = datetime.datetime.now()

with app.db.database.get_write_session_scope() as session:
noaa = NOAA(session, model_type, station_source)
noaa = NOAA(session, model_type)
noaa.process()

# interpolate and machine learn everything that needs interpolating.
model_value_processor = ModelValueProcessor(session, station_source)
model_value_processor = ModelValueProcessor(session)
model_value_processor.process(model_type)

# calculate the execution time.
15 changes: 5 additions & 10 deletions api/app/routers/stations.py
@@ -6,7 +6,7 @@
from app.utils.time import get_utc_now, get_hour_20
from app.schemas.stations import (WeatherStationGroupsMemberRequest, WeatherStationsResponse, DetailedWeatherStationsResponse, WeatherStationGroupsResponse,
WeatherStationGroupMembersResponse)
from app.stations import StationSourceEnum, get_stations_as_geojson, fetch_detailed_stations_as_geojson
from app.stations import get_stations_as_geojson, fetch_detailed_stations_as_geojson
from app.wildfire_one import wfwx_api


@@ -20,11 +20,7 @@


@router.get('/details/', response_model=DetailedWeatherStationsResponse)
async def get_detailed_stations(response: Response,
toi: datetime = None,
source: StationSourceEnum = StationSourceEnum.WILDFIRE_ONE,
__=Depends(audit),
_=Depends(authentication_required)):
async def get_detailed_stations(response: Response, toi: datetime = None, __=Depends(audit), _=Depends(authentication_required)):
""" Returns a list of fire weather stations with detailed information.
-) Unspecified: Use configuration to establish source.
-) LocalStorage: Read from json file (ignore configuration).
@@ -40,7 +36,7 @@ async def get_detailed_stations(response: Response,
toi = get_utc_now()
else:
toi = get_hour_20(toi)
weather_stations = await fetch_detailed_stations_as_geojson(toi, source)
weather_stations = await fetch_detailed_stations_as_geojson(toi)
return DetailedWeatherStationsResponse(features=weather_stations)

except Exception as exception:
@@ -49,8 +45,7 @@ async def get_detailed_stations(response: Response,


@router.get('/', response_model=WeatherStationsResponse)
async def get_stations(response: Response,
source: StationSourceEnum = StationSourceEnum.UNSPECIFIED):
async def get_stations(response: Response):
""" Return a list of fire weather stations.
Stations source can be:
-) Unspecified: Use configuration to establish source.
@@ -60,7 +55,7 @@ async def get_stations(response: Response,
try:
logger.info('/stations/')

weather_stations = await get_stations_as_geojson(source)
weather_stations = await get_stations_as_geojson()
response.headers["Cache-Control"] = no_cache

return WeatherStationsResponse(features=weather_stations)
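From a client's point of view, the ?source= query parameter disappears from both endpoints and a plain GET is all that's left. A minimal client sketch using httpx (an assumption; any HTTP client works), assuming the API is reachable on the forwarded port 8080 and the router is mounted under /api/stations:

```python
# Minimal client sketch; the /api/stations prefix and port 8080 are assumptions.
import asyncio
import httpx

async def main() -> None:
    async with httpx.AsyncClient(base_url="http://localhost:8080") as client:
        resp = await client.get("/api/stations/")   # no ?source= parameter any more
        resp.raise_for_status()
        stations = resp.json()["features"]          # WeatherStationsResponse.features
        print(f"{len(stations)} stations")

asyncio.run(main())
```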
(The remaining 32 changed files are not shown.)
