Commit

move logger
mmaelicke committed Jul 31, 2024
1 parent 03b4c96 commit 1e5bea2
Showing 11 changed files with 26 additions and 59 deletions.
5 changes: 4 additions & 1 deletion Dockerfile
@@ -2,7 +2,7 @@
FROM python:3.10.13

# install the toolbox runner tools
-RUN pip install json2args==0.6.1
+RUN pip install json2args==0.7.0

# Install GDAL which will be used by geopandas
RUN pip install --upgrade pip
@@ -45,6 +45,9 @@ RUN mkdir /src
COPY ./src /src
RUN mv /whitebox/WhiteboxTools_linux_amd64/WBT /src/WBT

+# copy the citation file - looks funny to make COPY not fail if the file is not there
+COPY ./CITATION.cf[f] /src/CITATION.cff

# download a precompiled binary of duckdb
# first line checks the architecture, and replaces x86_64 with amd64, which is what duckdb uses
RUN arch=$(uname -m | sed s/x86_64/amd64/) && \
1 change: 0 additions & 1 deletion docker-compose.yml
@@ -1,4 +1,3 @@
-version: '3.8'
services:
  db:
    image: postgis/postgis:15-3.4
2 changes: 1 addition & 1 deletion src/aggregator.py
@@ -7,7 +7,7 @@
import polars as pl

from param import load_params
-from logger import logger
+from json2args.logger import logger
from writer import dispatch_result_saver

class AggregationMacros(TypedDict):
2 changes: 1 addition & 1 deletion src/clip.py
@@ -9,7 +9,7 @@
from pyproj import CRS
from geocube.api.core import make_geocube

-from logger import logger
+from json2args.logger import logger
from param import load_params


2 changes: 1 addition & 1 deletion src/ingestor.py
@@ -9,7 +9,7 @@
import duckdb
from metacatalog.models import Entry

-from logger import logger
+from json2args.logger import logger
from param import load_params

SPATIAL_DIMENSIONS = ('lon', 'lat', 'z')
30 changes: 15 additions & 15 deletions src/loader.py
@@ -11,7 +11,7 @@
import geopandas as gpd
import rasterio as rio

-from logger import logger
+from json2args.logger import logger
from writer import dispatch_save_file, entry_metadata_saver
from param import load_params, Params
from utils import whitebox_log_handler
@@ -315,22 +315,22 @@ def error_handler(future):
tiles = [future.result() for future in futures if future.result() is not None]

# run the merge function and delete the other files
-if len(tiles) > 1:
-    logger.debug('Starting WhitboxTools mosaic operation...')
-    _wbt_merge_raster(dataset_base_path, f"{entry.variable.name.replace(' ', '_')}_{entry.id}.tif")
+# if len(tiles) > 1:
+# logger.debug('Starting WhitboxTools mosaic operation...')
+# _wbt_merge_raster(dataset_base_path, f"{entry.variable.name.replace(' ', '_')}_{entry.id}.tif")

-    # remove the tiles
-    for tile in tiles:
-        Path(tile).unlink()
+# # remove the tiles
+# for tile in tiles:
+# Path(tile).unlink()

-# check if there is exactly one tile
-elif len(tiles) == 1:
-    # rename the file
-    new_name = dataset_base_path / f"{entry.variable.name.replace(' ', '_')}_{entry.id}.tif"
-    Path(tiles[0]).rename(new_name)
-    tiles = [str(new_name)]
-else:
-    logger.warning(f'No tiles were clipped for the reference area. It might not be covered by dataset <ID={entry.id}>')
+# # check if there is exactly one tile
+# elif len(tiles) == 1:
+# # rename the file
+# new_name = dataset_base_path / f"{entry.variable.name.replace(' ', '_')}_{entry.id}.tif"
+# Path(tiles[0]).rename(new_name)
+# tiles = [str(new_name)]
+# else:
+# logger.warning(f'No tiles were clipped for the reference area. It might not be covered by dataset <ID={entry.id}>')

# save the metadata
metafile_name = str(params.dataset_path / f"{entry.variable.name.replace(' ', '_')}_{entry.id}.metadata.json")
35 changes: 0 additions & 35 deletions src/logger.py

This file was deleted.
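
With json2args pinned to 0.7.0 in the Dockerfile, the logger now ships with the package itself, so every "from logger import logger" above becomes "from json2args.logger import logger" and the local src/logger.py can be deleted. A minimal usage sketch, assuming the object exposed at json2args.logger behaves like a standard logging.Logger (the replaced imports confirm only the import path, not the interface):

    # sketch: the bundled json2args logger replaces the deleted src/logger.py
    from json2args.logger import logger

    logger.info("loading entry data")
    logger.warning("reference area not covered by dataset")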

2 changes: 1 addition & 1 deletion src/reporter.py
@@ -5,7 +5,7 @@
from ydata_profiling import ProfileReport

from param import load_params
-from logger import logger
+from json2args.logger import logger


def generate_profile_report(file_name: str) -> None:
2 changes: 1 addition & 1 deletion src/run.py
@@ -13,7 +13,7 @@

from param import load_params, Integrations
from loader import load_entry_data
-from logger import logger
+from json2args.logger import logger
import ingestor
import aggregator
import reporter
2 changes: 1 addition & 1 deletion src/utils.py
@@ -1,4 +1,4 @@
-from logger import logger
+from json2args.logger import logger

# define a handler for whiteboxgis tools verbose output
def whitebox_log_handler(msg: str):
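
The body of whitebox_log_handler is cut off by this hunk; purely for illustration, a hypothetical handler of this shape could forward WhiteboxTools' verbose output to the shared logger (not the actual implementation in src/utils.py):

    # hypothetical sketch only - the real whitebox_log_handler is not shown in this diff
    from json2args.logger import logger

    def whitebox_log_handler(msg: str):
        # route WhiteboxTools progress chatter to debug, everything else to info
        if "%" in msg or msg.lower().startswith("progress"):
            logger.debug(msg)
        else:
            logger.info(msg)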
2 changes: 1 addition & 1 deletion src/writer.py
@@ -13,7 +13,7 @@
import pandas as pd
import xarray as xr

-from logger import logger
+from json2args.logger import logger


# create a union of all supported Dataframe types
