-
Notifications
You must be signed in to change notification settings - Fork 9
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Hook up 24 precip tiff rolling computations (#3762)
- Hooks up the 24-hour accumulating precip raster calculations as part of the RDPS job. - Uses the start datetime of the job (now in UTC) as the seed. - Decided to start by storing every computed TIFF in S3 for now, to be able to inspect intermediate results in dev if need be; this can be changed to use memory only later (using the same keys as keys in a hashmap). - Closes #3682
- Loading branch information
Showing
6 changed files
with
165 additions
and
56 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,62 +1,71 @@ | ||
""" Utils to help with s3 | ||
""" | ||
"""Utils to help with s3""" | ||
|
||
import logging | ||
from typing import Generator, Tuple | ||
from contextlib import asynccontextmanager | ||
from aiobotocore.client import AioBaseClient | ||
from aiobotocore.session import get_session | ||
from botocore.exceptions import ClientError | ||
from osgeo import gdal | ||
from app import config | ||
|
||
logger = logging.getLogger(__name__) | ||
|
||
|
||
@asynccontextmanager
async def get_client() -> Generator[Tuple[AioBaseClient, str], None, None]:
    """Return AioBaseClient client and bucket"""
    # Object-store connection settings all come from the application config.
    server = config.get("OBJECT_STORE_SERVER")
    user_id = config.get("OBJECT_STORE_USER_ID")
    secret_key = config.get("OBJECT_STORE_SECRET")
    bucket = config.get("OBJECT_STORE_BUCKET")

    session = get_session()
    async with session.create_client(
        "s3",
        endpoint_url=f"https://{server}",
        aws_secret_access_key=secret_key,
        aws_access_key_id=user_id,
    ) as client:
        try:
            yield client, bucket
        finally:
            # Drop the local reference once the caller is done with the client.
            del client
|
||
|
||
async def object_exists(client: AioBaseClient, bucket: str, target_path: str):
    """Check if an object exists in the object store.

    :param client: Async S3 client used for the lookup.
    :param bucket: Bucket to search in.
    :param target_path: Exact object key to look for.
    :return: True if an object with exactly this key exists, False otherwise.
    """
    # list_objects_v2 matches by prefix, so filter for an exact key match below.
    # (Using a stat/head call could work as well; unclear which is best here.)
    result = await client.list_objects_v2(Bucket=bucket, Prefix=target_path)
    contents = result.get("Contents", None)
    if contents:
        return any(content.get("Key") == target_path for content in contents)
    return False
|
||
|
||
async def object_exists_v2(target_path: str):
    """Check if an object exists in the object store.

    Convenience wrapper around object_exists that creates its own
    client and bucket from the application config.

    :param target_path: Exact object key to look for.
    :return: True if an object with exactly this key exists, False otherwise.
    """
    async with get_client() as (client, bucket):
        return await object_exists(client, bucket, target_path)
|
||
|
||
async def read_into_memory(key: str):
    """Read a raster object from the object store into memory via GDAL.

    :param key: Object key of the raster to read.
    :return: Tuple of (band-1 array, geotransform, projection), or
             (None, None, None) if no object exists at the key.
    :raises ClientError: For any object-store error other than a missing key.
    """
    async with get_client() as (client, bucket):
        try:
            s3_source = await client.get_object(Bucket=bucket, Key=key)
            s3_data = await s3_source["Body"].read()
            # Stage the bytes in GDAL's in-memory filesystem so gdal.Open can read them.
            mem_path = f"/vsimem/{key}"
            gdal.FileFromMemBuffer(mem_path, s3_data)
            data_source = gdal.Open(mem_path, gdal.GA_ReadOnly)
            gdal.Unlink(mem_path)
            data_band = data_source.GetRasterBand(1)
            data_geotransform = data_source.GetGeoTransform()
            data_projection = data_source.GetProjection()
            data_array = data_band.ReadAsArray()
            # Rebinding to None drops the reference so GDAL can close the dataset;
            # the original's additional `del` of the same name was redundant.
            data_source = None
            return (data_array, data_geotransform, data_projection)
        except ClientError as ex:
            # A missing object is an expected case; report it as (None, None, None).
            if ex.response["Error"]["Code"] == "NoSuchKey":
                logger.info("No object found for key: %s", key)
                return (None, None, None)
            raise
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.