more documentation: ush/generate_fire_emissions.py, ush/interp_tools.py, ush/set_namelist.py #521

Draft · wants to merge 3 commits into main
25 changes: 14 additions & 11 deletions ush/generate_fire_emissions.py
@@ -1,10 +1,8 @@
#########################################################################
# #
# Python script for fire emissions preprocessing from RAVE FRP and FRE #
# (Li et al.,2022). #
# [email protected] #
# #
#########################################################################
"""
Python script for fire emissions preprocessing from RAVE FRP and FRE
(Li et al.,2022).
[email protected]
"""
import sys
import os
import time
@@ -13,14 +11,19 @@
import HWP_tools
import interp_tools as i_tools

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Workflow
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def generate_emiss_workflow(staticdir, ravedir, newges_dir, predef_grid):
"""
Workflow

staticdir: ???
ravedir: ???
newges_dir: ???
predef_grid: ???
"""

# ----------------------------------------------------------------------
# Import envs from workflow and get the predifying grid
# Set variable names, constants and unit conversions
# Set variable names constants and unit conversions
# Set predefined grid
# Set directories
# ----------------------------------------------------------------------
113 changes: 101 additions & 12 deletions ush/interp_tools.py
@@ -1,3 +1,6 @@
"""
Interpolation tools.
"""
import datetime as dt
import pandas as pd
import os
@@ -7,8 +10,13 @@
import numpy as np
from netCDF4 import Dataset

#Create date range, this is later used to search for RAVE and HWP from previous 24 hours
def date_range(current_day):
"""
Create date range; this is later used to search for RAVE and HWP from the previous 24 hours.

current_day: ???
"""

print(f'Searching for interpolated RAVE for {current_day}')

fcst_datetime = dt.datetime.strptime(current_day, "%Y%m%d%H")
@@ -19,8 +27,15 @@ def date_range(current_day):
print(f'Current cycle: {fcst_datetime}')
return(fcst_dates)
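As a quick illustration of what the new `date_range` docstring describes, here is a hedged sketch. Only the `"%Y%m%d%H"` parsing is visible in the diff above; the window construction, the helper name `date_range_sketch`, and the example cycle are illustrative assumptions, not the collapsed implementation.

```python
# Hypothetical sketch, not the collapsed implementation: build the
# "previous 24 hours" stamps the docstring refers to.
import datetime as dt
import pandas as pd

def date_range_sketch(current_day):
    """Return the 24 hourly YYYYMMDDHH stamps preceding current_day."""
    fcst_datetime = dt.datetime.strptime(current_day, "%Y%m%d%H")  # parsing as in the visible code
    start = fcst_datetime - dt.timedelta(hours=24)
    stamps = pd.date_range(start=start, periods=24, freq="h")
    return [t.strftime("%Y%m%d%H") for t in stamps]

print(date_range_sketch("2023061512")[:2])  # ['2023061412', '2023061413']
```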

# Check if interoplated RAVE is available for the previous 24 hours
def check_for_intp_rave(intp_dir, fcst_dates, rave_to_intp):
"""
Check if interpolated RAVE is available for the previous 24 hours.

intp_dir: ???
fcst_dates: ???
rave_to_intp: ???

"""
intp_avail_hours = []
intp_non_avail_hours = []
# There are four situations here.
@@ -48,8 +63,14 @@ def check_for_intp_rave(intp_dir, fcst_dates, rave_to_intp):

return(intp_avail_hours, intp_non_avail_hours, inp_files_2use)
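To make the availability check concrete, a minimal sketch follows; the interpolated-file naming pattern and the use of `inp_files_2use` as a plain flag are assumptions, since the function body is collapsed in this diff.

```python
# Hedged sketch of the availability check; the file-name pattern below is
# an assumption, not taken from this PR.
import os

def check_for_intp_rave_sketch(intp_dir, fcst_dates, rave_to_intp):
    intp_avail_hours, intp_non_avail_hours = [], []
    inp_files_2use = False
    for date in fcst_dates:
        candidate = os.path.join(intp_dir, f"{rave_to_intp}{date}00_{date}59.nc")  # assumed pattern
        if os.path.exists(candidate):
            intp_avail_hours.append(date)
            inp_files_2use = True
        else:
            intp_non_avail_hours.append(date)
    return intp_avail_hours, intp_non_avail_hours, inp_files_2use
```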

#Check if raw RAVE in intp_non_avail_hours list is available for interpolatation
def check_for_raw_rave(RAVE, intp_non_avail_hours, intp_avail_hours):
"""
Check if raw RAVE in intp_non_avail_hours list is available for interpolation.

RAVE: ???
intp_non_avail_hours: ???
intp_avail_hours: ???
"""
rave_avail = []
rave_avail_hours = []
rave_nonavail_hours_test = []
@@ -72,8 +93,15 @@ def check_for_raw_rave(RAVE, intp_non_avail_hours, intp_avail_hours):
print(f'FIRST DAY?: {first_day}')
return(rave_avail, rave_avail_hours, rave_nonavail_hours_test, first_day)

#Create source and target fields
def creates_st_fields(grid_in, grid_out, intp_dir, rave_avail_hours):
"""
Create source and target fields.

grid_in: ???
grid_out: ???
intp_dir: ???
rave_avail_hours: ???
"""

# Open datasets with context managers
with xr.open_dataset(grid_in) as ds_in, xr.open_dataset(grid_out) as ds_out:
@@ -91,17 +119,31 @@ def creates_st_fields(grid_in, grid_out, intp_dir, rave_avail_hours):
print('Grid in and out files available. Generating target and source fields')
return(srcfield, tgtfield, tgt_latt, tgt_lont, srcgrid, tgtgrid, src_latt, tgt_area)
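The xarray context managers are the only part of this function visible above; the sketch below shows that part and stubs out the ESMF grid/field construction. The coordinate variable names are assumptions.

```python
# Hedged sketch of the visible xarray portion; "grid_latt"/"grid_lont" are
# assumed coordinate variable names, and the ESMF objects are stubbed out.
import xarray as xr

def creates_st_fields_sketch(grid_in, grid_out):
    with xr.open_dataset(grid_in) as ds_in, xr.open_dataset(grid_out) as ds_out:
        src_latt = ds_in["grid_latt"].values   # assumed name
        tgt_latt = ds_out["grid_latt"].values  # assumed name
        tgt_lont = ds_out["grid_lont"].values  # assumed name
    # srcgrid/tgtgrid and srcfield/tgtfield (ESMF Grid and Field objects)
    # are built from these arrays in the collapsed part of the function.
    return src_latt, tgt_latt, tgt_lont
```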

#Define output and variable meta data
def create_emiss_file(fout, cols, rows):
"""Create necessary dimensions for the emission file."""
"""Create necessary dimensions for the emission file.

fout: ???
cols: ???
rows: ???
"""
fout.createDimension('t', None)
fout.createDimension('lat', cols)
fout.createDimension('lon', rows)
setattr(fout, 'PRODUCT_ALGORITHM_VERSION', 'Beta')
setattr(fout, 'TIME_RANGE', '1 hour')

def Store_latlon_by_Level(fout, varname, var, long_name, units, dim, fval, sfactor):
"""Store a 2D variable (latitude/longitude) in the file."""
"""Store a 2D variable (latitude/longitude) in the file.

fout: ???
varname: ???
var: ???
long_name: ???
units: ???
dim: ???
fval: ???
sfactor: ???
"""
var_out = fout.createVariable(varname, 'f4', ('lat','lon'))
var_out.units=units
var_out.long_name=long_name
@@ -111,16 +153,35 @@ def Store_latlon_by_Level(fout, varname, var, long_name, units, dim, fval, sfact
var_out.coordinates='geolat geolon'

def Store_by_Level(fout, varname, long_name, units, dim, fval, sfactor):
"""Store a 3D variable (time, latitude/longitude) in the file."""
"""Store a 3D variable (time, latitude/longitude) in the file.

fout: ???
varname: ???
long_name: ???
units: ???
dim: ???
fval: ???
sfactor: ???
"""
var_out = fout.createVariable(varname, 'f4', ('t','lat','lon'))
var_out.units=units
var_out.long_name = long_name
var_out.standard_name=long_name
var_out.FillValue=fval
var_out.coordinates='t geolat geolon'
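The three helpers above are meant to be called against an open netCDF4 Dataset; here is a hedged usage sketch based only on the signatures shown in this diff. The output file name, fill value, and the `frp_avg_hr` variable are illustrative placeholders, not taken from the PR.

```python
# Hedged usage sketch; file name, fill value, and variable names are
# illustrative placeholders.
import numpy as np
from netCDF4 import Dataset
from interp_tools import create_emiss_file, Store_latlon_by_Level, Store_by_Level

cols, rows = 5, 4
tgt_latt = np.zeros((cols, rows), dtype="f4")
tgt_lont = np.zeros((cols, rows), dtype="f4")

with Dataset("example_emiss.nc", "w") as fout:
    create_emiss_file(fout, cols, rows)  # dimensions t/lat/lon plus global attributes
    Store_latlon_by_Level(fout, "geolat", tgt_latt, "latitude", "degrees", "2D", 1e20, 1.0)
    Store_latlon_by_Level(fout, "geolon", tgt_lont, "longitude", "degrees", "2D", 1e20, 1.0)
    Store_by_Level(fout, "frp_avg_hr", "mean fire radiative power", "MW", "3D", 1e20, 1.0)
```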

#create a dummy rave interpolated file if first day or regrider fails
def create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows):
"""
Create a dummy RAVE interpolated file if first day or regridder fails.

intp_dir: ???
current_day: ???
tgt_latt: ???
tgt_lont: ???
cols: ???
rows: ???

"""
file_path = os.path.join(intp_dir, f'SMOKE_RRFS_data_{current_day}00.nc')
dummy_file = np.zeros((cols, rows)) # Changed to 3D to match the '3D' dimensions
with Dataset(file_path, 'w') as fout:
@@ -143,8 +204,18 @@ def create_dummy(intp_dir, current_day, tgt_latt, tgt_lont, cols, rows):

return "Emissions dummy file created successfully"

#generate regridder
def generate_regrider(rave_avail_hours, srcfield, tgtfield, weightfile, inp_files_2use, intp_avail_hours):
"""
Generate regridder.

rave_avail_hours: ???
srcfield: ???
tgtfield: ???
weightfile: ???
inp_files_2use: ???
intp_avail_hours: ???
"""

print('Checking conditions for generating regridder.')
use_dummy_emiss = len(rave_avail_hours) == 0 and len(intp_avail_hours) == 0
regridder = None
@@ -167,9 +238,27 @@ def generate_regrider(rave_avail_hours, srcfield, tgtfield, weightfile, inp_file

return(regridder, use_dummy_emiss)
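Since the regridder construction itself is collapsed above, here is a control-flow sketch of the behaviour that the docstring and the visible `use_dummy_emiss` logic describe; the weight-file reuse detail and the `build_regridder` callable are stand-ins, not the PR's actual ESMF calls.

```python
# Control-flow sketch only; build_regridder stands in for the collapsed
# ESMF regridder construction.
import os

def generate_regridder_sketch(rave_avail_hours, intp_avail_hours, weightfile, build_regridder):
    use_dummy_emiss = len(rave_avail_hours) == 0 and len(intp_avail_hours) == 0
    regridder = None
    if not use_dummy_emiss:
        try:
            # Assumption: reuse pre-computed weights when present, else regenerate them.
            regridder = build_regridder(weightfile, reuse=os.path.exists(weightfile))
        except (OSError, RuntimeError) as e:
            print(f"Regridder generation failed ({e}); falling back to dummy emissions")
            use_dummy_emiss = True
    return regridder, use_dummy_emiss
```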

#process RAVE available for interpolation
def interpolate_rave(RAVE, rave_avail, rave_avail_hours, use_dummy_emiss, vars_emis, regridder,
srcgrid, tgtgrid, rave_to_intp, intp_dir, src_latt, tgt_latt, tgt_lont, cols, rows):
"""
Process RAVE available for interpolation.

RAVE: ???
rave_avail: ???
rave_avail_hours: ???
use_dummy_emiss: ???
vars_emis: ???
regridder: ???
srcgrid: ???
tgtgrid: ???
rave_to_intp: ???
intp_dir: ???
src_latt: ???
tgt_latt: ???
tgt_lont: ???
cols: ???
rows: ???
"""
for index, current_hour in enumerate(rave_avail_hours):
file_name = rave_avail[index]
rave_file_path = os.path.join(RAVE, file_name[0])
@@ -221,4 +310,4 @@ def interpolate_rave(RAVE, rave_avail, rave_avail_hours, use_dummy_emiss, vars_e
except (OSError, IOError, RuntimeError, FileNotFoundError, TypeError, IndexError, MemoryError) as e:
print(f"Error reading NetCDF file {rave_file_path}: {e}")
else:
print(f"File not found or dummy emissions required: {rave_file_path}")
print(f"File not found or dummy emissions required: {rave_file_path}")