Feature coldpool #34

Merged (15 commits) on May 24, 2024
105 changes: 103 additions & 2 deletions mom6/mom6_module/mom6_indexes.py
@@ -30,8 +30,7 @@ def __init__(
ds_data : xr.Dataset,
ssh_name : str = 'ssh'
) -> None:
"""_summary_

"""
Parameters
----------
ds_data : xr.Dataset
@@ -187,3 +186,105 @@ def generate_index(
ds_gs['gulf_stream_index'] = da_gs_index

return ds_gs

class ColdPoolIndex:
"""
This class is used to compute the Cold Pool Index.
Original sources are [Ross et al., 2023](https://gmd.copernicus.org/articles/16/6943/2023/)
and the [GFDL CEFI GitHub repository]
(https://github.com/NOAA-GFDL/CEFI-regional-MOM6/blob/main
/diagnostics/physics/NWA12/coldpool.py).
"""
def __init__(
self,
ds_data: xr.Dataset,
ds_cpi_mask: xr.Dataset,
bottom_temp_name: str = 'bottomT',
mask_name: str = 'CPI_mask'
) -> None:
"""
Parameters
----------
ds_data: xr.Dataset
The bottom temperature dataset used to
derive the cold pool index.
ds_cpi_mask: xr.Dataset
The CPI mask.
bottomT_name" str
The bottom temperature variable name in the data set
mask_name" str
The CPI mask variable name in the `ds_cpi_mask`
"""
self.dataset = ds_data
self.mask = ds_cpi_mask
self.varname = bottom_temp_name
self.maskname = mask_name

def regrid_and_mask(self) -> xr.DataArray:
"""
Regrid the MOM6 model data to the mask grid
and apply the mask.

Returns
-------
da_regrid : xr.DataArray
Regridded and masked bottom temperature data array.
"""
ds_mask = self.mask
ds_data = self.dataset

# Regrid the regional MOM6 data to the GLORYS grid
# Use xESMF to create a regridder with the bilinear method
# !!!! This regridding is only suited for geolon/geolat coordinates on the x and y dims
regridder = xe.Regridder(
ds_data.rename({'geolon':'lon','geolat':'lat'}),
ds_mask,
"bilinear",
unmapped_to_nan=True
)

# Perform regrid using adaptive masking
# https://pangeo-xesmf.readthedocs.io/en/latest/
# notebooks/Masking.html#Adaptive-masking
da_regrid = regridder(ds_data[self.varname], skipna=True, na_thres=0.25).compute()
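# Apply the CPI mask to the regridded bottom temperature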
da_regrid = da_regrid*ds_mask[self.maskname]

return da_regrid

def generate_index(self):
'''
Mask the data to the cold pool domain and calculate the index.
Cold pool domain:
Depth: between the 20 m and 200 m isobaths
Time: June through September, 1959 to 2022
Temperature: average bottom temperature cooler than 10 degrees Celsius (and > 6?)
Location: Mid-Atlantic Bight (MAB) domain between 38N and 41.5N and between 75W and 68.5W

Returns
-------
da_cpi_ann : xr.DataArray
Yearly cold pool index computed from the June-September bottom
temperature anomaly relative to the long-term mean

'''
# Regrid the data and apply the mask
da_regrid = self.regrid_and_mask()

# Calculate the annual (Jun-Sep) time series and the long-term mean at each grid point
da_tob_jun2sep = da_regrid.where(
(da_regrid['time.month']>=6)&
(da_regrid['time.month']<=9),
drop=True
)
da_tob_ann = (
da_tob_jun2sep
.groupby(da_tob_jun2sep['time.year'])
.mean(dim='time')
).compute()
da_tob_ann_ltm = da_tob_ann.mean('year')

# Compute the Cold Pool Index using the logic found here:
# https://noaa-edab.github.io/tech-doc/cold_pool.html
da_tob_ann_anom = da_tob_ann-da_tob_ann_ltm
da_cpi_ann = da_tob_ann_anom.mean(['latitude', 'longitude'])

return da_cpi_ann
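For reference, a minimal usage sketch of the new class (not part of the PR itself): the import path is assumed to mirror the file layout, and the bottom-temperature file name is hypothetical, while 'cpi_mask.nc', 'bottomT', and 'CPI_mask' come from the defaults shown above.

import xarray as xr
from mom6.mom6_module.mom6_indexes import ColdPoolIndex  # assumed import path

# Hypothetical bottom-temperature file; it must carry 'geolon'/'geolat'
# coordinates because regrid_and_mask renames them to 'lon'/'lat' for xESMF.
ds_btm = xr.open_dataset('mom6_bottom_temp.nc')
# CPI mask on the GLORYS grid (file name taken from get_cpi_mask below)
ds_mask = xr.open_dataset('masks/cpi_mask.nc')

cpi = ColdPoolIndex(
    ds_data=ds_btm,
    ds_cpi_mask=ds_mask,
    bottom_temp_name='bottomT',   # default variable name in the class
    mask_name='CPI_mask'          # default mask variable name in the class
)

# Yearly cold pool index: Jun-Sep bottom temperature anomaly relative to the
# long-term mean, averaged over the masked domain
da_cpi_ann = cpi.generate_index()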
48 changes: 42 additions & 6 deletions mom6/mom6_module/mom6_io.py
@@ -222,7 +222,8 @@ def get_all(self) -> xr.Dataset:
test_regrid_lon = ds['lon']
test_regrid_lat = ds['lat']
raise OSError(
f'regrid file should not have lon({len(test_regrid_lon)}) lat({len(test_regrid_lat)}) dim. '+
'regrid file should not have '+
f'lon({len(test_regrid_lon)}) lat({len(test_regrid_lat)}) dim. '+
'Check data directory path or grid setting!')
except KeyError:
pass
@@ -252,7 +253,8 @@ def get_all(self) -> xr.Dataset:
test_raw_x = ds['xh']
test_raw_y = ds['yh']
raise OSError(
f'regrid file should not have xh({len(test_raw_x)}) yh({len(test_raw_y)}) dim. '+
'regrid file should not have '+
f'xh({len(test_raw_x)}) yh({len(test_raw_y)}) dim. '+
'Check data directory path or grid setting!')
except KeyError:
pass
@@ -325,7 +327,8 @@ def get_tercile(
test_regrid_lon = ds['lon']
test_regrid_lat = ds['lat']
raise OSError(
f'regrid file should not have lon({len(test_regrid_lon)}) lat({len(test_regrid_lat)}) dim. '+
'regrid file should not have '+
f'lon({len(test_regrid_lon)}) lat({len(test_regrid_lat)}) dim. '+
'Check data directory path or grid setting!')
except KeyError:
pass
@@ -362,7 +365,8 @@ def get_tercile(
test_raw_x = ds['xh']
test_raw_y = ds['yh']
raise OSError(
f'regrid file should not have xh({len(test_raw_x)}) yh({len(test_raw_y)}) dim. '+
'regrid file should not have '+
f'xh({len(test_raw_x)}) yh({len(test_raw_y)}) dim. '+
'Check data directory path or grid setting!')
except KeyError:
pass
@@ -572,7 +576,8 @@ def get_all(self) -> xr.Dataset:
test_regrid_lon = ds['lon']
test_regrid_lat = ds['lat']
raise OSError(
f'regrid file should not have lon({len(test_regrid_lon)}) lat({len(test_regrid_lat)}) dim. '+
'regrid file should not have '+
f'lon({len(test_regrid_lon)}) lat({len(test_regrid_lat)}) dim. '+
'Check data directory path or grid setting!')
except KeyError:
pass
@@ -601,7 +606,8 @@ def get_all(self) -> xr.Dataset:
test_raw_x = ds['xh']
test_raw_y = ds['yh']
raise OSError(
f'regrid file should not have xh({len(test_raw_x)}) yh({len(test_raw_y)}) dim. '+
'regrid file should not have '+
f'xh({len(test_raw_x)}) yh({len(test_raw_y)}) dim. '+
'Check data directory path or grid setting!')
except KeyError:
pass
@@ -701,6 +707,36 @@ def get_regionl_mask(

return ds

@staticmethod
def get_cpi_mask(
data_relative_dir : str
) -> xr.Dataset:
"""return the Cold Pool Index mask in the GLORYS grid.

The mask is currently derived by Chia-Wei Hsu based
solely on the avialable GLORYS data.

The mask has three main criterias
1. within EPU MAB (Mid-Atlantic Bight)
=> within (38N-41.5N,75W-68.5W)
=> within (<41N, <70W)
2. Only consider bottom temperature between 20m-200m isobath
3. Long term mean (1993-2022) of annual mean (Jun-Sep) cooler than 10degC

Parameters
----------
data_relative_dir : str
Relative path from the DATAPATH set in the config file to
the mask data, e.g. setting 'masks/' makes the absolute
path DATAPATH/masks/

Returns
-------
xr.Dataset
The xarray Dataset of the CPI mask on the GLORYS grid
"""
return xr.open_dataset(os.path.join(DATA_PATH, data_relative_dir, "cpi_mask.nc"))

@staticmethod
def get_grid(
data_relative_dir : str
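A hedged sketch of how the new get_cpi_mask helper might be called; the containing class name (MOM6Misc) is hypothetical since it is not visible in this diff, and 'masks/' follows the path convention described in the docstring.

# Hypothetical: 'MOM6Misc' stands in for the class in mom6_io.py that actually
# defines the get_cpi_mask staticmethod (its name is not shown in this diff).
from mom6.mom6_module.mom6_io import MOM6Misc

# 'masks/' is relative to the configured DATA_PATH, so this opens
# DATA_PATH/masks/cpi_mask.nc on the GLORYS grid.
ds_cpi_mask = MOM6Misc.get_cpi_mask(data_relative_dir='masks/')
print(ds_cpi_mask['CPI_mask'])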
32 changes: 19 additions & 13 deletions mom6/notebook/cold_pool_index.ipynb

Large diffs are not rendered by default.
