Showing 43 changed files with 9,457 additions and 0 deletions.
29 files renamed without changes.
8,762 changes: 8,762 additions & 0 deletions
.../Pre-Processing/STOFS-3D-Pac-operation/NWM/Dai_Trenberth_climatology_1990-2018_hourly.csv
Large diffs are not rendered by default.
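Although the diff is not rendered, gen_sourcesink_oper.py (added below) reads this file with pandas, indexes it by a 'date' column, and sums its per-element discharge columns with the NWM-derived sources. A minimal sketch of that assumed layout, using hypothetical element-ID columns and values:

import io
import pandas as pd

# Hypothetical miniature of Dai_Trenberth_climatology_1990-2018_hourly.csv:
# a 'date' column plus one discharge column per source element, which is the
# layout implied by the read_csv/set_index('date') calls in the script below.
csv_text = """date,4970634,4658244
2024-01-01 00:00:00,12.3,0.8
2024-01-01 01:00:00,12.1,0.9
"""
df = pd.read_csv(io.StringIO(csv_text))
df.set_index('date', inplace=True)
print(df.loc['2024-01-01 00:00:00'])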
@@ -0,0 +1 @@
Climatology data
250 changes: 250 additions & 0 deletions
src/Utility/Pre-Processing/STOFS-3D-Pac-operation/NWM/gen_sourcesink_oper.py
@@ -0,0 +1,250 @@
import os
from datetime import datetime, timedelta
import glob
import argparse
import json

import numpy as np
import pandas as pd
from netCDF4 import Dataset


def read_featureID_file(filename):

    with open(filename) as f:
        lines = f.readlines()
        feature_ids = []
        for line in lines:
            feature_ids.append(line.split('\n')[0])
    return feature_ids

def write_th_file(dataset, timeinterval, fname, issource=True):

    data = []
    for values, interval in zip(dataset, timeinterval):
        if issource:
            data.append(" ".join([f"{interval:G}", *[f'{x: .4f}' for x in values], '\n']))
        else:
            data.append(" ".join([f"{interval:G}", *[f'{-x: .4f}' for x in values], '\n']))

    with open(fname, 'w+') as fid:
        fid.writelines(data)

def write_mth_file(temp, salinity, timeinterval, fname):

    data = []
    for interval in timeinterval:
        data.append(" ".join([f"{interval:G}", *[f'{x: .4f}' for x in temp], '\n']))
    for interval in timeinterval:
        data.append(" ".join([f"{interval:G}", *[f'{x: .4f}' for x in salinity], '\n']))

    with open(fname, 'w+') as fid:
        fid.writelines(data)

def get_aggregated_features(nc_feature_id, features):

    aggregated_features = []
    for source_feats in features:
        aggregated_features.extend(list(source_feats))

    in_file = []
    for feature in aggregated_features:
        idx = np.where(nc_feature_id == int(feature))[0]
        in_file.append(idx.item())

    in_file_2 = []
    sidx = 0
    for source_feats in features:
        eidx = sidx + len(source_feats)
        #in_file_2.append(in_file[sidx:eidx].tolist())
        in_file_2.append(in_file[sidx:eidx])
        sidx = eidx
    return in_file_2

def streamflow_lookup(file, indexes, threshold=-1e-5):
    nc = Dataset(file)
    streamflow = nc["streamflow"][:]
    streamflow[np.where(streamflow < threshold)] = 0.0
    # change masked values to zero
    streamflow[np.where(streamflow.mask)] = 0.0
    data = []
    for indxs in indexes:
        # Note: Dataset already considers the scale factor and offset.
        data.append(np.sum(streamflow[indxs]))
    nc.close()
    return data

if __name__ == '__main__':
    '''
    Usage: python gen_sourcesink_oper.py "yyyy-mm-dd" or "yyyy-mm-dd hh:mm:ss"
    Inputs are:
        1. sources_{conus, alaska, hawaii}_global.json
           sinks_{conus, alaska, hawaii}_global.json
        2. climatology file "Dai_Trenberth_climatology_1990-2018_hourly.csv"
        3. work directory "basepath"
        4. cached/ (all NWM netCDF files)
    '''

    # input parameters
    #argparser = argparse.ArgumentParser()
    #argparser.add_argument('date', type=datetime.fromisoformat, help='input file date')
    #args = argparser.parse_args()
    #startdate = args.date
    #startdate = datetime.now().date() - timedelta(days=1)
    date = datetime.now() - timedelta(days=1)
    startdate = datetime(date.year, date.month, date.day)
    print(f'startdate is {startdate}')

    # 1. region names
    layers = ['conus', 'alaska', 'hawaii']

    # 2. climatology file
    clima_filename = 'Dai_Trenberth_climatology_1990-2018_hourly.csv'

    # 3. base path
    basepath = '/home1/06923/hyu05/work/oper_3D_Pac/river/NWM'

    # generate time vectors (daily and hourly)
    rnday = timedelta(days=3)
    timevector1 = np.arange(startdate, startdate + rnday - timedelta(days=1), timedelta(days=1)).astype(datetime)
    timevector2 = np.arange(startdate, startdate + rnday + timedelta(hours=1), timedelta(hours=1)).astype(datetime)

    sources_all = {}
    sinks_all = {}
    eid_sources = []
    eid_sinks = []
    times = []
    dates = []

    for layer in layers:
        print(f'layer is {layer}')
        fname_source = f'./sources_{layer}_global.json'
        fname_sink = f'./sinks_{layer}_global.json'
        sources_fid = json.load(open(fname_source))
        sinks_fid = json.load(open(fname_sink))

        # add to the final list
        eid_sources.extend(list(sources_fid.keys()))
        eid_sinks.extend(list(sinks_fid.keys()))

        # link data
        files = glob.glob(f'./cached/nwm*.{layer}.nc')
        # remove old files
        if files:
            print('Remove old files')
            for f in files:
                try:
                    os.remove(f)
                except OSError as e:
                    print("Error: %s : %s" % (f, e.strerror))

        # link new data
        for i, date in enumerate(timevector2):
            if i == 0:
                date2 = timevector1[0] - timedelta(days=1)
                if layer == 'conus' or layer == 'alaska':
                    src = f'{basepath}/{date2.strftime("%Y%m%d")}/nwm.t00z.medium_range.channel_rt_1.f024.{layer}.nc'
                elif layer == 'hawaii':
                    src = f'{basepath}/{date2.strftime("%Y%m%d")}/nwm.t00z.short_range.channel_rt.f02400.{layer}.nc'
            elif i >= 1 and i <= 24:
                date2 = timevector1[0]
                if i == 24:
                    it = f'{int(date.hour)+24:03d}'
                else:
                    it = f'{int(date.hour):03d}'
                if layer == 'conus' or layer == 'alaska':
                    src = f'{basepath}/{date2.strftime("%Y%m%d")}/nwm.t00z.medium_range.channel_rt_1.f{it}.{layer}.nc'
                elif layer == 'hawaii':
                    src = f'{basepath}/{date2.strftime("%Y%m%d")}/nwm.t00z.short_range.channel_rt.f{it}00.{layer}.nc'
            else:
                date2 = timevector1[1]
                if i >= 25 and i <= 47:
                    it = f'{int(date.hour):03d}'
                elif i >= 48 and i <= 71:
                    it = f'{int(date.hour)+24:03d}'
                else:
                    it = f'{int(date.hour)+48:03d}'
                if layer == 'conus' or layer == 'alaska':
                    src = f'{basepath}/{date2.strftime("%Y%m%d")}/nwm.t00z.medium_range.channel_rt_1.f{it}.{layer}.nc'
                elif layer == 'hawaii':
                    src = f'{basepath}/{date2.strftime("%Y%m%d")}/nwm.t00z.short_range.channel_rt.f{it}00.{layer}.nc'
            dst = f'{basepath}/cached/nwm.t00z.{date.strftime("%Y%m%d%H")}.{layer}.nc'
            os.symlink(src, dst)

        files = glob.glob(f'./cached/nwm*.{layer}.nc')
        files.sort()
        print(f'file 0 is {files[0]}')
        nc_fid0 = Dataset(files[0])["feature_id"][:]
        src_idxs = get_aggregated_features(nc_fid0, sources_fid.values())
        snk_idxs = get_aggregated_features(nc_fid0, sinks_fid.values())

        sources = []
        sinks = []
        for fname in files:
            ds = Dataset(fname)
            ncfeatureid = ds['feature_id'][:]
            if not np.all(ncfeatureid == nc_fid0):
                print(f'Indexes of feature_id are changed in {fname}')
                src_idxs = get_aggregated_features(ncfeatureid, sources_fid.values())
                snk_idxs = get_aggregated_features(ncfeatureid, sinks_fid.values())
                nc_fid0 = ncfeatureid

            sources.append(streamflow_lookup(fname, src_idxs))
            sinks.append(streamflow_lookup(fname, snk_idxs))

            model_time = datetime.strptime(ds.model_output_valid_time, "%Y-%m-%d_%H:%M:%S")
            if layer == 'conus':
                dates.append(str(model_time))
                times.append((model_time - startdate).total_seconds())
            ds.close()
        sources_all[layer] = np.array(sources)
        sinks_all[layer] = np.array(sinks)

    sources = np.concatenate((sources_all['conus'], sources_all['alaska'], sources_all['hawaii']), axis=1)
    sinks = np.concatenate((sinks_all['conus'], sinks_all['alaska'], sinks_all['hawaii']), axis=1)
    print(sources.shape)
    print(sinks.shape)

    # combine with the Dai & Trenberth climatology
    df = pd.read_csv(clima_filename)
    df.set_index('date', inplace=True)
    mask = (pd.to_datetime(df.index) >= dates[0]) & (pd.to_datetime(df.index) <= dates[-1])
    df_forecast = df[mask]
    df_nwm = pd.DataFrame(data=sources, columns=np.array(eid_sources), index=np.array(dates))
    df_nwm_transposed = df_nwm.T
    df_forecast_transposed = df_forecast.T

    # concatenate the two dataframes
    df_nwm_transposed.reset_index(inplace=True)
    df_forecast_transposed.reset_index(inplace=True)

    df_source = pd.concat([df_nwm_transposed, df_forecast_transposed])

    # combine redundant elements
    df_source_final = df_source.groupby(by='index', sort=False).sum()

    # write to file
    sources2 = df_source_final.T.values
    write_th_file(sources2, times, 'vsource.th', issource=True)
    write_th_file(sinks, times, 'vsink.th', issource=False)

    # write msource.th
    eid_sources2 = df_source_final.index.values
    nsource = eid_sources2.shape[0]
    print(f'nsource is {nsource}')
    #temp = np.full(nsource, -9999.0)
    #salt = np.full(nsource, 30.0)
    #write_mth_file(temp, salt, times, 'msource.th')

    nsink = np.array(eid_sinks).shape[0]
    # write source_sink.in
    with open('source_sink.in', 'w+') as f:
        f.write('{:<d} \n'.format(nsource))
        for eid in eid_sources2:
            f.write('{:<d} \n'.format(int(eid)))
        f.write('\n')

        f.write('{:<d} \n'.format(nsink))
        for eid in eid_sinks:
            f.write('{:<d} \n'.format(int(eid)))
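For reference, a minimal sketch (with made-up element IDs and discharges) of the files this script writes: each vsource.th/vsink.th row is the elapsed time in seconds followed by one discharge per element, and source_sink.in lists the source count and element IDs, a blank line, then the sink count and IDs.

# Hypothetical two-source, one-sink example of the row layout produced by
# write_th_file and the source_sink.in writer above; all values are made up.
times = [0.0, 3600.0]                     # seconds since startdate
src_rows = [[12.3, 4.56], [11.9, 4.40]]   # discharge (m^3/s) per source, per time
snk_rows = [[0.8], [0.7]]                 # discharge (m^3/s) per sink, per time

with open('vsource.th', 'w') as fid:      # time, then one column per source
    for values, t in zip(src_rows, times):
        fid.write(" ".join([f"{t:G}", *[f"{x: .4f}" for x in values], "\n"]))

with open('vsink.th', 'w') as fid:        # sink discharges are written negated
    for values, t in zip(snk_rows, times):
        fid.write(" ".join([f"{t:G}", *[f"{-x: .4f}" for x in values], "\n"]))

with open('source_sink.in', 'w') as f:    # counts, then element IDs
    f.write('2 \n')                       # number of sources
    for eid in (101, 102):                # hypothetical source element IDs
        f.write(f'{eid} \n')
    f.write('\n')
    f.write('1 \n')                       # number of sinks
    f.write('201 \n')                     # hypothetical sink element ID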
1 change: 1 addition & 0 deletions
src/Utility/Pre-Processing/STOFS-3D-Pac-operation/NWM/sinks_alaska_global.json
@@ -0,0 +1 @@
{"4970634": [19020190110298, 19020190110283, 19020190110287], "4658244": [75004300002295], "4828060": [75004200014896, 75004200016417, 75004200004851, 75004200015918], "4436098": [75000200013037], "4970636": [19020190110328], "4851922": [75004200010120, 75004200013811, 75004200013811], "4797133": [75004200007016, 75004200005674, 75004200005400, 75004200015984], "4909493": [75004200001257], "4960991": [19020209002055], "4956345": [19020209002057], "4871574": [19020209002066], "4857721": [75004200013390, 75004200011542], "4387430": [75000100003120], "4637946": [75005400005141, 75005400005141], "4828062": [75004200014900, 75004200010896, 75004200016423, 75004200010897], "4992853": [19020190094050, 19020190094083, 19020190094104, 19020190094167, 19020190094220, 19020190094261, 19020190094339, 19020190094397], "4977905": [19020190106356, 19020190106357], "4710962": [75005400027562], "5023817": [19020190106389, 19020190106404, 19020190106476], "4912010": [75004200005473, 75004200005958, 75004200007782], "5007246": [19020190102313, 19020190102575], "4384024": [75000100002519], "4977909": [19020190108460, 19020190108905, 19020190108233], "4821876": [75004200012312], "4837108": [75004200001488, 75004200004692, 75004200012758], "4851928": [75004200015469, 75004200004424, 75004200002637, 75004200004421], "4834103": [75004200010330], "4970630": [19020190098304, 19020190098752, 19020190098752], "4951618": [19020209002328], "4997642": [19020190098372, 19020190098419, 19020190096406, 19020190097797], "4619119": [75005400009385, 75005400033132, 75005400033131, 75005400027161], "4954012": [19020209002369], "4612841": [75005400002094], "4703786": [75005400039449, 75005400039449], "4918914": [19020209002414], "5023823": [19020190106673, 19020190106975, 19020190107201, 19020190107202], "4857720": [75004200015236], "4918915": [19020209002496], "4973032": [19020190102634, 19020190102842], "4975435": [19020190102641, 19020190102850, 19020190103128], "4990426": [19020190094480, 19020190094487, 19020190094498, 19020190094631, 19020190094779, 19020190094780, 19020190094878], "4977884": [19020190094486, 19020190094486], "5012126": [19020190102689], "5000043": [19020190100644, 19020190100965, 19020190100966, 19020190100997, 19020190101000, 19020190101012, 19020190101118, 19020190101946], "4803394": [75004200015955], "4821873": [75004200015958, 75004200008548], "4445393": [75000200005256, 75000200005256], "4634785": [75005400034828], "4851927": [75004200015422], "5002441": [19020190102790], "4849008": [75004200008313], "4834100": [75004200015715], "4882763": [19020209000691, 19020209000691], "5007242": [19020190100872, 19020190101739, 19020190101950, 19020190102017], "4912008": [75004200015171, 75004200013306], "5016846": [19020190104988, 19020190105195, 19020190105203, 19020190105209, 19020190105225, 19020190105286, 19020190105300, 19020190105399, 19020190105528], "4926489": [19020209000760], "4912004": [19020209002865], "4439194": [75000200015573, 75000200015573], "4923975": [19020209002890], "4911535": [19020209000848], "4445394": [75000200015474, 75000200002697], "4612822": [75005400014559], "5023825": [19020190109244], "4929207": [19020209002978, 19020209002980], "4825017": [75004200010424], "4921443": [19020209002994], "4929218": [19020209003001], "4442293": [75000200002699], "4631651": [75005400015652], "4923973": [19020209003020], "4615964": [75005400038913, 75005400026924], "4929653": [19020209003058], "4965791": [19020190101156], "4936902": [19020209003070], "4956386": [19020209003084], "4831103": 
[75004200004546, 75004200005592], "5004836": [19020190103356, 19020190103459], "4916446": [19020209001192, 19020209001200], "5014511": [19020190103381], "4718029": [75005400009950], "4600757": [75004500000035, 75004500000152], "5026102": [19020190109588, 19020190109604], "4634849": [75004500000176], "4603859": [75004500000073], "4240511": [75000400000018], "4890936": [19020209001340], "4644459": [75004500000126], "4638019": [75004500000174], "4898795": [19020209001378, 19020209001386], "4603871": [75004500000149], "5007261": [19020190091297], "4901375": [19020209001418], "5007251": [19020190103626], "4901351": [19020209001439], "4644463": [75004500000092], "4982882": [19020190095456], "4668496": [75004300006599, 75004300006599], "4871566": [19020209001573], "4953956": [19020209001619], "4987865": [19020190093572], "4975437": [19020190097694], "5007244": [19020190099769, 19020190099769], "4834127": [75004200013826], "4982884": [19020190093649], "4958643": [19020209001711], "4882784": [19020209001734], "4975444": [19020190105971], "4828063": [75004200003600], "4854847": [75004200009820], "4977910": [19020190108082], "5021515": [19020190106185]} |
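Each of these JSON files maps a SCHISM element ID (the key, stored as a string) to the NWM feature_ids whose streamflow is aggregated into that element. A small sketch of how the script consumes such a mapping, with a hypothetical two-entry dictionary standing in for the full file:

import numpy as np

# Hypothetical miniature of sinks_{layer}_global.json: element ID -> feature_ids.
sinks_fid = {"4970634": [19020190110298, 19020190110283],
             "4658244": [75004300002295]}

# feature_id order as it might appear in one NWM channel_rt netCDF file.
nc_feature_id = np.array([75004300002295, 19020190110283, 19020190110298])

# The same index lookup get_aggregated_features performs: for each element,
# find where its feature_ids sit inside the netCDF feature_id array.
snk_idxs = [[np.where(nc_feature_id == int(fid))[0].item() for fid in feats]
            for feats in sinks_fid.values()]
print(snk_idxs)  # [[2, 1], [0]]; streamflow_lookup then sums each group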
1 change: 1 addition & 0 deletions
src/Utility/Pre-Processing/STOFS-3D-Pac-operation/NWM/sinks_conus_global.json
@@ -0,0 +1 @@
{"5605090": [23833453], "5478931": [23909591], "5605167": [23834127], "5466112": [23910085], "5470422": [23910129], "5474461": [8317367], "5475175": [8317409, 8316869], "4507502": [2785487], "5530602": [23865354], "4045720": [2785455], "4076959": [2785461], "5181107": [20359138, 20359346], "4852907": [2785513], "5532603": [23866220], "5532957": [23866344], "4385144": [2784697], "5673212": [23815100], "3831587": [2785637], "5596055": [23833295], "5226520": [20362621], "4858672": [2785519], "5598537": [23833001], "5596219": [23833805], "5615618": [24520390], "5615747": [24520402], "5615876": [24520402], "5615749": [24520402], "5597481": [23833869], "5619647": [24248484], "5577862": [23832763], "5576239": [23832767], "5578706": [23832895], "5581665": [23871215], "4907456": [17693921], "4971592": [1670483], "4969207": [1670483], "5679735": [17693935], "5679731": [17693935], "4878279": [17693945], "4894598": [17693945], "4881076": [17693945], "4537637": [15048277], "4573851": [15048277], "4458668": [15048251], "5500001": [24001179], "5503018": [23880632], "4694594": [2788515], "4655870": [2788515], "5576236": [23833229], "5576093": [23833229], "5679501": [2788361], "5510341": [23886126], "4611363": [2788359], "5679719": [17695611], "4855513": [17695611], "4828931": [17695611], "5679724": [17695611], "5506420": [23838450], "5482318": [4438274], "4608003": [2787611], "4589272": [2787611], "5673901": [23736165], "5673422": [23736171], "5673385": [23736171], "5606410": [24520820], "5606616": [24520880], "5623873": [24521530], "5624535": [24521530], "5623084": [24521530], "5623603": [24521530], "4683809": [2788557], "4673371": [2788385], "4705417": [2788393], "4669514": [2788517], "4652623": [2788517], "4669513": [2788517], "4683810": [2788517], "5679534": [2788517], "4522471": [163864212], "5621191": [24521426], "5591636": [23834151], "5257503": [17590146], "5604277": [23833481]} |
1 change: 1 addition & 0 deletions
src/Utility/Pre-Processing/STOFS-3D-Pac-operation/NWM/sinks_hawaii_global.json
@@ -0,0 +1 @@
{"4060960": [800007883], "4015271": [800006935, 800006934], "4010580": [800002348, 800004607, 800002353, 800002351, 800006915], "4005886": [800004596, 800011345, 800000082, 800009131, 800004597, 800006904, 800011357, 800011355, 800009137, 800006901, 800011344, 800006895, 800006899], "4082964": [800012378], "4015310": [800009580, 800009580], "4082956": [800016767], "4038386": [800000434], "4082950": [800005576], "4038434": [800005270, 800014189], "4010582": [800015802], "4038394": [800014105], "4060989": [800007545]} |
1 change: 1 addition & 0 deletions
src/Utility/Pre-Processing/STOFS-3D-Pac-operation/NWM/sources_alaska_global.json
Large diffs are not rendered by default.