Skip to content

Commit

Permalink
pre copernicus merge
Browse files Browse the repository at this point in the history
  • Loading branch information
fcseidl committed Jan 5, 2024
1 parent e031485 commit cc59440
Show file tree
Hide file tree
Showing 6 changed files with 149 additions and 4 deletions.
Empty file removed Copernicus/api.py
Empty file.
4 changes: 2 additions & 2 deletions Tests/test_download.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ def test_lonlatbox_across_idl():
L2AAPI(),
["BEAM0101"],
keepobj,
constraindf=shotconstraint,
shotconstraint=shotconstraint,
progess_bar=False
)
# all shots in bounding box
Expand Down Expand Up @@ -55,7 +55,7 @@ def test_buffered_cities():
L2AAPI(),
['BEAM0101', 'BEAM0110', 'BEAM1000', 'BEAM1011'],
keepobj=keepobj,
constraindf=Buffer(500000, cities),
shotconstraint=Buffer(500000, cities),
nproc=2
)
plt.scatter([138.60], [-34.93], label='Adelaide')
Expand Down
59 changes: 59 additions & 0 deletions evergladesgranules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
from datetime import date
from concurrent import futures

import numpy as np
import json
from Spherical.arc import Polygon
from GEDI.granuleconstraint import RegionGC
from GEDI.api import L2AAPI

import matplotlib.pyplot as plt


"""
Create an L2AAPI object to access the granules' metadata. This initialization
will fail if the credentials in BEX_USER and BEX_PWD are invalid.
"""
api = L2AAPI()

"""
Create a granuleconstraint.GranuleConstraint functor which excludes granules outside
the polygonal national park boundary. Note that the polygon is described in a json
file with a local path.
"""
with open('/Users/fcseidl/EarthLab-local/everglades-national-park_225.geojson') as f:
gj = json.load(f)
points = np.array(gj['features'][0]['geometry']['coordinates'][0]).T
points[0], points[1] = points[1], points[0].copy()
points = np.fliplr(points)
poly = Polygon(points)
granuleconstraint = RegionGC(poly, api)

"""
Plot the outline of the park.
"""
latlon = poly(np.linspace(0, poly.length(), 500))
plt.plot(latlon[1], latlon[0])
plt.show()

"""
Obtain an iterator over every L2A file in the GEDI archive with the '.xml'
extension from January 2020.
"""
urls = api.urls_in_date_range(
t_start=date(2020, 1, 1),
t_end=date(2020, 1, 31),
suffix='.xml'
)

"""
Finally, run through the iterator and print each url, along with an index and
a boolean value indicating whether the granule intersects the everglades. Note
that for the loop below to complete in under a day, map should be replaced
with, e.g. a ThreadPoolExecutor.map call to exploit parallelization.
"""
print("index, accepted, url")
n = 1
for accept, url in map(granuleconstraint, urls):
print(f"{n}, {accept}, {url}")
n += 1
4 changes: 2 additions & 2 deletions mangrovegranules.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@
"""
Specify the location of the Global Mangrove Watch data, and the number of parallel processes.
"""
#gmwdir = "/pl/active/earthlab/bioextremes/gmw_v3_2020/"; nproc = os.cpu_count()
gmwdir = "/Users/fcseidl/EarthLab-local/BioExtremes/gmw_v3_2020/"; nproc = 3
gmwdir = "/pl/active/earthlab/bioextremes/gmw_v3_2020/"; nproc = os.cpu_count()
#gmwdir = "/Users/fcseidl/EarthLab-local/BioExtremes/gmw_v3_2020/"; nproc = 3


if __name__ == "__main__":
Expand Down
Empty file removed windspeedbias.py
Empty file.
86 changes: 86 additions & 0 deletions windspeedcomparison.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
"""
This script performs a comparison between maximum wind speeds from different storms as reported in IBTrACS and ERA5
datasets.
"""

import cdsapi
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import re
from urllib.request import urlopen
import xarray as xr
from tqdm import tqdm

from Spherical.functions import addtolon


# Conversion factor: meters per second -> knots.
mps2kts = 1.94384
savepath = "/Users/fcseidl/EarthLab-local/BioExtremes/WindComparisonBasins.csv"
np.random.seed(0)  # fixed seed so the random storm sample is reproducible

# IBTrACS records where max wind and basin are recorded.
ibt = pd.read_csv('/Users/fcseidl/Downloads/ibtracs.since1980.list.v04r00.csv')
ibt = ibt[1:]  # drop the units row that follows the CSV header
ibt = ibt[ibt['WMO_WIND'] != ' ']
# Keep only rows from the seven named WMO basins.
ibt = ibt[ibt['BASIN'].isin(['NA', 'EP', 'WP', 'NI', 'SI', 'SP', 'SA'])]
nrows, _ = ibt.shape

# Copernicus client
client = cdsapi.Client(quiet=True)

# Timestamps look like 'YYYY-MM-DD hh:mm:ss'; raw string avoids invalid
# escape sequences (SyntaxWarning on Python >= 3.12).
_iso_time = re.compile(r"(\d{4})-(\d{2})-(\d{2}) (\d{2}:\d{2}):\d{2}")

# Get data from N random storms (sampling with replacement).
N = 600
idx = ibt.index[np.random.randint(0, nrows, N)]
basin = []
wmowind = []
era5wind = []
for i in tqdm(idx):
    time = ibt['ISO_TIME'][i]
    m = _iso_time.search(time)
    if m is None:
        # Malformed timestamp: skip the record entirely so that basin,
        # wmowind, and era5wind stay aligned.
        continue
    basin.append(ibt['BASIN'][i])
    wmowind.append(float(ibt['WMO_WIND'][i]))
    lat = float(ibt['LAT'][i])
    lon = float(ibt['LON'][i])
    # ERA5 instantaneous 10m wind gust in a 2-degree box around the storm
    # center at the IBTrACS observation time.
    params = {
        'product_type': 'reanalysis',
        'format': 'netcdf',
        'year': m.group(1),
        'month': m.group(2),
        'day': m.group(3),
        'time': m.group(4),
        'variable': 'instantaneous_10m_wind_gust',
        'area': [lat + 1, addtolon(lon, -1), lat - 1, addtolon(lon, 1)]
    }
    response = client.retrieve('reanalysis-era5-single-levels', params)
    with urlopen(response.location) as f:
        ds = xr.open_dataset(f.read())
    df = ds.to_dataframe().reset_index()
    # Debug aid: uncomment to plot all gust readings near the storm.
    # plt.scatter(df['longitude'], df['latitude'], c=df['i10fg'])
    # plt.colorbar()
    # plt.show()
    era5wind.append(df['i10fg'].max() * mps2kts)

# Scatter IBTrACS vs ERA5 max winds, one color per basin, with the y = x
# line for reference.
basin = np.array(basin)
wmowind = np.array(wmowind)
era5wind = np.array(era5wind)
for b in np.unique(basin):
    plt.scatter(wmowind[basin == b], era5wind[basin == b], label=b)
plt.xlabel('IBTrACS wind speed (kts)')
plt.ylabel('ERA5 wind speed (kts)')
plt.legend()
plt.plot([0, 100], [0, 100])
plt.axis('equal')
plt.show()

savedata = pd.DataFrame({'ibtracs': wmowind, 'era5': era5wind, 'basin': basin})
print(f'Saving to {savepath}')
savedata.to_csv(savepath)






0 comments on commit cc59440

Please sign in to comment.