-
Notifications
You must be signed in to change notification settings - Fork 8
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #367 from GEMScienceTools/last_touches
Last touches
- Loading branch information
Showing
9 changed files
with
300 additions
and
10 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,174 @@ | ||
#!/usr/bin/env python | ||
# coding: utf-8 | ||
|
||
import os | ||
import h5py | ||
import pickle | ||
import numpy as np | ||
|
||
from openquake.baselib import sap | ||
|
||
from openquake.hmtk.seismicity.selector import CatalogueSelector | ||
from openquake.hmtk.parsers.catalogue.csv_catalogue_parser import ( | ||
CsvCatalogueWriter) | ||
|
||
|
||
def get_treg(treg_fname):
    """
    Gets the labels defining the different tectonic regions.

    :param treg_fname:
        Name of the .hdf5 file with the classification of the catalogue,
        or None (in which case no labels are returned)
    :return:
        A tuple with the list of labels and the number of earthquakes in
        the catalogue (0 when `treg_fname` is None or the file is empty)
    """

    # List with the labels of the various tectonic regions
    aaa = []

    # Number of earthquakes in the catalogue. Initialised here: the
    # original only set it inside the loop, so a missing file name or an
    # empty .hdf5 raised NameError at the return statement.
    alen = 0

    # Load the TR classification. Each dataset has one entry per
    # earthquake, hence the length of any of them is the catalogue size.
    if treg_fname is not None:
        with h5py.File(treg_fname, 'r') as f:
            for key in f.keys():
                aaa.append(key)
                alen = len(f[key])
                print(key)

    return aaa, alen
|
||
|
||
def create_sub_catalogue(alen, aaa, pck_fname, treg_fname, out_cata, out_path):
    """
    Creates .csv files with the subcatalogues

    :param alen:
        Number of earthquakes in the original catalogue
    :param aaa:
        List of the labels used to define the various tectonic regions
    :param pck_fname:
        Name of the file with the pickled catalogue
    :param treg_fname:
        Name of the .hdf5 file with the classification of the catalogue
    :param out_cata:
        Prefix used to build the names of the output .csv files
    :param out_path:
        Path to the folder where the output .csv files are written
    :returns:
        A :class:`numpy.ndarray` vector of length N where N is the number of
        earthquakes in the original catalogue. An entry is > 0 when the
        corresponding earthquake was assigned to at least one tectonic
        region.
    """

    # The output vector: accumulates, per earthquake, how many tectonic
    # regions claimed it
    tot_lab = np.zeros(alen)

    print(' ')
    fmt = '# earthquakes in the catalogue: {:d}'
    print(fmt.format(len(tot_lab)))

    # Loop over the tectonic regions
    for label in (aaa):

        # Output file name
        csv_filename = out_cata + "_TR_{:s}.csv".format(label)
        csv_filename = os.path.join(out_path, csv_filename)

        # Read the TR classification: one flag per earthquake, non-zero
        # when the event belongs to this tectonic region
        f = h5py.File(treg_fname, 'r')
        tr = f[label][:]
        f.close()

        # Skip regions with no earthquakes assigned
        if sum(tr) > 0:
            tmp_lab = tr*1
            tot_lab = tot_lab+tmp_lab
            # Reload the full catalogue each iteration: the selector
            # below filters it in place (create_copy=False)
            catalogue = pickle.load(open(pck_fname, 'rb'))
            # Replace NaN date/time fields with defaults so the .csv
            # writer gets complete records
            for lab in ['month', 'day', 'hour', 'minute', 'second']:
                idx = np.isnan(catalogue.data[lab])
                if lab == 'day' or lab == 'month':
                    catalogue.data[lab][idx] = 1
                elif lab == 'second':
                    catalogue.data[lab][idx] = 0.0
                else:
                    catalogue.data[lab][idx] = 0
            # Keep only the earthquakes classified in this region
            selector = CatalogueSelector(catalogue, create_copy=False)
            catalogue = selector.select_catalogue(tr)
            catalogue.data['hour'] = catalogue.data['hour'].astype(int)
            catalogue.data['minute'] = catalogue.data['minute'].astype(int)

            print(' ')
            fmt = '# earthquakes in this TR : {:d}'
            print(fmt.format(len(catalogue.data['longitude'])))

            # Sub-catalogue
            print(csv_filename)
            csvcat = CsvCatalogueWriter(csv_filename)

            # Write the purged catalogue
            csvcat.write_file(catalogue)
            print("Catalogue successfully written to %s" % csv_filename)

    return tot_lab
|
||
|
||
def get_unclassified(tot_lab, pck_fname, out_cata, path_out):
    """
    Create a text file (.csv formatted) with the unclassified earthquakes

    :param tot_lab:
        A :class:`numpy.ndarray` vector with one entry per earthquake in
        the original catalogue; entries > 0 mark classified events
    :param pck_fname:
        Name of the pickle file with the catalogue
    :param out_cata:
        Prefix of the .csv output catalogue
    :param path_out:
        Path to output folder
    """

    # Selection vector: 1 for earthquakes not assigned to any tectonic
    # region, 0 for classified ones.
    # NOTE(review): this assumes each event is assigned to at most one
    # region - an event with tot_lab == 2 would give abs(2-1) == 1 and
    # be re-selected here as "unclassified"; confirm with the classifier.
    tr_undef = abs(tot_lab-1)

    # Load the catalogue; `with` guarantees the handle is closed (the
    # original `pickle.load(open(...))` left closing to the GC)
    with open(pck_fname, 'rb') as fin:
        catalogue = pickle.load(fin)

    # Select the unclassified
    selector = CatalogueSelector(catalogue, create_copy=False)
    catalogue = selector.select_catalogue(tr_undef)
    print('')
    print('# earthquakes: {:d}'.format(len(catalogue.data['longitude'])))

    # Sub-catalogue
    csv_filename = out_cata + "_TR_unclassified.csv"
    csv_filename = os.path.join(path_out, csv_filename)

    # Write the purged catalogue
    csvcat = CsvCatalogueWriter(csv_filename)
    csvcat.write_file(catalogue)
    print("Catalogue successfully written to %s" % csv_filename)
|
||
|
||
def create_sub_cata(pck_fname, treg_fname, out_cata, path_out):
    """
    Create the per-tectonic-region subcatalogues and the catalogue with
    the earthquakes left unclassified.
    """
    # Labels of the tectonic regions and size of the original catalogue
    labels, cat_size = get_treg(treg_fname)

    # Write one .csv subcatalogue per tectonic region; the returned
    # vector records which earthquakes were classified
    classified = create_sub_catalogue(
        cat_size, labels, pck_fname, treg_fname, out_cata, path_out)

    # Write the remaining (unclassified) earthquakes
    get_unclassified(classified, pck_fname, out_cata, path_out)
|
||
def main(pck_fname, treg_fname, *, out_cata='cat', path_out='.'):
    """
    Create one .csv subcatalogue per tectonic region in the
    classification file, plus one with the unclassified earthquakes.

    :param pck_fname:
        Name of the pickle file with the catalogue
    :param treg_fname:
        Name of the .hdf5 file with the catalogue classification
    :param out_cata:
        Prefix for the output file names
    :param path_out:
        Output folder, created if it does not exist
    """

    # `exist_ok` replaces the original exists()-then-makedirs sequence,
    # which could race with concurrent creation of the folder
    os.makedirs(path_out, exist_ok=True)

    create_sub_cata(pck_fname, treg_fname, out_cata, path_out)
|
||
|
||
# CLI help strings read by `sap.run`. They must be attached to the
# function actually passed to `sap.run` - i.e. `main`, not the
# `create_sub_cata` helper the original assigned them to (the sibling
# decluster script follows the `main.<param> = ...` convention).
msg = 'Name of the pickle file with the catalogue'
main.pck_fname = msg
msg = 'Name of the .hdf5 file with the catalogue classification'
main.treg_fname = msg
main.out_cata = 'Prefix in for the output files [cat]'
main.path_out = 'Output path [./]'

if __name__ == "__main__":
    sap.run(main)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,107 @@ | ||
#!/usr/bin/env python | ||
# coding: utf-8 | ||
|
||
import os | ||
import re | ||
import toml | ||
import copy | ||
from openquake.baselib import sap | ||
from openquake.mbt.tools.model_building.dclustering import decluster | ||
|
||
|
||
def main(config_fname, *, root=None):
    """
    Decluster a catalogue for one or more tectonic regions and
    create the declustered subcatalogues

    :param config_fname:
        name of the .toml file that includes the declustering
        settings

        Example of .toml file

        ```
        [main]
        catalogue = './input/cam.pkl'
        tr_file = './cam_ccl/classified.hdf5'
        output = './cam_ccl/dec_subcats/'
        create_subcatalogues = 'true'
        save_aftershocks = 'true'
        catalogue_add_defaults = 'true'

        [method1]
        name = 'GardnerKnopoffType1'
        params = {'time_distance_window' = 'UhrhammerWindow', 'fs_time_prop' = 0.1}
        label = 'UH'

        [case1]
        regions = ['crustal', 'int_A']
        label = 'int_cru'

        [case2]
        regions = ['slab_A']
        label = 'slab'
        ```

        Configuration keys that include the string 'method' define the
        declustering parameters used for all cases. Keys that include
        'case' list one or more tectonic region types (as defined when
        running classify.py) that should be declustered together.
    :param root:
        root folder to which all other paths are relative.
        default is the current working directory
    :raises FileNotFoundError:
        when the catalogue or the classification file does not exist
    """

    if root is None:
        root = os.getcwd()

    print('\nReading:', config_fname)
    config = toml.load(config_fname)

    # All the paths in the configuration are resolved relative to `root`
    fname_cat = os.path.join(root, config['main']['catalogue'])
    fname_reg = os.path.join(root, config['main']['tr_file'])
    fname_out = os.path.join(root, config['main']['output'])
    # NOTE(review): in the example these flags are quoted strings; any
    # non-empty string (even 'false') is truthy in Python - confirm how
    # `decluster` interprets them before relying on 'false' here
    create_sc = config['main']['create_subcatalogues']
    save_afrs = config['main']['save_aftershocks']
    add_deflt = config['main']['catalogue_add_defaults']

    # Validate the inputs explicitly: the original used `assert`, which
    # is stripped when Python runs with -O and would skip these checks
    if not os.path.exists(fname_cat):
        raise FileNotFoundError('missing catalogue: {:s}'.format(fname_cat))
    if not os.path.exists(fname_reg):
        raise FileNotFoundError('missing tr_file: {:s}'.format(fname_reg))
    # `exist_ok` avoids the check-then-create race of the original
    os.makedirs(fname_out, exist_ok=True)

    # Collect the declustering methods; every method is applied to
    # every case below
    methods = []
    for key in config:
        if re.search('^method', key):
            method = config[key]['name']
            params = config[key]['params']
            label = config[key]['label']
            methods.append([method, params, label])

    # Decluster each case (a group of tectonic regions declustered
    # together) with each configured method
    for key in config:
        if re.search('^case', key):
            print('\n Case {:s}'.format(key))
            regions = config[key]['regions']
            cat_lab = config[key]['label']

            for meth in methods:
                print('')
                # Deep-copied - presumably because `decluster` mutates
                # the parameter dict; confirm before removing the copy
                params = copy.deepcopy(meth[1])
                _ = decluster(fname_cat, meth[0], params, fname_out, regions,
                              fname_reg, create_sc, 'csv', meth[2], save_afrs,
                              cat_lab, add_deflt)
|
||
|
||
# CLI help strings read by `sap.run` for the arguments of `main`
MSG = 'Path to the configuration fname - typically a .toml file'
main.config_fname = MSG
MSG = 'Root folder used as a reference for paths [pwd]'
main.root = MSG

if __name__ == "__main__":
    sap.run(main)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -32,3 +32,4 @@ pygmt | |
reportlab | ||
netCDF4 | ||
igraph | ||
pyvista |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -32,3 +32,4 @@ pygmt | |
reportlab | ||
netCDF4 | ||
igraph | ||
pyvista |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -32,3 +32,4 @@ pygmt | |
reportlab | ||
netCDF4 | ||
igraph | ||
pyvista |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -36,3 +36,4 @@ pygmt | |
reportlab | ||
netCDF4 | ||
igraph | ||
pyvista |