Skip to content

Commit

Permalink
Merge pull request #367 from GEMScienceTools/last_touches
Browse files Browse the repository at this point in the history
Last touches
  • Loading branch information
kejohnso authored Nov 3, 2023
2 parents 9cb28c8 + 2875578 commit ca6eacb
Show file tree
Hide file tree
Showing 9 changed files with 300 additions and 10 deletions.
174 changes: 174 additions & 0 deletions openquake/mbi/ccl/create_sub_catalogues.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,174 @@
#!/usr/bin/env python
# coding: utf-8

import os
import h5py
import pickle
import numpy as np

from openquake.baselib import sap

from openquake.hmtk.seismicity.selector import CatalogueSelector
from openquake.hmtk.parsers.catalogue.csv_catalogue_parser import (
CsvCatalogueWriter)


def get_treg(treg_fname):
    """
    Gets the labels defining the different tectonic regions.

    :param treg_fname:
        Name of the .hdf5 file with the classification of the catalogue,
        or None (in which case no labels are returned)
    :return:
        A tuple with the list of labels and the number of earthquakes in
        the catalogue (taken as the length of a classification dataset;
        all datasets are expected to share the catalogue length)
    """

    # List with the labels of the various tectonic regions
    aaa = []
    # Number of earthquakes; stays 0 when no classification file is given
    alen = 0

    # Load the tectonic-region classification, if available.  The original
    # code crashed with a NameError when `treg_fname` was None; we now
    # return an empty label list instead.
    if treg_fname is not None:
        with h5py.File(treg_fname, 'r') as f:
            for key in f.keys():
                aaa.append(key)
                alen = len(f[key])
                print(key)
    return aaa, alen


def create_sub_catalogue(alen, aaa, pck_fname, treg_fname, out_cata, out_path):
    """
    Creates .csv files with the subcatalogues, one per tectonic region.

    :param alen:
        Number of earthquakes in the original catalogue
    :param aaa:
        List of the labels used to define the various tectonic regions
    :param pck_fname:
        Name of the file with the pickled catalogue
    :param treg_fname:
        Name of the .hdf5 file with the classification of the catalogue
    :param out_cata:
        Prefix used for the names of the output .csv subcatalogues
    :param out_path:
        Path to the output folder
    :returns:
        A :class:`numpy.ndarray` vector of length N where N is the number of
        earthquakes in the original catalogue, with a positive value for the
        events assigned to at least one tectonic region and 0 otherwise.
    """

    # The output vector flagging the classified earthquakes
    tot_lab = np.zeros(alen)

    print(' ')
    fmt = '# earthquakes in the catalogue: {:d}'
    print(fmt.format(len(tot_lab)))

    # Loop over the tectonic regions
    for label in aaa:

        # Output file name
        csv_filename = out_cata + "_TR_{:s}.csv".format(label)
        csv_filename = os.path.join(out_path, csv_filename)

        # Read the TR classification: a vector of per-earthquake flags
        with h5py.File(treg_fname, 'r') as f:
            tr = f[label][:]

        if sum(tr) > 0:

            # Mark the events belonging to this tectonic region
            tot_lab = tot_lab + tr * 1

            # Reload the catalogue at each iteration: the selector below
            # modifies it in place (create_copy=False)
            with open(pck_fname, 'rb') as fin:
                catalogue = pickle.load(fin)

            # Replace NaN values in the time fields with sensible defaults
            for lab in ['month', 'day', 'hour', 'minute', 'second']:
                idx = np.isnan(catalogue.data[lab])
                if lab == 'day' or lab == 'month':
                    catalogue.data[lab][idx] = 1
                elif lab == 'second':
                    catalogue.data[lab][idx] = 0.0
                else:
                    catalogue.data[lab][idx] = 0

            # Select the earthquakes classified in this tectonic region
            selector = CatalogueSelector(catalogue, create_copy=False)
            catalogue = selector.select_catalogue(tr)
            catalogue.data['hour'] = catalogue.data['hour'].astype(int)
            catalogue.data['minute'] = catalogue.data['minute'].astype(int)

            print(' ')
            fmt = '# earthquakes in this TR : {:d}'
            print(fmt.format(len(catalogue.data['longitude'])))

            # Write the subcatalogue
            print(csv_filename)
            csvcat = CsvCatalogueWriter(csv_filename)
            csvcat.write_file(catalogue)
            print("Catalogue successfully written to %s" % csv_filename)

    return tot_lab


def get_unclassified(tot_lab, pck_fname, out_cata, path_out):
    """
    Create a text file (.csv formatted) with the unclassified earthquakes

    :param tot_lab:
        Vector of length N (number of earthquakes in the catalogue) with a
        positive value for the classified events and 0 for the
        unclassified ones
    :param pck_fname:
        Name of the pickle file with the catalogue
    :param out_cata:
        Prefix used for the name of the .csv output catalogue
    :param path_out:
        Path to output folder
    """

    # Flags of the unclassified earthquakes: 1 where tot_lab is 0
    tr_undef = abs(tot_lab - 1)

    # Load the whole catalogue
    with open(pck_fname, 'rb') as fin:
        catalogue = pickle.load(fin)

    # Select the unclassified earthquakes (in place, no copy)
    selector = CatalogueSelector(catalogue, create_copy=False)
    catalogue = selector.select_catalogue(tr_undef)
    print('')
    print('# earthquakes: {:d}'.format(len(catalogue.data['longitude'])))

    # Sub-catalogue file name
    csv_filename = out_cata + "_TR_unclassified.csv"
    csv_filename = os.path.join(path_out, csv_filename)

    # Write the purged catalogue
    csvcat = CsvCatalogueWriter(csv_filename)
    csvcat.write_file(catalogue)
    print("Catalogue successfully written to %s" % csv_filename)


def create_sub_cata(pck_fname, treg_fname, out_cata, path_out):
    """
    Create a subcatalogue
    """
    # Read the tectonic-region labels and the catalogue size, write one
    # subcatalogue per region, then collect the leftover events
    labels, num_eqks = get_treg(treg_fname)
    classified = create_sub_catalogue(
        num_eqks, labels, pck_fname, treg_fname, out_cata, path_out)
    get_unclassified(classified, pck_fname, out_cata, path_out)

def main(pck_fname, treg_fname, *, out_cata='cat', path_out='.'):
    """
    Split a catalogue into per-tectonic-region subcatalogues (plus one
    with the unclassified events).

    :param pck_fname:
        Name of the pickle file with the catalogue
    :param treg_fname:
        Name of the .hdf5 file with the catalogue classification
    :param out_cata:
        Prefix for the output file names
    :param path_out:
        Output folder, created if missing
    """

    # Create the output folder if it doesn't exist (exist_ok avoids the
    # check-then-create race of the original code)
    os.makedirs(path_out, exist_ok=True)

    create_sub_cata(pck_fname, treg_fname, out_cata, path_out)


# Help strings for the sap command-line interface.  They must be attached
# to `main` — the function passed to sap.run() — to be picked up; the
# original code set them on `create_sub_cata`, so they were ignored.
msg = 'Name of the pickle file with the catalogue'
main.pck_fname = msg
msg = 'Name of the .hdf5 file with the catalogue classification'
main.treg_fname = msg
main.out_cata = 'Prefix for the output files [cat]'
main.path_out = 'Output path [./]'

if __name__ == "__main__":
    sap.run(main)
107 changes: 107 additions & 0 deletions openquake/mbi/ccl/decluster_multiple_TR.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
#!/usr/bin/env python
# coding: utf-8

import os
import re
import toml
import copy
from openquake.baselib import sap
from openquake.mbt.tools.model_building.dclustering import decluster


def main(config_fname, *, root=None):
    """
    Decluster a catalogue for one or more tectonic regions and
    create the declustered subcatalogues
    :param config_fname:
        name of the .toml file that includes the declustering
        settings
        Example of .toml file
        ```
        [main]
        catalogue = './input/cam.pkl'
        tr_file = './cam_ccl/classified.hdf5'
        output = './cam_ccl/dec_subcats/'
        create_subcatalogues = 'true'
        save_aftershocks = 'true'
        catalogue_add_defaults = 'true'
        [method1]
        name = 'GardnerKnopoffType1'
        params = {'time_distance_window' = 'UhrhammerWindow', 'fs_time_prop' = 0.1}
        label = 'UH'
        [case1]
        regions = ['crustal', 'int_A']
        label = 'int_cru'
        [case2]
        regions = ['slab_A']
        label = 'slab'
        ```
        Configuration keys that include the string 'method' are used to
        define the declustering parameters that should be used for all cases.
        Keys that include 'case' are a list of one or more tectonic region
        types (as defined when running classify.py) should be declustered
        together.
    :param root:
        root folder to which all other paths are relative.
        default is the current working directory
    """

    if root is None:
        root = os.getcwd()

    print('\nReading:', config_fname)
    config = toml.load(config_fname)

    # Resolve all the paths relative to the root folder
    fname_cat = os.path.join(root, config['main']['catalogue'])
    fname_reg = os.path.join(root, config['main']['tr_file'])
    fname_out = os.path.join(root, config['main']['output'])
    create_sc = config['main']['create_subcatalogues']
    save_afrs = config['main']['save_aftershocks']
    add_deflt = config['main']['catalogue_add_defaults']

    # Fail early, with an explicit message, when the inputs are missing
    assert os.path.exists(fname_cat), 'Catalogue not found: %s' % fname_cat
    assert os.path.exists(fname_reg), 'TR file not found: %s' % fname_reg
    # exist_ok avoids the check-then-create race of the original code
    os.makedirs(fname_out, exist_ok=True)

    # Collect the declustering methods, shared by all the cases below
    methods = []
    for key in config:
        if key.startswith('method'):
            method = config[key]['name']
            params = config[key]['params']
            label = config[key]['label']
            methods.append([method, params, label])

    # Decluster each case (a group of tectonic regions) with every method
    for key in config:
        if key.startswith('case'):
            print('\n Case {:s}'.format(key))
            regions = config[key]['regions']
            cat_lab = config[key]['label']

            for meth in methods:
                print('')
                # deepcopy: `decluster` may mutate the params dict
                params = copy.deepcopy(meth[1])
                _ = decluster(fname_cat, meth[0], params, fname_out, regions,
                              fname_reg, create_sc, 'csv', meth[2], save_afrs,
                              cat_lab, add_deflt)


# Help strings shown by the sap command-line interface for each argument
main.config_fname = 'Path to the configuration fname - typically a .toml file'
main.root = 'Root folder used as a reference for paths [pwd]'

if __name__ == "__main__":
    sap.run(main)
16 changes: 9 additions & 7 deletions openquake/mbi/sub/plot_geometries.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def plot_edges(foldername, plotter, p1, color):
_ = plotter.add_mesh(tube, smooth_shading=True, color=color)


def main(fname_config, plot_catalogue, *, plot_classification=False):
def main(fname_config, plot_catalogue, plot_classification):

if plot_catalogue in ['true', 'True', 'TRUE']:
plot_catalogue = True
Expand Down Expand Up @@ -126,20 +126,21 @@ def main(fname_config, plot_catalogue, *, plot_classification=False):
plt_catalogue(catalogue_filename, plotter, projection, point_size=5,
color='white')

# Open the file with the classification
classification_fname = os.path.join(rf, 'classified.hdf5')
classification = h5py.File(classification_fname, 'r')
# find classification fname
classification_fname = os.path.join(rf, config['general']['treg_filename'])

for section in config.sections():

if section == 'general':
pass
elif section == 'crustal':
mask = classification['crustal'][:]
# Plot classified earthquakes
if plot_classification:
classification = h5py.File(classification_fname, 'r')
mask = classification['crustal'][:]
# Plot classified earthquakes
plt_catalogue(catalogue_filename, plotter, projection,
mask=mask, color='cyan')
classification.close()
else:
foldername = config[section]['folder']
foldername = os.path.join(rf, foldername)
Expand All @@ -153,12 +154,13 @@ def main(fname_config, plot_catalogue, *, plot_classification=False):
plot_profiles(foldername, plotter, projection)
# Plot classified earthquakes
if plot_classification:
classification = h5py.File(classification_fname, 'r')
mask = classification[section][:]
if sum(mask) > 0:
plt_catalogue(catalogue_filename, plotter, projection,
mask=mask, color=color, point_size=15)
classification.close()

classification.close()
_ = plotter.show(interactive=True)


Expand Down
4 changes: 1 addition & 3 deletions openquake/sub/misc/edge.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,7 @@
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

EDGE_TOL = 0.2
PROF_TOL = 0.4
TOL = 0.5
TOL = 0.7


def line_between_two_points(pnt1, pnt2):
Expand Down
5 changes: 5 additions & 0 deletions openquake/sub/slab/rupture.py
Original file line number Diff line number Diff line change
Expand Up @@ -546,6 +546,11 @@ def calculate_ruptures(ini_fname, only_plt=False, ref_fdr=None, agr=None,
out_hdf5_smoothing_fname = config.get('main', 'out_hdf5_smoothing_fname')
tmps = os.path.join(ref_fdr, out_hdf5_smoothing_fname)
out_hdf5_smoothing_fname = os.path.abspath(tmps)
# create the smoothing directory if it doesn't exist
smoothing_dir = '/'.join(out_hdf5_smoothing_fname.split('/')[:-1])
if not os.path.exists(smoothing_dir):
os.makedirs(smoothing_dir)


# Tectonic regionalisation
treg_filename = config.get('main', 'treg_fname')
Expand Down
1 change: 1 addition & 0 deletions requirements_linux.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,3 +32,4 @@ pygmt
reportlab
netCDF4
igraph
pyvista
1 change: 1 addition & 0 deletions requirements_macos_arm64.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,3 +32,4 @@ pygmt
reportlab
netCDF4
igraph
pyvista
1 change: 1 addition & 0 deletions requirements_macos_x86_64.txt
Original file line number Diff line number Diff line change
Expand Up @@ -32,3 +32,4 @@ pygmt
reportlab
netCDF4
igraph
pyvista
1 change: 1 addition & 0 deletions requirements_win64.txt
Original file line number Diff line number Diff line change
Expand Up @@ -36,3 +36,4 @@ pygmt
reportlab
netCDF4
igraph
pyvista

0 comments on commit ca6eacb

Please sign in to comment.