Skip to content

Commit

Permalink
Bugfixes Phase Picks
Browse files Browse the repository at this point in the history
* bugfixes: upscaling and timing

* picks: filtering after event time
  • Loading branch information
miili authored Mar 15, 2024
1 parent bdb8bd2 commit c832c32
Show file tree
Hide file tree
Showing 19 changed files with 225 additions and 95 deletions.
8 changes: 2 additions & 6 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,11 @@ repos:
- id: end-of-file-fixer
- id: check-added-large-files
- id: mixed-line-ending
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: v0.3.0
hooks:
- id: ruff
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.3.0
rev: v0.3.2
hooks:
- id: ruff
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-clang-format
rev: v17.0.6
Expand Down
Binary file added docs/images/delay-volume.webp
Binary file not shown.
Binary file added docs/images/station-delay-times.webp
Binary file not shown.
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@ extend-select = [
'C4',
'I',
'RUF',
'T20',
]

ignore = ["RUF012", "RUF009"]
Expand Down
94 changes: 66 additions & 28 deletions src/qseek/apps/qseek.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,20 +46,20 @@
subparsers.add_parser(
"config",
help="print a new config",
description="initialze a new default config configuration file.",
description="Print a new default configuration file.",
)

run = subparsers.add_parser(
search = subparsers.add_parser(
"search",
help="start a search",
description="detect, localize and characterize earthquakes in a dataset",
)
config_arg = run.add_argument(
search_config = search.add_argument(
"config",
type=Path,
help="path to config file",
)
run.add_argument(
search.add_argument(
"--force",
action="store_true",
default=False,
Expand All @@ -68,47 +68,71 @@

continue_run = subparsers.add_parser(
"continue",
help="continue an aborted run",
description="continue a run from an existing rundir",
help="continue an existing search",
description="Continue a run from an existing rundir",
)
rundir_continue = continue_run.add_argument(
continue_rundir = continue_run.add_argument(
"rundir",
type=Path,
help="existing rundir to continue",
)

features_extract = subparsers.add_parser(
"feature-extraction",
help="extract features from an existing run",
description="modify the search.json for re-evaluation of the event's features",
snuffler = subparsers.add_parser(
"snuffler",
help="start the Pyrocko snuffler to inspect waveforms, events and picks",
description="Start the snuffler to inspect and validate "
"the detections and waveforms for an existing run",
)
rundir_features = features_extract.add_argument(
snuffler_rundir = snuffler.add_argument(
"rundir",
type=Path,
help="path of existing run",
help="path of the existing rundir",
)
snuffler.add_argument(
"--show-picks",
action="store_true",
default=False,
help="load and show picks in snuffler",
)
snuffler.add_argument(
"--show-semblance",
action="store_true",
default=False,
help="show semblance trace in snuffler",
)

station_corrections = subparsers.add_parser(
"corrections",
help="analyse station corrections from existing run",
description="analyze and plot station corrections from a finished run",
help="analyse and extract station corrections from a run",
description="Analyze and plot station corrections from a finished run",
)
station_corrections.add_argument(
"--plot",
action="store_true",
default=False,
help="plot station correction results and save to rundir",
)
rundir_corrections = station_corrections.add_argument(
corrections_rundir = station_corrections.add_argument(
"rundir",
type=Path,
help="path of existing run",
)

features_extract = subparsers.add_parser(
"feature-extraction",
help="extract features from an existing run",
description="Modify the search.json for re-evaluation of the event's features",
)
features_rundir = features_extract.add_argument(
"rundir",
type=Path,
help="path of existing run",
)

modules = subparsers.add_parser(
"modules",
help="list available modules",
description="list all available modules",
help="show available modules",
description="Show all available modules",
)
modules.add_argument(
"--json",
Expand All @@ -121,24 +145,25 @@
serve = subparsers.add_parser(
"serve",
help="start webserver and serve results from an existing run",
description="start a webserver and serve detections and results from a run",
description="Start a webserver and serve detections and results from a run",
)
serve.add_argument(
"rundir",
type=Path,
help="rundir to serve",
)


subparsers.add_parser(
"clear-cache",
help="clear the cache directory",
description="clear all data in the cache directory",
description="Clear all data in the cache directory",
)

dump_schemas = subparsers.add_parser(
"dump-schemas",
help="dump data models to json-schema (development)",
description="dump data models to json-schema, "
description="Dump data models to json-schema, "
"this is for development purposes only",
)
dump_dir = dump_schemas.add_argument(
Expand All @@ -152,10 +177,11 @@
import argcomplete
from argcomplete.completers import DirectoriesCompleter, FilesCompleter

config_arg.completer = FilesCompleter(["*.json"])
rundir_continue.completer = DirectoriesCompleter()
rundir_features.completer = DirectoriesCompleter()
rundir_corrections.completer = DirectoriesCompleter()
search_config.completer = FilesCompleter(["*.json"])
continue_rundir.completer = DirectoriesCompleter()
snuffler_rundir.completer = DirectoriesCompleter()
features_rundir.completer = DirectoriesCompleter()
corrections_rundir.completer = DirectoriesCompleter()
dump_dir.completer = DirectoriesCompleter()

argcomplete.autocomplete(parser)
Expand Down Expand Up @@ -215,6 +241,18 @@ async def run() -> None:

asyncio.run(run(), debug=loop_debug)

case "snuffler":
search = Search.load_rundir(args.rundir)
squirrel = search.data_provider.get_squirrel()
show_picks = args.show_picks
if args.show_semblance:
squirrel.add([str(search._rundir / "semblance.mseed")])
squirrel.snuffle(
events=None if show_picks else search.catalog.as_pyrocko_events(),
markers=search.catalog.get_pyrocko_markers() if show_picks else None,
stations=search.stations.as_pyrocko_stations(),
)

case "feature-extraction":
search = Search.load_rundir(args.rundir)
search.data_provider.prepare(search.stations)
Expand All @@ -238,8 +276,8 @@ async def extract() -> None:
event = await result
if event.magnitudes:
for mag in event.magnitudes:
print(f"{mag.magnitude} {mag.average:.2f}±{mag.error:.2f}")
print("--")
print(f"{mag.magnitude} {mag.average:.2f}±{mag.error:.2f}") # noqa: T201
print("--") # noqa: T201

await search._catalog.save()
await search._catalog.export_detections(
Expand Down Expand Up @@ -353,7 +391,7 @@ def is_insight(module: type) -> bool:
raise EnvironmentError(f"folder {args.folder} does not exist")

file = args.folder / "search.schema.json"
print(f"writing JSON schemas to {args.folder}")
print(f"writing JSON schemas to {args.folder}") # noqa: T201
file.write_text(json.dumps(Search.model_json_schema(), indent=2))

file = args.folder / "detections.schema.json"
Expand Down
35 changes: 23 additions & 12 deletions src/qseek/images/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from pydantic import BaseModel, Field

from qseek.models.station import Stations
from qseek.utils import PhaseDescription, downsample
from qseek.utils import PhaseDescription, resample

if TYPE_CHECKING:
from datetime import datetime, timedelta
Expand All @@ -32,9 +32,16 @@ async def process_traces(self, traces: list[Trace]) -> list[WaveformImage]: ...
def name(self) -> str:
return self.__class__.__name__

@property
def blinding(self) -> timedelta:
"""Blinding duration for the image function. Added to padded waveforms."""
def get_blinding(self, sampling_rate: float) -> timedelta:
"""
Blinding duration for the image function. Added to padded waveforms.
Args:
sampling_rate (float): The sampling rate of the waveform.
Returns:
timedelta: The blinding duration for the image function.
"""
raise NotImplementedError("must be implemented by subclass")

def get_provided_phases(self) -> tuple[PhaseDescription, ...]: ...
Expand Down Expand Up @@ -64,23 +71,22 @@ def set_stations(self, stations: Stations) -> None:
"""Set stations from the image's available traces."""
self.stations = stations.select_from_traces(self.traces)

def downsample(self, sampling_rate: float, max_normalize: bool = False) -> None:
"""Downsample traces in-place.
def resample(self, sampling_rate: float, max_normalize: bool = False) -> None:
"""Resample traces in-place.
Args:
sampling_rate (float): Desired sampling rate in Hz.
max_normalize (bool): Normalize by maximum value to keep the scale of the
maximum detection. Defaults to False.
"""
if sampling_rate >= self.sampling_rate:
return
downsample = self.sampling_rate > sampling_rate

for tr in self.traces:
if max_normalize:
# We can use maximum here since the PhaseNet output is single-sided
_, max_value = tr.max()
downsample(tr, sampling_rate)
resample(tr, sampling_rate)

if max_normalize:
if max_normalize and downsample:
tr.ydata /= tr.ydata.max()
tr.ydata *= max_value

Expand All @@ -101,8 +107,8 @@ def get_offsets(self, reference: datetime) -> np.ndarray:
Returns:
np.ndarray: Integer offset towards the reference for each trace.
"""
offsets = np.fromiter((tr.tmin for tr in self.traces), float)
return np.round((offsets - reference.timestamp()) / self.delta_t).astype(
trace_tmins = np.fromiter((tr.tmin for tr in self.traces), float)
return np.round((trace_tmins - reference.timestamp()) / self.delta_t).astype(
np.int32
)

Expand All @@ -120,6 +126,7 @@ def apply_exponent(self, exponent: float) -> None:
def search_phase_arrival(
self,
trace_idx: int,
event_time: datetime,
modelled_arrival: datetime,
search_window_seconds: float = 5,
threshold: float = 0.1,
Expand All @@ -128,6 +135,7 @@ def search_phase_arrival(
Args:
trace_idx (int): Index of the trace.
event_time (datetime): Time of the event.
modelled_arrival (datetime): Time to search around.
search_length_seconds (float, optional): Total search length in seconds
around modelled arrival time. Defaults to 5.
Expand All @@ -140,13 +148,15 @@ def search_phase_arrival(

def search_phase_arrivals(
self,
event_time: datetime,
modelled_arrivals: list[datetime | None],
search_window_seconds: float = 5.0,
threshold: float = 0.1,
) -> list[ObservedArrival | None]:
"""Search for a peak in all station's image functions.
Args:
event_time (datetime): Time of the event.
modelled_arrivals (list[datetime]): Time to search around.
search_length_seconds (float, optional): Total search length in seconds
around modelled arrival time. Defaults to 5.
Expand All @@ -158,6 +168,7 @@ def search_phase_arrivals(
return [
self.search_phase_arrival(
idx,
event_time,
modelled_arrival,
search_window_seconds=search_window_seconds,
threshold=threshold,
Expand Down
10 changes: 5 additions & 5 deletions src/qseek/images/images.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,8 +143,8 @@ def get_phases(self) -> tuple[PhaseDescription, ...]:
"""
return tuple(chain.from_iterable(image.get_provided_phases() for image in self))

def get_blinding(self) -> timedelta:
return max(image.blinding for image in self)
def get_blinding(self, sampling_rate: float) -> timedelta:
return max(image.get_blinding(sampling_rate) for image in self)

def __iter__(self) -> Iterator[ImageFunction]:
return iter(self.root)
Expand All @@ -164,16 +164,16 @@ def n_stations(self) -> int:
"""Number of stations in the images."""
return max(0, *(image.stations.n_stations for image in self if image.stations))

def downsample(self, sampling_rate: float, max_normalize: bool = False) -> None:
"""Downsample traces in-place.
def resample(self, sampling_rate: float, max_normalize: bool = False) -> None:
"""Resample traces in-place.
Args:
sampling_rate (float): Desired sampling rate in Hz.
max_normalize (bool): Normalize by maximum value to keep the scale of the
maximum detection. Defaults to False
"""
for image in self:
image.downsample(sampling_rate, max_normalize)
image.resample(sampling_rate, max_normalize)

def apply_exponent(self, exponent: float) -> None:
"""Apply exponent to all images.
Expand Down
Loading

0 comments on commit c832c32

Please sign in to comment.