Compare commits


25 Commits

Author SHA1 Message Date
41c9183be3 Merge branch 'refs/heads/correlation_picker' into develop 2024-09-12 16:24:50 +02:00
28f75cedcb Merge branch 'refs/heads/develop' into correlation_picker 2024-09-11 10:31:50 +02:00
424d42aa1c Merge branch 'refs/heads/develop' into correlation_picker
# Conflicts:
#	pylot/core/pick/charfuns.py
#	tests/test_autopicker/pylot_alparray_mantle_corr_stack_0.03-0.5.in
#	tests/test_autopicker/test_autopylot.py
2024-08-29 16:37:15 +02:00
2cea10088d [update] added test for AutoPyLoT, added test files for correlation picker as well 2024-08-29 16:35:04 +02:00
466f19eb2e [bugfix] fixed import for tukey in newer scipy versions 2024-08-28 18:01:03 +02:00
5d90904838 Merge branch 'develop' into correlation_picker 2024-08-27 17:46:21 +02:00
29107ee40c [update] WIP: adding tests for autopylot (global) 2024-08-26 17:18:41 +02:00
db11e125c0 [todos] add todos 2024-08-09 16:53:21 +02:00
b59232d77b [bugfix] function name accidentally overwritten on parameter renaming 2024-08-09 16:52:57 +02:00
176e93d833 [refactor] finished annotations (type hints) 2024-08-09 16:52:32 +02:00
759e7bb848 [bugfix] partially reverted signature of an inner function with shadowed variable name
[refactor] minor
2024-08-09 16:24:40 +02:00
61c3f40063 Merge branch 'develop' into correlation_picker 2024-08-09 15:50:46 +02:00
67f34cc871 Merge branch 'develop' into correlation_picker
# Conflicts:
#	pylot.yml
#	requirements.txt
2024-08-09 15:05:30 +02:00
f4f48a930f [refactor] moved unittest to existing test folder 2024-08-09 15:03:55 +02:00
a068bb8457 [update] refactoring, added type hints 2024-08-08 16:49:15 +02:00
452f2a2e18 [bugfix] test raised different Exception than planned 2024-08-08 14:41:16 +02:00
c3a2ef5022 [minor] changed test to be approximately equal to test result on different machine 2024-08-08 11:28:10 +02:00
8e7bd87711 [new] added some unit tests for correlation picker (WIP) 2024-08-07 17:11:27 +02:00
d5817adc46 [merge] changes to correlation picker from different machines that were not committed 2024-08-07 10:17:35 +02:00
14f01ec46d Merge branch 'correlation_picker' of git.geophysik.ruhr-uni-bochum.de:marcel/pylot into correlation_picker 2024-08-07 10:08:57 +02:00
1b074d14ff [update] WIP: Adding type hints, docstrings etc. 2024-08-06 16:03:50 +02:00
ce71c549ca [bugfix] removed parameter that was re-introduced accidentally from manual merge 2024-08-06 16:03:16 +02:00
c4220b389e Merge branch 'correlation_picker' of git.geophysik.ruhr-uni-bochum.de:marcel/pylot into correlation_picker 2024-07-25 15:36:06 +02:00
0f29d0e20d [minor] small modifications (naming conventions) 2024-07-25 14:50:40 +02:00
cdcd226c87 [initial] adding files from correlation picker 2024-07-24 14:07:13 +02:00
13 changed files with 2288 additions and 8 deletions

View File

@@ -2196,7 +2196,8 @@ class MainWindow(QMainWindow):
         if event.pylot_autopicks:
             self.drawPicks(picktype='auto')
         if event.pylot_picks or event.pylot_autopicks:
-            self.locateEventAction.setEnabled(True)
+            if not self._inputs.get('extent') == 'global':
+                self.locateEventAction.setEnabled(True)
             self.qualities_action.setEnabled(True)
             self.eventlist_xml_action.setEnabled(True)
@@ -2631,7 +2632,6 @@ class MainWindow(QMainWindow):
                           picks=self.getPicksOnStation(station, 'manual'),
                           autopicks=self.getPicksOnStation(station, 'auto'),
                           metadata=self.metadata, event=event,
-                          model=self.inputs.get('taup_model'),
                           filteroptions=self.filteroptions, wftype=wftype,
                           show_comp_data=self.dataPlot.comp_checkbox.isChecked())
         if self.filterActionP.isChecked():

View File

@@ -9,7 +9,7 @@ PyLoT - the Python picking and Localization Tool
 This python library contains a graphical user interfaces for picking
 seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
-and the Qt4 libraries to be installed first.
+and the Qt libraries to be installed first.
 PILOT has been developed in Mathworks' MatLab. In order to distribute
 PILOT without facing portability problems, it has been decided to re-

View File

@@ -501,7 +501,7 @@ defaults = {'datapath': {'type': str,
             'taup_model': {'type': str,
                            'tooltip': 'Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6',
-                           'value': None,
+                           'value': 'iasp91',
                            'namestring': 'TauPy model'},
             'taup_phases': {'type': str,

View File

@@ -21,6 +21,7 @@ try:
    from scipy.signal import tukey
except ImportError:
    from scipy.signal.windows import tukey
from obspy.core import Stream
from pylot.core.pick.utils import PickingFailedException

View File

@@ -521,7 +521,6 @@ class Array_map(QtWidgets.QWidget):
                               picks=self._parent.get_current_event().getPick(station),
                               autopicks=self._parent.get_current_event().getAutopick(station),
                               filteroptions=self._parent.filteroptions, metadata=self.metadata,
-                              model=self.parameter.get('taup_model'),
                               event=pyl_mw.get_current_event())
         except Exception as e:
             message = 'Could not generate Plot for station {st}.\n {er}'.format(st=station, er=e)

View File

@@ -1871,13 +1871,14 @@ class PickDlg(QDialog):
     def __init__(self, parent=None, data=None, data_compare=None, station=None, network=None, location=None, picks=None,
                  autopicks=None, rotate=False, parameter=None, embedded=False, metadata=None, show_comp_data=False,
-                 event=None, filteroptions=None, model=None, wftype=None):
+                 event=None, filteroptions=None, wftype=None):
         super(PickDlg, self).__init__(parent, Qt.Window)
         self.orig_parent = parent
         self.setAttribute(Qt.WA_DeleteOnClose)
         # initialize attributes
         self.parameter = parameter
+        model = self.parameter.get('taup_model')
         self._embedded = embedded
         self.showCompData = show_comp_data
         self.station = station
@@ -2270,8 +2271,8 @@ class PickDlg(QDialog):
             arrivals = func[plot](source_origin.depth,
                                   source_origin.latitude,
                                   source_origin.longitude,
-                                  station_coords['latitude'],
-                                  station_coords['longitude'],
+                                  station_coords.get('latitude'),
+                                  station_coords.get('longitude'),
                                   phases)
             self.arrivals = arrivals
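
Side note on the call above: the positional argument order matches ObsPy's TauPy interface. A minimal, self-contained sketch of the equivalent call with get_travel_times_geo, assuming the new 'iasp91' default introduced in the defaults change above (source and station coordinates are made-up example values):

# Illustration only: coordinates are invented; 'iasp91' is the new default
# value of the 'taup_model' parameter from the change shown above.
from obspy.taup import TauPyModel

model = TauPyModel('iasp91')
arrivals = model.get_travel_times_geo(source_depth_in_km=10.0,
                                      source_latitude_in_deg=46.0,
                                      source_longitude_in_deg=12.0,
                                      receiver_latitude_in_deg=47.5,
                                      receiver_longitude_in_deg=19.0,
                                      phase_list=['P', 'S'])
for arr in arrivals:
    print(arr.name, arr.time)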

View File

@@ -0,0 +1,2 @@
# -*- coding: utf-8 -*-
#

View File

@@ -0,0 +1,90 @@
############################# correlation parameters #####################################
# min_corr_stacking: minimum correlation coefficient for building beam trace
# min_corr_export: minimum correlation coefficient for pick export
# min_stack: minimum number of stations for building beam trace
# t_before: correlation window before pick
# t_after: correlation window after pick
# cc_maxlag: maximum shift for initial correlation
# cc_maxlag2: maximum shift for second (final) correlation (also for calculating pick uncertainty)
# initial_pick_outlier_threshold: (hopefully) threshold for excluding large outliers of initial (AIC) picks
# export_threshold: automatically exclude all onsets which deviate more than this threshold from corrected taup onsets
# min_picks_export: minimum number of correlated picks for export
# min_picks_autopylot: minimum number of reference autopicks required to continue with the event
# check_RMS: do RMS check to search for restitution errors (very experimental)
# use_taupy_onsets: use taupy onsets as reference picks instead of external picks
# station_list: use the following stations as reference for stacking
# use_stacked_trace: use existing stacked trace if found (spares re-computation)
# data_dir: obspyDMT data subdirectory (e.g. 'raw', 'processed')
# pickfile_extension: use quakeML files (PyLoT output) with the following extension, e.g. '_autopylot' for pickfiles
# such as 'PyLoT_20170501_141822_autopylot.xml'
logging: info
pick_phases: ['P', 'S']

# P-phase
P:
  min_corr_stacking: 0.8
  min_corr_export: 0.6
  min_stack: 20
  t_before: 30.
  t_after: 50.
  cc_maxlag: 50.
  cc_maxlag2: 5.
  initial_pick_outlier_threshold: 30.
  export_threshold: 2.5
  min_picks_export: 100
  min_picks_autopylot: 50
  check_RMS: True
  use_taupy_onsets: False
  station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
  use_stacked_trace: False
  data_dir: 'processed'
  pickfile_extension: '_autopylot'
  dt_stacking: [250, 250]

  # filter for first correlation (rough)
  filter_options:
    freqmax: 0.5
    freqmin: 0.03

  # filter for second correlation (fine)
  filter_options_final:
    freqmax: 0.5
    freqmin: 0.03

  filter_type: bandpass
  sampfreq: 20.0

# S-phase
S:
  min_corr_stacking: 0.7
  min_corr_export: 0.6
  min_stack: 20
  t_before: 60.
  t_after: 60.
  cc_maxlag: 100.
  cc_maxlag2: 25.
  initial_pick_outlier_threshold: 30.
  export_threshold: 5.0
  min_picks_export: 200
  min_picks_autopylot: 50
  check_RMS: True
  use_taupy_onsets: False
  station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
  use_stacked_trace: False
  data_dir: 'processed'
  pickfile_extension: '_autopylot'
  dt_stacking: [250, 250]

  # filter for first correlation (rough)
  filter_options:
    freqmax: 0.1
    freqmin: 0.01

  # filter for second correlation (fine)
  filter_options_final:
    freqmax: 0.2
    freqmin: 0.01

  filter_type: bandpass
  sampfreq: 20.0
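
As a side note, a minimal sketch of how a per-phase parameter file of this shape could be loaded and its rough filter applied with ObsPy; the file name 'parameters.yaml' and this loader are illustrative assumptions, not the loader actually used by pick_correlation_correction.py:

# Illustration only: 'parameters.yaml' is an assumed file name; the real
# correlation picker reads its parameters through its own code path.
import yaml
from obspy import read

with open('parameters.yaml') as fid:
    params = yaml.safe_load(fid)

p_params = params['P']                    # phase-specific block
rough = p_params['filter_options']        # rough (first correlation) filter

st = read()                               # ObsPy example stream as stand-in data
st.filter('bandpass', freqmin=rough['freqmin'], freqmax=rough['freqmax'])
print(p_params['min_corr_stacking'], rough)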

File diff suppressed because it is too large

View File

@@ -0,0 +1,40 @@
#!/bin/bash
#ulimit -s 8192
#ulimit -v $(ulimit -v | awk '{printf("%d",$1*0.95)}')
#ulimit -v
#655360
source /opt/anaconda3/etc/profile.d/conda.sh
conda activate pylot_311
NSLOTS=20
#qsub -l low -cwd -l "os=*stretch" -pe smp 40 submit_pick_corr_correction.sh
#$ -l low
#$ -l h_vmem=6G
#$ -cwd
#$ -pe smp 20
#$ -N corr_pick
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"
#export MKL_NUM_THREADS=${NSLOTS:=1}
#export NUMEXPR_NUM_THREADS=${NSLOTS:=1}
#export OMP_NUM_THREADS=${NSLOTS:=1}
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M6.0-6.5' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
#python pick_correlation_correction.py 'H:\sciebo\dmt_database' 'H:\Sciebo\dmt_database\pylot_alparray_mantle_corr_S_0.01-0.2.in' -pd -n 4 -t
pylot_infile='/home/marcel/.pylot/pylot_alparray_syn_fwi_mk6_it3.in'
#pylot_infile='/home/marcel/.pylot/pylot_adriaarray_corr_P_and_S.in'
# THIS SCRIPT SHOULD BE CALLED BY "submit_to_grid_engine.py" using the following line:
python pick_correlation_correction.py $1 $pylot_infile -pd -n ${NSLOTS:=1} -istart $2 --params 'parameters_fwi_mk6_it3.yaml'
#--event_blacklist eventlist.txt

View File

@@ -0,0 +1,23 @@
#!/usr/bin/env python
import subprocess
fnames = [
    ('/data/AlpArray_Data/dmt_database_synth_model_mk6_it3_no_rotation', 0),
]
#fnames = [('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_SKS-phase', 0),
# ('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_S-phase', 0),]
####
script_location = '/home/marcel/VersionCtrl/git/code_base/correlation_picker/submit_pick_corr_correction.sh'
####
for fnin, istart in fnames:
    input_cmds = f'qsub -q low.q@minos15,low.q@minos14,low.q@minos13,low.q@minos12,low.q@minos11 {script_location} {fnin} {istart}'

    print(input_cmds)
    print(subprocess.check_output(input_cmds.split()))

View File

@@ -0,0 +1,61 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import glob
import json
from obspy import read_events
from pylot.core.util.dataprocessing import Metadata
from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
def get_event_obspy_dmt(eventdir):
    event_pkl_file = os.path.join(eventdir, 'info', 'event.pkl')
    if not os.path.exists(event_pkl_file):
        raise IOError('Could not find event path for event: {}'.format(eventdir))
    event = qml_from_obspyDMT(event_pkl_file)
    return event


def get_event_pylot(eventdir, extension=''):
    event_id = get_event_id(eventdir)
    filename = os.path.join(eventdir, 'PyLoT_{}{}.xml'.format(event_id, extension))
    if not os.path.isfile(filename):
        return
    cat = read_events(filename)
    return cat[0]


def get_event_id(eventdir):
    event_id = os.path.split(eventdir)[-1]
    return event_id


def get_picks(eventdir, extension=''):
    event_id = get_event_id(eventdir)
    filename = 'PyLoT_{}{}.xml'
    filename = filename.format(event_id, extension)
    fpath = os.path.join(eventdir, filename)
    fpaths = glob.glob(fpath)
    if len(fpaths) == 1:
        cat = read_events(fpaths[0])
        picks = cat[0].picks
        return picks
    elif len(fpaths) == 0:
        print('get_picks: File not found: {}'.format(fpath))
        return
    print(f'WARNING: Ambiguous pick file specification. Found the following pick files {fpaths}\nFilemask: {fpath}')
    return


def write_json(object, fname):
    with open(fname, 'w') as outfile:
        json.dump(object, outfile, sort_keys=True, indent=4)


def get_metadata(eventdir):
    metadata_path = os.path.join(eventdir, 'resp')
    metadata = Metadata(inventory=metadata_path, verbosity=0)
    return metadata
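
A brief usage sketch for these helpers, assuming they are in scope and that the data follow an obspyDMT layout where every event directory contains an 'info/event.pkl' and PyLoT quakeML output; the database path and the loop below are purely illustrative:

# Illustration only: the database path is made up and every subdirectory is
# treated as an event directory; adapt to the actual obspyDMT layout.
import glob
import os

database = '/data/AlpArray_Data/dmt_database_example'
for eventdir in sorted(glob.glob(os.path.join(database, '*'))):
    if not os.path.isdir(eventdir):
        continue
    try:
        # ObsPy Event built from obspyDMT's event.pkl (not used further here)
        event = get_event_obspy_dmt(eventdir)
    except IOError as err:
        print(err)
        continue
    picks = get_picks(eventdir, extension='_autopylot')
    n_picks = len(picks) if picks else 0
    print('{}: {} picks'.format(get_event_id(eventdir), n_picks))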

View File

@@ -0,0 +1,76 @@
import pytest
from obspy import read, Trace, UTCDateTime
from pylot.correlation.pick_correlation_correction import XCorrPickCorrection
class TestXCorrPickCorrection():
    def setup(self):
        self.make_test_traces()
        self.make_test_picks()

        self.t_before = 2.
        self.t_after = 2.
        self.cc_maxlag = 0.5

    def make_test_traces(self):
        # take first trace of test Stream from obspy
        tr1 = read()[0]
        # filter trace
        tr1.filter('bandpass', freqmin=1, freqmax=20)
        # make a copy and shift the copy by 0.1 s
        tr2 = tr1.copy()
        tr2.stats.starttime += 0.1

        self.trace1 = tr1
        self.trace2 = tr2

    def make_test_picks(self):
        # create an artificial reference pick on reference trace (trace1) and another one on the 0.1 s shifted trace
        self.tpick1 = UTCDateTime('2009-08-24T00:20:07.7')
        # shift the second pick by 0.2 s, the correction should be around 0.1 s now
        self.tpick2 = self.tpick1 + 0.2

    def test_slice_trace_okay(self):
        self.setup()
        xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                   t_before=self.t_before, t_after=self.t_after, cc_maxlag=self.cc_maxlag)

        test_trace = self.trace1
        pick_time = self.tpick2

        sliced_trace = xcpc.slice_trace(test_trace, pick_time)
        assert ((sliced_trace.stats.starttime == pick_time - self.t_before - self.cc_maxlag / 2)
                and (sliced_trace.stats.endtime == pick_time + self.t_after + self.cc_maxlag / 2))

    def test_slice_trace_fails(self):
        self.setup()

        test_trace = self.trace1
        pick_time = self.tpick1

        with pytest.raises(ValueError):
            xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                       t_before=self.t_before + 20, t_after=self.t_after, cc_maxlag=self.cc_maxlag)
            xcpc.slice_trace(test_trace, pick_time)

        with pytest.raises(ValueError):
            xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
                                       t_before=self.t_before, t_after=self.t_after + 50, cc_maxlag=self.cc_maxlag)
            xcpc.slice_trace(test_trace, pick_time)

    def test_cross_correlation(self):
        self.setup()

        # create XCorrPickCorrection object
        xcpc = XCorrPickCorrection(self.tpick1, self.trace1, self.tpick2, self.trace2, t_before=self.t_before,
                                   t_after=self.t_after, cc_maxlag=self.cc_maxlag)

        # execute correlation
        correction, cc_max, uncert, fwfm = xcpc.cross_correlation(False, '', '')

        # define awaited test result
        test_result = (-0.09983091718314982, 0.9578431835689154, 0.0015285160561610929, 0.03625786256084631)

        # check results
        assert pytest.approx(test_result, rel=1e-6) == (correction, cc_max, uncert, fwfm)
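
For context, the expected correction of about -0.1 s follows directly from the setup: the waveform of trace2 is shifted by +0.1 s while its pick is shifted by +0.2 s, so the second pick is 0.1 s too late. A minimal standalone sketch of the same idea using ObsPy's generic xcorr_pick_correction (which PyLoT's XCorrPickCorrection resembles but is not identical to) is shown below; its exact numbers will differ from the test values above.

# Standalone illustration; NOT PyLoT's XCorrPickCorrection, only the underlying idea.
from obspy import read, UTCDateTime
from obspy.signal.cross_correlation import xcorr_pick_correction

tr1 = read()[0]
tr1.filter('bandpass', freqmin=1, freqmax=20)
tr2 = tr1.copy()
tr2.stats.starttime += 0.1                  # waveform shifted by +0.1 s

tpick1 = UTCDateTime('2009-08-24T00:20:07.7')
tpick2 = tpick1 + 0.2                       # pick shifted by +0.2 s (0.1 s too late)

dt, coeff = xcorr_pick_correction(tpick1, tr1, tpick2, tr2,
                                  t_before=2.0, t_after=2.0, cc_maxlag=0.5)
print(dt, coeff)                            # dt should be close to -0.1 s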