Compare commits
36 Commits
d0fbb91ffe ... correlatio
| Author | SHA1 | Date |
|---|---|---|
| | 28f75cedcb | |
| | e02b62696d | |
| | e4217f0e30 | |
| | 8f154e70d7 | |
| | 6542b6cc4f | |
| | 5ab6c494c5 | |
| | 3da47c6f6b | |
| | cc7716a2b7 | |
| | 03947d2363 | |
| | e1b0d48527 | |
| | 431dbe8924 | |
| | 63810730e5 | |
| | f2159c47f9 | |
| | 424d42aa1c | |
| | 2cea10088d | |
| | 466f19eb2e | |
| | 5d90904838 | |
| | 29107ee40c | |
| | db11e125c0 | |
| | b59232d77b | |
| | 176e93d833 | |
| | 759e7bb848 | |
| | 61c3f40063 | |
| | 67f34cc871 | |
| | f4f48a930f | |
| | a068bb8457 | |
| | 452f2a2e18 | |
| | c3a2ef5022 | |
| | 8e7bd87711 | |
| | d5817adc46 | |
| | 14f01ec46d | |
| | 1b074d14ff | |
| | ce71c549ca | |
| | c4220b389e | |
| | 0f29d0e20d | |
| | cdcd226c87 | |
PyLoT.py (4 changes)
@@ -2196,7 +2196,8 @@ class MainWindow(QMainWindow):
         if event.pylot_autopicks:
             self.drawPicks(picktype='auto')
         if event.pylot_picks or event.pylot_autopicks:
-            self.locateEventAction.setEnabled(True)
+            if not self._inputs.get('extent') == 'global':
+                self.locateEventAction.setEnabled(True)
             self.qualities_action.setEnabled(True)
             self.eventlist_xml_action.setEnabled(True)

@@ -2631,7 +2632,6 @@ class MainWindow(QMainWindow):
                               picks=self.getPicksOnStation(station, 'manual'),
                               autopicks=self.getPicksOnStation(station, 'auto'),
                               metadata=self.metadata, event=event,
-                              model=self.inputs.get('taup_model'),
                               filteroptions=self.filteroptions, wftype=wftype,
                               show_comp_data=self.dataPlot.comp_checkbox.isChecked())
         if self.filterActionP.isChecked():
@@ -41,6 +41,7 @@ global #extent# %extent of a
 1150.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
 True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
 iasp91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
+P,Pdiff #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
 0.05 0.5 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
 0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
 0.05 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
@@ -41,6 +41,7 @@ local #extent# %extent of a
 10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
 False #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
 iasp91 #taup_model# %define TauPy model for traveltime estimation
+P #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
 2.0 20.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
 2.0 30.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
 2.0 10.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
@@ -41,6 +41,7 @@ local #extent# %extent of a
 10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
 True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
 iasp91 #taup_model# %define TauPy model for traveltime estimation
+P #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
 2.0 10.0 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
 2.0 12.0 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
 2.0 8.0 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
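The new #taup_phases# entry is passed, together with #taup_model#, to ObsPy's TauP interface when use_taup is enabled. A minimal, self-contained sketch of that lookup (source depth and distance are made-up values, not taken from these parameter files):

```python
from obspy.taup import TauPyModel

# Hypothetical source/receiver geometry, only to illustrate the taup_model/taup_phases lookup.
model = TauPyModel(model="iasp91")                        # value of #taup_model#
arrivals = model.get_travel_times(source_depth_in_km=10.0,
                                  distance_in_degree=45.0,
                                  phase_list=["P", "Pdiff"])  # value of #taup_phases#
for arr in arrivals:
    print(arr.phase.name, arr.time)  # phase name and travel time in seconds after origin
```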
@@ -9,7 +9,7 @@ PyLoT - the Python picking and Localization Tool

 This python library contains a graphical user interfaces for picking
 seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
-and the Qt4 libraries to be installed first.
+and the Qt libraries to be installed first.

 PILOT has been developed in Mathworks' MatLab. In order to distribute
 PILOT without facing portability problems, it has been decided to re-
@@ -501,7 +501,7 @@ defaults = {'datapath': {'type': str,

            'taup_model': {'type': str,
                           'tooltip': 'Define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6',
-                          'value': None,
+                          'value': 'iasp91',
                           'namestring': 'TauPy model'},

            'taup_phases': {'type': str,
@@ -262,6 +262,10 @@ class AutopickStation(object):
         self.metadata = metadata
         self.origin = origin

+        # initialize TauPy pick estimates
+        self.estFirstP = None
+        self.estFirstS = None
+
         # initialize picking results
         self.p_results = PickingResults()
         self.s_results = PickingResults()
@@ -443,15 +447,15 @@ class AutopickStation(object):
            for arr in arrivals:
                phases[identifyPhaseID(arr.phase.name)].append(arr)
            # get first P and S onsets from arrivals list
-            estFirstP = 0
-            estFirstS = 0
+            arrival_time_p = 0
+            arrival_time_s = 0
            if len(phases['P']) > 0:
-                arrP, estFirstP = min([(arr, arr.time) for arr in phases['P']], key=lambda t: t[1])
+                arrP, arrival_time_p = min([(arr, arr.time) for arr in phases['P']], key=lambda t: t[1])
            if len(phases['S']) > 0:
-                arrS, estFirstS = min([(arr, arr.time) for arr in phases['S']], key=lambda t: t[1])
+                arrS, arrival_time_s = min([(arr, arr.time) for arr in phases['S']], key=lambda t: t[1])
            print('autopick: estimated first arrivals for P: {} s, S:{} s after event'
-                  ' origin time using TauPy'.format(estFirstP, estFirstS))
-            return estFirstP, estFirstS
+                  ' origin time using TauPy'.format(arrival_time_p, arrival_time_s))
+            return arrival_time_p, arrival_time_s

        def exit_taupy():
            """If taupy failed to calculate theoretical starttimes, picking continues.
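The renaming does not change the selection logic: per phase group, the earliest arrival is the one with the smallest travel time. A stand-alone sketch of that idiom, with a namedtuple standing in for ObsPy Arrival objects:

```python
from collections import namedtuple

# Stand-in for obspy.taup Arrival objects, just to show the min(..., key=...) idiom.
Arrival = namedtuple('Arrival', 'name time')
phases = {'P': [Arrival('Pdiff', 512.4), Arrival('P', 485.3)],
          'S': [Arrival('S', 880.1)]}

arrP, arrival_time_p = min([(arr, arr.time) for arr in phases['P']], key=lambda t: t[1])
arrS, arrival_time_s = min([(arr, arr.time) for arr in phases['S']], key=lambda t: t[1])
print(arrP.name, arrival_time_p)  # -> P 485.3, the earliest P-type arrival
```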
@@ -477,10 +481,13 @@ class AutopickStation(object):
            raise AttributeError('No source origins given!')

        arrivals = create_arrivals(self.metadata, self.origin, self.pickparams["taup_model"])
-        estFirstP, estFirstS = first_PS_onsets(arrivals)
+        arrival_P, arrival_S = first_PS_onsets(arrivals)
+
+        self.estFirstP = (self.origin[0].time + arrival_P) - self.ztrace.stats.starttime
+
        # modifiy pstart and pstop relative to estimated first P arrival (relative to station time axis)
-        self.pickparams["pstart"] += (self.origin[0].time + estFirstP) - self.ztrace.stats.starttime
-        self.pickparams["pstop"] += (self.origin[0].time + estFirstP) - self.ztrace.stats.starttime
+        self.pickparams["pstart"] += self.estFirstP
+        self.pickparams["pstop"] += self.estFirstP
        print('autopick: CF calculation times respectively:'
              ' pstart: {} s, pstop: {} s'.format(self.pickparams["pstart"], self.pickparams["pstop"]))
        # make sure pstart and pstop are inside the starttime/endtime of vertical trace
@@ -491,9 +498,10 @@ class AutopickStation(object):
        # for the two horizontal components take earliest and latest time to make sure that the s onset is not clipped
        # if start and endtime of horizontal traces differ, the s windowsize will automatically increase
        trace_s_start = min([self.etrace.stats.starttime, self.ntrace.stats.starttime])
+        self.estFirstS = (self.origin[0].time + arrival_S) - trace_s_start
        # modifiy sstart and sstop relative to estimated first S arrival (relative to station time axis)
-        self.pickparams["sstart"] += (self.origin[0].time + estFirstS) - trace_s_start
-        self.pickparams["sstop"] += (self.origin[0].time + estFirstS) - trace_s_start
+        self.pickparams["sstart"] += self.estFirstS
+        self.pickparams["sstop"] += self.estFirstS
        print('autopick: CF calculation times respectively:'
              ' sstart: {} s, sstop: {} s'.format(self.pickparams["sstart"], self.pickparams["sstop"]))
        # make sure pstart and pstop are inside the starttime/endtime of horizontal traces
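In both the P and the S branch, the TauPy travel time (seconds after origin time) is shifted onto the trace's own time axis before the CF window is moved. A minimal sketch of that conversion using ObsPy time arithmetic; all values are placeholders:

```python
from obspy import UTCDateTime

# Placeholder times, only to show the arithmetic behind estFirstP/estFirstS.
origin_time = UTCDateTime("2017-05-01T14:18:22")      # event origin (self.origin[0].time)
trace_starttime = UTCDateTime("2017-05-01T14:20:00")  # start of the vertical/horizontal trace
arrival_p = 485.3                                     # TauPy travel time [s] after origin

est_first_p = (origin_time + arrival_p) - trace_starttime  # [s] relative to trace start
pstart, pstop = -60.0, 120.0                               # hypothetical CF window bounds
print(pstart + est_first_p, pstop + est_first_p)           # window shifted to the expected onset
```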
@@ -609,6 +617,12 @@ class AutopickStation(object):
            # plot tapered trace filtered with bpz2 filter settings
            ax1.plot(tdata, self.tr_filt_z_bpz2.data / max(self.tr_filt_z_bpz2.data), color=linecolor, linewidth=0.7,
                     label='Data')
+            # plot pickwindows for P
+            pstart, pstop = self.pickparams['pstart'], self.pickparams['pstop']
+            if pstart is not None and pstop is not None:
+                ax1.axvspan(pstart, pstop, color='r', alpha=0.1, zorder=0, label='P window')
+            if self.estFirstP is not None:
+                ax1.axvline(self.estFirstP, ls='dashed', color='r', alpha=0.4, label='TauPy estimate')
            if self.p_results.weight < 4:
                # plot CF of initial onset (HOScf or ARZcf)
                ax1.plot(self.cf1.getTimeArray(), self.cf1.getCF() / max(self.cf1.getCF()), 'b', label='CF1')
@@ -713,6 +727,15 @@ class AutopickStation(object):
            ax3.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5], [-1.3, -1.3], 'g', linewidth=2)
            ax3.plot([self.s_results.lpp, self.s_results.lpp], [-1.1, 1.1], 'g--', label='lpp')
            ax3.plot([self.s_results.epp, self.s_results.epp], [-1.1, 1.1], 'g--', label='epp')
+
+            # plot pickwindows for S
+            sstart, sstop = self.pickparams['sstart'], self.pickparams['sstop']
+            if sstart is not None and sstop is not None:
+                for axis in [ax2, ax3]:
+                    axis.axvspan(sstart, sstop, color='b', alpha=0.1, zorder=0, label='S window')
+                    if self.estFirstS is not None:
+                        axis.axvline(self.estFirstS, ls='dashed', color='b', alpha=0.4, label='TauPy estimate')
+
        ax3.legend(loc=1)
        ax3.set_yticks([])
        ax3.set_ylim([-1.5, 1.5])
@@ -835,14 +858,21 @@ class AutopickStation(object):
            self.cf1 = None
        assert isinstance(self.cf1, CharacteristicFunction), 'cf1 is not set correctly: maybe the algorithm name ({})' \
                                                             ' is corrupted'.format(self.pickparams["algoP"])
+        # get the original waveform stream from first CF class cut to identical length as CF for plotting
+        cut_ogstream = self.cf1.getDataArray(self.cf1.getCut())
+
+        # MP: Rename to cf_stream for further use of z_copy and to prevent chaos when z_copy suddenly becomes a cf
+        # stream and later again a waveform stream
+        cf_stream = z_copy.copy()
+        cf_stream[0].data = self.cf1.getCF()
+
        # calculate AIC cf from first cf (either HOS or ARZ)
-        z_copy[0].data = self.cf1.getCF()
-        aiccf = AICcf(z_copy, cuttimes)
+        aiccf = AICcf(cf_stream, cuttimes)
        # get preliminary onset time from AIC-CF
        self.set_current_figure('aicFig')
        aicpick = AICPicker(aiccf, self.pickparams["tsnrz"], self.pickparams["pickwinP"], self.iplot,
                            Tsmooth=self.pickparams["aictsmooth"], fig=self.current_figure,
-                            linecolor=self.current_linecolor)
+                            linecolor=self.current_linecolor, ogstream=cut_ogstream)
        # save aicpick for plotting later
        self.p_data.aicpick = aicpick
        # add pstart and pstop to aic plot
@@ -855,7 +885,7 @@ class AutopickStation(object):
                    label='P stop')
            ax.legend(loc=1)

-        Pflag = self._pick_p_quality_control(aicpick, z_copy, tr_filt)
+        Pflag = self._pick_p_quality_control(aicpick, cf_stream, tr_filt)
        # go on with processing if AIC onset passes quality control
        slope = aicpick.getSlope()
        if not slope: slope = 0
@@ -894,7 +924,7 @@ class AutopickStation(object):
            refPpick = PragPicker(self.cf2, self.pickparams["tsnrz"], self.pickparams["pickwinP"], self.iplot,
                                  self.pickparams["ausP"],
                                  self.pickparams["tsmoothP"], aicpick.getpick(), self.current_figure,
-                                  self.current_linecolor)
+                                  self.current_linecolor, ogstream=cut_ogstream)
            # save PragPicker result for plotting
            self.p_data.refPpick = refPpick
            self.p_results.mpp = refPpick.getpick()
|
|||||||
# calculate AIC cf
|
# calculate AIC cf
|
||||||
haiccf = self._calculate_aic_cf_s_pick(cuttimesh)
|
haiccf = self._calculate_aic_cf_s_pick(cuttimesh)
|
||||||
|
|
||||||
|
# get the original waveform stream cut to identical length as CF for plotting
|
||||||
|
ogstream = haiccf.getDataArray(haiccf.getCut())
|
||||||
|
|
||||||
# get preliminary onset time from AIC cf
|
# get preliminary onset time from AIC cf
|
||||||
self.set_current_figure('aicARHfig')
|
self.set_current_figure('aicARHfig')
|
||||||
aicarhpick = AICPicker(haiccf, self.pickparams["tsnrh"], self.pickparams["pickwinS"], self.iplot,
|
aicarhpick = AICPicker(haiccf, self.pickparams["tsnrh"], self.pickparams["pickwinS"], self.iplot,
|
||||||
Tsmooth=self.pickparams["aictsmoothS"], fig=self.current_figure,
|
Tsmooth=self.pickparams["aictsmoothS"], fig=self.current_figure,
|
||||||
linecolor=self.current_linecolor)
|
linecolor=self.current_linecolor, ogstream=ogstream)
|
||||||
# save pick for later plotting
|
# save pick for later plotting
|
||||||
self.aicarhpick = aicarhpick
|
self.aicarhpick = aicarhpick
|
||||||
|
|
||||||
|
|||||||
@@ -21,6 +21,7 @@ try:
    from scipy.signal import tukey
 except ImportError:
    from scipy.signal.windows import tukey

 from obspy.core import Stream

 from pylot.core.pick.utils import PickingFailedException
@@ -59,7 +60,7 @@ class CharacteristicFunction(object):
        self.setOrder(order)
        self.setFnoise(fnoise)
        self.setARdetStep(t2)
-        self.calcCF(self.getDataArray())
+        self.calcCF()
        self.arpara = np.array([])
        self.xpred = np.array([])

@@ -211,17 +212,15 @@ class CharacteristicFunction(object):
        data = self.orig_data.copy()
        return data

-    def calcCF(self, data=None):
-        self.cf = data
+    def calcCF(self):
+        pass


class AICcf(CharacteristicFunction):

-    def calcCF(self, data):
+    def calcCF(self):
        """
        Function to calculate the Akaike Information Criterion (AIC) after Maeda (1985).
-        :param data: data, time series (whether seismogram or CF)
-        :type data: tuple
        :return: AIC function
        :rtype:
        """
@@ -259,13 +258,11 @@ class HOScf(CharacteristicFunction):
        """
        super(HOScf, self).__init__(data, cut, pickparams["tlta"], pickparams["hosorder"])

-    def calcCF(self, data):
+    def calcCF(self):
        """
        Function to calculate skewness (statistics of order 3) or kurtosis
        (statistics of order 4), using one long moving window, as published
        in Kueperkoch et al. (2010), or order 2, i.e. STA/LTA.
-        :param data: data, time series (whether seismogram or CF)
-        :type data: tuple
        :return: HOS cf
        :rtype:
        """
@@ -280,47 +277,28 @@ class HOScf(CharacteristicFunction):
        elif self.getOrder() == 4:  # this is kurtosis
            y = np.power(xnp, 4)
            y1 = np.power(xnp, 2)
-        elif self.getOrder() == 2:  # this is variance, used for STA/LTA processing
-            y = np.power(xnp, 2)
-            y1 = np.power(xnp, 2)

        # Initialisation
        # t2: long term moving window
        ilta = int(round(self.getTime2() / self.getIncrement()))
-        ista = int(round((self.getTime2() / 10) / self.getIncrement()))  # TODO: still hard coded!!
        lta = y[0]
        lta1 = y1[0]
-        sta = y[0]
        # moving windows
        LTA = np.zeros(len(xnp))
-        STA = np.zeros(len(xnp))
        for j in range(0, len(xnp)):
            if j < 4:
                LTA[j] = 0
-                STA[j] = 0
-            elif j <= ista and self.getOrder() == 2:
-                lta = (y[j] + lta * (j - 1)) / j
-                if self.getOrder() == 2:
-                    sta = (y[j] + sta * (j - 1)) / j
-            # elif j < 4:
            elif j <= ilta:
                lta = (y[j] + lta * (j - 1)) / j
                lta1 = (y1[j] + lta1 * (j - 1)) / j
-                if self.getOrder() == 2:
-                    sta = (y[j] - y[j - ista]) / ista + sta
            else:
                lta = (y[j] - y[j - ilta]) / ilta + lta
                lta1 = (y1[j] - y1[j - ilta]) / ilta + lta1
-                if self.getOrder() == 2:
-                    sta = (y[j] - y[j - ista]) / ista + sta
            # define LTA
            if self.getOrder() == 3:
                LTA[j] = lta / np.power(lta1, 1.5)
            elif self.getOrder() == 4:
                LTA[j] = lta / np.power(lta1, 2)
-            else:
-                LTA[j] = lta
-                STA[j] = sta

        # remove NaN's with first not-NaN-value,
        # so autopicker doesnt pick discontinuity at start of the trace
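What remains is a recursive long-window estimate of skewness (order 3) or kurtosis (order 4); the order-2 STA/LTA branch is removed entirely. A compact, self-contained numpy sketch of the same recursion on synthetic data (window length and noise level are made up):

```python
import numpy as np

rng = np.random.default_rng(0)
x = rng.normal(size=2000)
x[1200:] += 5.0 * rng.normal(size=800)       # synthetic "onset": variance jump at sample 1200

ilta = 200                                    # long-term window length in samples (made up)
lta = x[0] ** 4                               # 4th-moment accumulator (kurtosis numerator)
lta1 = x[0] ** 2                              # 2nd-moment accumulator
cf = np.zeros(len(x))
for j in range(4, len(x)):
    if j <= ilta:                             # growing window until ilta samples are available
        lta = (x[j] ** 4 + lta * (j - 1)) / j
        lta1 = (x[j] ** 2 + lta1 * (j - 1)) / j
    else:                                     # sliding window of fixed length ilta
        lta = (x[j] ** 4 - x[j - ilta] ** 4) / ilta + lta
        lta1 = (x[j] ** 2 - x[j - ilta] ** 2) / ilta + lta1
    cf[j] = lta / lta1 ** 2                   # kurtosis-like CF, rises near the onset
print(cf.argmax())
```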
@@ -329,10 +307,7 @@ class HOScf(CharacteristicFunction):
            first = ind[0]
            LTA[:first] = LTA[first]

-        if self.getOrder() > 2:
-            self.cf = LTA
-        else:  # order 2 means STA/LTA!
-            self.cf = STA / LTA
+        self.cf = LTA
        self.xcf = x


@@ -342,12 +317,10 @@ class ARZcf(CharacteristicFunction):
        super(ARZcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Parorder"],
                                    fnoise=pickparams["addnoise"])

-    def calcCF(self, data):
+    def calcCF(self):
        """
        function used to calculate the AR prediction error from a single vertical trace. Can be used to pick
        P onsets.
-        :param data:
-        :type data: ~obspy.core.stream.Stream
        :return: ARZ cf
        :rtype:
        """
@@ -478,14 +451,12 @@ class ARHcf(CharacteristicFunction):
        super(ARHcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
                                    fnoise=pickparams["addnoise"])

-    def calcCF(self, data):
+    def calcCF(self):
        """
        Function to calculate a characteristic function using autoregressive modelling of the waveform of
        both horizontal traces.
        The waveform is predicted in a moving time window using the calculated AR parameters. The difference
        between the predicted and the actual waveform servers as a characteristic function.
-        :param data: wavefor stream
-        :type data: ~obspy.core.stream.Stream
        :return: ARH cf
        :rtype:
        """
||||||
@@ -634,14 +605,12 @@ class AR3Ccf(CharacteristicFunction):
|
|||||||
super(AR3Ccf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
|
super(AR3Ccf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"],
|
||||||
fnoise=pickparams["addnoise"])
|
fnoise=pickparams["addnoise"])
|
||||||
|
|
||||||
def calcCF(self, data):
|
def calcCF(self):
|
||||||
"""
|
"""
|
||||||
Function to calculate a characteristic function using autoregressive modelling of the waveform of
|
Function to calculate a characteristic function using autoregressive modelling of the waveform of
|
||||||
all three traces.
|
all three traces.
|
||||||
The waveform is predicted in a moving time window using the calculated AR parameters. The difference
|
The waveform is predicted in a moving time window using the calculated AR parameters. The difference
|
||||||
between the predicted and the actual waveform servers as a characteristic function
|
between the predicted and the actual waveform servers as a characteristic function
|
||||||
:param data: stream holding all three traces
|
|
||||||
:type data: ~obspy.core.stream.Stream
|
|
||||||
:return: AR3C cf
|
:return: AR3C cf
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
|
|||||||
@@ -37,7 +37,8 @@ class AutoPicker(object):
|
|||||||
|
|
||||||
warnings.simplefilter('ignore')
|
warnings.simplefilter('ignore')
|
||||||
|
|
||||||
def __init__(self, cf, TSNR, PickWindow, iplot=0, aus=None, Tsmooth=None, Pick1=None, fig=None, linecolor='k'):
|
def __init__(self, cf, TSNR, PickWindow, iplot=0, aus=None, Tsmooth=None, Pick1=None,
|
||||||
|
fig=None, linecolor='k', ogstream=None):
|
||||||
"""
|
"""
|
||||||
Create AutoPicker object
|
Create AutoPicker object
|
||||||
:param cf: characteristic function, on which the picking algorithm is applied
|
:param cf: characteristic function, on which the picking algorithm is applied
|
||||||
@@ -59,12 +60,15 @@ class AutoPicker(object):
|
|||||||
:type fig: `~matplotlib.figure.Figure`
|
:type fig: `~matplotlib.figure.Figure`
|
||||||
:param linecolor: matplotlib line color string
|
:param linecolor: matplotlib line color string
|
||||||
:type linecolor: str
|
:type linecolor: str
|
||||||
|
:param ogstream: original stream (waveform), e.g. for plotting purposes
|
||||||
|
:type ogstream: `~obspy.core.stream.Stream`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
assert isinstance(cf, CharacteristicFunction), "%s is not a CharacteristicFunction object" % str(cf)
|
assert isinstance(cf, CharacteristicFunction), "%s is not a CharacteristicFunction object" % str(cf)
|
||||||
self._linecolor = linecolor
|
self._linecolor = linecolor
|
||||||
self._pickcolor_p = 'b'
|
self._pickcolor_p = 'b'
|
||||||
self.cf = cf.getCF()
|
self.cf = cf.getCF()
|
||||||
|
self.ogstream = ogstream
|
||||||
self.Tcf = cf.getTimeArray()
|
self.Tcf = cf.getTimeArray()
|
||||||
self.Data = cf.getXCF()
|
self.Data = cf.getXCF()
|
||||||
self.dt = cf.getIncrement()
|
self.dt = cf.getIncrement()
|
||||||
@@ -173,7 +177,7 @@ class AICPicker(AutoPicker):
        nn = np.isnan(self.cf)
        if len(nn) > 1:
            self.cf[nn] = 0
-        # taper AIC-CF to get rid off side maxima
+        # taper AIC-CF to get rid of side maxima
        tap = np.hanning(len(self.cf))
        aic = tap * self.cf + max(abs(self.cf))
        # smooth AIC-CF
@@ -316,16 +320,7 @@ class AICPicker(AutoPicker):
                plt.close(fig)
            return
        iislope = islope[0][0:imax + 1]
-        # MP MP change slope calculation
-        # get all maxima of aicsmooth
-        iaicmaxima = argrelmax(aicsmooth)[0]
-        # get first index of maximum after pickindex (indices saved in iaicmaxima)
-        aicmax = iaicmaxima[np.where(iaicmaxima > pickindex)[0]]
-        if len(aicmax) > 0:
-            iaicmax = aicmax[0]
-        else:
-            iaicmax = -1
-        dataslope = aicsmooth[pickindex: iaicmax]
+        dataslope = self.Data[0].data[iislope]
        # calculate slope as polynomal fit of order 1
        xslope = np.arange(0, len(dataslope), 1)
        try:
@@ -336,7 +331,7 @@ class AICPicker(AutoPicker):
            else:
                self.slope = 1 / (len(dataslope) * self.Data[0].stats.delta) * (datafit[-1] - datafit[0])
                # normalize slope to maximum of cf to make it unit independent
-                self.slope /= aicsmooth[iaicmax]
+                self.slope /= self.Data[0].data[icfmax]
        except Exception as e:
            print("AICPicker: Problems with data fitting! {}".format(e))

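The slope being normalized here comes from a first-order polynomial fit over the slope window, as the surrounding comment says ("calculate slope as polynomal fit of order 1"). A self-contained numpy sketch of that computation with invented window data; the final normalization uses a stand-in for the CF maximum that the real code takes from self.Data:

```python
import numpy as np

# Invented slope-window data and sampling interval, only to illustrate the fit.
dataslope = np.array([0.0, 0.1, 0.35, 0.7, 1.2, 1.9])   # CF values inside the slope window
delta = 0.05                                            # trace sampling interval [s]

xslope = np.arange(0, len(dataslope), 1)
p = np.polyfit(xslope, dataslope, 1)                    # order-1 fit: p[0] * x + p[1]
datafit = np.polyval(p, xslope)
slope = 1 / (len(dataslope) * delta) * (datafit[-1] - datafit[0])
slope /= dataslope.max()                                # stand-in for the CF maximum used above
print(slope)
```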
@@ -356,6 +351,12 @@ class AICPicker(AutoPicker):
                self.Tcf = self.Tcf[0:len(self.Tcf) - 1]
            ax1.plot(self.Tcf, cf / max(cf), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
            ax1.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r', label='Smoothed AIC-CF')
+            # plot the original waveform also for evaluation of the CF and pick
+            if self.ogstream:
+                data = self.ogstream[0].data
+                if len(data) == len(self.Tcf):
+                    ax1.plot(self.Tcf, 0.5 * data / max(data), 'k', label='Seismogram', alpha=0.3, zorder=0,
+                             lw=0.5)
            if self.Pick is not None:
                ax1.plot([self.Pick, self.Pick], [-0.1, 0.5], 'b', linewidth=2, label='AIC-Pick')
            ax1.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
@@ -376,7 +377,7 @@ class AICPicker(AutoPicker):
                        label='Signal Window')
            ax2.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
                        label='Slope Window')
-            ax2.plot(self.Tcf[pickindex: iaicmax], datafit, 'g', linewidth=2,
+            ax2.plot(self.Tcf[iislope], datafit, 'g', linewidth=2,
                     label='Slope')  # MP MP changed temporarily!

            if self.slope is not None:
@@ -15,7 +15,7 @@ import numpy as np
 from obspy.core import Stream, UTCDateTime
 from scipy.signal import argrelmax

-from pylot.core.util.utils import get_bool, get_none, SetChannelComponents
+from pylot.core.util.utils import get_bool, get_none, SetChannelComponents, common_range


 def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None, linecolor='k'):
@@ -828,14 +828,22 @@ def checksignallength(X, pick, minsiglength, pickparams, iplot=0, fig=None, line
    if len(X) > 1:
        # all three components available
        # make sure, all components have equal lengths
-        ilen = min([len(X[0].data), len(X[1].data), len(X[2].data)])
-        x1 = X[0][0:ilen]
-        x2 = X[1][0:ilen]
-        x3 = X[2][0:ilen]
+        earliest_starttime = min(tr.stats.starttime for tr in X)
+        cuttimes = common_range(X)
+        X = X.slice(cuttimes[0], cuttimes[1])
+        x1, x2, x3 = X[:3]
+
+        if not (len(x1) == len(x2) == len(x3)):
+            raise PickingFailedException('checksignallength: unequal lengths of components!')
+
        # get RMS trace
        rms = np.sqrt((np.power(x1, 2) + np.power(x2, 2) + np.power(x3, 2)) / 3)
+        ilen = len(rms)
+        dt = earliest_starttime - X[0].stats.starttime
+        pick -= dt
    else:
        x1 = X[0].data
+        x2 = x3 = None
        ilen = len(x1)
        rms = abs(x1)

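common_range, newly imported from pylot.core.util.utils further up, replaces the sample-count trimming with a slice over the time span shared by all traces. A sketch of the equivalent operation using plain ObsPy on synthetic traces; the helper itself is assumed to return the latest common start time and the earliest common end time:

```python
import numpy as np
from obspy import Stream, Trace, UTCDateTime

# Three synthetic components with slightly different start times.
t0 = UTCDateTime("2017-05-01T14:18:22")
traces = []
for comp, shift in zip("ZNE", (0.0, 0.3, 0.7)):
    tr = Trace(np.random.rand(1000).astype(np.float32))
    tr.stats.starttime = t0 + shift
    tr.stats.sampling_rate = 20.0
    tr.stats.channel = "HH" + comp
    traces.append(tr)
st = Stream(traces)

# Equivalent of common_range(X): latest start and earliest end over all traces.
tstart = max(tr.stats.starttime for tr in st)
tend = min(tr.stats.endtime for tr in st)
st = st.slice(tstart, tend)
print([len(tr) for tr in st])   # lengths now agree on the shared window
```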
@@ -874,6 +882,10 @@ def checksignallength(X, pick, minsiglength, pickparams, iplot=0, fig=None, line
        fig._tight = True
        ax = fig.add_subplot(111)
        ax.plot(t, rms, color=linecolor, linewidth=0.7, label='RMS Data')
+        ax.plot(t, x1, 'k', alpha=0.3, lw=0.3, zorder=0)
+        if x2 is not None and x3 is not None:
+            ax.plot(t, x2, 'r', alpha=0.3, lw=0.3, zorder=0)
+            ax.plot(t, x3, 'g', alpha=0.3, lw=0.3, zorder=0)
        ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
        ax.axvspan(t[isignal[0]], t[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
        ax.plot([t[isignal[0]], t[isignal[len(isignal) - 1]]],
@@ -883,6 +895,7 @@ def checksignallength(X, pick, minsiglength, pickparams, iplot=0, fig=None, line
        ax.set_xlabel('Time [s] since %s' % X[0].stats.starttime)
        ax.set_ylabel('Counts')
        ax.set_title('Check for Signal Length, Station %s' % X[0].stats.station)
+        ax.set_xlim(pickparams["pstart"], pickparams["pstop"])
        ax.set_yticks([])
        if plt_flag == 1:
            fig.show()
@@ -521,7 +521,6 @@ class Array_map(QtWidgets.QWidget):
                          picks=self._parent.get_current_event().getPick(station),
                          autopicks=self._parent.get_current_event().getAutopick(station),
                          filteroptions=self._parent.filteroptions, metadata=self.metadata,
-                          model=self.parameter.get('taup_model'),
                          event=pyl_mw.get_current_event())
        except Exception as e:
            message = 'Could not generate Plot for station {st}.\n {er}'.format(st=station, er=e)
@@ -1076,7 +1076,7 @@ def check4rotated(data, metadata=None, verbosity=1):
        return wfs_in

    # check metadata quality
-    t_start = full_range(wfs_in)
+    t_start = full_range(wfs_in)[0]
    try:
        azimuths = []
        dips = []
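The fix takes only the first element of the tuple returned by full_range, so t_start is a single start time instead of a (start, end) pair. A small ObsPy-only sketch with a hypothetical stand-in for the helper:

```python
import numpy as np
from obspy import Stream, Trace, UTCDateTime

def full_range_standin(stream):
    """Hypothetical stand-in for pylot's full_range: earliest start and latest end of all traces."""
    return (min(tr.stats.starttime for tr in stream),
            max(tr.stats.endtime for tr in stream))

tr = Trace(np.zeros(100, dtype=np.float32))
tr.stats.starttime = UTCDateTime("2017-05-01T14:18:22")
tr.stats.sampling_rate = 20.0
st = Stream([tr])

t_start = full_range_standin(st)[0]   # the old code used the whole tuple as a start time
print(t_start)
```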
@@ -1871,13 +1871,14 @@ class PickDlg(QDialog):

    def __init__(self, parent=None, data=None, data_compare=None, station=None, network=None, location=None, picks=None,
                 autopicks=None, rotate=False, parameter=None, embedded=False, metadata=None, show_comp_data=False,
-                 event=None, filteroptions=None, model=None, wftype=None):
+                 event=None, filteroptions=None, wftype=None):
        super(PickDlg, self).__init__(parent, Qt.Window)
        self.orig_parent = parent
        self.setAttribute(Qt.WA_DeleteOnClose)

        # initialize attributes
        self.parameter = parameter
+        model = self.parameter.get('taup_model')
        self._embedded = embedded
        self.showCompData = show_comp_data
        self.station = station
@@ -2270,8 +2271,8 @@ class PickDlg(QDialog):
            arrivals = func[plot](source_origin.depth,
                                  source_origin.latitude,
                                  source_origin.longitude,
-                                  station_coords['latitude'],
-                                  station_coords['longitude'],
+                                  station_coords.get('latitude'),
+                                  station_coords.get('longitude'),
                                  phases)
            self.arrivals = arrivals

pylot/correlation/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
+# -*- coding: utf-8 -*-
+#
pylot/correlation/parameters_adriaarray.yaml (new file, 90 lines)
@@ -0,0 +1,90 @@
+############################# correlation parameters #####################################
+# min_corr_stacking: minimum correlation coefficient for building beam trace
+# min_corr_export: minimum correlation coefficient for pick export
+# min_stack: minimum number of stations for building beam trace
+# t_before: correlation window before pick
+# t_after: correlation window after pick#
+# cc_maxlag: maximum shift for initial correlation
+# cc_maxlag2: maximum shift for second (final) correlation (also for calculating pick uncertainty)
+# initial_pick_outlier_threshold: (hopefully) threshold for excluding large outliers of initial (AIC) picks
+# export_threshold: automatically exclude all onsets which deviate more than this threshold from corrected taup onsets
+# min_picks_export: minimum number of correlated picks for export
+# min_picks_autopylot: minimum number of reference autopicks picks to continue with event
+# check_RMS: do RMS check to search for restitution errors (very experimental)
+# use_taupy_onsets: use taupy onsets as reference picks instead of external picks
+# station_list: use the following stations as reference for stacking
+# use_stacked_trace: use existing stacked trace if found (spare re-computation)
+# data_dir: obspyDMT data subdirectory (e.g. 'raw', 'processed')
+# pickfile_extension: use quakeML files (PyLoT output) with the following extension, e.g. '_autopylot' for pickfiles
+# such as 'PyLoT_20170501_141822_autopylot.xml'
+
+logging: info
+pick_phases: ['P', 'S']
+
+# P-phase
+P:
+  min_corr_stacking: 0.8
+  min_corr_export: 0.6
+  min_stack: 20
+  t_before: 30.
+  t_after: 50.
+  cc_maxlag: 50.
+  cc_maxlag2: 5.
+  initial_pick_outlier_threshold: 30.
+  export_threshold: 2.5
+  min_picks_export: 100
+  min_picks_autopylot: 50
+  check_RMS: True
+  use_taupy_onsets: False
+  station_list: ['HU.MORH', 'HU.TIH', 'OX.FUSE', 'OX.BAD']
+  use_stacked_trace: False
+  data_dir: 'processed'
+  pickfile_extension: '_autopylot'
+  dt_stacking: [250, 250]
+
+  # filter for first correlation (rough)
+  filter_options:
+    freqmax: 0.5
+    freqmin: 0.03
+  # filter for second correlation (fine)
+  filter_options_final:
+    freqmax: 0.5
+    freqmin: 0.03
+
+  filter_type: bandpass
+  sampfreq: 20.0
+
+# S-phase
+S:
+  min_corr_stacking: 0.7
+  min_corr_export: 0.6
+  min_stack: 20
+  t_before: 60.
+  t_after: 60.
+  cc_maxlag: 100.
+  cc_maxlag2: 25.
+  initial_pick_outlier_threshold: 30.
+  export_threshold: 5.0
+  min_picks_export: 200
+  min_picks_autopylot: 50
+  check_RMS: True
+  use_taupy_onsets: False
+  station_list: ['HU.MORH','HU.TIH', 'OX.FUSE', 'OX.BAD']
+  use_stacked_trace: False
+  data_dir: 'processed'
+  pickfile_extension: '_autopylot'
+  dt_stacking: [250, 250]
+
+  # filter for first correlation (rough)
+  filter_options:
+    freqmax: 0.1
+    freqmin: 0.01
+
+  # filter for second correlation (fine)
+  filter_options_final:
+    freqmax: 0.2
+    freqmin: 0.01
+
+  filter_type: bandpass
+  sampfreq: 20.0
+
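A minimal sketch of how such a parameter file can be consumed with PyYAML; the file name matches the new file above, but this is not necessarily how pick_correlation_correction.py reads it:

```python
import yaml

# Load the correlation parameters and look up the per-phase settings.
with open("parameters_adriaarray.yaml") as fid:
    params = yaml.safe_load(fid)

for phase in params["pick_phases"]:          # ['P', 'S']
    settings = params[phase]
    print(phase,
          settings["min_corr_stacking"],     # minimum CC for building the beam trace
          settings["filter_options"])        # rough-correlation band pass corners
```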
pylot/correlation/pick_correlation_correction.py (new file, 1987 lines)
File diff suppressed because it is too large
pylot/correlation/submit_pick_corr_correction.sh (new executable file, 40 lines)
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+#ulimit -s 8192
+#ulimit -v $(ulimit -v | awk '{printf("%d",$1*0.95)}')
+#ulimit -v
+
+#655360
+
+source /opt/anaconda3/etc/profile.d/conda.sh
+conda activate pylot_311
+NSLOTS=20
+
+#qsub -l low -cwd -l "os=*stretch" -pe smp 40 submit_pick_corr_correction.sh
+#$ -l low
+#$ -l h_vmem=6G
+#$ -cwd
+#$ -pe smp 20
+#$ -N corr_pick
+
+
+export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot_tools/"
+export PYTHONPATH="$PYTHONPATH:/home/marcel/git/"
+export PYTHONPATH="$PYTHONPATH:/home/marcel/git/pylot/"
+
+#export MKL_NUM_THREADS=${NSLOTS:=1}
+#export NUMEXPR_NUM_THREADS=${NSLOTS:=1}
+#export OMP_NUM_THREADS=${NSLOTS:=1}
+
+#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
+#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
+#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M6.0-6.5' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 0 -istop 100
+#python pick_correlation_correction.py '/data/AlpArray_Data/dmt_database_mantle_M5.8-6.0' '/home/marcel/.pylot/pylot_alparray_mantle_corr_stack_0.03-0.5.in' -pd -n ${NSLOTS:=1} -istart 100 -istop 200
+#python pick_correlation_correction.py 'H:\sciebo\dmt_database' 'H:\Sciebo\dmt_database\pylot_alparray_mantle_corr_S_0.01-0.2.in' -pd -n 4 -t
+
+pylot_infile='/home/marcel/.pylot/pylot_alparray_syn_fwi_mk6_it3.in'
+#pylot_infile='/home/marcel/.pylot/pylot_adriaarray_corr_P_and_S.in'
+
+# THIS SCRIPT SHOLD BE CALLED BY "submit_to_grid_engine.py" using the following line:
+python pick_correlation_correction.py $1 $pylot_infile -pd -n ${NSLOTS:=1} -istart $2 --params 'parameters_fwi_mk6_it3.yaml'
+#--event_blacklist eventlist.txt
pylot/correlation/submit_to_grid_engine.py (new executable file, 23 lines)
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+
+import subprocess
+
+fnames = [
+    ('/data/AlpArray_Data/dmt_database_synth_model_mk6_it3_no_rotation', 0),
+]
+
+#fnames = [('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_SKS-phase', 0),
+#          ('/data/AlpArray_Data/dmt_database_mantle_0.01-0.2_S-phase', 0),]
+
+####
+script_location = '/home/marcel/VersionCtrl/git/code_base/correlation_picker/submit_pick_corr_correction.sh'
+####
+
+for fnin, istart in fnames:
+    input_cmds = f'qsub -q low.q@minos15,low.q@minos14,low.q@minos13,low.q@minos12,low.q@minos11 {script_location} {fnin} {istart}'
+
+    print(input_cmds)
+    print(subprocess.check_output(input_cmds.split()))
+
+
+
pylot/correlation/utils.py (new file, 61 lines)
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os
+import glob
+import json
+
+from obspy import read_events
+
+from pylot.core.util.dataprocessing import Metadata
+from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT
+
+
+def get_event_obspy_dmt(eventdir):
+    event_pkl_file = os.path.join(eventdir, 'info', 'event.pkl')
+    if not os.path.exists(event_pkl_file):
+        raise IOError('Could not find event path for event: {}'.format(eventdir))
+    event = qml_from_obspyDMT(event_pkl_file)
+    return event
+
+
+def get_event_pylot(eventdir, extension=''):
+    event_id = get_event_id(eventdir)
+    filename = os.path.join(eventdir, 'PyLoT_{}{}.xml'.format(event_id, extension))
+    if not os.path.isfile(filename):
+        return
+    cat = read_events(filename)
+    return cat[0]
+
+
+def get_event_id(eventdir):
+    event_id = os.path.split(eventdir)[-1]
+    return event_id
+
+
+def get_picks(eventdir, extension=''):
+    event_id = get_event_id(eventdir)
+    filename = 'PyLoT_{}{}.xml'
+    filename = filename.format(event_id, extension)
+    fpath = os.path.join(eventdir, filename)
+    fpaths = glob.glob(fpath)
+    if len(fpaths) == 1:
+        cat = read_events(fpaths[0])
+        picks = cat[0].picks
+        return picks
+    elif len(fpaths) == 0:
+        print('get_picks: File not found: {}'.format(fpath))
+        return
+    print(f'WARNING: Ambiguous pick file specification. Found the following pick files {fpaths}\nFilemask: {fpath}')
+    return
+
+
+def write_json(object, fname):
+    with open(fname, 'w') as outfile:
+        json.dump(object, outfile, sort_keys=True, indent=4)
+
+
+def get_metadata(eventdir):
+    metadata_path = os.path.join(eventdir, 'resp')
+    metadata = Metadata(inventory=metadata_path, verbosity=0)
+    return metadata
@@ -41,6 +41,7 @@ global #extent# %extent of a
 875.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
 False #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
 IASP91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
+P,Pdiff,S,Sdiff #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
 0.01 0.1 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
 0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
 0.01 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
@@ -41,6 +41,7 @@ global #extent# %extent of a
 875.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
 True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
 IASP91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
+P,Pdiff,S,Sdiff #taup_phases# %Specify possible phases for TauPy (comma separated). See Obspy TauPy documentation for possible values.
 0.01 0.1 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
 0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
 0.01 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
@@ -1,6 +1,7 @@
 import os
 import sys
 import unittest
+import pytest

 import obspy
 from obspy import UTCDateTime
@@ -105,7 +106,6 @@ class TestAutopickStation(unittest.TestCase):
        # show complete diff when difference in results dictionaries are found
        self.maxDiff = None

-    # @skip("Works")
    def test_autopickstation_taupy_disabled_gra1(self):
        expected = {
            'P': {'picker': 'auto', 'snrdb': 15.405649120980094, 'weight': 0, 'Mo': None, 'marked': [], 'Mw': None,
@@ -121,8 +121,8 @@ class TestAutopickStation(unittest.TestCase):
        with HidePrints():
            result, station = autopickstation(wfstream=self.gra1, pickparam=self.pickparam_taupy_disabled,
                                              metadata=(None, None))
-        self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
-        self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
        self.assertEqual('GRA1', station)

    def test_autopickstation_taupy_enabled_gra1(self):
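The tests swap unittest's deprecated assertDictContainsSubset for a project helper, compare_dicts, which also takes a hint prefix for its messages. Its implementation is not part of this diff; a minimal, hypothetical stand-in with the same call signature could look like this:

```python
def compare_dicts(expected, result, hint=''):
    """Hypothetical stand-in: assert that every expected key/value pair is present in result."""
    for key, value in expected.items():
        assert key in result, '{}missing key: {}'.format(hint, key)
        assert result[key] == value, '{}{}: expected {!r}, got {!r}'.format(hint, key, value, result[key])

# Example usage with made-up pick dictionaries:
compare_dicts(expected={'picker': 'auto', 'weight': 0},
              result={'picker': 'auto', 'weight': 0, 'channel': 'LHZ'},
              hint='P-')
```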
@@ -140,8 +140,8 @@ class TestAutopickStation(unittest.TestCase):
         with HidePrints():
             result, station = autopickstation(wfstream=self.gra1, pickparam=self.pickparam_taupy_enabled,
                                               metadata=self.metadata, origin=self.origin)
-        self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
-        self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
         self.assertEqual('GRA1', station)

     def test_autopickstation_taupy_disabled_gra2(self):
@@ -157,8 +157,8 @@ class TestAutopickStation(unittest.TestCase):
         with HidePrints():
             result, station = autopickstation(wfstream=self.gra2, pickparam=self.pickparam_taupy_disabled,
                                               metadata=(None, None))
-        self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
-        self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
         self.assertEqual('GRA2', station)

     def test_autopickstation_taupy_enabled_gra2(self):
@@ -175,8 +175,8 @@ class TestAutopickStation(unittest.TestCase):
         with HidePrints():
             result, station = autopickstation(wfstream=self.gra2, pickparam=self.pickparam_taupy_enabled,
                                               metadata=self.metadata, origin=self.origin)
-        self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
-        self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
         self.assertEqual('GRA2', station)

     def test_autopickstation_taupy_disabled_ech(self):
@@ -190,8 +190,8 @@ class TestAutopickStation(unittest.TestCase):
                   'fm': None, 'spe': None, 'channel': u'LHE'}}
         with HidePrints():
             result, station = autopickstation(wfstream=self.ech, pickparam=self.pickparam_taupy_disabled)
-        self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
-        self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
         self.assertEqual('ECH', station)

     def test_autopickstation_taupy_enabled_ech(self):
@@ -208,8 +208,8 @@ class TestAutopickStation(unittest.TestCase):
         with HidePrints():
             result, station = autopickstation(wfstream=self.ech, pickparam=self.pickparam_taupy_enabled,
                                               metadata=self.metadata, origin=self.origin)
-        self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
-        self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
         self.assertEqual('ECH', station)

     def test_autopickstation_taupy_disabled_fiesa(self):
@@ -224,8 +224,8 @@ class TestAutopickStation(unittest.TestCase):
                   'fm': None, 'spe': None, 'channel': u'LHE'}}
         with HidePrints():
             result, station = autopickstation(wfstream=self.fiesa, pickparam=self.pickparam_taupy_disabled)
-        self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
-        self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
         self.assertEqual('FIESA', station)

     def test_autopickstation_taupy_enabled_fiesa(self):
@@ -242,8 +242,8 @@ class TestAutopickStation(unittest.TestCase):
         with HidePrints():
             result, station = autopickstation(wfstream=self.fiesa, pickparam=self.pickparam_taupy_enabled,
                                               metadata=self.metadata, origin=self.origin)
-        self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
-        self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
         self.assertEqual('FIESA', station)

     def test_autopickstation_gra1_z_comp_missing(self):
@@ -272,7 +272,8 @@ class TestAutopickStation(unittest.TestCase):
         with HidePrints():
             result, station = autopickstation(wfstream=wfstream, pickparam=self.pickparam_taupy_disabled,
                                               metadata=(None, None))
-        self.assertEqual(expected, result)
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
         self.assertEqual('GRA1', station)

     def test_autopickstation_a106_taupy_enabled(self):
@@ -290,7 +291,9 @@ class TestAutopickStation(unittest.TestCase):
         with HidePrints():
             result, station = autopickstation(wfstream=self.a106, pickparam=self.pickparam_taupy_enabled,
                                               metadata=self.metadata, origin=self.origin)
-        self.assertEqual(expected, result)
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
+

     def test_autopickstation_station_missing_in_metadata(self):
         """This station is not in the metadata, but Taupy is enabled. Taupy should exit cleanly and modify the starttime
@@ -311,8 +314,37 @@ class TestAutopickStation(unittest.TestCase):
         with HidePrints():
             result, station = autopickstation(wfstream=self.a005a, pickparam=self.pickparam_taupy_enabled,
                                               metadata=self.metadata, origin=self.origin)
-        self.assertEqual(expected, result)
+        compare_dicts(expected=expected['P'], result=result['P'], hint='P-')
+        compare_dicts(expected=expected['S'], result=result['S'], hint='S-')
+
+
+def run_dict_comparison(result, expected):
+    for key, expected_value in expected.items():
+        if isinstance(expected_value, dict):
+            run_dict_comparison(result[key], expected[key])
+        else:
+            res = result[key]
+            if isinstance(res, UTCDateTime) and isinstance(expected_value, UTCDateTime):
+                res = res.timestamp
+                expected_value = expected_value.timestamp
+            assert expected_value == pytest.approx(res), f'{key}: {expected_value} != {res}'
+
+
+def compare_dicts(result, expected, hint=''):
+    try:
+        run_dict_comparison(result, expected)
+    except AssertionError:
+        raise AssertionError(f'{hint}Dictionaries not equal.'
+                             f'\n\n<<Expected>>\n{pretty_print_dict(expected)}'
+                             f'\n\n<<Result>>\n{pretty_print_dict(result)}')
+
+
+def pretty_print_dict(dct):
+    retstr = ''
+    for key, value in sorted(dct.items(), key=lambda x: x[0]):
+        retstr += f"{key} : {value}\n"
+    return retstr
+

 if __name__ == '__main__':
     unittest.main()
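
The run_dict_comparison/compare_dicts helpers added above compare pick dictionaries recursively, turning UTCDateTime values into timestamps and using pytest.approx for numeric tolerance. A hypothetical standalone demonstration (the dictionaries and values below are made up for illustration and are not part of the diff):

from obspy import UTCDateTime

expected = {'P': {'mpp': UTCDateTime('2009-08-24T00:20:17.7'), 'weight': 0}}
result = {'P': {'mpp': UTCDateTime('2009-08-24T00:20:17.7') + 1e-7, 'weight': 0}}

# passes: UTCDateTime picks are compared as timestamps via pytest.approx
compare_dicts(result=result, expected=expected, hint='P-')

result['P']['weight'] = 4
# now raises AssertionError and lists both dictionaries via pretty_print_dict
compare_dicts(result=result, expected=expected, hint='P-')
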
@@ -0,0 +1,76 @@
+import pytest
+from obspy import read, Trace, UTCDateTime
+
+from pylot.correlation.pick_correlation_correction import XCorrPickCorrection
+
+
+class TestXCorrPickCorrection():
+    def setup(self):
+        self.make_test_traces()
+        self.make_test_picks()
+        self.t_before = 2.
+        self.t_after = 2.
+        self.cc_maxlag = 0.5
+
+    def make_test_traces(self):
+        # take first trace of test Stream from obspy
+        tr1 = read()[0]
+        # filter trace
+        tr1.filter('bandpass', freqmin=1, freqmax=20)
+        # make a copy and shift the copy by 0.1 s
+        tr2 = tr1.copy()
+        tr2.stats.starttime += 0.1
+
+        self.trace1 = tr1
+        self.trace2 = tr2
+
+    def make_test_picks(self):
+        # create an artificial reference pick on reference trace (trace1) and another one on the 0.1 s shifted trace
+        self.tpick1 = UTCDateTime('2009-08-24T00:20:07.7')
+        # shift the second pick by 0.2 s, the correction should be around 0.1 s now
+        self.tpick2 = self.tpick1 + 0.2
+
+    def test_slice_trace_okay(self):
+
+        self.setup()
+        xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
+                                   t_before=self.t_before, t_after=self.t_after, cc_maxlag=self.cc_maxlag)
+
+        test_trace = self.trace1
+        pick_time = self.tpick2
+
+        sliced_trace = xcpc.slice_trace(test_trace, pick_time)
+        assert ((sliced_trace.stats.starttime == pick_time - self.t_before - self.cc_maxlag / 2)
+                and (sliced_trace.stats.endtime == pick_time + self.t_after + self.cc_maxlag / 2))
+
+    def test_slice_trace_fails(self):
+        self.setup()
+
+        test_trace = self.trace1
+        pick_time = self.tpick1
+
+        with pytest.raises(ValueError):
+            xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
+                                       t_before=self.t_before + 20, t_after=self.t_after, cc_maxlag=self.cc_maxlag)
+            xcpc.slice_trace(test_trace, pick_time)
+
+        with pytest.raises(ValueError):
+            xcpc = XCorrPickCorrection(UTCDateTime(), Trace(), UTCDateTime(), Trace(),
+                                       t_before=self.t_before, t_after=self.t_after + 50, cc_maxlag=self.cc_maxlag)
+            xcpc.slice_trace(test_trace, pick_time)
+
+    def test_cross_correlation(self):
+        self.setup()
+
+        # create XCorrPickCorrection object
+        xcpc = XCorrPickCorrection(self.tpick1, self.trace1, self.tpick2, self.trace2, t_before=self.t_before,
+                                   t_after=self.t_after, cc_maxlag=self.cc_maxlag)
+
+        # execute correlation
+        correction, cc_max, uncert, fwfm = xcpc.cross_correlation(False, '', '')
+
+        # define awaited test result
+        test_result = (-0.09983091718314982, 0.9578431835689154, 0.0015285160561610929, 0.03625786256084631)
+
+        # check results
+        assert pytest.approx(test_result, rel=1e-6) == (correction, cc_max, uncert, fwfm)
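
As a plausibility check on the expected values in test_cross_correlation above: trace2 is trace1 delayed by 0.1 s, while tpick2 is placed 0.2 s after tpick1, so the second pick sits about 0.1 s too late relative to the waveform shift and the correction should come out near -0.1 s. A back-of-the-envelope sketch, assuming only that the correction equals the waveform lag minus the pick offset:

# illustrative arithmetic only, not part of the test file
waveform_shift = 0.1   # tr2.stats.starttime - tr1.stats.starttime
pick_offset = 0.2      # tpick2 - tpick1
expected_correction = waveform_shift - pick_offset
print(expected_correction)  # -0.1, matching the ~-0.0998 s obtained from the cross correlation
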