2015-03-30 14:35:21 +02:00
|
|
|
#!/usr/bin/env python
|
2015-10-19 05:32:10 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2015-03-30 14:35:21 +02:00
|
|
|
#
|
|
|
|
"""
|
|
|
|
Created Mar/Apr 2015
|
|
|
|
Collection of helpful functions for manual and automatic picking.
|
|
|
|
|
2017-04-06 13:16:28 +02:00
|
|
|
:author: Ludger Kueperkoch, BESTEC GmbH
|
2015-03-30 14:35:21 +02:00
|
|
|
"""
|
2015-11-11 14:51:14 +01:00
|
|
|
|
2016-05-01 21:10:30 +02:00
|
|
|
import warnings
|
2017-08-03 09:41:54 +02:00
|
|
|
|
2015-03-30 14:35:21 +02:00
|
|
|
import matplotlib.pyplot as plt
|
2016-05-01 21:10:30 +02:00
|
|
|
import numpy as np
|
2015-06-19 15:28:53 +02:00
|
|
|
from obspy.core import Stream, UTCDateTime
|
2015-07-09 11:37:03 +02:00
|
|
|
|
|
|
|
|
2017-07-14 11:09:18 +02:00
|
|
|
def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None):
    '''
    Function to derive earliest and latest possible pick after Diehl & Kissling (2009)
    as reasonable uncertainties. The latest possible pick is based on the noise
    level, the earliest possible pick is half a signal wavelength in front of the
    most likely pick given by PragPicker or manually set by the analyst. The most
    likely pick (initial pick Pick1) must be given.

    :param: X, time series (seismogram)
    :type: `~obspy.core.stream.Stream`

    :param: nfac (noise factor), nfac times noise level to calculate latest possible pick
    :type: int

    :param: TSNR, length of time windows around pick used to determine SNR [s]
    :type: tuple (T_noise, T_gap, T_signal)

    :param: Pick1, initial (most likely) onset time, starting point for earllatepicker
    :type: float

    :param: iplot, if given, results are plotted in figure(iplot)
    :type: int

    :return: tuple (EPick, LPick, PickError); all three are None when the
             signal never exceeds the noise level
    '''

    assert isinstance(X, Stream), "%s is not a stream object" % str(X)

    if verbosity == 2:
        print('earllatepicker:')
        print('nfac:', nfac)
        print('Init pick:', Pick1)
        print('TSNR (T_noise, T_gap, T_signal):', TSNR)

    LPick = None
    EPick = None
    PickError = None
    plt_flag = 0
    # iplot may arrive as an int, a numeric string or a bool/'True' string;
    # normalize it to an int (narrowed from a bare `except:`)
    try:
        iplot = int(iplot)
    except (TypeError, ValueError):
        if iplot == True or iplot == 'True':
            iplot = 2
        else:
            iplot = 0

    if verbosity:
        print('earllatepicker: Get earliest and latest possible pick'
              ' relative to most likely pick ...')

    x = X[0].data
    t = np.arange(0, X[0].stats.npts / X[0].stats.sampling_rate,
                  X[0].stats.delta)
    # get noise window
    inoise = getnoisewin(t, Pick1, TSNR[0], TSNR[1])
    # get signal window
    isignal = getsignalwin(t, Pick1, TSNR[2])
    # remove mean (offset estimated from the noise window)
    x = x - np.mean(x[inoise])
    # calculate noise level (RMS of the noise window, scaled by nfac)
    nlevel = np.sqrt(np.mean(np.square(x[inoise]))) * nfac
    if verbosity == 2:
        print('x:', x)
        print('t:', t)
        print('x_inoise:', x[inoise])
        print('x_isignal:', x[isignal])
        print('nlevel:', nlevel)

    # get time where signal exceeds nlevel
    ilup, = np.where(x[isignal] > nlevel)
    ildown, = np.where(x[isignal] < -nlevel)
    if not ilup.size and not ildown.size:
        if verbosity:
            print("earllatepicker: Signal lower than noise level!\n"
                  "Skip this trace!")
        # BUGFIX: return in the same (EPick, LPick, PickError) order as the
        # regular return below; the old code returned (LPick, EPick, ...) here.
        # All three values are None at this point, so callers are unaffected.
        return EPick, LPick, PickError
    il = min(np.min(ilup) if ilup.size else float('inf'),
             np.min(ildown) if ildown.size else float('inf'))
    # latest possible pick: first sample whose amplitude exceeds the noise level
    LPick = t[isignal][il]

    # get earliest possible pick
    EPick = np.nan
    count = 0
    pis = isignal

    # if EPick stays NaN the signal window size will be doubled
    while np.isnan(EPick):
        if count > 0:
            if verbosity:
                print("\nearllatepicker: Doubled signal window size %s time(s) "
                      "because of NaN for earliest pick." % count)
            isigDoubleWinStart = pis[-1] + 1
            isignalDoubleWin = np.arange(isigDoubleWinStart,
                                         isigDoubleWinStart + len(pis))
            if (isigDoubleWinStart + len(pis)) < X[0].data.size:
                pis = np.concatenate((pis, isignalDoubleWin))
            else:
                if verbosity:
                    print("Could not double signal window. Index out of bounds.")
                break
        count += 1
        # determine all zero crossings in signal window (demeaned)
        zc = crossings_nonzero_all(x[pis] - x[pis].mean())
        # calculate mean half period T0 of signal as the average spacing of
        # the zero crossings (this is half a wave length!)
        T0 = np.mean(np.diff(zc)) * X[0].stats.delta
        EPick = Pick1 - T0  # half wavelength as suggested by Diehl et al.

    # get symmetric pick error as mean from earliest and latest possible pick
    # by weighting latest possible pick two times earliest possible pick
    diffti_tl = LPick - Pick1
    diffti_te = Pick1 - EPick
    PickError = symmetrize_error(diffti_te, diffti_tl)

    if iplot > 1:
        if fig is None or fig == 'None':
            fig = plt.figure()  # iplot)
            plt_flag = 1
        ax = fig.add_subplot(111)
        ax.plot(t, x, 'k', label='Data')
        ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
        ax.axvspan(t[isignal[0]], t[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
        ax.plot([t[0], t[int(len(t)) - 1]], [nlevel, nlevel], '--k', label='Noise Level')
        # NOTE(review): zc indexes into pis (the possibly doubled window), not
        # isignal -- if the window was doubled this may address the wrong
        # samples; confirm before relying on this marker
        ax.plot(t[isignal[zc]], np.zeros(len(zc)), '*g',
                markersize=14, label='Zero Crossings')
        ax.plot([t[0], t[int(len(t)) - 1]], [-nlevel, -nlevel], '--k')
        ax.plot([Pick1, Pick1], [max(x), -max(x)], 'b', linewidth=2, label='mpp')
        ax.plot([LPick, LPick], [max(x) / 2, -max(x) / 2], '--k', label='lpp')
        ax.plot([EPick, EPick], [max(x) / 2, -max(x) / 2], '--k', label='epp')
        ax.plot([Pick1 + PickError, Pick1 + PickError],
                [max(x) / 2, -max(x) / 2], 'r--', label='spe')
        ax.plot([Pick1 - PickError, Pick1 - PickError],
                [max(x) / 2, -max(x) / 2], 'r--')
        ax.set_xlabel('Time [s] since %s' % X[0].stats.starttime)
        ax.set_yticks([])
        ax.set_title(
            'Earliest-/Latest Possible/Most Likely Pick & Symmetric Pick Error, %s' %
            X[0].stats.station)
        ax.legend()
        if plt_flag == 1:
            fig.show()
            # interactive pause; harmless SyntaxError on empty input is ignored
            try:
                input()
            except SyntaxError:
                pass
            plt.close(fig)

    return EPick, LPick, PickError
|
2015-03-30 14:35:21 +02:00
|
|
|
|
|
|
|
|
2017-07-14 11:09:18 +02:00
|
|
|
def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
    '''
    Function to derive first motion (polarity) of given phase onset Pick.
    Calculation is based on zero crossings determined within time window pickwin
    after given onset time.

    :param: Xraw, unfiltered time series (seismogram)
    :type: `~obspy.core.stream.Stream`

    :param: Xfilt, filtered time series (seismogram)
    :type: `~obspy.core.stream.Stream`

    :param: pickwin, time window after onset Pick within zero crossings are calculated
    :type: float

    :param: Pick, initial (most likely) onset time, starting point for fmpicker
    :type: float

    :param: iplot, if given, results are plotted in figure(iplot)
    :type: int

    :return: 'U'/'D' when raw and filtered traces agree, '+'/'-' when only
             one of them indicates a direction, None when undetermined
    '''

    plt_flag = 0
    # normalize iplot to an int; bool True / string 'True' enables plotting
    try:
        iplot = int(iplot)
    except:
        if iplot == True or iplot == 'True':
            iplot = 2
        else:
            iplot = 0

    # suppress poorly-conditioned-fit warnings raised by the 1st-order
    # polyfits below
    warnings.simplefilter('ignore', np.RankWarning)

    assert isinstance(Xraw, Stream), "%s is not a stream object" % str(Xraw)
    assert isinstance(Xfilt, Stream), "%s is not a stream object" % str(Xfilt)

    FM = None
    if Pick is not None:
        print("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")

        xraw = Xraw[0].data
        xfilt = Xfilt[0].data
        t = np.arange(0, Xraw[0].stats.npts / Xraw[0].stats.sampling_rate,
                      Xraw[0].stats.delta)
        # get pick window
        # NOTE(review): len(Xraw[0]) is a sample count compared against times
        # in t -- presumably t[-1] was intended as the upper clip; confirm
        ipick = np.where(
            (t <= min([Pick + pickwin, len(Xraw[0])])) & (t >= Pick))
        # remove mean inside the pick window (both traces, in place)
        xraw[ipick] = xraw[ipick] - np.mean(xraw[ipick])
        xfilt[ipick] = xfilt[ipick] - np.mean(xfilt[ipick])

        # get zero crossings after most likely pick
        # initial onset is assumed to be the first zero crossing
        # first from unfiltered trace
        zc1 = []
        zc1.append(Pick)
        index1 = []
        i = 0
        # scan consecutive samples of the pick window for sign changes;
        # stop after the first two crossings found (three entries incl. Pick)
        for j in range(ipick[0][1], ipick[0][len(t[ipick]) - 1]):
            i = i + 1
            if xraw[j - 1] <= 0 <= xraw[j]:
                zc1.append(t[ipick][i])
                index1.append(i)
            elif xraw[j - 1] > 0 >= xraw[j]:
                zc1.append(t[ipick][i])
                index1.append(i)
            if len(zc1) == 3:
                break

        if len(zc1) < 3:
            print('fmpicker: Could not determine zero crossings!')
            return

        # if time difference betweeen 1st and 2cnd zero crossing
        # is too short, get time difference between 1st and 3rd
        # to derive maximum
        if zc1[1] - zc1[0] <= Xraw[0].stats.delta:
            li1 = index1[1]
        else:
            li1 = index1[0]
        if np.size(xraw[ipick[0][1]:ipick[0][li1]]) == 0 or len(index1) <= 1:
            print("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
            P1 = None
        else:
            # sample index of the absolute maximum between onset and the
            # chosen zero crossing
            imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][li1]]))
            if imax1 == 0:
                # retry with the wider window up to the second crossing
                imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][index1[1]]]))
                if imax1 == 0:
                    print("fmpicker: Zero crossings too close!")
                    print("Skip first motion determination!")
                    return FM

            islope1 = np.where((t >= Pick) & (t <= Pick + t[imax1]))
            # calculate slope as polynomal fit of order 1
            xslope1 = np.arange(0, len(xraw[islope1]), 1)
            P1 = np.polyfit(xslope1, xraw[islope1], 1)
            datafit1 = np.polyval(P1, xslope1)

        # now using filterd trace
        # next zero crossings after most likely pick
        zc2 = []
        zc2.append(Pick)
        index2 = []
        i = 0
        for j in range(ipick[0][1], ipick[0][len(t[ipick]) - 1]):
            i = i + 1
            if xfilt[j - 1] <= 0 <= xfilt[j]:
                zc2.append(t[ipick][i])
                index2.append(i)
            elif xfilt[j - 1] > 0 >= xfilt[j]:
                zc2.append(t[ipick][i])
                index2.append(i)
            if len(zc2) == 3:
                break

        # if time difference betweeen 1st and 2cnd zero crossing
        # is too short, get time difference between 1st and 3rd
        # to derive maximum
        if zc2[1] - zc2[0] <= Xfilt[0].stats.delta:
            li2 = index2[1]
        else:
            li2 = index2[0]
        if np.size(xfilt[ipick[0][1]:ipick[0][li2]]) == 0 or len(index2) <= 1:
            print("fmpicker: Onset on filtered trace too emergent for first motion determination!")
            P2 = None
        else:
            imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][li2]]))
            if imax2 == 0:
                imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][index2[1]]]))
                if imax2 == 0:
                    print("fmpicker: Zero crossings too close!")
                    print("Skip first motion determination!")
                    return FM

            islope2 = np.where((t >= Pick) & (t <= Pick + t[imax2]))
            # calculate slope as polynomal fit of order 1
            xslope2 = np.arange(0, len(xfilt[islope2]), 1)
            P2 = np.polyfit(xslope2, xfilt[islope2], 1)
            datafit2 = np.polyval(P2, xslope2)

        # compare results: both slopes agree -> strong 'D'/'U',
        # disagreement -> weak '-'/'+'
        if P1 is not None and P2 is not None:
            if P1[0] < 0 and P2[0] < 0:
                FM = 'D'
            elif P1[0] >= 0 > P2[0]:
                FM = '-'
            elif P1[0] < 0 <= P2[0]:
                FM = '-'
            elif P1[0] > 0 and P2[0] > 0:
                FM = 'U'
            elif P1[0] <= 0 < P2[0]:
                FM = '+'
            elif P1[0] > 0 >= P2[0]:
                FM = '+'

        print("fmpicker: Found polarity %s" % FM)

    if iplot > 1:
        if fig == None or fig == 'None':
            fig = plt.figure()  # iplot)
            plt_flag = 1
        ax1 = fig.add_subplot(211)
        ax1.plot(t, xraw, 'k')
        ax1.plot([Pick, Pick], [max(xraw), -max(xraw)], 'b', linewidth=2, label='Pick')
        if P1 is not None:
            ax1.plot(t[islope1], xraw[islope1], label='Slope Window')
            ax1.plot(zc1, np.zeros(len(zc1)), '*g', markersize=14, label='Zero Crossings')
            ax1.plot(t[islope1], datafit1, '--g', linewidth=2)
            ax1.legend()
            ax1.text(Pick + 0.02, max(xraw) / 2, '%s' % FM, fontsize=14)
        ax1.set_yticks([])
        ax1.set_title('First-Motion Determination, %s, Unfiltered Data' % Xraw[
            0].stats.station)

        ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
        ax2.set_title('First-Motion Determination, Filtered Data')
        ax2.plot(t, xfilt, 'k')
        ax2.plot([Pick, Pick], [max(xfilt), -max(xfilt)], 'b',
                 linewidth=2)
        if P2 is not None:
            ax2.plot(t[islope2], xfilt[islope2])
            ax2.plot(zc2, np.zeros(len(zc2)), '*g', markersize=14)
            ax2.plot(t[islope2], datafit2, '--g', linewidth=2)
            # NOTE(review): label height uses max(xraw) on the filtered
            # panel -- presumably max(xfilt) was intended; confirm
            ax2.text(Pick + 0.02, max(xraw) / 2, '%s' % FM, fontsize=14)
        ax2.set_xlabel('Time [s] since %s' % Xraw[0].stats.starttime)
        ax2.set_yticks([])
        if plt_flag == 1:
            fig.show()
            try: input()
            except SyntaxError: pass
            plt.close(fig)

    return FM
|
2015-03-30 14:35:21 +02:00
|
|
|
|
2015-06-22 11:06:53 +02:00
|
|
|
|
2015-04-21 08:14:26 +02:00
|
|
|
def crossings_nonzero_all(data):
    """
    Return the indices i at which *data* changes sign between samples
    i and i+1 (values <= 0 count as the negative side).
    """
    above = data > 0
    below = ~above
    # upward crossing: non-positive followed by positive
    upward = below[:-1] & above[1:]
    # downward crossing: positive followed by non-positive
    downward = above[:-1] & below[1:]
    return np.flatnonzero(upward | downward)
|
2015-03-30 14:35:21 +02:00
|
|
|
|
2015-06-22 11:06:53 +02:00
|
|
|
|
2016-09-21 14:12:58 +02:00
|
|
|
def symmetrize_error(dte, dtl):
    """
    takes earliest and latest possible pick and returns the symmetrized pick
    uncertainty value
    :param dte: relative lower uncertainty
    :param dtl: relative upper uncertainty
    :return: symmetrized error
    """
    # the latest-pick uncertainty is weighted twice as heavily as the
    # earliest-pick one
    weighted_sum = dte + 2 * dtl
    return weighted_sum / 3
|
|
|
|
|
|
|
|
|
2016-03-02 11:06:31 +01:00
|
|
|
def getSNR(X, TSNR, t1, tracenum=0):
    '''
    Function to calculate SNR of certain part of seismogram relative to
    given time (onset) out of given noise and signal windows. A safety gap
    between noise and signal part can be set. Returns SNR and SNR [dB] and
    noiselevel.

    :param: X, time series (seismogram)
    :type: `~obspy.core.stream.Stream`

    :param: TSNR, length of time windows [s] around t1 (onset) used to determine SNR
    :type: tuple (T_noise, T_gap, T_signal)

    :param: t1, initial time (onset) from which noise and signal windows are calculated
    :type: float
    '''

    assert isinstance(X, Stream), "%s is not a stream object" % str(X)

    SNR = None
    SNRdB = None
    noiselevel = None

    trace = X[tracenum]
    x = trace.data
    t = np.arange(0, trace.stats.npts / trace.stats.sampling_rate,
                  trace.stats.delta)

    # indices of the noise window (before onset, minus safety gap)
    inoise = getnoisewin(t, t1, TSNR[0], TSNR[1])

    # indices of the signal window (after onset)
    isignal = getsignalwin(t, t1, TSNR[2])
    if np.size(inoise) < 1:
        print("getSNR: Empty array inoise, check noise window!")
        return SNR, SNRdB, noiselevel

    # demean over entire waveform using the noise-window mean
    x = x - np.mean(x[inoise])

    # noise level: RMS amplitude of the noise window
    noiselevel = np.sqrt(np.mean(np.square(x[inoise])))

    if np.size(isignal) < 1:
        print("getSNR: Empty array isignal, check signal window!")
        return SNR, SNRdB, noiselevel

    # signal level: peak absolute amplitude within the signal window
    signallevel = np.abs(x[isignal]).max()

    SNR = signallevel / noiselevel
    SNRdB = 10 * np.log10(SNR)

    return SNR, SNRdB, noiselevel
|
2015-03-30 14:35:21 +02:00
|
|
|
|
2015-03-30 16:22:20 +02:00
|
|
|
|
|
|
|
def getnoisewin(t, t1, tnoise, tgap):
    '''
    Function to extract indeces of data out of time series for noise calculation.
    Returns an array of indeces covering [t1 - tnoise - tgap, t1 - tgap],
    with both bounds clipped at zero.

    :param: t, array of time stamps
    :type: numpy array

    :param: t1, time from which relativ to it noise window is extracted
    :type: float

    :param: tnoise, length of time window [s] for noise part extraction
    :type: float

    :param: tgap, safety gap between t1 (onset) and noise window to
            ensure, that noise window contains no signal
    :type: float
    '''

    # noise window bounds, clipped so they never reach before the trace start
    upper_bound = max([t1 - tgap, 0])
    lower_bound = max([t1 - tnoise - tgap, 0])
    inoise, = np.where((t <= upper_bound) & (t >= lower_bound))
    if np.size(inoise) < 1:
        # fall back to everything between trace start and the onset
        inoise, = np.where((t >= t[0]) & (t <= t1))
        if np.size(inoise) < 1:
            print("getnoisewin: Empty array inoise, check noise window!")

    return inoise
|
|
|
|
|
|
|
|
|
|
|
|
def getsignalwin(t, t1, tsignal):
    '''
    Function to extract data out of time series for signal level calculation.
    Returns an array of indeces covering [t1, t1 + tsignal], clipped at the
    end of the trace.

    :param: t, array of time stamps
    :type: numpy array

    :param: t1, time from which relativ to it signal window is extracted
    :type: float

    :param: tsignal, length of time window [s] for signal level calculation
    :type: float
    '''

    # signal window end, clipped at the last available time stamp
    window_end = min([t1 + tsignal, t[-1]])
    isignal, = np.where((t <= window_end) & (t >= t1))
    if np.size(isignal) < 1:
        print("getsignalwin: Empty array isignal, check signal window!")

    return isignal
|
2015-04-02 15:45:38 +02:00
|
|
|
|
2015-06-22 11:06:53 +02:00
|
|
|
|
2017-05-31 10:28:31 +02:00
|
|
|
def getResolutionWindow(snr, extent='local'):
    """
    Number -> Float
    produce the half of the time resolution window width from given SNR
    value
          SNR >= 3    ->  2 sec  HRW
      3 > SNR >= 2    ->  5 sec  MRW
      2 > SNR >= 1.5  -> 10 sec  LRW
    1.5 > SNR         -> 15 sec VLRW
    see also Diehl et al. 2009

    :parameter: extent, can be 'local' (default), 'regional', 'global'

    >>> getResolutionWindow(0.5)
    7.5
    >>> getResolutionWindow(1.8)
    5.0
    >>> getResolutionWindow(2.3)
    2.5
    >>> getResolutionWindow(4)
    1.0
    >>> getResolutionWindow(2)
    2.5
    """

    res_wins = {
        'regional': {'HRW': 2., 'MRW': 5., 'LRW': 10., 'VLRW': 15.},
        'local': {'HRW': 2., 'MRW': 5., 'LRW': 10., 'VLRW': 15.},
        'global': {'HRW': 40., 'MRW': 100., 'LRW': 200., 'VLRW': 300.}
    }

    if snr:
        if snr < 1.5:
            time_resolution = res_wins[extent]['VLRW']
        elif snr < 2.:
            time_resolution = res_wins[extent]['LRW']
        elif snr < 3.:
            time_resolution = res_wins[extent]['MRW']
        else:
            # BUGFIX: snr >= 3 maps to HRW; the former `elif snr > 3.` left
            # snr == 3.0 unhandled and raised UnboundLocalError below
            time_resolution = res_wins[extent]['HRW']
    else:
        # snr of None/0 -> assume worst resolution
        time_resolution = res_wins[extent]['VLRW']

    return time_resolution / 2
|
2015-06-25 10:21:52 +02:00
|
|
|
|
|
|
|
|
2016-05-03 13:54:59 +02:00
|
|
|
def select_for_phase(st, phase):
    '''
    takes a Stream object and a phase name and returns that particular component
    which presumably shows the chosen PHASE best

    :param st: stream object containing one or more component[s]
    :type st: `~obspy.core.stream.Stream`
    :param phase: label of the phase for which the stream selection is carried
        out; 'P' or 'S'
    :type phase: str
    :return: stream with the matching (and alternative-numbered) components
    '''
    from pylot.core.util.defaults import SetChannelComponents

    selection = Stream()
    compclass = SetChannelComponents()
    label = phase.upper()
    # P is best seen on the vertical, S on the horizontals
    if label == 'P':
        components = 'Z'
    elif label == 'S':
        components = 'NE'
    else:
        raise TypeError('Unknown phase label: {0}'.format(phase))
    for component in components:
        # also accept the numeric alias of the component (e.g. '3' for 'Z')
        alias = str(compclass.getCompPosition(component)[0])
        selection += st.select(component=component)
        selection += st.select(component=alias)
    return selection
|
|
|
|
|
|
|
|
|
2015-06-19 15:28:53 +02:00
|
|
|
def wadaticheck(pickdic, dttolerance, iplot):
    '''
    Function to calculate Wadati-diagram from given P and S onsets in order
    to detect S pick outliers. If a certain S-P time deviates by dttolerance
    from regression of S-P time the S pick is marked and down graded.

    : param: pickdic, dictionary containing picks and quality parameters
    : type: dictionary

    : param: dttolerance, maximum adjusted deviation of S-P time from
             S-P time regression
    : type: float

    : param: iplot, if iplot > 0, Wadati diagram is shown
    : type: int

    : return: the (possibly modified) pick dictionary; S picks failing the
              check get weight 9 and a 'badWadatiCheck' marker
    '''

    checkedonsets = pickdic

    # search for good quality picks and calculate S-P time
    Ppicks = []
    Spicks = []
    SPtimes = []
    for key in pickdic:
        if pickdic[key]['P']['weight'] < 4 and pickdic[key]['S']['weight'] < 4:
            # calculate S-P time
            spt = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
            # add S-P time to dictionary
            pickdic[key]['SPt'] = spt
            # add P onsets and corresponding S-P times to list
            UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
            UTCSpick = UTCDateTime(pickdic[key]['S']['mpp'])
            Ppicks.append(UTCPpick.timestamp)
            Spicks.append(UTCSpick.timestamp)
            SPtimes.append(spt)

    if len(SPtimes) >= 3:
        # calculate slope of S-P times over P onset times
        p1 = np.polyfit(Ppicks, SPtimes, 1)
        wdfit = np.polyval(p1, Ppicks)
        wfitflag = 0

        # calculate vp/vs ratio before check
        vpvsr = p1[0] + 1
        print("###############################################")
        print("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)

        checkedPpicks = []
        checkedSpicks = []
        checkedSPtimes = []
        # calculate deviations from Wadati regression
        ii = 0
        ibad = 0
        for key in pickdic:
            # BUGFIX: dict.has_key() does not exist in Python 3; use `in`
            if 'SPt' in pickdic[key]:
                wddiff = abs(pickdic[key]['SPt'] - wdfit[ii])
                ii += 1
                # check, if deviation is larger than adjusted tolerance
                if wddiff > dttolerance:
                    # mark onset and downgrade S-weight to 9
                    # (not used anymore)
                    marker = 'badWadatiCheck'
                    pickdic[key]['S']['weight'] = 9
                    ibad += 1
                else:
                    marker = 'goodWadatiCheck'
                    checkedPpick = UTCDateTime(pickdic[key]['P']['mpp'])
                    checkedPpicks.append(checkedPpick.timestamp)
                    checkedSpick = UTCDateTime(pickdic[key]['S']['mpp'])
                    checkedSpicks.append(checkedSpick.timestamp)
                    checkedSPtime = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
                    checkedSPtimes.append(checkedSPtime)

                pickdic[key]['S']['marked'] = marker

        if len(checkedPpicks) >= 3:
            # calculate new slope
            p2 = np.polyfit(checkedPpicks, checkedSPtimes, 1)
            wdfit2 = np.polyval(p2, checkedPpicks)

            # calculate vp/vs ratio after check
            cvpvsr = p2[0] + 1
            print("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
            print("wadatacheck: Skipped %d S pick(s)" % ibad)
        else:
            print("###############################################")
            print("wadatacheck: Not enough checked S-P times available!")
            print("Skip Wadati check!")

        checkedonsets = pickdic

    else:
        print("wadaticheck: Not enough S-P times available for reliable regression!")
        print("Skip wadati check!")
        wfitflag = 1

    # plot results
    if iplot > 0:
        plt.figure()  # iplot)
        f1, = plt.plot(Ppicks, SPtimes, 'ro')
        if wfitflag == 0:
            f2, = plt.plot(Ppicks, wdfit, 'k')
            # BUGFIX: wdfit2/cvpvsr only exist when the second regression was
            # computed; the old code raised NameError here otherwise
            if len(checkedPpicks) >= 3:
                f3, = plt.plot(checkedPpicks, checkedSPtimes, 'ko')
                f4, = plt.plot(checkedPpicks, wdfit2, 'g')
                plt.title('Wadati-Diagram, %d S-P Times, Vp/Vs(raw)=%5.2f,' \
                          'Vp/Vs(checked)=%5.2f' % (len(SPtimes), vpvsr, cvpvsr))
                plt.legend([f1, f2, f3, f4], ['Skipped S-Picks', 'Wadati 1',
                                              'Reliable S-Picks', 'Wadati 2'], loc='best')
            else:
                plt.title('Wadati-Diagram, %d S-P Times, Vp/Vs(raw)=%5.2f' %
                          (len(SPtimes), vpvsr))
        else:
            plt.title('Wadati-Diagram, %d S-P Times' % len(SPtimes))

        plt.ylabel('S-P Times [s]')
        plt.xlabel('P Times [s]')

    return checkedonsets
|
2015-06-24 14:15:54 +02:00
|
|
|
|
2017-08-03 09:41:54 +02:00
|
|
|
|
2017-07-17 18:02:29 +02:00
|
|
|
def RMS(X):
    '''
    Function returns root mean square of a given array X
    '''
    data = np.asarray(X)
    # RMS = sqrt( sum(x_i^2) / N ), N taken along the first axis as before
    return np.sqrt(np.sum(np.power(data, 2)) / len(data))
|
|
|
|
|
|
|
|
|
2017-05-12 11:03:41 +02:00
|
|
|
def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fig=None):
    '''
    Function to detect spuriously picked noise peaks.
    Uses RMS trace of all 3 components (if available) to determine
    how many samples [per cent] after P onset exceed a certain
    threshold, calculated from noise level times noise factor.

    : param: X, time series (seismogram)
    : type: `~obspy.core.stream.Stream`

    : param: pick, initial (AIC) P onset time
    : type: float

    : param: TSNR, length of time windows around initial pick [s]
    : type: tuple (T_noise, T_gap, T_signal)

    : param: minsiglength, minimum required signal length [s] to
             declare pick as P onset
    : type: float

    : param: nfac, noise factor (nfac * noise level = threshold)
    : type: float

    : param: minpercent, minimum required percentage of samples
             above calculated threshold
    : type: float

    : param: iplot, if iplot > 1, results are shown in figure
    : type: int
    '''
    plt_flag = 0
    # iplot may arrive as an int, bool or string (e.g. from GUI settings);
    # normalize it to an int without swallowing unrelated errors
    try:
        iplot = int(iplot)
    except (TypeError, ValueError):  # was a bare except: only catch conversion failures
        if iplot is True or iplot == 'True':
            iplot = 2
        else:
            iplot = 0

    assert isinstance(X, Stream), "%s is not a stream object" % str(X)

    print("Checking signal length ...")

    if len(X) > 1:
        # all three components available;
        # make sure all components have equal lengths
        ilen = min(len(X[0].data), len(X[1].data), len(X[2].data))
        x1 = X[0][0:ilen]
        x2 = X[1][0:ilen]
        x3 = X[2][0:ilen]
        # get RMS trace over the three components
        rms = np.sqrt((np.power(x1, 2) + np.power(x2, 2) + np.power(x3, 2)) / 3)
    else:
        # single component: use its absolute amplitude instead of an RMS trace
        x1 = X[0].data
        ilen = len(x1)
        rms = abs(x1)

    t = np.arange(0, ilen / X[0].stats.sampling_rate,
                  X[0].stats.delta)

    # get noise window in front of pick plus safety gap
    inoise = getnoisewin(t, pick, TSNR[0], TSNR[1])
    # get signal window
    isignal = getsignalwin(t, pick, minsiglength)
    # calculate minimum adjusted signal level
    minsiglevel = np.mean(rms[inoise]) * nfac
    # minimum adjusted number of samples over minimum signal level
    minnum = len(isignal) * minpercent / 100
    # get number of samples above minimum adjusted signal level
    numoverthr = len(np.where(rms[isignal] >= minsiglevel)[0])

    if numoverthr >= minnum:
        print("checksignallength: Signal reached required length.")
        returnflag = 1
    else:
        print("checksignallength: Signal shorter than required minimum signal length!")
        print("Presumably picked noise peak, pick is rejected!")
        print("(min. signal length required: %s s)" % minsiglength)
        returnflag = 0

    if iplot > 1:
        if fig is None or fig == 'None':  # was 'fig == None'; use identity test for None
            fig = plt.figure()  # iplot)
            plt_flag = 1
        ax = fig.add_subplot(111)
        ax.plot(t, rms, 'k', label='RMS Data')
        ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
        ax.axvspan(t[isignal[0]], t[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
        ax.plot([t[isignal[0]], t[isignal[-1]]],
                [minsiglevel, minsiglevel], 'g', linewidth=2, label='Minimum Signal Level')
        ax.plot([pick, pick], [min(rms), max(rms)], 'b', linewidth=2, label='Onset')
        ax.legend()
        ax.set_xlabel('Time [s] since %s' % X[0].stats.starttime)
        ax.set_ylabel('Counts')
        ax.set_title('Check for Signal Length, Station %s' % X[0].stats.station)
        ax.set_yticks([])
        if plt_flag == 1:
            fig.show()
            # block until the user acknowledges the interactive figure
            try:
                input()
            except SyntaxError:
                pass
            plt.close(fig)

    return returnflag
|
2015-06-24 14:15:54 +02:00
|
|
|
|
2015-06-26 08:48:24 +02:00
|
|
|
|
|
|
|
def checkPonsets(pickdic, dttolerance, iplot):
    '''
    Function to check statistics of P-onset times: Control deviation from
    median (maximum adjusted deviation = dttolerance) and apply pseudo-
    bootstrapping jackknife.

    : param: pickdic, dictionary containing picks and quality parameters
    : type: dictionary

    : param: dttolerance, maximum adjusted deviation of P-onset time from
             median of all P onsets
    : type: float

    : param: iplot, if iplot > 1, Wadati diagram is shown
    : type: int
    '''
    # search for good quality P picks
    Ppicks = []
    stations = []
    for key in pickdic:
        if pickdic[key]['P']['weight'] < 4:
            # add P onsets to list (as POSIX timestamps for numeric handling)
            UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
            Ppicks.append(UTCPpick.timestamp)
            stations.append(key)

    # apply jackknife bootstrapping on variance of P onsets
    print("###############################################")
    print("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
    [xjack, PHI_pseudo, PHI_sub] = jackknife(Ppicks, 'VAR', 1)
    # get pseudo variances smaller than average variances
    # (times safety factor), these picks passed jackknife test
    ij = np.where(PHI_pseudo <= 5 * xjack)
    # these picks did not pass jackknife test
    badjk = np.where(PHI_pseudo > 5 * xjack)
    badjkstations = np.array(stations)[badjk]
    print("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))
    print(badjkstations)

    # calculate median from these picks
    pmedian = np.median(np.array(Ppicks)[ij])
    # find picks that deviate less than dttolerance from median
    ii = np.where(abs(np.array(Ppicks)[ij] - pmedian) <= dttolerance)
    jj = np.where(abs(np.array(Ppicks)[ij] - pmedian) > dttolerance)
    igood = ij[0][ii]
    ibad = ij[0][jj]
    goodstations = np.array(stations)[igood]
    badstations = np.array(stations)[ibad]

    print("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
    print("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
                                                            + len(badjkstations), len(stations)))

    goodmarker = 'goodPonsetcheck'
    badmarker = 'badPonsetcheck'
    badjkmarker = 'badjkcheck'
    # mark P onset as checked and keep P weight
    for station in goodstations:
        pickdic[station]['P']['marked'] = goodmarker
    # mark P onset and downgrade P weight to 9 (not used anymore)
    for station in badstations:
        pickdic[station]['P']['marked'] = badmarker
        pickdic[station]['P']['weight'] = 9
    # same treatment for picks that failed the jackknife test
    for station in badjkstations:
        pickdic[station]['P']['marked'] = badjkmarker
        pickdic[station]['P']['weight'] = 9

    # checks modify pickdic in place; return it under the documented name
    checkedonsets = pickdic

    if iplot > 0:
        p1, = plt.plot(np.arange(0, len(Ppicks)), Ppicks, 'ro', markersize=14)
        if len(badstations) < 1 and len(badjkstations) < 1:
            p2, = plt.plot(np.arange(0, len(Ppicks)), Ppicks, 'go', markersize=14)
        else:
            p2, = plt.plot(igood, np.array(Ppicks)[igood], 'go', markersize=14)
        p3, = plt.plot([0, len(Ppicks) - 1], [pmedian, pmedian], 'g',
                       linewidth=2)
        for i, station in enumerate(stations):
            plt.text(i, Ppicks[i] + 0.01, '{0}'.format(station))

        plt.xlabel('Number of P Picks')
        plt.ylabel('Onset Time [s] from 1.1.1970')
        plt.legend([p1, p2, p3], ['Skipped P Picks', 'Good P Picks', 'Median'],
                   loc='best')
        plt.title('Jackknifing and Median Tests on P Onsets')

    return checkedonsets
|
2015-06-26 08:48:24 +02:00
|
|
|
|
2015-06-29 16:14:11 +02:00
|
|
|
|
2015-06-26 08:48:24 +02:00
|
|
|
def jackknife(X, phi, h=1):
    '''
    Function to calculate the Jackknife Estimator for a given quantity,
    special type of boot strapping. Returns the jackknife estimator PHI_jack,
    the pseudo values PHI_pseudo and the subgroup parameters PHI_sub.
    Returns (None, None, None) if X cannot be split into equal sized subgroups.

    : param: X, given quantity
    : type: list

    : param: phi, chosen estimator, choose between:
             "MED" for median
             "MEA" for arithmetic mean
             "VAR" for variance
    : type: string

    : param: h, size of subgroups, optional, default = 1
    : type: integer
    '''
    PHI_jack = None
    PHI_pseudo = None
    PHI_sub = None

    # determine number of subgroups.
    # NOTE: the former check 'type(g) is not int' relied on Python-2 integer
    # division and always failed under Python 3 (len(X) / h is a float);
    # test divisibility explicitly instead.
    if h <= 0 or len(X) % h != 0:
        print("jackknife: Cannot divide quantity X in equal sized subgroups!")
        print("Choose another size for subgroups!")
        return PHI_jack, PHI_pseudo, PHI_sub

    g = len(X) // h

    # estimator of undisturbed spot check
    estimators = {'MEA': np.mean, 'VAR': np.var, 'MED': np.median}
    if phi not in estimators:
        # formerly an unset phi_sc raised a confusing NameError further down
        raise ValueError("jackknife: unknown estimator '%s', "
                         "choose 'MEA', 'VAR' or 'MED'" % phi)
    estimate = estimators[phi]
    phi_sc = estimate(X)

    # estimators of subgroups
    PHI_pseudo = []
    PHI_sub = []
    # NOTE: formerly 'range(0, g - 1)', which silently dropped the last
    # subgroup; all g subgroups must contribute to the jackknife estimate
    for i in range(g):
        # subgroup i, remove i-th sample
        xx = X[:]
        del xx[i]
        # calculate estimator of disturbed spot check
        phi_sub = estimate(xx)
        PHI_sub.append(phi_sub)
        # pseudo value
        phi_pseudo = g * phi_sc - ((g - 1) * phi_sub)
        PHI_pseudo.append(phi_pseudo)

    # jackknife estimator
    PHI_jack = np.mean(PHI_pseudo)

    return PHI_jack, PHI_pseudo, PHI_sub
|
|
|
|
|
|
|
|
|
2017-05-12 11:03:41 +02:00
|
|
|
def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
    '''
    Function to compare energy content of vertical trace with
    energy content of horizontal traces to detect spuriously
    picked S onsets instead of P onsets. Usually, P coda shows
    larger longitudinal energy on vertical trace than on horizontal
    traces, where the transversal energy is larger within S coda.
    Be careful: there are special circumstances, where this is not
    the case!

    : param: X, filtered(!) time series, three traces
    : type: `~obspy.core.stream.Stream`

    : param: pick, initial (AIC) P onset time
    : type: float

    : param: zfac, factor for threshold determination,
             vertical energy must exceed coda level times zfac
             to declare a pick as P onset
    : type: float

    : param: checkwin, window length [s] for calculating P-coda
             energy content
    : type: float

    : param: iplot, if iplot > 1, energy content and threshold
             are shown
    : type: int
    '''
    plt_flag = 0
    # iplot may arrive as an int, bool or string; normalize it to an int
    try:
        iplot = int(iplot)
    except (TypeError, ValueError):  # was a bare except: only catch conversion failures
        if iplot is True or iplot == 'True':
            iplot = 2
        else:
            iplot = 0

    assert isinstance(X, Stream), "%s is not a stream object" % str(X)

    print("Check for spuriously picked S onset instead of P onset ...")

    returnflag = 0

    # split components; fall back to numbered channel codes if necessary
    zdat = X.select(component="Z")
    if len(zdat) == 0:  # check for other components
        zdat = X.select(component="3")
    edat = X.select(component="E")
    if len(edat) == 0:  # check for other components
        edat = X.select(component="2")
    ndat = X.select(component="N")
    if len(ndat) == 0:  # check for other components
        ndat = X.select(component="1")

    # get earliest time of all 3 traces
    min_t = min(zdat[0].stats.starttime, edat[0].stats.starttime, ndat[0].stats.starttime)

    # generate time arrays for all 3 traces
    tz = np.arange(0, zdat[0].stats.npts / zdat[0].stats.sampling_rate,
                   zdat[0].stats.delta)
    tn = np.arange(0, ndat[0].stats.npts / ndat[0].stats.sampling_rate,
                   ndat[0].stats.delta)
    te = np.arange(0, edat[0].stats.npts / edat[0].stats.sampling_rate,
                   edat[0].stats.delta)

    # offsets of the individual traces relative to the earliest start time
    zdiff = (zdat[0].stats.starttime - min_t)
    ndiff = (ndat[0].stats.starttime - min_t)
    ediff = (edat[0].stats.starttime - min_t)

    # get signal windows
    isignalz = getsignalwin(tz, pick - zdiff, checkwin)
    isignaln = getsignalwin(tn, pick - ndiff, checkwin)
    isignale = getsignalwin(te, pick - ediff, checkwin)

    # calculate RMS of traces
    rmsz = RMS(zdat[0].data[isignalz])
    rmsn = RMS(ndat[0].data[isignaln])
    rmse = RMS(edat[0].data[isignale])

    # calculate threshold
    minsiglevel = (rmsn + rmse) / 2 * zfac

    # vertical P-coda level must exceed mean horizontal P-coda level
    # zfac times to pass the test
    if rmsz < minsiglevel:
        print("checkZ4S: Maybe S onset? Skip this P pick!")
    else:
        print("checkZ4S: P onset passes checkZ4S test!")
        returnflag = 1

    if iplot > 1:
        rms_dict = {'Z': rmsz,
                    'N': rmsn,
                    'E': rmse}

        traces_dict = {'Z': zdat[0],
                       'N': ndat[0],
                       'E': edat[0]}

        diff_dict = {'Z': zdiff,
                     'N': ndiff,
                     'E': ediff}

        # create the figure once; formerly re-checked inside every iteration
        if fig is None or fig == 'None':  # was 'fig == None'
            fig = plt.figure()  # self.iplot) ### WHY? MP MP
            plt_flag = 1

        ax1 = None
        for i, key in enumerate(['Z', 'N', 'E']):
            rms = rms_dict[key]
            trace = traces_dict[key]
            t = np.arange(diff_dict[key], trace.stats.npts / trace.stats.sampling_rate + diff_dict[key],
                          trace.stats.delta)
            if i == 0:
                ax1 = fig.add_subplot(3, 1, i + 1)
                ax = ax1
                ax.set_title('CheckZ4S, Station %s' % zdat[0].stats.station)
            else:
                # share the time axis with the first subplot
                ax = fig.add_subplot(3, 1, i + 1, sharex=ax1)
            ax.plot(t, abs(trace.data), color='b', label='abs')
            ax.plot(t, trace.data, color='k')
            name = str(trace.stats.channel) + ': {}'.format(rms)
            ax.plot([pick, pick + checkwin], [rms, rms], 'r', label='RMS {}'.format(name))
            ax.plot([pick, pick], ax.get_ylim(), 'm', label='Pick')
            ax.set_ylabel('Normalized Counts')
            ax.axvspan(pick, pick + checkwin, color='c', alpha=0.2,
                       lw=0)
            ax.legend()
        ax.set_xlabel('Time [s] since %s' % zdat[0].stats.starttime)
        if plt_flag == 1:
            fig.show()
            # block until the user acknowledges the interactive figure
            try:
                input()
            except SyntaxError:
                pass
            plt.close(fig)
    return returnflag
|
2015-07-01 15:31:50 +02:00
|
|
|
|
2017-08-15 10:02:46 +02:00
|
|
|
|
2017-08-21 14:50:18 +02:00
|
|
|
def getQualityFromUncertainty(uncertainty, Errors):
    '''Script to transform uncertainty into quality classes 0-4
    regarding adjusted time errors Errors.

    : param: uncertainty, pick uncertainty [s], or None/'None' if unknown
    : type: float, None or string

    : param: Errors, ascending error thresholds separating classes 0/1, 1/2, 2/3, 3/4
    : type: list of 4 floats
    '''
    # unknown uncertainty gets the worst quality
    if uncertainty is None or uncertainty == 'None':
        return 4

    # NOTE: the former implementation used strict '<'/'>' on both sides of
    # each interval, so an uncertainty exactly equal to Errors[1], Errors[2]
    # or Errors[3] fell through every branch and was graded 4 (worst);
    # chained '<=' comparisons close those gaps.
    if uncertainty <= Errors[0]:
        quality = 0
    elif uncertainty <= Errors[1]:
        quality = 1
    elif uncertainty <= Errors[2]:
        quality = 2
    elif uncertainty <= Errors[3]:
        quality = 3
    else:
        quality = 4

    return quality
|
2015-10-26 09:41:02 +01:00
|
|
|
|
2015-06-25 10:21:52 +02:00
|
|
|
if __name__ == '__main__':
    # run the module's embedded doctests when executed as a script
    import doctest

    doctest.testmod()
|