#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created Mar/Apr 2015

Collection of helpful functions for manual and automatic picking.

:author: Ludger Kueperkoch / MAGS2 EP3 working group
"""

import numpy as np
import matplotlib.pyplot as plt
from obspy.core import Stream, UTCDateTime


def earllatepicker(X, nfac, TSNR, Pick1, iplot=None):
    '''
    Function to derive the earliest and latest possible pick after
    Diehl & Kissling (2009) as reasonable uncertainties. The latest possible
    pick is based on the noise level, the earliest possible pick is half a
    signal wavelength ahead of the most likely pick given by PragPicker or
    set manually by the analyst. The most likely pick (initial pick Pick1)
    must be given.

    :param: X, time series (seismogram)
    :type: `~obspy.core.stream.Stream`

    :param: nfac (noise factor), nfac times noise level to calculate latest possible pick
    :type: int

    :param: TSNR, length of time windows around pick used to determine SNR [s]
    :type: tuple (T_noise, T_gap, T_signal)

    :param: Pick1, initial (most likely) onset time, starting point for earllatepicker
    :type: float

    :param: iplot, if given, results are plotted in figure(iplot)
    :type: int
    '''
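    # Example (illustrative; 'st' and 'mpp' are placeholders for a one-component
    # ObsPy Stream and a most likely pick in seconds after the trace start):
    #   EPick, LPick, PickError = earllatepicker(st, nfac=1.5,
    #                                            TSNR=(5.0, 0.5, 2.0),
    #                                            Pick1=mpp, iplot=None)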

    assert isinstance(X, Stream), "%s is not a stream object" % str(X)

    LPick = None
    EPick = None
    PickError = None
    print 'earllatepicker: Get earliest and latest possible pick relative to most likely pick ...'

    x = X[0].data
    t = np.arange(0, X[0].stats.npts / X[0].stats.sampling_rate,
                  X[0].stats.delta)

    # get latest possible pick
    # get noise window
    inoise = getnoisewin(t, Pick1, TSNR[0], TSNR[1])
    # get signal window
    isignal = getsignalwin(t, Pick1, TSNR[2])
    # calculate noise level
    nlevel = np.sqrt(np.mean(np.square(x[inoise]))) * nfac
    # get time where signal exceeds nlevel
    ilup, = np.where(x[isignal] > nlevel)
    ildown, = np.where(x[isignal] < -nlevel)
    if not ilup.size and not ildown.size:
        raise ValueError('earllatepicker: Signal lower than noise level')
    il = min(np.min(ilup) if ilup.size else float('inf'),
             np.min(ildown) if ildown.size else float('inf'))
    LPick = t[isignal][il]
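    # (LPick is the time of the first sample in the signal window whose
    # amplitude exceeds nfac times the RMS noise level in either direction.)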

    # get earliest possible pick

    # determine all zero crossings in signal window
    zc = crossings_nonzero_all(x[isignal])
    # calculate mean half period T0 of signal as the average of the
    # intervals between consecutive zero crossings
    T0 = np.mean(np.diff(zc)) * X[0].stats.delta  # this is half the wavelength!
    # T0/2 is assumed as time difference between most likely and earliest possible pick!
    EPick = Pick1 - T0 / 2

    # get symmetric pick error as mean of earliest and latest possible pick,
    # weighting the latest possible pick twice as much as the earliest one
    diffti_tl = LPick - Pick1
    diffti_te = Pick1 - EPick
    PickError = (diffti_te + 2 * diffti_tl) / 3
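    # Worked example of the weighting: with diffti_te = 0.1 s and
    # diffti_tl = 0.2 s, PickError = (0.1 + 2 * 0.2) / 3 ≈ 0.17 s.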

    if iplot > 1:
        p = plt.figure(iplot)
        p1, = plt.plot(t, x, 'k')
        p2, = plt.plot(t[inoise], x[inoise])
        p3, = plt.plot(t[isignal], x[isignal], 'r')
        p4, = plt.plot([t[0], t[int(len(t)) - 1]], [nlevel, nlevel], '--k')
        p5, = plt.plot(t[isignal[0][zc]], np.zeros(len(zc)), '*g', markersize=14)
        plt.legend([p1, p2, p3, p4, p5],
                   ['Data', 'Noise Window', 'Signal Window', 'Noise Level',
                    'Zero Crossings'],
                   loc='best')
        plt.plot([t[0], t[int(len(t)) - 1]], [-nlevel, -nlevel], '--k')
        plt.plot([Pick1, Pick1], [max(x), -max(x)], 'b', linewidth=2)
        plt.plot([LPick, LPick], [max(x) / 2, -max(x) / 2], '--k')
        plt.plot([EPick, EPick], [max(x) / 2, -max(x) / 2], '--k')
        plt.plot([Pick1 + PickError, Pick1 + PickError],
                 [max(x) / 2, -max(x) / 2], 'r--')
        plt.plot([Pick1 - PickError, Pick1 - PickError],
                 [max(x) / 2, -max(x) / 2], 'r--')
        plt.xlabel('Time [s] since %s' % X[0].stats.starttime)
        plt.yticks([])
        ax = plt.gca()
        ax.set_xlim([t[inoise[0][0]] - 2, t[isignal[0][len(isignal[0]) - 1]] + 3])
        plt.title(
            'Earliest-/Latest Possible/Most Likely Pick & Symmetric Pick Error, %s' %
            X[0].stats.station)
        plt.show()
        raw_input()
        plt.close(p)

    return EPick, LPick, PickError


def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=None):
    '''
    Function to derive the first motion (polarity) of a given phase onset Pick.
    The calculation is based on zero crossings determined within the time
    window pickwin after the given onset time.

    :param: Xraw, unfiltered time series (seismogram)
    :type: `~obspy.core.stream.Stream`

    :param: Xfilt, filtered time series (seismogram)
    :type: `~obspy.core.stream.Stream`

    :param: pickwin, time window after onset Pick within which zero crossings are determined
    :type: float

    :param: Pick, initial (most likely) onset time, starting point for fmpicker
    :type: float

    :param: iplot, if given, results are plotted in figure(iplot)
    :type: int
    '''
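    # Example (illustrative; 'st_raw', 'st_filt' and 'mpp' are placeholders for
    # unfiltered/filtered one-component ObsPy Streams and a most likely pick in
    # seconds after the trace start):
    #   FM = fmpicker(st_raw, st_filt, pickwin=0.2, Pick=mpp, iplot=None)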

    assert isinstance(Xraw, Stream), "%s is not a stream object" % str(Xraw)
    assert isinstance(Xfilt, Stream), "%s is not a stream object" % str(Xfilt)

    FM = None
    if Pick is not None:
        print 'fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...'

        xraw = Xraw[0].data
        xfilt = Xfilt[0].data
        t = np.arange(0, Xraw[0].stats.npts / Xraw[0].stats.sampling_rate,
                      Xraw[0].stats.delta)
        # get pick window; cap it at the end of the trace
        ipick = np.where(
            (t <= min([Pick + pickwin, t[len(t) - 1]])) & (t >= Pick))

        # remove mean
        xraw[ipick] = xraw[ipick] - np.mean(xraw[ipick])
        xfilt[ipick] = xfilt[ipick] - np.mean(xfilt[ipick])

        # get next zero crossing after most likely pick
        # initial onset is assumed to be the first zero crossing
        # first from unfiltered trace
        zc1 = []
        zc1.append(Pick)
        index1 = []
        i = 0
        for j in range(ipick[0][1], ipick[0][len(t[ipick]) - 1]):
            i = i + 1
            if xraw[j - 1] <= 0 and xraw[j] >= 0:
                zc1.append(t[ipick][i])
                index1.append(i)
            elif xraw[j - 1] > 0 and xraw[j] <= 0:
                zc1.append(t[ipick][i])
                index1.append(i)
            if len(zc1) == 3:
                break

        # if time difference between 1st and 2nd zero crossing
        # is too short, get time difference between 1st and 3rd
        # to derive maximum
        if zc1[1] - zc1[0] <= Xraw[0].stats.delta:
            li1 = index1[1]
        else:
            li1 = index1[0]
        if np.size(xraw[ipick[0][1]:ipick[0][li1]]) == 0:
            print 'fmpicker: Onset on unfiltered trace too emergent for first motion determination!'
            P1 = None
        else:
            imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][li1]]))
            islope1 = np.where((t >= Pick) & (t <= Pick + t[imax1]))
            # calculate slope as polynomial fit of order 1
            xslope1 = np.arange(0, len(xraw[islope1]), 1)
            P1 = np.polyfit(xslope1, xraw[islope1], 1)
            datafit1 = np.polyval(P1, xslope1)

        # now using filtered trace
        # next zero crossing after most likely pick
        zc2 = []
        zc2.append(Pick)
        index2 = []
        i = 0
        for j in range(ipick[0][1], ipick[0][len(t[ipick]) - 1]):
            i = i + 1
            if xfilt[j - 1] <= 0 and xfilt[j] >= 0:
                zc2.append(t[ipick][i])
                index2.append(i)
            elif xfilt[j - 1] > 0 and xfilt[j] <= 0:
                zc2.append(t[ipick][i])
                index2.append(i)
            if len(zc2) == 3:
                break

        # if time difference between 1st and 2nd zero crossing
        # is too short, get time difference between 1st and 3rd
        # to derive maximum
        if zc2[1] - zc2[0] <= Xfilt[0].stats.delta:
            li2 = index2[1]
        else:
            li2 = index2[0]
        if np.size(xfilt[ipick[0][1]:ipick[0][li2]]) == 0:
            print 'fmpicker: Onset on filtered trace too emergent for first motion determination!'
            P2 = None
        else:
            imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][li2]]))
            islope2 = np.where((t >= Pick) & (t <= Pick + t[imax2]))
            # calculate slope as polynomial fit of order 1
            xslope2 = np.arange(0, len(xfilt[islope2]), 1)
            P2 = np.polyfit(xslope2, xfilt[islope2], 1)
            datafit2 = np.polyval(P2, xslope2)

        # compare results
        if P1 is not None and P2 is not None:
            if P1[0] < 0 and P2[0] < 0:
                FM = 'D'
            elif P1[0] >= 0 and P2[0] < 0:
                FM = '-'
            elif P1[0] < 0 and P2[0] >= 0:
                FM = '-'
            elif P1[0] > 0 and P2[0] > 0:
                FM = 'U'
            elif P1[0] <= 0 and P2[0] > 0:
                FM = '+'
            elif P1[0] > 0 and P2[0] <= 0:
                FM = '+'

    if iplot > 1:
        plt.figure(iplot)
        plt.subplot(2, 1, 1)
        plt.plot(t, xraw, 'k')
        p1, = plt.plot([Pick, Pick], [max(xraw), -max(xraw)], 'b', linewidth=2)
        if P1 is not None:
            p2, = plt.plot(t[islope1], xraw[islope1])
            p3, = plt.plot(zc1, np.zeros(len(zc1)), '*g', markersize=14)
            p4, = plt.plot(t[islope1], datafit1, '--g', linewidth=2)
            plt.legend([p1, p2, p3, p4],
                       ['Pick', 'Slope Window', 'Zero Crossings', 'Slope'],
                       loc='best')
            plt.text(Pick + 0.02, max(xraw) / 2, '%s' % FM, fontsize=14)
            ax = plt.gca()
            ax.set_xlim(
                [t[islope1[0][0]] - 0.1, t[islope1[0][len(islope1[0]) - 1]] + 0.3])
        plt.yticks([])
        plt.title('First-Motion Determination, %s, Unfiltered Data' % Xraw[0].stats.station)

        plt.subplot(2, 1, 2)
        plt.title('First-Motion Determination, Filtered Data')
        plt.plot(t, xfilt, 'k')
        p1, = plt.plot([Pick, Pick], [max(xfilt), -max(xfilt)], 'b',
                       linewidth=2)
        if P2 is not None:
            p2, = plt.plot(t[islope2], xfilt[islope2])
            p3, = plt.plot(zc2, np.zeros(len(zc2)), '*g', markersize=14)
            p4, = plt.plot(t[islope2], datafit2, '--g', linewidth=2)
            plt.text(Pick + 0.02, max(xraw) / 2, '%s' % FM, fontsize=14)
            ax = plt.gca()
            ax.set_xlim(
                [t[islope2[0][0]] - 0.1, t[islope2[0][len(islope2[0]) - 1]] + 0.3])
        plt.xlabel('Time [s] since %s' % Xraw[0].stats.starttime)
        plt.yticks([])
        plt.show()
        raw_input()
        plt.close(iplot)

    return FM


def crossings_nonzero_all(data):
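    '''
    Helper returning the indices i at which the sign of data changes between
    samples i and i+1 (zeros count as non-positive), e.g. for
    np.array([1, 2, -1, -3, 4]) the result is array([1, 3]).
    '''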
    pos = data > 0
    npos = ~pos
    return ((pos[:-1] & npos[1:]) | (npos[:-1] & pos[1:])).nonzero()[0]


def getSNR(X, TSNR, t1):
    '''
    Function to calculate the SNR of a certain part of a seismogram relative
    to a given time (onset) out of given noise and signal windows. A safety
    gap between the noise and signal part can be set. Returns the SNR, the
    SNR in dB, and the noise level.

    :param: X, time series (seismogram)
    :type: `~obspy.core.stream.Stream`

    :param: TSNR, length of time windows [s] around t1 (onset) used to determine SNR
    :type: tuple (T_noise, T_gap, T_signal)

    :param: t1, initial time (onset) from which noise and signal windows are calculated
    :type: float
    '''
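    # Example (illustrative; 'st' and 'mpp' are placeholders for a one-component
    # ObsPy Stream and an onset in seconds after the trace start):
    #   SNR, SNRdB, noiselevel = getSNR(st, TSNR=(5.0, 0.5, 2.0), t1=mpp)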

    assert isinstance(X, Stream), "%s is not a stream object" % str(X)

    x = X[0].data
    t = np.arange(0, X[0].stats.npts / X[0].stats.sampling_rate,
                  X[0].stats.delta)

    # get noise window
    inoise = getnoisewin(t, t1, TSNR[0], TSNR[1])

    # get signal window
    isignal = getsignalwin(t, t1, TSNR[2])
    if np.size(inoise) < 1:
        print 'getSNR: Empty array inoise, check noise window!'
        return
    elif np.size(isignal) < 1:
        print 'getSNR: Empty array isignal, check signal window!'
        return

    # calculate ratios
    noiselevel = np.sqrt(np.mean(np.square(x[inoise])))
    signallevel = np.sqrt(np.mean(np.square(x[isignal])))
    SNR = signallevel / noiselevel
    SNRdB = 10 * np.log10(SNR)

    return SNR, SNRdB, noiselevel


def getnoisewin(t, t1, tnoise, tgap):
    '''
    Function to extract indices of data out of a time series for noise
    calculation. Returns an array of indices.

    :param: t, array of time stamps
    :type: numpy array

    :param: t1, time relative to which the noise window is extracted
    :type: float

    :param: tnoise, length of time window [s] for noise part extraction
    :type: float

    :param: tgap, safety gap between t1 (onset) and noise window to
            ensure that the noise window contains no signal
    :type: float
    '''
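    # Example (illustrative; values are placeholders): for a time axis t in
    # seconds, an onset at 12.3 s, a 5 s noise window and a 0.5 s safety gap:
    #   inoise = getnoisewin(t, 12.3, 5.0, 0.5)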

    inoise = None
    # get noise window
    inoise = np.where((t <= max([t1 - tgap, 0]))
                      & (t >= max([t1 - tnoise - tgap, 0])))
    if np.size(inoise) < 1:
        print 'getnoisewin: Empty array inoise, check noise window!'

    return inoise


def getsignalwin(t, t1, tsignal):
    '''
    Function to extract indices of data out of a time series for signal level
    calculation. Returns an array of indices.

    :param: t, array of time stamps
    :type: numpy array

    :param: t1, time relative to which the signal window is extracted
    :type: float

    :param: tsignal, length of time window [s] for signal level calculation
    :type: float
    '''
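    # Example (illustrative; values are placeholders): for a time axis t in
    # seconds, an onset at 12.3 s and a 2 s signal window:
    #   isignal = getsignalwin(t, 12.3, 2.0)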

    isignal = None
    # get signal window; cap it at the end of the time axis
    isignal = np.where((t <= min([t1 + tsignal, t[len(t) - 1]]))
                       & (t >= t1))
    if np.size(isignal) < 1:
        print 'getsignalwin: Empty array isignal, check signal window!'

    return isignal


def wadaticheck(pickdic, dttolerance, iplot):
    '''
    Function to calculate a Wadati diagram from given P and S onsets in order
    to detect outliers among the S picks. If an S-P time deviates too much
    from the S-P time regression, the S pick is marked and downgraded.

    :param: pickdic, dictionary containing picks and quality parameters
    :type: dictionary

    :param: dttolerance, maximum allowed deviation of an S-P time from the
            S-P time regression
    :type: float

    :param: iplot, if iplot > 1, the Wadati diagram is shown
    :type: int
    '''
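    # Example (illustrative; station keys and times are placeholders, the
    # dictionary layout is the one accessed below; at least three stations
    # with P and S picks are needed for the regression):
    #   pickdic = {'STA1': {'P': {'mpp': UTCDateTime(2015, 4, 1, 12, 0, 1), 'weight': 0},
    #                       'S': {'mpp': UTCDateTime(2015, 4, 1, 12, 0, 4), 'weight': 1}}}
    #   checkedonsets = wadaticheck(pickdic, dttolerance=1.0, iplot=0)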

    checkedonsets = pickdic

    # search for good quality picks and calculate S-P times
    Ppicks = []
    Spicks = []
    SPtimes = []
    for key in pickdic:
        if pickdic[key]['P']['weight'] < 4 and pickdic[key]['S']['weight'] < 4:
            # calculate S-P time
            spt = UTCDateTime(pickdic[key]['S']['mpp']) - UTCDateTime(pickdic[key]['P']['mpp'])
            # add S-P time to dictionary
            pickdic[key]['SPt'] = spt
            # add P onsets and corresponding S-P times to list
            UTCPpick = UTCDateTime(pickdic[key]['P']['mpp']) - UTCDateTime(1970, 1, 1, 0, 0, 0)
            UTCSpick = UTCDateTime(pickdic[key]['S']['mpp']) - UTCDateTime(1970, 1, 1, 0, 0, 0)
            Ppicks.append(UTCPpick)
            Spicks.append(UTCSpick)
            SPtimes.append(spt)
    if len(SPtimes) >= 3:
        # calculate slope of S-P times against P onset times;
        # in a Wadati diagram the slope equals vp/vs - 1
        p1 = np.polyfit(Ppicks, SPtimes, 1)
        wdfit = np.polyval(p1, Ppicks)
        wfitflag = 0

        # calculate average vp/vs ratio before check
        vpvsr = p1[0] + 1
        print 'wadaticheck: Average Vp/Vs ratio before check:', vpvsr

        checkedPpicks = []
        checkedSpicks = []
        checkedSPtimes = []
        # calculate deviations from Wadati regression
        ii = 0
        for key in pickdic:
            if 'SPt' in pickdic[key]:
                wddiff = abs(pickdic[key]['SPt'] - wdfit[ii])
                ii += 1
                # check if deviation is larger than adjusted tolerance
                if wddiff >= dttolerance:
                    # mark onset and downgrade S-weight to 4
                    # (not used anymore)
                    marker = 'badWadatiCheck'
                    pickdic[key]['S']['weight'] = 4
                else:
                    marker = 'goodWadatiCheck'
                    checkedPpick = UTCDateTime(pickdic[key]['P']['mpp']) - \
                                   UTCDateTime(1970, 1, 1, 0, 0, 0)
                    checkedPpicks.append(checkedPpick)
                    checkedSpick = UTCDateTime(pickdic[key]['S']['mpp']) - \
                                   UTCDateTime(1970, 1, 1, 0, 0, 0)
                    checkedSpicks.append(checkedSpick)
                    checkedSPtime = UTCDateTime(pickdic[key]['S']['mpp']) - \
                                    UTCDateTime(pickdic[key]['P']['mpp'])
                    checkedSPtimes.append(checkedSPtime)

                pickdic[key]['S']['marked'] = marker

        # calculate new slope from the checked picks
        p2 = np.polyfit(checkedPpicks, checkedSPtimes, 1)
        wdfit2 = np.polyval(p2, checkedPpicks)

        # calculate average vp/vs ratio after check
        cvpvsr = p2[0] + 1
        print 'wadaticheck: Average Vp/Vs ratio after check:', cvpvsr

        checkedonsets = pickdic

    else:
        print 'wadaticheck: Not enough S-P times available for reliable regression!'
        print 'Skip wadati check!'
        wfitflag = 1
# plot results
|
|
|
|
iplot = 2
|
|
|
|
if iplot > 1:
|
|
|
|
f = plt.figure(iplot)
|
|
|
|
f1, = plt.plot(Ppicks, SPtimes, 'ro')
|
|
|
|
if wfitflag == 0:
|
|
|
|
f2, = plt.plot(Ppicks, wdfit, 'k')
|
|
|
|
f3, = plt.plot(checkedPpicks, checkedSPtimes, 'ko')
|
|
|
|
f4, = plt.plot(checkedPpicks, wdfit2, 'g')
|
|
|
|
plt.ylabel('S-P Times [s]')
|
|
|
|
plt.xlabel('P Times [s]')
|
|
|
|
plt.title('Wadati-Diagram, %d S-P Times, Vp/Vs(old)=%5.2f, Vp/Vs(checked)=%5.2f' \
|
|
|
|
% (len(SPtimes), vpvsr, cvpvsr))
|
|
|
|
plt.legend([f1, f2, f3, f4], ['Skipped S-Picks', 'Wadati 1', 'Reliable S-Picks', \
|
|
|
|
'Wadati 2'], loc='best')
|
|
|
|
plt.show()
|
|
|
|
raw_input()
|
|
|
|
plt.close(f)
|
2015-04-02 15:45:38 +02:00
|
|
|
|
2015-06-19 15:28:53 +02:00
|
|
|
return checkedonsets
|