just cleaning up the code to meet coding conventions

Sebastian Wehling-Benatelli 2015-06-12 09:02:00 +02:00
parent c5da8fd994
commit c5ce958a41
2 changed files with 643 additions and 518 deletions

View File

@@ -3,58 +3,65 @@
Created Dec 2014 to Feb 2015
Implementation of the automated picking algorithms published and described in:
Kueperkoch, L., Meier, T., Lee, J., Friederich, W., & Egelados Working Group,
2010: Automated determination of P-phase arrival times at regional and local
distances using higher order statistics, Geophys. J. Int., 181, 1159-1170
Kueperkoch, L., Meier, T., Bruestle, A., Lee, J., Friederich, W., & Egelados
Working Group, 2012: Automated determination of S-phase arrival times using
autoregressive prediction: application to local and regional distances,
Geophys. J. Int., 188, 687-702.
The picks with the above described algorithms are assumed to be the most likely
picks. For each most likely pick the corresponding earliest and latest possible
picks are calculated after Diehl & Kissling (2009).
:author: MAGS2 EP3 working group / Ludger Kueperkoch
"""
import numpy as np
import matplotlib.pyplot as plt
from pylot.core.pick.utils import *
from pylot.core.pick.CharFuns import CharacteristicFunction
class AutoPicking(object):
'''
Superclass of different, automated picking algorithms applied on a CF
determined using AIC, HOS, or AR prediction.
'''
def __init__(self, cf, TSNR, PickWindow, iplot=None, aus=None, Tsmooth=None,
Pick1=None):
'''
:param cf: characteristic function, on which the picking algorithm is
applied
:type cf: `~pylot.core.pick.CharFuns.CharacteristicFunction` object
:param TSNR: length of time windows for SNR determination - [s]
:type TSNR: tuple (T_noise, T_gap, T_signal, T_slope)
:param PickWindow: length of pick window - [s]
:type PickWindow: float
:param iplot: no. of figure window for plotting interim results
:type iplot: integer
:param aus: aus ("artificial uplift of samples"), find local minimum at
i if aic(i-1)*(1+aus) >= aic(i)
:type aus: float
:param Tsmooth: length of moving window to calculate smoothed CF - [s]
:type Tsmooth: float
:param Pick1: initial (preliminary) onset time, starting point for
PragPicker and EarlLatePicker
:type Pick1: float
'''
assert isinstance(cf, CharacteristicFunction), \
"%s is not a CharacteristicFunction object" % str(cf)
self.cf = cf.getCF()
self.Tcf = cf.getTimeArray()
@@ -80,9 +87,8 @@ class AutoPicking(object):
PickWindow=self.getPickWindow(),
aus=self.getaus(),
Tsmooth=self.getTsmooth(),
Pick1=self.getpick1())
def getTSNR(self):
return self.TSNR
@@ -112,7 +118,7 @@
def getSNR(self):
return self.SNR
def getSlope(self):
return self.slope
@@ -136,144 +142,156 @@ class AICPicker(AutoPicking):
'''
Method to derive the onset time of an arriving phase based on CF
derived from AIC. In order to get an impression of the quality of this initial pick,
a quality assessment is applied based on SNR and slope determination derived from the CF,
from which the AIC has been calculated.
'''
def calcPick(self):
print 'AICPicker: Get initial onset time (pick) from AIC-CF ...'
self.Pick = None
self.slope = None
self.SNR = None
# find NaN's
nn = np.isnan(self.cf)
if len(nn) > 1:
self.cf[nn] = 0
# taper AIC-CF to get rid of side maxima
tap = np.hanning(len(self.cf))
aic = tap * self.cf + max(abs(self.cf))
# smooth AIC-CF
ismooth = int(round(self.Tsmooth / self.dt))
aicsmooth = np.zeros(len(aic))
if len(aic) < ismooth:
print 'AICPicker: Tsmooth larger than CF!'
return
else:
for i in range(1, len(aic)):
if i > ismooth:
ii1 = i - ismooth
aicsmooth[i] = aicsmooth[i - 1] + (aic[i] - aic[
ii1]) / ismooth
else:
aicsmooth[i] = np.mean(aic[1: i])
# remove offset
offset = abs(min(aic) - min(aicsmooth))
aicsmooth = aicsmooth - offset
# get maximum of 1st derivative of AIC-CF (more stable!) as starting
# point
diffcf = np.diff(aicsmooth)
# find NaN's
nn = np.isnan(diffcf)
if len(nn) > 1:
diffcf[nn] = 0
# taper CF to get rid of side maxima
tap = np.hanning(len(diffcf))
diffcf = tap * diffcf * max(abs(aicsmooth))
icfmax = np.argmax(diffcf)
# find minimum in AIC-CF front of maximum
lpickwindow = int(round(self.PickWindow / self.dt))
for i in range(icfmax - 1, max([icfmax - lpickwindow, 2]), -1):
if aicsmooth[i - 1] >= aicsmooth[i]:
self.Pick = self.Tcf[i]
break
# if no minimum could be found:
# search in 1st derivative of AIC-CF
if self.Pick is None:
for i in range(icfmax - 1, max([icfmax - lpickwindow, 2]), -1):
if diffcf[i - 1] >= diffcf[i]:
self.Pick = self.Tcf[i]
break
# quality assessment using SNR and slope from CF
if self.Pick is not None:
# get noise window
inoise = getnoisewin(self.Tcf, self.Pick, self.TSNR[0],
self.TSNR[1])
# check, if these are counts or m/s, important for slope estimation!
# this is quick and dirty, better solution?
if max(abs(self.Data[0].data)) < 1e-3:
self.Data[0].data *= 1000000
# get signal window
isignal = getsignalwin(self.Tcf, self.Pick, self.TSNR[2])
# calculate SNR from CF
self.SNR = max(abs(aic[isignal] - np.mean(aic[isignal]))) / \
max(abs(aic[inoise] - np.mean(aic[inoise])))
# calculate slope from CF after initial pick
# get slope window
tslope = self.TSNR[3] # slope determination window
islope = np.where(
(self.Tcf <= min([self.Pick + tslope, len(self.Data[0].data)]))
& (self.Tcf >= self.Pick))
# find maximum within slope determination window
# 'cause slope should be calculated up to first local minimum only!
imax = np.argmax(self.Data[0].data[islope])
if imax == 0:
print 'AICPicker: Maximum for slope determination right at ' \
'the beginning of the window!'
print 'Choose longer slope determination window!'
return
islope = islope[0][0:imax]
dataslope = self.Data[0].data[islope]
# calculate slope as polynomial fit of order 1
xslope = np.arange(0, len(dataslope), 1)
P = np.polyfit(xslope, dataslope, 1)
datafit = np.polyval(P, xslope)
if datafit[0] >= datafit[len(datafit) - 1]:
print 'AICPicker: Negative slope, bad onset skipped!'
return
self.slope = 1 / tslope * datafit[len(dataslope) - 1] - datafit[0]
else:
self.SNR = None
self.slope = None
if self.iplot > 1:
p = plt.figure(self.iplot)
x = self.Data[0].data
p1, = plt.plot(self.Tcf, x / max(x), 'k')
p2, = plt.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r')
if self.Pick is not None:
p3, = plt.plot([self.Pick, self.Pick], [-0.1, 0.5], 'b',
linewidth=2)
plt.legend([p1, p2, p3],
['(HOS-/AR-) Data', 'Smoothed AIC-CF', 'AIC-Pick'])
else:
plt.legend([p1, p2], ['(HOS-/AR-) Data', 'Smoothed AIC-CF'])
plt.xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
plt.yticks([])
plt.title(self.Data[0].stats.station)
if self.Pick is not None:
plt.figure(self.iplot + 1)
p11, = plt.plot(self.Tcf, x, 'k')
p12, = plt.plot(self.Tcf[inoise], self.Data[0].data[inoise])
p13, = plt.plot(self.Tcf[isignal], self.Data[0].data[isignal],
'r')
p14, = plt.plot(self.Tcf[islope], dataslope, 'g--')
p15, = plt.plot(self.Tcf[islope], datafit, 'g', linewidth=2)
plt.legend([p11, p12, p13, p14, p15],
['Data', 'Noise Window', 'Signal Window',
'Slope Window', 'Slope'],
loc='best')
plt.title('Station %s, SNR=%7.2f, Slope= %12.2f counts/s' % (
self.Data[0].stats.station,
self.SNR, self.slope))
plt.xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
plt.ylabel('Counts')
ax = plt.gca()
plt.yticks([])
ax.set_xlim([self.Tcf[inoise[0][0]] - 5,
self.Tcf[isignal[0][len(isignal) - 1]] + 5])
plt.show()
raw_input()
plt.close(p)
if self.Pick is None:
print 'AICPicker: Could not find minimum, picking window too short?'
class PragPicker(AutoPicking):
'''
@@ -283,90 +301,95 @@ class PragPicker(AutoPicking):
def calcPick(self):
if self.getpick1() is not None:
print 'PragPicker: Get most likely pick from HOS- or AR-CF using ' \
'pragmatic picking algorithm ...'
self.Pick = None
self.SNR = None
self.slope = None
# smooth CF
ismooth = int(round(self.Tsmooth / self.dt))
cfsmooth = np.zeros(len(self.cf))
if len(self.cf) < ismooth:
print 'PragPicker: Tsmooth larger than CF!'
return
else:
for i in range(1, len(self.cf)):
if i > ismooth:
ii1 = i - ismooth
cfsmooth[i] = cfsmooth[i - 1] + (self.cf[i] - self.cf[
ii1]) / ismooth
else:
cfsmooth[i] = np.mean(self.cf[1: i])
# select picking window
# which is centered around tpick1
ipick = np.where((self.Tcf >= (self.getpick1() - self.PickWindow / 2))
& (self.Tcf <= (self.getpick1() + self.PickWindow / 2)))
cfipick = self.cf[ipick] - np.mean(self.cf[ipick])
Tcfpick = self.Tcf[ipick]
cfsmoothipick = cfsmooth[ipick] - np.mean(self.cf[ipick])
ipick1 = np.argmin(abs(self.Tcf - self.getpick1()))
cfpick1 = 2 * self.cf[ipick1]
# check trend of CF, i.e. differences of CF and adjust aus regarding this trend
# prominent trend: decrease aus
# flat: use given aus
cfdiff = np.diff(cfipick)
i0diff = np.where(cfdiff > 0)
cfdiff = cfdiff[i0diff]
minaus = min(cfdiff * (1 + self.aus))
aus1 = max([minaus, self.aus])
# at first we look to the right until the end of the pick window is reached
flagpick_r = 0
flagpick_l = 0
flagpick = 0
lpickwindow = int(round(self.PickWindow / self.dt))
for i in range(max(np.insert(ipick, 0, 2)),
min([ipick1 + lpickwindow + 1, len(self.cf) - 1])):
if self.cf[i + 1] > self.cf[i] and self.cf[i - 1] >= self.cf[i]:
if cfsmooth[i - 1] * (1 + aus1) >= cfsmooth[i]:
if cfpick1 >= self.cf[i]:
pick_r = self.Tcf[i]
self.Pick = pick_r
flagpick_l = 1
cfpick_r = self.cf[i]
break
# now we look to the left
for i in range(ipick1, max([ipick1 - lpickwindow + 1, 2]), -1):
if self.cf[i + 1] > self.cf[i] and self.cf[i - 1] >= self.cf[i]:
if cfsmooth[i - 1] * (1 + aus1) >= cfsmooth[i]:
if cfpick1 >= self.cf[i]:
pick_l = self.Tcf[i]
self.Pick = pick_l
flagpick_r = 1
cfpick_l = self.cf[i]
break
# now decide which pick: left or right?
if flagpick_l > 0 and flagpick_r > 0 and cfpick_l <= cfpick_r:
self.Pick = pick_l
elif flagpick_l > 0 and flagpick_r > 0 and cfpick_l >= cfpick_r:
self.Pick = pick_r
if self.getiplot() > 1:
p = plt.figure(self.getiplot())
p1, = plt.plot(Tcfpick, cfipick, 'k')
p2, = plt.plot(Tcfpick, cfsmoothipick, 'r')
p3, = plt.plot([self.Pick, self.Pick],
[min(cfipick), max(cfipick)], 'b', linewidth=2)
plt.legend([p1, p2, p3], ['CF', 'Smoothed CF', 'Pick'])
plt.xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
plt.yticks([])
plt.title(self.Data[0].stats.station)
plt.show()
raw_input()
plt.close(p)
else:
self.Pick = None
print 'PragPicker: No initial onset time given! Check input!'
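For orientation only, a minimal sketch of how the picker classes above are typically chained, loosely mirroring the calls made in run_autopicking in the second file. Here `zstream` stands for a filtered single-trace obspy Stream built elsewhere, and all window lengths, orders and quality thresholds are illustrative placeholders, not values taken from this commit.

# hedged usage sketch (not part of this commit): HOS-CF -> AIC-CF -> initial and refined pick
from pylot.core.pick.CharFuns import HOScf, AICcf
from pylot.core.pick.Picker import AICPicker, PragPicker

cuttimes = [0., 60.]             # evaluation window [s], placeholder
tsnr = (5., .5, 1., 1.)          # (T_noise, T_gap, T_signal, T_slope) [s], placeholder

cf1 = HOScf(zstream, cuttimes, 5., 4)        # HOS characteristic function of the vertical trace
z_aic = zstream.copy()
z_aic[0].data = cf1.getCF()                  # AICcf expects a stream carrying the CF
aiccf = AICcf(z_aic, cuttimes)

aicpick = AICPicker(aiccf, tsnr, 10., 0, None, .3)        # initial onset plus SNR and slope
if aicpick.getSlope() >= 2. and aicpick.getSNR() >= 1.5:  # placeholder quality thresholds
    refpick = PragPicker(cf1, tsnr, 10., 0, .001, .3, aicpick.getpick())
    print('refined onset [s]: %s' % refpick.getpick())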

View File

@@ -3,43 +3,38 @@
"""
Function to run automated picking algorithms using AIC,
HOS and AR prediction. Uses object CharFuns and Picker and
function conglomerate utils.
:author: MAGS2 EP3 working group / Ludger Kueperkoch
"""
from obspy.core import read
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.Picker import *
from pylot.core.pick.CharFuns import *
from pylot.core.pick import utils
def run_autopicking(wfstream, pickparam):
"""
:param wfstream:
:type wfstream: `~obspy.core.stream.Stream`
:param pickparam: container of picking parameters from input file,
usually autoPyLoT.in
"""
# declaring pickparam variables (only for convenience)
# read your autoPyLoT.in for details!
# special parameters for P picking
algoP = pickparam.getParam('algoP')
iplot = pickparam.getParam('iplot')
pstart = pickparam.getParam('pstart')
pstop = pickparam.getParam('pstop')
thosmw = pickparam.getParam('tlta')
tsnrz = pickparam.getParam('tsnrz')
hosorder = pickparam.getParam('hosorder')
bpz1 = pickparam.getParam('bpz1')
bpz2 = pickparam.getParam('bpz2')
@@ -55,13 +50,13 @@ def run_autopicking(wfstream, pickparam):
minAICPslope = pickparam.getParam('minAICPslope')
minAICPSNR = pickparam.getParam('minAICPSNR')
timeerrorsP = pickparam.getParam('timeerrorsP')
# special parameters for S picking
algoS = pickparam.getParam('algoS')
sstart = pickparam.getParam('sstart')
sstop = pickparam.getParam('sstop')
bph1 = pickparam.getParam('bph1')
bph2 = pickparam.getParam('bph2')
tsnrh = pickparam.getParam('tsnrh')
pickwinS = pickparam.getParam('pickwinS')
tpred1h = pickparam.getParam('tpred1h')
tdet1h = pickparam.getParam('tdet1h')
@@ -76,7 +71,7 @@ def run_autopicking(wfstream, pickparam):
Srecalcwin = pickparam.getParam('Srecalcwin')
nfacS = pickparam.getParam('nfacS')
timeerrorsS = pickparam.getParam('timeerrorsS')
# parameters for first-motion determination
minFMSNR = pickparam.getParam('minFMSNR')
fmpickwin = pickparam.getParam('fmpickwin')
minfmweight = pickparam.getParam('minfmweight')
@@ -84,164 +79,192 @@ def run_autopicking(wfstream, pickparam):
# split components
zdat = wfstream.select(component="Z")
edat = wfstream.select(component="E")
if len(edat) == 0: # check for other components
edat = wfstream.select(component="2")
ndat = wfstream.select(component="N")
if len(ndat) == 0: # check for other components
ndat = wfstream.select(component="1")
if algoP == 'HOS' or algoP == 'ARZ' and zdat is not None:
print '##########################################'
print 'run_autopicking: Working on P onset of station %s' % zdat[
0].stats.station
print 'Filtering vertical trace ...'
print zdat
z_copy = zdat.copy()
# filter and taper data
tr_filt = zdat[0].copy()
tr_filt.filter('bandpass', freqmin=bpz1[0], freqmax=bpz1[1],
zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
z_copy[0].data = tr_filt.data
##############################################################
# check length of waveform and compare with cut times
Lc = pstop - pstart
Lwf = zdat[0].stats.endtime - zdat[0].stats.starttime
Ldiff = Lwf - Lc
if Ldiff < 0:
print 'run_autopicking: Cutting times are too large for actual ' \
'waveform!'
print 'Use entire waveform instead!'
pstart = 0
pstop = len(zdat[0].data) * zdat[0].stats.delta
cuttimes = [pstart, pstop]
if algoP == 'HOS':
# calculate HOS-CF using subclass HOScf of class
# CharacteristicFunction
cf1 = HOScf(z_copy, cuttimes, thosmw, hosorder) # instance of HOScf
elif algoP == 'ARZ':
# calculate ARZ-CF using subclass ARZcf of class
# CharacteristicFunction
cf1 = ARZcf(z_copy, cuttimes, tpred1z, Parorder, tdet1z,
addnoise) # instance of ARZcf
##############################################################
# calculate AIC-HOS-CF using subclass AICcf of class
# CharacteristicFunction
# class needs stream object => build it
tr_aic = tr_filt.copy()
tr_aic.data = cf1.getCF()
z_copy[0].data = tr_aic.data
aiccf = AICcf(z_copy, cuttimes) # instance of AICcf
##############################################################
# get preliminary onset time from AIC-HOS-CF using subclass AICPicker
# of class AutoPicking
aicpick = AICPicker(aiccf, tsnrz, pickwinP, iplot, None, tsmoothP)
##############################################################
# go on with processing if AIC onset passes quality control
if (aicpick.getSlope() >= minAICPslope and
aicpick.getSNR() >= minAICPSNR):
aicPflag = 1
print 'AIC P-pick passes quality control: Slope: %f, SNR: %f' % \
(aicpick.getSlope(), aicpick.getSNR())
print 'Go on with refined picking ...'
# re-filter waveform with larger bandpass
print 'run_autopicking: re-filtering vertical trace ...'
z_copy = zdat.copy()
tr_filt = zdat[0].copy()
tr_filt.filter('bandpass', freqmin=bpz2[0], freqmax=bpz2[1],
zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
z_copy[0].data = tr_filt.data
#############################################################
# re-calculate CF from re-filtered trace in vicinity of initial
# onset
cuttimes2 = [round(max([aicpick.getpick() - Precalcwin, 0])),
round(min([len(zdat[0].data) * zdat[0].stats.delta,
aicpick.getpick() + Precalcwin]))]
if algoP == 'HOS':
# calculate HOS-CF using subclass HOScf of class
# CharacteristicFunction
cf2 = HOScf(z_copy, cuttimes2, thosmw,
hosorder) # instance of HOScf
elif algoP == 'ARZ':
# calculate ARZ-CF using subclass ARZcf of class
# CharacteristicFunction
cf2 = ARZcf(z_copy, cuttimes2, tpred1z, Parorder, tdet1z,
addnoise) # instance of ARZcf
##############################################################
# get refined onset time from CF2 using class Picker
refPpick = PragPicker(cf2, tsnrz, pickwinP, iplot, ausP, tsmoothP,
aicpick.getpick())
#############################################################
# quality assessment
# get earliest and latest possible pick and symmetrized uncertainty
[lpickP, epickP, Perror] = earllatepicker(z_copy, nfacP, tsnrz,
refPpick.getpick(), iplot)
# get SNR
[SNRP, SNRPdB, Pnoiselevel] = getSNR(z_copy, tsnrz,
refPpick.getpick())
# weight P-onset using symmetric error
if Perror <= timeerrorsP[0]:
Pweight = 0
elif timeerrorsP[0] < Perror <= timeerrorsP[1]:
Pweight = 1
elif timeerrorsP[1] < Perror <= timeerrorsP[2]:
Pweight = 2
elif timeerrorsP[2] < Perror <= timeerrorsP[3]:
Pweight = 3
elif Perror > timeerrorsP[3]:
Pweight = 4
##############################################################
# get first motion of P onset
# certain quality required
if Pweight <= minfmweight and SNRP >= minFMSNR:
FM = fmpicker(zdat, z_copy, fmpickwin, refPpick.getpick(),
iplot)
else:
FM = 'N'
print 'run_autopicking: P-weight: %d, SNR: %f, SNR[dB]: %f, ' \
'Polarity: %s' % (Pweight, SNRP, SNRPdB, FM)
else:
print 'Bad initial (AIC) P-pick, skip this onset!'
print 'AIC-SNR=', aicpick.getSNR(), 'AIC-Slope=', aicpick.getSlope()
Pweight = 4
Sweight = 4
FM = 'N'
SNRP = None
SNRPdB = None
SNRS = None
SNRSdB = None
aicSflag = 0
aicPflag = 0
else:
print 'run_autopicking: No vertical component data available, ' \
'skipping station!'
return
if edat is not None and ndat is not None and len(edat) > 0 and len(
ndat) > 0 and Pweight < 4:
print 'Go on picking S onset ...'
print '##################################################'
print 'Working on S onset of station %s' % edat[0].stats.station
print 'Filtering horizontal traces ...'
# determine time window for calculating CF after P onset
# cuttimesh = [round(refPpick.getpick() + sstart),
# round(refPpick.getpick() + sstop)]
cuttimesh = [round(max([refPpick.getpick() + sstart, 0])),
round(min([refPpick.getpick() + sstop, Lwf]))]
if algoS == 'ARH':
print edat, ndat
# re-create stream object including both horizontal components
hdat = edat.copy()
hdat += ndat
h_copy = hdat.copy()
# filter and taper data
trH1_filt = hdat[0].copy()
trH2_filt = hdat[1].copy()
trH1_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1],
zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1],
zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
h_copy[0].data = trH1_filt.data
h_copy[1].data = trH2_filt.data
elif algoS == 'AR3':
print zdat, edat, ndat
# re-create stream object including both horizontal components
hdat = zdat.copy()
hdat += edat
hdat += ndat
h_copy = hdat.copy()
# filter and taper data
trH1_filt = hdat[0].copy()
trH2_filt = hdat[1].copy()
trH3_filt = hdat[2].copy()
trH1_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1],
zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1],
zerophase=False)
trH3_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1],
zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
trH3_filt.taper(max_percentage=0.05, type='hann')
@@ -250,210 +273,289 @@ def run_autopicking(wfstream, pickparam):
h_copy[2].data = trH3_filt.data
##############################################################
if algoS == 'ARH':
# calculate ARH-CF using subclass ARHcf of class
# CharacteristicFunction
arhcf1 = ARHcf(h_copy, cuttimesh, tpred1h, Sarorder, tdet1h,
addnoise) # instance of ARHcf
elif algoS == 'AR3':
# calculate ARH-CF using subclass AR3Ccf of class
# CharacteristicFunction
arhcf1 = AR3Ccf(h_copy, cuttimesh, tpred1h, Sarorder, tdet1h,
addnoise)  # instance of AR3Ccf
##############################################################
# calculate AIC-ARH-CF using subclass AICcf of class
# CharacteristicFunction
# class needs stream object => build it
tr_arhaic = trH1_filt.copy()
tr_arhaic.data = arhcf1.getCF()
h_copy[0].data = tr_arhaic.data
# calculate ARH-AIC-CF
haiccf = AICcf(h_copy, cuttimesh) # instance of AICcf
##############################################################
# get preliminary onset time from AIC-ARH-CF using subclass AICPicker
# of class AutoPicking
aicarhpick = AICPicker(haiccf, tsnrh, pickwinS, iplot, None,
aictsmoothS)
###############################################################
# go on with processing if AIC onset passes quality control
if (aicarhpick.getSlope() >= minAICSslope and
aicarhpick.getSNR() >= minAICSSNR):
aicSflag = 1
print 'AIC S-pick passes quality control: Slope: %f, SNR: %f' \
% (aicarhpick.getSlope(), aicarhpick.getSNR())
print 'Go on with refined picking ...'
# re-calculate CF from re-filtered trace in vicinity of initial
# onset
cuttimesh2 = [round(aicarhpick.getpick() - Srecalcwin),
round(aicarhpick.getpick() + Srecalcwin)]
# re-filter waveform with larger bandpass
print 'run_autopicking: re-filtering horizontal traces...'
h_copy = hdat.copy()
# filter and taper data
if algoS == 'ARH':
trH1_filt = hdat[0].copy()
trH2_filt = hdat[1].copy()
trH1_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1],
zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1],
zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
h_copy[0].data = trH1_filt.data
h_copy[1].data = trH2_filt.data
#############################################################
arhcf2 = ARHcf(h_copy, cuttimesh2, tpred2h, Sarorder, tdet2h,
addnoise) # instance of ARHcf
elif algoS == 'AR3':
trH1_filt = hdat[0].copy()
trH2_filt = hdat[1].copy()
trH3_filt = hdat[2].copy()
trH1_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1],
zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1],
zerophase=False)
trH3_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1],
zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
trH3_filt.taper(max_percentage=0.05, type='hann')
h_copy[0].data = trH1_filt.data
h_copy[1].data = trH2_filt.data
h_copy[2].data = trH3_filt.data
#############################################################
arhcf2 = AR3Ccf(h_copy, cuttimesh2, tpred2h, Sarorder, tdet2h,
addnoise)  # instance of AR3Ccf
# get refined onset time from CF2 using class Picker
refSpick = PragPicker(arhcf2, tsnrh, pickwinS, iplot, ausS,
tsmoothS, aicarhpick.getpick())
#############################################################
# quality assessment
# get earliest and latest possible pick and symmetrized uncertainty
h_copy[0].data = trH1_filt.data
[lpickS1, epickS1, Serror1] = earllatepicker(h_copy, nfacS, tsnrh,
refSpick.getpick(),
iplot)
h_copy[0].data = trH2_filt.data
[lpickS2, epickS2, Serror2] = earllatepicker(h_copy, nfacS, tsnrh,
refSpick.getpick(),
iplot)
if algoS == 'ARH':
# get earliest pick of both earliest possible picks
epick = [epickS1, epickS2]
lpick = [lpickS1, lpickS2]
pickerr = [Serror1, Serror2]
ipick = np.argmin([epickS1, epickS2])
elif algoS == 'AR3':
[lpickS3, epickS3, Serror3] = earllatepicker(h_copy, nfacS,
tsnrh,
refSpick.getpick(),
iplot)
# get earliest pick of all three picks
epick = [epickS1, epickS2, epickS3]
lpick = [lpickS1, lpickS2, lpickS3]
pickerr = [Serror1, Serror2, Serror3]
ipick = np.argmin([epickS1, epickS2, epickS3])
epickS = epick[ipick]
lpickS = lpick[ipick]
Serror = pickerr[ipick]
# get SNR
[SNRS, SNRSdB, Snoiselevel] = getSNR(h_copy, tsnrh,
refSpick.getpick())
# weight S-onset using symmetric error
if Serror <= timeerrorsS[0]:
Sweight = 0
elif timeerrorsS[0] < Serror <= timeerrorsS[1]:
Sweight = 1
elif timeerrorsS[1] < Serror <= timeerrorsS[2]:
Sweight = 2
elif timeerrorsS[2] < Serror <= timeerrorsS[3]:
Sweight = 3
elif Serror > timeerrorsS[3]:
Sweight = 4
print 'run_autopicking: S-weight: %d, SNR: %f, SNR[dB]: %f' % (
Sweight, SNRS, SNRSdB)
else:
print 'Bad initial (AIC) S-pick, skip this onset!'
print 'AIC-SNR=', aicarhpick.getSNR(), \
'AIC-Slope=', aicarhpick.getSlope()
Sweight = 4
SNRS = None
SNRSdB = None
aicSflag = 0
else:
print 'run_autopicking: No horizontal component data available or ' \
'bad P onset, skipping S picking!'
return
##############################################################
if iplot > 0:
# plot vertical trace
plt.figure()
plt.subplot(3, 1, 1)
tdata = np.arange(0, zdat[0].stats.npts / tr_filt.stats.sampling_rate,
tr_filt.stats.delta)
# check equal length of arrays, sometimes they are different!?
wfldiff = len(tr_filt.data) - len(tdata)
if wfldiff < 0:
tdata = tdata[0:len(tdata) - abs(wfldiff)]
p1, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
if Pweight < 4:
p2, = plt.plot(cf1.getTimeArray(), cf1.getCF() / max(cf1.getCF()),
'b')
if aicPflag == 1:
p3, = plt.plot(cf2.getTimeArray(),
cf2.getCF() / max(cf2.getCF()), 'm')
p4, = plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1],
'r')
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5],
[1, 1], 'r')
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5],
[-1, -1], 'r')
p5, = plt.plot([refPpick.getpick(), refPpick.getpick()],
[-1.3, 1.3], 'r', linewidth=2)
plt.plot([refPpick.getpick() - 0.5, refPpick.getpick() + 0.5],
[1.3, 1.3], 'r', linewidth=2)
plt.plot([refPpick.getpick() - 0.5, refPpick.getpick() + 0.5],
[-1.3, -1.3], 'r', linewidth=2)
plt.plot([lpickP, lpickP], [-1.1, 1.1], 'r--')
plt.plot([epickP, epickP], [-1.1, 1.1], 'r--')
plt.legend([p1, p2, p3, p4, p5],
['Data', 'CF1', 'CF2', 'Initial P Onset',
'Final P Pick'])
plt.title('%s, %s, P Weight=%d, SNR=%7.2f, SNR[dB]=%7.2f '
'Polarity: %s' % (tr_filt.stats.station,
tr_filt.stats.channel,
Pweight,
SNRP,
SNRPdB,
FM))
else:
plt.legend([p1, p2], ['Data', 'CF1'])
plt.title('%s, P Weight=%d, SNR=None, '
'SNRdB=None' % (tr_filt.stats.channel, Pweight))
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.suptitle(tr_filt.stats.starttime)
# plot horizontal traces
plt.subplot(3, 1, 2)
th1data = np.arange(0,
trH1_filt.stats.npts /
trH1_filt.stats.sampling_rate,
trH1_filt.stats.delta)
# check equal length of arrays, sometimes they are different!?
wfldiff = len(trH1_filt.data) - len(th1data)
if wfldiff < 0:
th1data = th1data[0:len(th1data) - abs(wfldiff)]
p21, = plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
if Pweight < 4:
p22, = plt.plot(arhcf1.getTimeArray(),
arhcf1.getCF() / max(arhcf1.getCF()), 'b')
if aicSflag == 1:
p23, = plt.plot(arhcf2.getTimeArray(),
arhcf2.getCF() / max(arhcf2.getCF()), 'm')
p24, = plt.plot([aicarhpick.getpick(), aicarhpick.getpick()],
[-1, 1], 'g')
plt.plot(
[aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5],
[1, 1], 'g')
plt.plot(
[aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5],
[-1, -1], 'g')
p25, = plt.plot([refSpick.getpick(), refSpick.getpick()],
[-1.3, 1.3], 'g', linewidth=2)
plt.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
[1.3, 1.3], 'g', linewidth=2)
plt.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
[-1.3, -1.3], 'g', linewidth=2)
plt.plot([lpickS, lpickS], [-1.1, 1.1], 'g--')
plt.plot([epickS, epickS], [-1.1, 1.1], 'g--')
plt.legend([p21, p22, p23, p24, p25],
['Data', 'CF1', 'CF2', 'Initial S Onset',
'Final S Pick'])
plt.title('%s, S Weight=%d, SNR=%7.2f, SNR[dB]=%7.2f' % (
trH1_filt.stats.channel,
Sweight, SNRS, SNRSdB))
else:
plt.legend([p21, p22], ['Data', 'CF1'])
plt.title('%s, S Weight=%d, SNR=None, SNRdB=None' % (
trH1_filt.stats.channel, Sweight))
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.suptitle(trH1_filt.stats.starttime)
plt.subplot(3, 1, 3)
th2data = np.arange(0,
trH2_filt.stats.npts /
trH2_filt.stats.sampling_rate,
trH2_filt.stats.delta)
# check equal length of arrays, sometimes they are different!?
wfldiff = len(trH2_filt.data) - len(th2data)
if wfldiff < 0:
th2data = th2data[0:len(th2data) - abs(wfldiff)]
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
if Pweight < 4:
p22, = plt.plot(arhcf1.getTimeArray(),
arhcf1.getCF() / max(arhcf1.getCF()), 'b')
if aicSflag == 1:
p23, = plt.plot(arhcf2.getTimeArray(),
arhcf2.getCF() / max(arhcf2.getCF()), 'm')
p24, = plt.plot([aicarhpick.getpick(), aicarhpick.getpick()],
[-1, 1], 'g')
plt.plot(
[aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5],
[1, 1], 'g')
plt.plot(
[aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5],
[-1, -1], 'g')
p25, = plt.plot([refSpick.getpick(), refSpick.getpick()],
[-1.3, 1.3], 'g', linewidth=2)
plt.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
[1.3, 1.3], 'g', linewidth=2)
plt.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
[-1.3, -1.3], 'g', linewidth=2)
plt.plot([lpickS, lpickS], [-1.1, 1.1], 'g--')
plt.plot([epickS, epickS], [-1.1, 1.1], 'g--')
plt.legend([p21, p22, p23, p24, p25],
['Data', 'CF1', 'CF2', 'Initial S Onset',
'Final S Pick'])
else:
plt.legend([p21, p22], ['Data', 'CF1'])
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s] after %s' % tr_filt.stats.starttime)
plt.ylabel('Normalized Counts')
plt.title(trH2_filt.stats.channel)
plt.show()
raw_input()
plt.close()
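The symmetric-error weighting applied to both P and S onsets above is a simple threshold ladder over the four values configured as timeerrorsP/timeerrorsS in autoPyLoT.in. A small self-contained sketch of that mapping follows; the function name and the threshold values in the example call are made up for illustration and are not part of this commit.

# hedged sketch only: weight class 0 (best) .. 4 (worst) from a symmetric pick error
def weight_from_error(error, timeerrors):
    # timeerrors: four ascending thresholds [s], as in timeerrorsP/timeerrorsS
    if error <= timeerrors[0]:
        return 0
    elif error <= timeerrors[1]:
        return 1
    elif error <= timeerrors[2]:
        return 2
    elif error <= timeerrors[3]:
        return 3
    return 4

print(weight_from_error(0.08, [0.02, 0.04, 0.08, 0.16]))  # illustrative thresholds -> 2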