Merge branch 'develop' of 134.147.164.251:/data/git/pylot into develop

Sebastian Wehling-Benatelli 2015-06-10 15:49:15 +02:00
commit e6e38dbb95
6 changed files with 670 additions and 61 deletions

autoPyLoT.in (new file)

@ -0,0 +1,99 @@
%This is a parameter input file for autoPyLoT.
%All main and special settings regarding data handling
%and picking are to be set here!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#main settings#
/DATA/Insheim #rootpath# %project path
EVENT_DATA/LOCAL #datapath# %data path
2013.02_Insheim #database# %name of data base
e0019.048.13 #eventID# %certain event ID for processing
PILOT #datastructure# %choose data structure
0 #iplot# %flag for plotting: 0 none, 1 partly, >1 everything
AUTOPHASES_AIC_HOS4_ARH #phasefile# %name of autoPILOT output phase file
AUTOLOC_AIC_HOS4_ARH #locfile# %name of autoPILOT output location file
AUTOFOCMEC_AIC_HOS4_ARH.in #focmecin# %name of focmec input file containing polarities
HYPOSAT #locrt# %location routine used ("HYPOINVERSE" or "HYPOSAT")
6 #pmin# %minimum required P picks for location
4 #p0min# %minimum required P picks for location if at least
%3 excellent P picks are found
2 #smin# %minimum required S picks for location
/home/ludger/bin/run_HYPOSAT4autoPILOT.csh #cshellp# %path and name of c-shell script to run location routine
7.6 8.5 #blon# %longitude bounding for location map
49 49.4 #blat# %latitude bounding for location map
#parameters for moment magnitude estimation#
5000 #vp# %average P-wave velocity
2800 #vs# %average S-wave velocity
2200 #rho# %rock density [kg/m^3]
300 #Qp# %quality factor for P waves
100 #Qs# %quality factor for S waves
#common settings picker#
15 #pstart# %start time [s] for calculating CF for P-picking
40 #pstop# %end time [s] for calculating CF for P-picking
-1.0 #sstart# %start time [s] after or before(-) P-onset for calculating CF for S-picking
7 #sstop# %end time [s] after P-onset for calculating CF for S-picking
2 20 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
2 30 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
2 15 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
2 20 #bph2# %lower/upper corner freq. of second band pass filter H-comp. [Hz]
#special settings for calculating CF#
%!!Be careful when editing the following!!
#Z-component#
HOS #algoP# %choose algorithm for P-onset determination (HOS, ARZ, or AR3)
7 #tlta# %for HOS-/AR-AIC-picker, length of LTA window [s]
4 #hosorder# %for HOS-picker, order of Higher Order Statistics
2 #Parorder# %for AR-picker, order of AR process of Z-component
1.2 #tdet1z# %for AR-picker, length of AR determination window [s] for Z-component, 1st pick
0.4 #tpred1z# %for AR-picker, length of AR prediction window [s] for Z-component, 1st pick
0.6 #tdet2z# %for AR-picker, length of AR determination window [s] for Z-component, 2nd pick
0.2 #tpred2z# %for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick
0.001 #addnoise# %add noise to seismogram for stable AR prediction
3 0.1 0.5 0.1 #tsnrz# %for HOS/AR, window lengths for SNR and slope estimation [tnoise,tsafety,tsignal,tslope] [s]
3 #pickwinP# %for initial AIC pick, length of P-pick window [s]
8 #Precalcwin# %for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)
0 #peps4aic# %for HOS/AR, artificial uplift of samples of AIC-function (P)
0.2 #aictsmooth# %for HOS/AR, take average of samples for smoothing of AIC-function [s]
0.1 #tsmoothP# %for HOS/AR, take average of samples for smoothing CF [s]
0.001 #ausP# %for HOS/AR, artificial uplift of samples (aus) of CF (P)
1.3 #nfacP# %for HOS/AR, noise factor for noise level determination (P)
#H-components#
ARH #algoS# %choose algorithm for S-onset determination (ARH or AR3)
0.8 #tdet1h# %for HOS/AR, length of AR-determination window [s], H-components, 1st pick
0.4 #tpred1h# %for HOS/AR, length of AR-prediction window [s], H-components, 1st pick
0.6 #tdet2h# %for HOS/AR, length of AR-determination window [s], H-components, 2nd pick
0.3 #tpred2h# %for HOS/AR, length of AR-prediction window [s], H-components, 2nd pick
4 #Sarorder# %for AR-picker, order of AR process of H-components
6 #Srecalcwin# %for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)
3 #pickwinS# %for initial AIC pick, length of S-pick window [s]
2 0.2 1.5 0.5 #tsnrh# %for ARH/AR3, window lengths for SNR and slope estimation [tnoise,tsafety,tsignal,tslope] [s]
0.05 #aictsmoothS# %for AIC-picker, take average of samples for smoothing of AIC-function [s]
0.02 #tsmoothS# %for AR-picker, take average of samples for smoothing CF [s] (S)
0.2 #pepsS# %for AR-picker, artificial uplift of samples of CF (S)
0.4 #ausS# %for HOS/AR, artificial uplift of samples (aus) of CF (S)
1.5 #nfacS# %for AR-picker, noise factor for noise level determination (S)
%first-motion picker%
1 #minfmweight# %minimum required p weight for first-motion determination
2 #minFMSNR# %minimum required SNR for first-motion determination
0.2 #fmpickwin# %pick window around P onset for calculating zero crossings
%quality assessment%
#initial AIC onset#
0.01 0.02 0.04 0.08 #timeerrorsP# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for P
0.04 0.08 0.16 0.32 #timeerrorsS# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for S
80 #minAICPslope# %below this slope [counts/s] the initial P pick is rejected
1.2 #minAICPSNR# %below this SNR the initial P pick is rejected
50 #minAICSslope# %below this slope [counts/s] the initial S pick is rejected
1.5 #minAICSSNR# %below this SNR the initial S pick is rejected
#check duration of signal using envelope function#
1.5 #prepickwin# %pre-signal window length [s] for noise level estimation
0.7 #minsiglength# %minimum required length of signal [s]
0.2 #sgap# %safety gap between noise and signal window [s]
2 #noisefactor# %noiselevel*noisefactor=threshold
60 #minpercent# %percentage of samples required to be higher than threshold
#check for spuriously picked S-onsets#
3.0 #zfac# %P-amplitude must exceed zfac times RMS-S amplitude
#jackknife-processing for P-picks#
3 #thresholdweight# %minimum required weight of picks
3 #dttolerance# %maximum allowed deviation of P picks from median [s]
4 #minstats# %minimum number of stations with reliable P picks
3 #Sdttolerance# %maximum allowed deviation from Wadati-diagram
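Each non-comment line of autoPyLoT.in above follows the pattern "value(s)  #key#  %description". The file is read by AutoPickParameter (pylot.core.read), whose interface is not part of this diff; the following is only a hedged, stand-alone sketch of how a single line of this format could be parsed:

import re

def parse_param_line(line):
    # strip the trailing %description, then look for the #key# token;
    # section headers like "#main settings#" contain a blank and are skipped
    line = line.split('%', 1)[0]
    match = re.search(r'#(\w+)#', line)
    if match is None:
        return None
    values = line[:match.start()].split()
    return match.group(1), values[0] if len(values) == 1 else values

print parse_param_line('2 20   #bpz1#   %lower/upper corner freq. ...')
# -> ('bpz1', ['2', '20'])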

autoPyLoT.py

@ -6,15 +6,17 @@ import os
import argparse
import glob
import matplotlib.pyplot as plt
from obspy.core import read
from pylot.core.util import _getVersionString
from pylot.core.read import Data, AutoPickParameter
from pylot.core.pick.CharFuns import HOScf, AICcf
from pylot.core.pick.run_autopicking import run_autopicking
from pylot.core.util.structure import DATASTRUCTURE
__version__ = _getVersionString()
METHOD = {'HOS':HOScf, 'AIC':AICcf}
#METHOD = {'HOS':HOScf, 'AIC':AICcf}
def autoPyLoT(inputfile):
'''
@ -37,16 +39,6 @@ def autoPyLoT(inputfile):
data = Data()
# declaring parameter variables (only for convenience)
meth = parameter.getParam('algoP')
tsnr1 = parameter.getParam('tsnr1')
tsnr2 = parameter.getParam('tsnr2')
tnoise = parameter.getParam('pnoiselen')
tsignal = parameter.getParam('tlim')
order = parameter.getParam('hosorder')
thosmw = parameter.getParam('tlta')
# getting information on data structure
if parameter.hasParam('datastructure'):
@ -60,30 +52,63 @@ def autoPyLoT(inputfile):
if parameter.hasParam('eventID'):
dsfields['eventID'] = parameter.getParam('eventID')
exf.append('eventID')
datastructure.modifyFields(**dsfields)
datastructure.modifyFields(**dsfields)
datastructure.setExpandFields(exf)
# process each event in database
# process each event in database
# get streams
# read each event in database
datapath = datastructure.expandDataPath()
if not parameter.hasParam('eventID'):
for event in [events for events in
glob.glob(os.path.join(datapath, '*'))
if os.path.isdir(events)]:
for event in [events for events in glob.glob(os.path.join(datapath, '*')) if os.path.isdir(events)]:
data.setWFData(glob.glob(os.path.join(datapath, event, '*')))
print 'Working on event %s' %event
print data
else:
data.setWFData(glob.glob(os.path.join(datapath,
parameter.getParam('eventID'),
'*')))
print data
wfdat = data.getWFData() # all available streams
##########################################################
# !automated picking starts here!
procstats = []
for i in range(len(wfdat)):
stationID = wfdat[i].stats.station
#check if station has already been processed
if stationID not in procstats:
procstats.append(stationID)
#find corresponding streams
statdat = wfdat.select(station=stationID)
run_autopicking(statdat, parameter)
print '------------------------------------------'
print '-----Finished event %s!-----' % event
print '------------------------------------------'
#for single event processing
else:
data.setWFData(glob.glob(os.path.join(datapath, parameter.getParam('eventID'), '*')))
print 'Working on event ', parameter.getParam('eventID')
print data
wfdat = data.getWFData() # all available streams
##########################################################
# !automated picking starts here!
procstats = []
for i in range(len(wfdat)):
stationID = wfdat[i].stats.station
#check if station has already been processed
if stationID not in procstats:
procstats.append(stationID)
#find corresponding streams
statdat = wfdat.select(station=stationID)
run_autopicking(statdat, parameter)
print '------------------------------------------'
print '-------Finished event %s!-------' % parameter.getParam('eventID')
print '------------------------------------------'
if __name__ == "__main__":
# parse arguments
parser = argparse.ArgumentParser(
description='''This program ''')
description='''autoPyLoT automatically picks phase onset times using higher order statistics,
autoregressive prediction and AIC''')
parser.add_argument('-i', '-I', '--inputfile', type=str,
action='store',
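The event loops added above keep a list of already processed stations and pick each station's streams exactly once; the same dispatch appears twice (multi-event and single-event case). A condensed sketch of that logic, using a hypothetical helper name and assuming ObsPy Stream.select semantics:

def pick_event(wfdat, parameter):
    # pick every station of the event exactly once
    picked = set()
    for trace in wfdat:
        station = trace.stats.station
        if station in picked:
            continue
        picked.add(station)
        statdat = wfdat.select(station=station)  # all components of this station
        run_autopicking(statdat, parameter)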

pylot/core/pick/CharFuns.py

@ -218,13 +218,12 @@ class AICcf(CharacteristicFunction):
nn = np.isnan(xnp)
if len(nn) > 1:
xnp[nn] = 0
i0 = np.where(xnp == 0)
i = np.where(xnp > 0)
xnp[i0] = xnp[i[0][0]]
datlen = len(xnp)
k = np.arange(1, datlen)
cf = np.zeros(datlen)
cumsumcf = np.cumsum(np.power(xnp, 2))
i = np.where(cumsumcf == 0)
cumsumcf[i] = np.finfo(np.float64).eps
cf[k] = ((k - 1) * np.log(cumsumcf[k] / k) + (datlen - k + 1) * \
np.log((cumsumcf[datlen - 1] - cumsumcf[k - 1]) / (datlen - k + 1)))
cf[0] = cf[1]
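Written out, the AIC characteristic function assembled in the two lines above is (zero-based indices as in the code, N = datlen, S_k the cumulative sum of the squared trace):

\[
\mathrm{AIC}(k) \;=\; (k-1)\,\ln\frac{S_k}{k} \;+\; (N-k+1)\,\ln\frac{S_{N-1}-S_{k-1}}{N-k+1},
\qquad S_k=\sum_{j=0}^{k} x_j^{2},\quad k=1,\dots,N-1 .
\]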
@ -236,7 +235,6 @@ class AICcf(CharacteristicFunction):
self.cf = cf - np.mean(cf)
self.xcf = x
class HOScf(CharacteristicFunction):
'''
Function to calculate skewness (statistics of order 3) or kurtosis
@ -310,8 +308,8 @@ class ARZcf(CharacteristicFunction):
cf = np.zeros(len(xnp))
loopstep = self.getARdetStep()
arcalci = ldet + self.getOrder() - 1 #AR-calculation index
for i in range(ldet + self.getOrder() - 1, tend - 2 * lpred + 1):
arcalci = ldet + self.getOrder() #AR-calculation index
for i in range(ldet + self.getOrder(), tend - lpred - 1):
if i == arcalci:
#determination of AR coefficients
#to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
@ -320,10 +318,17 @@ class ARZcf(CharacteristicFunction):
#AR prediction of waveform using calculated AR coefficients
self.arPredZ(xnp, self.arpara, i + 1, lpred)
#prediction error = CF
cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[i:i + lpred] - xnp[i:i + lpred], 2)) / lpred)
cf[i + lpred-1] = np.sqrt(np.sum(np.power(self.xpred[i:i + lpred-1] - xnp[i:i + lpred-1], 2)) / lpred)
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
#remove zeros and artefacts
tap = np.hanning(len(cf))
cf = tap * cf
io = np.where(cf == 0)
ino = np.where(cf > 0)
cf[io] = cf[ino[0][0]]
self.cf = cf
self.xcf = x
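With the shifted index, the characteristic function stored at sample i + lpred - 1 is the root of the mean squared AR prediction error over the prediction window (x-hat the AR-predicted trace, slices zero-based and normalised by lpred as in the code):

\[
\mathrm{CF}(i+l_{\mathrm{pred}}-1)\;=\;\sqrt{\frac{1}{l_{\mathrm{pred}}}\sum_{j=i}^{i+l_{\mathrm{pred}}-2}\bigl(\hat{x}_j-x_j\bigr)^{2}} .
\]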
@ -350,17 +355,18 @@ class ARZcf(CharacteristicFunction):
#recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
rhs = np.zeros(self.getOrder())
for k in range(0, self.getOrder()):
for i in range(rind, ldet):
rhs[k] = rhs[k] + data[i] * data[i - k]
for i in range(rind, ldet+1):
ki = k + 1
rhs[k] = rhs[k] + data[i] * data[i - ki]
#recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
A = np.zeros((2,2))
A = np.zeros((self.getOrder(),self.getOrder()))
for k in range(1, self.getOrder() + 1):
for j in range(1, k + 1):
for i in range(rind, ldet):
for i in range(rind, ldet+1):
ki = k - 1
ji = j - 1
A[ki,ji] = A[ki,ji] + data[i - ji] * data[i - ki]
A[ki,ji] = A[ki,ji] + data[i - j] * data[i - k]
A[ji,ki] = A[ki,ji]
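In equation form, the corrected sums above assemble the right-hand side and the symmetric matrix of the normal equations of eq. 6.5 in Kueperkoch et al. (2012); the AR coefficients are presumably obtained from A a = rhs in the part of arDetZ not shown in this hunk (with p = getOrder(), r = rind):

\[
\mathrm{rhs}_k=\sum_{i=r}^{l_{\mathrm{det}}} x_i\,x_{i-(k+1)},\quad k=0,\dots,p-1;\qquad
A_{k-1,\,j-1}=A_{j-1,\,k-1}=\sum_{i=r}^{l_{\mathrm{det}}} x_{i-j}\,x_{i-k},\quad 1\le j\le k\le p .
\]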
@ -387,20 +393,20 @@ class ARZcf(CharacteristicFunction):
Output: predicted waveform z
'''
#be sure of the summation indices
if rind < len(arpara) + 1:
rind = len(arpara) + 1
if rind > len(data) - lpred + 1:
rind = len(data) - lpred + 1
if rind < len(arpara):
rind = len(arpara)
if rind > len(data) - lpred :
rind = len(data) - lpred
if lpred < 1:
lpred = 1
if lpred > len(data) - 1:
lpred = len(data) - 1
if lpred > len(data) - 2:
lpred = len(data) - 2
z = np.append(data[0:rind], np.zeros(lpred))
for i in range(rind, rind + lpred):
for j in range(1, len(arpara) + 1):
ji = j - 1
z[i] = z[i] + arpara[ji] * z[i - ji]
z[i] = z[i] + arpara[ji] * z[i - j]
self.xpred = z
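The clipped indices above feed the usual AR recursion: each predicted sample is the weighted sum of the p preceding (observed or already predicted) samples,

\[
\hat{z}_i=\sum_{j=1}^{p} a_j\,\hat{z}_{i-j},\qquad i=r_{\mathrm{ind}},\dots,r_{\mathrm{ind}}+l_{\mathrm{pred}}-1,
\]

where a_j = arpara[j-1] and z-hat is initialised with the observed data up to r_ind.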
@ -432,8 +438,9 @@ class ARHcf(CharacteristicFunction):
cf = np.zeros(len(xenoise))
loopstep = self.getARdetStep()
arcalci = ldet + self.getOrder() - 1 #AR-calculation index
for i in range(ldet + self.getOrder() - 1, tend - 2 * lpred + 1):
arcalci = lpred + self.getOrder() - 1 #AR-calculation index
#arcalci = ldet + self.getOrder() - 1 #AR-calculation index
for i in range(lpred + self.getOrder() - 1, tend - 2 * lpred + 1):
if i == arcalci:
#determination of AR coefficients
#to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
@ -447,6 +454,13 @@ class ARHcf(CharacteristicFunction):
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
#remove zeros and artefacts
tap = np.hanning(len(cf))
cf = tap * cf
io = np.where(cf == 0)
ino = np.where(cf > 0)
cf[io] = cf[ino[0][0]]
self.cf = cf
self.xcf = xnp
@ -581,6 +595,13 @@ class AR3Ccf(CharacteristicFunction):
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
#remove zeros and artefacts
tap = np.hanning(len(cf))
cf = tap * cf
io = np.where(cf == 0)
ino = np.where(cf > 0)
cf[io] = cf[ino[0][0]]
self.cf = cf
self.xcf = xnp
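The identical NaN/zero cleanup plus Hann taper now appears in ARZcf, ARHcf and AR3Ccf; a hypothetical helper (not part of this commit) that the three calc methods could share:

import numpy as np

def clean_cf(cf):
    # replace NaNs by zero, taper the ends, and back-fill remaining zeros
    # with the first positive CF value to remove edge artefacts
    cf = np.nan_to_num(cf)
    cf = np.hanning(len(cf)) * cf
    positive = np.flatnonzero(cf > 0)
    if positive.size:
        cf[cf == 0] = cf[positive[0]]
    return cf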

pylot/core/pick/Picker.py

@ -145,6 +145,8 @@ class AICPicker(AutoPicking):
print 'AICPicker: Get initial onset time (pick) from AIC-CF ...'
self.Pick = None
self.slope = None
self.SNR = None
#find NaN's
nn = np.isnan(self.cf)
if len(nn) > 1:
@ -173,7 +175,7 @@ class AICPicker(AutoPicking):
#find NaN's
nn = np.isnan(diffcf)
if len(nn) > 1:
diffcf[nn] = 0
diffcf[nn] = 0
#taper CF to get rid of side maxima
tap = np.hanning(len(diffcf))
diffcf = tap * diffcf * max(abs(aicsmooth))
@ -197,11 +199,15 @@ class AICPicker(AutoPicking):
if self.Pick is not None:
#get noise window
inoise = getnoisewin(self.Tcf, self.Pick, self.TSNR[0], self.TSNR[1])
#check, if these are counts or m/s, important for slope estimation!
#this is quick and dirty, better solution?
if max(self.Data[0].data) < 1e-3:
self.Data[0].data = self.Data[0].data * 1000000
#get signal window
isignal = getsignalwin(self.Tcf, self.Pick, self.TSNR[2])
#calculate SNR from CF
self.SNR = max(abs(self.cf[isignal] - np.mean(self.cf[isignal]))) / max(abs(self.cf[inoise] \
- np.mean(self.cf[inoise])))
self.SNR = max(abs(aic[isignal] - np.mean(aic[isignal]))) / max(abs(aic[inoise] \
- np.mean(aic[inoise])))
#calculate slope from CF after initial pick
#get slope window
tslope = self.TSNR[3] #slope determination window
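The "counts or m/s" heuristic a few lines above (multiply the trace by 1e6 when its maximum is below 1e-3) answers its own "better solution?" question only partially; a hedged, more explicit variant of the same guess (threshold and gain are assumptions, not values from this commit):

def ensure_counts(trace, threshold=1e-3, gain=1e6):
    # crude unit guess: ground velocity in m/s rarely exceeds ~1e-3,
    # while integer count data practically always does
    if abs(trace.data).max() < threshold:
        trace.data = trace.data * gain
    return trace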
@ -230,8 +236,8 @@ class AICPicker(AutoPicking):
self.SNR = None
self.slope = None
if self.iplot is not None:
plt.figure(self.iplot)
if self.iplot > 1:
p = plt.figure(self.iplot)
x = self.Data[0].data
p1, = plt.plot(self.Tcf, x / max(x), 'k')
p2, = plt.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r')
@ -243,7 +249,6 @@ class AICPicker(AutoPicking):
plt.xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
plt.yticks([])
plt.title(self.Data[0].stats.station)
plt.show()
if self.Pick is not None:
plt.figure(self.iplot + 1)
@ -259,11 +264,12 @@ class AICPicker(AutoPicking):
plt.xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
plt.ylabel('Counts')
ax = plt.gca()
ax.set_ylim([-10, max(self.Data[0].data)])
plt.yticks([])
ax.set_xlim([self.Tcf[inoise[0][0]] - 5, self.Tcf[isignal[0][len(isignal) - 1]] + 5])
plt.show()
raw_input()
plt.close(self.iplot)
plt.close(p)
if self.Pick == None:
print 'AICPicker: Could not find minimum, picking window too short?'
@ -347,8 +353,8 @@ class PragPicker(AutoPicking):
elif flagpick_l > 0 and flagpick_r > 0 and cfpick_l >= cfpick_r:
self.Pick = pick_r
if self.getiplot() is not None:
plt.figure(self.getiplot())
if self.getiplot() > 1:
p = plt.figure(self.getiplot())
p1, = plt.plot(Tcfpick,cfipick, 'k')
p2, = plt.plot(Tcfpick,cfsmoothipick, 'r')
p3, = plt.plot([self.Pick, self.Pick], [min(cfipick), max(cfipick)], 'b', linewidth=2)
@ -358,7 +364,7 @@ class PragPicker(AutoPicking):
plt.title(self.Data[0].stats.station)
plt.show()
raw_input()
plt.close(self.getiplot())
plt.close(p)
else:
self.Pick = None

pylot/core/pick/run_autopicking.py (new file)

@ -0,0 +1,459 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Function to run automated picking algorithms using AIC,
HOS and AR prediction. Uses object CharFuns and Picker and
function conglomerate utils.
:author: MAGS2 EP3 working group / Ludger Kueperkoch
"""
from obspy.core import read
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.CharFuns import *
from pylot.core.pick.Picker import *
from pylot.core.pick import utils
def run_autopicking(wfstream, pickparam):
'''
param: wfstream
:type: `~obspy.core.stream.Stream`
param: pickparam
:type: container of picking parameters from input file,
usually autoPyLoT.in
'''
# declaring pickparam variables (only for convenience)
# read your autoPyLoT.in for details!
#special parameters for P picking
algoP = pickparam.getParam('algoP')
iplot = pickparam.getParam('iplot')
pstart = pickparam.getParam('pstart')
pstop = pickparam.getParam('pstop')
thosmw = pickparam.getParam('tlta')
hosorder = pickparam.getParam('hosorder')
tsnrz = pickparam.getParam('tsnrz')
bpz1 = pickparam.getParam('bpz1')
bpz2 = pickparam.getParam('bpz2')
pickwinP = pickparam.getParam('pickwinP')
tsmoothP = pickparam.getParam('tsmoothP')
ausP = pickparam.getParam('ausP')
nfacP = pickparam.getParam('nfacP')
tpred1z = pickparam.getParam('tpred1z')
tdet1z = pickparam.getParam('tdet1z')
Parorder = pickparam.getParam('Parorder')
addnoise = pickparam.getParam('addnoise')
Precalcwin = pickparam.getParam('Precalcwin')
minAICPslope = pickparam.getParam('minAICPslope')
minAICPSNR = pickparam.getParam('minAICPSNR')
timeerrorsP = pickparam.getParam('timeerrorsP')
#special parameters for S picking
algoS = pickparam.getParam('algoS')
sstart = pickparam.getParam('sstart')
sstop = pickparam.getParam('sstop')
bph1 = pickparam.getParam('bph1')
bph2 = pickparam.getParam('bph2')
tsnrh = pickparam.getParam('tsnrh')
pickwinS = pickparam.getParam('pickwinS')
tpred1h = pickparam.getParam('tpred1h')
tdet1h = pickparam.getParam('tdet1h')
tpred2h = pickparam.getParam('tpred2h')
tdet2h = pickparam.getParam('tdet2h')
Sarorder = pickparam.getParam('Sarorder')
aictsmoothS = pickparam.getParam('aictsmoothS')
tsmoothS = pickparam.getParam('tsmoothS')
ausS = pickparam.getParam('ausS')
minAICSslope = pickparam.getParam('minAICSslope')
minAICSSNR = pickparam.getParam('minAICSSNR')
Srecalcwin = pickparam.getParam('Srecalcwin')
nfacS = pickparam.getParam('nfacS')
timeerrorsS = pickparam.getParam('timeerrorsS')
#parameters for first-motion determination
minFMSNR = pickparam.getParam('minFMSNR')
fmpickwin = pickparam.getParam('fmpickwin')
minfmweight = pickparam.getParam('minfmweight')
# split components
zdat = wfstream.select(component="Z")
edat = wfstream.select(component="E")
if len(edat) == 0: #check for other components
edat = wfstream.select(component="2")
ndat = wfstream.select(component="N")
if len(ndat) == 0: #check for other components
ndat = wfstream.select(component="1")
if (algoP == 'HOS' or algoP == 'ARZ') and zdat is not None:
print '##########################################'
print 'run_autopicking: Working on P onset of station %s' % zdat[0].stats.station
print 'Filtering vertical trace ...'
print zdat
z_copy = zdat.copy()
#filter and taper data
tr_filt = zdat[0].copy()
tr_filt.filter('bandpass', freqmin=bpz1[0], freqmax=bpz1[1], zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
z_copy[0].data = tr_filt.data
##############################################################
#check length of waveform and compare with cut times
Lc = pstop - pstart
Lwf = zdat[0].stats.endtime - zdat[0].stats.starttime
Ldiff = Lwf - Lc
if Ldiff < 0:
print 'run_autopicking: Cutting times are too large for actual waveform!'
print 'Use entire waveform instead!'
pstart = 0
pstop = len(zdat[0].data) * zdat[0].stats.delta
cuttimes = [pstart, pstop]
if algoP == 'HOS':
#calculate HOS-CF using subclass HOScf of class CharacteristicFunction
cf1 = HOScf(z_copy, cuttimes, thosmw, hosorder) #instance of HOScf
elif algoP == 'ARZ':
#calculate ARZ-CF using subclass ARZcf of class CharacteristicFunction
cf1 = ARZcf(z_copy, cuttimes, tpred1z, Parorder, tdet1z, addnoise) #instance of ARZcf
##############################################################
#calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_aic = tr_filt.copy()
tr_aic.data = cf1.getCF()
z_copy[0].data = tr_aic.data
aiccf = AICcf(z_copy, cuttimes) #instance of AICcf
##############################################################
#get preliminary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
aicpick = AICPicker(aiccf, tsnrz, pickwinP, iplot, None, tsmoothP)
##############################################################
#go on with processing if AIC onset passes quality control
if aicpick.getSlope() >= minAICPslope and aicpick.getSNR() >= minAICPSNR:
aicPflag = 1
print 'AIC P-pick passes quality control: Slope: %f, SNR: %f' % \
(aicpick.getSlope(), aicpick.getSNR())
print 'Go on with refined picking ...'
#re-filter waveform with larger bandpass
print 'run_autopicking: re-filtering vertical trace ...'
z_copy = zdat.copy()
tr_filt = zdat[0].copy()
tr_filt.filter('bandpass', freqmin=bpz2[0], freqmax=bpz2[1], zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
z_copy[0].data = tr_filt.data
#############################################################
#re-calculate CF from re-filtered trace in vicinity of initial onset
cuttimes2 = [round(max([aicpick.getpick() - Precalcwin, 0])), \
round(min([len(zdat[0].data) * zdat[0].stats.delta, \
aicpick.getpick() + Precalcwin]))]
if algoP == 'HOS':
#calculate HOS-CF using subclass HOScf of class CharacteristicFunction
cf2 = HOScf(z_copy, cuttimes2, thosmw, hosorder) #instance of HOScf
elif algoP == 'ARZ':
#calculate ARZ-CF using subclass ARZcf of class CharacteristicFunction
cf2 = ARZcf(z_copy, cuttimes2, tpred1z, Parorder, tdet1z, addnoise) #instance of ARZcf
##############################################################
#get refined onset time from CF2 using class Picker
refPpick = PragPicker(cf2, tsnrz, pickwinP, iplot, ausP, tsmoothP, aicpick.getpick())
#############################################################
#quality assessment
#get earliest and latest possible pick and symmetrized uncertainty
[lpickP, epickP, Perror] = earllatepicker(z_copy, nfacP, tsnrz, refPpick.getpick(), iplot)
#get SNR
[SNRP, SNRPdB, Pnoiselevel] = getSNR(z_copy, tsnrz, refPpick.getpick())
#weight P-onset using symmetric error
if Perror <= timeerrorsP[0]:
Pweight = 0
elif Perror > timeerrorsP[0] and Perror <= timeerrorsP[1]:
Pweight = 1
elif Perror > timeerrorsP[1] and Perror <= timeerrorsP[2]:
Pweight = 2
elif Perror > timeerrorsP[2] and Perror <= timeerrorsP[3]:
Pweight = 3
elif Perror > timeerrorsP[3]:
Pweight = 4
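# worked example with the defaults from autoPyLoT.in above
# (timeerrorsP = 0.01 0.02 0.04 0.08 s):
#   Perror = 0.015 s -> Pweight = 1,  Perror = 0.05 s -> Pweight = 3,
#   Perror = 0.10 s  -> Pweight = 4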
##############################################################
#get first motion of P onset
#certain quality required
if Pweight <= minfmweight and SNRP >= minFMSNR:
FM = fmpicker(zdat, z_copy, fmpickwin, refPpick.getpick(), iplot)
else:
FM = 'N'
print 'run_autopicking: P-weight: %d, SNR: %f, SNR[dB]: %f, Polarity: %s' % (Pweight, SNRP, SNRPdB, FM)
else:
print 'Bad initial (AIC) P-pick, skip this onset!'
print 'AIC-SNR=', aicpick.getSNR(), 'AIC-Slope=', aicpick.getSlope()
Pweight = 4
Sweight = 4
FM = 'N'
SNRP = None
SNRPdB = None
SNRS = None
SNRSdB = None
aicSflag = 0
aicPflag = 0
else:
print 'run_autopicking: No vertical component data available, skipping station!'
return
if edat is not None and ndat is not None and len(edat) > 0 and len(ndat) > 0 and Pweight < 4:
print 'Go on picking S onset ...'
print '##################################################'
print 'Working on S onset of station %s' % edat[0].stats.station
print 'Filtering horizontal traces ...'
#determine time window for calculating CF after P onset
#cuttimesh = [round(refPpick.getpick() + sstart), round(refPpick.getpick() + sstop)]
cuttimesh = [round(max([refPpick.getpick() + sstart, 0])), \
round(min([refPpick.getpick() + sstop, Lwf]))]
if algoS == 'ARH':
print edat, ndat
#re-create stream object including both horizontal components
hdat = edat.copy()
hdat += ndat
h_copy = hdat.copy()
#filter and taper data
trH1_filt = hdat[0].copy()
trH2_filt = hdat[1].copy()
trH1_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1], zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1], zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
h_copy[0].data = trH1_filt.data
h_copy[1].data = trH2_filt.data
elif algoS == 'AR3':
print zdat, edat, ndat
#re-create stream object including both horizontal components
hdat = zdat.copy()
hdat += edat
hdat += ndat
h_copy = hdat.copy()
#filter and taper data
trH1_filt = hdat[0].copy()
trH2_filt = hdat[1].copy()
trH3_filt = hdat[2].copy()
trH1_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1], zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1], zerophase=False)
trH3_filt.filter('bandpass', freqmin=bph1[0], freqmax=bph1[1], zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
trH3_filt.taper(max_percentage=0.05, type='hann')
h_copy[0].data = trH1_filt.data
h_copy[1].data = trH2_filt.data
h_copy[2].data = trH3_filt.data
##############################################################
if algoS == 'ARH':
#calculate ARH-CF using subclass ARHcf of class CharacteristicFunction
arhcf1 = ARHcf(h_copy, cuttimesh, tpred1h, Sarorder, tdet1h, addnoise) #instance of ARHcf
elif algoS == 'AR3':
#calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
arhcf1 = AR3Ccf(h_copy, cuttimesh, tpred1h, Sarorder, tdet1h, addnoise) #instance of ARHcf
##############################################################
#calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_arhaic = trH1_filt.copy()
tr_arhaic.data = arhcf1.getCF()
h_copy[0].data = tr_arhaic.data
#calculate ARH-AIC-CF
haiccf = AICcf(h_copy, cuttimesh) #instance of AICcf
##############################################################
#get preliminary onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
aicarhpick = AICPicker(haiccf, tsnrh, pickwinS, iplot, None, aictsmoothS)
###############################################################
#go on with processing if AIC onset passes quality control
if aicarhpick.getSlope() >= minAICSslope and aicarhpick.getSNR() >= minAICSSNR:
aicSflag = 1
print 'AIC S-pick passes quality control: Slope: %f, SNR: %f' \
% (aicarhpick.getSlope(), aicarhpick.getSNR())
print 'Go on with refined picking ...'
#re-calculate CF from re-filtered trace in vicinity of initial onset
cuttimesh2 = [round(aicarhpick.getpick() - Srecalcwin), \
round(aicarhpick.getpick() + Srecalcwin)]
#re-filter waveform with larger bandpass
print 'run_autopicking: re-filtering horizontal traces...'
h_copy = hdat.copy()
#filter and taper data
if algoS == 'ARH':
trH1_filt = hdat[0].copy()
trH2_filt = hdat[1].copy()
trH1_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1], zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1], zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
h_copy[0].data = trH1_filt.data
h_copy[1].data = trH2_filt.data
#############################################################
arhcf2 = ARHcf(h_copy, cuttimesh2, tpred2h, Sarorder, tdet2h, addnoise) #instance of ARHcf
elif algoS == 'AR3':
trH1_filt = hdat[0].copy()
trH2_filt = hdat[1].copy()
trH3_filt = hdat[2].copy()
trH1_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1], zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1], zerophase=False)
trH3_filt.filter('bandpass', freqmin=bph2[0], freqmax=bph2[1], zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
trH3_filt.taper(max_percentage=0.05, type='hann')
h_copy[0].data = trH1_filt.data
h_copy[1].data = trH2_filt.data
h_copy[2].data = trH3_filt.data
#############################################################
arhcf2 = AR3Ccf(h_copy, cuttimesh2, tpred2h, Sarorder, tdet2h, addnoise) #instance of ARHcf
#get refined onset time from CF2 using class Picker
refSpick = PragPicker(arhcf2, tsnrh, pickwinS, iplot, ausS, tsmoothS, aicarhpick.getpick())
#############################################################
#quality assessment
#get earliest and latest possible pick and symmetrized uncertainty
h_copy[0].data = trH1_filt.data
[lpickS1, epickS1, Serror1] = earllatepicker(h_copy, nfacS, tsnrh, refSpick.getpick(), iplot)
h_copy[0].data = trH2_filt.data
[lpickS2, epickS2, Serror2] = earllatepicker(h_copy, nfacS, tsnrh, refSpick.getpick(), iplot)
if algoS == 'ARH':
#get earliest pick of both earliest possible picks
epick = [epickS1, epickS2]
lpick = [lpickS1, lpickS2]
pickerr = [Serror1, Serror2]
ipick = np.argmin([epickS1, epickS2])
elif algoS == 'AR3':
[lpickS3, epickS3, Serror3] = earllatepicker(h_copy, nfacS, tsnrh, refSpick.getpick(), iplot)
#get earliest pick of all three picks
epick = [epickS1, epickS2, epickS3]
lpick = [lpickS1, lpickS2, lpickS3]
pickerr = [Serror1, Serror2, Serror3]
ipick = np.argmin([epickS1, epickS2, epickS3])
epickS = epick[ipick]
lpickS = lpick[ipick]
Serror = pickerr[ipick]
#get SNR
[SNRS, SNRSdB, Snoiselevel] = getSNR(h_copy, tsnrh, refSpick.getpick())
#weight S-onset using symmetric error
if Serror <= timeerrorsS[0]:
Sweight = 0
elif Serror > timeerrorsS[0] and Serror <= timeerrorsS[1]:
Sweight = 1
elif Serror > timeerrorsS[1] and Serror <= timeerrorsS[2]:
Sweight = 2
elif Serror > timeerrorsS[2] and Serror <= timeerrorsS[3]:
Sweight = 3
elif Serror > timeerrorsS[3]:
Sweight = 4
print 'run_autopicking: S-weight: %d, SNR: %f, SNR[dB]: %f' % (Sweight, SNRS, SNRSdB)
else:
print 'Bad initial (AIC) S-pick, skip this onset!'
print 'AIC-SNR=', aicarhpick.getSNR(), 'AIC-Slope=', aicarhpick.getSlope()
Sweight = 4
SNRS = None
SNRSdB = None
aicSflag = 0
else:
print 'run_autopicking: No horizontal component data available or bad P onset, skipping S picking!'
return
##############################################################
if iplot > 0:
#plot vertical trace
plt.figure()
plt.subplot(3,1,1)
tdata = np.arange(0, zdat[0].stats.npts / tr_filt.stats.sampling_rate, tr_filt.stats.delta)
#check equal length of arrays, sometimes they are different!?
wfldiff = len(tr_filt.data) - len(tdata)
if wfldiff < 0:
tdata = tdata[0:len(tdata) - abs(wfldiff)]
p1, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
if Pweight < 4:
p2, = plt.plot(cf1.getTimeArray(), cf1.getCF() / max(cf1.getCF()), 'b')
if aicPflag == 1:
p3, = plt.plot(cf2.getTimeArray(), cf2.getCF() / max(cf2.getCF()), 'm')
p4, = plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'r')
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [1, 1], 'r')
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [-1, -1], 'r')
p5, = plt.plot([refPpick.getpick(), refPpick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
plt.plot([refPpick.getpick()-0.5, refPpick.getpick()+0.5], [1.3, 1.3], 'r', linewidth=2)
plt.plot([refPpick.getpick()-0.5, refPpick.getpick()+0.5], [-1.3, -1.3], 'r', linewidth=2)
plt.plot([lpickP, lpickP], [-1.1, 1.1], 'r--')
plt.plot([epickP, epickP], [-1.1, 1.1], 'r--')
plt.legend([p1, p2, p3, p4, p5], ['Data', 'CF1', 'CF2', 'Initial P Onset', 'Final P Pick'])
plt.title('%s, %s, P Weight=%d, SNR=%7.2f, SNR[dB]=%7.2f Polarity: %s' % (tr_filt.stats.station, \
tr_filt.stats.channel, Pweight, SNRP, SNRPdB, FM))
else:
plt.legend([p1, p2], ['Data', 'CF1'])
plt.title('%s, P Weight=%d, SNR=None, SNRdB=None' % (tr_filt.stats.channel, Pweight))
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.suptitle(tr_filt.stats.starttime)
#plot horizontal traces
plt.subplot(3,1,2)
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
#check equal length of arrays, sometimes they are different!?
wfldiff = len(trH1_filt.data) - len(th1data)
if wfldiff < 0:
th1data = th1data[0:len(th1data) - abs(wfldiff)]
p21, = plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
if Pweight < 4:
p22, = plt.plot(arhcf1.getTimeArray(), arhcf1.getCF()/max(arhcf1.getCF()), 'b')
if aicSflag == 1:
p23, = plt.plot(arhcf2.getTimeArray(), arhcf2.getCF()/max(arhcf2.getCF()), 'm')
p24, = plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'g')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'g')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'g')
p25, = plt.plot([refSpick.getpick(), refSpick.getpick()], [-1.3, 1.3], 'g', linewidth=2)
plt.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5], [1.3, 1.3], 'g', linewidth=2)
plt.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5], [-1.3, -1.3], 'g', linewidth=2)
plt.plot([lpickS, lpickS], [-1.1, 1.1], 'g--')
plt.plot([epickS, epickS], [-1.1, 1.1], 'g--')
plt.legend([p21, p22, p23, p24, p25], ['Data', 'CF1', 'CF2', 'Initial S Onset', 'Final S Pick'])
plt.title('%s, S Weight=%d, SNR=%7.2f, SNR[dB]=%7.2f' % (trH1_filt.stats.channel, \
Sweight, SNRS, SNRSdB))
else:
plt.legend([p21, p22], ['Data', 'CF1'])
plt.title('%s, S Weight=%d, SNR=None, SNRdB=None' % (trH1_filt.stats.channel, Sweight))
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.suptitle(trH1_filt.stats.starttime)
plt.subplot(3,1,3)
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
#check equal length of arrays, sometimes they are different!?
wfldiff = len(trH2_filt.data) - len(th2data)
if wfldiff < 0:
th2data = th2data[0:len(th2data) - abs(wfldiff)]
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
if Pweight < 4:
p22, = plt.plot(arhcf1.getTimeArray(), arhcf1.getCF()/max(arhcf1.getCF()), 'b')
if aicSflag == 1:
p23, = plt.plot(arhcf2.getTimeArray(), arhcf2.getCF()/max(arhcf2.getCF()), 'm')
p24, = plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'g')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'g')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'g')
p25, = plt.plot([refSpick.getpick(), refSpick.getpick()], [-1.3, 1.3], 'g', linewidth=2)
plt.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5], [1.3, 1.3], 'g', linewidth=2)
plt.plot([refSpick.getpick() - 0.5, refSpick.getpick() + 0.5], [-1.3, -1.3], 'g', linewidth=2)
plt.plot([lpickS, lpickS], [-1.1, 1.1], 'g--')
plt.plot([epickS, epickS], [-1.1, 1.1], 'g--')
plt.legend([p21, p22, p23, p24, p25], ['Data', 'CF1', 'CF2', 'Initial S Onset', 'Final S Pick'])
else:
plt.legend([p21, p22], ['Data', 'CF1'])
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s] after %s' % tr_filt.stats.starttime)
plt.ylabel('Normalized Counts')
plt.title(trH2_filt.stats.channel)
plt.show()
raw_input()
plt.close()
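For completeness, a minimal, hypothetical driver for the new function; the AutoPickParameter constructor signature is an assumption (only its getParam/hasParam usage is visible in this diff), and the data path follows the example values in autoPyLoT.in:

from obspy.core import read
from pylot.core.read import AutoPickParameter        # interface assumed
from pylot.core.pick.run_autopicking import run_autopicking

# read all waveforms of one event and pick it station by station
st = read('/DATA/Insheim/EVENT_DATA/LOCAL/2013.02_Insheim/e0019.048.13/*')
parameter = AutoPickParameter('autoPyLoT.in')         # assumption: filename constructor
for station in set(tr.stats.station for tr in st):
    run_autopicking(st.select(station=station), parameter)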

pylot/core/pick/utils.py

@ -11,7 +11,6 @@
import numpy as np
import matplotlib.pyplot as plt
from obspy.core import Stream
import pdb
def earllatepicker(X, nfac, TSNR, Pick1, iplot=None):
@ -81,8 +80,8 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=None):
diffti_te = Pick1 - EPick
PickError = (diffti_te + 2 * diffti_tl) / 3
if iplot is not None:
plt.figure(iplot)
if iplot > 1:
p = plt.figure(iplot)
p1, = plt.plot(t, x, 'k')
p2, = plt.plot(t[inoise], x[inoise])
p3, = plt.plot(t[isignal], x[isignal], 'r')
@ -109,7 +108,7 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=None):
X[0].stats.station)
plt.show()
raw_input()
plt.close(iplot)
plt.close(p)
return EPick, LPick, PickError
@ -240,7 +239,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=None):
elif P1[0] > 0 and P2[0] <= 0:
FM = '+'
if iplot is not None:
if iplot > 1:
plt.figure(iplot)
plt.subplot(2, 1, 1)
plt.plot(t, xraw, 'k')