Merge branch 'develop' of ariadne.geophysik.ruhr-uni-bochum.de:/data/git/pylot into develop
Conflicts: pylot/core/read/data.py
commit 779b9e7313
autoPyLoT.py  Normal file → Executable file
@@ -10,7 +10,6 @@ EVENT_DATA/LOCAL #datapath# %data path
 2013.02_Insheim #database# %name of data base
 e0019.048.13 #eventID# %event ID for single event processing
 /DATA/Insheim/STAT_INFO #invdir# %full path to inventory or dataless-seed file
-0.5 0.9 190.0 195.0 #prefilt# %corner frequencies for pre-filtering traces before restitution
 PILOT #datastructure# %choose data structure
 0 #iplot# %flag for plotting: 0 none, 1, partly, >1 everything
 AUTOPHASES_AIC_HOS4_ARH #phasefile# %name of autoPILOT output phase file
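
The #prefilt# corner frequencies disappear from the parameter file because the reworked restituteWFData (see the data.py hunk further down) derives them per trace from the Nyquist frequency. A minimal sketch of that derivation, assuming an ObsPy-style trace whose stats carry sampling_rate; the helper name corner_frequencies is only illustrative:

    def corner_frequencies(sampling_rate):
        # per-trace pre-filter corners as used after this change (sketch);
        # low-side corners are fixed at 0.5/0.9 Hz, high-side corners sit
        # just below the Nyquist frequency (95 % and 98 % of fny)
        fny = sampling_rate / 2.0
        fc21 = fny - (fny * 0.05)
        fc22 = fny - (fny * 0.02)
        return [0.5, 0.9, fc21, fc22]

    # example: a 100 Hz trace gives corners [0.5, 0.9, 47.5, 49.0]
    print(corner_frequencies(100.0))

Only the high-side corners track the trace's sampling rate; the 0.5/0.9 Hz values match those hard-coded in the diff.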
@@ -163,7 +163,7 @@ def autopickstation(wfstream, pickparam):
         if Ldiff < 0:
             print 'autopickstation: Cutting times are too large for actual ' \
                   'waveform!'
-            print 'Use entire waveform instead!'
+            print 'Using entire waveform instead!'
             pstart = 0
             pstop = len(zdat[0].data) * zdat[0].stats.delta
             cuttimes = [pstart, pstop]
@@ -200,7 +200,7 @@ def autopickstation(wfstream, pickparam):
         # both horizontal traces needed
         if len(ndat) == 0 or len(edat) == 0:
             print 'One or more horizontal components missing!'
-            print 'Skip control function checkZ4S.'
+            print 'Skipping control function checkZ4S.'
         else:
             # filter and taper horizontal traces
             trH1_filt = edat.copy()
@@ -297,14 +297,14 @@ def autopickstation(wfstream, pickparam):
                 Sflag = 1

             else:
-                print 'Bad initial (AIC) P-pick, skip this onset!'
+                print 'Bad initial (AIC) P-pick, skipping this onset!'
                 print 'AIC-SNR=', aicpick.getSNR(), 'AIC-Slope=', aicpick.getSlope(), 'counts/s'
                 print '(min. AIC-SNR=', minAICPSNR, ', min. AIC-Slope=', minAICPslope, 'counts/s)'
                 Sflag = 0

         else:
-            print 'autopickstation: No vertical component data availabler!, ' \
-                  'Skip station!'
+            print 'autopickstation: No vertical component data available!, ' \
+                  'Skipping station!'

         if edat is not None and ndat is not None and len(edat) > 0 and len(
                 ndat) > 0 and Pweight < 4:
@@ -503,7 +503,7 @@ def autopickstation(wfstream, pickparam):
                                       Sweight, SNRS, SNRSdB)

             else:
-                print 'Bad initial (AIC) S-pick, skip this onset!'
+                print 'Bad initial (AIC) S-pick, skipping this onset!'
                 print 'AIC-SNR=', aicarhpick.getSNR(), \
                       'AIC-Slope=', aicarhpick.getSlope(), 'counts/s'
                 print '(min. AIC-SNR=', minAICSSNR, ', min. AIC-Slope=', \
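
The messages above report the AIC pick's SNR and slope against configured minima. The acceptance test itself lies outside these hunks; the following is a hypothetical illustration of that kind of threshold check, reusing the names aicpick, minAICPSNR and minAICPslope from the messages (the comparison operators and helper name are assumed, not taken from the source):

    def aic_pick_acceptable(aicpick, min_snr, min_slope):
        # hypothetical sketch: keep an initial AIC pick only if both its
        # signal-to-noise ratio and its slope reach the configured minima
        return aicpick.getSNR() >= min_snr and aicpick.getSlope() >= min_slope

    # assumed usage, mirroring the branches printed above:
    # if aic_pick_acceptable(aicpick, minAICPSNR, minAICPslope):
    #     ...            # go on picking, Sflag = 1
    # else:
    #     Sflag = 0      # 'Bad initial (AIC) P-pick, skipping this onset!'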
@@ -1,7 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-

-import pdb
 import os
 import glob
 import matplotlib.pyplot as plt
@@ -149,50 +148,114 @@ class Data(object):
     def resetPicks(self):
         self.getEvtData().picks = []

-    def restituteWFData(self, invdlpath, prefilt):
+    def restituteWFData(self, invdlpath, streams=None):
+        if streams == None:
+            st = self.getWFData()
+        else:
+            st = streams
+
         for tr in st:
-            if tr.stats.station[3] == '_':  # this is for some ugly station naming
+            # remove underscores
+            if tr.stats.station[3] == '_':
                 tr.stats.station = tr.stats.station[0:3]
-        dlp = '%s/*.dataless' % invdlpath
-        invp = '%s/*.inv' % invdlpath
+        dlp = '%s/*.dless' % invdlpath
+        invp = '%s/*.xml' % invdlpath
         respp = '%s/*.resp' % invdlpath
         dlfile = glob.glob(dlp)
         invfile = glob.glob(invp)
         respfile = glob.glob(respp)

+        # check for dataless-SEED file
         if len(dlfile) >= 1:
-            print "Found dataless-SEED file!"
+            print "Found dataless-SEED file(s)!"
             print "Reading meta data information ..."
-            print dlfile[0]
-            parser = Parser('%s' % dlfile[0])
-            print "Correcting for instrument response ..."
-            st.simulate(pre_filt=prefilt, seedresp={'filename': parser, \
-                        'date': st[0].stats.starttime, 'units': "VEL"})
+            for j in range(len(dlfile)):
+                print "Found dataless-SEED file %s" % dlfile[j]
+                parser = Parser('%s' % dlfile[j])
+                for i in range(len(st)):
+                    # check, whether this trace has already been corrected
+                    try:
+                        st[i].stats.processing
+                    except:
+                        try:
+                            print "Correcting %s, %s for instrument response ..." \
+                                  % (st[i].stats.station, st[i].stats.channel)
+                            # get corner frequencies for pre-filtering
+                            fny = st[i].stats.sampling_rate / 2
+                            fc21 = fny - (fny * 0.05)
+                            fc22 = fny - (fny * 0.02)
+                            prefilt = [0.5, 0.9, fc21, fc22]
+                            # instrument correction
+                            st[i].simulate(pre_filt=prefilt, seedresp={'filename': parser, \
+                                           'date': st[i].stats.starttime, 'units': "VEL"})
+                        except ValueError, e:
+                            vmsg = '{0}'.format(e)
+                            print vmsg
+                    else:
+                        print "Trace has already been corrected!"

-        elif len(invfile) >= 1:
-            print "Found inventory-xml file!"
+        # check for inventory-xml file
+        if len(invfile) >= 1:
+            print "Found inventory-xml file(s)!"
             print "Reading meta data information ..."
-            inv = read_inventory(invfile, format="STATIONXML")
-            print "Applying instrument correction ..."
-            st.attach_response(inv)
-            st.remove_response(output='VEL', pre_filt=prefilt)
-        elif len(respfile) >= 1:
-            print "Found response file!"
-            print respfile
+            for j in range(len(invfile)):
+                print "Found inventory-xml file %s" % invfile[j]
+                inv = read_inventory(invfile[j], format="STATIONXML")
+                for i in range(len(st)):
+                    # check, whether this trace has already been corrected
+                    try:
+                        st[i].stats.processing
+                    except:
+                        try:
+                            print "Correcting %s, %s for instrument response ..." \
+                                  % (st[i].stats.station, st[i].stats.channel)
+                            # get corner frequencies for pre-filtering
+                            fny = st[i].stats.sampling_rate / 2
+                            fc21 = fny - (fny * 0.05)
+                            fc22 = fny - (fny * 0.02)
+                            prefilt = [0.5, 0.9, fc21, fc22]
+                            # instrument correction
+                            st[i].attach_response(inv)
+                            st[i].remove_response(output='VEL', pre_filt=prefilt)
+                        except ValueError, e:
+                            vmsg = '{0}'.format(e)
+                            print vmsg
+                    else:
+                        print "Trace has already been corrected!"

+        # check for RESP-file
+        if len(respfile) >= 1:
+            print "Found response file(s)!"
+            print "Reading meta data information ..."
+            for j in range(len(respfile)):
+                print "Found RESP-file %s" % respfile[j]
+                for i in range(len(st)):
+                    # check, whether this trace has already been corrected
+                    try:
+                        st[i].stats.processing
+                    except:
+                        try:
+                            print "Correcting %s, %s for instrument response ..." \
+                                  % (st[i].stats.station, st[i].stats.channel)
+                            # get corner frequencies for pre-filtering
+                            fny = st[i].stats.sampling_rate / 2
+                            fc21 = fny - (fny * 0.05)
+                            fc22 = fny - (fny * 0.02)
+                            prefilt = [0.5, 0.9, fc21, fc22]
+                            # instrument correction
                             seedresp={'filename': respfile[0], 'date': st[0].stats.starttime, \
                                       'units': "VEL"}
-            st.simulate(paz_remove=None, pre_filt=prefilt, seedresp=seedresp)
+                            st[i].simulate(paz_remove=None, pre_filt=prefilt, seedresp=seedresp)
+                        except ValueError, e:
+                            vmsg = '{0}'.format(e)
+                            print vmsg
+                    else:
+                        print "Trace has already been corrected!"

         if len(respfile) < 1 and len(invfile) < 1 and len(dlfile) < 1:
             print "No dataless-SEED file,inventory-xml file nor RESP-file found!"
             print "Go on processing data without source parameter determination!"

         return st

     def getEvtData(self):
         return self.evtdata
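
For orientation, the reworked method dispatches on three kinds of station metadata (dataless SEED via Parser plus simulate, StationXML via read_inventory plus remove_response, RESP files via simulate with a seedresp dictionary) and skips traces that already carry a processing history. Below is a condensed, standalone sketch of that logic in modern Python, assuming ObsPy is installed; the function name restitute and the use of only the first matching metadata file are simplifications of the loops above, and the Parser import path differs between ObsPy versions (obspy.xseed in older releases):

    import glob

    from obspy import read_inventory
    from obspy.io.xseed import Parser  # older ObsPy: from obspy.xseed import Parser

    def restitute(stream, invdlpath):
        # sketch: correct every not-yet-corrected trace of an ObsPy Stream to
        # velocity, using whichever metadata file is found in invdlpath
        dless = glob.glob('%s/*.dless' % invdlpath)
        xml = glob.glob('%s/*.xml' % invdlpath)
        resp = glob.glob('%s/*.resp' % invdlpath)

        for tr in stream:
            if getattr(tr.stats, 'processing', None):
                continue  # trace has already been corrected
            # pre-filter corners derived from the Nyquist frequency, as in the diff
            fny = tr.stats.sampling_rate / 2.0
            prefilt = [0.5, 0.9, fny - fny * 0.05, fny - fny * 0.02]
            if dless:
                # dataless SEED: evaluate the response via the Parser
                parser = Parser(dless[0])
                tr.simulate(pre_filt=prefilt,
                            seedresp={'filename': parser,
                                      'date': tr.stats.starttime,
                                      'units': 'VEL'})
            elif xml:
                # StationXML inventory
                inv = read_inventory(xml[0], format='STATIONXML')
                tr.attach_response(inv)
                tr.remove_response(output='VEL', pre_filt=prefilt)
            elif resp:
                # plain RESP file
                tr.simulate(paz_remove=None, pre_filt=prefilt,
                            seedresp={'filename': resp[0],
                                      'date': tr.stats.starttime,
                                      'units': 'VEL'})
        return stream

In current ObsPy one would normally attach the inventory once per stream rather than per trace; the per-trace loop here only mirrors the structure of the code in the diff.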