[move] pseudo method restituteWFData changed to function restitute_data and moved to dataprocessing

Sebastian Wehling-Benatelli 2016-09-10 13:23:38 +02:00
parent 8866d54428
commit 8a5fcf05bf
4 changed files with 143 additions and 140 deletions
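
In short: call sites that previously used the Data method now import the standalone function from pylot.core.util.dataprocessing and pass the stream first and the metadata path second. A minimal before/after sketch (wf_copy is an ObsPy Stream copy and invdir a metadata directory, both placeholder names taken from the call sites below):

# before: restitution was a method of pylot.core.io.data.Data
from pylot.core.io.data import Data
data = Data()
cordat, restflag = data.restituteWFData(invdir, wf_copy)   # (metadata path, stream)

# after: standalone function; note the swapped argument order
from pylot.core.util.dataprocessing import restitute_data
cordat, restflag = restitute_data(wf_copy, invdir)         # (stream, metadata path)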

View File

@@ -14,6 +14,7 @@ from pylot.core.util.utils import getPatternLine
from scipy.optimize import curve_fit
from scipy import integrate, signal
from pylot.core.io.data import Data
from pylot.core.util.dataprocessing import restitute_data
class Magnitude(object):
@@ -354,7 +355,7 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
data = Data()
wf_copy = wfstream.copy()
[cordat, restflag] = data.restituteWFData(inventory, wf_copy)
[cordat, restflag] = restitute_data(wf_copy, inventory)
if restflag == 1:
zdat = cordat.select(component="Z")
if len(zdat) == 0:

View File

@@ -2,12 +2,10 @@
# -*- coding: utf-8 -*-
import copy
import glob
import os
from obspy import read_events, read_inventory
from obspy import read_events
from obspy.core import read, Stream, UTCDateTime
from obspy.core.event import Event
from obspy.io.xseed import Parser
from pylot.core.io.phases import readPILOTEvent, picks_from_picksdict, \
    picksdict_from_pilot, merge_picks
@@ -269,139 +267,6 @@ class Data(object):
"""
self.get_evt_data().picks = []
def restituteWFData(self, invdlpath, streams=None):
"""
:param invdlpath:
:param streams:
:return:
"""
restflag = 0
if streams is None:
st_raw = self.getWFData()
st = st_raw.copy()
else:
st = streams
for tr in st:
# remove underscores
if len(tr.stats.station) > 3 and tr.stats.station[3] == '_':
tr.stats.station = tr.stats.station[0:3]
dlp = '%s/*.dless' % invdlpath
invp = '%s/*.xml' % invdlpath
respp = '%s/*.resp' % invdlpath
dlfile = glob.glob(dlp)
invfile = glob.glob(invp)
respfile = glob.glob(respp)
# check for dataless-SEED file
if len(dlfile) >= 1:
print("Found dataless-SEED file(s)!")
print("Reading meta data information ...")
for j in range(len(dlfile)):
print("Found dataless-SEED file %s" % dlfile[j])
parser = Parser('%s' % dlfile[j])
for i in range(len(st)):
# check, whether this trace has already been corrected
try:
st[i].stats.processing
except:
try:
print(
"Correcting %s, %s for instrument response "
"..." % (st[i].stats.station,
st[i].stats.channel))
# get corner frequencies for pre-filtering
fny = st[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
st[i].simulate(pre_filt=prefilt,
seedresp={'filename': parser,
'date': st[
i].stats.starttime,
'units': "VEL"})
restflag = 1
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
# check for inventory-xml file
if len(invfile) >= 1:
print("Found inventory-xml file(s)!")
print("Reading meta data information ...")
for j in range(len(invfile)):
print("Found inventory-xml file %s" % invfile[j])
inv = read_inventory(invfile[j], format="STATIONXML")
for i in range(len(st)):
# check, whether this trace has already been corrected
try:
st[i].stats.processing
except:
try:
print("Correcting %s, %s for instrument response "
"..." % (st[i].stats.station,
st[i].stats.channel))
# get corner frequencies for pre-filtering
fny = st[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
st[i].attach_response(inv)
st[i].remove_response(output='VEL',
pre_filt=prefilt)
restflag = 1
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
# check for RESP-file
if len(respfile) >= 1:
print("Found response file(s)!")
print("Reading meta data information ...")
for j in range(len(respfile)):
print("Found RESP-file %s" % respfile[j])
for i in range(len(st)):
# check, whether this trace has already been corrected
try:
st[i].stats.processing
except:
try:
print("Correcting %s, %s for instrument response "
"..." % (st[i].stats.station,
st[i].stats.channel))
# get corner frequencies for pre-filtering
fny = st[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
seedresp = {'filename': respfile[0],
'date': st[0].stats.starttime,
'units': "VEL"}
st[i].simulate(paz_remove=None, pre_filt=prefilt,
seedresp=seedresp)
restflag = 1
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
if len(respfile) < 1 and len(invfile) < 1 and len(dlfile) < 1:
print("No dataless-SEED file,inventory-xml file nor RESP-file "
"found!")
print("Go on processing data without source parameter "
"determination!")
return st, restflag
def get_evt_data(self):
"""

View File

@@ -17,6 +17,7 @@ from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.charfuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
    getSNR, fmpicker, checkPonsets, wadaticheck
from pylot.core.util.dataprocessing import restitute_data
from pylot.core.util.utils import getPatternLine
from pylot.core.io.data import Data
from pylot.core.analysis.magnitude import WApp
@@ -564,7 +565,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
hdat = edat.copy()
hdat += ndat
h_copy = hdat.copy()
[cordat, restflag] = data.restituteWFData(invdir, h_copy)
[cordat, restflag] = restitute_data(h_copy, invdir)
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
if restflag == 1:
@@ -598,7 +599,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
hdat = edat.copy()
hdat += ndat
h_copy = hdat.copy()
[cordat, restflag] = data.restituteWFData(invdir, h_copy)
[cordat, restflag] = restitute_data(h_copy, invdir)
if restflag == 1:
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude

View File

@@ -3,8 +3,10 @@
import os
import glob
from obspy import UTCDateTime
from obspy import UTCDateTime, read_inventory
import sys
from obspy.io.xseed import Parser
def time_from_header(header):
"""
@@ -148,6 +150,140 @@ def evt_head_check(root_dir, out_dir = None):
print(nfiles)
def restitute_data(data, invdlpath):
"""
:param invdlpath:
:param data:
:return:
"""
restflag = False
for tr in data:
# remove underscores
if len(tr.stats.station) > 3 and tr.stats.station[3] == '_':
tr.stats.station = tr.stats.station[0:3]
dlfile = list()
invfile = list()
respfile = list()
inv = dict(dless=dlfile, xml=invfile, resp=respfile)
if os.path.isfile(invdlpath):
ext = os.path.splitext(invdlpath)[1].split('.')[1]
inv[ext] += [invdlpath]
else:
for ext in inv.keys():
inv[ext] += glob.glob1(invdlpath,'*.{}'.format(ext))
# check for dataless-SEED file
# TODO ineffective loop -> better concatenate inventory entries and
# loop over traces
if len(dlfile) >= 1:
print("Found dataless-SEED file(s)!")
print("Reading meta data information ...")
for j in range(len(dlfile)):
print("Found dataless-SEED file %s" % dlfile[j])
parser = Parser('%s' % dlfile[j])
for i in range(len(data)):
# check, whether this trace has already been corrected
try:
data[i].stats.processing
except:
try:
print(
"Correcting %s, %s for instrument response "
"..." % (data[i].stats.station,
data[i].stats.channel))
# get corner frequencies for pre-filtering
fny = data[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
data[i].simulate(pre_filt=prefilt,
seedresp={'filename': parser,
'date': data[
i].stats.starttime,
'units': "VEL"})
restflag = True
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
# check for inventory-xml file
if len(invfile) >= 1:
print("Found inventory-xml file(s)!")
print("Reading meta data information ...")
for j in range(len(invfile)):
print("Found inventory-xml file %s" % invfile[j])
inv = read_inventory(invfile[j], format="STATIONXML")
for i in range(len(data)):
# check, whether this trace has already been corrected
try:
data[i].stats.processing
except:
try:
print("Correcting %s, %s for instrument response "
"..." % (data[i].stats.station,
data[i].stats.channel))
# get corner frequencies for pre-filtering
fny = data[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
data[i].attach_response(inv)
data[i].remove_response(output='VEL',
pre_filt=prefilt)
restflag = True
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
# check for RESP-file
if len(respfile) >= 1:
print("Found response file(s)!")
print("Reading meta data information ...")
for j in range(len(respfile)):
print("Found RESP-file %s" % respfile[j])
for i in range(len(data)):
# check, whether this trace has already been corrected
try:
data[i].stats.processing
except:
try:
print("Correcting %s, %s for instrument response "
"..." % (data[i].stats.station,
data[i].stats.channel))
# get corner frequencies for pre-filtering
fny = data[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
seedresp = {'filename': respfile[0],
'date': data[0].stats.starttime,
'units': "VEL"}
data[i].simulate(paz_remove=None, pre_filt=prefilt,
seedresp=seedresp)
restflag = True
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
if len(respfile) < 1 and len(invfile) < 1 and len(dlfile) < 1:
print("No dataless-SEED file,inventory-xml file nor RESP-file "
"found!")
print("Go on processing data without source parameter "
"determination!")
return data, restflag
if __name__ == "__main__":
import doctest
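
For reference, a hedged usage sketch of the new restitute_data as committed here; the waveform and metadata paths are hypothetical, and restflag is now a boolean rather than the 0/1 integer returned by the old method:

from obspy import read
from pylot.core.util.dataprocessing import restitute_data

# hypothetical input paths -- substitute your own waveform file and metadata folder
st = read('/data/example_event/*.mseed')
# invdlpath may be a single .dless/.xml/.resp file or a directory that is
# scanned for such files; traces that already carry a 'processing' entry
# in their stats are skipped
cordat, restflag = restitute_data(st.copy(), '/data/inventory')
if restflag:
    # at least one trace was corrected to velocity ('VEL')
    zdat = cordat.select(component="Z")

The existing call sites that still test restflag == 1 keep working because True == 1 in Python.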