Merge branch 'feature/magnitude4QtPyLoT'

Conflicts:
	pylot/core/util/dataprocessing.py
	pylot/core/util/widgets.py
Sebastian Wehling-Benatelli 2016-09-20 13:24:37 +02:00
commit 21042bc071
4 changed files with 96 additions and 44 deletions


@@ -520,7 +520,7 @@ class MainWindow(QMainWindow):
def getSavePath(e):
print('warning: {0}'.format(e))
directory = os.path.join(self.getRoot(), self.getEventFileName())
directory = os.path.realpath(self.getRoot())
file_filter = "QuakeML file (*.xml);;VELEST observation file " \
"format (*.cnv);;NonLinLoc observation file (*.obs)"
title = 'Save event data ...'
@@ -1005,17 +1005,28 @@ class MainWindow(QMainWindow):
settings.setValue("inventoryFile", fninv)
settings.sync()
for a in o.arrivals:
if a.phase in 'sS':
continue
pick = a.pick_id.get_referred_object()
station = pick.waveform_id.station_code
wf = self.get_data().getWFData().select(station=station)
if not wf:
continue
onset = pick.time
dist = degrees2kilometers(a.distance)
w0, fc = calcsourcespec(wf, onset, fninv, self.inputs.get('vp'), dist,
a.azimuth, a.takeoff_angle,
self.inputs.get('Qp'), 0)
stat_mags = calcMoMw(wf, w0, 2700., 3000., dist, fninv)
mags[station] = stat_mags
if w0 is None or fc is None:
continue
station_mag = calcMoMw(wf, w0, 2700., 3000., dist, fninv)
mags[station] = station_mag
mag = np.median([M[1] for M in mags.values()])
# give some information on the processing
print('number of stations used: {0}\n'.format(len(mags.values())))
print('stations used:\n')
for s in mags.keys(): print('\t{0}'.format(s))
return Magnitude(mag=mag, magnitude_type='Mw')
else:
return None
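
Note on the magnitude aggregation above: each entry in mags is the (Mo, Mw) tuple returned by calcMoMw, and the network magnitude is the median of the per-station Mw values (M[1]). A minimal sketch of that final step, with made-up station names and values:

import numpy as np

# hypothetical per-station results as returned by calcMoMw: (Mo, Mw)
mags = {'ST01': (3.2e15, 4.3), 'ST02': (2.8e15, 4.2), 'ST03': (4.1e15, 4.5)}

# the network magnitude is the median of the station Mw values
mag = np.median([M[1] for M in mags.values()])
print('number of stations used: {0}'.format(len(mags)))
print('network Mw: {0:.1f}'.format(mag))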


@@ -306,7 +306,8 @@ def calcMoMw(wfstream, w0, rho, vp, delta, inv):
return Mo, Mw
def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp, iplot):
def calcsourcespec(wfstream, onset, metadata, vp, delta, azimuth, incidence,
qp, iplot):
'''
Subfunction to calculate the source spectrum and to derive from that the plateau
(usually called omega0) and the corner frequency assuming Aki's omega-square
@@ -353,8 +354,10 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp
data = Data()
wf_copy = wfstream.copy()
[cordat, restflag] = restitute_data(wf_copy, inventory)
if restflag == 1:
invtype, inventory = metadata
[cordat, restflag] = restitute_data(wf_copy, invtype, inventory)
if restflag is True:
zdat = cordat.select(component="Z")
if len(zdat) == 0:
zdat = cordat.select(component="3")
@@ -380,10 +383,10 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp
# integrate to displacement
# unrotated vertical component (for comparison)
inttrz = signal.detrend(integrate.cumtrapz(zdat[0].data, None, \
inttrz = signal.detrend(integrate.cumtrapz(zdat[0].data, None,
zdat[0].stats.delta))
# rotated component Z => L
Ldat = signal.detrend(integrate.cumtrapz(ldat[0].data, None, \
Ldat = signal.detrend(integrate.cumtrapz(ldat[0].data, None,
ldat[0].stats.delta))
# get window after P pulse for
@@ -445,7 +448,8 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp
# use of implicit scipy optimization function
fit = synthsourcespec(F, w0in, Fcin)
[optspecfit, pcov] = curve_fit(synthsourcespec, F, YYcor, [w0in, Fcin])
[optspecfit, _] = curve_fit(synthsourcespec, F, YYcor, [w0in,
Fcin])
w01 = optspecfit[0]
fc1 = optspecfit[1]
print ("calcsourcespec: Determined w0-value: %e m/Hz, \n"


@@ -17,7 +17,7 @@ from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.charfuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
getSNR, fmpicker, checkPonsets, wadaticheck
from pylot.core.util.dataprocessing import restitute_data
from pylot.core.util.dataprocessing import restitute_data, read_metadata
from pylot.core.util.utils import getPatternLine
from pylot.core.io.data import Data
from pylot.core.analysis.magnitude import WApp
@@ -122,6 +122,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
zfac = pickparam.get('zfac')
# path to inventory-, dataless- or resp-files
invdir = pickparam.get('invdir')
invtype, inventory = read_metadata(invdir)
# initialize output
Pweight = 4 # weight for P onset
@@ -565,7 +566,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
hdat = edat.copy()
hdat += ndat
h_copy = hdat.copy()
[cordat, restflag] = restitute_data(h_copy, invdir)
[cordat, restflag] = restitute_data(h_copy, invtype, inventory)
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
if restflag:
@@ -602,7 +603,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
hdat = edat.copy()
hdat += ndat
h_copy = hdat.copy()
[cordat, restflag] = restitute_data(h_copy, invdir)
[cordat, restflag] = restitute_data(h_copy, invtype, inventory)
if restflag == 1:
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
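
The point of this change in autopickstation is that the metadata are read and parsed once per call via read_metadata and the resulting pair is reused for every restitution, instead of handing the inventory directory to restitute_data each time. A short sketch of the pattern, with placeholder paths:

from obspy import read

from pylot.core.util.dataprocessing import read_metadata, restitute_data

invtype, inventory = read_metadata('/data/inventory')   # parsed once up front
h_copy = read('/data/event/*.mseed')                    # hypothetical horizontal data
cordat, restflag = restitute_data(h_copy, invtype, inventory)
if restflag:
    # continue with the Wood-Anderson peak-to-peak amplitude (WApp)
    pass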


@@ -155,22 +155,16 @@ def evt_head_check(root_dir, out_dir = None):
print(nfiles)
def restitute_data(data, path_to_inventory, unit='VEL', force=False):
def read_metadata(path_to_inventory):
"""
takes a data stream and a path_to_inventory and returns the corrected
waveform data stream
:param data: seismic data stream
:param path_to_inventory: path to inventory folder or file
:param unit: unit to correct for (default: 'VEL')
:param force: force restitution for already corrected traces (default:
False)
:return: corrected data stream
take path_to_inventory and return either the corresponding list of files
found or a Parser object for a network dataless SEED volume to prevent
read overhead for large dataless SEED volumes
:param path_to_inventory: path to the inventory folder or file
:return: tuple of the inventory type found and either a list of files or an
`obspy.io.xseed.Parser` object
:rtype: tuple
"""
restflag = list()
data = remove_underscores(data)
dlfile = list()
invfile = list()
respfile = list()
@@ -185,22 +179,53 @@ def restitute_data(data, path_to_inventory, unit='VEL', force=False):
invtype = key_for_set_value(inv)
if invtype is None:
print("Neither dataless-SEED file,inventory-xml file nor RESP-file "
"found!")
return data, False
raise IOError("Neither dataless-SEED file, inventory-xml file nor "
"RESP-file found!")
elif invtype == 'dless': # prevent multiple read of large dlsv
if len(inv[invtype]) == 1:
robj = Parser(inv[invtype][0])
else:
robj = inv[invtype]
else:
robj = inv[invtype]
return invtype, robj
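
What read_metadata returns depends on what it finds: a single dataless SEED volume is parsed once into an obspy.io.xseed Parser object, otherwise a plain list of file names is handed back together with the inventory type. A small sketch of inspecting the result (the path is a placeholder):

from obspy.io.xseed import Parser

from pylot.core.util.dataprocessing import read_metadata

invtype, robj = read_metadata('/data/inventory')    # hypothetical path
if invtype == 'dless' and isinstance(robj, Parser):
    # a single dataless SEED volume, parsed once to avoid repeated reads
    print('parsed dataless SEED volume')
else:
    # 'xml', 'resp' or several dataless files: a list of file names
    print(invtype, robj)
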
def restitute_data(data, invtype, inobj, unit='VEL', force=False):
"""
takes a data stream and the metadata returned by `read_metadata` and
returns the corrected waveform data stream
:param data: seismic data stream
:param invtype: type of the metadata found ('dless', 'xml' or 'resp')
:param inobj: either a list of metadata files or an `obspy.io.xseed.Parser`
object
:param unit: unit to correct for (default: 'VEL')
:param force: force restitution for already corrected traces (default:
False)
:return: corrected data stream
"""
restflag = list()
data = remove_underscores(data)
# loop over traces
for tr in data:
seed_id = tr.get_id()
# check, whether this trace has already been corrected
if 'processing' in tr.stats.keys() and not force:
# TODO read the actual value of the processing key in the Trace's stats
# instead of just looking for the key: if 'processing' is set it doesn't
# necessarily mean that the trace has been corrected so far but only
# processed in some way, e.g. normalized
if 'processing' in tr.stats.keys() \
and np.all(['remove' in p for p in tr.stats.processing]) \
and not force:
print("Trace {0} has already been corrected!".format(seed_id))
continue
stime = tr.stats.starttime
seedresp = None
prefilt = get_prefilt(tr)
if invtype == 'resp':
fresp = find_in_list(inv[invtype], seed_id)
fresp = find_in_list(inobj, seed_id)
if not fresp:
raise IOError('no response file found '
'for trace {0}'.format(seed_id))
@@ -210,35 +235,46 @@ def restitute_data(data, path_to_inventory, unit='VEL', force=False):
units=unit)
kwargs = dict(paz_remove=None, pre_filt=prefilt, seedresp=seedresp)
elif invtype == 'dless':
if len(inv[invtype]) > 1:
fname = Parser(find_in_list(inv[invtype], seed_id))
if type(inobj) is list:
fname = Parser(find_in_list(inobj, seed_id))
else:
fullpath = os.path.join(path_to_inventory, inv[invtype][0])
fname = Parser(fullpath)
fname = inobj
seedresp = dict(filename=fname,
date=stime,
units=unit)
kwargs = dict(pre_filt=prefilt, seedresp=seedresp)
elif invtype == 'xml':
invlist = inv[invtype]
invlist = inobj
if len(invlist) > 1:
finv = find_in_list(invlist, seed_id)
else:
finv = invlist[0]
inventory = read_inventory(finv, format='STATIONXML')
else:
restflag.append(False)
continue
# apply restitution to data
try:
if invtype in ['resp', 'dless']:
tr.simulate(**kwargs)
else:
tr.attach_response(inventory)
tr.remove_response(output=unit,
pre_filt=prefilt)
except ValueError as e:
msg0 = 'Response for {0} not found in Parser'.format(seed_id)
msg1 = 'evalresp failed to calculate response'
if msg0 not in e.message or msg1 not in e.message:
raise
else:
restflag.append(False)
continue
restflag.append(True)
# check if ALL traces could be restituted; for large datasets it may be
# better to try restitution on smaller subsets of data (e.g. station by
# station)
if len(restflag) > 0:
restflag = np.all(restflag)
restflag = bool(np.all(restflag))
else:
restflag = False
return data, restflag
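
restitute_data now reports success as a single boolean: True only if every trace in the stream could be corrected, False if any trace failed or nothing was restituted at all. A minimal sketch of that aggregation with illustrative flag values:

import numpy as np

# per-trace success flags as collected in the loop above (illustrative values)
restflag = [True, True, False]
overall = bool(np.all(restflag)) if restflag else False
print(overall)   # False: not every trace could be corrected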