Merge branch 'feature/magnitude4QtPyLoT'

Conflicts:
	pylot/core/util/dataprocessing.py
	pylot/core/util/widgets.py

commit 21042bc071

QtPyLoT.py (17)

@@ -520,7 +520,7 @@ class MainWindow(QMainWindow):

         def getSavePath(e):
             print('warning: {0}'.format(e))
-            directory = os.path.join(self.getRoot(), self.getEventFileName())
+            directory = os.path.realpath(self.getRoot())
             file_filter = "QuakeML file (*.xml);;VELEST observation file " \
                           "format (*.cnv);;NonLinLoc observation file (*.obs)"
             title = 'Save event data ...'

@@ -1005,17 +1005,28 @@ class MainWindow(QMainWindow):
             settings.setValue("inventoryFile", fninv)
             settings.sync()
             for a in o.arrivals:
+                if a.phase in 'sS':
+                    continue
                 pick = a.pick_id.get_referred_object()
                 station = pick.waveform_id.station_code
                 wf = self.get_data().getWFData().select(station=station)
+                if not wf:
+                    continue
                 onset = pick.time
                 dist = degrees2kilometers(a.distance)
                 w0, fc = calcsourcespec(wf, onset, fninv, self.inputs.get('vp'), dist,
                                         a.azimuth, a.takeoff_angle,
                                         self.inputs.get('Qp'), 0)
-                stat_mags = calcMoMw(wf, w0, 2700., 3000., dist, fninv)
-                mags[station] = stat_mags
+                if w0 is None or fc is None:
+                    continue
+                station_mag = calcMoMw(wf, w0, 2700., 3000., dist, fninv)
+                mags[station] = station_mag
             mag = np.median([M[1] for M in mags.values()])
+            # give some information on the processing
+            print('number of stations used: {0}\n'.format(len(mags.values())))
+            print('stations used:\n')
+            for s in mags.keys(): print('\t{0}'.format(s))
+
             return Magnitude(mag=mag, magnitude_type='Mw')
         else:
             return None
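
The new loop body skips S arrivals, stations without waveform data, and stations for which no spectral plateau or corner frequency could be determined, then reduces the remaining per-station Mw values to a network magnitude via the median. A minimal sketch of that aggregation step, with hypothetical station values standing in for the (Mo, Mw) tuples returned by calcMoMw():

import numpy as np

# hypothetical per-station results; real values come from calcMoMw()
mags = {'ST01': (1.2e15, 4.1),
        'ST02': (1.5e15, 4.2),
        'ST03': (0.9e15, 4.0)}

# only stations with a successful spectral fit ever enter the dictionary,
# so the median is taken over usable estimates only
mag = np.median([M[1] for M in mags.values()])
print('number of stations used: {0}'.format(len(mags)))
print('network Mw estimate: {0}'.format(mag))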

@@ -306,7 +306,8 @@ def calcMoMw(wfstream, w0, rho, vp, delta, inv):
     return Mo, Mw


-def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp, iplot):
+def calcsourcespec(wfstream, onset, metadata, vp, delta, azimuth, incidence,
+                   qp, iplot):
     '''
     Subfunction to calculate the source spectrum and to derive from that the plateau
     (usually called omega0) and the corner frequency assuming Aki's omega-square

@@ -353,8 +354,10 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp
     data = Data()
     wf_copy = wfstream.copy()

-    [cordat, restflag] = restitute_data(wf_copy, inventory)
-    if restflag == 1:
+    invtype, inventory = metadata
+
+    [cordat, restflag] = restitute_data(wf_copy, invtype, inventory)
+    if restflag is True:
         zdat = cordat.select(component="Z")
         if len(zdat) == 0:
             zdat = cordat.select(component="3")

@@ -380,10 +383,10 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp

         # integrate to displacement
         # unrotated vertical component (for copmarison)
-        inttrz = signal.detrend(integrate.cumtrapz(zdat[0].data, None, \
+        inttrz = signal.detrend(integrate.cumtrapz(zdat[0].data, None,
                                                    zdat[0].stats.delta))
         # rotated component Z => L
-        Ldat = signal.detrend(integrate.cumtrapz(ldat[0].data, None, \
+        Ldat = signal.detrend(integrate.cumtrapz(ldat[0].data, None,
                                                  ldat[0].stats.delta))

         # get window after P pulse for

@@ -445,7 +448,8 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp

         # use of implicit scipy otimization function
         fit = synthsourcespec(F, w0in, Fcin)
-        [optspecfit, pcov] = curve_fit(synthsourcespec, F, YYcor, [w0in, Fcin])
+        [optspecfit, _] = curve_fit(synthsourcespec, F, YYcor, [w0in,
+                                                                Fcin])
         w01 = optspecfit[0]
         fc1 = optspecfit[1]
         print ("calcsourcespec: Determined w0-value: %e m/Hz, \n"
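
The fit above estimates the spectral plateau w0 and corner frequency fc of Aki's omega-square source model with scipy's curve_fit. A self-contained sketch of that step on synthetic data; the exact form of synthsourcespec here is an assumption, not the project's implementation:

import numpy as np
from scipy.optimize import curve_fit

def synthsourcespec(f, omega0, fc):
    # assumed omega-square model: flat plateau omega0 below the corner
    # frequency fc, roughly f**-2 decay above it
    return omega0 / (1 + (f / fc) ** 2)

# synthetic spectrum standing in for the observed (F, YYcor) arrays
F = np.linspace(0.1, 20.0, 200)
YYcor = synthsourcespec(F, 1e-4, 2.0) * (1 + 0.05 * np.random.randn(F.size))

w0in, Fcin = YYcor.max(), 1.0  # crude initial guesses
[optspecfit, _] = curve_fit(synthsourcespec, F, YYcor, [w0in, Fcin])
w01, fc1 = optspecfit
print("sketch: w0 = %e m/Hz, fc = %.2f Hz" % (w01, fc1))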

@@ -17,7 +17,7 @@ from pylot.core.pick.charfuns import CharacteristicFunction
 from pylot.core.pick.charfuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
 from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
     getSNR, fmpicker, checkPonsets, wadaticheck
-from pylot.core.util.dataprocessing import restitute_data
+from pylot.core.util.dataprocessing import restitute_data, read_metadata
 from pylot.core.util.utils import getPatternLine
 from pylot.core.io.data import Data
 from pylot.core.analysis.magnitude import WApp

@@ -122,6 +122,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
     zfac = pickparam.get('zfac')
     # path to inventory-, dataless- or resp-files
     invdir = pickparam.get('invdir')
+    invtype, inventory = read_metadata(invdir)

     # initialize output
     Pweight = 4 # weight for P onset
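
With read_metadata() the inventory directory is parsed once per autopickstation() call and the resulting (invtype, inventory) pair is handed to every subsequent restitute_data() call, instead of each restitution re-scanning the directory. A sketch of the intended call pattern; the path and the wfstream variable are placeholders:

from pylot.core.util.dataprocessing import read_metadata, restitute_data

invdir = '/path/to/inventory'                # placeholder inventory folder
invtype, inventory = read_metadata(invdir)   # e.g. ('dless', Parser) or ('xml', [files])

# wfstream is assumed to be an obspy Stream holding the station's waveforms
h_copy = wfstream.copy()
[cordat, restflag] = restitute_data(h_copy, invtype, inventory)
if restflag:
    pass  # all traces corrected; continue with amplitude estimation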

@@ -565,7 +566,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
             hdat = edat.copy()
             hdat += ndat
             h_copy = hdat.copy()
-            [cordat, restflag] = restitute_data(h_copy, invdir)
+            [cordat, restflag] = restitute_data(h_copy, invtype, inventory)
             # calculate WA-peak-to-peak amplitude
             # using subclass WApp of superclass Magnitude
             if restflag:

@@ -602,7 +603,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
             hdat = edat.copy()
             hdat += ndat
             h_copy = hdat.copy()
-            [cordat, restflag] = restitute_data(h_copy, invdir)
+            [cordat, restflag] = restitute_data(h_copy, invtype, inventory)
             if restflag == 1:
                 # calculate WA-peak-to-peak amplitude
                 # using subclass WApp of superclass Magnitude

@@ -155,22 +155,16 @@ def evt_head_check(root_dir, out_dir = None):
     print(nfiles)


-def restitute_data(data, path_to_inventory, unit='VEL', force=False):
+def read_metadata(path_to_inventory):
     """
-    takes a data stream and a path_to_inventory and returns the corrected
-    waveform data stream
-    :param data: seismic data stream
-    :param path_to_inventory: path to inventory folder or file
-    :param unit: unit to correct for (default: 'VEL')
-    :param force: force restitution for already corrected traces (default:
-    False)
-    :return: corrected data stream
+    take path_to_inventory and return either the corresponding list of files
+    found or the Parser object for a network dataless seed volume to prevent
+    read overhead for large dataless seed volumes
+    :param path_to_inventory:
+    :return: tuple containing a either list of files or `obspy.io.xseed.Parser`
+    object and the inventory type found
+    :rtype: tuple
     """
-
-    restflag = list()
-
-    data = remove_underscores(data)
-
     dlfile = list()
     invfile = list()
     respfile = list()

@@ -185,22 +179,53 @@ def restitute_data(data, path_to_inventory, unit='VEL', force=False):
     invtype = key_for_set_value(inv)

     if invtype is None:
-        print("Neither dataless-SEED file,inventory-xml file nor RESP-file "
-              "found!")
-        return data, False
+        raise IOError("Neither dataless-SEED file, inventory-xml file nor "
+                      "RESP-file found!")
+    elif invtype == 'dless':  # prevent multiple read of large dlsv
+        if len(inv[invtype]) == 1:
+            robj = Parser(inv[invtype][0])
+        else:
+            robj = inv[invtype]
+    else:
+        robj = inv[invtype]
+    return invtype, robj
+
+
+def restitute_data(data, invtype, inobj, unit='VEL', force=False):
+    """
+    takes a data stream and a path_to_inventory and returns the corrected
+    waveform data stream
+    :param data: seismic data stream
+    :param invtype: type of found metadata
+    :param inobj: either list of metadata files or `obspy.io.xseed.Parser`
+    object
+    :param unit: unit to correct for (default: 'VEL')
+    :param force: force restitution for already corrected traces (default:
+    False)
+    :return: corrected data stream
+    """
+
+    restflag = list()
+
+    data = remove_underscores(data)

     # loop over traces
     for tr in data:
         seed_id = tr.get_id()
         # check, whether this trace has already been corrected
-        if 'processing' in tr.stats.keys() and not force:
+        # TODO read actual value of processing key in Trace's stats instead
+        # of just looking for thr key: if processing is setit doesn't
+        # necessarily mean that the trace has been corrected so far but only
+        # processed in some way, e.g. normalized
+        if 'processing' in tr.stats.keys() \
+                and np.all(['remove' in p for p in tr.stats.processing]) \
+                and not force:
             print("Trace {0} has already been corrected!".format(seed_id))
             continue
         stime = tr.stats.starttime
-        seedresp = None
         prefilt = get_prefilt(tr)
         if invtype == 'resp':
-            fresp = find_in_list(inv[invtype], seed_id)
+            fresp = find_in_list(inobj, seed_id)
             if not fresp:
                 raise IOError('no response file found '
                               'for trace {0}'.format(seed_id))
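
The stricter check above reflects how ObsPy records every applied operation in Trace.stats.processing: the mere presence of the key does not prove an instrument correction has been applied. A small sketch of the test (the detrend call just adds a non-correction processing entry for illustration):

import numpy as np
from obspy import read

tr = read()[0]           # ObsPy example trace
tr.detrend('linear')     # recorded in tr.stats.processing, but not a correction

already_corrected = ('processing' in tr.stats.keys()
                     and np.all(['remove' in p for p in tr.stats.processing]))
print(already_corrected)  # False: processed in some way, but not yet restituted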

@@ -210,35 +235,46 @@ def restitute_data(data, path_to_inventory, unit='VEL', force=False):
                             units=unit)
             kwargs = dict(paz_remove=None, pre_filt=prefilt, seedresp=seedresp)
         elif invtype == 'dless':
-            if len(inv[invtype]) > 1:
-                fname = Parser(find_in_list(inv[invtype], seed_id))
+            if type(inobj) is list:
+                fname = Parser(find_in_list(inobj, seed_id))
             else:
-                fullpath = os.path.join(path_to_inventory, inv[invtype][0])
-                fname = Parser(fullpath)
+                fname = inobj
             seedresp = dict(filename=fname,
                             date=stime,
                             units=unit)
             kwargs = dict(pre_filt=prefilt, seedresp=seedresp)
         elif invtype == 'xml':
-            invlist = inv[invtype]
+            invlist = inobj
             if len(invlist) > 1:
                 finv = find_in_list(invlist, seed_id)
             else:
                 finv = invlist[0]
             inventory = read_inventory(finv, format='STATIONXML')
-        # apply restitution to data
-        if invtype in ['resp', 'dless']:
-            tr.simulate(**kwargs)
         else:
-            tr.attach_response(inventory)
-            tr.remove_response(output=unit,
-                               pre_filt=prefilt)
+            restflag.append(False)
+            continue
+        # apply restitution to data
+        try:
+            if invtype in ['resp', 'dless']:
+                tr.simulate(**kwargs)
+            else:
+                tr.attach_response(inventory)
+                tr.remove_response(output=unit,
+                                   pre_filt=prefilt)
+        except ValueError as e:
+            msg0 = 'Response for {0} not found in Parser'.format(seed_id)
+            msg1 = 'evalresp failed to calculate response'
+            if msg0 not in e.message or msg1 not in e.message:
+                raise
+            else:
+                restflag.append(False)
+                continue
         restflag.append(True)
     # check if ALL traces could be restituted, take care of large datasets
     # better try restitution for smaller subsets of data (e.g. station by
     # station)
     if len(restflag) > 0:
-        restflag = np.all(restflag)
+        restflag = bool(np.all(restflag))
     else:
         restflag = False
     return data, restflag
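
After this change restitute_data() reports success as a single boolean: one flag is collected per trace and reduced with np.all, so the returned value is True only if every trace in the stream could be corrected and False for an empty result. A sketch of that reduction, written here as a standalone helper for illustration only:

import numpy as np

def reduce_restflags(flags):
    # mirrors the tail of restitute_data(): per-trace booleans -> one bool
    return bool(np.all(flags)) if len(flags) > 0 else False

print(reduce_restflags([True, True, True]))  # True  - every trace corrected
print(reduce_restflags([True, False]))       # False - at least one trace failed
print(reduce_restflags([]))                  # False - nothing was restituted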