Merge remote-tracking branch 'origin/develop' into feature/refactor
This commit is contained in:
commit a45e817213

51 autoPyLoT.py
@@ -7,6 +7,7 @@ import argparse
 import datetime
 import glob
 import os
+import traceback

 import pylot.core.loc.focmec as focmec
 import pylot.core.loc.hash as hash

@@ -22,7 +23,7 @@ from pylot.core.analysis.magnitude import MomentMagnitude, LocalMagnitude
 from pylot.core.io.data import Data
 from pylot.core.io.inputs import PylotParameter
 from pylot.core.pick.autopick import autopickevent, iteratepicker
-from pylot.core.util.dataprocessing import restitute_data, read_metadata
+from pylot.core.util.dataprocessing import restitute_data, read_metadata, Metadata
 from pylot.core.util.defaults import SEPARATOR
 from pylot.core.util.event import Event
 from pylot.core.util.structure import DATASTRUCTURE

@@ -38,6 +39,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 """
 Determine phase onsets automatically utilizing the automatic picking
 algorithms by Kueperkoch et al. 2010/2012.
+:param obspyDMT_wfpath: if obspyDMT is used, name of data directory ("raw" or "processed")
 :param input_dict:
 :type input_dict:
 :param parameter: PylotParameter object containing parameters used for automatic picking

@@ -53,7 +55,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 :type savepath: str
 :param savexml: export results in XML file if True
 :type savexml: bool
-:param station: list of station names or 'all' to pick all stations
+:param station: choose specific station name or 'all' to pick all stations
 :type station: str
 :param iplot: logical variable for plotting: 0=none, 1=partial, 2=all
 :type iplot: int

@@ -117,7 +119,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 if not parameter:
 if inputfile:
 parameter = PylotParameter(inputfile)
-#iplot = parameter['iplot']
+# iplot = parameter['iplot']
 else:
 infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
 print('Using default input file {}'.format(infile))

@@ -149,8 +151,8 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 datastructure.modifyFields(**dsfields)
 datastructure.setExpandFields(exf)

-# check if default location routine NLLoc is available
-if real_None(parameter['nllocbin']):
+# check if default location routine NLLoc is available and all stations are used
+if real_None(parameter['nllocbin']) and station == 'all':
 locflag = 1
 # get NLLoc-root path
 nllocroot = parameter.get('nllocroot')

@@ -200,8 +202,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 events.append(os.path.join(datapath, eventID))
 else:
 # autoPyLoT was initialized from GUI
-events = []
-events.append(eventid)
+events = [eventid]
 evID = os.path.split(eventid)[-1]
 locflag = 2
 else:

@@ -271,24 +272,26 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 if not wfdat:
 print('Could not find station {}. STOP!'.format(station))
 return
-wfdat = remove_underscores(wfdat)
+#wfdat = remove_underscores(wfdat)
 # trim components for each station to avoid problems with different trace starttimes for one station
 wfdat = check4gaps(wfdat)
 wfdat = check4doubled(wfdat)
 wfdat = trim_station_components(wfdat, trim_start=True, trim_end=False)
-metadata = read_metadata(parameter.get('invdir'))
+# TODO: (idea) read metadata from obspy_dmt database
+# if not wfpath_extension:
+#     metadata = read_metadata(parameter.get('invdir'))
+# else:
+#     metadata = None
+if not wfpath_extension:
+metadata = Metadata(parameter.get('invdir'))
+else:
+metadata = Metadata(os.path.join(eventpath, 'resp'))
 corr_dat = None
 if metadata:
 # rotate stations to ZNE
-wfdat = check4rotated(wfdat, metadata)
+try:
+wfdat = check4rotated(wfdat, metadata)
+except Exception as e:
+print('Could not rotate station {} to ZNE:\n{}'.format(wfdat[0].stats.station,
+traceback.format_exc()))
 if locflag:
 print("Restitute data ...")
-corr_dat = restitute_data(wfdat.copy(), *metadata, ncores=ncores)
+corr_dat = restitute_data(wfdat.copy(), metadata, ncores=ncores)
 if not corr_dat and locflag:
 locflag = 2
 print('Working on event %s. Stations: %s' % (eventpath, station))
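Note: the hunk above swaps the tuple-style metadata that read_metadata returned for a single Metadata object, so restitute_data is now handed the object itself instead of an unpacked tuple (`*metadata`). The toy sketch below only illustrates that shape of API change; `Meta` and `restitute` are made-up stand-ins, and the only Metadata interface confirmed by this diff is the constructor taking an inventory directory and get_coordinates (used in a later hunk).

    class Meta:  # hypothetical stand-in for pylot's Metadata
        def __init__(self, invdir):
            self.invdir = invdir

        def get_coordinates(self, station_id):
            return {'latitude': 0.0, 'longitude': 0.0}  # dummy values

    def restitute(data, metadata, ncores=1):
        # old style forced callers to unpack: restitute(data, *metadata, ...)
        # new style passes one object through and queries it where needed
        return data, metadata.get_coordinates('NET.STA..HHZ')

    print(restitute([1, 2, 3], Meta('/path/to/inventory')))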
@@ -363,8 +366,9 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 WAscaling[2]))
 evt = local_mag.updated_event(magscaling)
 net_ml = local_mag.net_magnitude(magscaling)
-print("Network local magnitude: %4.1f" % net_ml.mag)
-if magscaling == None:
+if net_ml:
+print("Network local magnitude: %4.1f" % net_ml.mag)
+if magscaling is None:
 scaling = False
 elif magscaling[0] != 0 and magscaling[1] != 0:
 scaling = False
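Note: this merge repeatedly replaces `== None` with `is None`. `None` is a singleton, so the identity test is both cheaper and immune to objects that overload `__eq__`; numpy arrays are the practical case, since `arr == None` broadcasts elementwise. A small self-contained illustration (the `Weird` class is made up for the example):

    import numpy as np

    class Weird:
        def __eq__(self, other):
            return True  # claims equality with everything, including None

    w = Weird()
    print(w == None)   # True -- misleading
    print(w is None)   # False -- correct identity test

    arr = np.array([1, 2, 3])
    print(arr == None)  # elementwise array; its truth value is ambiguous in an `if`
    print(arr is None)  # False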
@@ -447,8 +451,9 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 WAscaling[2]))
 evt = local_mag.updated_event(magscaling)
 net_ml = local_mag.net_magnitude(magscaling)
-print("Network local magnitude: %4.1f" % net_ml.mag)
-if magscaling == None:
+if net_ml:
+print("Network local magnitude: %4.1f" % net_ml.mag)
+if magscaling is None:
 scaling = False
 elif magscaling[0] != 0 and magscaling[1] != 0:
 scaling = False

@@ -499,7 +504,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
 endsplash = '''------------------------------------------\n'
 -----Finished event %s!-----\n'
 ------------------------------------------'''.format \
-(version=_getVersionString()) % evID
+(version=_getVersionString()) % evID
 print(endsplash)
 locflag = glocflag
 if locflag == 0:

@@ -528,9 +533,9 @@ if __name__ == "__main__":
 action='store',
 help='''full path to the file containing the input
 parameters for autoPyLoT''')
-parser.add_argument('-p', '-P', '--iplot', type=int,
+parser.add_argument('-p', '-P', '--iplot', type=int,
 action='store', default=0,
-help='''optional, logical variable for plotting: 0=none, 1=partial, 2=all''')
+help='''optional, logical variable for plotting: 0=none, 1=partial, 2=all''')
 parser.add_argument('-f', '-F', '--fnames', type=str,
 action='store',
 help='''optional, list of data file names''')

@@ -17,6 +17,7 @@ from pylot.core.util.utils import common_range, fit_curve
 from scipy import integrate, signal
 from scipy.optimize import curve_fit

+
 def richter_magnitude_scaling(delta):
 distance = np.array([0, 10, 20, 25, 30, 35, 40, 45, 50, 60, 70, 75, 85, 90, 100, 110,
 120, 130, 140, 150, 160, 170, 180, 190, 200, 210, 230, 240, 250,

@@ -122,7 +123,7 @@ class Magnitude(object):

 def net_magnitude(self, magscaling=None):
 if self:
-if magscaling == None:
+if magscaling is None:
 scaling = False
 elif magscaling[0] != 0 and magscaling[1] != 0:
 scaling = False

@@ -225,7 +226,7 @@ class LocalMagnitude(Magnitude):
 sqH = np.sqrt(power_sum)

 # get time array
-th=np.arange(0, st[0].stats.npts/st[0].stats.sampling_rate, st[0].stats.delta)
+th = np.arange(0, st[0].stats.npts / st[0].stats.sampling_rate, st[0].stats.delta)
 # get maximum peak within pick window
 iwin = getsignalwin(th, t0 - stime, self.calc_win)
 ii = min([iwin[len(iwin) - 1], len(th)])

@@ -245,9 +246,9 @@
 ax.plot(th[iwin], sqH[iwin], 'g')
 ax.plot([t0 - stime, t0 - stime], [0, max(sqH)], 'r', linewidth=2)
 ax.set_title('Station %s, Channel %s, RMS Horizontal Trace, '
-'WA-peak-to-peak=%6.3f mm' % (st[0].stats.station,
+'WA-peak-to-peak=%6.3f mm' % (st[0].stats.station,
 st[0].stats.channel,
-wapp))
+wapp))
 ax.set_xlabel('Time [s]')
 ax.set_ylabel('Displacement [mm]')
 ax = fig.add_subplot(212)

@@ -257,15 +258,16 @@
 ax.plot([t0 - stime, t0 - stime], [0, max(sqH)], 'r', linewidth=2)
 ax.set_title('Channel %s, RMS Horizontal Trace, '
 'WA-peak-to-peak=%6.3f mm' % (st[1].stats.channel,
-wapp))
+wapp))
 ax.set_xlabel('Time [s]')
 ax.set_ylabel('Displacement [mm]')
 fig.show()
-try: input()
-except SyntaxError: pass
+try:
+input()
+except SyntaxError:
+pass
 plt.close(fig)


 return wapp, fig

 def calc(self):

@@ -309,7 +311,7 @@
 a0 = a0 * 1e03 # mm to nm (see Havskov & Ottemöller, 2010)
 magnitude = ope.StationMagnitude(mag=np.log10(a0) \
 + self.wascaling[0] * np.log10(delta) + self.wascaling[1]
-* delta + self.wascaling[
+* delta + self.wascaling[
 2])
 magnitude.origin_id = self.origin_id
 magnitude.waveform_id = pick.waveform_id

@@ -372,7 +374,7 @@ class MomentMagnitude(Magnitude):

 def calc(self):
 for a in self.arrivals:
-if a.phase not in 'pP':
+if a.phase not in 'pP':
 continue
 # make sure calculating Mo only from reliable onsets
 # NLLoc: time_weight = 0 => do not use onset!

@@ -510,6 +512,9 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,

 zdat = select_for_phase(wfstream, "P")

+if len(zdat) == 0:
+raise IOError('No vertical component found in stream:\n{}'.format(wfstream))
+
 dt = zdat[0].stats.delta

 freq = zdat[0].stats.sampling_rate

@@ -17,6 +17,7 @@ from pylot.core.util.utils import fnConstructor, full_range, remove_underscores,
 import pylot.core.loc.velest as velest
+from pylot.core.util.obspyDMT_interface import qml_from_obspyDMT


 class Data(object):
 """
 Data container with attributes wfdata holding ~obspy.core.stream.

@@ -284,7 +285,7 @@ class Data(object):
 mstation_ext = mstation + '_'
 for k in range(len(picks_copy)):
 if ((picks_copy[k].waveform_id.station_code == mstation) or
-(picks_copy[k].waveform_id.station_code == mstation_ext)) and \
+(picks_copy[k].waveform_id.station_code == mstation_ext)) and \
 (picks_copy[k].method_id == 'auto'):
 del picks_copy[k]
 break

@@ -299,7 +300,7 @@
 for i in range(len(picks_copy)):
 if picks_copy[i].phase_hint[0] == 'P':
 if (picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[0]) or \
-(picks_copy[i].time_errors['uncertainty'] == None):
+(picks_copy[i].time_errors['uncertainty'] is None):
 print("Uncertainty exceeds or equal adjusted upper time error!")
 print("Adjusted uncertainty: {}".format(upperErrors[0]))
 print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))

@@ -311,7 +312,7 @@
 break
 if picks_copy[i].phase_hint[0] == 'S':
 if (picks_copy[i].time_errors['upper_uncertainty'] >= upperErrors[1]) or \
-(picks_copy[i].time_errors['uncertainty'] == None):
+(picks_copy[i].time_errors['uncertainty'] is None):
 print("Uncertainty exceeds or equal adjusted upper time error!")
 print("Adjusted uncertainty: {}".format(upperErrors[1]))
 print("Pick uncertainty: {}".format(picks_copy[i].time_errors['uncertainty']))

@@ -404,7 +405,7 @@

 # various pre-processing steps:
 # remove possible underscores in station names
-self.wfdata = remove_underscores(self.wfdata)
+#self.wfdata = remove_underscores(self.wfdata)
 # check for stations with rotated components
 if checkRotated and metadata is not None:
 self.wfdata = check4rotated(self.wfdata, metadata, verbosity=0)

@@ -416,7 +417,6 @@
 self.dirty = False
 return True

-
 def appendWFData(self, fnames, synthetic=False):
 """
 Read waveform data from fnames and append it to current wf data

@@ -506,8 +506,10 @@
 # check for automatic picks
 print("Writing phases to ObsPy-quakeml file")
 for key in picks:
+if not picks[key].get('P'):
+continue
 if picks[key]['P']['picker'] == 'auto':
-print("Existing picks will be overwritten!")
+print("Existing auto-picks will be overwritten in pick-dictionary!")
 picks = picks_from_picksdict(picks)
 break
 else:

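Note: the `break`/`else:` pair closing the hunk above is Python's `for ... else` construct: the `else` branch runs only when the loop finishes without hitting `break`, i.e. here when no automatic pick was found at all. A minimal sketch of the idiom with made-up data:

    picks = {'ST1': {'P': {'picker': 'manual'}},
             'ST2': {'P': {'picker': 'auto'}}}

    for key in picks:
        if picks[key]['P']['picker'] == 'auto':
            print('found an automatic pick -> convert the whole dictionary')
            break
    else:
        print('no automatic picks at all')  # runs only if the loop never broke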
@@ -456,11 +456,11 @@ defaults = {'rootpath': {'type': str,
 'namestring': 'Wadati tolerance'},

 'jackfactor': {'type': float,
-'tooltip': 'pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor',
-'value': 5.0,
-'min': 0.,
-'max': np.inf,
-'namestring': 'Jackknife safety factor'},
+'tooltip': 'pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor',
+'value': 5.0,
+'min': 0.,
+'max': np.inf,
+'namestring': 'Jackknife safety factor'},

 'WAscaling': {'type': (float, float, float),
 'tooltip': 'Scaling relation (log(Ao)+Alog(r)+Br+C) of Wood-Anderson amplitude Ao [nm] \

@@ -245,7 +245,7 @@ def picksdict_from_picks(evt):
 if picker.startswith('smi:local/'):
 picker = picker.split('smi:local/')[1]
 except IndexError:
-picker = 'manual' # MP MP TODO maybe improve statement
+picker = 'manual'  # MP MP TODO maybe improve statement
 try:
 onsets = picksdict[picker][station]
 except KeyError as e:

@@ -346,6 +346,7 @@ def picks_from_picksdict(picks, creation_info=None):
 picks_list.append(pick)
 return picks_list

+
 def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0):
 import glob

@@ -499,7 +500,7 @@ def writephases(arrivals, fformat, filename, parameter=None, eventinfo=None):
 except KeyError as e:
 print(e)
 fm = None
-if fm == None:
+if fm is None:
 fm = '?'
 onset = arrivals[key]['P']['mpp']
 year = onset.year

@@ -916,9 +917,9 @@ def merge_picks(event, picks):
 network = pick.waveform_id.network_code
 method = pick.method_id
 for p in event.picks:
-if p.waveform_id.station_code == station\
-and p.waveform_id.network_code == network\
-and p.phase_hint == phase\
+if p.waveform_id.station_code == station \
+and p.waveform_id.network_code == network \
+and p.phase_hint == phase \
 and (str(p.method_id) in str(method)
 or str(method) in str(p.method_id)):
 p.time, p.time_errors, p.waveform_id.network_code, p.method_id = time, err, network, method

@@ -965,22 +966,22 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
 arrivals_copy = cat_copy.events[0].picks
 # Prefere manual picks if qualities are sufficient!
 for Pick in arrivals:
-if (Pick.method_id.id).split('/')[1] == 'manual':
+if Pick.method_id.id.split('/')[1] == 'manual':
 mstation = Pick.waveform_id.station_code
 mstation_ext = mstation + '_'
 for mpick in arrivals_copy:
 phase = identifyPhase(loopIdentifyPhase(Pick.phase_hint))
 if phase == 'P':
 if ((mpick.waveform_id.station_code == mstation) or
-(mpick.waveform_id.station_code == mstation_ext)) and \
-((mpick.method_id).split('/')[1] == 'auto') and \
+(mpick.waveform_id.station_code == mstation_ext)) and \
+(mpick.method_id.split('/')[1] == 'auto') and \
 (mpick.time_errors['uncertainty'] <= ErrorsP[3]):
 del mpick
 break
 elif phase == 'S':
 if ((mpick.waveform_id.station_code == mstation) or
-(mpick.waveform_id.station_code == mstation_ext)) and \
-((mpick.method_id).split('/')[1] == 'auto') and \
+(mpick.waveform_id.station_code == mstation_ext)) and \
+(mpick.method_id.split('/')[1] == 'auto') and \
 (mpick.time_errors['uncertainty'] <= ErrorsS[3]):
 del mpick
 break

@@ -1032,19 +1033,19 @@
 P0perc = 0
 if len(Pw1) > 0:
 P1perc = 100 / numPweights * len(Pw1)
-else:
+else:
 P1perc = 0
 if len(Pw2) > 0:
 P2perc = 100 / numPweights * len(Pw2)
-else:
+else:
 P2perc = 0
 if len(Pw3) > 0:
 P3perc = 100 / numPweights * len(Pw3)
-else:
+else:
 P3perc = 0
 if len(Pw4) > 0:
 P4perc = 100 / numPweights * len(Pw4)
-else:
+else:
 P4perc = 0
 if len(Sw0) > 0:
 S0perc = 100 / numSweights * len(Sw0)

@@ -1052,19 +1053,19 @@
 S0perc = 0
 if len(Sw1) > 0:
 S1perc = 100 / numSweights * len(Sw1)
-else:
+else:
 S1perc = 0
 if len(Sw2) > 0:
 S2perc = 100 / numSweights * len(Sw2)
-else:
+else:
 S2perc = 0
 if len(Sw3) > 0:
 S3perc = 100 / numSweights * len(Sw3)
-else:
+else:
 S3perc = 0
 if len(Sw4) > 0:
 S4perc = 100 / numSweights * len(Sw4)
-else:
+else:
 S4perc = 0

 weights = ('0', '1', '2', '3', '4')

@@ -76,6 +76,7 @@ def modify_inputs(ctrfn, root, nllocoutn, phasefn, tttn):
 def locate(fnin, parameter=None):
 """
 takes an external program name and tries to run it
+:param parameter: PyLoT Parameter object
 :param fnin: external program name
 :return: None
 """

@@ -8,6 +8,7 @@ function conglomerate utils.

 :author: MAGS2 EP3 working group / Ludger Kueperkoch
 """
+import traceback

 import matplotlib.pyplot as plt
 import numpy as np

@@ -71,42 +72,34 @@ def autopickevent(data, param, iplot=0, fig_dict=None, fig_dict_wadatijack=None,

 for station in stations:
 topick = data.select(station=station)

-if iplot is None or iplot == 'None' or iplot == 0:
-input_tuples.append((topick, param, apverbose, metadata, origin))
-if iplot > 0:
-all_onsets[station] = autopickstation(topick, param, verbose=apverbose,
-iplot=iplot, fig_dict=fig_dict,
-metadata=metadata, origin=origin)
+input_tuples.append((topick, param, apverbose, iplot, fig_dict, metadata, origin))

 if iplot > 0:
 print('iPlot Flag active: NO MULTIPROCESSING possible.')
-return all_onsets
+ncores = 1

-# rename str for ncores in case ncores == 0 (use all cores)
+# rename ncores for string representation in case ncores == 0 (use all cores)
 ncores_str = ncores if ncores != 0 else 'all available'

 print('Autopickstation: Distribute autopicking for {} '
 'stations on {} cores.'.format(len(input_tuples), ncores_str))

-pool = gen_Pool(ncores)
-results = pool.map(call_autopickstation, input_tuples)
-pool.close()
+if ncores == 1:
+results = serial_picking(input_tuples)
+else:
+results = parallel_picking(input_tuples, ncores)

-for result, wfstream in results:
+for result, station in results:
 if type(result) == dict:
-station = result['station']
-result.pop('station')
 all_onsets[station] = result
 else:
-if result == None:
+if result is None:
 result = 'Picker exited unexpectedly.'
-if len(wfstream) > 0:
-station = wfstream[0].stats.station
-else:
-station = None
 print('Could not pick a station: {}\nReason: {}'.format(station, result))

 # no Wadati/JK for single station (also valid for tuning mode)
 if len(stations) == 1:
 return all_onsets

 # quality control
 # median check and jackknife on P-onset times
 jk_checked_onsets = checkPonsets(all_onsets, mdttolerance, jackfactor, iplot, fig_dict_wadatijack)

@@ -115,6 +108,20 @@
 return wadationsets


+def serial_picking(input_tuples):
+result = []
+for input_tuple in input_tuples:
+result.append(call_autopickstation(input_tuple))
+return result
+
+
+def parallel_picking(input_tuples, ncores):
+pool = gen_Pool(ncores)
+result = pool.imap_unordered(call_autopickstation, input_tuples)
+pool.close()
+return result
+
+
 def call_autopickstation(input_tuple):
 """
 helper function used for multiprocessing
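Note: the new serial_picking/parallel_picking split keeps single-core runs (and interactive plotting) out of the multiprocessing machinery, and parallel_picking hands back the lazy `imap_unordered` iterator, so results arrive in completion order rather than submission order. A runnable sketch of the same pattern with the standard library, assuming `gen_Pool` is essentially a `multiprocessing.Pool` factory:

    import multiprocessing

    def work(item):
        return item * item

    def run(items, ncores):
        if ncores == 1:
            return [work(it) for it in items]        # serial path: easy to debug
        pool = multiprocessing.Pool(ncores or None)  # 0 -> None -> use all cores
        results = pool.imap_unordered(work, items)   # lazy, completion order
        pool.close()                                 # no further tasks accepted
        return list(results)  # drained here; the PyLoT code returns the iterator itself

    if __name__ == '__main__':
        print(run(range(5), ncores=2))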
@@ -123,12 +130,16 @@ def call_autopickstation(input_tuple):
 :return: dictionary containing P pick, S pick and station name
 :rtype: dict
 """
-wfstream, pickparam, verbose, metadata, origin = input_tuple
+wfstream, pickparam, verbose, iplot, fig_dict, metadata, origin = input_tuple
+if fig_dict:
+print('Running in interactive mode')
+# multiprocessing not possible with interactive plotting
 try:
-return autopickstation(wfstream, pickparam, verbose, iplot=0, metadata=metadata, origin=origin), wfstream
+return autopickstation(wfstream, pickparam, verbose, fig_dict=fig_dict, iplot=iplot, metadata=metadata,
+origin=origin)
 except Exception as e:
-return e, wfstream
+tbe = traceback.format_exc()
+return tbe, wfstream[0].stats.station


 def get_source_coords(parser, station_id):
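Note: returning `traceback.format_exc()` plus the station name, instead of the bare exception object and the whole waveform stream, keeps a worker failure diagnosable across process boundaries, where exception objects may not pickle cleanly and full streams are expensive to ship back. A runnable sketch of the pattern, as an illustration rather than the PyLoT code itself:

    import traceback

    def safe_call(func, *args):
        """Run func; on failure return the formatted traceback instead of raising."""
        try:
            return func(*args), None
        except Exception:
            return None, traceback.format_exc()

    result, error = safe_call(int, 'not-a-number')
    if error:
        print('worker failed:\n' + error)  # full traceback as a plain string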
@@ -1312,9 +1323,12 @@ def nautopickstation(wfstream, pickparam, verbose=False,
 # split components
 zdat, ndat, edat = get_components_from_waveformstream(wfstream)

+picks = {}
+station = wfstream[0].stats.station
+
 if not zdat:
-print('No z-component found for station {}. STOP'.format(wfstream[0].stats.station))
-return
+print('No z-component found for station {}. STOP'.format(station))
+return picks, station

 if p_params['algoP'] == 'HOS' or p_params['algoP'] == 'ARZ' and zdat is not None:
 msg = '##################################################\nautopickstation:' \

@@ -1331,13 +1345,10 @@
 Lc = np.inf
 print('autopickstation: use_taup flag active.')
 if not metadata:
-metadata = [None, None]
-if not metadata[1]:
 print('Warning: Could not use TauPy to estimate onsets as there are no metadata given.')
 else:
 station_id = wfstream[0].get_id()
-parser = metadata[1]
-station_coords = get_source_coords(parser, station_id)
+station_coords = metadata.get_coordinates(station_id, time=wfstream[0].stats.starttime)
 if station_coords and origin:
 source_origin = origin[0]
 model = TauPyModel(p_params['taup_model'])

@@ -1377,7 +1388,7 @@
 Lwf = zdat[0].stats.endtime - zdat[0].stats.starttime
 if not Lwf > 0:
 print('autopickstation: empty trace! Return!')
-return
+return picks, station

 Ldiff = Lwf - abs(Lc)
 if Ldiff <= 0 or pstop <= pstart or pstop - pstart <= thosmw:

@@ -1574,8 +1585,8 @@
 msg = "autopickstation: P-weight: {0}, " \
 "SNR: {1}, SNR[dB]: {2}, Polarity: {3}".format(Pweight, SNRP, SNRPdB, FM)
 print(msg)
-msg = 'autopickstation: Refind P-Pick: {} s | P-Error: {} s'.format(zdat[0].stats.starttime \
-+ mpickP, Perror)
+msg = 'autopickstation: Refined P-Pick: {} s | P-Error: {} s'.format(zdat[0].stats.starttime \
++ mpickP, Perror)
 print(msg)
 Sflag = 1

@@ -1609,7 +1620,7 @@
 ndat = edat

 pickSonset = (edat is not None and ndat is not None and len(edat) > 0 and len(
-ndat) > 0 and Pweight < 4)
+ndat) > 0 and Pweight < 4)

 if pickSonset:
 # determine time window for calculating CF after P onset

@@ -1617,8 +1628,8 @@
 round(max([mpickP + s_params['sstart'], 0])), # MP MP relative time axis
 round(min([
 mpickP + s_params['sstop'],
-edat[0].stats.endtime-edat[0].stats.starttime,
-ndat[0].stats.endtime-ndat[0].stats.starttime
+edat[0].stats.endtime - edat[0].stats.starttime,
+ndat[0].stats.endtime - ndat[0].stats.starttime
 ]))
 ]

@@ -1842,7 +1853,7 @@
 Serror = pickerr[ipick]

 msg = 'autopickstation: Refined S-Pick: {} s | S-Error: {} s'.format(hdat[0].stats.starttime \
-+ mpickS, Serror)
++ mpickS, Serror)
 print(msg)

 # get SNR

@@ -1885,7 +1896,7 @@
 # re-create stream object including both horizontal components
 hdat = edat.copy()
 hdat += ndat


 else:
 print('autopickstation: No horizontal component data available or '
 'bad P onset, skipping S picking!')

@@ -1956,115 +1967,114 @@
 ax1.set_ylim([-1.5, 1.5])
 ax1.set_ylabel('Normalized Counts')
 # fig.suptitle(tr_filt.stats.starttime)
-try:
-len(edat[0])
-except:
-edat = ndat
-try:
-len(ndat[0])
-except:
-ndat = edat
-if len(edat[0]) > 1 and len(ndat[0]) > 1 and Sflag == 1:
-# plot horizontal traces
-ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
-th1data = np.arange(0,
-trH1_filt.stats.npts /
-trH1_filt.stats.sampling_rate,
-trH1_filt.stats.delta)
-# check equal length of arrays, sometimes they are different!?
-wfldiff = len(trH1_filt.data) - len(th1data)
-if wfldiff < 0:
-th1data = th1data[0:len(th1data) - abs(wfldiff)]
-ax2.plot(th1data, trH1_filt.data / max(trH1_filt.data), color=linecolor, linewidth=0.7, label='Data')
-if Pweight < 4:
-ax2.plot(arhcf1.getTimeArray(),
-arhcf1.getCF() / max(arhcf1.getCF()), 'b', label='CF1')
-if aicSflag == 1 and Sweight < 4:
-ax2.plot(arhcf2.getTimeArray(),
-arhcf2.getCF() / max(arhcf2.getCF()), 'm', label='CF2')
-ax2.plot(
-[aicarhpick.getpick(), aicarhpick.getpick()],
-[-1, 1], 'g', label='Initial S Onset')
-ax2.plot(
-[aicarhpick.getpick() - 0.5,
-aicarhpick.getpick() + 0.5],
-[1, 1], 'g')
-ax2.plot(
-[aicarhpick.getpick() - 0.5,
-aicarhpick.getpick() + 0.5],
-[-1, -1], 'g')
-ax2.plot([refSpick.getpick(), refSpick.getpick()],
-[-1.3, 1.3], 'g', linewidth=2, label='Final S Pick')
-ax2.plot(
-[refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
-[1.3, 1.3], 'g', linewidth=2)
-ax2.plot(
-[refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
-[-1.3, -1.3], 'g', linewidth=2)
-ax2.plot([lpickS, lpickS], [-1.1, 1.1], 'g--', label='lpp')
-ax2.plot([epickS, epickS], [-1.1, 1.1], 'g--', label='epp')
-ax2.set_title('%s, S Weight=%d, SNR=%7.2f, SNR[dB]=%7.2f' % (
-trH1_filt.stats.channel,
-Sweight, SNRS, SNRSdB))
-else:
-ax2.set_title('%s, S Weight=%d, SNR=None, SNRdB=None' % (
-trH1_filt.stats.channel, Sweight))
-ax2.legend(loc=1)
-ax2.set_yticks([])
-ax2.set_ylim([-1.5, 1.5])
-ax2.set_ylabel('Normalized Counts')
-# fig.suptitle(trH1_filt.stats.starttime)
+# only continue if one horizontal stream exists
+if (ndat or edat) and Sflag == 1:
+# mirror components in case one does not exist
+if not edat:
+edat = ndat
+if not ndat:
+ndat = edat
+if len(edat[0]) > 1 and len(ndat[0]) > 1:
+# plot horizontal traces
+ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
+th1data = np.arange(0,
+trH1_filt.stats.npts /
+trH1_filt.stats.sampling_rate,
+trH1_filt.stats.delta)
+# check equal length of arrays, sometimes they are different!?
+wfldiff = len(trH1_filt.data) - len(th1data)
+if wfldiff < 0:
+th1data = th1data[0:len(th1data) - abs(wfldiff)]
+ax2.plot(th1data, trH1_filt.data / max(trH1_filt.data), color=linecolor, linewidth=0.7, label='Data')
+if Pweight < 4:
+ax2.plot(arhcf1.getTimeArray(),
+arhcf1.getCF() / max(arhcf1.getCF()), 'b', label='CF1')
+if aicSflag == 1 and Sweight < 4:
+ax2.plot(arhcf2.getTimeArray(),
+arhcf2.getCF() / max(arhcf2.getCF()), 'm', label='CF2')
+ax2.plot(
+[aicarhpick.getpick(), aicarhpick.getpick()],
+[-1, 1], 'g', label='Initial S Onset')
+ax2.plot(
+[aicarhpick.getpick() - 0.5,
+aicarhpick.getpick() + 0.5],
+[1, 1], 'g')
+ax2.plot(
+[aicarhpick.getpick() - 0.5,
+aicarhpick.getpick() + 0.5],
+[-1, -1], 'g')
+ax2.plot([refSpick.getpick(), refSpick.getpick()],
+[-1.3, 1.3], 'g', linewidth=2, label='Final S Pick')
+ax2.plot(
+[refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
+[1.3, 1.3], 'g', linewidth=2)
+ax2.plot(
+[refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
+[-1.3, -1.3], 'g', linewidth=2)
+ax2.plot([lpickS, lpickS], [-1.1, 1.1], 'g--', label='lpp')
+ax2.plot([epickS, epickS], [-1.1, 1.1], 'g--', label='epp')
+ax2.set_title('%s, S Weight=%d, SNR=%7.2f, SNR[dB]=%7.2f' % (
+trH1_filt.stats.channel,
+Sweight, SNRS, SNRSdB))
+else:
+ax2.set_title('%s, S Weight=%d, SNR=None, SNRdB=None' % (
+trH1_filt.stats.channel, Sweight))
+ax2.legend(loc=1)
+ax2.set_yticks([])
+ax2.set_ylim([-1.5, 1.5])
+ax2.set_ylabel('Normalized Counts')
+# fig.suptitle(trH1_filt.stats.starttime)

-ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
-th2data = np.arange(0,
-trH2_filt.stats.npts /
-trH2_filt.stats.sampling_rate,
-trH2_filt.stats.delta)
-# check equal length of arrays, sometimes they are different!?
-wfldiff = len(trH2_filt.data) - len(th2data)
-if wfldiff < 0:
-th2data = th2data[0:len(th2data) - abs(wfldiff)]
-ax3.plot(th2data, trH2_filt.data / max(trH2_filt.data), color=linecolor, linewidth=0.7, label='Data')
-if Pweight < 4:
-p22, = ax3.plot(arhcf1.getTimeArray(),
-arhcf1.getCF() / max(arhcf1.getCF()), 'b', label='CF1')
-if aicSflag == 1:
-ax3.plot(arhcf2.getTimeArray(),
-arhcf2.getCF() / max(arhcf2.getCF()), 'm', label='CF2')
-ax3.plot(
-[aicarhpick.getpick(), aicarhpick.getpick()],
-[-1, 1], 'g', label='Initial S Onset')
-ax3.plot(
-[aicarhpick.getpick() - 0.5,
-aicarhpick.getpick() + 0.5],
-[1, 1], 'g')
-ax3.plot(
-[aicarhpick.getpick() - 0.5,
-aicarhpick.getpick() + 0.5],
-[-1, -1], 'g')
-ax3.plot([refSpick.getpick(), refSpick.getpick()],
-[-1.3, 1.3], 'g', linewidth=2, label='Final S Pick')
-ax3.plot(
-[refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
-[1.3, 1.3], 'g', linewidth=2)
-ax3.plot(
-[refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
-[-1.3, -1.3], 'g', linewidth=2)
-ax3.plot([lpickS, lpickS], [-1.1, 1.1], 'g--', label='lpp')
-ax3.plot([epickS, epickS], [-1.1, 1.1], 'g--', label='epp')
-ax3.legend(loc=1)
-ax3.set_yticks([])
-ax3.set_ylim([-1.5, 1.5])
-ax3.set_xlabel('Time [s] after %s' % tr_filt.stats.starttime)
-ax3.set_ylabel('Normalized Counts')
-ax3.set_title(trH2_filt.stats.channel)
-if plt_flag == 1:
-fig.show()
-try:
-input()
-except SyntaxError:
-pass
-plt.close(fig)
+ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
+th2data = np.arange(0,
+trH2_filt.stats.npts /
+trH2_filt.stats.sampling_rate,
+trH2_filt.stats.delta)
+# check equal length of arrays, sometimes they are different!?
+wfldiff = len(trH2_filt.data) - len(th2data)
+if wfldiff < 0:
+th2data = th2data[0:len(th2data) - abs(wfldiff)]
+ax3.plot(th2data, trH2_filt.data / max(trH2_filt.data), color=linecolor, linewidth=0.7, label='Data')
+if Pweight < 4:
+p22, = ax3.plot(arhcf1.getTimeArray(),
+arhcf1.getCF() / max(arhcf1.getCF()), 'b', label='CF1')
+if aicSflag == 1:
+ax3.plot(arhcf2.getTimeArray(),
+arhcf2.getCF() / max(arhcf2.getCF()), 'm', label='CF2')
+ax3.plot(
+[aicarhpick.getpick(), aicarhpick.getpick()],
+[-1, 1], 'g', label='Initial S Onset')
+ax3.plot(
+[aicarhpick.getpick() - 0.5,
+aicarhpick.getpick() + 0.5],
+[1, 1], 'g')
+ax3.plot(
+[aicarhpick.getpick() - 0.5,
+aicarhpick.getpick() + 0.5],
+[-1, -1], 'g')
+ax3.plot([refSpick.getpick(), refSpick.getpick()],
+[-1.3, 1.3], 'g', linewidth=2, label='Final S Pick')
+ax3.plot(
+[refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
+[1.3, 1.3], 'g', linewidth=2)
+ax3.plot(
+[refSpick.getpick() - 0.5, refSpick.getpick() + 0.5],
+[-1.3, -1.3], 'g', linewidth=2)
+ax3.plot([lpickS, lpickS], [-1.1, 1.1], 'g--', label='lpp')
+ax3.plot([epickS, epickS], [-1.1, 1.1], 'g--', label='epp')
+ax3.legend(loc=1)
+ax3.set_yticks([])
+ax3.set_ylim([-1.5, 1.5])
+ax3.set_xlabel('Time [s] after %s' % tr_filt.stats.starttime)
+ax3.set_ylabel('Normalized Counts')
+ax3.set_title(trH2_filt.stats.channel)
+if plt_flag == 1:
+fig.show()
+try:
+input()
+except SyntaxError:
+pass
+plt.close(fig)
 ##########################################################################
 # calculate "real" onset times
 if lpickP is not None and lpickP == mpickP:

@@ -2082,12 +2092,22 @@
 epickP = zdat[0].stats.starttime - p_params['timeerrorsP'][3]
 mpickP = zdat[0].stats.starttime

+# create dictionary
+# for P phase
+ccode = zdat[0].stats.channel
+ncode = zdat[0].stats.network
+ppick = dict(channel=ccode, network=ncode, lpp=lpickP, epp=epickP, mpp=mpickP, spe=Perror, snr=SNRP,
+snrdb=SNRPdB, weight=Pweight, fm=FM, w0=None, fc=None, Mo=None,
+Mw=None, picker=picker, marked=Pmarker)
+
 if edat:
 hdat = edat[0]
 elif ndat:
 hdat = ndat[0]
 else:
-return
+# no horizontal components given
+picks = dict(P=ppick)
+return picks, station

 if lpickS is not None and lpickS == mpickS:
 lpickS += hdat.stats.delta

@@ -2104,21 +2124,14 @@
 epickS = hdat.stats.starttime - s_params['timeerrorsS'][3]
 mpickS = hdat.stats.starttime

-# create dictionary
-# for P phase
-ccode = zdat[0].stats.channel
-ncode = zdat[0].stats.network
-ppick = dict(channel=ccode, network=ncode, lpp=lpickP, epp=epickP, mpp=mpickP, spe=Perror, snr=SNRP,
-snrdb=SNRPdB, weight=Pweight, fm=FM, w0=None, fc=None, Mo=None,
-Mw=None, picker=picker, marked=Pmarker)
 # add S phase
 ccode = hdat.stats.channel
 ncode = hdat.stats.network
 spick = dict(channel=ccode, network=ncode, lpp=lpickS, epp=epickS, mpp=mpickS, spe=Serror, snr=SNRS,
 snrdb=SNRSdB, weight=Sweight, fm=None, picker=picker, Ao=Ao)
 # merge picks into returning dictionary
-picks = dict(P=ppick, S=spick, station=zdat[0].stats.station)
-return picks
+picks = dict(P=ppick, S=spick)
+return picks, station


 def iteratepicker(wf, NLLocfile, picks, badpicks, pickparameter, fig_dict=None):

@@ -2189,11 +2202,11 @@ def iteratepicker(wf, NLLocfile, picks, badpicks, pickparameter, fig_dict=None):
 print(
 "iteratepicker: The following picking parameters have been modified for iterative picking:")
 print(
-"pstart: %fs => %fs" % (pstart_old, pickparameter.get('pstart')))
+"pstart: %fs => %fs" % (pstart_old, pickparameter.get('pstart')))
 print(
-"pstop: %fs => %fs" % (pstop_old, pickparameter.get('pstop')))
+"pstop: %fs => %fs" % (pstop_old, pickparameter.get('pstop')))
 print(
-"sstop: %fs => %fs" % (sstop_old, pickparameter.get('sstop')))
+"sstop: %fs => %fs" % (sstop_old, pickparameter.get('sstop')))
 print("pickwinP: %fs => %fs" % (
 pickwinP_old, pickparameter.get('pickwinP')))
 print("Precalcwin: %fs => %fs" % (

@@ -2203,7 +2216,7 @@
 print("zfac: %f => %f" % (zfac_old, pickparameter.get('zfac')))

 # repick station
-newpicks = autopickstation(wf2pick, pickparameter, fig_dict=fig_dict)
+newpicks, _ = autopickstation(wf2pick, pickparameter, fig_dict=fig_dict)

 # replace old dictionary with new one
 picks[badpicks[i][0]] = newpicks

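Note: with autopickstation now returning the pair `(picks, station)` on every path — including the early exits that previously returned bare `None` — callers can unpack uniformly, as `iteratepicker` does above with `newpicks, _ = autopickstation(...)`. A toy sketch of that contract (`pick_station` is a made-up stand-in):

    def pick_station(stream_ok, station='ST1'):
        """Always returns (picks, station), even on failure."""
        picks = {}
        if not stream_ok:
            return picks, station          # empty dict instead of None
        picks['P'] = {'mpp': 12.3}
        return picks, station

    picks, station = pick_station(False)   # unpacking never raises
    print(station, picks or 'no picks')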
@@ -506,7 +506,8 @@ class PDFstatistics(object):

 return rlist

-def writeThetaToFile(self, array, out_dir):
+@staticmethod
+def writeThetaToFile(array, out_dir):
 """
 Method to write array like data to file. Useful since acquiring can take
 serious amount of time when dealing with large databases.
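Note: writeThetaToFile never touched `self`, so the hunk converts it to a `@staticmethod` and drops the unused first parameter; it can then be called on the class or on an instance alike. Minimal illustration:

    class Stats:
        @staticmethod
        def write_rows(rows, path):
            # no `self`: the function only depends on its arguments
            with open(path, 'w') as fid:
                fid.writelines('{}\n'.format(r) for r in rows)

    Stats.write_rows([1, 2, 3], 'theta.txt')   # callable on the class
    Stats().write_rows([4, 5], 'theta.txt')    # ... or on an instance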
@@ -164,9 +164,9 @@ class AICPicker(AutoPicker):
 iplot = int(self.iplot)
 except:
 if self.iplot == True or self.iplot == 'True':
-iplot = 2
+iplot = 2
 else:
-iplot = 0
+iplot = 0

 # find NaN's
 nn = np.isnan(self.cf)

@@ -191,17 +191,32 @@
 # remove offset in AIC function
 offset = abs(min(aic) - min(aicsmooth))
 aicsmooth = aicsmooth - offset
+cf = self.Data[0].data
 # get maximum of HOS/AR-CF as startimg point for searching
-# minimum in AIC function
-icfmax = np.argmax(self.Data[0].data)
+# minimum in AIC function
+icfmax = np.argmax(cf)
+
+# MP MP testing threshold
+thresh_hit = False
+thresh_factor = 0.7
+thresh = thresh_factor * cf[icfmax]
+for index, sample in enumerate(cf):
+if sample >= thresh:
+thresh_hit = True
+# go on searching for the following maximum
+if index > 0 and thresh_hit:
+if sample <= cf[index - 1]:
+icfmax = index - 1
+break
+# MP MP ---

 # find minimum in AIC-CF front of maximum of HOS/AR-CF
 lpickwindow = int(round(self.PickWindow / self.dt))
 tsafety = self.TSNR[1] # safety gap, AIC is usually a little bit too late
 left_corner_ind = max([icfmax - lpickwindow, 2])
 right_corner_ind = icfmax + int(tsafety / self.dt)
-aic_snip = aicsmooth[left_corner_ind : right_corner_ind]
-minima = argrelmin(aic_snip)[0] # 0th entry of tuples for axes
+aic_snip = aicsmooth[left_corner_ind: right_corner_ind]
+minima = argrelmin(aic_snip)[0]  # 0th entry of tuples for axes
 if len(minima) > 0:
 pickindex = minima[-1] + left_corner_ind
 self.Pick = self.Tcf[pickindex]
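Note: the "MP MP testing threshold" block changes how the reference maximum of the characteristic function (CF) is found: instead of the global argmax, it takes the first local maximum after the CF has climbed above 70 % of its global peak, favoring the earliest strong onset over a later, larger one. A standalone sketch of that search, as a plain-list reimplementation of the same logic rather than the PyLoT code:

    def first_strong_peak(cf, thresh_factor=0.7):
        """Index of the first local maximum after cf exceeds thresh_factor * max(cf)."""
        icfmax = max(range(len(cf)), key=cf.__getitem__)  # global argmax as fallback
        thresh = thresh_factor * cf[icfmax]
        thresh_hit = False
        for index in range(1, len(cf)):
            if cf[index] >= thresh:
                thresh_hit = True
            if thresh_hit and cf[index] <= cf[index - 1]:
                return index - 1    # first sample where the CF turns over
        return icfmax

    print(first_strong_peak([0, 1, 3, 8, 7, 9, 2]))  # -> 3: first peak above 6.3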
@@ -233,27 +248,28 @@
 ii = min([isignal[len(isignal) - 1], len(self.Tcf)])
 isignal = isignal[0:ii]
 try:
-self.Data[0].data[isignal]
+cf[isignal]
 except IndexError as e:
 msg = "Time series out of bounds! {}".format(e)
 print(msg)
 return
 # calculate SNR from CF
-self.SNR = max(abs(self.Data[0].data[isignal])) / \
-abs(np.mean(self.Data[0].data[inoise]))
+self.SNR = max(abs(cf[isignal])) / \
+abs(np.mean(cf[inoise]))
 # calculate slope from CF after initial pick
 # get slope window
 tslope = self.TSNR[3]  # slope determination window
 if tsafety >= 0:
 islope = np.where((self.Tcf <= min([self.Pick + tslope + tsafety, self.Tcf[-1]])) \
-& (self.Tcf >= self.Pick))  # TODO: put this in a seperate function like getsignalwin
+& (self.Tcf >= self.Pick))  # TODO: put this in a seperate function like getsignalwin
 else:
 islope = np.where((self.Tcf <= min([self.Pick + tslope, self.Tcf[-1]])) \
-& (self.Tcf >= self.Pick + tsafety))  # TODO: put this in a seperate function like getsignalwin
+& (
+self.Tcf >= self.Pick + tsafety))  # TODO: put this in a seperate function like getsignalwin
 # find maximum within slope determination window
 # 'cause slope should be calculated up to first local minimum only!
 try:
-dataslope = self.Data[0].data[islope[0][0:-1]]
+dataslope = cf[islope[0][0:-1]]
 except IndexError:
 print("Slope Calculation: empty array islope, check signal window")
 return

@@ -263,7 +279,7 @@
 try:
 imaxs, = argrelmax(dataslope)
 imax = imaxs[0]
-except ValueError as e:
+except (ValueError, IndexError) as e:
 print(e, 'picker: argrelmax not working!')
 imax = np.argmax(dataslope)
 iislope = islope[0][0:imax + 1]

@@ -276,14 +292,14 @@
 print("AICPicker: Maximum for slope determination right at the beginning of the window!")
 print("Choose longer slope determination window!")
 if self.iplot > 1:
-if self.fig == None or self.fig == 'None':
+if self.fig is None or self.fig == 'None':
 fig = plt.figure()
 plt_flag = iplot
 else:
 fig = self.fig
 ax = fig.add_subplot(111)
-x = self.Data[0].data
-ax.plot(self.Tcf, x / max(x), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
+cf = cf
+ax.plot(self.Tcf, cf / max(cf), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
 ax.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r', label='Smoothed AIC-CF')
 ax.legend(loc=1)
 ax.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)

@@ -291,12 +307,23 @@
 ax.set_title(self.Data[0].stats.station)
 if plt_flag in [1, 2]:
 fig.show()
-try: input()
-except SyntaxError: pass
+try:
+input()
+except SyntaxError:
+pass
 plt.close(fig)
 return
-iislope = islope[0][0:imax+1]
-dataslope = self.Data[0].data[iislope]
+iislope = islope[0][0:imax + 1]
+# MP MP change slope calculation
+# get all maxima of aicsmooth
+iaicmaxima = argrelmax(aicsmooth)[0]
+# get first index of maximum after pickindex (indices saved in iaicmaxima)
+aicmax = iaicmaxima[np.where(iaicmaxima > pickindex)[0]]
+if len(aicmax) > 0:
+iaicmax = aicmax[0]
+else:
+iaicmax = -1
+dataslope = aicsmooth[pickindex: iaicmax]
 # calculate slope as polynomal fit of order 1
 xslope = np.arange(0, len(dataslope), 1)
 P = np.polyfit(xslope, dataslope, 1)
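Note: the reworked slope estimate fits a first-order polynomial to the smoothed AIC between the pick index and the next AIC maximum, rather than to the raw CF inside the old slope window. A compact, runnable sketch of a degree-1 `np.polyfit` slope over an index window (the data and window bounds are made up):

    import numpy as np

    def window_slope(y, i0, i1, dt):
        """Least-squares slope of y[i0:i1], converted to units of y per second."""
        seg = np.asarray(y[i0:i1], dtype=float)
        x = np.arange(len(seg))
        slope_per_sample, _ = np.polyfit(x, seg, 1)  # degree-1 fit: [slope, intercept]
        return slope_per_sample / dt

    y = [0.0, 0.1, 0.5, 1.4, 2.9, 3.1]
    print(window_slope(y, 2, 6, dt=0.01))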
@@ -306,24 +333,23 @@
 else:
 self.slope = 1 / (len(dataslope) * self.Data[0].stats.delta) * (datafit[-1] - datafit[0])
 # normalize slope to maximum of cf to make it unit independent
-self.slope /= self.Data[0].data[icfmax]
+self.slope /= aicsmooth[iaicmax]

 else:
 self.SNR = None
 self.slope = None

 if iplot > 1:
-if self.fig == None or self.fig == 'None':
+if self.fig is None or self.fig == 'None':
 fig = plt.figure()  # self.iplot)
 plt_flag = iplot
 else:
 fig = self.fig
 fig._tight = True
 ax1 = fig.add_subplot(211)
-x = self.Data[0].data
-if len(self.Tcf) > len(self.Data[0].data):  # why? LK
-self.Tcf = self.Tcf[0:len(self.Tcf)-1]
-ax1.plot(self.Tcf, x / max(x), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
+if len(self.Tcf) > len(cf):  # why? LK
+self.Tcf = self.Tcf[0:len(self.Tcf) - 1]
+ax1.plot(self.Tcf, cf / max(cf), color=self._linecolor, linewidth=0.7, label='(HOS-/AR-) Data')
 ax1.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r', label='Smoothed AIC-CF')
 if self.Pick is not None:
 ax1.plot([self.Pick, self.Pick], [-0.1, 0.5], 'b', linewidth=2, label='AIC-Pick')

@@ -333,7 +359,7 @@

 if self.Pick is not None:
 ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
-ax2.plot(self.Tcf, x, color=self._linecolor, linewidth=0.7, label='Data')
+ax2.plot(self.Tcf, aicsmooth, color='r', linewidth=0.7, label='Data')
 ax1.axvspan(self.Tcf[inoise[0]], self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
 ax1.axvspan(self.Tcf[isignal[0]], self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0,
 label='Signal Window')

@@ -345,7 +371,8 @@
 label='Signal Window')
 ax2.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
 label='Slope Window')
-ax2.plot(self.Tcf[iislope], datafit, 'g', linewidth=2, label='Slope')
+ax2.plot(self.Tcf[pickindex: iaicmax], datafit, 'g', linewidth=2,
+label='Slope')  # MP MP changed temporarily!

 if self.slope is not None:
 ax1.set_title('Station %s, SNR=%7.2f, Slope= %12.2f counts/s' % (self.Data[0].stats.station,

@@ -361,15 +388,17 @@

 if plt_flag in [1, 2]:
 fig.show()
-try: input()
-except SyntaxError: pass
+try:
+input()
+except SyntaxError:
+pass
 plt.close(fig)
+if plt_flag == 3:
+stats = self.Data[0].stats
+netstlc = '{}.{}.{}'.format(stats.network, stats.station, stats.location)
+fig.savefig('aicfig_{}_{}.png'.format(netstlc, stats.channel))

-if self.Pick == None:
+if self.Pick is None:
 print('AICPicker: Could not find minimum, picking window too short?')

 return

@@ -386,9 +415,9 @@ class PragPicker(AutoPicker):
 iplot = int(self.getiplot())
 except:
 if self.getiplot() == True or self.getiplot() == 'True':
-iplot = 2
+iplot = 2
 else:
-iplot = 0
+iplot = 0

 if self.getpick1() is not None:
 print('PragPicker: Get most likely pick from HOS- or AR-CF using pragmatic picking algorithm ...')

@@ -427,11 +456,11 @@
 # prominent trend: decrease aus
 # flat: use given aus
 cfdiff = np.diff(cfipick)
-if len(cfdiff)<20:
+if len(cfdiff) < 20:
 print('PragPicker: Very few samples for CF. Check LTA window dimensions!')
 i0diff = np.where(cfdiff > 0)
 cfdiff = cfdiff[i0diff]
-if len(cfdiff)<1:
+if len(cfdiff) < 1:
 print('PragPicker: Negative slope for CF. Check LTA window dimensions! STOP')
 self.Pick = None
 return

@@ -455,7 +484,7 @@
 break

 # now we look to the left
-if len(self.cf) > ipick1 +1:
+if len(self.cf) > ipick1 + 1:
 for i in range(ipick1, max([ipick1 - lpickwindow + 1, 2]), -1):
 if self.cf[i + 1] > self.cf[i] and self.cf[i - 1] >= self.cf[i]:
 if cfsmooth[i - 1] * (1 + aus1) >= cfsmooth[i]:

@@ -466,7 +495,7 @@
 cfpick_l = self.cf[i]
 break
 else:
-msg ='PragPicker: Initial onset too close to start of CF! \
+msg = 'PragPicker: Initial onset too close to start of CF! \
 Stop finalizing pick to the left.'
 print(msg)

@@ -486,7 +515,7 @@
 pickflag = 0

 if iplot > 1:
-if self.fig == None or self.fig == 'None':
+if self.fig is None or self.fig == 'None':
 fig = plt.figure()  # self.getiplot())
 plt_flag = 1
 else:

@@ -496,15 +525,18 @@
 ax.plot(Tcfpick, cfipick, color=self._linecolor, linewidth=0.7, label='CF')
 ax.plot(Tcfpick, cfsmoothipick, 'r', label='Smoothed CF')
 if pickflag > 0:
-ax.plot([self.Pick, self.Pick], [min(cfipick), max(cfipick)], self._pickcolor_p, linewidth=2, label='Pick')
+ax.plot([self.Pick, self.Pick], [min(cfipick), max(cfipick)], self._pickcolor_p, linewidth=2,
+label='Pick')
 ax.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
 ax.set_yticks([])
 ax.set_title(self.Data[0].stats.station)
 ax.legend(loc=1)
 if plt_flag == 1:
 fig.show()
-try: input()
-except SyntaxError: pass
+try:
+input()
+except SyntaxError:
+pass
 plt.close(fig)
 return

@@ -14,8 +14,7 @@ import matplotlib.pyplot as plt
 import numpy as np
 from scipy.signal import argrelmax
 from obspy.core import Stream, UTCDateTime
-from pylot.core.util.utils import real_Bool, real_None
-
+from pylot.core.util.utils import real_Bool, real_None, SetChannelComponents


 def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None, linecolor='k'):

@@ -144,13 +143,16 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None, linecol
 ax.plot(t, x, color=linecolor, linewidth=0.7, label='Data')
 ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
 ax.axvspan(t[isignal[0]], t[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
-ax.plot([t[0], t[int(len(t)) - 1]], [nlevel, nlevel], color=linecolor, linewidth=0.7, linestyle='dashed', label='Noise Level')
+ax.plot([t[0], t[int(len(t)) - 1]], [nlevel, nlevel], color=linecolor, linewidth=0.7, linestyle='dashed',
+label='Noise Level')
 ax.plot(t[pis[zc]], np.zeros(len(zc)), '*g',
 markersize=14, label='Zero Crossings')
 ax.plot([t[0], t[int(len(t)) - 1]], [-nlevel, -nlevel], color=linecolor, linewidth=0.7, linestyle='dashed')
 ax.plot([Pick1, Pick1], [max(x), -max(x)], 'b', linewidth=2, label='mpp')
-ax.plot([LPick, LPick], [max(x) / 2, -max(x) / 2], color=linecolor, linewidth=0.7, linestyle='dashed', label='lpp')
-ax.plot([EPick, EPick], [max(x) / 2, -max(x) / 2], color=linecolor, linewidth=0.7, linestyle='dashed', label='epp')
+ax.plot([LPick, LPick], [max(x) / 2, -max(x) / 2], color=linecolor, linewidth=0.7, linestyle='dashed',
+label='lpp')
+ax.plot([EPick, EPick], [max(x) / 2, -max(x) / 2], color=linecolor, linewidth=0.7, linestyle='dashed',
+label='epp')
 ax.plot([Pick1 + PickError, Pick1 + PickError],
 [max(x) / 2, -max(x) / 2], 'r--', label='spe')
 ax.plot([Pick1 - PickError, Pick1 - PickError],

@@ -163,8 +165,10 @@
 ax.legend(loc=1)
 if plt_flag == 1:
 fig.show()
-try: input()
-except SyntaxError: pass
+try:
+input()
+except SyntaxError:
+pass
 plt.close(fig)

 return EPick, LPick, PickError

@@ -198,9 +202,9 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None, linecolor='k'):
 iplot = int(iplot)
 except:
 if iplot == True or iplot == 'True':
-iplot = 2
+iplot = 2
 else:
-iplot = 0
+iplot = 0

 warnings.simplefilter('ignore', np.RankWarning)

@@ -227,8 +231,7 @@
 # get zero crossings after most likely pick
 # initial onset is assumed to be the first zero crossing
 # first from unfiltered trace
-zc1 = []
-zc1.append(Pick)
+zc1 = [Pick]
 index1 = []
 i = 0
 for j in range(ipick[0][1], ipick[0][len(t[ipick]) - 1]):

@@ -273,8 +276,7 @@

 # now using filterd trace
 # next zero crossings after most likely pick
-zc2 = []
-zc2.append(Pick)
+zc2 = [Pick]
 index2 = []
 i = 0
 for j in range(ipick[0][1], ipick[0][len(t[ipick]) - 1]):

@@ -362,8 +364,10 @@
 ax2.set_yticks([])
 if plt_flag == 1:
 fig.show()
-try: input()
-except SyntaxError: pass
+try:
+input()
+except SyntaxError:
+pass
 plt.close(fig)

 return FM

@@ -573,8 +577,6 @@ def select_for_phase(st, phase):
 :rtype: `~obspy.core.stream.Stream`
 """

-from pylot.core.util.defaults import SetChannelComponents
-
 sel_st = Stream()
 compclass = SetChannelComponents()
 if phase.upper() == 'P':

@@ -623,14 +625,18 @@ def wadaticheck(pickdic, dttolerance, iplot=0, fig_dict=None):
 ibad = 0

 for key in list(pickdic.keys()):
-if pickdic[key]['P']['weight'] < 4 and pickdic[key]['S']['weight'] < 4:
+ppick = pickdic[key].get('P')
+spick = pickdic[key].get('S')
+if not ppick or not spick:
+continue
+if ppick['weight'] < 4 and spick['weight'] < 4:
 # calculate S-P time
-spt = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
+spt = spick['mpp'] - ppick['mpp']
 # add S-P time to dictionary
 pickdic[key]['SPt'] = spt
 # add P onsets and corresponding S-P times to list
-UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
-UTCSpick = UTCDateTime(pickdic[key]['S']['mpp'])
+UTCPpick = UTCDateTime(ppick['mpp'])
+UTCSpick = UTCDateTime(spick['mpp'])
 Ppicks.append(UTCPpick.timestamp)
 Spicks.append(UTCSpick.timestamp)
 SPtimes.append(spt)
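Note: wadaticheck (and checkPonsets in a later hunk) now fetch the phase entries with `dict.get` and skip stations missing either pick, instead of indexing directly and risking a KeyError when, say, only a P pick exists. A runnable sketch of the guard with made-up pick data:

    pickdic = {'ST1': {'P': {'mpp': 10.0, 'weight': 1},
                       'S': {'mpp': 13.2, 'weight': 2}},
               'ST2': {'P': {'mpp': 11.0, 'weight': 0}}}   # no S pick

    for key in list(pickdic):
        ppick = pickdic[key].get('P')
        spick = pickdic[key].get('S')
        if not ppick or not spick:
            continue                      # ST2 is skipped, no KeyError
        if ppick['weight'] < 4 and spick['weight'] < 4:
            print(key, 'S-P time:', spick['mpp'] - ppick['mpp'])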
@ -660,11 +666,11 @@ def wadaticheck(pickdic, dttolerance, iplot=0, fig_dict=None):
|
||||
# check, if deviation is larger than adjusted
|
||||
if wddiff > dttolerance:
|
||||
# remove pick from dictionary
|
||||
pickdic.pop(key)
|
||||
# # mark onset and downgrade S-weight to 9
|
||||
# # mark onset and downgrade S-weight to 9, also set SPE to None (disregarded in GUI)
|
||||
# # (not used anymore)
|
||||
# marker = 'badWadatiCheck'
|
||||
# pickdic[key]['S']['weight'] = 9
|
||||
marker = 'badWadatiCheck'
|
||||
pickdic[key]['S']['weight'] = 9
|
||||
pickdic[key]['S']['spe'] = None
|
||||
badstations.append(key)
|
||||
ibad += 1
|
||||
else:
|
||||
@ -676,8 +682,7 @@ def wadaticheck(pickdic, dttolerance, iplot=0, fig_dict=None):
|
||||
checkedSPtime = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
|
||||
checkedSPtimes.append(checkedSPtime)
|
||||
|
||||
pickdic[key]['S']['marked'] = marker
|
||||
#pickdic[key]['S']['marked'] = marker
|
||||
pickdic[key]['S']['marked'] = marker
|
||||
print("wadaticheck: the following stations failed the check:")
|
||||
print(badstations)
|
||||
|
||||
@ -719,8 +724,8 @@ def wadaticheck(pickdic, dttolerance, iplot=0, fig_dict=None):
|
||||
ax.plot(Ppicks, SPtimes, 'ro', label='Skipped S-Picks')
|
||||
if wfitflag == 0:
|
||||
ax.plot(Ppicks, wdfit, color=linecolor, linewidth=0.7, label='Wadati 1')
|
||||
ax.plot(Ppicks, wdfit+dttolerance, color='0.9', linewidth=0.5, label='Wadati 1 Tolerance')
|
||||
ax.plot(Ppicks, wdfit-dttolerance, color='0.9', linewidth=0.5)
|
||||
ax.plot(Ppicks, wdfit + dttolerance, color='0.9', linewidth=0.5, label='Wadati 1 Tolerance')
|
||||
ax.plot(Ppicks, wdfit - dttolerance, color='0.9', linewidth=0.5)
|
||||
ax.plot(checkedPpicks, wdfit2, 'g', label='Wadati 2')
|
||||
ax.plot(checkedPpicks, checkedSPtimes, color=linecolor,
|
||||
linewidth=0, marker='o', label='Reliable S-Picks')
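The loop above builds the classic Wadati diagram: S-P time plotted against absolute P onset, fitted by a line whose slope is vp/vs - 1; picks deviating from the fit by more than dttolerance are downweighted. A minimal, self-contained sketch of the same fit with synthetic numbers (numpy only; names are illustrative, not PyLoT API):

# Wadati-diagram sketch with synthetic values (illustrative only)
import numpy as np

Ppicks = np.array([10.0, 12.5, 15.0, 20.0, 30.0])   # P onsets [s]
SPtimes = 0.73 * (Ppicks - 8.0)                     # S-P times for vp/vs = 1.73, origin time 8 s
slope, intercept = np.polyfit(Ppicks, SPtimes, 1)   # least-squares Wadati line
wdfit = slope * Ppicks + intercept
vpvs = slope + 1.0                                  # slope of the line is vp/vs - 1
dttolerance = 0.5
bad = np.abs(SPtimes - wdfit) > dttolerance         # these picks would be downweighted
print('vp/vs = {:.2f}, {} bad pick(s)'.format(vpvs, int(bad.sum())))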
@ -787,9 +792,9 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fi
iplot = int(iplot)
except:
if real_Bool(iplot):
iplot = 2
iplot = 2
else:
iplot = 0
iplot = 0

assert isinstance(X, Stream), "%s is not a stream object" % str(X)

@ -851,8 +856,10 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fi
ax.set_yticks([])
if plt_flag == 1:
fig.show()
try: input()
except SyntaxError: pass
try:
input()
except SyntaxError:
pass
plt.close(fig)

return returnflag
@ -886,9 +893,12 @@ def checkPonsets(pickdic, dttolerance, jackfactor=5, iplot=0, fig_dict=None):
Ppicks = []
stations = []
for station in pickdic:
if pickdic[station]['P']['weight'] < 4:
pick = pickdic[station].get('P')
if not pick:
continue
if pick['weight'] < 4:
# add P onsets to list
UTCPpick = UTCDateTime(pickdic[station]['P']['mpp'])
UTCPpick = UTCDateTime(pick['mpp'])
Ppicks.append(UTCPpick.timestamp)
stations.append(station)

@ -1077,16 +1087,15 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None, linecolor='k'):
:return: returnflag; 0 if onset failed test, 1 if onset passed test
:rtype: int
"""


plt_flag = 0
try:
iplot = int(iplot)
except:
if real_Bool(iplot):
iplot = 2
iplot = 2
else:
iplot = 0

iplot = 0

assert isinstance(X, Stream), "%s is not a stream object" % str(X)

@ -1188,12 +1197,81 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None, linecolor='k'):
ax.set_xlabel('Time [s] since %s' % zdat[0].stats.starttime)
if plt_flag == 1:
fig.show()
try: input()
except SyntaxError: pass
try:
input()
except SyntaxError:
pass
plt.close(fig)
return returnflag


def getPickQuality(wfdata, picks, inputs, phase, compclass=None):
quality = 4
components4phases = {'P': ['Z'],
'S': ['N', 'E']}
timeErrors4phases = {'P': 'timeerrorsP',
'S': 'timeerrorsS'}
tsnr4phases = {'P': 'tsnrz',
'S': 'tsnrh'}

if not phase in components4phases.keys():
raise IOError('getPickQuality: Could not understand phase: {}'.format(phase))

if not compclass:
print('Warning: No settings for channel components found. Using default')
compclass = SetChannelComponents()

picks = picks[phase]
mpp = picks.get('mpp')
uncertainty = picks.get('spe')
if not mpp:
print('getPickQuality: No pick found!')
return quality
if not uncertainty:
print('getPickQuality: No pick uncertainty (spe) found!')
return quality

tsnr = inputs[tsnr4phases[phase]]
timeErrors = inputs[timeErrors4phases[phase]]
snrdb_final = 0

for component in components4phases[phase]:
alter_comp = compclass.getCompPosition(component)
st_select = wfdata.select(component=component)
st_select += wfdata.select(component=alter_comp)
if st_select:
trace = st_select[0]
_, snrdb, _ = getSNR(st_select, tsnr,
mpp - trace.stats.starttime)
if snrdb > snrdb_final:
snrdb_final = snrdb

quality = getQualityFromUncertainty(uncertainty, timeErrors)
quality += getQualityFromSNR(snrdb_final)

return quality
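getPickQuality therefore returns the uncertainty-based class (0-4) plus the SNR modifier (0-4), i.e. values up to 8 for a weak, uncertain pick. A hedged usage sketch, assuming `wfdata` is an obspy Stream of the picked station; the parameter values below are hypothetical and not taken from a real pylot.in file:

from obspy import UTCDateTime

picks = {'P': {'mpp': UTCDateTime(2012, 8, 1, 12, 0, 3),  # most likely pick
               'spe': 0.05}}                              # symmetric pick error [s]
inputs = {'tsnrz': (35., 3., 2., 1.),                     # SNR window tuple for Z (assumed layout)
          'timeerrorsP': [0.02, 0.04, 0.08, 0.16]}        # quality class boundaries [s]
quality = getPickQuality(wfdata, picks, inputs, 'P')
print('combined quality (uncertainty class + SNR modifier):', quality)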


def getQualityFromSNR(snrdb):
quality_modifier = 4
if not snrdb:
print('getQualityFromSNR: No snrdb!')
return quality_modifier
# MP MP ++++ experimental,
# raise pick quality by x classes if snrdb is lower than corresponding key
quality4snrdb = {3: 4,
5: 3,
7: 2,
9: 1,
11: 0}
# MP MP ---
# iterate over all thresholds and check whether snrdb is larger, if so, set new quality_modifier
for snrdb_threshold in sorted(list(quality4snrdb.keys())):
if snrdb > snrdb_threshold:
quality_modifier = quality4snrdb[snrdb_threshold]
return quality_modifier
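Because the thresholds are walked in ascending order, the returned modifier corresponds to the largest threshold the SNR still exceeds. For example, evaluated against the table above:

for snrdb in (2, 4, 8, 12):
    print(snrdb, '->', getQualityFromSNR(snrdb))
# 2 -> 4, 4 -> 4, 8 -> 2, 12 -> 0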


def get_quality_class(uncertainty, weight_classes):
"""
Script to transform uncertainty into quality classes 0-4 regarding adjusted time errors
@ -1378,6 +1456,26 @@ def get_pickparams(pickparam):

return p_params, s_params, first_motion_params, signal_length_params

def getQualityFromUncertainty(uncertainty, Errors):
# set initial quality to 4 (worst) and change only if one condition is hit
quality = 4

if real_None(uncertainty) is None:
return quality

if uncertainty <= Errors[0]:
quality = 0
elif (uncertainty > Errors[0]) and \
(uncertainty <= Errors[1]):
quality = 1
elif (uncertainty > Errors[1]) and \
(uncertainty <= Errors[2]):
quality = 2
elif (uncertainty > Errors[2]) and \
(uncertainty <= Errors[3]):
quality = 3
elif uncertainty > Errors[3]:
quality = 4
return quality
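`Errors` is simply the sequence of the four class boundaries in seconds, so the function bins the uncertainty. For example, with hypothetical boundaries:

Errors = [0.02, 0.04, 0.08, 0.16]
for spe in (0.01, 0.05, 0.2):
    print(spe, '->', getQualityFromUncertainty(spe, Errors))
# 0.01 -> 0, 0.05 -> 2, 0.2 -> 4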

if __name__ == '__main__':
import doctest

@ -23,8 +23,7 @@ class Array_map(QtGui.QWidget):
'''
QtGui.QWidget.__init__(self)
self._parent = parent
self.metadata_type = parent.metadata[0]
self.metadata = parent.metadata[1]
self.metadata = parent.metadata
self.picks = None
self.picks_dict = None
self.autopicks_dict = None
@ -74,15 +73,11 @@ class Array_map(QtGui.QWidget):
if pickDlg.exec_():
pyl_mw.setDirty(True)
pyl_mw.update_status('picks accepted ({0})'.format(station))
replot = pyl_mw.get_current_event().setPick(station, pickDlg.getPicks())
pyl_mw.addPicks(station, pickDlg.getPicks(picktype='manual'), type='manual')
pyl_mw.addPicks(station, pickDlg.getPicks(picktype='auto'), type='auto')
self._refresh_drawings()
if replot:
pyl_mw.plotWaveformData()
pyl_mw.drawPicks()
pyl_mw.draw()
else:
pyl_mw.drawPicks(station)
pyl_mw.draw()
pyl_mw.drawPicks(station)
pyl_mw.draw()
else:
pyl_mw.update_status('picks discarded ({0})'.format(station))
except Exception as e:
@ -94,7 +89,7 @@ class Array_map(QtGui.QWidget):
self.comboBox_phase.currentIndexChanged.connect(self._refresh_drawings)
self.comboBox_am.currentIndexChanged.connect(self._refresh_drawings)
self.canvas.mpl_connect('motion_notify_event', self.mouse_moved)
#self.zoom_id = self.basemap.ax.figure.canvas.mpl_connect('scroll_event', self.zoom)
# self.zoom_id = self.basemap.ax.figure.canvas.mpl_connect('scroll_event', self.zoom)

def _from_dict(self, function, key):
return function(self.stations_dict.values(), key=lambda x: x[key])[key]
@ -159,35 +154,8 @@ class Array_map(QtGui.QWidget):
self.main_box.addWidget(self.canvas, 1)
self.main_box.addWidget(self.status_label, 0)


def init_stations(self):
def stat_info_from_parser(parser):
stations_dict = {}
for station in parser.stations:
station_name = station[0].station_call_letters
network_name = station[0].network_code
if not station_name in stations_dict.keys():
st_id = network_name + '.' + station_name
stations_dict[st_id] = {'latitude': station[0].latitude,
'longitude': station[0].longitude}
return stations_dict

def stat_info_from_inventory(inventory):
stations_dict = {}
for network in inventory.networks:
for station in network.stations:
station_name = station.code
network_name = network.code
if not station_name in stations_dict.keys():
st_id = network_name + '.' + station_name
stations_dict[st_id] = {'latitude': station.latitude,
'longitude': station.longitude}
return stations_dict

read_stat = {'xml': stat_info_from_inventory,
'dless': stat_info_from_parser}

self.stations_dict = read_stat[self.metadata_type](self.metadata)
self.stations_dict = self.metadata.get_all_coordinates()
self.latmin = self.get_min_from_stations('latitude')
self.lonmin = self.get_min_from_stations('longitude')
self.latmax = self.get_max_from_stations('latitude')
@ -196,13 +164,15 @@ class Array_map(QtGui.QWidget):
def init_picks(self):
def get_picks(station_dict):
picks = {}
# selected phase
phase = self.comboBox_phase.currentText()
for st_id in station_dict.keys():
try:
station_name = st_id.split('.')[-1]
# current_picks_dict: auto or manual
pick = self.current_picks_dict()[station_name][phase]
if pick['picker'] == 'auto':
if pick['weight'] > 3:
if not pick['spe']:
continue
picks[st_id] = pick['mpp']
except KeyError:
@ -217,11 +187,12 @@ class Array_map(QtGui.QWidget):
for pick in picks.values():
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
picks_utc.append(pick)
self._earliest_picktime = min(picks_utc)
for st_id, pick in picks.items():
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
pick -= self._earliest_picktime
picks_rel[st_id] = pick
if picks_utc:
self._earliest_picktime = min(picks_utc)
for st_id, pick in picks.items():
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
pick -= self._earliest_picktime
picks_rel[st_id] = pick
return picks_rel

self.picks = get_picks(self.stations_dict)
@ -331,6 +302,8 @@ class Array_map(QtGui.QWidget):

def scatter_picked_stations(self):
picks, lats, lons = self.get_picks_lat_lon()
if len(lons) < 1 and len(lats) < 1:
return
# workaround because of an issue with latlon transformation of arrays with len <3
if len(lons) <= 2 and len(lats) <= 2:
self.sc_picked = self.basemap.scatter(lons[0], lats[0], s=50, facecolor='white',
@ -354,7 +327,7 @@ class Array_map(QtGui.QWidget):
def add_cbar(self, label):
self.cbax_bg = inset_axes(self.main_ax, width="6%", height="75%", loc=5)
cbax = inset_axes(self.main_ax, width='2%', height='70%', loc=5)
cbar = self.main_ax.figure.colorbar(self.sc_picked, cax = cbax)
cbar = self.main_ax.figure.colorbar(self.sc_picked, cax=cbax)
cbar.set_label(label)
cbax.yaxis.tick_left()
cbax.yaxis.set_label_position('left')
@ -375,13 +348,16 @@ class Array_map(QtGui.QWidget):
self.draw_everything()

def draw_everything(self):
if self.picks_dict or self.autopicks_dict:
picktype = self.comboBox_am.currentText()
if (self.picks_dict and picktype == 'manual') \
or (self.autopicks_dict and picktype == 'auto'):
self.init_picks()
if len(self.picks) >= 3:
self.init_picksgrid()
self.draw_contour_filled()
self.scatter_all_stations()
if self.picks_dict or self.autopicks_dict:
if (self.picks_dict and picktype == 'manual') \
or (self.autopicks_dict and picktype == 'auto'):
self.scatter_picked_stations()
self.cbar = self.add_cbar(label='Time relative to first onset ({}) [s]'.format(self._earliest_picktime))
self.comboBox_phase.setEnabled(True)
@ -397,16 +373,16 @@ class Array_map(QtGui.QWidget):
del (self.cbar, self.cbax_bg)
if hasattr(self, 'sc_picked'):
self.sc_picked.remove()
del (self.sc_picked)
del self.sc_picked
if hasattr(self, 'sc_event'):
self.sc_event.remove()
del (self.sc_event)
del self.sc_event
if hasattr(self, 'contourf'):
self.remove_contourf()
del (self.contourf)
del self.contourf
if hasattr(self, 'cid'):
self.canvas.mpl_disconnect(self.cid)
del (self.cid)
del self.cid
try:
self.sc.remove()
except Exception as e:

@ -13,86 +13,95 @@ from pylot.core.util.utils import key_for_set_value, find_in_list, \


class Metadata(object):

def __init__(self, inventory=None):
self.inventories = []
# saves read metadata objects (Parser/inventory) for a filename
self.inventory_files = {}
# saves filenames holding metadata for a seed_id
# seed id as key, path to file as value
self.seed_ids = {}
self.stations_dict = {}
if inventory:
if os.path.isdir(inventory):
self.add_inventory(inventory)
if os.path.isfile(inventory):
self.add_inventory_file(inventory)


def __str__(self):
repr = 'PyLoT Metadata object including the following inventories:\n\n'
ntotal = len(self.inventories)
for index, inventory in enumerate(self.inventories):
if index < 2 or (ntotal - index) < 3:
repr += '{}\n'.format(inventory)
if ntotal > 4 and int(ntotal/2) == index:
if ntotal > 4 and int(ntotal / 2) == index:
repr += '...\n'
if ntotal > 4:
repr += '\nTotal of {} inventories. Use Metadata.inventories to see all.'.format(ntotal)
return repr


def __repr__(self):
return self.__str__()


def add_inventory(self, path_to_inventory):
'''
add paths to list of inventories

:param path_to_inventory:
:return:
'''
"""
Add path to list of inventories.
:param path_to_inventory: Path to a folder
:type path_to_inventory: str
:return: None
"""
assert (os.path.isdir(path_to_inventory)), '{} is no directory'.format(path_to_inventory)
if not path_to_inventory in self.inventories:
self.inventories.append(path_to_inventory)


def add_inventory_file(self, path_to_inventory_file):
'''
add a single file to inventory files

:param path_to_inventory_file:
:return:

'''
"""
Add the folder in which the file exists to the list of inventories.
:param path_to_inventory_file: full path including filename
:type path_to_inventory_file: str
:return: None
"""
assert (os.path.isfile(path_to_inventory_file)), '{} is no file'.format(path_to_inventory_file)
self.add_inventory(os.path.split(path_to_inventory_file)[0])
if not path_to_inventory_file in self.inventory_files.keys():
self.read_single_file(path_to_inventory_file)


def remove_all_inventories(self):
self.__init__()


def remove_inventory(self, path_to_inventory):
'''
remove a path from inventories list

:param path_to_inventory:
:return:
'''
"""
Remove a path from inventories list. If path is not in inventories list, do nothing.
:param path_to_inventory: Path to a folder
"""
if not path_to_inventory in self.inventories:
print('Path {} not in inventories list.'.format(path_to_inventory))
return
self.inventories.remove(path_to_inventory)
for filename in self.inventory_files.keys():
if filename.startswith(path_to_inventory):
del(self.inventory_files[filename])
del (self.inventory_files[filename])
for seed_id in self.seed_ids.keys():
if self.seed_ids[seed_id].startswith(path_to_inventory):
del(self.seed_ids[seed_id])
del (self.seed_ids[seed_id])


def get_metadata(self, seed_id):
def get_metadata(self, seed_id, time=None):
"""
Get metadata for seed id at time. When time is not specified, metadata for current time is fetched.
:param seed_id: Seed id such as BW.WETR..HHZ (Network.Station.Location.Channel)
:type seed_id: str
:param time: Time for which the metadata should be returned
:type time: UTCDateTime
:return: Dictionary with keys data and invtype.
data is a obspy.io.xseed.parser.Parser or an obspy.core.inventory.inventory.Inventory depending on the metadata
file.
invtype is a string denoting of which type the value of the data key is. It can take the values 'dless',
'dseed', 'xml', 'resp', according to the filetype of the metadata.
:rtype: dict
"""
# try most recent data if no time is specified
if not time:
time = UTCDateTime()
# get metadata for a specific seed_id, if not already read, try to read from inventories
if not seed_id in self.seed_ids.keys():
self._read_inventory_data(seed_id)
@ -100,12 +109,13 @@ class Metadata(object):
if not seed_id in self.seed_ids.keys():
print('No data found for seed id {}. Trying to find it in all known inventories...'.format(seed_id))
self.read_all()
for inv_fname, metadata in self.inventory_files.items():
for inv_fname, metadata_dict in self.inventory_files.items():
# use get_coordinates to check for seed_id
try:
metadata['data'].get_coordinates(seed_id)
metadata_dict['data'].get_coordinates(seed_id, time)
self.seed_ids[seed_id] = inv_fname
return metadata
print('Found metadata for station {}!'.format(seed_id))
return metadata_dict
except Exception as e:
continue
print('Could not find metadata for station {}'.format(seed_id))
@ -113,30 +123,30 @@ class Metadata(object):
fname = self.seed_ids[seed_id]
return self.inventory_files[fname]


def read_all(self):
'''
read all metadata files found in all inventories
:return:
'''
"""
Read all metadata files found in all inventories
"""
for inventory in self.inventories:
for inv_fname in os.listdir(inventory):
inv_fname = os.path.join(inventory, inv_fname)
if not self.read_single_file(inv_fname):
continue


def read_single_file(self, inv_fname):
if not inv_fname in self.inventory_files.keys():
pass
else:
if not self.inventory_files[inv_fname]:
pass
else:
return
"""
Try to read a single file as Parser/Inventory and add its dictionary to inventory files if reading succeeded.
:param inv_fname: path/filename of inventory file
:type inv_fname: str
:rtype: None
"""
# return if it was read already
if self.inventory_files.get(inv_fname, None):
return

try:
invtype, robj = self._read_metadata_file(inv_fname)
if robj == None:
if robj is None:
return
except Exception as e:
print('Could not read file {}'.format(inv_fname))
@ -145,15 +155,64 @@ class Metadata(object):
'data': robj}
return True


def get_coordinates(self, seed_id):
metadata = self.get_metadata(seed_id)
def get_coordinates(self, seed_id, time=None):
"""
Get coordinates of given seed id.
:param seed_id: Seed id such as BW.WETR..HHZ (Network.Station.Location.Channel)
:type seed_id: str
:param time: Used when a station has data available at multiple time intervals
:type time: UTCDateTime
:return: dict containing position information of the station
:rtype: dict
"""
# try most recent data if no time is specified
if not time:
time = UTCDateTime()
metadata = self.get_metadata(seed_id, time)
if not metadata:
return
return metadata['data'].get_coordinates(seed_id)
return metadata['data'].get_coordinates(seed_id, time)
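In practice the class replaces the old (invtype, parser) tuples: construct it once from a folder or single file, then query by seed id. A minimal usage sketch (the inventory path is hypothetical):

from obspy import UTCDateTime
from pylot.core.util.dataprocessing import Metadata

m = Metadata('/data/metadata')  # hypothetical folder with dataless SEED/StationXML/RESP files
coords = m.get_coordinates('BW.WETR..HHZ', time=UTCDateTime('2012-08-01'))
if coords:
    print(coords['latitude'], coords['longitude'], coords['elevation'])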

def get_all_coordinates(self):
def stat_info_from_parser(parser):
for station in parser.stations:
station_name = station[0].station_call_letters
network_name = station[0].network_code
if not station_name in self.stations_dict.keys():
st_id = network_name + '.' + station_name
self.stations_dict[st_id] = {'latitude': station[0].latitude,
'longitude': station[0].longitude}

def stat_info_from_inventory(inventory):
for network in inventory.networks:
for station in network.stations:
station_name = station.code
network_name = network.code
if not station_name in self.stations_dict.keys():
st_id = network_name + '.' + station_name
self.stations_dict[st_id] = {'latitude': station.latitude,
'longitude': station.longitude}

read_stat = {'xml': stat_info_from_inventory,
'dless': stat_info_from_parser}

self.read_all()
for item in self.inventory_files.values():
inventory = item['data']
invtype = item['invtype']
read_stat[invtype](inventory)

return self.stations_dict

def get_paz(self, seed_id, time):
"""

:param seed_id: Seed id such as BW.WETR..HHZ (Network.Station.Location.Channel)
:type seed_id: str
:param time: Used when a station has data available at multiple time intervals
:type time: UTCDateTime
:rtype: dict
"""
metadata = self.get_metadata(seed_id)
if not metadata:
return
@ -163,18 +222,17 @@ class Metadata(object):
resp = metadata['data'].get_response(seed_id, time)
return resp.get_paz(seed_id)
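A corresponding sketch for the response information; what comes back is whatever the underlying Parser or response object provides for poles, zeros and gain:

# reusing the Metadata object from the sketch above
paz = m.get_paz('BW.WETR..HHZ', time=UTCDateTime('2012-08-01'))
if paz is not None:
    print(paz)  # poles, zeros and gain/sensitivity as provided by the metadata backend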


def _read_inventory_data(self, seed_id=None):
for inventory in self.inventories:
if self._read_metadata_iterator(path_to_inventory=inventory, station_seed_id=seed_id):
return


def _read_metadata_iterator(self, path_to_inventory, station_seed_id):
'''
search for metadata for a specific station iteratively
'''
"""
Search for metadata for a specific station iteratively.
"""
station, network, location, channel = station_seed_id.split('.')
# search for station seed id in filenames in inventory
fnames = glob.glob(os.path.join(path_to_inventory, '*' + station_seed_id + '*'))
if not fnames:
# search for station name in filename
@ -203,13 +261,13 @@ class Metadata(object):
continue
print('Could not find metadata for station_seed_id {} in path {}'.format(station_seed_id, path_to_inventory))


def _read_metadata_file(self, path_to_inventory_filename):
'''
"""
function reading metadata files (either dataless seed, xml or resp)
:param path_to_inventory_filename:
:return: file type/ending, inventory object (Parser or Inventory)
'''
:rtype: (str, obspy.io.xseed.Parser or obspy.core.inventory.inventory.Inventory)
"""
# functions used to read metadata for different file endings (or file types)
read_functions = {'dless': self._read_dless,
'dseed': self._read_dless,
@ -228,8 +286,8 @@ class Metadata(object):
return file_type, robj
return None, None


def _read_dless(self, path_to_inventory):
@staticmethod
def _read_dless(path_to_inventory):
exc = None
try:
parser = Parser(path_to_inventory)
@ -237,8 +295,8 @@ class Metadata(object):
parser = None
return parser, exc


def _read_inventory_file(self, path_to_inventory):
@staticmethod
def _read_inventory_file(path_to_inventory):
exc = None
try:
inv = read_inventory(path_to_inventory)
@ -247,7 +305,6 @@ class Metadata(object):
return inv, exc



def time_from_header(header):
"""
Function takes in the second line from a .gse file and takes out the date and time from that line.
@ -458,15 +515,22 @@ def read_metadata(path_to_inventory):
# return metadata_objects



def restitute_trace(input_tuple):
def no_metadata(tr, seed_id):
print('no metadata file found '
'for trace {0}'.format(seed_id))
return tr, True

tr, metadata, unit, force = input_tuple

remove_trace = False

seed_id = tr.get_id()

mdata = metadata.get_metadata(seed_id)
mdata = metadata.get_metadata(seed_id, time=tr.stats.starttime)
if not mdata:
return no_metadata(tr, seed_id)

invtype = mdata['invtype']
inobj = mdata['data']

@ -481,8 +545,7 @@ def restitute_trace(input_tuple):
if invtype == 'resp':
fresp = find_in_list(inobj, seed_id)
if not fresp:
raise IOError('no response file found '
'for trace {0}'.format(seed_id))
return no_metadata(tr, seed_id)
fname = fresp
seedresp = dict(filename=fname,
date=stime,
@ -504,9 +567,8 @@ def restitute_trace(input_tuple):
else:
finv = invlist[0]
inventory = read_inventory(finv, format='STATIONXML')
elif invtype == None:
print("No restitution possible, as there are no station-meta data available!")
return tr, True
elif invtype is None:
return no_metadata(tr, seed_id)
else:
remove_trace = True
# apply restitution to data
@ -542,9 +604,6 @@ def restitute_data(data, metadata, unit='VEL', force=False, ncores=0):
takes a data stream and a path_to_inventory and returns the corrected
waveform data stream
:param data: seismic data stream
:param invtype: type of found metadata
:param inobj: either list of metadata files or `obspy.io.xseed.Parser`
object
:param unit: unit to correct for (default: 'VEL')
:param force: force restitution for already corrected traces (default:
False)
@ -553,7 +612,7 @@ def restitute_data(data, metadata, unit='VEL', force=False, ncores=0):

restflag = list()

data = remove_underscores(data)
#data = remove_underscores(data)

# loop over traces
input_tuples = []
@ -562,7 +621,7 @@ def restitute_data(data, metadata, unit='VEL', force=False, ncores=0):
data.remove(tr)

pool = gen_Pool(ncores)
result = pool.map(restitute_trace, input_tuples)
result = pool.imap_unordered(restitute_trace, input_tuples)
pool.close()

for tr, remove_trace in result:
@ -612,7 +671,7 @@ def get_prefilt(trace, tlow=(0.5, 0.9), thi=(5., 2.), verbosity=0):
fny = trace.stats.sampling_rate / 2
fc21 = fny - (fny * thi[0] / 100.)
fc22 = fny - (fny * thi[1] / 100.)
return (tlow[0], tlow[1], fc21, fc22)
return tlow[0], tlow[1], fc21, fc22
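The upper corner frequencies sit a few percent below the Nyquist frequency, so the prefilter adapts to each trace's sampling rate. A worked example for a 100 Hz trace using the defaults above:

import numpy as np
from obspy import Trace

tr = Trace(data=np.zeros(1000))
tr.stats.sampling_rate = 100.0  # Nyquist: 50 Hz
# fc21 = 50 - 50 * 5/100 = 47.5 Hz, fc22 = 50 - 50 * 2/100 = 49.0 Hz
print(get_prefilt(tr))          # (0.5, 0.9, 47.5, 49.0)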


if __name__ == "__main__":

@ -16,7 +16,6 @@ from pylot.core.loc import hyposat
from pylot.core.loc import nll
from pylot.core.loc import velest


# determine system dependent path separator
system_name = platform.system()
if system_name in ["Linux", "Darwin"]:
@ -42,54 +41,3 @@ OUTPUTFORMATS = {'.xml': 'QUAKEML',
LOCTOOLS = dict(nll=nll, hyposat=hyposat, velest=velest, hypo71=hypo71, hypodd=hypodd)


class SetChannelComponents(object):
def __init__(self):
self.setDefaultCompPosition()

def setDefaultCompPosition(self):
# default component order
self.compPosition_Map = dict(Z=2, N=1, E=0)
self.compName_Map = {'3': 'Z',
'1': 'N',
'2': 'E'}

def _getCurrentPosition(self, component):
for key, value in self.compName_Map.items():
if value == component:
return key, value
errMsg = 'getCurrentPosition: Could not find former position of component {}.'.format(component)
raise ValueError(errMsg)

def _switch(self, component, component_alter):
# Without switching, multiple definitions of the same alter_comp are possible
old_alter_comp, _ = self._getCurrentPosition(component)
old_comp = self.compName_Map[component_alter]
if not old_alter_comp == component_alter and not old_comp == component:
self.compName_Map[old_alter_comp] = old_comp
print('switch: Automatically switched component {} to {}'.format(old_alter_comp, old_comp))

def setCompPosition(self, component_alter, component, switch=True):
component_alter = str(component_alter)
if not component_alter in self.compName_Map.keys():
errMsg = 'setCompPosition: Unrecognized alternative component {}. Expecting one of {}.'
raise ValueError(errMsg.format(component_alter, self.compName_Map.keys()))
if not component in self.compPosition_Map.keys():
errMsg = 'setCompPosition: Unrecognized target component {}. Expecting one of {}.'
raise ValueError(errMsg.format(component, self.compPosition_Map.keys()))
print('setCompPosition: set component {} to {}'.format(component_alter, component))
if switch:
self._switch(component, component_alter)
self.compName_Map[component_alter] = component

def getCompPosition(self, component):
return self._getCurrentPosition(component)[0]

def getPlotPosition(self, component):
component = str(component)
if component in self.compPosition_Map.keys():
return self.compPosition_Map[component]
elif component in self.compName_Map.keys():
return self.compPosition_Map[self.compName_Map[component]]
else:
errMsg = 'getCompPosition: Unrecognized component {}. Expecting one of {} or {}.'
raise ValueError(errMsg.format(component, self.compPosition_Map.keys(), self.compName_Map.keys()))
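The class (removed here from defaults.py and re-added at the end of utils.py further below) maps numeric channel codes 1/2/3 onto N/E/Z. A short usage sketch, assuming its post-merge home in pylot.core.util.utils:

from pylot.core.util.utils import SetChannelComponents

compclass = SetChannelComponents()
print(compclass.getCompPosition('Z'))  # '3': numeric code currently mapped to Z
print(compclass.getPlotPosition('1'))  # 1: plot row of N, via the '1' -> 'N' mapping
compclass.setCompPosition('1', 'E')    # remap: channels ending in 1 now count as East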
@ -35,6 +35,7 @@ class Event(ObsPyEvent):
self._refEvent = False
self.get_notes()
self.get_obspy_event_info()
self.dirty = False

def get_notes_path(self):
"""
@ -143,6 +144,7 @@ class Event(ObsPyEvent):
for index, pick in reversed(list(enumerate(self.picks))):
if picktype in str(pick.method_id):
self.picks.pop(index)
self.dirty = True

def addPicks(self, picks):
"""
@ -157,12 +159,12 @@ class Event(ObsPyEvent):
# add ObsPy picks (clear old manual and copy all new manual from pylot)
self.clearObsPyPicks('manual')
self.picks += picks_from_picksdict(self.pylot_picks)
self.dirty = True

def addAutopicks(self, autopicks):
"""
Add automatic picks to event
:param autopicks: automatic picks to add to event
:type autopicks: dict
:return:
:rtype: None
"""
@ -171,6 +173,7 @@ class Event(ObsPyEvent):
# add ObsPy picks (clear old auto and copy all new auto from pylot)
self.clearObsPyPicks('auto')
self.picks += picks_from_picksdict(self.pylot_autopicks)
self.dirty = True

def setPick(self, station, pick):
"""
@ -186,11 +189,13 @@ class Event(ObsPyEvent):
self.pylot_picks[station] = pick
else:
try:
self.pylot_picks.pop(station)
if station in self.pylot_picks:
self.pylot_picks.pop(station)
except Exception as e:
print('Could not remove pick {} from station {}: {}'.format(pick, station, e))
self.clearObsPyPicks('manual')
self.picks += picks_from_picksdict(self.pylot_picks)
self.dirty = True

def setPicks(self, picks):
"""
@ -203,6 +208,7 @@ class Event(ObsPyEvent):
self.pylot_picks = picks
self.clearObsPyPicks('manual')
self.picks += picks_from_picksdict(self.pylot_picks)
self.dirty = True

def getPick(self, station):
"""
@ -237,11 +243,13 @@ class Event(ObsPyEvent):
self.pylot_autopicks[station] = pick
else:
try:
self.pylot_autopicks.pop(station)
if station in self.pylot_autopicks:
self.pylot_autopicks.pop(station)
except Exception as e:
print('Could not remove pick {} from station {}: {}'.format(pick, station, e))
self.clearObsPyPicks('auto')
self.picks += picks_from_picksdict(self.pylot_autopicks)
self.dirty = True

def setAutopicks(self, picks):
"""
@ -254,6 +262,7 @@ class Event(ObsPyEvent):
self.pylot_autopicks = picks
self.clearObsPyPicks('auto')
self.picks += picks_from_picksdict(self.pylot_autopicks)
self.dirty = True

def getAutopick(self, station):
"""
@ -292,6 +301,7 @@ class Event(ObsPyEvent):
try:
outfile = open(filename, 'wb')
cPickle.dump(self, outfile, -1)
self.dirty = False
except Exception as e:
print('Could not pickle PyLoT event. Reason: {}'.format(e))

@ -310,5 +320,6 @@ class Event(ObsPyEvent):
import _pickle as cPickle
infile = open(filename, 'rb')
event = cPickle.load(infile)
event.dirty = False
print('Loaded %s' % filename)
return event

@ -4,6 +4,7 @@
import os
from obspy import UTCDateTime


def check_obspydmt_structure(path):
'''
Check path for obspyDMT event structure.
@ -16,6 +17,7 @@ def check_obspydmt_structure(path):
return True
return False


def check_obspydmt_eventfolder(folder):
try:
time = folder.split('.')[0]
@ -25,6 +27,7 @@ def check_obspydmt_eventfolder(folder):
except Exception as e:
return False, e


def qml_from_obspyDMT(path):
import pickle
from obspy.core.event import Event, Magnitude, Origin
@ -41,4 +44,3 @@ def qml_from_obspyDMT(path):
ev.magnitudes.append(mag)
ev.origins.append(origin)
return ev


@ -33,31 +33,21 @@ class Thread(QThread):
self._executed = False
self._executedError = e
traceback.print_exc()
exctype, value = sys.exc_info ()[:2]
self._executedErrorInfo = '{} {} {}'.\
exctype, value = sys.exc_info()[:2]
self._executedErrorInfo = '{} {} {}'. \
format(exctype, value, traceback.format_exc())
sys.stdout = sys.__stdout__

def showProgressbar(self):
if self.progressText:

# generate widget if not given in init
if not self.pb_widget:
self.pb_widget = QDialog(self.parent())
self.pb_widget.setWindowFlags(Qt.SplashScreen)
self.pb_widget.setModal(True)
# # generate widget if not given in init
# if not self.pb_widget:
# self.pb_widget = ProgressBarWidget(self.parent())
# self.pb_widget.setWindowFlags(Qt.SplashScreen)
# self.pb_widget.setModal(True)

# add button
delete_button = QPushButton('X')
delete_button.clicked.connect(self.exit)
hl = QHBoxLayout()
pb = QProgressBar()
pb.setRange(0, 0)
hl.addWidget(pb)
hl.addWidget(QLabel(self.progressText))
if self.abortButton:
hl.addWidget(delete_button)
self.pb_widget.setLayout(hl)
self.pb_widget.label.setText(self.progressText)
self.pb_widget.show()

def hideProgressbar(self):
@ -75,6 +65,7 @@ class Worker(QRunnable):
'''
Worker class to be run by MultiThread(QThread).
'''

def __init__(self, fun, args,
progressText=None,
pb_widget=None,
@ -82,7 +73,7 @@ class Worker(QRunnable):
super(Worker, self).__init__()
self.fun = fun
self.args = args
#self.kwargs = kwargs
# self.kwargs = kwargs
self.signals = WorkerSignals()
self.progressText = progressText
self.pb_widget = pb_widget
@ -96,9 +87,9 @@ class Worker(QRunnable):
try:
result = self.fun(self.args)
except:
exctype, value = sys.exc_info ()[:2]
exctype, value = sys.exc_info()[:2]
print(exctype, value, traceback.format_exc())
self.signals.error.emit ((exctype, value, traceback.format_exc ()))
self.signals.error.emit((exctype, value, traceback.format_exc()))
else:
self.signals.result.emit(result)
finally:
@ -140,13 +131,13 @@ class MultiThread(QThread):

def run(self):
if self.redirect_stdout:
sys.stdout = self
sys.stdout = self
try:
if not self.ncores:
self.ncores = multiprocessing.cpu_count()
pool = multiprocessing.Pool(self.ncores)
self.data = pool.map_async(self.func, self.args, callback=self.emitDone)
#self.data = pool.apply_async(self.func, self.shotlist, callback=self.emitDone) #emit each time returned
# self.data = pool.apply_async(self.func, self.shotlist, callback=self.emitDone) #emit each time returned
pool.close()
self._executed = True
except Exception as e:

@ -22,11 +22,7 @@ from pylot.styles import style_settings
from scipy.interpolate import splrep, splev
from PySide import QtCore, QtGui

try:
import pyqtgraph as pg
except Exception as e:
print('PyLoT: Could not import pyqtgraph. {}'.format(e))
pg = None
import pyqtgraph as pg

def _pickle_method(m):
if m.im_self is None:
@ -34,6 +30,7 @@ def _pickle_method(m):
else:
return getattr, (m.im_self, m.im_func.func_name)


def getAutoFilteroptions(phase, parameter):
filtername = {'P': 'bpz2',
'S': 'bph2'}
@ -41,9 +38,10 @@ def getAutoFilteroptions(phase, parameter):
print('autoPickParameter: No filter options for phase {}.'.format(phase))
return
freqmin, freqmax = parameter.get(filtername[phase])
filteroptions = FilterOptions(type='bandpass', freq=[freqmin, freqmax], order=4) # order=4 default from obspy
filteroptions = FilterOptions(type='bandpass', freq=[freqmin, freqmax], order=4)  # order=4 default from obspy
return filteroptions


def readDefaultFilterInformation(fname):
"""
Read default filter information from pylot.in file
@ -118,8 +116,12 @@ def gen_Pool(ncores=0):
"""
import multiprocessing

if ncores == 0:
ncores = multiprocessing.cpu_count()
ncores_max = multiprocessing.cpu_count()

if ncores == 0 or ncores > ncores_max:
ncores = ncores_max
if ncores > ncores_max:
print('Reduced number of requested CPU slots to available number: {}'.format(ncores))

print('gen_Pool: Generated multiprocessing Pool with {} cores\n'.format(ncores))
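gen_Pool caps the requested core count at what the machine offers and returns a standard multiprocessing.Pool, which restitute_data above consumes via imap_unordered. A hedged usage sketch with a trivial worker:

from pylot.core.util.utils import gen_Pool

def square(x):  # stand-in for a per-trace worker such as restitute_trace
    return x * x

if __name__ == '__main__':  # guard needed on platforms that spawn worker processes
    pool = gen_Pool(ncores=2)
    result = pool.imap_unordered(square, range(5))
    pool.close()
    print(sorted(result))  # [0, 1, 4, 9, 16]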

@ -397,6 +399,10 @@ def full_range(stream):
:return: minimum start time and maximum end time
:rtype: (`~obspy.core.utcdatetime.UTCDateTime`, `~obspy.core.utcdatetime.UTCDateTime`)
"""
if not stream:
print('full_range: Empty Stream!')
return None, None

min_start = min([trace.stats.starttime for trace in stream])
max_end = max([trace.stats.endtime for trace in stream])

@ -537,7 +543,7 @@ def isSorted(iterable):
False
"""
assert isIterable(iterable), 'object is not iterable; object: {' \
'0}'.format(iterable)
'}'.format(iterable)
if type(iterable) is str:
iterable = [s for s in iterable]
return sorted(iterable) == iterable
@ -675,7 +681,7 @@ def pick_color(picktype, phase, quality=0):
bpc = base_phase_colors(picktype, phase)  # returns dict like {'modifier': 'g', 'rgba': (0, 0, 255, 255)}
rgba = bpc['rgba']
modifier = bpc['modifier']
intensity = 255.*quality/min_quality
intensity = 255. * quality / min_quality
rgba = modify_rgba(rgba, modifier, intensity)
return rgba

@ -787,6 +793,7 @@ def base_phase_colors(picktype, phase):
phasecolors = style_settings.phasecolors
return phasecolors[picktype][phase]


def transform_colors_mpl_str(colors, no_alpha=False):
"""
Transforms rgba color values to a matplotlib string of color values with a range of [0, 1]
@ -805,6 +812,7 @@ def transform_colors_mpl_str(colors, no_alpha=False):
colors_mpl = '({}, {}, {}, {})'.format(*colors_mpl)
return colors_mpl


def transform_colors_mpl(colors):
"""
Transform rgba colors from [0, 255] to [0, 1]
@ -817,6 +825,7 @@ def transform_colors_mpl(colors):
colors_mpl = tuple([color / 255. for color in colors])
return colors_mpl


def remove_underscores(data):
"""
takes a `obspy.core.stream.Stream` object and removes all underscores
@ -826,9 +835,9 @@ def remove_underscores(data):
:return: data stream
:rtype: `~obspy.core.stream.Stream`
"""
for tr in data:
# remove underscores
tr.stats.station = tr.stats.station.strip('_')
#for tr in data:
# # remove underscores
# tr.stats.station = tr.stats.station.strip('_')
return data


@ -926,7 +935,10 @@ def get_stations(data):

def check4rotated(data, metadata=None, verbosity=1):
"""

Check all traces in data. If a trace is not in ZNE rotation (last symbol of channel code is numeric) and the trace
is in the metadata with azimuth and dip, rotate it to classical ZNE orientation.
Rotating the traces requires them to be of the same length, so, all traces will be trimmed to a common length as a
side effect.
:param data: stream object containing seismic traces
:type data: `~obspy.core.stream.Stream`
:param metadata: tuple containing metadata type string and metadata parser object
@ -943,100 +955,59 @@ def check4rotated(data, metadata=None, verbosity=1):

Azimuth and dip are fetched from metadata. To be rotated, traces of a station have to be cut to the same length.
Returns unrotated traces if no metadata is provided
:param wfstream: stream containing seismic traces
:param wfstream: stream containing seismic traces of a station
:type wfstream: `~obspy.core.stream.Stream`
:param metadata: tuple containing metadata type string and metadata parser object
:type metadata: (str, `~obspy.io.xseed.parser.Parser`)
:return: stream object with traditionally oriented traces (ZNE)
:rtype: `~obspy.core.stream.Stream`
"""
try:
# indexing fails if metadata is None
metadata[0]
except TypeError:
if verbosity:
msg = 'Warning: could not rotate traces since no metadata was given\nset Inventory file!'
print(msg)
return wfstream
if metadata[0] is None:
# sometimes metadata is (None, (None,))
if verbosity:
msg = 'Warning: could not rotate traces since no metadata was given\nCheck inventory directory!'
print(msg)
return wfstream
else:
parser = metadata[1]

def get_dip_azimut(parser, trace_id):
"""
Gets azimuth and dip by trace id out of the metadata parser
:param parser: metadata parser object
:type parser: `~obspy.io.xseed.parser.Parser`
:param trace_id: eg. 'BW.RJOB..EHZ',
:type trace_id: str
:return: tuple containing dip and azimuth of the trace corresponding to trace_id
:rtype: (float, float)
"""
dip = None
azimut = None
try:
blockettes = parser._select(trace_id)
except SEEDParserException as e:
print(e)
raise ValueError
for blockette_ in blockettes:
if blockette_.id != 52:
continue
dip = blockette_.dip
azimut = blockette_.azimuth
break
if (dip is None or azimut is None) or (dip == 0 and azimut == 0):
error_msg = 'Dip and azimuth not available for trace_id {}'.format(trace_id)
raise ValueError(error_msg)
return dip, azimut

# check if any traces in this station need to be rotated
trace_ids = [trace.id for trace in wfstream]
for trace_id in trace_ids:
orientation = trace_id[-1]  # last letter if trace id is orientation code, ZNE or 123
if orientation.isnumeric():
# misaligned channels have a number as orientation
azimuts = []
dips = []
for trace_id in trace_ids:
try:
dip, azimut = get_dip_azimut(parser, trace_id)
except ValueError as e:
print(e)
print('Failed to rotate station {}, no azimuth or dip available in metadata'.format(trace_id))
return wfstream
azimuts.append(azimut)
dips.append(dip)
# to rotate all traces must have same length
wfstream = trim_station_components(wfstream, trim_start=True, trim_end=True)
z, n, e = rotate2zne(wfstream[0], azimuts[0], dips[0],
wfstream[1], azimuts[1], dips[1],
wfstream[2], azimuts[2], dips[2])
print('check4rotated: rotated station {} to ZNE'.format(trace_id))
z_index = dips.index(min(dips))  # get z-trace index (dip is measured from 0 to -90)
wfstream[z_index].data = z
wfstream[z_index].stats.channel = wfstream[z_index].stats.channel[0:-1] + 'Z'
del trace_ids[z_index]
for trace_id in trace_ids:
dip, az = get_dip_azimut(parser, trace_id)
trace = wfstream.select(id=trace_id)[0]
if az > 315 or az <= 45 or az > 135 and az <= 225:
trace.data = n
trace.stats.channel = trace.stats.channel[0:-1] + 'N'
elif az > 45 and az <= 135 or az > 225 and az <= 315:
trace.data = e
trace.stats.channel = trace.stats.channel[0:-1] + 'E'
break
else:
continue
orientations = [trace_id[-1] for trace_id in trace_ids]
rotation_required = [orientation.isnumeric() for orientation in orientations]
if any(rotation_required):
t_start = full_range(wfstream)
try:
azimuts = [metadata.get_coordinates(tr_id, t_start)['azimuth'] for tr_id in trace_ids]
dips = [metadata.get_coordinates(tr_id, t_start)['dip'] for tr_id in trace_ids]
except (KeyError, TypeError) as e:
print('Failed to rotate trace {}, no azimuth or dip available in metadata'.format(trace_id))
return wfstream
if len(wfstream) < 3:
print('Failed to rotate Stream {}, not enough components available.'.format(wfstream))
return wfstream
# to rotate all traces must have same length, so trim them
wfstream = trim_station_components(wfstream, trim_start=True, trim_end=True)
z, n, e = rotate2zne(wfstream[0], azimuts[0], dips[0],
wfstream[1], azimuts[1], dips[1],
wfstream[2], azimuts[2], dips[2])
print('check4rotated: rotated trace {} to ZNE'.format(trace_id))
# replace old data with rotated data, change the channel code to ZNE
z_index = dips.index(min(dips))  # get z-trace index, z has minimum dip of -90 (dip is measured from 0 to -90, with -90 being vertical)
wfstream[z_index].data = z
wfstream[z_index].stats.channel = wfstream[z_index].stats.channel[0:-1] + 'Z'
del trace_ids[z_index]
for trace_id in trace_ids:
coordinates = metadata.get_coordinates(trace_id, t_start)
dip, az = coordinates['dip'], coordinates['azimuth']
trace = wfstream.select(id=trace_id)[0]
if az > 315 or az <= 45 or az > 135 and az <= 225:
trace.data = n
trace.stats.channel = trace.stats.channel[0:-1] + 'N'
elif az > 45 and az <= 135 or az > 225 and az <= 315:
trace.data = e
trace.stats.channel = trace.stats.channel[0:-1] + 'E'
return wfstream

if metadata is None:
if verbosity:
msg = 'Warning: could not rotate traces since no metadata was given\nset Inventory file!'
print(msg)
return data
stations = get_stations(data)

for station in stations:  # loop through all stations and rotate data if necessary
wf_station = data.select(station=station)
rotate_components(wf_station, metadata)
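The heavy lifting is ObsPy's rotate2zne, which inverts the orientation matrix built from the three (azimuth, dip) pairs. A minimal sketch with synthetic arrays (the angles are illustrative and must describe linearly independent orientations):

import numpy as np
from obspy.signal.rotate import rotate2zne

d1 = np.random.rand(100)  # component with azimuth 0, dip -90 (vertical)
d2 = np.random.rand(100)  # horizontal component, azimuth 60
d3 = np.random.rand(100)  # horizontal component, azimuth 150 (perpendicular to d2)
z, n, e = rotate2zne(d1, 0.0, -90.0, d2, 60.0, 0.0, d3, 150.0, 0.0)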
@ -1149,7 +1120,7 @@ def loopIdentifyPhase(phase):
"""
from pylot.core.util.defaults import ALTSUFFIX

if phase == None:
if phase is None:
raise NameError('Can not identify phase that is None')

phase_copy = phase
@ -1199,20 +1170,6 @@ def identifyPhaseID(phase):
return identifyPhase(loopIdentifyPhase(phase))


def has_spe(pick):
"""
Check for 'spe' key (symmetric picking error) in dict and return its value if found, else return None
:param pick: pick dictionary
:type pick: dict
:return: value of 'spe' key
:rtype: float or None
"""
if not 'spe' in pick.keys():
return None
else:
return pick['spe']


def check_all_obspy(eventlist):
ev_type = 'obspydmt'
return check_event_folders(eventlist, ev_type)
@ -1245,8 +1202,8 @@ def check_event_folder(path):
folder = path.split('/')[-1]
# for pylot: select only folders that start with 'e', containing two dots and have length 12
if (folder.startswith('e')
and len(folder.split('.')) == 3
and len(folder) == 12):
and len(folder.split('.')) == 3
and len(folder) == 12):
ev_type = 'pylot'
elif check_obspydmt_eventfolder(folder)[0]:
ev_type = 'obspydmt'
@ -1276,3 +1233,56 @@ if __name__ == "__main__":
import doctest

doctest.testmod()


class SetChannelComponents(object):
def __init__(self):
self.setDefaultCompPosition()

def setDefaultCompPosition(self):
# default component order
self.compPosition_Map = dict(Z=2, N=1, E=0)
self.compName_Map = {'3': 'Z',
'1': 'N',
'2': 'E'}

def _getCurrentPosition(self, component):
for key, value in self.compName_Map.items():
if value == component:
return key, value
errMsg = 'getCurrentPosition: Could not find former position of component {}.'.format(component)
raise ValueError(errMsg)

def _switch(self, component, component_alter):
# Without switching, multiple definitions of the same alter_comp are possible
old_alter_comp, _ = self._getCurrentPosition(component)
old_comp = self.compName_Map[component_alter]
if not old_alter_comp == component_alter and not old_comp == component:
self.compName_Map[old_alter_comp] = old_comp
print('switch: Automatically switched component {} to {}'.format(old_alter_comp, old_comp))

def setCompPosition(self, component_alter, component, switch=True):
component_alter = str(component_alter)
if not component_alter in self.compName_Map.keys():
errMsg = 'setCompPosition: Unrecognized alternative component {}. Expecting one of {}.'
raise ValueError(errMsg.format(component_alter, self.compName_Map.keys()))
if not component in self.compPosition_Map.keys():
errMsg = 'setCompPosition: Unrecognized target component {}. Expecting one of {}.'
raise ValueError(errMsg.format(component, self.compPosition_Map.keys()))
print('setCompPosition: set component {} to {}'.format(component_alter, component))
if switch:
self._switch(component, component_alter)
self.compName_Map[component_alter] = component

def getCompPosition(self, component):
return self._getCurrentPosition(component)[0]

def getPlotPosition(self, component):
component = str(component)
if component in self.compPosition_Map.keys():
return self.compPosition_Map[component]
elif component in self.compName_Map.keys():
return self.compPosition_Map[self.compName_Map[component]]
else:
errMsg = 'getCompPosition: Unrecognized component {}. Expecting one of {} or {}.'
raise ValueError(errMsg.format(component, self.compPosition_Map.keys(), self.compName_Map.keys()))
File diff suppressed because it is too large
@ -5,18 +5,18 @@
# the base color
phasecolors = {
'manual': {
'P':{
'P': {
'rgba': (0, 0, 255, 255),
'modifier': 'g'},
'S':{
'S': {
'rgba': (255, 0, 0, 255),
'modifier': 'b'}
},
'auto':{
'P':{
'auto': {
'P': {
'rgba': (140, 0, 255, 255),
'modifier': 'g'},
'S':{
'S': {
'rgba': (255, 140, 0, 255),
'modifier': 'b'}
}
@ -24,8 +24,8 @@ phasecolors = {

# Set plot colors and stylesheet for each style
stylecolors = {
'default':{
'linecolor':{
'default': {
'linecolor': {
'rgba': (0, 0, 0, 255)},
'background': {
'rgba': (255, 255, 255, 255)},
@ -67,4 +67,3 @@ stylecolors = {
'filename': 'bright.qss'}
}
}

setup.py
@ -8,7 +8,7 @@ setup(
packages=['pylot', 'pylot.core', 'pylot.core.loc', 'pylot.core.pick',
'pylot.core.io', 'pylot.core.util', 'pylot.core.active',
'pylot.core.analysis', 'pylot.testing'],
requires=['obspy', 'PySide', 'matplotlib', 'numpy'],
requires=['obspy', 'PySide', 'matplotlib', 'numpy', 'scipy', 'pyqtgraph'],
url='dummy',
license='LGPLv3',
author='Sebastian Wehling-Benatelli',
tests/__init__.py
@ -0,0 +1,27 @@
# -*- coding: utf-8 -*-
# --------------------------------------------------------
# Purpose: Convenience imports for PyLoT
#
'''
================================================
PyLoT - the Python picking and Localization Tool
================================================

This python library contains a graphical user interface for picking
seismic phases. This software needs ObsPy (http://github.com/obspy/obspy/wiki)
and the Qt4 libraries to be installed first.

PILOT has been developed in Mathworks' MatLab. In order to distribute
PILOT without facing portability problems, it has been decided to re-
develop the software package in Python. The great work of the ObsPy
group allows easy handling of a bunch of seismic data and PyLoT will
benefit a lot compared to the former MatLab version.

The development of PyLoT is part of the joint research project MAGS2.

:copyright:
The PyLoT Development Team
:license:
GNU Lesser General Public License, Version 3
(http://www.gnu.org/copyleft/lesser.html)
'''
@ -1,32 +1,59 @@
|
||||
import unittest
|
||||
import os
|
||||
|
||||
from obspy import UTCDateTime
|
||||
from obspy.io.xseed.utils import SEEDParserException
|
||||
from obspy.io.xseed import Parser
|
||||
from pylot.core.util.dataprocessing import Metadata
|
||||
from tests.utils import HidePrints
|
||||
|
||||
|
||||
class TestMetadata(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.station_id = 'BW.WETR..HH'
|
||||
metadata_folder = 'metadata1'
|
||||
self.time = UTCDateTime('2012-08-01')
|
||||
metadata_folder = os.path.join('test_data', 'dless_multiple_files', 'metadata1')
|
||||
self.m = Metadata(metadata_folder)
|
||||
|
||||
    def test_get_coordinates_success(self):
-        expected = {'Z': {u'elevation': 607.0, u'longitude': 12.87571, u'local_depth': 0.0, u'azimuth': 0.0, u'latitude': 49.14502, u'dip': -90.0},
-                    'E': {u'azimuth': 90.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502, u'local_depth': 0.0, u'longitude': 12.87571},
-                    'N': {u'azimuth': 0.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502, u'local_depth': 0.0, u'longitude': 12.87571}
+        expected = {'Z': {u'elevation': 607.0, u'longitude': 12.87571, u'local_depth': 0.0, u'azimuth': 0.0,
+                          u'latitude': 49.14502, u'dip': -90.0},
+                    'E': {u'azimuth': 90.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502,
+                          u'local_depth': 0.0, u'longitude': 12.87571},
+                    'N': {u'azimuth': 0.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502, u'local_depth': 0.0,
+                          u'longitude': 12.87571}
                    }
        result = {}
        for channel in ('Z', 'N', 'E'):
-            coords = self.m.get_coordinates(self.station_id+channel)
+            with HidePrints():
+                coords = self.m.get_coordinates(self.station_id+channel, time=self.time)
            result[channel] = coords
            self.assertDictEqual(result[channel], expected[channel])

    def test_get_coordinates_success_no_time(self):
        expected = {'Z': {u'elevation': 607.0, u'longitude': 12.87571, u'local_depth': 0.0, u'azimuth': 0.0,
                          u'latitude': 49.14502, u'dip': -90.0},
                    'E': {u'azimuth': 90.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502,
                          u'local_depth': 0.0, u'longitude': 12.87571},
                    'N': {u'azimuth': 0.0, u'dip': 0.0, u'elevation': 607.0, u'latitude': 49.14502, u'local_depth': 0.0,
                          u'longitude': 12.87571}
                    }
        result = {}
        for channel in ('Z', 'N', 'E'):
            with HidePrints():
                coords = self.m.get_coordinates(self.station_id+channel)
            result[channel] = coords
            self.assertDictEqual(result[channel], expected[channel])
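
Taken together, the two tests above pin down the Metadata API this commit settles on: construct the object from a folder of dataless SEED files and query station coordinates per seed id, optionally at a given time. A minimal usage sketch; the folder path is a placeholder:

    from obspy import UTCDateTime
    from pylot.core.util.dataprocessing import Metadata

    m = Metadata('path/to/inventory_folder')  # folder containing dataless SEED files
    coords = m.get_coordinates('BW.WETR..HHZ', time=UTCDateTime('2012-08-01'))
    print(coords['latitude'], coords['longitude'], coords['elevation'])
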
class TestMetadataAdding(unittest.TestCase):
    """Tests if adding files and directories to a metadata object works."""

    def setUp(self):
        self.station_id = 'BW.WETR..HH'
-        self.metadata_folders = ('metadata1', 'metadata2')
+        self.metadata_folders = (os.path.join('test_data', 'dless_multiple_files', 'metadata1'),
+                                 os.path.join('test_data', 'dless_multiple_files', 'metadata2'))
        self.m = Metadata()

    def test_add_inventory_folder(self):
@ -43,8 +70,10 @@ class TestMetadataAdding(unittest.TestCase):
        fpath = os.path.join(self.metadata_folders[0], 'DATALESS.BW.WETR..HHZ')
        self.m.add_inventory_file(fpath)
        # adding an inventory file should append its folder to the list of inventories and the file to the
        # list of inventory files
-        self.assertEqual(['metadata1/DATALESS.BW.WETR..HHZ'], self.m.inventory_files.keys())  # does the filename exist in inventory files?
-        self.assertEqual(['data', 'invtype'], self.m.inventory_files['metadata1/DATALESS.BW.WETR..HHZ'].keys())  # is the required information attached to the filename?
+        self.assertEqual([os.path.join(self.metadata_folders[0], 'DATALESS.BW.WETR..HHZ')],
+                         self.m.inventory_files.keys())  # does the filename exist in inventory files?
+        self.assertEqual(['data', 'invtype'], self.m.inventory_files[os.path.join(self.metadata_folders[0],
+                         'DATALESS.BW.WETR..HHZ')].keys())  # is the required information attached to the filename?
        self.assertDictEqual({}, self.m.seed_ids)
        self.assertEqual([self.metadata_folders[0]], self.m.inventories)

@ -66,7 +95,8 @@ class TestMetadataRemoval(unittest.TestCase):

    def setUp(self):
        self.station_id = 'BW.WETR..HH'
-        self.metadata_folders = ('metadata1', 'metadata2')
+        self.metadata_folders = (os.path.join('test_data', 'dless_multiple_files', 'metadata1'),
+                                 os.path.join('test_data', 'dless_multiple_files', 'metadata2'))
        self.m = Metadata()

    def test_remove_all_inventories(self):
@ -93,7 +123,8 @@ class TestMetadataRemoval(unittest.TestCase):
        exist in the instance."""
        # add multiple inventories
        self.m.add_inventory(self.metadata_folders[0])
-        self.m.remove_inventory('metadata_not_existing')
+        with HidePrints():
+            self.m.remove_inventory('metadata_not_existing')
        self.assertIn(self.metadata_folders[0], self.m.inventories)

    def isEmpty(self, metadata):
@ -102,3 +133,202 @@ class TestMetadataRemoval(unittest.TestCase):
        self.assertDictEqual({}, metadata.seed_ids)
        self.assertEqual([], metadata.inventories)


class TestMetadata_read_single_file(unittest.TestCase):

    def setUp(self):
        self.station_id = 'BW.WETR..HHZ'
        self.metadata_folders = (os.path.join('test_data', 'dless_multiple_files', 'metadata1'),
                                 os.path.join('test_data', 'dless_multiple_files', 'metadata2'))
        self.metadata_paths = []
        self.m = Metadata()

    def test_read_single_file(self):
        """Test if reading a single file works"""
        fname = os.path.join(self.metadata_folders[0], 'DATALESS.'+self.station_id)
        with HidePrints():
            res = self.m.read_single_file(fname)
        # method should return True if file is successfully read
        self.assertTrue(res)
        # list of inventories (folders) should be empty
        self.assertEqual([], self.m.inventories)
        # list of inventory files should contain the added file
        self.assertIn(fname, self.m.inventory_files.keys())
        self.assertEqual({}, self.m.seed_ids)

    def test_read_single_file_invalid_path(self):
        """Test if reading from a non-existing file fails. The filename should not be
        added to the metadata object"""
        fname = os.path.join("this", "path", "doesnt", "exist")
        with HidePrints():
            res = self.m.read_single_file(fname)
        # method should return None if file reading fails
        self.assertIsNone(res)
        # list of inventories (folders) should be empty
        self.assertEqual([], self.m.inventories)
        # list of inventory files should not contain the added file
        self.assertNotIn(fname, self.m.inventory_files.keys())
        self.assertEqual({}, self.m.seed_ids)

    def test_read_single_file_multiple_times(self):
        """Test if reading a file twice doesn't add it twice to the metadata object"""
        fname = os.path.join(self.metadata_folders[0], 'DATALESS.'+self.station_id)
        with HidePrints():
            res1 = self.m.read_single_file(fname)
            res2 = self.m.read_single_file(fname)
        self.assertTrue(res1)
        self.assertIsNone(res2)
        self.assertItemsEqual([fname], self.m.inventory_files.keys())
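
The three tests above define the contract of read_single_file: True on the first successful read, None on a bad path or a repeated read. A brief sketch of assembling a Metadata object file by file; the path is a placeholder:

    from pylot.core.util.dataprocessing import Metadata

    m = Metadata()
    ok = m.read_single_file('metadata1/DATALESS.BW.WETR..HHZ')  # placeholder path
    if ok:  # True on the first successful read; None on failure or a duplicate
        print(m.inventory_files.keys())
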
class TestMetadataMultipleTime(unittest.TestCase):
    """Test if stations with multiple metadata entries in a single file are handled correctly.
    The user must specify the time for which metadata is requested.

    The station ROTT has metadata available for multiple time spans:
    LE.ROTT..HNE | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-01-08 - 2015-03-19 | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNE | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-03-19 - | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNN | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-01-08 - 2015-03-19 | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNN | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-03-19 - | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNZ | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-01-08 - 2015-03-19 | Lat: 49.1, Lng: 8.1
    LE.ROTT..HNZ | 200.00 Hz | Titan 4g-EDR-209, Very Low gain, 200 sps | 2015-03-19 - | Lat: 49.1, Lng: 8.1
    """

    def setUp(self):
        self.seed_id = 'LE.ROTT..HN'
        path = os.path.dirname(__file__)  # gets path to currently running script
        metadata = os.path.join('test_data', 'dless_multiple_times', 'MAGS2_LE_ROTT.dless')  # specific subfolder of test data
        metadata_path = os.path.join(path, metadata)
        self.m = Metadata(metadata_path)
        self.p = Parser(metadata_path)

    def test_get_metadata_works_without_datetime(self):
        """Test if get_metadata works if multiple metadata entries are available but no time is
        specified."""
        for channel in ('Z', 'N', 'E'):
            with HidePrints():
                md = self.m.get_metadata(self.seed_id + channel)
            self.assertDictEqual(md['data'].get_inventory(), self.p.get_inventory())

    def test_get_metadata_works_with_first_datetime(self):
        """Test if get_metadata works if multiple metadata entries are available and the older time is specified."""
        t = UTCDateTime('2015-02-08')
        for channel in ('Z', 'N', 'E'):
            with HidePrints():
                md = self.m.get_metadata(self.seed_id + channel, t)
            self.assertDictEqual(md['data'].get_inventory(), self.p.get_inventory())

    def test_get_metadata_fails_when_time_before_starttime(self):
        """Test if get_metadata returns None when given a date that is before the start date
        of the metadata"""
        with HidePrints():
            md = self.m.get_metadata(self.seed_id, UTCDateTime('1960-07-20'))
        self.assertIs(md, None)

    def test_get_metadata_invalid_seed_id(self):
        """Test if get_metadata returns None when asked for a seed id that does not exist"""
        with HidePrints():
            res = self.m.get_metadata("this.doesnt..exist")
        self.assertIsNone(res)
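
These tests fix the time-handling semantics of get_metadata: the returned dict carries the matching obspy Parser under the 'data' key, and None signals that no metadata epoch matches. A sketch; the path is a placeholder:

    from obspy import UTCDateTime
    from pylot.core.util.dataprocessing import Metadata

    m = Metadata('path/to/MAGS2_LE_ROTT.dless')  # placeholder path
    md = m.get_metadata('LE.ROTT..HNZ', UTCDateTime('2015-02-08'))
    if md is not None:  # None if no epoch matches the requested time
        inventory = md['data'].get_inventory()  # md['data'] is an obspy Parser
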
class TestMetadataMultipleEntries(unittest.TestCase):
    """
    The station KB.TMO07 has changed instruments multiple times.
    Networks:
        KB (KB network)
    Stations:
        KB.TMO07 (Karlsruhe GPI)
    Channels:
        KB.TMO07.00.BHE | 50.00 Hz | Streckeisen KABBA-STS-2 | 2004-12-06 - 2005-04-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHE | 50.00 Hz | Streckeisen KABBA-STS-2 | 2005-04-18 - 2006-07-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-10-10 - 2006-11-14 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-11-24 - 2007-01-12 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-18 - 2007-03-15 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-25 - 2007-11-21 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHE | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-11-21 - 2008-01-17 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHN | 50.00 Hz | Streckeisen KABBA-STS-2 | 2004-12-06 - 2005-04-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHN | 50.00 Hz | Streckeisen KABBA-STS-2 | 2005-04-18 - 2006-07-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-10-10 - 2006-11-14 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-11-24 - 2007-01-12 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-18 - 2007-03-15 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-25 - 2007-11-21 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHN | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-11-21 - 2008-01-17 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHZ | 50.00 Hz | Streckeisen KABBA-STS-2 | 2004-12-06 - 2005-04-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHZ | 50.00 Hz | Streckeisen KABBA-STS-2 | 2005-04-18 - 2006-07-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-10-10 - 2006-11-14 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2006-11-24 - 2007-01-12 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-18 - 2007-03-15 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-25 - 2007-11-21 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.BHZ | 50.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-11-21 - 2008-01-17 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-12 - 2007-01-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-10 - 2007-10-25 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 100.00 Hz | Streckeisen KABBA-STS-2 | 2008-07-11 - 2008-12-05 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 100.00 Hz | Streckeisen KABBA-STS-2 | 2009-05-12 - 2010-02-15 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-02-15 - 2010-04-07 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 100.00 Hz | Lennartz KABBA-LE-3D/1 | 2010-04-07 - 2010-08-03 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-08-05 - 2010-12-20 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-20 - 2010-12-22 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-22 - 2011-04-02 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 200.00 Hz | Streckeisen KABBA-STS-2 | 2011-04-15 - 2012-05-07 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHE | 200.00 Hz | Streckeisen KABBA-STS-2 | 2012-05-07 - | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-12 - 2007-01-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-10 - 2007-10-25 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 100.00 Hz | Streckeisen KABBA-STS-2 | 2008-07-11 - 2008-12-05 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 100.00 Hz | Streckeisen KABBA-STS-2 | 2009-05-12 - 2010-02-15 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-02-15 - 2010-04-07 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 100.00 Hz | Lennartz KABBA-LE-3D/1 | 2010-04-07 - 2010-08-03 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-08-05 - 2010-12-20 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-20 - 2010-12-22 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-22 - 2011-04-02 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 200.00 Hz | Streckeisen KABBA-STS-2 | 2011-04-15 - 2012-05-07 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHN | 200.00 Hz | Streckeisen KABBA-STS-2 | 2012-05-07 - | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-01-12 - 2007-01-18 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 100.00 Hz | Lennartz KABBA-LE-3D/5 | 2007-10-10 - 2007-10-25 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 100.00 Hz | Streckeisen KABBA-STS-2 | 2008-07-11 - 2008-12-05 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 100.00 Hz | Streckeisen KABBA-STS-2 | 2009-05-12 - 2010-02-15 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-02-15 - 2010-04-07 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 100.00 Hz | Lennartz KABBA-LE-3D/1 | 2010-04-07 - 2010-08-03 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-08-05 - 2010-12-20 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 100.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-20 - 2010-12-22 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 200.00 Hz | Streckeisen KABBA-STS-2 | 2010-12-22 - 2011-04-02 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 200.00 Hz | Streckeisen KABBA-STS-2 | 2011-04-15 - 2012-05-07 | Lat: 49.0, Lng: 8.4
        KB.TMO07.00.HHZ | 200.00 Hz | Streckeisen KABBA-STS-2 | 2012-05-07 - | Lat: 49.0, Lng: 8.4
    """

    def setUp(self):
        self.seed_id = 'KB.TMO07.00.HHZ'
        path = os.path.dirname(__file__)  # gets path to currently running script
        metadata = os.path.join('test_data', 'dless_multiple_instruments', 'MAGS2_KB_TMO07.dless')  # specific subfolder of test data
        metadata_path = os.path.join(path, metadata)
        self.m = Metadata(metadata_path)
        self.p = Parser(metadata_path)

    def test_get_paz_current_time(self):
        """Test if getting the paz from the metadata object with the current time works"""
        t = UTCDateTime()
        with HidePrints():
            pazm = self.m.get_paz(self.seed_id, t)
        pazp = self.p.get_paz(self.seed_id, t)
        self.assertEqual(pazm, pazp)

    def test_get_paz_past(self):
        """Test if getting paz from the metadata object with a time in the past works"""
        t = UTCDateTime('2007-01-13')
        with HidePrints():
            pazm = self.m.get_paz(self.seed_id, t)
        pazp = self.p.get_paz(self.seed_id, t)
        self.assertEqual(pazm, pazp)

    def test_get_paz_time_not_existing(self):
        """Test if getting paz from metadata at a time where there is no metadata
        available fails correctly"""
        with self.assertRaises(SEEDParserException):
            with HidePrints():
                self.m.get_paz(self.seed_id, UTCDateTime('1990-1-1'))

    def test_get_paz_seed_id_not_existing(self):
        """Test if getting paz for a non-existing seed id returns None as expected."""
        with HidePrints():
            res = self.m.get_paz('This.doesnt..exist', UTCDateTime())
        self.assertIsNone(res)
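
The paz tests assert parity between the cached Metadata lookup and a direct obspy Parser query. Roughly, with the path as a placeholder:

    from obspy import UTCDateTime
    from obspy.io.xseed import Parser
    from pylot.core.util.dataprocessing import Metadata

    path = 'path/to/MAGS2_KB_TMO07.dless'  # placeholder path
    t = UTCDateTime('2007-01-13')  # falls in a Lennartz epoch, see the table above
    paz = Metadata(path).get_paz('KB.TMO07.00.BHZ', t)
    assert paz == Parser(path).get_paz('KB.TMO07.00.BHZ', t)
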
File diff suppressed because one or more lines are too long

52
tests/utils.py
Normal file
@ -0,0 +1,52 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Utilities/helpers for testing"""

import sys
import os


class HidePrints:
    """
    Context manager that hides all standard output within its body.
    The optional hide_prints argument can be used to quickly enable printing during debugging of tests.


    Use (this will suppress all console output of noisy_function):
    from tests.utils import HidePrints
    with HidePrints():
        noisy_function()
    """

    @staticmethod
    def hide(func, *args, **kwargs):
        """Decorator that hides all prints of the decorated function.

        Use:
        from tests.utils import HidePrints
        @HidePrints.hide
        def noise():
            print("NOISE")
        """

        def silencer(*args, **kwargs):
            with HidePrints():
                return func(*args, **kwargs)
        return silencer

    def __init__(self, hide_prints=True):
        """Create object with hide_prints=False to disable print hiding"""
        self.hide = hide_prints

    def __enter__(self):
        """Redirect stdout to /dev/null, save old stdout"""
        if self.hide:
            self._original_stdout = sys.stdout
            devnull = open(os.devnull, "w")
            sys.stdout = devnull

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Reinstate old stdout"""
        if self.hide:
            sys.stdout = self._original_stdout
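
Usage follows the docstrings above; for instance:

    from tests.utils import HidePrints

    with HidePrints():
        print("this line is swallowed")

    with HidePrints(hide_prints=False):
        print("this line is shown - handy while debugging a test")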