[merge] feature/refactor into develop
commit 6b9231abd3

PyLoT.py (2 changed lines)
@@ -62,7 +62,7 @@ from pylot.core.io.data import Data
 from pylot.core.io.inputs import FilterOptions, PylotParameter
 from autoPyLoT import autoPyLoT
 from pylot.core.pick.compare import Comparison
-from pylot.core.pick.utils import getQualityFromUncertainty
+from pylot.core.pick.utils import symmetrize_error, getQualityFromUncertainty, getPickQuality, get_quality_class
 from pylot.core.io.phases import picksdict_from_picks
 import pylot.core.loc.nll as nll
 from pylot.core.util.errors import DatastructureError, \
@@ -127,9 +127,7 @@ class PylotParameter(object):
         :return:
         :rtype: bool
         """
-        if parameter in self.__parameter.keys():
-            return True
-        return False
+        return parameter in self.__parameter.keys()

     def get(self, *args):
         """
@@ -944,7 +944,7 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
    :rtype:
    """

-    from pylot.core.pick.utils import getQualityFromUncertainty
+    from pylot.core.pick.utils import get_quality_class
    from pylot.core.util.utils import loopIdentifyPhase, identifyPhase

    # read all onset weights
@@ -992,7 +992,7 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
    for Pick in arrivals_copy:
        phase = identifyPhase(loopIdentifyPhase(Pick.phase_hint))
        if phase == 'P':
-            Pqual = getQualityFromUncertainty(Pick.time_errors.uncertainty, ErrorsP)
+            Pqual = get_quality_class(Pick.time_errors.uncertainty, ErrorsP)
            if Pqual == 0:
                Pw0.append(Pick.time_errors.uncertainty)
            elif Pqual == 1:
@@ -1004,7 +1004,7 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
            elif Pqual == 4:
                Pw4.append(Pick.time_errors.uncertainty)
        elif phase == 'S':
-            Squal = getQualityFromUncertainty(Pick.time_errors.uncertainty, ErrorsS)
+            Squal = get_quality_class(Pick.time_errors.uncertainty, ErrorsS)
            if Squal == 0:
                Sw0.append(Pick.time_errors.uncertainty)
            elif Squal == 1:
File diff suppressed because it is too large
@@ -243,6 +243,13 @@ class AICcf(CharacteristicFunction):

 class HOScf(CharacteristicFunction):

+    def __init__(self, data, cut, pickparams):
+        """
+        Call parent constructor while extracting the right parameters:
+        :param pickparams: PylotParameters instance
+        """
+        super(HOScf, self).__init__(data, cut, pickparams["tlta"], pickparams["hosorder"])
+
     def calcCF(self, data):
         """
         Function to calculate skewness (statistics of order 3) or kurtosis
@@ -299,6 +306,9 @@ class HOScf(CharacteristicFunction):

 class ARZcf(CharacteristicFunction):

+    def __init__(self, data, cut, t1, t2, pickparams):
+        super(ARZcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Parorder"], fnoise=pickparams["addnoise"])
+
     def calcCF(self, data):
         """
         function used to calculate the AR prediction error from a single vertical trace. Can be used to pick
@@ -431,6 +441,9 @@ class ARZcf(CharacteristicFunction):

 class ARHcf(CharacteristicFunction):

+    def __init__(self, data, cut, t1, t2, pickparams):
+        super(ARHcf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"], fnoise=pickparams["addnoise"])
+
     def calcCF(self, data):
         """
         Function to calculate a characteristic function using autoregressive modelling of the waveform of
@@ -580,6 +593,9 @@ class ARHcf(CharacteristicFunction):

 class AR3Ccf(CharacteristicFunction):

+    def __init__(self, data, cut, t1, t2, pickparams):
+        super(AR3Ccf, self).__init__(data, cut, t1=t1, t2=t2, order=pickparams["Sarorder"], fnoise=pickparams["addnoise"])
+
     def calcCF(self, data):
         """
         Function to calculate a characteristic function using autoregressive modelling of the waveform of
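Taken together, the constructor additions above switch the characteristic-function classes from individual keyword arguments to a single pickparams mapping. A minimal sketch of the new call pattern, assuming the classes live in pylot.core.pick.charfuns (the file name is not shown in this view), that the cut argument is a (start, end) window in seconds, and using made-up placeholder values:

    import numpy as np
    from obspy import Trace, Stream
    from pylot.core.pick.charfuns import HOScf, ARZcf  # assumed module path

    # synthetic single-trace stream standing in for real waveform data
    tr = Trace(data=np.random.randn(2000))
    tr.stats.sampling_rate = 20.0
    z = Stream(traces=[tr])

    # placeholder values; only the keys read by the constructors above matter
    pickparams = {"tlta": 10.0, "hosorder": 4, "Parorder": 2, "Sarorder": 4, "addnoise": 0.001}

    hoscf = HOScf(z, (5.0, 95.0), pickparams)                              # reads tlta and hosorder
    arzcf = ARZcf(z, (5.0, 95.0), t1=2.0, t2=10.0, pickparams=pickparams)  # reads Parorder and addnoise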
@@ -10,7 +10,7 @@

 import matplotlib.pyplot as plt
 import numpy as np
-import warnings
+from scipy.signal import argrelmax
 from obspy.core import Stream, UTCDateTime

 from pylot.core.util.utils import real_Bool, real_None, SetChannelComponents
@@ -417,9 +417,9 @@ def getSNR(X, TSNR, t1, tracenum=0):

    assert isinstance(X, Stream), "%s is not a stream object" % str(X)

-    SNR = None
-    SNRdB = None
-    noiselevel = None
+    SNR = -1
+    SNRdB = -1
+    noiselevel = -1

    x = X[tracenum].data
    npts = X[tracenum].stats.npts
@@ -489,13 +489,13 @@ def getsignalwin(t, t1, tsignal):
    Function to extract data out of time series for signal level calculation.
    Returns an array of indices.
    :param t: array of time stamps
-    :type t: `numpy.ndarray`
+    :type t: `~numpy.ndarray`
    :param t1: time from which relative to it signal window is extracted
    :type t1: float
    :param tsignal: length of time window [s] for signal level calculation
    :type tsignal: float
-    :return: indices of signal window i t
-    :rtype: `numpy.ndarray`
+    :return: indices of signal window in t
+    :rtype: `~numpy.ndarray`
    """

    # get signal window
@@ -507,6 +507,26 @@ def getsignalwin(t, t1, tsignal):
    return isignal


+def getslopewin(Tcf, Pick, tslope):
+    """
+    Function to extract slope window out of time series
+
+    >>> getslopewin(np.arange(15., 85.), 30.0, 10.0)
+    array([15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25])
+
+    :param Tcf:
+    :type Tcf:
+    :param Pick:
+    :type Pick:
+    :param tslope:
+    :type tslope:
+    :return:
+    :rtype: `numpy.ndarray`
+    """
+    # TODO: fill out docstring
+    slope = np.where( (Tcf <= min(Pick + tslope, Tcf[-1])) & (Tcf >= Pick) )
+    return slope[0]
+
 def getResolutionWindow(snr, extent):
    """
    Produce the half of the time resolution window width from given SNR value
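getslopewin, added above, returns the sample indices between the pick and pick + tslope. A small check of that behaviour, mirroring the doctest (the import path pylot.core.pick.utils is assumed to be the module shown here):

    import numpy as np
    from pylot.core.pick.utils import getslopewin  # assumed import path

    Tcf = np.arange(15., 85.)                # time axis of a characteristic function
    islope = getslopewin(Tcf, 30.0, 10.0)    # window from Pick=30.0 s to 40.0 s
    print(islope[0], islope[-1])             # 15 25  (indices into Tcf)
    print(Tcf[islope][0], Tcf[islope][-1])   # 30.0 40.0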
@@ -736,7 +756,7 @@ def RMS(X):
    return np.sqrt(np.sum(np.power(X, 2)) / len(X))


-def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fig=None, linecolor='k'):
+def checksignallength(X, pick, minsiglength, pickparams, iplot=0, fig=None, linecolor='k'):
    """
    Function to detect spuriously picked noise peaks.

@@ -747,14 +767,10 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fig=None, linecolor='k'):
    :type X: `~obspy.core.stream.Stream`
    :param pick: initial (AIC) P onset time
    :type pick: float
-    :param TSNR: length of time windows around initial pick [s]
-    :type TSNR: (T_noise, T_gap, T_signal)
    :param minsiglength: minium required signal length [s] to declare pick as P onset
    :type minsiglength: float
-    :param nfac: noise factor (nfac * noise level = threshold)
-    :type nfac: float
-    :param minpercent: minimum required percentage of samples above calculated threshold
-    :type minpercent: float
+    :param pickparams: PylotParameter instance that holds the current picker settings loaded from a .in file
+    :type pickparams: PylotParameter
    :param iplot: iplot, if iplot > 1, results are shown in figure
    :type iplot: int
    :param fig: Matplotlib figure to plot results in
@@ -766,6 +782,19 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fig=None, linecolor='k'):
    :rtype: int
    """

+    """
+    Extract additional parameters from pickparams
+    :param TSNR: length of time windows around initial pick [s]
+    :type TSNR: (T_noise, T_gap, T_signal)
+    :param nfac: noise factor (nfac * noise level = threshold)
+    :type nfac: float
+    :param minpercent: minimum required percentage of samples above calculated threshold
+    :type minpercent: float
+    """
+    TSNR = pickparams["tsnrz"]
+    nfac = pickparams["noisefactor"]
+    minpercent = pickparams["minpercent"]
+
    plt_flag = 0
    try:
        iplot = int(iplot)
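With the hunks above, checksignallength no longer takes TSNR, nfac and minpercent directly; it reads them from pickparams under the keys tsnrz, noisefactor and minpercent. A rough sketch of a call under the new signature; the synthetic stream and numbers are placeholders, and passing a plain dict is an assumption that only holds as long as the function merely indexes pickparams as shown above:

    import numpy as np
    from obspy import Trace, Stream
    from pylot.core.pick.utils import checksignallength  # assumed import path

    tr = Trace(data=np.abs(np.random.randn(3000)))
    tr.stats.sampling_rate = 100.0
    stream = Stream(traces=[tr])

    params = {"tsnrz": (3.0, 0.1, 2.0, 1.0),  # [tnoise, tsafety, tsignal, tslope], layout taken from the tsnrz setting
              "noisefactor": 1.1,
              "minpercent": 10.0}
    returnflag = checksignallength(stream, pick=10.0, minsiglength=1.0, pickparams=params, iplot=0)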
@@ -1034,7 +1063,7 @@ def jackknife(X, phi, h=1):
    return PHI_jack, PHI_pseudo, PHI_sub


-def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None, linecolor='k'):
+def checkZ4S(X, pick, pickparams, iplot, fig=None, linecolor='k'):
    """
    Function to compare energy content of vertical trace with
    energy content of horizontal traces to detect spuriously
@@ -1051,11 +1080,8 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None, linecolor='k'):
    :type X: `~obspy.core.stream.Stream`
    :param pick: initial (AIC) P onset time
    :type pick: float
-    :param zfac: factor for threshold determination, vertical energy must
-    exceed coda level times zfac to declare a pick as P onset
-    :type zfac: float
-    :param checkwin: window length [s] for calculating P-coda engergy content
-    :type checkwin: float
+    :param pickparams: PylotParameter instance that holds the current picker settings loaded from a .in file
+    :type pickparams: PylotParameter
    :param iplot: if iplot > 1, energy content and threshold are shown
    :type iplot: int
    :param fig: Matplotlib figure to plot results in
@@ -1066,6 +1092,17 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None, linecolor='k'):
    :rtype: int
    """

+    """
+    Extract required parameters from pickparams
+    :param zfac: factor for threshold determination, vertical energy must
+    exceed coda level times zfac to declare a pick as P onset
+    :type zfac: float
+    :param checkwin: window length [s] for calculating P-coda engergy content
+    :type checkwin: float
+    """
+    zfac = pickparams["zfac"]
+    checkwin = pickparams["tsnrz"][2]
+
    plt_flag = 0
    try:
        iplot = int(iplot)
@@ -1250,7 +1287,7 @@ def getQualityFromSNR(snrdb):
    return quality_modifier


-def getQualityFromUncertainty(uncertainty, Errors):
+def get_quality_class(uncertainty, weight_classes):
    """
    Script to transform uncertainty into quality classes 0-4 regarding adjusted time errors
    :param uncertainty: symmetric picking error of picks
@@ -1260,7 +1297,182 @@ def getQualityFromUncertainty(uncertainty, Errors):
    :return: quality of pick (0-4)
    :rtype: int
    """
+    try:
+        # create generator expression containing all indices of values in weight classes that are >= than uncertainty.
+        # call next on it once to receive first value
+        quality = next(i for i, v in enumerate(weight_classes) if v >= uncertainty)
+    except StopIteration:
+        # raised when uncertainty is larger than all values in weight_classes
+        # set quality to max possible value
+        quality = len(weight_classes)
+    return quality
+
+
+def set_NaNs_to(data, nan_value):
+    """
+    Replace all NaNs in data with nan_value
+    :param data: array holding data
+    :type data: `~numpy.ndarray`
+    :param nan_value: value which all NaNs are set to
+    :type nan_value: float, int
+    :return: data array with all NaNs replaced with nan_value
+    :rtype: `~numpy.ndarray`
+    """
+    nn = np.isnan(data)
+    if np.any(nn):
+        data[nn] = nan_value
+    return data
+
+
+def taper_cf(cf):
+    """
+    Taper cf data to get rid off of side maximas
+    :param cf: characteristic function data
+    :type cf: `~numpy.ndarray`
+    :return: tapered cf
+    :rtype: `~numpy.ndarray`
+    """
+    tap = np.hanning(len(cf))
+    return tap * cf
+
+
+def cf_positive(cf):
+    """
+    Shifts cf so that all values are positive
+    :param cf:
+    :type cf: `~numpy.ndarray`
+    :return:
+    :rtype: `~numpy.ndarray`
+    """
+    return cf + max(abs(cf))
+
+
+def smooth_cf(cf, t_smooth, delta):
+    """
+    Smooth cf by taking samples over t_smooth length
+    :param cf: characteristic function data
+    :type cf: `~numpy.ndarray`
+    :param t_smooth: Time from which samples for smoothing will be taken (s)
+    :type t_smooth: float
+    :param delta: Sample rate of cf
+    :type delta: float
+    :return: smoothed cf data
+    :rtype: `~numpy.ndarray`
+    """
+
+    ismooth = int(round(t_smooth / delta))  # smooth values this many indexes apart
+    cf_smooth = np.zeros(len(cf))
+
+    if len(cf) < ismooth:
+        raise ValueError
+    for i in range(1, len(cf)):
+        if i > ismooth:
+            ii1 = i - ismooth
+            cf_smooth[i] = cf_smooth[i - 1] + (cf[i] - cf[ii1]) / ismooth
+        else:
+            cf_smooth[i] = np.mean(cf[1: i])
+    offset = abs(min(cf) - min(cf_smooth))
+    cf_smooth -= offset  # remove offset from smoothed function
+    return cf_smooth
+
+
+def check_counts_ms(data):
+    """
+    check if data is in counts or m/s
+    :param data: data array
+    :type data: `~numpy.ndarray`
+    :return:
+    :rtype: `~numpy.ndarray`
+    """
+    # this is quick and dirty, better solution?
+    if max(data < 1e-3) and max(data >= 1e-6):
+        data = data * 1000000.
+    elif max(data < 1e-6):
+        data = data * 1e13
+    return data
+
+
+def calcSlope(Data, datasmooth, Tcf, Pick, TSNR):
+    """
+    Calculate Slope for Data around a given time Pick.
+
+    :param Data: trace containing data for which a slope will be calculated
+    :type Data: `~obspy.core.trace.Trace`
+    :param datasmooth: smoothed data array
+    :type datasmooth: ~numpy.ndarray`
+    :param Tcf: array of time indices for Data array
+    :type Tcf: ~numpy.ndarray`
+    :param Pick: onset time around which the slope should be calculated
+    :type Pick: float
+    :param TSNR: tuple containing (tnoise, tsafety, tsignal, tslope). Slope will be calculated in time
+    window tslope around the onset
+    :type TSNR: (float, float, float, float)
+    :return: tuple containing (slope of onset, slope index array, data fit information)
+    :rtype: (float, `~numpy.ndarray`, `~numpy.ndarray`
+    """
+    islope = getslopewin(Tcf, Pick, TSNR[3])
+    try:
+        dataslope = Data[0].data[islope]
+    except IndexError as e:
+        print("Slope Calculation: empty array islope, check signal window")
+        raise e
+    if len(dataslope) <= 1:
+        print('Slope window outside data. No or not enough data in slope window found!')
+        raise ValueError
+    # find maximum within slope determination window
+    # 'cause slope should be calculated up to first local minimum only!
+    imaxs, = argrelmax(dataslope)
+    if imaxs.size:
+        imax = imaxs[0]
+    else:
+        imax = np.argmax(dataslope)
+    iislope = islope[0:imax + 1]  # cut index so it contains only the first maximum
+    if len(iislope) < 2:
+        # calculate slope from initial onset to maximum of AIC function
+        print("AICPicker: Not enough data samples left for slope calculation!")
+        print("Calculating slope from initial onset to maximum of AIC function ...")
+        imax = np.argmax(datasmooth[islope])
+        if imax == 0:
+            print("AICPicker: Maximum for slope determination right at the beginning of the window!")
+            print("Choose longer slope determination window!")
+            raise IndexError
+        iislope = islope[0][0:imax + 1]  # cut index so it contains only the first maximum
+        dataslope = Data[0].data[iislope]  # slope will only be calculated to the first maximum
+    # calculate slope as polynomal fit of order 1
+    xslope = np.arange(0, len(dataslope))
+    P = np.polyfit(xslope, dataslope, 1)
+    datafit = np.polyval(P, xslope)
+    if datafit[0] >= datafit[-1]:
+        print('AICPicker: Negative slope, bad onset skipped!')
+        raise ValueError
+
+    slope = 1 / (len(dataslope) * Data[0].stats.delta) * (datafit[-1] - datafit[0])
+    return slope, iislope, datafit
+
+
+def get_pickparams(pickparam):
+    """
+    Get parameter names out of pickparam into dictionaries and return them
+    :return: dictionaries containing 1. p pick parameters, 2. s pick parameters, 3. first motion determinatiion
+    parameters, 4. signal length parameters
+    :rtype: (dict, dict, dict, dict)
+    """
+    # Define names of all parameters in different groups
+    p_parameter_names = 'algoP pstart pstop use_taup taup_model tlta tsnrz hosorder bpz1 bpz2 pickwinP aictsmooth tsmoothP ausP nfacP tpred1z tdet1z Parorder addnoise Precalcwin minAICPslope minAICPSNR timeerrorsP checkwindowP minfactorP'.split(' ')
+    s_parameter_names = 'algoS sstart sstop bph1 bph2 tsnrh pickwinS tpred1h tdet1h tpred2h tdet2h Sarorder aictsmoothS tsmoothS ausS minAICSslope minAICSSNR Srecalcwin nfacS timeerrorsS zfac checkwindowS minfactorS'.split(' ')
+    first_motion_names = 'minFMSNR fmpickwin minfmweight'.split(' ')
+    signal_length_names = 'minsiglength minpercent noisefactor'.split(' ')
+    # Get list of values from pickparam by name
+    p_parameter_values = map(pickparam.get, p_parameter_names)
+    s_parameter_values = map(pickparam.get, s_parameter_names)
+    fm_parameter_values = map(pickparam.get, first_motion_names)
+    sl_parameter_values = map(pickparam.get, signal_length_names)
+    # construct dicts from names and values
+    p_params = dict(zip(p_parameter_names, p_parameter_values))
+    s_params = dict(zip(s_parameter_names, s_parameter_values))
+    first_motion_params = dict(zip(first_motion_names, fm_parameter_values))
+    signal_length_params = dict(zip(signal_length_names, sl_parameter_values))
+
+    p_params['use_taup'] = real_Bool(p_params['use_taup'])
+
+    return p_params, s_params, first_motion_params, signal_length_params
+
+
+def getQualityFromUncertainty(uncertainty, Errors):
    # set initial quality to 4 (worst) and change only if one condition is hit
    quality = 4

@@ -1283,7 +1495,6 @@ def getQualityFromUncertainty(uncertainty, Errors):

    return quality

-
 if __name__ == '__main__':
    import doctest

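get_quality_class, whose implementation is shown in full above, maps a symmetric pick uncertainty onto the index of the first weight class that is not exceeded, and onto len(weight_classes) when every bound is exceeded. A short illustration with made-up error bounds in the style of the timeerrorsP setting:

    from pylot.core.pick.utils import get_quality_class

    weight_classes = [0.01, 0.02, 0.04, 0.08]        # hypothetical discrete time errors [s]
    print(get_quality_class(0.005, weight_classes))  # 0: below the first bound
    print(get_quality_class(0.03, weight_classes))   # 2: first bound >= 0.03 is 0.04
    print(get_quality_class(0.5, weight_classes))    # 4: larger than every bound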
@@ -139,7 +139,7 @@ def excludeQualityClasses(picks, qClasses, timeerrorsP, timeerrorsS):
    :return: dictionary containing only picks above the excluded quality class(es)
    :rtype: dict
    """
-    from pylot.core.pick.utils import getQualityFromUncertainty
+    from pylot.core.pick.utils import get_quality_class

    if type(qClasses) in [int, float]:
        qClasses = [qClasses]
@@ -154,7 +154,7 @@ def excludeQualityClasses(picks, qClasses, timeerrorsP, timeerrorsS):
            if not type(pick) in [AttribDict, dict]:
                continue
            pickerror = phaseError[identifyPhaseID(phase)]
-            quality = getQualityFromUncertainty(pick['spe'], pickerror)
+            quality = get_quality_class(pick['spe'], pickerror)
            if not quality in qClasses:
                if not station in picksdict_new:
                    picksdict_new[station] = {}
@@ -1219,6 +1219,25 @@ def check_event_folder(path):
    return ev_type


+def correct_iplot(iplot):
+    """
+    iplot should be in range 0...2, but it can be given as True or 'True' as well, which should be converted
+    to an integer. Both will be converted to 2.
+    :type iplot: Bool or int
+    :return: iplot as an integer
+    :rtype: int
+    """
+    # TODO this is a hack, there should never be the ability to pass anything else but an int
+    try:
+        iplot = int(iplot)
+    except ValueError:
+        if real_Bool(iplot):
+            iplot = 2
+        else:
+            iplot = 0
+    return iplot
+
+
 def station_id_remove_channel(station_id):
    """
    Remove the channel from a SEED station id and return Network.Station.Location.
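correct_iplot, added above, coerces the mixed int/bool/str values that are passed around for iplot. A quick sketch of what the code as written returns; note that a bare True survives int() and therefore comes back as 1, so only values that fail int(), such as the string 'True', reach the real_Bool fallback (assuming real_Bool maps that string to True, as the docstring implies):

    from pylot.core.util.utils import correct_iplot  # assumed import path

    print(correct_iplot(0))        # 0
    print(correct_iplot('True'))   # 2, int('True') raises ValueError and the fallback returns 2
    print(correct_iplot(True))     # 1, because int(True) succeeds before the fallback runs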
@@ -44,7 +44,7 @@ from obspy.taup.utils import get_phase_names
 from pylot.core.io.data import Data
 from pylot.core.io.inputs import FilterOptions, PylotParameter
 from pylot.core.pick.utils import getSNR, earllatepicker, getnoisewin, \
-    getResolutionWindow, getQualityFromUncertainty
+    getResolutionWindow, get_quality_class
 from pylot.core.pick.compare import Comparison
 from pylot.core.util.defaults import OUTPUTFORMATS, FILTERDEFAULTS
 from pylot.core.util.utils import prepTimeAxis, full_range, demeanTrace, isSorted, findComboBoxIndex, clims, \
@@ -2604,10 +2604,10 @@ class PickDlg(QDialog):

        # get quality classes
        if self.getPhaseID(phase) == 'P':
-            quality = getQualityFromUncertainty(picks['spe'], self.parameter['timeerrorsP'])
+            quality = get_quality_class(picks['spe'], self.parameter['timeerrorsP'])
            phaseID = 'P'
        elif self.getPhaseID(phase) == 'S':
-            quality = getQualityFromUncertainty(picks['spe'], self.parameter['timeerrorsS'])
+            quality = get_quality_class(picks['spe'], self.parameter['timeerrorsS'])
            phaseID = 'S'

        mpp = picks['mpp'] - self.getStartTime()
@@ -3619,8 +3619,8 @@ class TuneAutopicker(QWidget):
                  ('refSpick', 0),
                  ('el_S1pick', 0),
                  ('el_S2pick', 0)]
-        qualityPpick = getQualityFromUncertainty(picks['P']['spe'], self.parameter['timeerrorsP'])
-        qualitySpick = getQualityFromUncertainty(picks['S']['spe'], self.parameter['timeerrorsS'])
+        qualityPpick = get_quality_class(picks['P']['spe'], self.parameter['timeerrorsP'])
+        qualitySpick = get_quality_class(picks['S']['spe'], self.parameter['timeerrorsS'])
        for p_ax in p_axes:
            axes = self.parent().fig_dict[p_ax[0]].axes
            if not axes:
78
tests/testPickingResults.py
Normal file
78
tests/testPickingResults.py
Normal file
@ -0,0 +1,78 @@
|
|||||||
|
import unittest
|
||||||
|
from pylot.core.pick.autopick import PickingResults
|
||||||
|
|
||||||
|
class TestPickingResults(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.pr = PickingResults()
|
||||||
|
|
||||||
|
def test_non_existing_key_dot_access(self):
|
||||||
|
"""Accessing an attribute in the class that wasnt added to the dict should give a AttributeError"""
|
||||||
|
with self.assertRaises(AttributeError):
|
||||||
|
self.pr.doesntexist
|
||||||
|
|
||||||
|
def test_non_existing_key_dict_access(self):
|
||||||
|
"""Accessing a missing attribute in a dictionary throws a KeyError"""
|
||||||
|
with self.assertRaises(KeyError):
|
||||||
|
self.pr['keydoesnotexist']
|
||||||
|
|
||||||
|
def test_dot_member_creation(self):
|
||||||
|
self.pr.x = 0
|
||||||
|
self.assertEqual(self.pr.x, 0)
|
||||||
|
self.pr.x += 42
|
||||||
|
self.assertEqual(self.pr.x, 42)
|
||||||
|
|
||||||
|
def test_dot_builtin_member(self):
|
||||||
|
self.assertEqual(self.pr.weight, 4)
|
||||||
|
self.pr.weight = 99
|
||||||
|
self.assertEqual(self.pr.weight, 99)
|
||||||
|
|
||||||
|
def test_key_access(self):
|
||||||
|
self.pr['y'] = 11
|
||||||
|
self.assertEqual(self.pr['y'], 11)
|
||||||
|
|
||||||
|
def test_builtin_fields(self):
|
||||||
|
self.assertEqual(self.pr['weight'], 4)
|
||||||
|
|
||||||
|
def test_in(self):
|
||||||
|
self.assertFalse('keydoesnotexist' in self.pr)
|
||||||
|
self.pr['k'] = 0
|
||||||
|
self.assertTrue('k' in self.pr)
|
||||||
|
|
||||||
|
def test_keys_function(self):
|
||||||
|
a = 99
|
||||||
|
self.pr.newkey = a
|
||||||
|
self.assertIn(a, self.pr.values())
|
||||||
|
self.assertIn('newkey', self.pr.keys())
|
||||||
|
|
||||||
|
def test_len_and_clear(self):
|
||||||
|
self.pr.clear()
|
||||||
|
self.assertEqual(len(self.pr), 0)
|
||||||
|
self.pr.a = 6
|
||||||
|
self.pr['b'] = 9
|
||||||
|
self.assertEqual(len(self.pr), 2)
|
||||||
|
|
||||||
|
def test_get_default(self):
|
||||||
|
self.assertEqual(self.pr.get('keynotexisting', 42), 42)
|
||||||
|
weight = self.pr.get('weight', -1)
|
||||||
|
self.assertEqual(weight, 4)
|
||||||
|
self.assertNotEqual(weight, -1)
|
||||||
|
|
||||||
|
def test_dunder_attributes(self):
|
||||||
|
"""Storing Pythons special dunder method in a dictionary is valid and should not override the instances dunder
|
||||||
|
methods"""
|
||||||
|
prev_len = len(self.pr)
|
||||||
|
try:
|
||||||
|
self.pr['__len__'] = None
|
||||||
|
except Exception:
|
||||||
|
self.fail("test_dunder_attributes failed to add a dunder attribute to the dictionary keys")
|
||||||
|
try:
|
||||||
|
curr_len = len(self.pr)
|
||||||
|
except Exception:
|
||||||
|
self.fail("test_dunder_attributes overwrote an instance internal dunder method")
|
||||||
|
self.assertEqual(prev_len+1, curr_len) # +1 for the added __len__ key/value-pair
|
||||||
|
|
||||||
|
self.pr.__len__ = 42
|
||||||
|
|
||||||
|
self.assertEqual(42, self.pr['__len__'])
|
||||||
|
self.assertEqual(prev_len+1, curr_len, msg="__len__ was overwritten")
|
35
tests/test_PickingParameters.py
Normal file
35
tests/test_PickingParameters.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
import unittest
|
||||||
|
from pylot.core.pick.autopick import PickingParameters
|
||||||
|
|
||||||
|
class TestPickingParameters(unittest.TestCase):
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.simple_dict = {'a': 3, 'b': 14}
|
||||||
|
self.nested_dict = {'a': self.simple_dict, 'b': self.simple_dict}
|
||||||
|
|
||||||
|
def assertParameterEquality(self, dic, instance):
|
||||||
|
"""Test wether all parameters given in dic are found in instance"""
|
||||||
|
for key, value in dic.items():
|
||||||
|
self.assertEqual(value, getattr(instance, key))
|
||||||
|
|
||||||
|
def test_add_params_from_dict_simple(self):
|
||||||
|
pickparam = PickingParameters()
|
||||||
|
pickparam.add_params_from_dict(self.simple_dict)
|
||||||
|
self.assertParameterEquality(self.simple_dict, pickparam)
|
||||||
|
|
||||||
|
def test_add_params_from_dict_nested(self):
|
||||||
|
pickparam = PickingParameters()
|
||||||
|
pickparam.add_params_from_dict(self.nested_dict)
|
||||||
|
self.assertParameterEquality(self.nested_dict, pickparam)
|
||||||
|
|
||||||
|
def test_init(self):
|
||||||
|
pickparam = PickingParameters(self.simple_dict)
|
||||||
|
self.assertParameterEquality(self.simple_dict, pickparam)
|
||||||
|
|
||||||
|
def test_dot_access(self):
|
||||||
|
pickparam = PickingParameters(self.simple_dict)
|
||||||
|
self.assertEqual(pickparam.a, self.simple_dict['a'])
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
104
tests/test_autopickstation/autoPyLoT_global_taupy_false.in
Normal file
104
tests/test_autopickstation/autoPyLoT_global_taupy_false.in
Normal file
@ -0,0 +1,104 @@
|
|||||||
|
%This is a parameter input file for PyLoT/autoPyLoT.
|
||||||
|
%All main and special settings regarding data handling
|
||||||
|
%and picking are to be set here!
|
||||||
|
%Parameters are optimized for %extent data sets!
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#main settings#
|
||||||
|
/home/darius #rootpath# %project path
|
||||||
|
alparray #datapath# %data path
|
||||||
|
waveforms_used #database# %name of data base
|
||||||
|
e0093.173.16 #eventID# %event ID for single event processing (* for all events found in database)
|
||||||
|
/home/darius/alparray/metadata #invdir# %full path to inventory or dataless-seed file
|
||||||
|
PILOT #datastructure# %choose data structure
|
||||||
|
True #apverbose# %choose 'True' or 'False' for terminal output
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#NLLoc settings#
|
||||||
|
None #nllocbin# %path to NLLoc executable
|
||||||
|
/home/darius/alparray/auto #nllocroot# %root of NLLoc-processing directory
|
||||||
|
AUTOPHASES.obs #phasefile# %name of autoPyLoT-output phase file for NLLoc
|
||||||
|
Insheim_min1d032016_auto.in #ctrfile# %name of autoPyLoT-output control file for NLLoc
|
||||||
|
ttime #ttpatter# %pattern of NLLoc ttimes from grid
|
||||||
|
AUTOLOC_nlloc #outpatter# %pattern of NLLoc-output file
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#parameters for seismic moment estimation#
|
||||||
|
3530.0 #vp# %average P-wave velocity
|
||||||
|
2500.0 #rho# %average rock density [kg/m^3]
|
||||||
|
300.0 0.8 #Qp# %quality factor for P waves (Qp*f^a); list(Qp, a)
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#settings local magnitude#
|
||||||
|
1.0 1.0 1.0 #WAscaling# %Scaling relation (log(Ao)+Alog(r)+Br+C) of Wood-Anderson amplitude Ao [nm] If zeros are set, original Richter magnitude is calculated!
|
||||||
|
1.0 1.0 #magscaling# %Scaling relation for derived local magnitude [a*Ml+b]. If zeros are set, no scaling of network magnitude is applied!
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#filter settings#
|
||||||
|
0.01 0.01 #minfreq# %Lower filter frequency [P, S]
|
||||||
|
0.5 0.5 #maxfreq# %Upper filter frequency [P, S]
|
||||||
|
3 3 #filter_order# %filter order [P, S]
|
||||||
|
bandpass bandpass #filter_type# %filter type (bandpass, bandstop, lowpass, highpass) [P, S]
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#common settings picker#
|
||||||
|
global #extent# %extent of array ("local", "regional" or "global")
|
||||||
|
-100.0 #pstart# %start time [s] for calculating CF for P-picking (if TauPy: seconds relative to estimated onset)
|
||||||
|
350.0 #pstop# %end time [s] for calculating CF for P-picking (if TauPy: seconds relative to estimated onset)
|
||||||
|
200.0 #sstart# %start time [s] relative to P-onset for calculating CF for S-picking
|
||||||
|
875.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
||||||
|
False #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
||||||
|
IASP91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
||||||
|
0.01 0.1 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
||||||
|
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
||||||
|
0.01 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
||||||
|
0.001 0.5 #bph2# %lower/upper corner freq. of second band pass filter z-comp. [Hz]
|
||||||
|
#special settings for calculating CF#
|
||||||
|
%!!Edit the following only if you know what you are doing!!%
|
||||||
|
#Z-component#
|
||||||
|
HOS #algoP# %choose algorithm for P-onset determination (HOS, ARZ, or AR3)
|
||||||
|
100.0 #tlta# %for HOS-/AR-AIC-picker, length of LTA window [s]
|
||||||
|
4 #hosorder# %for HOS-picker, order of Higher Order Statistics
|
||||||
|
2 #Parorder# %for AR-picker, order of AR process of Z-component
|
||||||
|
24.0 #tdet1z# %for AR-picker, length of AR determination window [s] for Z-component, 1st pick
|
||||||
|
20.0 #tpred1z# %for AR-picker, length of AR prediction window [s] for Z-component, 1st pick
|
||||||
|
16.0 #tdet2z# %for AR-picker, length of AR determination window [s] for Z-component, 2nd pick
|
||||||
|
8.0 #tpred2z# %for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick
|
||||||
|
0.5 #addnoise# %add noise to seismogram for stable AR prediction
|
||||||
|
30.0 5.0 20.0 10.0 #tsnrz# %for HOS/AR, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
|
||||||
|
55.0 #pickwinP# %for initial AIC pick, length of P-pick window [s]
|
||||||
|
20.0 #Precalcwin# %for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)
|
||||||
|
6.0 #aictsmooth# %for HOS/AR, take average of samples for smoothing of AIC-function [s]
|
||||||
|
4.0 #tsmoothP# %for HOS/AR, take average of samples for smoothing CF [s]
|
||||||
|
0.5 #ausP# %for HOS/AR, artificial uplift of samples (aus) of CF (P)
|
||||||
|
1.1 #nfacP# %for HOS/AR, noise factor for noise level determination (P)
|
||||||
|
50.0 #checkwindowP# %time window before HOS/AR-maximum to check for smaller maxima [s]
|
||||||
|
0.7 #minfactorP# %Second maximum must be at least minfactor * first maximum [-]
|
||||||
|
#H-components#
|
||||||
|
ARH #algoS# %choose algorithm for S-onset determination (ARH or AR3)
|
||||||
|
30.0 #tdet1h# %for HOS/AR, length of AR-determination window [s], H-components, 1st pick
|
||||||
|
18.0 #tpred1h# %for HOS/AR, length of AR-prediction window [s], H-components, 1st pick
|
||||||
|
16.0 #tdet2h# %for HOS/AR, length of AR-determinaton window [s], H-components, 2nd pick
|
||||||
|
8.0 #tpred2h# %for HOS/AR, length of AR-prediction window [s], H-components, 2nd pick
|
||||||
|
4 #Sarorder# %for AR-picker, order of AR process of H-components
|
||||||
|
30.0 #Srecalcwin# %for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)
|
||||||
|
195.0 #pickwinS# %for initial AIC pick, length of S-pick window [s]
|
||||||
|
30.0 10.0 15.0 10.0 #tsnrh# %for ARH/AR3, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
|
||||||
|
22.0 #aictsmoothS# %for AIC-picker, take average of samples for smoothing of AIC-function [s]
|
||||||
|
10.0 #tsmoothS# %for AR-picker, take average of samples for smoothing CF [s] (S)
|
||||||
|
0.001 #ausS# %for HOS/AR, artificial uplift of samples (aus) of CF (S)
|
||||||
|
1.2 #nfacS# %for AR-picker, noise factor for noise level determination (S)
|
||||||
|
250.0 #checkwindowS# %time window before AR-maximum to check for smaller maxima [s]
|
||||||
|
0.4 #minfactorS# %Second maximum must be at least minfactor * first maximum [-]
|
||||||
|
#first-motion picker#
|
||||||
|
1 #minfmweight# %minimum required P weight for first-motion determination
|
||||||
|
3.0 #minFMSNR# %miniumum required SNR for first-motion determination
|
||||||
|
10.0 #fmpickwin# %pick window around P onset for calculating zero crossings
|
||||||
|
#quality assessment#
|
||||||
|
4.0 8.0 12.0 16.0 #timeerrorsP# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for P
|
||||||
|
4.0 8.0 12.0 16.0 #timeerrorsS# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for S
|
||||||
|
0.01 #minAICPslope# %below this slope [counts/s] the initial P pick is rejected
|
||||||
|
1.1 #minAICPSNR# %below this SNR the initial P pick is rejected
|
||||||
|
0.01 #minAICSslope# %below this slope [counts/s] the initial S pick is rejected
|
||||||
|
1.1 #minAICSSNR# %below this SNR the initial S pick is rejected
|
||||||
|
12.0 #minsiglength# %length of signal part for which amplitudes must exceed noiselevel [s]
|
||||||
|
1.1 #noisefactor# %noiselevel*noisefactor=threshold
|
||||||
|
20.0 #minpercent# %required percentage of amplitudes exceeding threshold
|
||||||
|
1.25 #zfac# %P-amplitude must exceed at least zfac times RMS-S amplitude
|
||||||
|
60.0 #mdttolerance# %maximum allowed deviation of P picks from median [s]
|
||||||
|
60.0 #wdttolerance# %maximum allowed deviation from Wadati-diagram
|
||||||
|
5.0 #jackfactor# %pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor
|
104
tests/test_autopickstation/autoPyLoT_global_taupy_true.in
Normal file
104
tests/test_autopickstation/autoPyLoT_global_taupy_true.in
Normal file
@ -0,0 +1,104 @@
|
|||||||
|
%This is a parameter input file for PyLoT/autoPyLoT.
|
||||||
|
%All main and special settings regarding data handling
|
||||||
|
%and picking are to be set here!
|
||||||
|
%Parameters are optimized for %extent data sets!
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#main settings#
|
||||||
|
/home/darius #rootpath# %project path
|
||||||
|
alparray #datapath# %data path
|
||||||
|
waveforms_used #database# %name of data base
|
||||||
|
e0093.173.16 #eventID# %event ID for single event processing (* for all events found in database)
|
||||||
|
/home/darius/alparray/metadata #invdir# %full path to inventory or dataless-seed file
|
||||||
|
PILOT #datastructure# %choose data structure
|
||||||
|
True #apverbose# %choose 'True' or 'False' for terminal output
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#NLLoc settings#
|
||||||
|
None #nllocbin# %path to NLLoc executable
|
||||||
|
/home/darius/alparray/auto #nllocroot# %root of NLLoc-processing directory
|
||||||
|
AUTOPHASES.obs #phasefile# %name of autoPyLoT-output phase file for NLLoc
|
||||||
|
Insheim_min1d032016_auto.in #ctrfile# %name of autoPyLoT-output control file for NLLoc
|
||||||
|
ttime #ttpatter# %pattern of NLLoc ttimes from grid
|
||||||
|
AUTOLOC_nlloc #outpatter# %pattern of NLLoc-output file
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#parameters for seismic moment estimation#
|
||||||
|
3530.0 #vp# %average P-wave velocity
|
||||||
|
2500.0 #rho# %average rock density [kg/m^3]
|
||||||
|
300.0 0.8 #Qp# %quality factor for P waves (Qp*f^a); list(Qp, a)
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#settings local magnitude#
|
||||||
|
1.0 1.0 1.0 #WAscaling# %Scaling relation (log(Ao)+Alog(r)+Br+C) of Wood-Anderson amplitude Ao [nm] If zeros are set, original Richter magnitude is calculated!
|
||||||
|
1.0 1.0 #magscaling# %Scaling relation for derived local magnitude [a*Ml+b]. If zeros are set, no scaling of network magnitude is applied!
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#filter settings#
|
||||||
|
0.01 0.01 #minfreq# %Lower filter frequency [P, S]
|
||||||
|
0.5 0.5 #maxfreq# %Upper filter frequency [P, S]
|
||||||
|
3 3 #filter_order# %filter order [P, S]
|
||||||
|
bandpass bandpass #filter_type# %filter type (bandpass, bandstop, lowpass, highpass) [P, S]
|
||||||
|
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||||
|
#common settings picker#
|
||||||
|
global #extent# %extent of array ("local", "regional" or "global")
|
||||||
|
-100.0 #pstart# %start time [s] for calculating CF for P-picking (if TauPy: seconds relative to estimated onset)
|
||||||
|
350.0 #pstop# %end time [s] for calculating CF for P-picking (if TauPy: seconds relative to estimated onset)
|
||||||
|
200.0 #sstart# %start time [s] relative to P-onset for calculating CF for S-picking
|
||||||
|
875.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
|
||||||
|
True #use_taup# %use estimated traveltimes from TauPy for calculating windows for CF
|
||||||
|
IASP91 #taup_model# %define TauPy model for traveltime estimation. Possible values: 1066a, 1066b, ak135, ak135f, herrin, iasp91, jb, prem, pwdk, sp6
|
||||||
|
0.01 0.1 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
|
||||||
|
0.001 0.5 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
|
||||||
|
0.01 0.5 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
|
||||||
|
0.001 0.5 #bph2# %lower/upper corner freq. of second band pass filter z-comp. [Hz]
|
||||||
|
#special settings for calculating CF#
|
||||||
|
%!!Edit the following only if you know what you are doing!!%
|
||||||
|
#Z-component#
|
||||||
|
HOS #algoP# %choose algorithm for P-onset determination (HOS, ARZ, or AR3)
|
||||||
|
100.0 #tlta# %for HOS-/AR-AIC-picker, length of LTA window [s]
|
||||||
|
4 #hosorder# %for HOS-picker, order of Higher Order Statistics
|
||||||
|
2 #Parorder# %for AR-picker, order of AR process of Z-component
|
||||||
|
24.0 #tdet1z# %for AR-picker, length of AR determination window [s] for Z-component, 1st pick
|
||||||
|
20.0 #tpred1z# %for AR-picker, length of AR prediction window [s] for Z-component, 1st pick
|
||||||
|
16.0 #tdet2z# %for AR-picker, length of AR determination window [s] for Z-component, 2nd pick
|
||||||
|
8.0 #tpred2z# %for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick
|
||||||
|
0.5 #addnoise# %add noise to seismogram for stable AR prediction
|
||||||
|
30.0 5.0 20.0 10.0 #tsnrz# %for HOS/AR, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
|
||||||
|
55.0 #pickwinP# %for initial AIC pick, length of P-pick window [s]
|
||||||
|
20.0 #Precalcwin# %for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)
|
||||||
|
6.0 #aictsmooth# %for HOS/AR, take average of samples for smoothing of AIC-function [s]
|
||||||
|
4.0 #tsmoothP# %for HOS/AR, take average of samples for smoothing CF [s]
|
||||||
|
0.5 #ausP# %for HOS/AR, artificial uplift of samples (aus) of CF (P)
|
||||||
|
1.1 #nfacP# %for HOS/AR, noise factor for noise level determination (P)
|
||||||
|
50.0 #checkwindowP# %time window before HOS/AR-maximum to check for smaller maxima [s]
|
||||||
|
0.7 #minfactorP# %Second maximum must be at least minfactor * first maximum [-]
|
||||||
|
#H-components#
|
||||||
|
ARH #algoS# %choose algorithm for S-onset determination (ARH or AR3)
|
||||||
|
30.0 #tdet1h# %for HOS/AR, length of AR-determination window [s], H-components, 1st pick
|
||||||
|
18.0 #tpred1h# %for HOS/AR, length of AR-prediction window [s], H-components, 1st pick
|
||||||
|
16.0 #tdet2h# %for HOS/AR, length of AR-determinaton window [s], H-components, 2nd pick
|
||||||
|
8.0 #tpred2h# %for HOS/AR, length of AR-prediction window [s], H-components, 2nd pick
|
||||||
|
4 #Sarorder# %for AR-picker, order of AR process of H-components
|
||||||
|
30.0 #Srecalcwin# %for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)
|
||||||
|
195.0 #pickwinS# %for initial AIC pick, length of S-pick window [s]
|
||||||
|
30.0 10.0 15.0 10.0 #tsnrh# %for ARH/AR3, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]
|
||||||
|
22.0 #aictsmoothS# %for AIC-picker, take average of samples for smoothing of AIC-function [s]
|
||||||
|
10.0 #tsmoothS# %for AR-picker, take average of samples for smoothing CF [s] (S)
|
||||||
|
0.001 #ausS# %for HOS/AR, artificial uplift of samples (aus) of CF (S)
|
||||||
|
1.2 #nfacS# %for AR-picker, noise factor for noise level determination (S)
|
||||||
|
250.0 #checkwindowS# %time window before AR-maximum to check for smaller maxima [s]
|
||||||
|
0.4 #minfactorS# %Second maximum must be at least minfactor * first maximum [-]
|
||||||
|
#first-motion picker#
|
||||||
|
1 #minfmweight# %minimum required P weight for first-motion determination
|
||||||
|
3.0 #minFMSNR# %miniumum required SNR for first-motion determination
|
||||||
|
10.0 #fmpickwin# %pick window around P onset for calculating zero crossings
|
||||||
|
#quality assessment#
|
||||||
|
4.0 8.0 12.0 16.0 #timeerrorsP# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for P
|
||||||
|
4.0 8.0 12.0 16.0 #timeerrorsS# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for S
|
||||||
|
0.01 #minAICPslope# %below this slope [counts/s] the initial P pick is rejected
|
||||||
|
1.1 #minAICPSNR# %below this SNR the initial P pick is rejected
|
||||||
|
0.01 #minAICSslope# %below this slope [counts/s] the initial S pick is rejected
|
||||||
|
1.1 #minAICSSNR# %below this SNR the initial S pick is rejected
|
||||||
|
12.0 #minsiglength# %length of signal part for which amplitudes must exceed noiselevel [s]
|
||||||
|
1.1 #noisefactor# %noiselevel*noisefactor=threshold
|
||||||
|
20.0 #minpercent# %required percentage of amplitudes exceeding threshold
|
||||||
|
1.25 #zfac# %P-amplitude must exceed at least zfac times RMS-S amplitude
|
||||||
|
60.0 #mdttolerance# %maximum allowed deviation of P picks from median [s]
|
||||||
|
60.0 #wdttolerance# %maximum allowed deviation from Wadati-diagram
|
||||||
|
5.0 #jackfactor# %pick is removed if the variance of the subgroup with the pick removed is larger than the mean variance of all subgroups times safety factor
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -0,0 +1,21 @@
|
|||||||
|
<?xml version='1.0' encoding='utf-8'?>
|
||||||
|
<q:quakeml xmlns:q="http://quakeml.org/xmlns/quakeml/1.2" xmlns="http://quakeml.org/xmlns/bed/1.2">
|
||||||
|
<eventParameters publicID="smi:local/53a38563-739a-48b2-9f34-bf40ee7b656a">
|
||||||
|
<event publicID="smi:local/e0001.024.16">
|
||||||
|
<origin publicID="smi:local/e0001.024.16">
|
||||||
|
<time>
|
||||||
|
<value>2016-01-24T10:30:30.000000Z</value>
|
||||||
|
</time>
|
||||||
|
<latitude>
|
||||||
|
<value>59.66</value>
|
||||||
|
</latitude>
|
||||||
|
<longitude>
|
||||||
|
<value>-153.45</value>
|
||||||
|
</longitude>
|
||||||
|
<depth>
|
||||||
|
<value>128.0</value>
|
||||||
|
</depth>
|
||||||
|
</origin>
|
||||||
|
</event>
|
||||||
|
</eventParameters>
|
||||||
|
</q:quakeml>
|
1
tests/test_autopickstation/e0001.024.16/notes.txt
Normal file
1
tests/test_autopickstation/e0001.024.16/notes.txt
Normal file
@ -0,0 +1 @@
|
|||||||
|
/data/AlpArray/mini_SEED_LH/2016-01-24T10:30:30
|
211
tests/test_autopickstation/test_autopickstation.py
Normal file
211
tests/test_autopickstation/test_autopickstation.py
Normal file
@ -0,0 +1,211 @@
|
|||||||
|
import unittest
|
||||||
|
from unittest import skip
|
||||||
|
import obspy
|
||||||
|
from obspy import UTCDateTime
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from pylot.core.pick.autopick import autopickstation
|
||||||
|
from pylot.core.io.inputs import PylotParameter
|
||||||
|
from pylot.core.io.data import Data
|
||||||
|
from pylot.core.util.utils import trim_station_components
|
||||||
|
|
||||||
|
|
||||||
|
class HidePrints:
|
||||||
|
"""
|
||||||
|
Used to hide all standard output the Function to be tested have, since it clutters the test results.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, hide_prints=True):
|
||||||
|
"""Create object with hide_prints=False to disable print hiding"""
|
||||||
|
self.hide = hide_prints
|
||||||
|
|
||||||
|
def __enter__(self):
|
||||||
|
if self.hide:
|
||||||
|
self._original_stdout = sys.stdout
|
||||||
|
devnull = open(os.devnull, "w")
|
||||||
|
sys.stdout = devnull
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
if self.hide:
|
||||||
|
sys.stdout = self._original_stdout
|
||||||
|
|
||||||
|
|
||||||
|
class MockMetadata:
|
||||||
|
"""Mock metadata object used for taupy to avoid reading large dless file from disk.
|
||||||
|
get_coordinates must take the same arguments as pylot.core.utils.dataprocssing.py/class Metadata."""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.station_names = ['GR.GRA1', 'GR.GRA2', 'G.ECH', 'CH.FIESA', 'Z3.A106A']
|
||||||
|
gra1 = {u'azimuth': 0.0, u'dip': -90.0, u'elevation': 499.5, u'latitude': 49.691888, u'local_depth': 0.0,
|
||||||
|
u'longitude': 11.22172}
|
||||||
|
gra2 = {u'azimuth': 0.0, u'dip': -90.0, u'elevation': 512.0, u'latitude': 49.655208, u'local_depth': 0.0,
|
||||||
|
u'longitude': 11.359444}
|
||||||
|
ech = {u'azimuth': 90.0, u'dip': 0.0, u'elevation': 580.0, u'latitude': 48.216313, u'local_depth': 250.0,
|
||||||
|
u'longitude': 7.158961}
|
||||||
|
fiesa = {'azimuth': 0.0, 'dip': -90.0, 'elevation': 2340.5, 'latitude': 46.43521, 'local_depth': 0.0,
|
||||||
|
'longitude': 8.11051}
|
||||||
|
a106 = {'azimuth': 90.0, 'dip': 0.0, 'elevation': 468.0, 'latitude': 48.753388, 'local_depth': 0.0,
|
||||||
|
'longitude': 9.721937}
|
||||||
|
|
||||||
|
self.coordinates = [gra1, gra2, ech, fiesa, a106]
|
||||||
|
|
||||||
|
def get_coordinates(self, station_id, time=None):
|
||||||
|
"""
|
||||||
|
Mocks the method get_coordinates from obspy.io.xseed.parser.Parser object
|
||||||
|
to avoid building a parser for the unit tests
|
||||||
|
:param station_id: 'GR.GRA1..LHZ' or similar
|
||||||
|
:type station_id: str
|
||||||
|
:return: dictionary containing azimuth, dip, elevation, latitude, longitude,
|
||||||
|
local depth as keys
|
||||||
|
:rtype: dict
|
||||||
|
|
||||||
|
>>>m = MockMetadata(); m.get_coordinates('GR.GRA2..LHZ')
|
||||||
|
{u'azimuth': 0.0, u'dip': -90.0, u'elevation': 512.0, u'latitude': 49.655208, u'local_depth': 0.0, u'longitude': 11.359444}
|
||||||
|
"""
|
||||||
|
|
||||||
|
for index, name in enumerate(self.station_names):
|
||||||
|
if station_id.startswith(name):
|
||||||
|
return self.coordinates[index]
|
||||||
|
|
||||||
|
|
||||||
|
class TestAutopickStation(unittest.TestCase):
"""
Test the autopickstation function as if it were called from the GUI.
Three stations (GR.GRA1, GR.GRA2, G.ECH) are each tested with and without TauPy.
"""

def setUp(self):
self.event_id = 'e0001.024.16'
# Create wfstream for picking
mseed_relative_path = os.path.join(os.path.dirname(__file__), self.event_id, '*.mseed')
self.wfstream = obspy.read(mseed_relative_path)
# trim waveform to get the same results as the GUI call
with HidePrints():
self.wfstream = trim_station_components(self.wfstream, trim_start=True, trim_end=False)
self.gra1 = self.wfstream.select(station='GRA1')
self.gra2 = self.wfstream.select(station='GRA2')
self.ech = self.wfstream.select(station='ECH')
self.fiesa = self.wfstream.select(station='FIESA')
self.a106 = self.wfstream.select(station='A106A')
self.a005a = self.wfstream.select(station='A005A')
# Create input parameter container
self.inputfile_taupy_enabled = os.path.join(os.path.dirname(__file__), 'autoPyLoT_global_taupy_true.in')
self.inputfile_taupy_disabled = os.path.join(os.path.dirname(__file__), 'autoPyLoT_global_taupy_false.in')
self.pickparam_taupy_enabled = PylotParameter(fnin=self.inputfile_taupy_enabled)
self.pickparam_taupy_disabled = PylotParameter(fnin=self.inputfile_taupy_disabled)
self.xml_file = os.path.join(os.path.dirname(__file__), self.event_id, 'PyLoT_' + self.event_id + '.xml')
self.data = Data(evtdata=self.xml_file)
# create origin for taupy testing
self.origin = [obspy.core.event.origin.Origin(magnitude=7.1, latitude=59.66, longitude=-153.45, depth=128.0, time=UTCDateTime("2016-01-24T10:30:30.0"))]
# mock metadata, since reading it from file takes a long time
self.metadata = MockMetadata()

# show the complete diff when differences in result dictionaries are found
self.maxDiff = None

#@skip("Works")
def test_autopickstation_taupy_disabled_gra1(self):
expected = {'P': {'picker': 'auto', 'snrdb': 15.405649120980094, 'weight': 0, 'Mo': None, 'marked': [], 'Mw': None, 'fc': None, 'snr': 34.718816470730317, 'mpp': UTCDateTime(2016, 1, 24, 10, 41, 31, 690000), 'w0': None, 'spe': 0.93333333333333235, 'network': u'GR', 'epp': UTCDateTime(2016, 1, 24, 10, 41, 28, 890000), 'lpp': UTCDateTime(2016, 1, 24, 10, 41, 32, 690000), 'fm': 'D', 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': 10.669661906545489, 'network': u'GR', 'weight': 0, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 50, 30, 690000), 'snr': 11.667187857573905, 'epp': UTCDateTime(2016, 1, 24, 10, 50, 21, 690000), 'mpp': UTCDateTime(2016, 1, 24, 10, 50, 29, 690000), 'fm': None, 'spe': 2.6666666666666665, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.gra1, pickparam=self.pickparam_taupy_disabled, metadata=(None, None))
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
self.assertEqual('GRA1', station)

def test_autopickstation_taupy_enabled_gra1(self):
expected = {'P': {'picker': 'auto', 'snrdb': 15.599905299126778, 'weight': 0, 'Mo': None, 'marked': [], 'Mw': None, 'fc': None, 'snr': 36.307013769185403, 'mpp': UTCDateTime(2016, 1, 24, 10, 41, 27, 690000), 'w0': None, 'spe': 0.93333333333333235, 'network': u'GR', 'epp': UTCDateTime(2016, 1, 24, 10, 41, 24, 890000), 'lpp': UTCDateTime(2016, 1, 24, 10, 41, 28, 690000), 'fm': 'U', 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': 10.669661906545489, 'network': u'GR', 'weight': 0, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 50, 30, 690000), 'snr': 11.667187857573905, 'epp': UTCDateTime(2016, 1, 24, 10, 50, 21, 690000), 'mpp': UTCDateTime(2016, 1, 24, 10, 50, 29, 690000), 'fm': None, 'spe': 2.6666666666666665, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.gra1, pickparam=self.pickparam_taupy_enabled, metadata=self.metadata, origin=self.origin)
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
self.assertEqual('GRA1', station)

def test_autopickstation_taupy_disabled_gra2(self):
expected = {'P': {'picker': 'auto', 'snrdb': None, 'weight': 9, 'Mo': None, 'marked': 'shortsignallength', 'Mw': None, 'fc': None, 'snr': None, 'mpp': UTCDateTime(2016, 1, 24, 10, 36, 59, 150000), 'w0': None, 'spe': None, 'network': u'GR', 'epp': UTCDateTime(2016, 1, 24, 10, 36, 43, 150000), 'lpp': UTCDateTime(2016, 1, 24, 10, 37, 15, 150000), 'fm': 'N', 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': None, 'network': u'GR', 'weight': 4, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 37, 15, 150000), 'snr': None, 'epp': UTCDateTime(2016, 1, 24, 10, 36, 43, 150000), 'mpp': UTCDateTime(2016, 1, 24, 10, 36, 59, 150000), 'fm': None, 'spe': None, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.gra2, pickparam=self.pickparam_taupy_disabled, metadata=(None, None))
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
self.assertEqual('GRA2', station)

def test_autopickstation_taupy_enabled_gra2(self):
expected = {'P': {'picker': 'auto', 'snrdb': 13.957959025719253, 'weight': 0, 'Mo': None, 'marked': [], 'Mw': None, 'fc': None, 'snr': 24.876879503607871, 'mpp': UTCDateTime(2016, 1, 24, 10, 41, 29, 150000), 'w0': None, 'spe': 1.0, 'network': u'GR', 'epp': UTCDateTime(2016, 1, 24, 10, 41, 26, 150000), 'lpp': UTCDateTime(2016, 1, 24, 10, 41, 30, 150000), 'fm': None, 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': 10.573236990555648, 'network': u'GR', 'weight': 1, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 50, 34, 150000), 'snr': 11.410999834108294, 'epp': UTCDateTime(2016, 1, 24, 10, 50, 21, 150000), 'mpp': UTCDateTime(2016, 1, 24, 10, 50, 33, 150000), 'fm': None, 'spe': 4.666666666666667, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.gra2, pickparam=self.pickparam_taupy_enabled, metadata=self.metadata, origin=self.origin)
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
self.assertEqual('GRA2', station)

def test_autopickstation_taupy_disabled_ech(self):
expected = {'P': {'picker': 'auto', 'snrdb': None, 'weight': 9, 'Mo': None, 'marked': 'SinsteadP', 'Mw': None, 'fc': None, 'snr': None, 'mpp': UTCDateTime(2016, 1, 24, 10, 26, 57), 'w0': None, 'spe': None, 'network': u'G', 'epp': UTCDateTime(2016, 1, 24, 10, 26, 41), 'lpp': UTCDateTime(2016, 1, 24, 10, 27, 13), 'fm': 'N', 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': None, 'network': u'G', 'weight': 4, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 27, 13), 'snr': None, 'epp': UTCDateTime(2016, 1, 24, 10, 26, 41), 'mpp': UTCDateTime(2016, 1, 24, 10, 26, 57), 'fm': None, 'spe': None, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.ech, pickparam=self.pickparam_taupy_disabled)
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
self.assertEqual('ECH', station)

def test_autopickstation_taupy_enabled_ech(self):
# this station has a long time span before the first onset, so taupy will help during picking
expected = {'P': {'picker': 'auto', 'snrdb': 9.9753586609166316, 'weight': 0, 'Mo': None, 'marked': [], 'Mw': None, 'fc': None, 'snr': 9.9434218804137107, 'mpp': UTCDateTime(2016, 1, 24, 10, 41, 34), 'w0': None, 'spe': 1.6666666666666667, 'network': u'G', 'epp': UTCDateTime(2016, 1, 24, 10, 41, 29), 'lpp': UTCDateTime(2016, 1, 24, 10, 41, 35), 'fm': None, 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': 12.698999454169567, 'network': u'G', 'weight': 0, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 50, 44), 'snr': 18.616581906366577, 'epp': UTCDateTime(2016, 1, 24, 10, 50, 33), 'mpp': UTCDateTime(2016, 1, 24, 10, 50, 43), 'fm': None, 'spe': 3.3333333333333335, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.ech, pickparam=self.pickparam_taupy_enabled, metadata=self.metadata, origin=self.origin)
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
self.assertEqual('ECH', station)

def test_autopickstation_taupy_disabled_fiesa(self):
# this station has a long time span before the first onset, so taupy will help during picking
expected = {'P': {'picker': 'auto', 'snrdb': None, 'weight': 9, 'Mo': None, 'marked': 'SinsteadP', 'Mw': None, 'fc': None, 'snr': None, 'mpp': UTCDateTime(2016, 1, 24, 10, 35, 58), 'w0': None, 'spe': None, 'network': u'CH', 'epp': UTCDateTime(2016, 1, 24, 10, 35, 42), 'lpp': UTCDateTime(2016, 1, 24, 10, 36, 14), 'fm': 'N', 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': None, 'network': u'CH', 'weight': 4, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 36, 14), 'snr': None, 'epp': UTCDateTime(2016, 1, 24, 10, 35, 42), 'mpp': UTCDateTime(2016, 1, 24, 10, 35, 58), 'fm': None, 'spe': None, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.fiesa, pickparam=self.pickparam_taupy_disabled)
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
self.assertEqual('FIESA', station)

def test_autopickstation_taupy_enabled_fiesa(self):
# this station has a long time span before the first onset, so taupy will help during picking
expected = {'P': {'picker': 'auto', 'snrdb': 13.921049277904373, 'weight': 0, 'Mo': None, 'marked': [], 'Mw': None, 'fc': None, 'snr': 24.666352170589487, 'mpp': UTCDateTime(2016, 1, 24, 10, 41, 47), 'w0': None, 'spe': 1.2222222222222285, 'network': u'CH', 'epp': UTCDateTime(2016, 1, 24, 10, 41, 43, 333333), 'lpp': UTCDateTime(2016, 1, 24, 10, 41, 48), 'fm': None, 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': 10.893086316477728, 'network': u'CH', 'weight': 0, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 51, 5), 'snr': 12.283118216397849, 'epp': UTCDateTime(2016, 1, 24, 10, 50, 59, 333333), 'mpp': UTCDateTime(2016, 1, 24, 10, 51, 2), 'fm': None, 'spe': 2.8888888888888764, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.fiesa, pickparam=self.pickparam_taupy_enabled, metadata=self.metadata, origin=self.origin)
self.assertDictContainsSubset(expected=expected['P'], actual=result['P'])
self.assertDictContainsSubset(expected=expected['S'], actual=result['S'])
self.assertEqual('FIESA', station)

def test_autopickstation_gra1_z_comp_missing(self):
"""Picking on a stream without a vertical trace should return None"""
wfstream = self.gra1.copy()
wfstream = wfstream.select(channel='*E') + wfstream.select(channel='*N')
with HidePrints():
result, station = autopickstation(wfstream=wfstream, pickparam=self.pickparam_taupy_disabled, metadata=(None, None))
self.assertIsNone(result)
self.assertEqual('GRA1', station)

def test_autopickstation_gra1_horizontal_comps_missing(self):
"""Picking on a stream without horizontal traces should still pick the P phase on the vertical component"""
wfstream = self.gra1.copy()
wfstream = wfstream.select(channel='*Z')
expected = {'P': {'picker': 'auto', 'snrdb': 15.405649120980094, 'network': u'GR', 'weight': 0, 'Ao': None, 'Mo': None, 'marked': [], 'lpp': UTCDateTime(2016, 1, 24, 10, 41, 32, 690000), 'Mw': None, 'fc': None, 'snr': 34.718816470730317, 'epp': UTCDateTime(2016, 1, 24, 10, 41, 28, 890000), 'mpp': UTCDateTime(2016, 1, 24, 10, 41, 31, 690000), 'w0': None, 'spe': 0.9333333333333323, 'fm': 'D', 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': None, 'network': None, 'weight': 4, 'Mo': None, 'Ao': None, 'lpp': None, 'Mw': None, 'fc': None, 'snr': None, 'marked': [], 'mpp': None, 'w0': None, 'spe': None, 'epp': None, 'fm': 'N', 'channel': None}}
with HidePrints():
result, station = autopickstation(wfstream=wfstream, pickparam=self.pickparam_taupy_disabled, metadata=(None, None))
self.assertEqual(expected, result)
self.assertEqual('GRA1', station)

def test_autopickstation_a106_taupy_enabled(self):
"""This station has invalid values recorded on both the N and E components, but a pick can still be found on Z"""
expected = {'P': {'picker': 'auto', 'snrdb': 12.862128789922826, 'network': u'Z3', 'weight': 0, 'Ao': None, 'Mo': None, 'marked': [], 'lpp': UTCDateTime(2016, 1, 24, 10, 41, 34), 'Mw': None, 'fc': None, 'snr': 19.329155459132608, 'epp': UTCDateTime(2016, 1, 24, 10, 41, 30), 'mpp': UTCDateTime(2016, 1, 24, 10, 41, 33), 'w0': None, 'spe': 1.6666666666666667, 'fm': None, 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': None, 'network': u'Z3', 'weight': 4, 'Ao': None, 'Mo': None, 'marked': [], 'lpp': UTCDateTime(2016, 1, 24, 10, 28, 56), 'Mw': None, 'fc': None, 'snr': None, 'epp': UTCDateTime(2016, 1, 24, 10, 28, 24), 'mpp': UTCDateTime(2016, 1, 24, 10, 28, 40), 'w0': None, 'spe': None, 'fm': None, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.a106, pickparam=self.pickparam_taupy_enabled, metadata=self.metadata, origin=self.origin)
self.assertEqual(expected, result)

def test_autopickstation_station_missing_in_metadata(self):
"""This station is not in the metadata, but TauPy is enabled. TauPy should exit cleanly and convert the start time
relative to the theoretical onset into one relative to the trace's start time, i.e. never negative.
"""
self.pickparam_taupy_enabled.setParamKV('pstart', -100)  # modify starttime to be relative to theoretical onset
expected = {'P': {'picker': 'auto', 'snrdb': 14.464757855513506, 'network': u'Z3', 'weight': 0, 'Mo': None, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 41, 39, 605000), 'Mw': None, 'fc': None, 'snr': 27.956048519707181, 'marked': [], 'mpp': UTCDateTime(2016, 1, 24, 10, 41, 38, 605000), 'w0': None, 'spe': 1.6666666666666667, 'epp': UTCDateTime(2016, 1, 24, 10, 41, 35, 605000), 'fm': None, 'channel': u'LHZ'}, 'S': {'picker': 'auto', 'snrdb': 10.112844176301248, 'network': u'Z3', 'weight': 1, 'Mo': None, 'Ao': None, 'lpp': UTCDateTime(2016, 1, 24, 10, 50, 51, 605000), 'Mw': None, 'fc': None, 'snr': 10.263238413785425, 'marked': [], 'mpp': UTCDateTime(2016, 1, 24, 10, 50, 48, 605000), 'w0': None, 'spe': 4.666666666666667, 'epp': UTCDateTime(2016, 1, 24, 10, 50, 40, 605000), 'fm': None, 'channel': u'LHE'}}
with HidePrints():
result, station = autopickstation(wfstream=self.a005a, pickparam=self.pickparam_taupy_enabled, metadata=self.metadata, origin=self.origin)
self.assertEqual(expected, result)

if __name__ == '__main__':
unittest.main()
56
tests/test_get_quality_class.py
Normal file
56
tests/test_get_quality_class.py
Normal file
@ -0,0 +1,56 @@
import unittest
from pylot.core.pick.utils import get_quality_class


class TestQualityClassFromUncertainty(unittest.TestCase):
"""
Test the function that assigns a quality value [0...4] to a pick uncertainty.
The pick uncertainty is compared to the error classes.
A pick uncertainty below the first error class is assigned the best quality, quality 0.
A pick uncertainty above the first error class but below the second is assigned quality 1, and so on.
A pick uncertainty larger than the biggest error class is assigned quality 4.
The upper border of a quality class is inclusive, the lower border exclusive. This means that a value lying exactly on the
border between two classes is assigned to the higher quality class (represented by the lower number).
"""

def setUp(self):
# entries are the upper/lower bounds of the error classes
self.error_classes = [float(x) for x in range(1, 9, 2)]
# [1.0, 3.0, 5.0, 7.0]

def test_out_of_lower_bound(self):
# Error out of lower bound of classes
self.assertEqual(0, get_quality_class(0.5, self.error_classes))

def test_out_of_upper_bound(self):
# Error out of upper bound of error classes
self.assertEqual(4, get_quality_class(14.7, self.error_classes))

def test_on_lower_border(self):
# Error exactly on lower bound
self.assertEqual(0, get_quality_class(1., self.error_classes))

def test_on_upper_border(self):
# Error exactly on upper bound
self.assertEqual(3, get_quality_class(7., self.error_classes))

def test_on_middle_border_inclusive(self):
# Error exactly between two classes; since the lower bound is exclusive and the upper bound is inclusive, it should
# fall into the class with the better quality
self.assertEqual(1, get_quality_class(3., self.error_classes))
self.assertNotEqual(2, get_quality_class(3., self.error_classes))

def test_in_class1(self):
# Error within class 1
self.assertEqual(1, get_quality_class(1.5, self.error_classes))

def test_in_class2(self):
# Error within class 2
self.assertEqual(2, get_quality_class(3.5, self.error_classes))

def test_in_class3(self):
# Error within class 3
self.assertEqual(3, get_quality_class(5.6, self.error_classes))

if __name__ == '__main__':
unittest.main()
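The border convention spelled out in the TestQualityClassFromUncertainty docstring can be summarized in a few lines of code. The sketch below is illustrative only and assumes the behaviour described by the tests above; quality_class_sketch is a made-up name, and the actual implementation is get_quality_class in pylot.core.pick.utils, whose internals may differ:

def quality_class_sketch(uncertainty, error_classes):
    # error_classes are ascending upper bounds, e.g. [1.0, 3.0, 5.0, 7.0];
    # the upper border of each class is inclusive, so a value exactly on a
    # border falls into the better (lower-numbered) quality class
    for quality, upper_bound in enumerate(error_classes):
        if uncertainty <= upper_bound:
            return quality
    # larger than the biggest error class: worst quality
    return len(error_classes)

With error_classes = [1.0, 3.0, 5.0, 7.0] this reproduces the expectations encoded in the tests, e.g. 0.5 -> 0, 3.0 -> 1, 5.6 -> 3 and 14.7 -> 4.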