Merge branch 'feature/magnitude4QtPyLoT'

Conflicts:
	pylot/core/util/utils.py
Sebastian Wehling-Benatelli, 2016-09-13 12:02:21 +02:00
commit d7680f8ae3
8 changed files with 341 additions and 166 deletions

Changed file: QtPyLoT.py

@@ -39,7 +39,10 @@ from PySide.QtGui import QMainWindow, QInputDialog, QIcon, QFileDialog, \
import numpy as np
import subprocess
from obspy import UTCDateTime
from obspy.geodetics import degrees2kilometers
from obspy.core.event import Magnitude
from pylot.core.analysis.magnitude import calcsourcespec, calcMoMw
from pylot.core.io.data import Data
from pylot.core.io.inputs import FilterOptions, AutoPickParameter
from pylot.core.pick.autopick import autopickevent
@@ -73,7 +76,10 @@ class MainWindow(QMainWindow):
super(MainWindow, self).__init__(parent)
self.createAction = createAction
# read settings
settings = QSettings()
infile = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
self._inputs = AutoPickParameter(infile)
if settings.value("user/FullName", None) is None:
fulluser = QInputDialog.getText(self, "Enter Name:", "Full name")
settings.setValue("user/FullName", fulluser)
@@ -337,8 +343,8 @@ class MainWindow(QMainWindow):
# pickToolBar.setObjectName("PickTools")
# self.addActions(pickToolBar, pickToolActions)
locateEvent = self.createAction(parent=self, text='locateEvent',
slot=self.locateEvent,
locateEvent = self.createAction(parent=self, text='locate_event',
slot=self.locate_event,
shortcut='Alt+Ctrl+L',
icon=locate_icon,
tip='Locate the event using '
@@ -396,6 +402,10 @@ class MainWindow(QMainWindow):
self.fileMenu.addSeparator()
self.fileMenu.addAction(self.fileMenuActions[-1])
@property
def inputs(self):
return self._inputs
def getRoot(self):
settings = QSettings()
return settings.value("data/dataRoot")
@@ -813,6 +823,9 @@ class MainWindow(QMainWindow):
self.logDockWidget.setWidget(self.listWidget)
self.addDockWidget(Qt.LeftDockWidgetArea, self.logDockWidget)
self.addListItem('loading default values for local data ...')
# may become obsolete if generalized input parameters are read from disc
# during initialization
# TODO double check for obsolete read in of parameters
autopick_parameter = AutoPickParameter(AUTOMATIC_DEFAULTS)
self.addListItem(str(autopick_parameter))
@@ -874,6 +887,8 @@ class MainWindow(QMainWindow):
return
# plotting picks
plotID = self.getStationID(station)
if not plotID:
return
ax = self.getPlotWidget().axes
ylims = np.array([-.5, +.5]) + plotID
phase_col = {
@@ -908,7 +923,7 @@ class MainWindow(QMainWindow):
else:
raise TypeError('Unknown picktype {0}'.format(picktype))
def locateEvent(self):
def locate_event(self):
"""
locate event using the manually picked phases
:return:
@@ -923,7 +938,7 @@ class MainWindow(QMainWindow):
locroot = settings.value("{0}/rootPath".format(loctool), None)
if locroot is None:
self.PyLoTprefs()
self.locateEvent()
self.locate_event()
infile = settings.value("{0}/inputFile".format(loctool), None)
@@ -933,7 +948,13 @@ class MainWindow(QMainWindow):
" (*.in *.ini *.conf *.cfg)"
ans = QFileDialog().getOpenFileName(self, caption=caption,
filter=filt, dir=locroot)
if ans[0]:
infile = ans[0]
else:
QMessageBox.information(self,
self.tr('No infile selected'),
self.tr('Inputfile necessary for localization.'))
return
settings.setValue("{0}/inputFile".format(loctool), infile)
settings.sync()
if loctool == 'nll':
@@ -970,24 +991,29 @@ class MainWindow(QMainWindow):
if e.origins:
o = e.origins[0]
mags = dict()
for a in o.arrivals:
pick = a.pick_id.get_referred_object()
station = pick.waveform_id.station_code
wf = self.get_data().getWFData().select(station=station)
onset = pick.time
fninv = settings.value("inventoryFile", None)
if fninv is None:
fninv = QFileDialog.getOpenFileName()
fninv, _ = QFileDialog.getOpenFileName(self, self.tr(
"Select inventory..."), self.tr("Select file"))
ans = QMessageBox.question(self, self.tr("Make default..."),
self.tr("New inventory filename set.\n" + \
self.tr(
"New inventory filename set.\n" + \
"Do you want to make it the default value?"),
QMessageBox.Yes | QMessageBox.No,
QMessageBox.No)
if ans == QMessageBox.Yes:
settings.setValue("inventoryFile", fninv)
settings.sync()
w0, fc = calcsourcespec(wf, onset, fninv, 3000., a.distance, a.azimuth, a.takeoff_angle, 60., 0)
stat_mags = calcMoMw(wf, w0, 2700., 3000., a.distance, fninv)
for a in o.arrivals:
pick = a.pick_id.get_referred_object()
station = pick.waveform_id.station_code
wf = self.get_data().getWFData().select(station=station)
onset = pick.time
dist = degrees2kilometers(a.distance)
w0, fc = calcsourcespec(wf, onset, fninv, self.inputs.get('vp'), dist,
a.azimuth, a.takeoff_angle,
self.inputs.get('Qp'), 0)
stat_mags = calcMoMw(wf, w0, 2700., 3000., dist, fninv)
mags[station] = stat_mags
mag = np.median([M[1] for M in mags.values()])
return Magnitude(mag=mag, magnitude_type='Mw')
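
For reference (not part of the diff): obspy's degrees2kilometers assumes a spherical Earth of 6371 km radius, so one degree of epicentral distance is roughly 111.19 km, and calcsourcespec later converts the kilometre value to metres (delta * 1000, see the magnitude.py hunk further down). A minimal sketch of the conversion and of the median aggregation used above, with made-up station values:

from obspy.geodetics import degrees2kilometers
import numpy as np

dist_km = degrees2kilometers(0.35)  # ~38.9 km for a local event
# hypothetical per-station results as returned by calcMoMw: (Mo, Mw)
mags = {'STA1': (3.2e14, 3.6), 'STA2': (2.8e14, 3.5)}
network_mw = np.median([M[1] for M in mags.values()])  # -> 3.55
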

New file: inputs/pylot_sample.in (98 lines)

@@ -0,0 +1,98 @@
%This is an example parameter input file for PyLoT.
%All main and special settings regarding data handling
%and picking are to be set here!
%Parameters shown here are optimized for local data sets!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#main settings#
/data/Geothermie/Insheim #rootpath# %project path
EVENT_DATA/LOCAL #datapath# %data path
2013.02_Insheim #database# %name of data base
e0019.048.13 #eventID# %event ID for single event processing
/data/Geothermie/Insheim/STAT_INFO #invdir# %full path to inventory or dataless-seed file
PILOT #datastructure# %choose data structure
0 #iplot# %flag for plotting: 0 none, 1 partly, >1 everything
True #apverbose# %choose 'True' or 'False' for terminal output
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#NLLoc settings#
/progs/bin #nllocbin# %path to NLLoc executable
/data/Geothermie/Insheim/LOCALISATION/NLLoc #nllocroot# %root of NLLoc-processing directory
AUTOPHASES.obs #phasefile# %name of autoPyLoT-output phase file for NLLoc
%(in nllocroot/obs)
Insheim_min1d2015.in #ctrfile# %name of PyLoT-output control file for NLLoc
%(in nllocroot/run)
ttime #ttpatter# %pattern of NLLoc ttimes from grid
%(in nllocroot/times)
AUTOLOC_nlloc #outpatter# %pattern of NLLoc-output file
%(returns 'eventID_outpatter')
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#parameters for seismic moment estimation#
3530 #vp# %average P-wave velocity
2500 #rho# %average rock density [kg/m^3]
300 0.8 #Qp# %quality factor for P waves (Qp*f^a); list(Qp, a)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
AUTOFOCMEC_AIC_HOS4_ARH.in #focmecin# %name of focmec input file containing derived polarities
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#common settings picker#
15.0 #pstart# %start time [s] for calculating CF for P-picking
60.0 #pstop# %end time [s] for calculating CF for P-picking
-1.0 #sstart# %start time [s] relative to P-onset for calculating CF for S-picking
10.0 #sstop# %end time [s] after P-onset for calculating CF for S-picking
2 20 #bpz1# %lower/upper corner freq. of first band pass filter Z-comp. [Hz]
2 30 #bpz2# %lower/upper corner freq. of second band pass filter Z-comp. [Hz]
2 15 #bph1# %lower/upper corner freq. of first band pass filter H-comp. [Hz]
2 20 #bph2# %lower/upper corner freq. of second band pass filter H-comp. [Hz]
#special settings for calculating CF#
%!!Edit the following only if you know what you are doing!!%
#Z-component#
HOS #algoP# %choose algorithm for P-onset determination (HOS, ARZ, or AR3)
7.0 #tlta# %for HOS-/AR-AIC-picker, length of LTA window [s]
4 #hosorder# %for HOS-picker, order of Higher Order Statistics
2 #Parorder# %for AR-picker, order of AR process of Z-component
1.2 #tdet1z# %for AR-picker, length of AR determination window [s] for Z-component, 1st pick
0.4 #tpred1z# %for AR-picker, length of AR prediction window [s] for Z-component, 1st pick
0.6 #tdet2z# %for AR-picker, length of AR determination window [s] for Z-component, 2nd pick
0.2 #tpred2z# %for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick
0.001 #addnoise# %add noise to seismogram for stable AR prediction
3 0.1 0.5 0.5 #tsnrz# %for HOS/AR, window lengths for SNR- and slope estimation [tnoise,tsafety,tsignal,tslope] [s]
3.0 #pickwinP# %for initial AIC pick, length of P-pick window [s]
6.0 #Precalcwin# %for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)
0.2 #aictsmooth# %for HOS/AR, take average of samples for smoothing of AIC-function [s]
0.1 #tsmoothP# %for HOS/AR, take average of samples for smoothing CF [s]
0.001 #ausP# %for HOS/AR, artificial uplift of samples (aus) of CF (P)
1.3 #nfacP# %for HOS/AR, noise factor for noise level determination (P)
#H-components#
ARH #algoS# %choose algorithm for S-onset determination (ARH or AR3)
0.8 #tdet1h# %for HOS/AR, length of AR-determination window [s], H-components, 1st pick
0.4 #tpred1h# %for HOS/AR, length of AR-prediction window [s], H-components, 1st pick
0.6 #tdet2h# %for HOS/AR, length of AR-determination window [s], H-components, 2nd pick
0.3 #tpred2h# %for HOS/AR, length of AR-prediction window [s], H-components, 2nd pick
4 #Sarorder# %for AR-picker, order of AR process of H-components
5.0 #Srecalcwin# %for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)
3.0 #pickwinS# %for initial AIC pick, length of S-pick window [s]
2 0.2 1.5 0.5 #tsnrh# %for ARH/AR3, window lengths for SNR- and slope estimation [tnoise,tsafety,tsignal,tslope] [s]
0.5 #aictsmoothS# %for AIC-picker, take average of samples for smoothing of AIC-function [s]
0.7 #tsmoothS# %for AR-picker, take average of samples for smoothing CF [s] (S)
0.9 #ausS# %for HOS/AR, artificial uplift of samples (aus) of CF (S)
1.5 #nfacS# %for AR-picker, noise factor for noise level determination (S)
%first-motion picker%
1 #minfmweight# %minimum required P weight for first-motion determination
2 #minFMSNR# %minimum required SNR for first-motion determination
0.2 #fmpickwin# %pick window around P onset for calculating zero crossings
%quality assessment%
#initial AIC onset#
0.01 0.02 0.04 0.08 #timeerrorsP# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for P
0.04 0.08 0.16 0.32 #timeerrorsS# %discrete time errors [s] corresponding to picking weights [0 1 2 3] for S
4 #minAICPslope# %below this slope [counts/s] the initial P pick is rejected
1.2 #minAICPSNR# %below this SNR the initial P pick is rejected
2 #minAICSslope# %below this slope [counts/s] the initial S pick is rejected
1.5 #minAICSSNR# %below this SNR the initial S pick is rejected
#check duration of signal using envelope function#
3 #minsiglength# %minimum required length of signal [s]
1.0 #noisefactor# %noiselevel*noisefactor=threshold
40 #minpercent# %required percentage of samples higher than threshold
#check for spuriously picked S-onsets#
2.0 #zfac# %P-amplitude must exceed at least zfac times RMS-S amplitude
#check statistics of P onsets#
2.5 #mdttolerance# %maximum allowed deviation of P picks from median [s]
#wadati check#
1.0 #wdttolerance# %maximum allowed deviation from Wadati-diagram
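
Each non-comment line of this file follows the pattern "value #key# %description". PyLoT reads it via AutoPickParameter/getPatternLine (imported in the hunks below); their exact behaviour is not shown here, so the following is only an illustrative sketch of how such a line could be parsed, using a hypothetical helper name:

def read_parameter(filename, key):
    # return the value field of the line tagged '#key#', ignoring the '%' comment
    pattern = '#{0}#'.format(key)
    with open(filename) as fid:
        for line in fid:
            if pattern in line:
                return line.split(pattern)[0].strip()
    return None

# read_parameter('inputs/pylot_sample.in', 'vp')  -> '3530'
# multi-valued entries such as 'Qp' come back as one string ('300 0.8') and
# have to be split by the caller, cf. the 'Q, A = qp' change below
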

Changed file: pylot/core/analysis/magnitude.py

@@ -14,6 +14,7 @@ from pylot.core.util.utils import getPatternLine
from scipy.optimize import curve_fit
from scipy import integrate, signal
from pylot.core.io.data import Data
from pylot.core.util.dataprocessing import restitute_data
class Magnitude(object):
@@ -304,7 +305,7 @@ def calcMoMw(wfstream, w0, rho, vp, delta, inv):
return Mo, Mw
def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp, iplot):
def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, qp, iplot):
'''
Subfunction to calculate the source spectrum and to derive from that the plateau
(usually called omega0) and the corner frequency assuming Aki's omega-square
@@ -342,11 +343,8 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
print ("Calculating source spectrum ....")
# get Q value
qu = Qp.split('f**')
# constant Q
Q = int(qu[0])
# A, i.e. power of frequency
A = float(qu[1])
Q, A = qp
delta = delta * 1000 # hypocentral distance in [m]
fc = None
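
The '#Qp#' entry of the sample input file defines a pair (Q0, a) for a frequency-dependent quality factor Q(f) = Q0 * f^a; the removed lines recovered the two numbers by splitting a string on 'f**', while the new code simply unpacks the pair handed over by the GUI (self.inputs.get('Qp')). A minimal sketch of that quality factor and the textbook exp(pi*f*t/Q(f)) attenuation correction it typically feeds, not necessarily the exact expression inside calcsourcespec:

import numpy as np

Q0, a = 300.0, 0.8  # the '#Qp#' pair from pylot_sample.in

def q_of_f(freqs):
    # frequency-dependent quality factor Q(f) = Q0 * f^a
    return Q0 * freqs ** a

def correct_attenuation(spectrum, freqs, traveltime):
    # undo anelastic attenuation along the path (freqs assumed > 0)
    return spectrum * np.exp(np.pi * freqs * traveltime / q_of_f(freqs))
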
@@ -354,7 +352,7 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
data = Data()
wf_copy = wfstream.copy()
[cordat, restflag] = data.restituteWFData(inventory, wf_copy)
[cordat, restflag] = restitute_data(wf_copy, inventory)
if restflag == 1:
zdat = cordat.select(component="Z")
if len(zdat) == 0:

Changed file: pylot/core/io/data.py

@@ -2,15 +2,13 @@
# -*- coding: utf-8 -*-
import copy
import glob
import os
from obspy import read_events, read_inventory
from obspy import read_events
from obspy.core import read, Stream, UTCDateTime
from obspy.core.event import Event
from obspy.io.xseed import Parser
from pylot.core.io.phases import readPILOTEvent, picks_from_picksdict, \
picksdict_from_pilot
picksdict_from_pilot, merge_picks
from pylot.core.util.errors import FormatError, OverwriteError
from pylot.core.util.utils import fnConstructor, getGlobalTimes
@@ -269,139 +267,6 @@ class Data(object):
"""
self.get_evt_data().picks = []
def restituteWFData(self, invdlpath, streams=None):
"""
:param invdlpath:
:param streams:
:return:
"""
restflag = 0
if streams is None:
st_raw = self.getWFData()
st = st_raw.copy()
else:
st = streams
for tr in st:
# remove underscores
if len(tr.stats.station) > 3 and tr.stats.station[3] == '_':
tr.stats.station = tr.stats.station[0:3]
dlp = '%s/*.dless' % invdlpath
invp = '%s/*.xml' % invdlpath
respp = '%s/*.resp' % invdlpath
dlfile = glob.glob(dlp)
invfile = glob.glob(invp)
respfile = glob.glob(respp)
# check for dataless-SEED file
if len(dlfile) >= 1:
print("Found dataless-SEED file(s)!")
print("Reading meta data information ...")
for j in range(len(dlfile)):
print("Found dataless-SEED file %s" % dlfile[j])
parser = Parser('%s' % dlfile[j])
for i in range(len(st)):
# check, whether this trace has already been corrected
try:
st[i].stats.processing
except:
try:
print(
"Correcting %s, %s for instrument response "
"..." % (st[i].stats.station,
st[i].stats.channel))
# get corner frequencies for pre-filtering
fny = st[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
st[i].simulate(pre_filt=prefilt,
seedresp={'filename': parser,
'date': st[
i].stats.starttime,
'units': "VEL"})
restflag = 1
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
# check for inventory-xml file
if len(invfile) >= 1:
print("Found inventory-xml file(s)!")
print("Reading meta data information ...")
for j in range(len(invfile)):
print("Found inventory-xml file %s" % invfile[j])
inv = read_inventory(invfile[j], format="STATIONXML")
for i in range(len(st)):
# check, whether this trace has already been corrected
try:
st[i].stats.processing
except:
try:
print("Correcting %s, %s for instrument response "
"..." % (st[i].stats.station,
st[i].stats.channel))
# get corner frequencies for pre-filtering
fny = st[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
st[i].attach_response(inv)
st[i].remove_response(output='VEL',
pre_filt=prefilt)
restflag = 1
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
# check for RESP-file
if len(respfile) >= 1:
print("Found response file(s)!")
print("Reading meta data information ...")
for j in range(len(respfile)):
print("Found RESP-file %s" % respfile[j])
for i in range(len(st)):
# check, whether this trace has already been corrected
try:
st[i].stats.processing
except:
try:
print("Correcting %s, %s for instrument response "
"..." % (st[i].stats.station,
st[i].stats.channel))
# get corner frequencies for pre-filtering
fny = st[i].stats.sampling_rate / 2
fc21 = fny - (fny * 0.05)
fc22 = fny - (fny * 0.02)
prefilt = [0.5, 0.9, fc21, fc22]
# instrument correction
seedresp = {'filename': respfile[0],
'date': st[0].stats.starttime,
'units': "VEL"}
st[i].simulate(paz_remove=None, pre_filt=prefilt,
seedresp=seedresp)
restflag = 1
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
else:
print("Trace has already been corrected!")
if len(respfile) < 1 and len(invfile) < 1 and len(dlfile) < 1:
print("No dataless-SEED file,inventory-xml file nor RESP-file "
"found!")
print("Go on processing data without source parameter "
"determination!")
return st, restflag
def get_evt_data(self):
"""

Changed file: pylot/core/io/phases.py

@@ -527,3 +527,26 @@ def writephases(arrivals, fformat, filename):
Ao))
fid.close()
def merge_picks(event, picks):
"""
takes an event object and a list of picks, searches for matching
entries by comparing station name and phase_hint, and overwrites the
time and time_errors values of the event's picks with those from the
given picks without changing the resource identifiers
:param event: `obspy.core.event.Event` object (e.g. from NLLoc output)
:param picks: list of `obspy.core.event.Pick` objects containing the
original time and time_errors values
:return: merged `obspy.core.event.Event` object
"""
for pick in picks:
time = pick.time
err = pick.time_errors
phase = pick.phase_hint
station = pick.waveform_id.station_code
for p in event.picks:
if p.waveform_id.station_code == station and p.phase_hint == phase:
p.time, p.time_errors = time, err
del time, err, phase, station
return event
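
A usage sketch for merge_picks (not part of the commit; file names are placeholders): after relocating with NLLoc, the relocated event's picks get back the original pick times and errors while keeping NLLoc's resource identifiers.

from obspy import read_events
from pylot.core.io.phases import merge_picks

nlloc_event = read_events('loc/e0019.048.13_AUTOLOC_nlloc.hyp')[0]  # hypothetical NLLoc output
original = read_events('e0019.048.13.xml')[0].picks  # picks carrying the original times/errors
merged = merge_picks(nlloc_event, original)
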

Changed file: pylot/core/pick/autopick.py

@@ -17,6 +17,7 @@ from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.charfuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
getSNR, fmpicker, checkPonsets, wadaticheck
from pylot.core.util.dataprocessing import restitute_data
from pylot.core.util.utils import getPatternLine
from pylot.core.io.data import Data
from pylot.core.analysis.magnitude import WApp
@@ -564,10 +565,10 @@ def autopickstation(wfstream, pickparam, verbose=False):
hdat = edat.copy()
hdat += ndat
h_copy = hdat.copy()
[cordat, restflag] = data.restituteWFData(invdir, h_copy)
[cordat, restflag] = restitute_data(h_copy, invdir)
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
if restflag == 1:
if restflag:
if Sweight < 4:
wapp = WApp(cordat, mpickS, mpickP + sstop, iplot)
else:
@@ -577,6 +578,9 @@ def autopickstation(wfstream, pickparam, verbose=False):
(0.5 * (mpickP + sstop)), iplot)
Ao = wapp.getwapp()
else:
print("Go on processing data without source parameter "
"determination!")
else:
msg = 'Bad initial (AIC) S-pick, skipping this onset!\n' \
@@ -598,7 +602,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
hdat = edat.copy()
hdat += ndat
h_copy = hdat.copy()
[cordat, restflag] = data.restituteWFData(invdir, h_copy)
[cordat, restflag] = restitute_data(h_copy, invdir)
if restflag == 1:
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude

Changed file: pylot/core/util/dataprocessing.py

@@ -3,9 +3,16 @@
import os
import glob
from obspy import UTCDateTime
import sys
import numpy as np
from obspy import UTCDateTime, read_inventory, read
from obspy.io.xseed import Parser
from pylot.core.util.utils import key_for_set_value, find_in_list, \
remove_underscores
def time_from_header(header):
"""
Takes the second line of a .gse file and extracts the date and time from it.
@@ -148,6 +155,129 @@ def evt_head_check(root_dir, out_dir = None):
print(nfiles)
def restitute_data(data, path_to_inventory, unit='VEL', force=False):
"""
takes a data stream and a path_to_inventory and returns the corrected
waveform data stream
:param data: seismic data stream
:param path_to_inventory: path to inventory folder or file
:param unit: unit to correct for (default: 'VEL')
:param force: force restitution for already corrected traces (default:
False)
:return: corrected data stream
"""
restflag = list()
data = remove_underscores(data)
dlfile = list()
invfile = list()
respfile = list()
inv = dict(dless=dlfile, xml=invfile, resp=respfile)
if os.path.isfile(path_to_inventory):
ext = os.path.splitext(path_to_inventory)[1].split('.')[1]
inv[ext] += [path_to_inventory]
else:
for ext in inv.keys():
inv[ext] += glob.glob1(path_to_inventory, '*.{0}'.format(ext))
invtype = key_for_set_value(inv)
if invtype is None:
print("Neither dataless-SEED file,inventory-xml file nor RESP-file "
"found!")
return data, False
# loop over traces
for tr in data:
seed_id = tr.get_id()
# check, whether this trace has already been corrected
if 'processing' in tr.stats.keys() and not force:
print("Trace {0} has already been corrected!".format(seed_id))
continue
stime = tr.stats.starttime
seedresp = None
prefilt = get_prefilt(tr)
if invtype == 'resp':
fresp = find_in_list(inv[invtype], seed_id)
if not fresp:
raise IOError('no response file found '
'for trace {0}'.format(seed_id))
fname = fresp
seedresp = dict(filename=fname,
date=stime,
units=unit)
kwargs = dict(paz_remove=None, pre_filt=prefilt, seedresp=seedresp)
elif invtype == 'dless':
if len(inv[invtype]) > 1:
fname = Parser(find_in_list(inv[invtype], seed_id))
else:
fname = Parser(inv[invtype][0])
seedresp = dict(filename=fname,
date=stime,
units=unit)
kwargs = dict(pre_filt=prefilt, seedresp=seedresp)
elif invtype == 'xml':
invlist = inv[invtype]
if len(invlist) > 1:
finv = find_in_list(invlist, seed_id)
else:
finv = invlist[0]
inventory = read_inventory(finv, format='STATIONXML')
# apply restitution to data
if invtype in ['resp', 'dless']:
tr.simulate(**kwargs)
else:
tr.attach_response(inventory)
tr.remove_response(output=unit,
pre_filt=prefilt)
restflag.append(True)
# check if ALL traces could be restituted; for large datasets it may be
# better to try restitution on smaller subsets of the data (e.g. station
# by station)
if len(restflag) > 0:
restflag = np.all(restflag)
else:
restflag = False
return data, restflag
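
A usage sketch for restitute_data (paths are placeholders, the inventory directory is taken from the sample input file): it returns the corrected stream plus a single boolean indicating whether the traces could be restituted, which is why the autopick.py hunks above can simply test 'if restflag:'.

from obspy import read
from pylot.core.util.dataprocessing import restitute_data

st = read('e0019.048.13/*')  # hypothetical waveform files for one event
st, restflag = restitute_data(st, '/data/Geothermie/Insheim/STAT_INFO')
if restflag:
    # all traces now carry instrument-corrected ground velocity ('VEL')
    pass
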
def get_prefilt(trace, tlow=(0.5, 0.9), thi=(5., 2.), verbosity=0):
"""
takes an `obspy.core.stream.Trace` object, taper parameters tlow and thi, and
returns the pre-filtering corner frequencies for the cosine taper for
further processing
:param trace: seismic data trace
:type trace: `obspy.core.stream.Trace`
:param tlow: tuple or list containing the desired lower corner
frequencies for a cosine taper
:type tlow: tuple or list
:param thi: tuple or list containing the percentage values of the
Nyquist frequency for the desired upper corner frequencies of the cosine
taper
:type thi: tuple or list
:param verbosity: verbosity level
:type verbosity: int
:return: pre-filt cosine taper corner frequencies
:rtype: tuple
..example::
>>> st = read()
>>> get_prefilt(st[0])
(0.5, 0.9, 47.5, 49.0)
"""
if verbosity:
print("Calculating pre-filter values for %s, %s ..." % (
trace.stats.station, trace.stats.channel))
# get corner frequencies for pre-filtering
fny = trace.stats.sampling_rate / 2
fc21 = fny - (fny * thi[0]/100.)
fc22 = fny - (fny * thi[1]/100.)
return (tlow[0], tlow[1], fc21, fc22)
if __name__ == "__main__":
import doctest

Changed file: pylot/core/util/utils.py

@@ -328,6 +328,23 @@ def isIterable(obj):
return False
return True
def key_for_set_value(d):
"""
takes a dictionary and returns the first key whose value evaluates to
True
:param d: dictionary containing values
:type d: dict
:return: key of the first truthy value found; None if no value
evaluates to True
"""
r = None
for k, v in d.items():
if v:
return k
return r
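
A short illustration of key_for_set_value with a dict like the one built in restitute_data above; which key wins when several values are truthy depends on the dict's iteration order.

from pylot.core.util.utils import key_for_set_value

inv = dict(dless=[], xml=['inventory.xml'], resp=[])  # hypothetical file lists
key_for_set_value(inv)  # -> 'xml', the only key with a non-empty value
key_for_set_value(dict(a=0))  # -> None, no value evaluates to True
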
def prepTimeAxis(stime, trace):
'''
takes a starttime and a trace object and returns a valid time axis for
@@ -377,6 +394,20 @@ def find_horizontals(data):
return rval
def remove_underscores(data):
"""
takes an `obspy.core.stream.Stream` object and strips underscores
from the station names
:param data: stream of seismic data
:type data: `obspy.core.stream.Stream`
:return: data stream
"""
for tr in data:
# remove underscores
tr.stats.station = tr.stats.station.strip('_')
return data
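
A small sketch of remove_underscores (not part of the diff): str.strip('_') removes leading and trailing underscores only, so a code like 'ABC_' becomes 'ABC' just as with the removed Data.restituteWFData logic, and longer codes such as 'ABCD_' are now handled as well.

from obspy import read
from pylot.core.util.utils import remove_underscores

st = read()  # obspy's built-in example stream
st[0].stats.station = 'RJB_'  # hypothetical station code with a trailing underscore
st = remove_underscores(st)
print(st[0].stats.station)  # -> 'RJB'
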
def scaleWFData(data, factor=None, components='all'):
"""
produce scaled waveforms from given waveform data and a scaling factor,