[reformat] code reformatting with PyCharm

Marcel Paffrath 2017-08-03 09:41:54 +02:00
parent 4107f0249d
commit 20b31a1c5c
49 changed files with 3255 additions and 3194 deletions
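Most of the hunks below are whitespace, import-ordering and line-wrapping changes of the kind PyCharm's "Reformat Code" and "Optimize Imports" actions produce. A minimal, purely illustrative before/after sketch (not taken from any file in this commit):

    # before: unsorted imports, non-PEP-8 spacing
    import sys
    import os
    def add( x,y ):
        return x+y

    # after PyCharm reformatting: imports alphabetised, two blank lines before
    # top-level definitions, single spaces around operators and after commas
    import os
    import sys


    def add(x, y):
        return x + y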

View File

@ -23,10 +23,11 @@ https://www.iconfinder.com/iconsets/flavour
(http://www.gnu.org/copyleft/lesser.html)
"""
import os
import sys
import platform
import argparse
import os
import platform
import sys
import matplotlib
matplotlib.use('Qt4Agg')
@ -66,10 +67,10 @@ from pylot.core.pick.compare import Comparison
from pylot.core.pick.utils import symmetrize_error
from pylot.core.io.phases import picksdict_from_picks
import pylot.core.loc.nll as nll
from pylot.core.util.defaults import FILTERDEFAULTS, OUTPUTFORMATS, SetChannelComponents, \
from pylot.core.util.defaults import FILTERDEFAULTS, SetChannelComponents, \
readFilterInformation
from pylot.core.util.errors import FormatError, DatastructureError, \
OverwriteError, ProcessingError
OverwriteError
from pylot.core.util.connection import checkurl
from pylot.core.util.dataprocessing import read_metadata, restitute_data
from pylot.core.util.utils import fnConstructor, getLogin, \
@ -81,7 +82,7 @@ from pylot.core.util.widgets import FilterOptionsDialog, NewEventDlg, \
getDataType, ComparisonDialog, TuneAutopicker, PylotParaBox
from pylot.core.util.map_projection import map_projection
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.thread import AutoPickThread, Thread, Worker
from pylot.core.util.thread import Thread, Worker
from pylot.core.util.version import get_git_version as _getVersionString
if sys.version_info.major == 3:
@ -270,7 +271,7 @@ class MainWindow(QMainWindow):
quitIcon = self.style().standardIcon(QStyle.SP_MediaStop)
helpIcon = self.style().standardIcon(QStyle.SP_DialogHelpButton)
newFolderIcon = self.style().standardIcon(QStyle.SP_FileDialogNewFolder)
# create resource icons
newIcon = QIcon()
newIcon.addPixmap(QPixmap(':/icons/newfile.png'))
@ -1149,8 +1150,8 @@ class MainWindow(QMainWindow):
# return False
# export to given path
#self.get_data().exportEvent(fbasename, exform, upperErrors=[uppererrorP[3], uppererrorS[3]])
#try:
# self.get_data().exportEvent(fbasename, exform, upperErrors=[uppererrorP[3], uppererrorS[3]])
# try:
self.get_data().exportEvent(fbasename, exform[0], fcheck=fcheck,
upperErrors=[uppererrorP[3], uppererrorS[3]])
self.get_data().exportEvent(fbasename, exform[1], fcheck=fcheck,
@ -1159,7 +1160,7 @@ class MainWindow(QMainWindow):
# QMessageBox.warning(self, "PyLoT Warning",
# "Could not save event: {}".format(e))
# return
#self.get_data().exportEvent(fbasename, exform[2], upperErrors=[uppererrorP[3], uppererrorS[3]])
# self.get_data().exportEvent(fbasename, exform[2], upperErrors=[uppererrorP[3], uppererrorS[3]])
# all files save (ui clean)
self.update_status('Picks saved as %s, %s, and %s' % (fbasename + exform[0], fbasename + exform[1],
fbasename + exform[2]))
@ -1322,7 +1323,7 @@ class MainWindow(QMainWindow):
if self.tabs.currentIndex() == 2:
self.init_event_table()
self.refreshRefTestButtons()
# only refresh first/second tab when an event was changed.
if self._eventChanged[0] or self._eventChanged[1]:
event = self.get_current_event()
@ -1888,7 +1889,7 @@ class MainWindow(QMainWindow):
args = {'parameter': self._inputs,
'station': 'all',
'fnames': 'None',
'eventid': self.get_current_event_path (),
'eventid': self.get_current_event_path(),
'iplot': 0,
'fig_dict': None,
'locflag': 0}
@ -1900,7 +1901,7 @@ class MainWindow(QMainWindow):
self.addListItem(str(self._inputs))
self.mp_worker.signals.message.connect(self.addListItem)
#self.mp_thread.finished.connect(self.finalizeAutoPick)
# self.mp_thread.finished.connect(self.finalizeAutoPick)
def finalizeAutoPick(self):
self.drawPicks(picktype='auto')
@ -2348,7 +2349,7 @@ class MainWindow(QMainWindow):
if type(item) == QtGui.QTableWidgetItem:
self.event_table.setItem(r_index, c_index, item)
elif type(item) in [QtGui.QWidget, QtGui.QPushButton]:
self.event_table.setCellWidget(r_index, c_index, item)
self.event_table.setCellWidget(r_index, c_index, item)
header = self.event_table.horizontalHeader()
header.setResizeMode(QtGui.QHeaderView.ResizeToContents)
@ -2674,7 +2675,7 @@ class Project(object):
if eventID in str(event.resource_id):
self.remove_event(event)
break
def read_eventfile_info(self, filename, separator=','):
'''
Try to read event information from file (:param:filename) comparing specific event datetimes.

View File

@ -4,35 +4,37 @@
from __future__ import print_function
import argparse
import datetime
import glob
import os
import datetime
from obspy import read_events
from obspy.core.event import ResourceIdentifier
import pylot.core.loc.hyposat as hyposat
import pylot.core.loc.hypo71 as hypo71
import pylot.core.loc.velest as velest
import pylot.core.loc.hypodd as hypodd
import pylot.core.loc.focmec as focmec
import pylot.core.loc.hash as hash
import pylot.core.loc.hypo71 as hypo71
import pylot.core.loc.hypodd as hypodd
import pylot.core.loc.hyposat as hyposat
import pylot.core.loc.nll as nll
#from PySide.QtGui import QWidget, QInputDialog
import pylot.core.loc.velest as velest
from obspy import read_events
from obspy.core.event import ResourceIdentifier
# from PySide.QtGui import QWidget, QInputDialog
from pylot.core.analysis.magnitude import MomentMagnitude, LocalMagnitude
from pylot.core.io.data import Data
from pylot.core.io.inputs import PylotParameter
from pylot.core.pick.autopick import autopickevent, iteratepicker
from pylot.core.util.dataprocessing import restitute_data, read_metadata, \
remove_underscores
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.version import get_git_version as _getVersionString
from pylot.core.util.event import Event
from pylot.core.util.utils import real_None
from pylot.core.util.defaults import SEPARATOR
from pylot.core.util.event import Event
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.utils import real_None
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, eventid=None, savepath=None, station='all', iplot=0):
def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, eventid=None, savepath=None, station='all',
iplot=0):
"""
Determine phase onsets automatically utilizing the automatic picking
algorithms by Kueperkoch et al. 2010/2012.
@ -108,7 +110,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
'dbase': parameter.get('database')}
exf = ['root', 'dpath', 'dbase']
if parameter['eventID'] is not '*' and fnames == 'None':
dsfields['eventID'] = parameter['eventID']
exf.append('eventID')
@ -176,7 +178,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
events[index] = event
for event in events:
pylot_event = Event(event) #event should be path to event directory
pylot_event = Event(event) # event should be path to event directory
data.setEvtData(pylot_event)
if fnames == 'None':
data.setWFData(glob.glob(os.path.join(datapath, event, '*')))
@ -196,10 +198,10 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
parameter.setParam(eventID=eventID)
else:
data.setWFData(fnames)
event = events[0]
#now = datetime.datetime.now()
#evID = '%d%02d%02d%02d%02d' % (now.year,
# now = datetime.datetime.now()
# evID = '%d%02d%02d%02d%02d' % (now.year,
# now.month,
# now.day,
# now.hour,
@ -210,13 +212,13 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
wfdat = wfdat.select(station=station)
if not wfdat:
print('Could not find station {}. STOP!'.format(station))
return
return
wfdat = remove_underscores(wfdat)
metadata = read_metadata(parameter.get('invdir'))
metadata = read_metadata(parameter.get('invdir'))
print("Restitute data ...")
corr_dat = restitute_data(wfdat.copy(), *metadata)
if not corr_dat and locflag:
locflag = 2
locflag = 2
print('Working on event %s. Stations: %s' % (event, station))
print(wfdat)
##########################################################
@ -277,12 +279,12 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
WAscaling = parameter.get('WAscaling')
magscaling = parameter.get('magscaling')
local_mag = LocalMagnitude(corr_dat, evt,
parameter.get('sstop'),
parameter.get('sstop'),
WAscaling, True, iplot)
for station, amplitude in local_mag.amplitudes.items():
picks[station]['S']['Ao'] = amplitude.generic_amplitude
print("Local station magnitudes scaled with:")
print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
WAscaling[1],
WAscaling[2]))
evt = local_mag.updated_event(magscaling)
@ -310,7 +312,8 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
if input_dict:
if 'fig_dict' in input_dict:
fig_dict = input_dict['fig_dict']
picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter, fig_dict=fig_dict)
picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter,
fig_dict=fig_dict)
else:
picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter)
# write phases to NLLoc-phase file
@ -349,12 +352,12 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
WAscaling = parameter.get('WAscaling')
magscaling = parameter.get('magscaling')
local_mag = LocalMagnitude(corr_dat, evt,
parameter.get('sstop'),
parameter.get('sstop'),
WAscaling, True, iplot)
for station, amplitude in local_mag.amplitudes.items():
picks[station]['S']['Ao'] = amplitude.generic_amplitude
print("Local station magnitudes scaled with:")
print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
print("log(Ao) + %f * log(r) + %f * r + %f" % (WAscaling[0],
WAscaling[1],
WAscaling[2]))
evt = local_mag.updated_event(magscaling)
@ -420,16 +423,16 @@ if __name__ == "__main__":
autoregressive prediction and AIC followed by locating the seismic events using
NLLoc''')
#parser.add_argument('-d', '-D', '--input_dict', type=str,
# parser.add_argument('-d', '-D', '--input_dict', type=str,
# action='store',
# help='''optional, dictionary containing processing parameters''')
#parser.add_argument('-p', '-P', '--parameter', type=str,
# parser.add_argument('-p', '-P', '--parameter', type=str,
# action='store',
# help='''parameter file, default=None''')
parser.add_argument('-i', '-I', '--inputfile', type=str,
action='store',
help='''full path to the file containing the input
parameters for autoPyLoT''')
parameters for autoPyLoT''')
parser.add_argument('-f', '-F', '--fnames', type=str,
action='store',
help='''optional, list of data file names''')
@ -439,11 +442,11 @@ if __name__ == "__main__":
parser.add_argument('-s', '-S', '--spath', type=str,
action='store',
help='''optional, save path for autoPyLoT output''')
#parser.add_argument('-v', '-V', '--version', action='version',
# parser.add_argument('-v', '-V', '--version', action='version',
# version='autoPyLoT ' + __version__,
# help='show version information and exit')
cla = parser.parse_args()
picks = autoPyLoT(inputfile=str(cla.inputfile), fnames=str(cla.fnames),
eventid=str(cla.eventid), savepath=str(cla.spath))
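Given the argument parser above, a sketch of how autoPyLoT can be driven; only the keyword names (inputfile, fnames, eventid, savepath) and the flags come from this diff, while the paths and the event ID are placeholders:

    # shell (placeholder paths):
    #   python autoPyLoT.py -i /path/to/autoPyLoT.in -e e0001.123.16 -s /path/to/output

    # programmatic call, mirroring the __main__ block above
    from autoPyLoT import autoPyLoT   # assumes autoPyLoT.py is importable from the working directory

    picks = autoPyLoT(inputfile='/path/to/autoPyLoT.in',  # parameter input file
                      fnames='None',                      # 'None' -> read waveforms from the event directory
                      eventid='e0001.123.16',             # single event ID (placeholder)
                      savepath='None')                    # 'None' -> default output location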

View File

@ -1,17 +1,19 @@
<html><head><title>PyLoT - the Python picking and Localisation Tool</title></head>
<html>
<head><title>PyLoT - the Python picking and Localisation Tool</title></head>
<body>
<p><b>PyLoT</b> is a program which is capable of picking seismic phases,
exporting these as numerous standard phase format and localize the corresponding
seismic event with external software as, e.g.:</p>
exporting these as numerous standard phase format and localize the corresponding
seismic event with external software as, e.g.:</p>
<ul type="circle">
<li><a href="http://alomax.free.fr/nlloc/index.html">NonLinLoc</a></li>
<li>HypoInvers</li>
<li>HypoSat</li>
<li>whatever you want ...</li>
<li><a href="http://alomax.free.fr/nlloc/index.html">NonLinLoc</a></li>
<li>HypoInvers</li>
<li>HypoSat</li>
<li>whatever you want ...</li>
</ul>
<p>Read more on the
<a href="https://ariadne.geophysik.rub.de/trac/PyLoT/wiki/">PyLoT WikiPage</a>.</p>
<a href="https://ariadne.geophysik.rub.de/trac/PyLoT/wiki/">PyLoT WikiPage</a>.</p>
<p>Bug reports are very much appreciated and can also be delivered on our
<a href="https://ariadne.geophysik.rub.de/trac/PyLoT">PyLoT TracPage</a> after
successful registration.</p>
</body></html>
<a href="https://ariadne.geophysik.rub.de/trac/PyLoT">PyLoT TracPage</a> after
successful registration.</p>
</body>
</html>

View File

@ -158,12 +158,12 @@ def buildPyLoT(verbosity=None):
def installPyLoT(verbosity=None):
files_to_copy = {'autoPyLoT_local.in':['~', '.pylot'],
'autoPyLoT_regional.in':['~', '.pylot']}
files_to_copy = {'autoPyLoT_local.in': ['~', '.pylot'],
'autoPyLoT_regional.in': ['~', '.pylot']}
if verbosity > 0:
print ('starting installation of PyLoT ...')
print('starting installation of PyLoT ...')
if verbosity > 1:
print ('copying input files into destination folder ...')
print('copying input files into destination folder ...')
ans = input('please specify scope of interest '
'([0]=local, 1=regional) :') or 0
if not isinstance(ans, int):
@ -182,7 +182,7 @@ def installPyLoT(verbosity=None):
assert not os.path.isabs(srcfile), 'source files seem to be ' \
'corrupted ...'
if verbosity > 1:
print ('copying file {file} to folder {dest}'.format(file=file, dest=destination))
print('copying file {file} to folder {dest}'.format(file=file, dest=destination))
shutil.copyfile(srcfile, destination)
if link_file:
if verbosity:
@ -190,8 +190,6 @@ def installPyLoT(verbosity=None):
os.symlink(destination, link_dest)
def cleanUp(verbosity=None):
if verbosity >= 1:
print('cleaning up build files...')
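installPyLoT above maps each input file to the path components of its destination below the user's home directory; a minimal sketch of how such a mapping is typically expanded (the expansion logic here is illustrative, not copied from the diff):

    import os

    files_to_copy = {'autoPyLoT_local.in': ['~', '.pylot'],
                     'autoPyLoT_regional.in': ['~', '.pylot']}

    for fname, dest_parts in files_to_copy.items():
        # '~/.pylot' expands to e.g. /home/<user>/.pylot
        dest_dir = os.path.expanduser(os.path.join(*dest_parts))
        destination = os.path.join(dest_dir, fname)
        print('would copy {} to {}'.format(fname, destination))
        # shutil.copyfile(fname, destination)   # the actual copy, assuming fname exists locally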

View File

@ -6,27 +6,27 @@ Revised/extended summer 2017.
:author: Ludger Küperkoch / MAGS2 EP3 working group
"""
import os
import matplotlib.pyplot as plt
import numpy as np
import obspy.core.event as ope
from obspy.geodetics import degrees2kilometers
from scipy import integrate, signal
from scipy.optimize import curve_fit
from pylot.core.pick.utils import getsignalwin, crossings_nonzero_all, \
select_for_phase
from pylot.core.util.utils import common_range, fit_curve
from scipy import integrate, signal
from scipy.optimize import curve_fit
def richter_magnitude_scaling(delta):
distance = np.array([0, 10, 20, 25, 30, 35,40, 45, 50, 60, 70, 75, 85, 90, 100, 110,
distance = np.array([0, 10, 20, 25, 30, 35, 40, 45, 50, 60, 70, 75, 85, 90, 100, 110,
120, 130, 140, 150, 160, 170, 180, 190, 200, 210, 230, 240, 250,
260, 270, 280, 290, 300, 310, 320, 330, 340, 350, 360, 370, 380,
390, 400, 430, 470, 510, 560, 600, 700, 800, 900, 1000])
richter_scaling = np.array([1.4, 1.5, 1.7, 1.9, 2.1, 2.3, 2.4, 2.5, 2.6, 2.8, 2.8, 2.9,
2.9, 3.0, 3.1, 3.1, 3.2, 3.2, 3.3, 3.3, 3.4, 3.4, 3.5, 3.5,
3.6, 3.7, 3.7, 3.8, 3.8, 3.9, 3.9, 4.0, 4.0, 4.1, 4.2, 4.2,
4.2, 4.2, 4.3, 4.3, 4.3, 4.4, 4.4, 4.5, 4.6, 4.7, 4.8, 4.9,
5.1, 5.2, 5.4, 5.5, 5.7])
richter_scaling = np.array([1.4, 1.5, 1.7, 1.9, 2.1, 2.3, 2.4, 2.5, 2.6, 2.8, 2.8, 2.9,
2.9, 3.0, 3.1, 3.1, 3.2, 3.2, 3.3, 3.3, 3.4, 3.4, 3.5, 3.5,
3.6, 3.7, 3.7, 3.8, 3.8, 3.9, 3.9, 4.0, 4.0, 4.1, 4.2, 4.2,
4.2, 4.2, 4.3, 4.3, 4.3, 4.4, 4.4, 4.5, 4.6, 4.7, 4.8, 4.9,
5.1, 5.2, 5.4, 5.5, 5.7])
# prepare spline interpolation to calculate return value
func, params = fit_curve(distance, richter_scaling)
return func(delta, params)
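richter_magnitude_scaling above interpolates the tabulated distance/correction pairs with fit_curve and evaluates the result at the requested distance. A usage sketch; the import path reflects the module this diff belongs to, and the exact return value depends on the interpolation:

    from pylot.core.analysis.magnitude import richter_magnitude_scaling

    # 50 km is a node of the distance table (tabulated correction 2.6), so the
    # interpolated value should lie close to 2.6
    correction = richter_magnitude_scaling(50.)

    # original Richter magnitude for a Wood-Anderson amplitude a0, as used further
    # below when wascaling is [0.0, 0.0, 0.0]:
    #     ML = np.log10(a0) + richter_magnitude_scaling(delta)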
@ -47,7 +47,7 @@ class Magnitude(object):
def __str__(self):
print(
'number of stations used: {0}\n'.format(len(self.magnitudes.values())))
'number of stations used: {0}\n'.format(len(self.magnitudes.values())))
print('\tstation\tmagnitude')
for s, m in self.magnitudes.items(): print('\t{0}\t{1}'.format(s, m))
@ -126,8 +126,8 @@ class Magnitude(object):
# scaling necessary
print("Scaling network magnitude ...")
mag = ope.Magnitude(
mag=np.median([M.mag for M in self.magnitudes.values()]) *\
magscaling[0] + magscaling[1],
mag=np.median([M.mag for M in self.magnitudes.values()]) * \
magscaling[0] + magscaling[1],
magnitude_type=self.type,
origin_id=self.origin_id,
station_count=len(self.magnitudes),
@ -215,7 +215,7 @@ class LocalMagnitude(Magnitude):
th = np.arange(0, len(sqH) * dt, dt)
# get maximum peak within pick window
iwin = getsignalwin(th, t0 - stime, self.calc_win)
ii = min([iwin[len(iwin)-1], len(th)])
ii = min([iwin[len(iwin) - 1], len(th)])
iwin = iwin[0:ii]
wapp = np.max(sqH[iwin])
if self.verbose:
@ -250,8 +250,8 @@ class LocalMagnitude(Magnitude):
if not wf:
if self.verbose:
print(
'WARNING: no waveform data found for station {0}'.format(
station))
'WARNING: no waveform data found for station {0}'.format(
station))
continue
delta = degrees2kilometers(a.distance)
onset = pick.time
@ -270,13 +270,14 @@ class LocalMagnitude(Magnitude):
if str(self.wascaling) == '[0.0, 0.0, 0.0]':
print("Calculating original Richter magnitude ...")
magnitude = ope.StationMagnitude(mag=np.log10(a0) \
+ richter_magnitude_scaling(delta))
+ richter_magnitude_scaling(delta))
else:
print("Calculating scaled local magnitude ...")
a0 = a0 * 1e03 # mm to nm (see Havskov & Ottemöller, 2010)
a0 = a0 * 1e03 # mm to nm (see Havskov & Ottemöller, 2010)
magnitude = ope.StationMagnitude(mag=np.log10(a0) \
+ self.wascaling[0] * np.log10(delta) + self.wascaling[1]
* delta + self.wascaling[2])
+ self.wascaling[0] * np.log10(delta) + self.wascaling[1]
* delta + self.wascaling[
2])
magnitude.origin_id = self.origin_id
magnitude.waveform_id = pick.waveform_id
magnitude.amplitude_id = amplitude.resource_id
@ -397,8 +398,8 @@ def calcMoMw(wfstream, w0, rho, vp, delta, verbosity=False):
if verbosity:
print(
"calcMoMw: Calculating seismic moment Mo and moment magnitude Mw for station {0} ...".format(
tr.stats.station))
"calcMoMw: Calculating seismic moment Mo and moment magnitude Mw for station {0} ...".format(
tr.stats.station))
# additional common parameters for calculating Mo
rP = 2 / np.sqrt(
@ -412,8 +413,8 @@ def calcMoMw(wfstream, w0, rho, vp, delta, verbosity=False):
if verbosity:
print(
"calcMoMw: Calculated seismic moment Mo = {0} Nm => Mw = {1:3.1f} ".format(
Mo, Mw))
"calcMoMw: Calculated seismic moment Mo = {0} Nm => Mw = {1:3.1f} ".format(
Mo, Mw))
return Mo, Mw
@ -452,7 +453,7 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
:type: integer
'''
if verbosity:
print ("Calculating source spectrum for station %s ...." % wfstream[0].stats.station)
print("Calculating source spectrum for station %s ...." % wfstream[0].stats.station)
# get Q value
Q, A = qp
@ -509,9 +510,9 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
zc = crossings_nonzero_all(wfzc)
if np.size(zc) == 0 or len(zc) <= 3:
if verbosity:
print ("calcsourcespec: Something is wrong with the waveform, "
"no zero crossings derived!\n")
print ("No calculation of source spectrum possible!")
print("calcsourcespec: Something is wrong with the waveform, "
"no zero crossings derived!\n")
print("No calculation of source spectrum possible!")
plotflag = 0
else:
plotflag = 1
@ -558,22 +559,22 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
[optspecfit, _] = curve_fit(synthsourcespec, F, YYcor, [w0in, Fcin])
w0 = optspecfit[0]
fc = optspecfit[1]
#w01 = optspecfit[0]
#fc1 = optspecfit[1]
# w01 = optspecfit[0]
# fc1 = optspecfit[1]
if verbosity:
print ("calcsourcespec: Determined w0-value: %e m/Hz, \n"
"calcsourcespec: Determined corner frequency: %f Hz" % (w0, fc))
print("calcsourcespec: Determined w0-value: %e m/Hz, \n"
"calcsourcespec: Determined corner frequency: %f Hz" % (w0, fc))
# use of conventional fitting
# [w02, fc2] = fitSourceModel(F, YYcor, Fcin, iplot, verbosity)
# use of conventional fitting
# [w02, fc2] = fitSourceModel(F, YYcor, Fcin, iplot, verbosity)
# get w0 and fc as median of both
# source spectrum fits
#w0 = np.median([w01, w02])
#fc = np.median([fc1, fc2])
#if verbosity:
# print("calcsourcespec: Using w0-value = %e m/Hz and fc = %f Hz" % (
# w0, fc))
# get w0 and fc as median of both
# source spectrum fits
# w0 = np.median([w01, w02])
# fc = np.median([fc1, fc2])
# if verbosity:
# print("calcsourcespec: Using w0-value = %e m/Hz and fc = %f Hz" % (
# w0, fc))
if iplot > 1:
f1 = plt.figure()
@ -659,9 +660,9 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):
# left side of initial corner frequency
fcstopl = max(f[0], fc0 - max(1, fc0 / 2))
il = np.where(f <= fcstopl)
il = il[0][np.size(il) - 1]
il = il[0][np.size(il) - 1]
# right side of initial corner frequency
fcstopr = min(fc0 + (fc0 / 2), f[len(f) - 1])
fcstopr = min(fc0 + (fc0 / 2), f[len(f) - 1])
ir = np.where(f >= fcstopr)
# check, if fcstopr is available
if np.size(ir) == 0:
@ -672,16 +673,16 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):
# vary corner frequency around initial point
print("fitSourceModel: Varying corner frequency "
"around initial corner frequency ...")
"around initial corner frequency ...")
# check difference of il and ir in order to
# keep calculation time acceptable
idiff = ir - il
if idiff > 10000:
increment = 100
increment = 100
elif idiff <= 20:
increment = 1
increment = 1
else:
increment = 10
increment = 10
for i in range(il, ir, increment):
FC = f[i]
@ -707,10 +708,10 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):
w0 = max(S)
if verbosity:
print(
"fitSourceModel: best fc: {0} Hz, best w0: {1} m/Hz".format(fc, w0))
"fitSourceModel: best fc: {0} Hz, best w0: {1} m/Hz".format(fc, w0))
if iplot > 1:
plt.figure()#iplot)
plt.figure() # iplot)
plt.loglog(f, S, 'k')
plt.loglog([f[0], fc], [w0, w0], 'g')
plt.loglog([fc, fc], [w0 / 100, w0], 'g')
@ -719,7 +720,7 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):
plt.xlabel('Frequency [Hz]')
plt.ylabel('Amplitude [m/Hz]')
plt.grid()
plt.figure()#iplot + 1)
plt.figure() # iplot + 1)
plt.subplot(311)
plt.plot(f[il:ir], STD, '*')
plt.title('Common Standard Deviations')

View File

@ -12,6 +12,7 @@ from obspy.core import Stream
from pylot.core.pick.utils import getsignalwin
from scipy.optimize import curve_fit
class Magnitude(object):
'''
Superclass for calculating Wood-Anderson peak-to-peak
@ -45,7 +46,6 @@ class Magnitude(object):
self.calcwapp()
self.calcsourcespec()
def getwfstream(self):
return self.wfstream
@ -85,6 +85,7 @@ class Magnitude(object):
def calcsourcespec(self):
self.sourcespek = None
class WApp(Magnitude):
'''
Method to derive peak-to-peak amplitude as seen on a Wood-Anderson-
@ -92,8 +93,8 @@ class WApp(Magnitude):
'''
def calcwapp(self):
print ("Getting Wood-Anderson peak-to-peak amplitude ...")
print ("Simulating Wood-Anderson seismograph ...")
print("Getting Wood-Anderson peak-to-peak amplitude ...")
print("Simulating Wood-Anderson seismograph ...")
self.wapp = None
stream = self.getwfstream()
@ -118,7 +119,7 @@ class WApp(Magnitude):
# get maximum peak within pick window
iwin = getsignalwin(th, self.getTo(), self.getpwin())
self.wapp = np.max(sqH[iwin])
print ("Determined Wood-Anderson peak-to-peak amplitude: %f mm") % self.wapp
print("Determined Wood-Anderson peak-to-peak amplitude: %f mm") % self.wapp
if self.getiplot() > 1:
stream.plot()
@ -143,10 +144,10 @@ class DCfc(Magnitude):
'''
def calcsourcespec(self):
print ("Calculating source spectrum ....")
print("Calculating source spectrum ....")
self.w0 = None # DC-value
self.fc = None # corner frequency
self.w0 = None # DC-value
self.fc = None # corner frequency
stream = self.getwfstream()
tr = stream[0]
@ -159,14 +160,14 @@ class DCfc(Magnitude):
# fft
fny = tr.stats.sampling_rate / 2
l = len(xdat) / tr.stats.sampling_rate
n = tr.stats.sampling_rate * l # number of fft bins after Bath
n = tr.stats.sampling_rate * l # number of fft bins after Bath
# find next power of 2 of data length
m = pow(2, np.ceil(np.log(len(xdat)) / np.log(2)))
N = int(np.power(m, 2))
y = tr.stats.delta * np.fft.fft(xdat, N)
Y = abs(y[: N/2])
Y = abs(y[: N / 2])
L = (N - 1) / tr.stats.sampling_rate
f = np.arange(0, fny, 1/L)
f = np.arange(0, fny, 1 / L)
# remove zero-frequency and frequencies above
# corner frequency of seismometer (assumed
@ -185,20 +186,18 @@ class DCfc(Magnitude):
[optspecfit, pcov] = curve_fit(synthsourcespec, F, YY.real, [DCin, Fcin])
self.w0 = optspecfit[0]
self.fc = optspecfit[1]
print ("DCfc: Determined DC-value: %e m/Hz, \n" \
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
#if self.getiplot() > 1:
iplot=2
if iplot > 1:
print ("DCfc: Determined DC-value: %e m/Hz, \n"
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
print("DCfc: Determined DC-value: %e m/Hz, \n" \
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
# if self.getiplot() > 1:
iplot = 2
if iplot > 1:
print("DCfc: Determined DC-value: %e m/Hz, \n"
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
if self.getiplot() > 1:
f1 = plt.figure()
plt.subplot(2,1,1)
plt.subplot(2, 1, 1)
# show displacement in mm
plt.plot(t, np.multiply(tr, 1000), 'k')
plt.plot(t[iwin], np.multiply(xdat, 1000), 'g')
@ -206,12 +205,12 @@ class DCfc(Magnitude):
plt.xlabel('Time since %s' % tr.stats.starttime)
plt.ylabel('Displacement [mm]')
plt.subplot(2,1,2)
plt.subplot(2, 1, 2)
plt.loglog(f, Y.real, 'k')
plt.loglog(F, YY.real)
plt.loglog(F, fit, 'g')
plt.title('Source Spectrum from P Pulse, DC=%e m/Hz, fc=%4.1f Hz' \
% (self.w0, self.fc))
% (self.w0, self.fc))
plt.xlabel('Frequency [Hz]')
plt.ylabel('Amplitude [m/Hz]')
plt.grid()
@ -235,8 +234,7 @@ def synthsourcespec(f, omega0, fcorner):
:type: float
'''
#ssp = omega0 / (pow(2, (1 + f / fcorner)))
# ssp = omega0 / (pow(2, (1 + f / fcorner)))
ssp = omega0 / (1 + pow(2, (f / fcorner)))
return ssp
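synthsourcespec above evaluates the synthetic displacement source spectrum used for the curve fit: roughly omega0/2 well below the corner frequency and a rapid fall-off above it. A small numeric sketch with purely illustrative values:

    import numpy as np

    omega0, fcorner = 1e-4, 5.0          # plateau value [m/Hz] and corner frequency [Hz], illustrative only
    f = np.array([0.1, 1.0, 5.0, 20.0])  # frequencies [Hz]
    ssp = omega0 / (1 + np.power(2, f / fcorner))   # same expression as synthsourcespec above
    # ssp[0] is close to omega0 / 2; the values fall off quickly once f exceeds fcorner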

View File

@ -3,15 +3,17 @@
import copy
import os
from obspy import read_events
from obspy.core import read, Stream, UTCDateTime
from obspy.io.sac import SacIOError
from obspy.core.event import Event as ObsPyEvent
from obspy.io.sac import SacIOError
from pylot.core.io.phases import readPILOTEvent, picks_from_picksdict, \
picksdict_from_pilot, merge_picks
from pylot.core.util.errors import FormatError, OverwriteError
from pylot.core.util.utils import fnConstructor, full_range
from pylot.core.util.event import Event
from pylot.core.util.utils import fnConstructor, full_range
class Data(object):
"""
@ -75,7 +77,7 @@ class Data(object):
def __add__(self, other):
assert isinstance(other, Data), "operands must be of same type 'Data'"
rs_id = self.get_evt_data().get('resource_id')
rs_id_other = other.get_evt_data().get('resource_id')
rs_id_other = other.get_evt_data().get('resource_id')
if other.isNew() and not self.isNew():
picks_to_add = other.get_evt_data().picks
old_picks = self.get_evt_data().picks
@ -156,23 +158,23 @@ class Data(object):
self.replacePicks(event, 'auto')
if 'manual' in fcheck:
self.replacePicks(event, 'manual')
def replaceOrigin(self, event, forceOverwrite=False):
if self.get_evt_data().origins or forceOverwrite:
if event.origins:
print("Found origin, replace it by new origin." )
print("Found origin, replace it by new origin.")
event.origins = self.get_evt_data().origins
def replaceMagnitude(self, event, forceOverwrite=False):
if self.get_evt_data().magnitudes or forceOverwrite:
if event.magnitudes:
print("Found magnitude, replace it by new magnitude")
event.magnitudes = self.get_evt_data().magnitudes
def replacePicks(self, event, picktype):
checkflag = 0
picks = event.picks
#remove existing picks
# remove existing picks
for j, pick in reversed(list(enumerate(picks))):
if picktype in str(pick.method_id.id):
picks.pop(j)
@ -180,7 +182,7 @@ class Data(object):
if checkflag:
print("Found %s pick(s), remove them and append new picks to catalog." % picktype)
#append new picks
# append new picks
for pick in self.get_evt_data().picks:
if picktype in str(pick.method_id.id):
picks.append(pick)
@ -195,8 +197,8 @@ class Data(object):
"""
from pylot.core.util.defaults import OUTPUTFORMATS
if not type(fcheck)==list:
fcheck=[fcheck]
if not type(fcheck) == list:
fcheck = [fcheck]
try:
evtformat = OUTPUTFORMATS[fnext]
@ -204,7 +206,7 @@ class Data(object):
errmsg = '{0}; selected file extension {1} not ' \
'supported'.format(e, fnext)
raise FormatError(errmsg)
# check for already existing xml-file
if fnext == '.xml':
if os.path.isfile(fnout + fnext):
@ -231,73 +233,71 @@ class Data(object):
# Prefer manual picks!
for i in range(len(evtdata_org.picks)):
if evtdata_org.picks[i].method_id == 'manual':
mstation = evtdata_org.picks[i].waveform_id.station_code
mstation_ext = mstation + '_'
for k in range(len(evtdata_copy.picks)):
if ((evtdata_copy.picks[k].waveform_id.station_code == mstation) or \
(evtdata_copy.picks[k].waveform_id.station_code == mstation_ext)) and \
(evtdata_copy.picks[k].method_id == 'auto'):
del evtdata_copy.picks[k]
break
mstation = evtdata_org.picks[i].waveform_id.station_code
mstation_ext = mstation + '_'
for k in range(len(evtdata_copy.picks)):
if ((evtdata_copy.picks[k].waveform_id.station_code == mstation) or \
(evtdata_copy.picks[k].waveform_id.station_code == mstation_ext)) and \
(evtdata_copy.picks[k].method_id == 'auto'):
del evtdata_copy.picks[k]
break
lendiff = len(evtdata_org.picks) - len(evtdata_copy.picks)
if lendiff is not 0:
print("Manual as well as automatic picks available. Prefered the {} manual ones!".format(lendiff))
print("Manual as well as automatic picks available. Prefered the {} manual ones!".format(lendiff))
if upperErrors:
# check for pick uncertainties exceeding adjusted upper errors
# Picks with larger uncertainties will not be saved in output file!
for j in range(len(evtdata_org.picks)):
for i in range(len(evtdata_copy.picks)):
if evtdata_copy.picks[i].phase_hint[0] == 'P':
if (evtdata_copy.picks[i].time_errors['upper_uncertainty'] >= upperErrors[0]) or \
(evtdata_copy.picks[i].time_errors['uncertainty'] == None):
print("Uncertainty exceeds or equal adjusted upper time error!")
print("Adjusted uncertainty: {}".format(upperErrors[0]))
print("Pick uncertainty: {}".format(evtdata_copy.picks[i].time_errors['uncertainty']))
print("{1} P-Pick of station {0} will not be saved in outputfile".format(
evtdata_copy.picks[i].waveform_id.station_code,
evtdata_copy.picks[i].method_id))
print("#")
del evtdata_copy.picks[i]
break
if evtdata_copy.picks[i].phase_hint[0] == 'S':
if (evtdata_copy.picks[i].time_errors['upper_uncertainty'] >= upperErrors[1]) or \
(evtdata_copy.picks[i].time_errors['uncertainty'] == None):
print("Uncertainty exceeds or equal adjusted upper time error!")
print("Adjusted uncertainty: {}".format(upperErrors[1]))
print("Pick uncertainty: {}".format(evtdata_copy.picks[i].time_errors['uncertainty']))
print("{1} S-Pick of station {0} will not be saved in outputfile".format(
evtdata_copy.picks[i].waveform_id.station_code,
evtdata_copy.picks[i].method_id))
print("#")
del evtdata_copy.picks[i]
break
# check for pick uncertainties exceeding adjusted upper errors
# Picks with larger uncertainties will not be saved in output file!
for j in range(len(evtdata_org.picks)):
for i in range(len(evtdata_copy.picks)):
if evtdata_copy.picks[i].phase_hint[0] == 'P':
if (evtdata_copy.picks[i].time_errors['upper_uncertainty'] >= upperErrors[0]) or \
(evtdata_copy.picks[i].time_errors['uncertainty'] == None):
print("Uncertainty exceeds or equal adjusted upper time error!")
print("Adjusted uncertainty: {}".format(upperErrors[0]))
print("Pick uncertainty: {}".format(evtdata_copy.picks[i].time_errors['uncertainty']))
print("{1} P-Pick of station {0} will not be saved in outputfile".format(
evtdata_copy.picks[i].waveform_id.station_code,
evtdata_copy.picks[i].method_id))
print("#")
del evtdata_copy.picks[i]
break
if evtdata_copy.picks[i].phase_hint[0] == 'S':
if (evtdata_copy.picks[i].time_errors['upper_uncertainty'] >= upperErrors[1]) or \
(evtdata_copy.picks[i].time_errors['uncertainty'] == None):
print("Uncertainty exceeds or equal adjusted upper time error!")
print("Adjusted uncertainty: {}".format(upperErrors[1]))
print("Pick uncertainty: {}".format(evtdata_copy.picks[i].time_errors['uncertainty']))
print("{1} S-Pick of station {0} will not be saved in outputfile".format(
evtdata_copy.picks[i].waveform_id.station_code,
evtdata_copy.picks[i].method_id))
print("#")
del evtdata_copy.picks[i]
break
if fnext == '.obs':
try:
evtdata_copy.write(fnout + fnext, format=evtformat)
# write header afterwards
evid = str(evtdata_org.resource_id).split('/')[1]
header = '# EQEVENT: Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' % evid
nllocfile = open(fnout + fnext)
l = nllocfile.readlines()
nllocfile.close()
l.insert(0, header)
nllocfile = open(fnout + fnext, 'w')
nllocfile.write("".join(l))
nllocfile.close()
except KeyError as e:
raise KeyError('''{0} export format
try:
evtdata_copy.write(fnout + fnext, format=evtformat)
# write header afterwards
evid = str(evtdata_org.resource_id).split('/')[1]
header = '# EQEVENT: Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' % evid
nllocfile = open(fnout + fnext)
l = nllocfile.readlines()
nllocfile.close()
l.insert(0, header)
nllocfile = open(fnout + fnext, 'w')
nllocfile.write("".join(l))
nllocfile.close()
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
if fnext == '.cnv':
try:
evtdata_org.write(fnout + fnext, format=evtformat)
except KeyError as e:
raise KeyError('''{0} export format
try:
evtdata_org.write(fnout + fnext, format=evtformat)
except KeyError as e:
raise KeyError('''{0} export format
not implemented: {1}'''.format(evtformat, e))
def getComp(self):
"""
@ -362,7 +362,7 @@ class Data(object):
except Exception as e:
warnmsg += '{0}\n{1}\n'.format(fname, e)
except SacIOError as se:
warnmsg += '{0}\n{1}\n'.format(fname, se)
warnmsg += '{0}\n{1}\n'.format(fname, se)
if warnmsg:
warnmsg = 'WARNING: unable to read\n' + warnmsg
print(warnmsg)
@ -427,21 +427,21 @@ class Data(object):
:raise OverwriteError: raises an OverwriteError if the picks list is
not empty. The GUI will then ask for a decision.
"""
#firstonset = find_firstonset(picks)
# firstonset = find_firstonset(picks)
# check for automatic picks
print("Writing phases to ObsPy-quakeml file")
for key in picks:
if picks[key]['P']['picker'] == 'auto':
print("Existing picks will be overwritten!")
picks = picks_from_picksdict(picks)
break
print("Existing picks will be overwritten!")
picks = picks_from_picksdict(picks)
break
else:
if self.get_evt_data().picks:
raise OverwriteError('Existing picks would be overwritten!')
break
else:
picks = picks_from_picksdict(picks)
break
if self.get_evt_data().picks:
raise OverwriteError('Existing picks would be overwritten!')
break
else:
picks = picks_from_picksdict(picks)
break
self.get_evt_data().picks = picks
# if 'smi:local' in self.getID() and firstonset:
# fonset_str = firstonset.strftime('%Y_%m_%d_%H_%M_%S')
@ -449,7 +449,6 @@ class Data(object):
# ID.convertIDToQuakeMLURI(authority_id=authority_id)
# self.get_evt_data().resource_id = ID
def applyEvent(event):
"""
takes an `obspy.core.event.Event` object and applies all new
@ -476,7 +475,6 @@ class Data(object):
applydata[typ](data)
self._new = False
class GenericDataStructure(object):
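The exportEvent hunks above first drop picks whose upper time errors exceed the given thresholds and then write the remaining picks in the requested format. A call sketch mirroring the GUI call at the top of this commit; the file-name stem is a placeholder and the thresholds correspond to the largest timeerrorsP/timeerrorsS defaults:

    from pylot.core.io.data import Data

    data = Data()   # would normally already hold an event with picks
    # write a NLLoc .obs file, handling existing automatic picks via fcheck and
    # discarding P picks with upper uncertainty >= 0.08 s and S picks with >= 0.32 s
    data.exportEvent('/path/to/event/PyLoT_e0001.123.16',   # placeholder file-name stem
                     '.obs',
                     fcheck='auto',
                     upperErrors=[0.08, 0.32])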

View File

@ -5,347 +5,347 @@ defaults = {'rootpath': {'type': str,
'tooltip': 'project path',
'value': '',
'namestring': 'Root path'},
'datapath': {'type': str,
'tooltip': 'data path',
'value': '',
'namestring': 'Data path'},
'database': {'type': str,
'tooltip': 'name of data base',
'value': '',
'namestring': 'Database path'},
'eventID': {'type': str,
'tooltip': 'event ID for single event processing (* for all events found in database)',
'value': '',
'namestring': 'Event ID'},
'extent': {'type': str,
'tooltip': 'extent of array ("local", "regional" or "global")',
'value': 'local',
'namestring': 'Array extent'},
'invdir': {'type': str,
'tooltip': 'full path to inventory or dataless-seed file',
'value': '',
'namestring': 'Inversion dir'},
'datastructure': {'type': str,
'tooltip': 'choose data structure',
'value': 'PILOT',
'namestring': 'Datastructure'},
'apverbose': {'type': bool,
'tooltip': "choose 'True' or 'False' for terminal output",
'value': True,
'value': True,
'namestring': 'App. verbosity'},
'nllocbin': {'type': str,
'tooltip': 'path to NLLoc executable',
'value': '',
'namestring': 'NLLoc bin path'},
'nllocroot': {'type': str,
'tooltip': 'root of NLLoc-processing directory',
'value': '',
'namestring': 'NLLoc root path'},
'phasefile': {'type': str,
'tooltip': 'name of autoPyLoT-output phase file for NLLoc',
'value': 'AUTOPHASES.obs',
'namestring': 'Phase filename'},
'ctrfile': {'type': str,
'tooltip': 'name of autoPyLoT-output control file for NLLoc',
'value': 'Insheim_min1d2015_auto.in',
'namestring': 'Control filename'},
'ttpatter': {'type': str,
'tooltip': 'pattern of NLLoc ttimes from grid',
'value': 'ttime',
'namestring': 'Traveltime pattern'},
'outpatter': {'type': str,
'tooltip': 'pattern of NLLoc-output file',
'value': 'AUTOLOC_nlloc',
'namestring': 'NLLoc output pattern'},
'vp': {'type': float,
'tooltip': 'average P-wave velocity',
'value': 3530.,
'namestring': 'P-velocity'},
'rho': {'type': float,
'tooltip': 'average rock density [kg/m^3]',
'value': 2500.,
'namestring': 'Density'},
'Qp': {'type': (float, float),
'tooltip': 'quality factor for P waves (Qp*f^a); list(Qp, a)',
'value': (300., 0.8),
'namestring': ('Quality factor', 'Qp1', 'Qp2')},
'pstart': {'type': float,
'tooltip': 'start time [s] for calculating CF for P-picking',
'value': 15.0,
'namestring': 'P start'},
'pstop': {'type': float,
'tooltip': 'end time [s] for calculating CF for P-picking',
'value': 60.0,
'namestring': 'P stop'},
'sstart': {'type': float,
'tooltip': 'start time [s] relative to P-onset for calculating CF for S-picking',
'value': -1.0,
'namestring': 'S start'},
'sstop': {'type': float,
'tooltip': 'end time [s] after P-onset for calculating CF for S-picking',
'value': 10.0,
'namestring': 'S stop'},
'bpz1': {'type': (float, float),
'tooltip': 'lower/upper corner freq. of first band pass filter Z-comp. [Hz]',
'value': (2, 20),
'namestring': ('Z-bandpass 1', 'Lower', 'Upper')},
'bpz2': {'type': (float, float),
'tooltip': 'lower/upper corner freq. of second band pass filter Z-comp. [Hz]',
'value': (2, 30),
'namestring': ('Z-bandpass 2', 'Lower', 'Upper')},
'bph1': {'type': (float, float),
'tooltip': 'lower/upper corner freq. of first band pass filter H-comp. [Hz]',
'value': (2, 15),
'namestring': ('H-bandpass 1', 'Lower', 'Upper')},
'bph2': {'type': (float, float),
'tooltip': 'lower/upper corner freq. of second band pass filter z-comp. [Hz]',
'value': (2, 20),
'namestring': ('H-bandpass 2', 'Lower', 'Upper')},
'algoP': {'type': str,
'tooltip': 'choose algorithm for P-onset determination (HOS, ARZ, or AR3)',
'value': 'HOS',
'namestring': 'P algorithm'},
'tlta': {'type': float,
'tooltip': 'for HOS-/AR-AIC-picker, length of LTA window [s]',
'value': 7.0,
'namestring': 'LTA window'},
'hosorder': {'type': int,
'tooltip': 'for HOS-picker, order of Higher Order Statistics',
'value': 4,
'namestring': 'HOS order'},
'Parorder': {'type': int,
'tooltip': 'for AR-picker, order of AR process of Z-component',
'value': 2,
'namestring': 'AR order P'},
'tdet1z': {'type': float,
'tooltip': 'for AR-picker, length of AR determination window [s] for Z-component, 1st pick',
'value': 1.2,
'namestring': 'AR det. window Z 1'},
'tpred1z': {'type': float,
'tooltip': 'for AR-picker, length of AR prediction window [s] for Z-component, 1st pick',
'value': 0.4,
'namestring': 'AR pred. window Z 1'},
'tdet2z': {'type': float,
'tooltip': 'for AR-picker, length of AR determination window [s] for Z-component, 2nd pick',
'value': 0.6,
'namestring': 'AR det. window Z 2'},
'tpred2z': {'type': float,
'tooltip': 'for AR-picker, length of AR prediction window [s] for Z-component, 2nd pick',
'value': 0.2,
'namestring': 'AR pred. window Z 2'},
'addnoise': {'type': float,
'tooltip': 'add noise to seismogram for stable AR prediction',
'value': 0.001,
'namestring': 'Add noise'},
'tsnrz': {'type': (float, float, float, float),
'tooltip': 'for HOS/AR, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]',
'value': (3, 0.1, 0.5, 1.0),
'namestring': ('SNR windows P', 'Noise', 'Safety', 'Signal', 'Slope')},
'pickwinP': {'type': float,
'tooltip': 'for initial AIC pick, length of P-pick window [s]',
'value': 3.0,
'namestring': 'AIC window P'},
'Precalcwin': {'type': float,
'tooltip': 'for HOS/AR, window length [s] for recalculation of CF (relative to 1st pick)',
'value': 6.0,
'namestring': 'Recal. window P'},
'aictsmooth': {'type': float,
'tooltip': 'for HOS/AR, take average of samples for smoothing of AIC-function [s]',
'value': 0.2,
'namestring': 'AIC smooth P'},
'tsmoothP': {'type': float,
'tooltip': 'for HOS/AR, take average of samples for smoothing CF [s]',
'value': 0.1,
'namestring': 'CF smooth P'},
'ausP': {'type': float,
'tooltip': 'for HOS/AR, artificial uplift of samples (aus) of CF (P)',
'value': 0.001,
'namestring': 'Artificial uplift P'},
'nfacP': {'type': float,
'tooltip': 'for HOS/AR, noise factor for noise level determination (P)',
'value': 1.3,
'namestring': 'Noise factor P'},
'algoS': {'type': str,
'tooltip': 'choose algorithm for S-onset determination (ARH or AR3)',
'value': 'ARH',
'namestring': 'S algorithm'},
'tdet1h': {'type': float,
'tooltip': 'for HOS/AR, length of AR-determination window [s], H-components, 1st pick',
'value': 0.8,
'namestring': 'AR det. window H 1'},
'tpred1h': {'type': float,
'tooltip': 'for HOS/AR, length of AR-prediction window [s], H-components, 1st pick',
'value': 0.4,
'namestring': 'AR pred. window H 1'},
'tdet2h': {'type': float,
'tooltip': 'for HOS/AR, length of AR-determinaton window [s], H-components, 2nd pick',
'value': 0.6,
'namestring': 'AR det. window H 2'},
'tpred2h': {'type': float,
'tooltip': 'for HOS/AR, length of AR-prediction window [s], H-components, 2nd pick',
'value': 0.3,
'namestring': 'AR pred. window H 2'},
'Sarorder': {'type': int,
'tooltip': 'for AR-picker, order of AR process of H-components',
'value': 4,
'namestring': 'AR order S'},
'Srecalcwin': {'type': float,
'tooltip': 'for AR-picker, window length [s] for recalculation of CF (2nd pick) (H)',
'value': 5.0,
'namestring': 'Recal. window S'},
'pickwinS': {'type': float,
'tooltip': 'for initial AIC pick, length of S-pick window [s]',
'value': 3.0,
'namestring': 'AIC window S'},
'tsnrh': {'type': (float, float, float, float),
'tooltip': 'for ARH/AR3, window lengths for SNR-and slope estimation [tnoise, tsafetey, tsignal, tslope] [s]',
'value': (2, 0.2, 1.5, 0.5),
'namestring': ('SNR windows S', 'Noise', 'Safety', 'Signal', 'Slope')},
'aictsmoothS': {'type': float,
'tooltip': 'for AIC-picker, take average of samples for smoothing of AIC-function [s]',
'value': 0.5,
'namestring': 'AIC smooth S'},
'tsmoothS': {'type': float,
'tooltip': 'for AR-picker, take average of samples for smoothing CF [s] (S)',
'value': 0.7,
'namestring': 'CF smooth S'},
'ausS': {'type': float,
'tooltip': 'for HOS/AR, artificial uplift of samples (aus) of CF (S)',
'value': 0.9,
'namestring': 'Artificial uplift S'},
'nfacS': {'type': float,
'tooltip': 'for AR-picker, noise factor for noise level determination (S)',
'value': 1.5,
'namestring': 'Noise factor S'},
'minfmweight': {'type': int,
'tooltip': 'minimum required P weight for first-motion determination',
'value': 1,
'namestring': 'Min. P weight'},
'minFMSNR': {'type': float,
'tooltip': 'miniumum required SNR for first-motion determination',
'value': 2.,
'namestring': 'Min SNR'},
'fmpickwin': {'type': float,
'tooltip': 'pick window around P onset for calculating zero crossings',
'value': 0.2,
'namestring': 'Zero crossings window'},
'timeerrorsP': {'type': (float, float, float, float),
'tooltip': 'discrete time errors [s] corresponding to picking weights [0 1 2 3] for P',
'value': (0.01, 0.02, 0.04, 0.08),
'namestring': ('Time errors P', '0', '1', '2', '3')},
'timeerrorsS': {'type': (float, float, float, float),
'tooltip': 'discrete time errors [s] corresponding to picking weights [0 1 2 3] for S',
'value': (0.04, 0.08, 0.16, 0.32),
'namestring': ('Time errors S', '0', '1', '2', '3')},
'minAICPslope': {'type': float,
'tooltip': 'below this slope [counts/s] the initial P pick is rejected',
'value': 0.8,
'namestring': 'Min. slope P'},
'minAICPSNR': {'type': float,
'tooltip': 'below this SNR the initial P pick is rejected',
'value': 1.1,
'namestring': 'Min. SNR P'},
'minAICSslope': {'type': float,
'tooltip': 'below this slope [counts/s] the initial S pick is rejected',
'value': 1.,
'namestring': 'Min. slope S'},
'minAICSSNR': {'type': float,
'tooltip': 'below this SNR the initial S pick is rejected',
'value': 1.5,
'namestring': 'Min. SNR S'},
'minsiglength': {'type': float,
'tooltip': 'length of signal part for which amplitudes must exceed noiselevel [s]',
'value': 1.,
'namestring': 'Min. signal length'},
'noisefactor': {'type': float,
'tooltip': 'noiselevel*noisefactor=threshold',
'value': 1.0,
'namestring': 'Noise factor'},
'minpercent': {'type': float,
'tooltip': 'required percentage of amplitudes exceeding threshold',
'value': 10.,
'namestring': 'Min amplitude [%]'},
'zfac': {'type': float,
'tooltip': 'P-amplitude must exceed at least zfac times RMS-S amplitude',
'value': 1.5,
'namestring': 'Z factor'},
'mdttolerance': {'type': float,
'tooltip': 'maximum allowed deviation of P picks from median [s]',
'value': 6.0,
'namestring': 'Median tolerance'},
'wdttolerance': {'type': float,
'tooltip': 'maximum allowed deviation from Wadati-diagram',
'value': 1.0,
'namestring': 'Wadati tolerance'},
'WAscaling': {'type': (float, float, float),
'tooltip': 'Scaling relation (log(Ao)+Alog(r)+Br+C) of Wood-Anderson amplitude Ao [nm] \
If zeros are set, original Richter magnitude is calculated!',
@ -357,30 +357,30 @@ defaults = {'rootpath': {'type': str,
If zeros are set, no scaling of network magnitude is applied!',
'value': (0., 0.),
'namestring': ('Local mag. scaling', '', '')},
'minfreq': {'type': (float, float),
'tooltip': 'Lower filter frequency [P, S]',
'value': (1.0, 1.0),
'namestring': ('Lower freq.', 'P', 'S')},
'maxfreq': {'type': (float, float),
'tooltip': 'Upper filter frequency [P, S]',
'value': (10.0, 10.0),
'namestring': ('Upper freq.', 'P', 'S')},
'filter_order': {'type': (int, int),
'tooltip': 'filter order [P, S]',
'value': (2, 2),
'namestring': ('Order', 'P', 'S')},
'filter_type': {'type': (str, str),
'tooltip': 'filter type (bandpass, bandstop, lowpass, highpass) [P, S]',
'value': ('bandpass' , 'bandpass'),
'value': ('bandpass', 'bandpass'),
'namestring': ('Type', 'P', 'S')}
}
}
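Every entry of the defaults dictionary above bundles the parameter's type, tooltip, default value and display name. A short access sketch using keys shown in this diff; the import path follows the `from pylot.core.io import default_parameters` import seen later in this commit:

    from pylot.core.io.default_parameters import defaults

    vp = defaults['vp']
    print(vp['value'])        # 3530.0
    print(vp['tooltip'])      # 'average P-wave velocity'
    print(vp['namestring'])   # 'P-velocity'

    # PylotParameter (next file) seeds itself from these values, roughly:
    #     for param in defaults:
    #         self.setParamKV(param, defaults[param]['value'])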
settings_main={
'dirs':[
settings_main = {
'dirs': [
'rootpath',
'datapath',
'database',
@ -388,26 +388,26 @@ settings_main={
'invdir',
'datastructure',
'apverbose'],
'nlloc':[
'nlloc': [
'nllocbin',
'nllocroot',
'phasefile',
'ctrfile',
'ttpatter',
'outpatter'],
'smoment':[
'smoment': [
'vp',
'rho',
'Qp'],
'localmag':[
'localmag': [
'WAscaling',
'magscaling'],
'filter':[
'filter': [
'minfreq',
'maxfreq',
'filter_order',
'filter_type'],
'pick':[
'pick': [
'extent',
'pstart',
'pstop',
@ -419,8 +419,8 @@ settings_main={
'bph2']
}
settings_special_pick={
'z':[
settings_special_pick = {
'z': [
'algoP',
'tlta',
'hosorder',
@ -437,7 +437,7 @@ settings_special_pick={
'tsmoothP',
'ausP',
'nfacP'],
'h':[
'h': [
'algoS',
'tdet1h',
'tpred1h',
@ -451,11 +451,11 @@ settings_special_pick={
'tsmoothS',
'ausS',
'nfacS'],
'fm':[
'fm': [
'minfmweight',
'minFMSNR',
'fmpickwin'],
'quality':[
'quality': [
'timeerrorsP',
'timeerrorsS',
'minAICPslope',

View File

@ -1,8 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylot.core.util.errors import ParameterError
from pylot.core.io import default_parameters
from pylot.core.util.errors import ParameterError
class PylotParameter(object):
'''
@ -69,13 +70,13 @@ class PylotParameter(object):
# Set default values of parameter names
def __init_default_paras(self):
parameters=default_parameters.defaults
parameters = default_parameters.defaults
self.__defaults = parameters
def __init_subsettings(self):
self._settings_main=default_parameters.settings_main
self._settings_special_pick=default_parameters.settings_special_pick
self._settings_main = default_parameters.settings_main
self._settings_special_pick = default_parameters.settings_special_pick
# String representation of the object
def __repr__(self):
return "PylotParameter('%s')" % self.__filename
@ -136,13 +137,13 @@ class PylotParameter(object):
return self._settings_special_pick
def get_all_para_names(self):
all_names=[]
all_names = []
all_names += self.get_main_para_names()['dirs']
all_names += self.get_main_para_names()['nlloc']
all_names += self.get_main_para_names()['smoment']
all_names += self.get_main_para_names()['localmag']
all_names += self.get_main_para_names()['pick']
all_names += self.get_main_para_names()['filter']
all_names += self.get_main_para_names()['filter']
all_names += self.get_special_para_names()['z']
all_names += self.get_special_para_names()['h']
all_names += self.get_special_para_names()['fm']
@ -156,14 +157,14 @@ class PylotParameter(object):
message = 'Type check failed for param: {}, is type: {}, expected type:{}'
message = message.format(param, is_type, expect_type)
print(Warning(message))
def setParamKV(self, param, value):
self.__setitem__(param, value)
def setParam(self, **kwargs):
for key in kwargs:
self.__setitem__(key, kwargs[key])
@staticmethod
def _printParameterError(errmsg):
print('ParameterError:\n non-existent parameter %s' % errmsg)
@ -172,7 +173,7 @@ class PylotParameter(object):
defaults = self.get_defaults()
for param in defaults:
self.setParamKV(param, defaults[param]['value'])
def from_file(self, fnin=None):
if not fnin:
if self.__filename is not None:
@ -225,9 +226,9 @@ class PylotParameter(object):
# for key, value in self.iteritems():
# lines.append('{key}\t{value}\n'.format(key=key, value=value))
# fid_out.writelines(lines)
header = ('%This is a parameter input file for PyLoT/autoPyLoT.\n'+
'%All main and special settings regarding data handling\n'+
'%and picking are to be set here!\n'+
header = ('%This is a parameter input file for PyLoT/autoPyLoT.\n' +
'%All main and special settings regarding data handling\n' +
'%and picking are to be set here!\n' +
'%Parameters are optimized for %{} data sets!\n'.format(self.get_main_para_names()['pick'][0]))
separator = '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n'
@ -244,14 +245,14 @@ class PylotParameter(object):
'filter settings', separator)
self.write_section(fid_out, self.get_main_para_names()['pick'],
'common settings picker', separator)
fid_out.write(('#special settings for calculating CF#\n'+
fid_out.write(('#special settings for calculating CF#\n' +
'%!!Edit the following only if you know what you are doing!!%\n'))
self.write_section(fid_out, self.get_special_para_names()['z'],
'Z-component', None)
self.write_section(fid_out, self.get_special_para_names()['h'],
'H-components', None)
self.write_section(fid_out, self.get_special_para_names()['fm'],
'first-motion picker', None)
'first-motion picker', None)
self.write_section(fid_out, self.get_special_para_names()['quality'],
'quality assessment', None)
@ -267,7 +268,7 @@ class PylotParameter(object):
if type(value) == list or type(value) == tuple:
value_tmp = ''
for vl in value:
value_tmp+= '{} '.format(vl)
value_tmp += '{} '.format(vl)
value = value_tmp
tooltip = self.get_defaults()[name]['tooltip']
if not len(str(value)) > l_val:
@ -283,7 +284,7 @@ class PylotParameter(object):
ttip = '%{:<{}}\n'.format(tooltip, l_ttip)
else:
ttip = '%{}\n'.format(tooltip)
line = value+name+ttip
line = value + name + ttip
fid.write(line)
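A usage sketch for PylotParameter based on the methods visible in this diff; the input-file path is a placeholder, and the explicit from_file() call is shown for clarity even though the constructor already receives the file name:

    from pylot.core.io.inputs import PylotParameter

    parameter = PylotParameter('/path/to/autoPyLoT.in')   # placeholder path
    parameter.from_file()                                  # read values from that file

    print(parameter.get_all_para_names()[:5])              # e.g. ['rootpath', 'datapath', ...]
    parameter.setParamKV('vp', 3530.)                      # set one value
    parameter.setParam(pstart=15.0, sstop=10.0)            # set several values via keywords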

View File

@ -2,22 +2,23 @@
# -*- coding: utf-8 -*-
import glob
import obspy.core.event as ope
from obspy.core.event import read_events
import os
import scipy.io as sio
import warnings
import matplotlib.pyplot as plt
import numpy as np
import warnings
import obspy.core.event as ope
import scipy.io as sio
from obspy.core import UTCDateTime
from obspy.core.event import read_events
from obspy.core.util import AttribDict
from pylot.core.io.inputs import PylotParameter
from pylot.core.io.location import create_arrival, create_event, \
create_magnitude, create_origin, create_pick
from pylot.core.io.location import create_event, \
create_magnitude
from pylot.core.pick.utils import select_for_phase
from pylot.core.util.utils import getOwner, full_range, four_digits
def add_amplitudes(event, amplitudes):
amplitude_list = []
for pick in event.picks:
@ -36,6 +37,7 @@ def add_amplitudes(event, amplitudes):
event.amplitudes = amplitude_list
return event
def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
"""
readPILOTEvent - function
@ -203,7 +205,7 @@ def picksdict_from_picks(evt):
try:
onsets = picks[station]
except KeyError as e:
#print(e)
# print(e)
onsets = {}
mpp = pick.time
spe = pick.time_errors.uncertainty
@ -233,6 +235,7 @@ def picksdict_from_picks(evt):
picks[station] = onsets.copy()
return picks
def picks_from_picksdict(picks, creation_info=None):
picks_list = list()
for station, onsets in picks.items():
@ -266,8 +269,8 @@ def picks_from_picksdict(picks, creation_info=None):
pick.phase_hint = label
pick.method_id = ope.ResourceIdentifier(id=picker)
pick.waveform_id = ope.WaveformStreamID(station_code=station,
channel_code=ccode,
network_code=ncode)
channel_code=ccode,
network_code=ncode)
try:
polarity = phase['fm']
if polarity == 'U' or '+':
@ -277,7 +280,7 @@ def picks_from_picksdict(picks, creation_info=None):
else:
pick.polarity = 'undecidable'
except KeyError as e:
if 'fm' in str(e): # no polarity information found for this phase
if 'fm' in str(e): # no polarity information found for this phase
pass
else:
raise e
@ -289,7 +292,7 @@ def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0
import glob
db_root = os.path.join(root_dir, db_dir)
evt_list = glob.glob1(db_root,'e????.???.??')
evt_list = glob.glob1(db_root, 'e????.???.??')
for evt in evt_list:
if verbosity > 0:
@ -297,7 +300,6 @@ def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0
reassess_pilot_event(root_dir, db_dir, evt, out_dir, fn_param, verbosity)
def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None, verbosity=0):
from obspy import read
@ -305,7 +307,6 @@ def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None
from pylot.core.pick.utils import earllatepicker
if fn_param is None:
import pylot.core.util.defaults as defaults
fn_param = defaults.AUTOMATIC_DEFAULTS
default = PylotParameter(fn_param, verbosity)
@ -339,7 +340,8 @@ def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None
except Exception as e:
if 'No file matching file pattern:' in e.message:
if verbosity > 0:
warnings.warn('no waveform data found for station {station}'.format(station=station), RuntimeWarning)
warnings.warn('no waveform data found for station {station}'.format(station=station),
RuntimeWarning)
datacheck.append(fn_pattern + ' (no data)\n')
continue
else:
@ -395,7 +397,7 @@ def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None
os.makedirs(out_dir)
fnout_prefix = os.path.join(out_dir, 'PyLoT_{0}.'.format(event_id))
evt.write(fnout_prefix + 'xml', format='QUAKEML')
#evt.write(fnout_prefix + 'cnv', format='VELEST')
# evt.write(fnout_prefix + 'cnv', format='VELEST')
def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
@ -424,10 +426,10 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
:param: eventinfo, optional, needed for VELEST-cnv file
and FOCMEC- and HASH-input files
:type: `obspy.core.event.Event` object
"""
"""
if fformat == 'NLLoc':
print ("Writing phases to %s for NLLoc" % filename)
print("Writing phases to %s for NLLoc" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
@ -451,7 +453,7 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
ss = onset.second
ms = onset.microsecond
ss_ms = ss + ms / 1000000.0
pweight = 1 # use pick
pweight = 1 # use pick
try:
if arrivals[key]['P']['weight'] >= 4:
pweight = 0 # do not use pick
@ -478,7 +480,7 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
ss = onset.second
ms = onset.microsecond
ss_ms = ss + ms / 1000000.0
sweight = 1 # use pick
sweight = 1 # use pick
try:
if arrivals[key]['S']['weight'] >= 4:
sweight = 0 # do not use pick
@ -496,15 +498,15 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
fid.close()
elif fformat == 'HYPO71':
print ("Writing phases to %s for HYPO71" % filename)
print("Writing phases to %s for HYPO71" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write(' %s\n' %
parameter.get('eventID'))
parameter.get('eventID'))
for key in arrivals:
if arrivals[key]['P']['weight'] < 4:
stat = key
if len(stat) > 4: # HYPO71 handles only 4-string station IDs
if len(stat) > 4: # HYPO71 handles only 4-string station IDs
stat = stat[1:5]
Ponset = arrivals[key]['P']['mpp']
Sonset = arrivals[key]['S']['mpp']
@ -544,36 +546,36 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
elif sweight >= 2:
sstr = 'E'
fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s%sS %d %s\n' % (stat,
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Sss_ms,
sstr,
sweight,
Ao))
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Sss_ms,
sstr,
sweight,
Ao))
else:
fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s\n' % (stat,
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Ao))
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Ao))
fid.close()
elif fformat == 'HYPOSAT':
print ("Writing phases to %s for HYPOSAT" % filename)
print("Writing phases to %s for HYPOSAT" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write('%s, event %s \n' % (parameter.get('database'), parameter.get('eventID')))
@ -595,7 +597,7 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
# use symmetrized picking error as std
# (read the HYPOSAT manual)
pstd = arrivals[key]['P']['spe']
fid.write('%-5s P1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
fid.write('%-5s P1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
% (key, pyear, pmonth, pday, phh, pmm, Pss, pstd))
# S onsets
if arrivals[key].has_key('S') and arrivals[key]['S']:
@ -610,12 +612,12 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
sms = Sonset.microsecond
Sss = sss + sms / 1000000.0
sstd = arrivals[key]['S']['spe']
fid.write('%-5s S1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
fid.write('%-5s S1 %4.0f %02d %02d %02d %02d %05.02f %5.3f -999. 0.00 -999. 0.00\n'
% (key, syear, smonth, sday, shh, smm, Sss, sstd))
fid.close()
elif fformat == 'VELEST':
print ("Writing phases to %s for VELEST" % filename)
print("Writing phases to %s for VELEST" % filename)
fid = open("%s" % filename, 'w')
# get informations needed in cnv-file
# check, whether latitude is N or S and longitude is E or W
@ -631,14 +633,14 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
# get last two integers of origin year
stime = eventsource['time']
if stime.year - 2000 >= 0:
syear = stime.year - 2000
syear = stime.year - 2000
else:
syear = stime.year - 1900
ifx = 0 # default value, see VELEST manual, pp. 22-23
syear = stime.year - 1900
ifx = 0 # default value, see VELEST manual, pp. 22-23
# write header
fid.write('%s%02d%02d %02d%02d %05.2f %7.4f%c %8.4f%c %7.2f %6.2f %02.0f 0.0 0.03 1.0 1.0\n' % (
syear, stime.month, stime.day, stime.hour, stime.minute, stime.second, eventsource['latitude'],
cns, eventsource['longitude'], cew, eventsource['depth'],eventinfo.magnitudes[0]['mag'], ifx))
syear, stime.month, stime.day, stime.hour, stime.minute, stime.second, eventsource['latitude'],
cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
n = 0
for key in arrivals:
# P onsets
@ -646,33 +648,33 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
if arrivals[key]['P']['weight'] < 4:
n += 1
stat = key
if len(stat) > 4: # VELEST handles only 4-string station IDs
if len(stat) > 4: # VELEST handles only 4-string station IDs
stat = stat[1:5]
Ponset = arrivals[key]['P']['mpp']
Pweight = arrivals[key]['P']['weight']
Prt = Ponset - stime # onset time relative to source time
Prt = Ponset - stime # onset time relative to source time
if n % 6 is not 0:
fid.write('%-4sP%d%6.2f' % (stat, Pweight, Prt))
fid.write('%-4sP%d%6.2f' % (stat, Pweight, Prt))
else:
fid.write('%-4sP%d%6.2f\n' % (stat, Pweight, Prt))
# S onsets
fid.write('%-4sP%d%6.2f\n' % (stat, Pweight, Prt))
# S onsets
if arrivals[key].has_key('S'):
if arrivals[key]['S']['weight'] < 4:
n += 1
stat = key
if len(stat) > 4: # VELEST handles only 4-string station IDs
if len(stat) > 4: # VELEST handles only 4-string station IDs
stat = stat[1:5]
Sonset = arrivals[key]['S']['mpp']
Sweight = arrivals[key]['S']['weight']
Srt = Ponset - stime # onset time relative to source time
Srt = Ponset - stime # onset time relative to source time
if n % 6 is not 0:
fid.write('%-4sS%d%6.2f' % (stat, Sweight, Srt))
fid.write('%-4sS%d%6.2f' % (stat, Sweight, Srt))
else:
fid.write('%-4sS%d%6.2f\n' % (stat, Sweight, Srt))
fid.write('%-4sS%d%6.2f\n' % (stat, Sweight, Srt))
fid.close()
elif fformat == 'hypoDD':
print ("Writing phases to %s for hypoDD" % filename)
print("Writing phases to %s for hypoDD" % filename)
fid = open("%s" % filename, 'w')
# get event information needed for hypoDD-phase file
eventsource = eventinfo.origins[0]
@ -681,59 +683,62 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
hddID = event.split('.')[0][1:5]
# write header
fid.write('# %d %d %d %d %d %5.2f %7.4f +%6.4f %7.4f %4.2f 0.1 0.5 %4.2f %s\n' % (
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
eventinfo.magnitudes[0]['mag'], eventsource['quality']['standard_error'], hddID))
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
eventinfo.magnitudes[0]['mag'], eventsource['quality']['standard_error'], hddID))
for key in arrivals:
if arrivals[key].has_key('P'):
# P onsets
if arrivals[key]['P']['weight'] < 4:
Ponset = arrivals[key]['P']['mpp']
Prt = Ponset - stime # onset time relative to source time
fid.write('%s %6.3f 1 P\n' % (key, Prt))
# S onsets
Prt = Ponset - stime # onset time relative to source time
fid.write('%s %6.3f 1 P\n' % (key, Prt))
# S onsets
if arrivals[key]['S']['weight'] < 4:
Sonset = arrivals[key]['S']['mpp']
Srt = Sonset - stime # onset time relative to source time
fid.write('%-5s %6.3f 1 S\n' % (key, Srt))
Srt = Sonset - stime # onset time relative to source time
fid.write('%-5s %6.3f 1 S\n' % (key, Srt))
fid.close()
elif fformat == 'FOCMEC':
print ("Writing phases to %s for FOCMEC" % filename)
print("Writing phases to %s for FOCMEC" % filename)
fid = open("%s" % filename, 'w')
# get event information needed for FOCMEC-input file
eventsource = eventinfo.origins[0]
stime = eventsource['time']
# write header line including event information
fid.write('%s %d%02d%02d%02d%02d%02.0f %7.4f %6.4f %3.1f %3.1f\n' % (parameter.get('eventID'),
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
eventinfo.magnitudes[0]['mag']))
stime.year, stime.month, stime.day,
stime.hour, stime.minute, stime.second,
eventsource['latitude'],
eventsource['longitude'],
eventsource['depth'] / 1000,
eventinfo.magnitudes[0]['mag']))
picks = eventinfo.picks
for key in arrivals:
if arrivals[key].has_key('P'):
if arrivals[key]['P']['weight'] < 4 and arrivals[key]['P']['fm'] is not None:
stat = key
for i in range(len(picks)):
station = picks[i].waveform_id.station_code
if station == stat:
# get resource ID
resid_picks = picks[i].get('resource_id')
# find same ID in eventinfo
# there it is the pick_id!!
for j in range(len(eventinfo.origins[0].arrivals)):
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
if len(stat) > 4: # FOCMEC handles only 4-string station IDs
stat = stat[1:5]
az = eventinfo.origins[0].arrivals[j].get('azimuth')
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
fid.write('%-4s %6.2f %6.2f%s \n' % (stat,
az,
inz,
arrivals[key]['P']['fm']))
break
station = picks[i].waveform_id.station_code
if station == stat:
# get resource ID
resid_picks = picks[i].get('resource_id')
# find same ID in eventinfo
# there it is the pick_id!!
for j in range(len(eventinfo.origins[0].arrivals)):
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
if len(stat) > 4: # FOCMEC handles only 4-string station IDs
stat = stat[1:5]
az = eventinfo.origins[0].arrivals[j].get('azimuth')
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
fid.write('%-4s %6.2f %6.2f%s \n' % (stat,
az,
inz,
arrivals[key]['P']['fm']))
break
fid.close()
@ -742,9 +747,9 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
# HASH-driver 1 and 2 (see HASH manual!)
filename1 = filename + 'drv1' + '.phase'
filename2 = filename + 'drv2' + '.phase'
print ("Writing phases to %s for HASH for HASH-driver 1" % filename1)
print("Writing phases to %s for HASH for HASH-driver 1" % filename1)
fid1 = open("%s" % filename1, 'w')
print ("Writing phases to %s for HASH for HASH-driver 2" % filename2)
print("Writing phases to %s for HASH for HASH-driver 2" % filename2)
fid2 = open("%s" % filename2, 'w')
# get event information needed for HASH-input file
eventsource = eventinfo.origins[0]
@ -759,26 +764,32 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
erz = eventsource.depth_errors['uncertainty']
stime = eventsource['time']
if stime.year - 2000 >= 0:
syear = stime.year - 2000
syear = stime.year - 2000
else:
syear = stime.year - 1900
syear = stime.year - 1900
picks = eventinfo.picks
# write header line including event information
# for HASH-driver 1
fid1.write('%s%02d%02d%02d%02d%5.2f%2dN%5.2f%3dE%5.2f%6.3f%4.2f%5.2f%5.2f%s\n' % (syear,
stime.month, stime.day, stime.hour, stime.minute, stime.second,
latdeg, latmin, londeg, lonmin, eventsource['depth'],
eventinfo.magnitudes[0]['mag'], erh, erz,
hashID))
stime.month, stime.day,
stime.hour, stime.minute,
stime.second,
latdeg, latmin, londeg,
lonmin, eventsource['depth'],
eventinfo.magnitudes[0][
'mag'], erh, erz,
hashID))
# write header line including event information
# for HASH-driver 2
fid2.write('%d%02d%02d%02d%02d%5.2f%dN%5.2f%3dE%6.2f%5.2f %d %5.2f %5.2f %4.2f %s \n' % (syear, stime.month, stime.day,
stime.hour, stime.minute, stime.second,
latdeg,latmin,londeg, lonmin,
eventsource['depth'],
eventsource['quality']['used_phase_count'],
erh, erz, eventinfo.magnitudes[0]['mag'],
hashID))
fid2.write(
'%d%02d%02d%02d%02d%5.2f%dN%5.2f%3dE%6.2f%5.2f %d %5.2f %5.2f %4.2f %s \n' % (
syear, stime.month, stime.day,
stime.hour, stime.minute, stime.second,
latdeg, latmin, londeg, lonmin,
eventsource['depth'],
eventsource['quality']['used_phase_count'],
erh, erz, eventinfo.magnitudes[0]['mag'],
hashID))
# write phase lines
for key in arrivals:
@ -789,36 +800,38 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
ncode = arrivals[key]['P']['network']
if arrivals[key]['P']['weight'] < 2:
Pqual='I'
Pqual = 'I'
else:
Pqual='E'
Pqual = 'E'
for i in range(len(picks)):
station = picks[i].waveform_id.station_code
if station == stat:
# get resource ID
resid_picks = picks[i].get('resource_id')
# find same ID in eventinfo
# there it is the pick_id!!
for j in range(len(eventinfo.origins[0].arrivals)):
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
if len(stat) > 4: # HASH handles only 4-string station IDs
stat = stat[1:5]
az = eventinfo.origins[0].arrivals[j].get('azimuth')
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
dist = eventinfo.origins[0].arrivals[j].get('distance')
# write phase line for HASH-driver 1
fid1.write('%-4s%sP%s%d 0 %3.1f %03d %03d 2 1 %s\n' % (stat, Pqual, arrivals[key]['P']['fm'], arrivals[key]['P']['weight'],
dist, inz, az, ccode))
# write phase line for HASH-driver 2
fid2.write('%-4s %s %s %s %s \n' % (
stat,
ncode,
ccode,
Pqual,
arrivals[key]['P']['fm']))
break
station = picks[i].waveform_id.station_code
if station == stat:
# get resource ID
resid_picks = picks[i].get('resource_id')
# find same ID in eventinfo
# there it is the pick_id!!
for j in range(len(eventinfo.origins[0].arrivals)):
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
if len(stat) > 4: # HASH handles only 4-string station IDs
stat = stat[1:5]
az = eventinfo.origins[0].arrivals[j].get('azimuth')
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
dist = eventinfo.origins[0].arrivals[j].get('distance')
# write phase line for HASH-driver 1
fid1.write(
'%-4s%sP%s%d 0 %3.1f %03d %03d 2 1 %s\n' % (
stat, Pqual, arrivals[key]['P']['fm'], arrivals[key]['P']['weight'],
dist, inz, az, ccode))
# write phase line for HASH-driver 2
fid2.write('%-4s %s %s %s %s \n' % (
stat,
ncode,
ccode,
Pqual,
arrivals[key]['P']['fm']))
break
fid1.write(' %s' % hashID)
fid1.close()
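Both the FOCMEC and the HASH writer above recover azimuth and takeoff angle by matching a pick's resource_id against the pick_id stored in the origin's arrivals. A condensed sketch of that lookup on ObsPy event objects (the helper name and return convention are illustrative, not part of PyLoT):

def azimuth_and_takeoff(eventinfo, station, phase='P'):
    """Return (azimuth, takeoff_angle) for the station's pick of the given phase, or None."""
    origin = eventinfo.origins[0]
    for pick in eventinfo.picks:
        if pick.waveform_id.station_code != station:
            continue
        # the arrival whose pick_id equals this pick's resource_id carries the angles
        for arrival in origin.arrivals:
            if arrival.get('pick_id') == pick.get('resource_id') and arrival.phase == phase:
                return arrival.get('azimuth'), arrival.get('takeoff_angle')
    return None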
@ -849,6 +862,7 @@ def merge_picks(event, picks):
del time, err, phase, station, network, method
return event
def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
"""
Script to get onset uncertainties from Quakeml.xml files created by PyLoT.
@ -867,7 +881,7 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
Sw2 = []
Sw3 = []
Sw4 = []
for names in xmlnames:
for names in xmlnames:
print("Getting onset weights from {}".format(names))
cat = read_events(names)
cat_copy = cat.copy()
@ -881,53 +895,53 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
for mpick in arrivals_copy:
if mpick.phase_hint[0] == 'P':
if ((mpick.waveform_id.station_code == mstation) or \
(mpick.waveform_id.station_code == mstation_ext)) and \
((mpick.method_id).split('/')[1] == 'auto') and \
(mpick.time_errors['uncertainty'] <= ErrorsP[3]):
del mpick
break
(mpick.waveform_id.station_code == mstation_ext)) and \
((mpick.method_id).split('/')[1] == 'auto') and \
(mpick.time_errors['uncertainty'] <= ErrorsP[3]):
del mpick
break
elif mpick.phase_hint[0] == 'S':
if ((mpick.waveform_id.station_code == mstation) or \
(mpick.waveform_id.station_code == mstation_ext)) and \
((mpick.method_id).split('/')[1] == 'auto') and \
(mpick.time_errors['uncertainty'] <= ErrorsS[3]):
del mpick
break
(mpick.waveform_id.station_code == mstation_ext)) and \
((mpick.method_id).split('/')[1] == 'auto') and \
(mpick.time_errors['uncertainty'] <= ErrorsS[3]):
del mpick
break
lendiff = len(arrivals) - len(arrivals_copy)
if lendiff is not 0:
print("Found manual as well as automatic picks, prefered the {} manual ones!".format(lendiff))
for Pick in arrivals_copy:
if Pick.phase_hint[0] == 'P':
if Pick.time_errors.uncertainty <= ErrorsP[0]:
Pw0.append(Pick.time_errors.uncertainty)
if Pick.time_errors.uncertainty <= ErrorsP[0]:
Pw0.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsP[0]) and \
(Pick.time_errors.uncertainty <= ErrorsP[1]):
Pw1.append(Pick.time_errors.uncertainty)
(Pick.time_errors.uncertainty <= ErrorsP[1]):
Pw1.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsP[1]) and \
(Pick.time_errors.uncertainty <= ErrorsP[2]):
Pw2.append(Pick.time_errors.uncertainty)
(Pick.time_errors.uncertainty <= ErrorsP[2]):
Pw2.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsP[2]) and \
(Pick.time_errors.uncertainty <= ErrorsP[3]):
Pw3.append(Pick.time_errors.uncertainty)
(Pick.time_errors.uncertainty <= ErrorsP[3]):
Pw3.append(Pick.time_errors.uncertainty)
elif Pick.time_errors.uncertainty > ErrorsP[3]:
Pw4.append(Pick.time_errors.uncertainty)
Pw4.append(Pick.time_errors.uncertainty)
else:
pass
elif Pick.phase_hint[0] == 'S':
if Pick.time_errors.uncertainty <= ErrorsS[0]:
Sw0.append(Pick.time_errors.uncertainty)
if Pick.time_errors.uncertainty <= ErrorsS[0]:
Sw0.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsS[0]) and \
(Pick.time_errors.uncertainty <= ErrorsS[1]):
Sw1.append(Pick.time_errors.uncertainty)
(Pick.time_errors.uncertainty <= ErrorsS[1]):
Sw1.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsS[1]) and \
(Pick.time_errors.uncertainty <= ErrorsS[2]):
Sw2.append(Pick.time_errors.uncertainty)
(Pick.time_errors.uncertainty <= ErrorsS[2]):
Sw2.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsS[2]) and \
(Pick.time_errors.uncertainty <= ErrorsS[3]):
Sw3.append(Pick.time_errors.uncertainty)
(Pick.time_errors.uncertainty <= ErrorsS[3]):
Sw3.append(Pick.time_errors.uncertainty)
elif Pick.time_errors.uncertainty > ErrorsS[3]:
Sw4.append(Pick.time_errors.uncertainty)
Sw4.append(Pick.time_errors.uncertainty)
else:
pass
else:
@ -992,6 +1006,5 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
plt.xticks(y_pos, weights)
plt.xlim([-0.5, 4.5])
plt.xlabel('Qualities')
plt.title('{0} P-Qualities, {1} S-Qualities'.format(numPweights, numSweights))
plt.title('{0} P-Qualities, {1} S-Qualities'.format(numPweights, numSweights))
plt.show()
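For orientation, a minimal usage sketch for writephases as read above; the nested arrivals layout (station -> phase -> pick attributes) and the parameter.get() lookups follow the code, while the station, times and output path are made up, and any extra per-pick fields a given writer needs would have to be added:

from obspy import UTCDateTime
from pylot.core.io.phases import writephases

arrivals = {
    'STA1': {
        'P': {'mpp': UTCDateTime(2017, 8, 3, 12, 0, 1.25),   # most probable pick
              'spe': 0.05,                                    # symmetric pick error [s]
              'weight': 1,                                    # quality class 0-4
              'fm': 'U'},                                     # first motion
        'S': {'mpp': UTCDateTime(2017, 8, 3, 12, 0, 3.80),
              'spe': 0.10,
              'weight': 2},
    }
}
# a plain dict is enough for the header fields accessed via parameter.get()
parameter = {'eventID': 'e0001.001.17', 'database': 'test'}

writephases(arrivals, 'NLLoc', '/tmp/e0001.001.17.obs', parameter)  # illustrative output path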
View File
@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
def export(picks, fnout, parameter, eventinfo):
'''
    Takes <picks> dictionary and exports picking data to a focmec
View File
@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
def export(picks, fnout, parameter, eventinfo):
'''
    Takes <picks> dictionary and exports picking data to a HASH
View File
@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
def export(picks, fnout, parameter):
'''
    Takes <picks> dictionary and exports picking data to a HYPO71
View File
@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
def export(picks, fnout, parameter, eventinfo):
'''
    Takes <picks> dictionary and exports picking data to a hypoDD
View File
@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
def export(picks, fnout, parameter):
'''
    Takes <picks> dictionary and exports picking data to a HYPOSAT
View File
@ -1,9 +1,10 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
import os
import glob
import os
import subprocess
from obspy import read_events
from pylot.core.io.phases import writephases
from pylot.core.util.utils import getPatternLine, runProgram, which
@ -11,9 +12,11 @@ from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
class NLLocError(EnvironmentError):
pass
def export(picks, fnout, parameter):
'''
    Takes <picks> dictionary and exports picking data to a NLLOC-obs
@ -58,7 +61,7 @@ def modify_inputs(ctrfn, root, nllocoutn, phasefn, tttn):
locfiles = 'LOCFILES %s NLLOC_OBS %s %s 0\n' % (phasefile, tttable, nllocout)
# modification of NLLoc-control file
print ("Modifying NLLoc-control file %s ..." % ctrfile)
print("Modifying NLLoc-control file %s ..." % ctrfile)
curlocfiles = getPatternLine(ctrfile, 'LOCFILES')
nllfile = open(ctrfile, 'r')
filedata = nllfile.read()
@ -94,7 +97,7 @@ def locate(fnin, infile=None):
def read_location(fn):
path, file = os.path.split(fn)
file = glob.glob1(path, file + '.[0-9]*.grid0.loc.hyp')
file = glob.glob1(path, file + '.[0-9]*.grid0.loc.hyp')
if len(file) > 1:
raise IOError('ambiguous location name {0}'.format(file))
fn = os.path.join(path, file[0])
View File
@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
def export(picks, fnout, parameter, eventinfo):
'''
    Takes <picks> dictionary and exports picking data to a VELEST-cnv
View File
@ -11,14 +11,14 @@ function conglomerate utils.
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.io.data import Data
from pylot.core.io.inputs import PylotParameter
from pylot.core.pick.picker import AICPicker, PragPicker
from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.charfuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
from pylot.core.pick.picker import AICPicker, PragPicker
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
getSNR, fmpicker, checkPonsets, wadaticheck
from pylot.core.util.utils import getPatternLine, gen_Pool
from pylot.core.io.data import Data
def autopickevent(data, param, iplot=0, fig_dict=None):
@ -43,10 +43,10 @@ def autopickevent(data, param, iplot=0, fig_dict=None):
if not iplot:
input_tuples.append((topick, param, apverbose))
if iplot>0:
if iplot > 0:
all_onsets[station] = autopickstation(topick, param, verbose=apverbose, iplot=iplot, fig_dict=fig_dict)
if iplot>0:
if iplot > 0:
print('iPlot Flag active: NO MULTIPROCESSING possible.')
return all_onsets
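Condensed, the dispatch logic above picks serially whenever interactive figures are requested (iplot > 0) and otherwise hands per-station tuples to a worker pool; multiprocessing.Pool is used below as a stand-in for the gen_Pool helper imported above, and the module path is assumed:

from multiprocessing import Pool

from pylot.core.pick.autopick import autopickstation, call_autopickstation  # assumed path

def pick_all(stations_wf, param, iplot=0, verbose=False):
    """Sketch: pick every station either serially (interactive plots) or in parallel."""
    if iplot > 0:
        # figures cannot be shared across processes, so fall back to a plain loop
        return {station: autopickstation(wf, param, verbose=verbose, iplot=iplot)
                for station, wf in stations_wf.items()}
    input_tuples = [(wf, param, verbose) for wf in stations_wf.values()]
    with Pool() as pool:
        results = pool.map(call_autopickstation, input_tuples)
    return dict(zip(stations_wf.keys(), results))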
@ -70,7 +70,7 @@ def autopickevent(data, param, iplot=0, fig_dict=None):
def call_autopickstation(input_tuple):
wfstream, pickparam, verbose = input_tuple
#multiprocessing not possible with interactive plotting
# multiprocessing not possible with interactive plotting
return autopickstation(wfstream, pickparam, verbose, iplot=0)
@ -92,7 +92,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
# special parameters for P picking
iplot = iplot
algoP = pickparam.get('algoP')
pstart = pickparam.get('pstart')
pstop = pickparam.get('pstop')
@ -291,7 +291,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
'Skipping control function checkZ4S.'
if verbose: print(msg)
else:
if iplot>1:
if iplot > 1:
if fig_dict:
fig = fig_dict['checkZ4s']
else:
@ -364,7 +364,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
mpickP, iplot, fig=fig)
else:
epickP, lpickP, Perror = earllatepicker(z_copy, nfacP, tsnrz,
mpickP, iplot)
mpickP, iplot)
# get SNR
[SNRP, SNRPdB, Pnoiselevel] = getSNR(z_copy, tsnrz, mpickP)
@ -392,7 +392,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
fig = None
FM = fmpicker(zdat, z_copy, fmpickwin, mpickP, iplot, fig)
else:
FM = fmpicker(zdat, z_copy, fmpickwin, mpickP, iplot)
FM = fmpicker(zdat, z_copy, fmpickwin, mpickP, iplot)
else:
FM = 'N'
@ -567,15 +567,15 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
fig = fig_dict['el_S1pick']
else:
fig = None
epickS1, lpickS1, Serror1 = earllatepicker(h_copy, nfacS,
tsnrh,
mpickS, iplot,
fig=fig)
epickS1, lpickS1, Serror1 = earllatepicker(h_copy, nfacS,
tsnrh,
mpickS, iplot,
fig=fig)
else:
epickS1, lpickS1, Serror1 = earllatepicker(h_copy, nfacS,
tsnrh,
mpickS, iplot)
h_copy[0].data = trH2_filt.data
if iplot:
if fig_dict:
@ -706,7 +706,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
ax1.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5],
[-1, -1], 'r')
ax1.plot([refPpick.getpick(), refPpick.getpick()],
[-1.3, 1.3], 'r', linewidth=2, label='Final P Pick')
[-1.3, 1.3], 'r', linewidth=2, label='Final P Pick')
ax1.plot([refPpick.getpick() - 0.5, refPpick.getpick() + 0.5],
[1.3, 1.3], 'r', linewidth=2)
ax1.plot([refPpick.getpick() - 0.5, refPpick.getpick() + 0.5],
@ -714,28 +714,28 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
ax1.plot([lpickP, lpickP], [-1.1, 1.1], 'r--', label='lpp')
ax1.plot([epickP, epickP], [-1.1, 1.1], 'r--', label='epp')
ax1.set_title('%s, %s, P Weight=%d, SNR=%7.2f, SNR[dB]=%7.2f '
'Polarity: %s' % (tr_filt.stats.station,
tr_filt.stats.channel,
Pweight,
SNRP,
SNRPdB,
FM))
'Polarity: %s' % (tr_filt.stats.station,
tr_filt.stats.channel,
Pweight,
SNRP,
SNRPdB,
FM))
else:
ax1.set_title('%s, P Weight=%d, SNR=None, '
'SNRdB=None' % (tr_filt.stats.channel, Pweight))
'SNRdB=None' % (tr_filt.stats.channel, Pweight))
else:
ax1.set_title('%s, %s, P Weight=%d' % (tr_filt.stats.station,
tr_filt.stats.channel,
Pweight))
tr_filt.stats.channel,
Pweight))
ax1.legend()
ax1.set_yticks([])
ax1.set_ylim([-1.5, 1.5])
ax1.set_ylabel('Normalized Counts')
#fig.suptitle(tr_filt.stats.starttime)
# fig.suptitle(tr_filt.stats.starttime)
if len(edat[0]) > 1 and len(ndat[0]) > 1 and Sflag == 1:
# plot horizontal traces
ax2 = fig.add_subplot(3,1,2,sharex=ax1)
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
th1data = np.arange(0,
trH1_filt.stats.npts /
trH1_filt.stats.sampling_rate,
@ -750,7 +750,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
arhcf1.getCF() / max(arhcf1.getCF()), 'b', label='CF1')
if aicSflag == 1:
ax2.plot(arhcf2.getTimeArray(),
arhcf2.getCF() / max(arhcf2.getCF()), 'm', label='CF2')
arhcf2.getCF() / max(arhcf2.getCF()), 'm', label='CF2')
ax2.plot(
[aicarhpick.getpick(), aicarhpick.getpick()],
[-1, 1], 'g', label='Initial S Onset')
@ -782,9 +782,9 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
ax2.set_yticks([])
ax2.set_ylim([-1.5, 1.5])
ax2.set_ylabel('Normalized Counts')
#fig.suptitle(trH1_filt.stats.starttime)
# fig.suptitle(trH1_filt.stats.starttime)
ax3 = fig.add_subplot(3,1,3, sharex=ax1)
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
th2data = np.arange(0,
trH2_filt.stats.npts /
trH2_filt.stats.sampling_rate,
View File
@ -17,7 +17,6 @@ autoregressive prediction: application to local and regional distances, Geophys.
:author: MAGS2 EP3 working group
"""
import matplotlib.pyplot as plt
import numpy as np
from obspy.core import Stream
@ -466,7 +465,7 @@ class ARHcf(CharacteristicFunction):
# prediction error = CF
cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2) \
+ np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2)) / (
2 * lpred))
2 * lpred))
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
@ -608,7 +607,7 @@ class AR3Ccf(CharacteristicFunction):
cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2) \
+ np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2) \
+ np.power(self.xpred[2][i:i + lpred] - xnp[2][i:i + lpred], 2)) / (
3 * lpred))
3 * lpred))
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
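Both characteristic functions reduce to the same quantity: the root-mean-square misfit between the AR prediction and the data, summed over the components (two for ARHcf, three for AR3Ccf) and averaged over the prediction window. A standalone sketch (illustrative helper, array shapes assumed):

import numpy as np

def prediction_error(observed, predicted, lpred):
    """RMS misfit between AR prediction and data over a window of lpred samples.

    observed/predicted have shape (ncomp, lpred).
    """
    ncomp = observed.shape[0]
    return np.sqrt(np.sum((predicted - observed) ** 2) / (ncomp * lpred))

# tiny check: a constant offset of 0.1 on two components gives a CF value of 0.1
obs = np.array([[1.0, 2.0, 3.0, 4.0], [0.0, 1.0, 0.0, 1.0]])
print(prediction_error(obs, obs + 0.1, lpred=4))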
View File
@ -4,11 +4,10 @@
import copy
import operator
import os
import numpy as np
import glob
import matplotlib.pyplot as plt
from obspy import read_events
import matplotlib.pyplot as plt
import numpy as np
from obspy import read_events
from pylot.core.io.phases import picksdict_from_picks
from pylot.core.util.pdf import ProbabilityDensityFunction
from pylot.core.util.utils import find_in_list
@ -334,7 +333,7 @@ class PDFDictionary(object):
axarr[l].set_title(phase)
if l is 0:
axann = axarr[l].annotate(station, xy=(.05, .5),
xycoords='axes fraction')
xycoords='axes fraction')
bbox_props = dict(boxstyle='round', facecolor='lightgrey',
alpha=.7)
axann.set_bbox(bbox_props)
@ -352,7 +351,6 @@ class PDFstatistics(object):
Takes a path as argument.
"""
def __init__(self, directory):
"""Initiates some values needed when dealing with pdfs later"""
self._rootdir = directory
@ -449,7 +447,7 @@ class PDFstatistics(object):
else:
raise ValueError("for call to method {0} value has to be "
"defined but is 'None' ".format(method_options[
property.upper()]))
property.upper()]))
for pdf_dict in self:
# create worklist
@ -459,7 +457,7 @@ class PDFstatistics(object):
return rlist
def writeThetaToFile(self,array,out_dir):
def writeThetaToFile(self, array, out_dir):
"""
        Method to write array-like data to file. Useful since acquiring can take a
        serious amount of time when dealing with large databases.
@ -471,12 +469,12 @@ class PDFstatistics(object):
"""
fid = open(os.path.join(out_dir), 'w')
for val in array:
fid.write(str(val)+'\n')
fid.write(str(val) + '\n')
fid.close()
def main():
root_dir ='/home/sebastianp/Codetesting/xmls/'
root_dir = '/home/sebastianp/Codetesting/xmls/'
Insheim = PDFstatistics(root_dir)
Insheim.curphase = 'p'
qdlist = Insheim.get('qdf', 0.2)
View File
@ -19,12 +19,13 @@ calculated after Diehl & Kissling (2009).
:author: MAGS2 EP3 working group / Ludger Kueperkoch
"""
import numpy as np
import matplotlib.pyplot as plt
from pylot.core.pick.utils import getnoisewin, getsignalwin
from pylot.core.pick.charfuns import CharacteristicFunction
import warnings
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.utils import getnoisewin, getsignalwin
class AutoPicker(object):
'''
@ -212,14 +213,14 @@ class AICPicker(AutoPicker):
self.Data[0].data = self.Data[0].data * 1000000
# get signal window
isignal = getsignalwin(self.Tcf, self.Pick, self.TSNR[2])
ii = min([isignal[len(isignal)-1], len(self.Tcf)])
ii = min([isignal[len(isignal) - 1], len(self.Tcf)])
isignal = isignal[0:ii]
try:
aic[isignal]
aic[isignal]
except IndexError as e:
msg = "Time series out of bounds! {}".format(e)
print(msg)
return
msg = "Time series out of bounds! {}".format(e)
print(msg)
return
# calculate SNR from CF
self.SNR = max(abs(aic[isignal] - np.mean(aic[isignal]))) / \
max(abs(aic[inoise] - np.mean(aic[inoise])))
@ -242,7 +243,7 @@ class AICPicker(AutoPicker):
print("Choose longer slope determination window!")
if self.iplot > 1:
if not self.fig:
fig = plt.figure() #self.iplot) ### WHY? MP MP
fig = plt.figure() # self.iplot) ### WHY? MP MP
else:
fig = self.fig
ax = fig.add_subplot(111)
@ -271,7 +272,7 @@ class AICPicker(AutoPicker):
if self.iplot > 1:
if not self.fig:
fig = plt.figure()#self.iplot)
fig = plt.figure() # self.iplot)
else:
fig = self.fig
ax1 = fig.add_subplot(211)
@ -283,21 +284,25 @@ class AICPicker(AutoPicker):
ax1.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
ax1.set_yticks([])
ax1.legend()
if self.Pick is not None:
ax2 = fig.add_subplot(2,1,2, sharex=ax1)
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
ax2.plot(self.Tcf, x, 'k', label='Data')
ax1.axvspan(self.Tcf[inoise[0]],self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax1.axvspan(self.Tcf[isignal[0]],self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
ax1.axvspan(self.Tcf[iislope[0]],self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0, label='Slope Window')
ax2.axvspan(self.Tcf[inoise[0]],self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax2.axvspan(self.Tcf[isignal[0]],self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
ax2.axvspan(self.Tcf[iislope[0]],self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0, label='Slope Window')
ax1.axvspan(self.Tcf[inoise[0]], self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax1.axvspan(self.Tcf[isignal[0]], self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0,
label='Signal Window')
ax1.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
label='Slope Window')
ax2.axvspan(self.Tcf[inoise[0]], self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax2.axvspan(self.Tcf[isignal[0]], self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0,
label='Signal Window')
ax2.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
label='Slope Window')
ax2.plot(self.Tcf[iislope], datafit, 'g', linewidth=2, label='Slope')
ax1.set_title('Station %s, SNR=%7.2f, Slope= %12.2f counts/s' % (self.Data[0].stats.station,
self.SNR, self.slope))
self.SNR, self.slope))
ax2.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
ax2.set_ylabel('Counts')
ax2.set_yticks([])
@ -307,7 +312,7 @@ class AICPicker(AutoPicker):
if self.Pick == None:
print('AICPicker: Could not find minimum, picking window too short?')
return
@ -317,7 +322,7 @@ class PragPicker(AutoPicker):
'''
def calcPick(self):
if self.getpick1() is not None:
print('PragPicker: Get most likely pick from HOS- or AR-CF using pragmatic picking algorithm ...')
@ -402,7 +407,7 @@ class PragPicker(AutoPicker):
if self.getiplot() > 1:
if not self.fig:
fig = plt.figure()#self.getiplot())
fig = plt.figure() # self.getiplot())
else:
fig = self.fig
ax = fig.add_subplot(111)
View File
@ -9,6 +9,7 @@
"""
import warnings
import matplotlib.pyplot as plt
import numpy as np
from obspy.core import Stream, UTCDateTime
@ -41,11 +42,11 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None):
assert isinstance(X, Stream), "%s is not a stream object" % str(X)
if verbosity == 2:
print('earllatepicker:')
print('earllatepicker:')
print('nfac:', nfac)
print('Init pick:', Pick1)
print('TSNR (T_noise, T_gap, T_signal):', TSNR)
LPick = None
EPick = None
PickError = None
@ -69,14 +70,14 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None):
print('x_inoise:', x[inoise])
print('x_isignal:', x[isignal])
print('nlevel:', nlevel)
# get time where signal exceeds nlevel
ilup, = np.where(x[isignal] > nlevel)
ildown, = np.where(x[isignal] < -nlevel)
if not ilup.size and not ildown.size:
if verbosity:
print ("earllatepicker: Signal lower than noise level!\n"
"Skip this trace!")
print("earllatepicker: Signal lower than noise level!\n"
"Skip this trace!")
return LPick, EPick, PickError
il = min(np.min(ilup) if ilup.size else float('inf'),
np.min(ildown) if ildown.size else float('inf'))
@ -118,7 +119,7 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None):
if iplot > 1:
if not fig:
fig = plt.figure()#iplot)
fig = plt.figure() # iplot)
ax = fig.add_subplot(111)
ax.plot(t, x, 'k', label='Data')
ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
@ -131,9 +132,9 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None):
ax.plot([LPick, LPick], [max(x) / 2, -max(x) / 2], '--k', label='lpp')
ax.plot([EPick, EPick], [max(x) / 2, -max(x) / 2], '--k', label='epp')
ax.plot([Pick1 + PickError, Pick1 + PickError],
[max(x) / 2, -max(x) / 2], 'r--', label='spe')
[max(x) / 2, -max(x) / 2], 'r--', label='spe')
ax.plot([Pick1 - PickError, Pick1 - PickError],
[max(x) / 2, -max(x) / 2], 'r--')
[max(x) / 2, -max(x) / 2], 'r--')
ax.set_xlabel('Time [s] since %s' % X[0].stats.starttime)
ax.set_yticks([])
ax.set_title(
@ -173,7 +174,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
FM = None
if Pick is not None:
print ("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")
print("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")
xraw = Xraw[0].data
xfilt = Xfilt[0].data
@ -212,15 +213,15 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
else:
li1 = index1[0]
if np.size(xraw[ipick[0][1]:ipick[0][li1]]) == 0:
print ("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
print("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
P1 = None
else:
imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][li1]]))
if imax1 == 0:
imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][index1[1]]]))
if imax1 == 0:
print ("fmpicker: Zero crossings too close!")
print ("Skip first motion determination!")
print("fmpicker: Zero crossings too close!")
print("Skip first motion determination!")
return FM
islope1 = np.where((t >= Pick) & (t <= Pick + t[imax1]))
@ -254,15 +255,15 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
else:
li2 = index2[0]
if np.size(xfilt[ipick[0][1]:ipick[0][li2]]) == 0:
print ("fmpicker: Onset on filtered trace too emergent for first motion determination!")
print("fmpicker: Onset on filtered trace too emergent for first motion determination!")
P2 = None
else:
imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][li2]]))
if imax2 == 0:
imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][index2[1]]]))
if imax2 == 0:
print ("fmpicker: Zero crossings too close!")
print ("Skip first motion determination!")
print("fmpicker: Zero crossings too close!")
print("Skip first motion determination!")
return FM
islope2 = np.where((t >= Pick) & (t <= Pick + t[imax2]))
@ -286,11 +287,11 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
elif P1[0] > 0 >= P2[0]:
FM = '+'
print ("fmpicker: Found polarity %s" % FM)
print("fmpicker: Found polarity %s" % FM)
if iplot > 1:
if not fig:
fig = plt.figure()#iplot)
fig = plt.figure() # iplot)
ax1 = fig.add_subplot(211)
ax1.plot(t, xraw, 'k')
ax1.plot([Pick, Pick], [max(xraw), -max(xraw)], 'b', linewidth=2, label='Pick')
@ -304,11 +305,11 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
ax1.set_title('First-Motion Determination, %s, Unfiltered Data' % Xraw[
0].stats.station)
ax2=fig.add_subplot(2,1,2, sharex=ax1)
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
ax2.set_title('First-Motion Determination, Filtered Data')
ax2.plot(t, xfilt, 'k')
ax2.plot([Pick, Pick], [max(xfilt), -max(xfilt)], 'b',
linewidth=2)
linewidth=2)
if P2 is not None:
ax2.plot(t[islope2], xfilt[islope2])
ax2.plot(zc2, np.zeros(len(zc2)), '*g', markersize=14)
@ -359,7 +360,7 @@ def getSNR(X, TSNR, t1, tracenum=0):
SNR = None
SNRdB = None
noiselevel = None
x = X[tracenum].data
npts = X[tracenum].stats.npts
sr = X[tracenum].stats.sampling_rate
@ -372,7 +373,7 @@ def getSNR(X, TSNR, t1, tracenum=0):
# get signal window
isignal = getsignalwin(t, t1, TSNR[2])
if np.size(inoise) < 1:
print ("getSNR: Empty array inoise, check noise window!")
print("getSNR: Empty array inoise, check noise window!")
return SNR, SNRdB, noiselevel
# demean over entire waveform
@ -380,13 +381,13 @@ def getSNR(X, TSNR, t1, tracenum=0):
# calculate ratios
noiselevel = np.sqrt(np.mean(np.square(x[inoise])))
#signallevel = np.sqrt(np.mean(np.square(x[isignal])))
# signallevel = np.sqrt(np.mean(np.square(x[isignal])))
if np.size(isignal) < 1:
print ("getSNR: Empty array isignal, check signal window!")
print("getSNR: Empty array isignal, check signal window!")
return SNR, SNRdB, noiselevel
#noiselevel = np.abs(x[inoise]).max()
# noiselevel = np.abs(x[inoise]).max()
signallevel = np.abs(x[isignal]).max()
SNR = signallevel / noiselevel
@ -418,9 +419,9 @@ def getnoisewin(t, t1, tnoise, tgap):
inoise, = np.where((t <= max([t1 - tgap, 0])) \
& (t >= max([t1 - tnoise - tgap, 0])))
if np.size(inoise) < 1:
inoise, = np.where((t>=t[0]) & (t<=t1))
inoise, = np.where((t >= t[0]) & (t <= t1))
if np.size(inoise) < 1:
print ("getnoisewin: Empty array inoise, check noise window!")
print("getnoisewin: Empty array inoise, check noise window!")
return inoise
@ -444,7 +445,7 @@ def getsignalwin(t, t1, tsignal):
isignal, = np.where((t <= min([t1 + tsignal, len(t)])) \
& (t >= t1))
if np.size(isignal) < 1:
print ("getsignalwin: Empty array isignal, check signal window!")
print("getsignalwin: Empty array isignal, check signal window!")
return isignal
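getnoisewin/getsignalwin translate the pick time and window lengths into index arrays on the time vector, and getSNR then divides the maximum absolute signal amplitude by the RMS noise level. A condensed NumPy sketch of that chain on a synthetic trace (pick time and window lengths made up; the exact demeaning reference in getSNR is outside this excerpt):

import numpy as np

sr = 100.0                                  # sampling rate [Hz]
t = np.arange(0, 30.0, 1.0 / sr)            # time axis [s]
x = 0.1 * np.random.randn(t.size)           # noise ...
x[int(12.0 * sr):] += np.sin(2 * np.pi * 5.0 * t[int(12.0 * sr):])  # ... plus signal after 12 s

t1, tnoise, tgap, tsignal = 12.0, 5.0, 0.5, 5.0   # pick time and window lengths [s]

# noise window [t1 - tgap - tnoise, t1 - tgap], signal window [t1, t1 + tsignal]
inoise, = np.where((t <= max(t1 - tgap, 0)) & (t >= max(t1 - tnoise - tgap, 0)))
isignal, = np.where((t >= t1) & (t <= t1 + tsignal))

x = x - x.mean()                            # demean
noiselevel = np.sqrt(np.mean(np.square(x[inoise])))
signallevel = np.abs(x[isignal]).max()
print('SNR =', signallevel / noiselevel)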
@ -473,7 +474,7 @@ def getResolutionWindow(snr, extent):
>>> getResolutionWindow(2)
2.5
"""
res_wins = {
'regional': {'HRW': 2., 'MRW': 5., 'LRW': 10., 'VLRW': 15.},
'local': {'HRW': 2., 'MRW': 5., 'LRW': 10., 'VLRW': 15.},
@ -487,11 +488,11 @@ def getResolutionWindow(snr, extent):
time_resolution = res_wins[extent]['LRW']
elif snr < 3.:
time_resolution = res_wins[extent]['MRW']
elif snr >3.:
elif snr > 3.:
time_resolution = res_wins[extent]['HRW']
else:
time_resolution = res_wins[extent]['VLRW']
return time_resolution / 2
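So getResolutionWindow maps the SNR onto one of four predefined window lengths (VLRW/LRW/MRW/HRW with increasing SNR) and returns half of it as the symmetric resolution window; consistent with the doctest above, an SNR of 2 in the 'local' setting falls into the 5 s MRW class:

from pylot.core.pick.utils import getResolutionWindow  # module path as imported elsewhere in this commit

print(getResolutionWindow(2.0, 'local'))  # -> 2.5, i.e. half of the 5 s MRW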
@ -573,8 +574,8 @@ def wadaticheck(pickdic, dttolerance, iplot):
# calculate vp/vs ratio before check
vpvsr = p1[0] + 1
print ("###############################################")
print ("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)
print("###############################################")
print("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)
checkedPpicks = []
checkedSpicks = []
@ -611,23 +612,23 @@ def wadaticheck(pickdic, dttolerance, iplot):
# calculate vp/vs ratio after check
cvpvsr = p2[0] + 1
print ("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print ("wadatacheck: Skipped %d S pick(s)" % ibad)
print("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print("wadatacheck: Skipped %d S pick(s)" % ibad)
else:
print ("###############################################")
print ("wadatacheck: Not enough checked S-P times available!")
print ("Skip Wadati check!")
print("###############################################")
print("wadatacheck: Not enough checked S-P times available!")
print("Skip Wadati check!")
checkedonsets = pickdic
else:
print ("wadaticheck: Not enough S-P times available for reliable regression!")
print ("Skip wadati check!")
print("wadaticheck: Not enough S-P times available for reliable regression!")
print("Skip wadati check!")
wfitflag = 1
# plot results
if iplot > 0:
plt.figure()#iplot)
plt.figure() # iplot)
f1, = plt.plot(Ppicks, SPtimes, 'ro')
if wfitflag == 0:
f2, = plt.plot(Ppicks, wdfit, 'k')
@ -645,12 +646,14 @@ def wadaticheck(pickdic, dttolerance, iplot):
return checkedonsets
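The Vp/Vs estimate in wadaticheck rests on ts - tp = (vp/vs - 1) * (tp - t0) for a common origin time t0, so the slope of the S-P times fitted against the P onsets plus one is the Vp/Vs ratio; the fit call itself sits outside this excerpt, np.polyfit stands in for it below (synthetic picks, true ratio 1.73):

import numpy as np

vpvs_true, t0 = 1.73, 0.0
tp = np.array([4.0, 6.0, 9.0, 12.0, 15.0])              # P onset times [s]
sp = (vpvs_true - 1.0) * (tp - t0)                       # exact S-P times
sp += np.random.normal(0.0, 0.05, sp.size)               # add some picking scatter

slope = np.polyfit(tp, sp, 1)[0]
print('Vp/Vs ~', slope + 1.0)                            # close to 1.73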
def RMS(X):
'''
Function returns root mean square of a given array X
'''
return np.sqrt(np.sum(np.power(X, 2))/len(X))
return np.sqrt(np.sum(np.power(X, 2)) / len(X))
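As a quick numerical check of RMS above, sqrt((3**2 + 4**2) / 2):

import numpy as np
print(np.sqrt(np.sum(np.power([3.0, 4.0], 2)) / 2))  # -> ~3.54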
def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fig=None):
'''
Function to detect spuriously picked noise peaks.
@ -684,7 +687,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fi
assert isinstance(X, Stream), "%s is not a stream object" % str(X)
print ("Checking signal length ...")
print("Checking signal length ...")
if len(X) > 1:
# all three components available
@ -714,21 +717,21 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fi
numoverthr = len(np.where(rms[isignal] >= minsiglevel)[0])
if numoverthr >= minnum:
print ("checksignallength: Signal reached required length.")
print("checksignallength: Signal reached required length.")
returnflag = 1
else:
print ("checksignallength: Signal shorter than required minimum signal length!")
print ("Presumably picked noise peak, pick is rejected!")
print ("(min. signal length required: %s s)" % minsiglength)
print("checksignallength: Signal shorter than required minimum signal length!")
print("Presumably picked noise peak, pick is rejected!")
print("(min. signal length required: %s s)" % minsiglength)
returnflag = 0
if iplot == 2:
if not fig:
fig = plt.figure()#iplot)
fig = plt.figure() # iplot)
ax = fig.add_subplot(111)
ax.plot(t, rms, 'k', label='RMS Data')
ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax.axvspan(t[isignal[0]], t[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax.axvspan(t[isignal[0]], t[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
ax.plot([t[isignal[0]], t[isignal[len(isignal) - 1]]],
[minsiglevel, minsiglevel], 'g', linewidth=2, label='Minimum Signal Level')
ax.plot([pick, pick], [min(rms), max(rms)], 'b', linewidth=2, label='Onset')
@ -771,8 +774,8 @@ def checkPonsets(pickdic, dttolerance, iplot):
stations.append(key)
# apply jackknife bootstrapping on variance of P onsets
print ("###############################################")
print ("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
print("###############################################")
print("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
[xjack, PHI_pseudo, PHI_sub] = jackknife(Ppicks, 'VAR', 1)
# get pseudo variances smaller than average variances
# (times safety factor), these picks passed jackknife test
@ -780,7 +783,7 @@ def checkPonsets(pickdic, dttolerance, iplot):
# these picks did not pass jackknife test
badjk = np.where(PHI_pseudo > 5 * xjack)
badjkstations = np.array(stations)[badjk]
print ("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))
print("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))
print(badjkstations)
# calculate median from these picks
@ -793,9 +796,9 @@ def checkPonsets(pickdic, dttolerance, iplot):
goodstations = np.array(stations)[igood]
badstations = np.array(stations)[ibad]
print ("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print ("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))
print("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))
goodmarker = 'goodPonsetcheck'
badmarker = 'badPonsetcheck'
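checkPonsets rates each P onset by how strongly the variance of all onsets reacts when that onset is left out (delete-1 jackknife pseudo values) and rejects onsets whose pseudo value exceeds a multiple of the jackknife estimate (5x above). A self-contained sketch, using the population variance as the estimator (an assumption; the exact estimator lives in jackknife()):

import numpy as np

picks = np.array([10.01, 10.03, 9.99, 10.02, 9.98, 10.00, 10.04, 14.00])  # one outlier [s]
n = picks.size

phi_full = np.var(picks)                                       # population variance as estimator (assumption)
phi_sub = np.array([np.var(np.delete(picks, i)) for i in range(n)])
phi_pseudo = n * phi_full - (n - 1) * phi_sub                  # jackknife pseudo values
phi_jack = phi_pseudo.mean()                                   # jackknife estimate

print(np.where(phi_pseudo > 5 * phi_jack)[0])                  # -> [7], the 14.0 s pick fails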
@ -863,8 +866,8 @@ def jackknife(X, phi, h):
g = len(X) / h
if type(g) is not int:
print ("jackknife: Cannot divide quantity X in equal sized subgroups!")
print ("Choose another size for subgroups!")
print("jackknife: Cannot divide quantity X in equal sized subgroups!")
print("Choose another size for subgroups!")
return PHI_jack, PHI_pseudo, PHI_sub
else:
# estimator of undisturbed spot check
@ -932,7 +935,7 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
assert isinstance(X, Stream), "%s is not a stream object" % str(X)
print ("Check for spuriously picked S onset instead of P onset ...")
print("Check for spuriously picked S onset instead of P onset ...")
returnflag = 0
@ -949,7 +952,7 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
# get earliest time of all 3 traces
min_t = min(zdat[0].stats.starttime, edat[0].stats.starttime, ndat[0].stats.starttime)
# generate time arrays for all 3 traces
tz = np.arange(0, zdat[0].stats.npts / zdat[0].stats.sampling_rate,
zdat[0].stats.delta)
@ -961,11 +964,11 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
zdiff = (zdat[0].stats.starttime - min_t)
ndiff = (ndat[0].stats.starttime - min_t)
ediff = (edat[0].stats.starttime - min_t)
# get signal windows
isignalz = getsignalwin(tz, pick-zdiff, checkwin)
isignaln = getsignalwin(tn, pick-ndiff, checkwin)
isignale = getsignalwin(te, pick-ediff, checkwin)
isignalz = getsignalwin(tz, pick - zdiff, checkwin)
isignaln = getsignalwin(tn, pick - ndiff, checkwin)
isignale = getsignalwin(te, pick - ediff, checkwin)
# calculate RMS of traces
rmsz = RMS(zdat[0].data[isignalz])
@ -978,9 +981,9 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
# vertical P-coda level must exceed horizontal P-coda level
# zfac times encodalevel
if rmsz < minsiglevel:
print ("checkZ4S: Maybe S onset? Skip this P pick!")
print("checkZ4S: Maybe S onset? Skip this P pick!")
else:
print ("checkZ4S: P onset passes checkZ4S test!")
print("checkZ4S: P onset passes checkZ4S test!")
returnflag = 1
if iplot > 1:
@ -996,28 +999,28 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
'N': ndiff,
'E': ediff}
signal_dict = {'Z': isignalz,
'N': isignaln,
'E': isignale}
signal_dict = {'Z': isignalz,
'N': isignaln,
'E': isignale}
for i, key in enumerate(['Z', 'N', 'E']):
rms = rms_dict[key]
trace = traces_dict[key]
t = np.arange(diff_dict[key], trace.stats.npts / trace.stats.sampling_rate+diff_dict[key],
trace.stats.delta)
t = np.arange(diff_dict[key], trace.stats.npts / trace.stats.sampling_rate + diff_dict[key],
trace.stats.delta)
if i == 0:
ax1 = fig.add_subplot(3, 1, i+1)
ax1 = fig.add_subplot(3, 1, i + 1)
ax = ax1
ax.set_title('CheckZ4S, Station %s' % zdat[0].stats.station)
else:
ax = fig.add_subplot(3,1,i+1, sharex=ax1)
ax = fig.add_subplot(3, 1, i + 1, sharex=ax1)
ax.plot(t, abs(trace.data), color='b', label='abs')
ax.plot(t, trace.data, color='k')
name = str(trace.stats.channel) + ': {}'.format(rms)
ax.plot([pick, pick+checkwin], [rms, rms], 'r', label='RMS {}'.format(name))
ax.plot([pick, pick + checkwin], [rms, rms], 'r', label='RMS {}'.format(name))
ax.plot([pick, pick], ax.get_ylim(), 'm', label='Pick')
ax.set_ylabel('Normalized Counts')
ax.axvspan(pick, pick+checkwin, color='c', alpha=0.2,
ax.axvspan(pick, pick + checkwin, color='c', alpha=0.2,
lw=0)
ax.legend()
ax.set_xlabel('Time [s] since %s' % zdat[0].stats.starttime)
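The actual decision in checkZ4S is a comparison of the vertical RMS amplitude against the horizontal RMS amplitudes in a window of length checkwin behind the pick, scaled by zfac; how the two horizontal levels are combined is outside this excerpt, so the sketch below simply averages them (assumption):

import numpy as np

def rms(a):
    return np.sqrt(np.mean(np.square(a)))

def looks_like_p_onset(z, n, e, sr, pick, checkwin, zfac):
    """Rough Z-versus-horizontals test in the style of checkZ4S (sketch only)."""
    i0, i1 = int(pick * sr), int((pick + checkwin) * sr)
    rmsz = rms(z[i0:i1])
    minsiglevel = zfac * 0.5 * (rms(n[i0:i1]) + rms(e[i0:i1]))  # assumed combination
    return rmsz >= minsiglevel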
View File
@ -8,14 +8,14 @@
:author: Ludger Kueperkoch / MAGS2 EP3 working group
"""
import pdb
import numpy as np
import matplotlib.pyplot as plt
from obspy.core import Stream, UTCDateTime
import warnings
import matplotlib.pyplot as plt
import numpy as np
from obspy.core import Stream, UTCDateTime
def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):
def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode=False):
'''
Function to derive earliest and latest possible pick after Diehl & Kissling (2009)
as reasonable uncertainties. Latest possible pick is based on noise level,
@ -45,7 +45,8 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):
EPick = None
PickError = None
if stealthMode is False:
print 'earllatepicker: Get earliest and latest possible pick relative to most likely pick ...'
print
'earllatepicker: Get earliest and latest possible pick relative to most likely pick ...'
x = X[0].data
t = np.arange(0, X[0].stats.npts / X[0].stats.sampling_rate,
@ -61,8 +62,8 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):
ilup, = np.where(x[isignal] > nlevel)
ildown, = np.where(x[isignal] < -nlevel)
if not ilup.size and not ildown.size:
print ("earllatepicker: Signal lower than noise level!")
print ("Skip this trace!")
print("earllatepicker: Signal lower than noise level!")
print("Skip this trace!")
return LPick, EPick, PickError
il = min(np.min(ilup) if ilup.size else float('inf'),
np.min(ildown) if ildown.size else float('inf'))
@ -70,20 +71,21 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):
# get earliest possible pick
EPick = np.nan; count = 0
EPick = np.nan;
count = 0
pis = isignal
# if EPick stays NaN the signal window size will be doubled
while np.isnan(EPick):
if count > 0:
print("earllatepicker: Doubled signal window size %s time(s) "
"because of NaN for earliest pick." %count)
"because of NaN for earliest pick." % count)
if stealthMode is False:
print("\nearllatepicker: Doubled signal window size %s time(s) "
"because of NaN for earliest pick." %count)
"because of NaN for earliest pick." % count)
isigDoubleWinStart = pis[-1] + 1
isignalDoubleWin = np.arange(isigDoubleWinStart,
isigDoubleWinStart + len(pis))
isigDoubleWinStart + len(pis))
if (isigDoubleWinStart + len(pis)) < X[0].data.size:
pis = np.concatenate((pis, isignalDoubleWin))
else:
@ -97,7 +99,6 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):
# T0/4 is assumed as time difference between most likely and earliest possible pick!
EPick = Pick1 - T0 / 2
# get symmetric pick error as mean from earliest and latest possible pick
# by weighting latest possible pick two times earliest possible pick
diffti_tl = LPick - Pick1
@ -165,7 +166,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0):
FM = None
if Pick is not None:
print ("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")
print("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")
xraw = Xraw[0].data
xfilt = Xfilt[0].data
@ -204,15 +205,15 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0):
else:
li1 = index1[0]
if np.size(xraw[ipick[0][1]:ipick[0][li1]]) == 0:
print ("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
print("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
P1 = None
else:
imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][li1]]))
if imax1 == 0:
imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][index1[1]]]))
if imax1 == 0:
print ("fmpicker: Zero crossings too close!")
print ("Skip first motion determination!")
print("fmpicker: Zero crossings too close!")
print("Skip first motion determination!")
return FM
islope1 = np.where((t >= Pick) & (t <= Pick + t[imax1]))
@ -246,15 +247,15 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0):
else:
li2 = index2[0]
if np.size(xfilt[ipick[0][1]:ipick[0][li2]]) == 0:
print ("fmpicker: Onset on filtered trace too emergent for first motion determination!")
print("fmpicker: Onset on filtered trace too emergent for first motion determination!")
P2 = None
else:
imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][li2]]))
if imax2 == 0:
imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][index2[1]]]))
if imax2 == 0:
print ("fmpicker: Zero crossings too close!")
print ("Skip first motion determination!")
print("fmpicker: Zero crossings too close!")
print("Skip first motion determination!")
return FM
islope2 = np.where((t >= Pick) & (t <= Pick + t[imax2]))
@ -278,7 +279,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0):
elif P1[0] > 0 >= P2[0]:
FM = '+'
print ("fmpicker: Found polarity %s" % FM)
print("fmpicker: Found polarity %s" % FM)
if iplot > 1:
plt.figure(iplot)
@ -353,10 +354,10 @@ def getSNR(X, TSNR, t1):
# get signal window
isignal = getsignalwin(t, t1, TSNR[2])
if np.size(inoise) < 1:
print ("getSNR: Empty array inoise, check noise window!")
print("getSNR: Empty array inoise, check noise window!")
return
elif np.size(isignal) < 1:
print ("getSNR: Empty array isignal, check signal window!")
print("getSNR: Empty array isignal, check signal window!")
return
# demean over entire waveform
@ -392,9 +393,9 @@ def getnoisewin(t, t1, tnoise, tgap):
# get noise window
inoise, = np.where((t <= max([t1 - tgap, 0])) \
& (t >= max([t1 - tnoise - tgap, 0])))
& (t >= max([t1 - tnoise - tgap, 0])))
if np.size(inoise) < 1:
print ("getnoisewin: Empty array inoise, check noise window!")
print("getnoisewin: Empty array inoise, check noise window!")
return inoise
@ -416,9 +417,9 @@ def getsignalwin(t, t1, tsignal):
# get signal window
isignal, = np.where((t <= min([t1 + tsignal, len(t)])) \
& (t >= t1))
& (t >= t1))
if np.size(isignal) < 1:
print ("getsignalwin: Empty array isignal, check signal window!")
print("getsignalwin: Empty array isignal, check signal window!")
return isignal
@ -457,7 +458,7 @@ def getResolutionWindow(snr):
else:
time_resolution = res_wins['HRW']
return time_resolution/2
return time_resolution / 2
def wadaticheck(pickdic, dttolerance, iplot):
@ -485,17 +486,16 @@ def wadaticheck(pickdic, dttolerance, iplot):
SPtimes = []
for key in pickdic:
if pickdic[key]['P']['weight'] < 4 and pickdic[key]['S']['weight'] < 4:
# calculate S-P time
spt = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
# add S-P time to dictionary
pickdic[key]['SPt'] = spt
# add P onsets and corresponding S-P times to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
UTCSpick = UTCDateTime(pickdic[key]['S']['mpp'])
Ppicks.append(UTCPpick.timestamp)
Spicks.append(UTCSpick.timestamp)
SPtimes.append(spt)
# calculate S-P time
spt = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
# add S-P time to dictionary
pickdic[key]['SPt'] = spt
# add P onsets and corresponding S-P times to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
UTCSpick = UTCDateTime(pickdic[key]['S']['mpp'])
Ppicks.append(UTCPpick.timestamp)
Spicks.append(UTCSpick.timestamp)
SPtimes.append(spt)
if len(SPtimes) >= 3:
# calculate slope
@ -505,8 +505,8 @@ def wadaticheck(pickdic, dttolerance, iplot):
# calculate vp/vs ratio before check
vpvsr = p1[0] + 1
print ("###############################################")
print ("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)
print("###############################################")
print("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)
checkedPpicks = []
checkedSpicks = []
@ -527,7 +527,7 @@ def wadaticheck(pickdic, dttolerance, iplot):
ibad += 1
else:
marker = 'goodWadatiCheck'
checkedPpick = UTCDateTime(pickdic[key]['P']['mpp'])
checkedPpick = UTCDateTime(pickdic[key]['P']['mpp'])
checkedPpicks.append(checkedPpick.timestamp)
checkedSpick = UTCDateTime(pickdic[key]['S']['mpp'])
checkedSpicks.append(checkedSpick.timestamp)
@ -543,18 +543,18 @@ def wadaticheck(pickdic, dttolerance, iplot):
# calculate vp/vs ratio after check
cvpvsr = p2[0] + 1
print ("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print ("wadatacheck: Skipped %d S pick(s)" % ibad)
print("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print("wadatacheck: Skipped %d S pick(s)" % ibad)
else:
print ("###############################################")
print ("wadatacheck: Not enough checked S-P times available!")
print ("Skip Wadati check!")
print("###############################################")
print("wadatacheck: Not enough checked S-P times available!")
print("Skip Wadati check!")
checkedonsets = pickdic
else:
print ("wadaticheck: Not enough S-P times available for reliable regression!")
print ("Skip wadati check!")
print("wadaticheck: Not enough S-P times available for reliable regression!")
print("Skip wadati check!")
wfitflag = 1
# plot results
@ -614,7 +614,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
assert isinstance(X, Stream), "%s is not a stream object" % str(X)
print ("Checking signal length ...")
print("Checking signal length ...")
if len(X) > 1:
# all three components available
@ -639,25 +639,25 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
# calculate minimum adjusted signal level
minsiglevel = max(rms[inoise]) * nfac
# minimum adjusted number of samples over minimum signal level
minnum = len(isignal) * minpercent/100
minnum = len(isignal) * minpercent / 100
# get number of samples above minimum adjusted signal level
numoverthr = len(np.where(rms[isignal] >= minsiglevel)[0])
if numoverthr >= minnum:
print ("checksignallength: Signal reached required length.")
print("checksignallength: Signal reached required length.")
returnflag = 1
else:
print ("checksignallength: Signal shorter than required minimum signal length!")
print ("Presumably picked noise peak, pick is rejected!")
print ("(min. signal length required: %s s)" % minsiglength)
print("checksignallength: Signal shorter than required minimum signal length!")
print("Presumably picked noise peak, pick is rejected!")
print("(min. signal length required: %s s)" % minsiglength)
returnflag = 0
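# Illustrative sketch (editor's addition, hypothetical arrays and parameters): the rule above
# accepts a pick when enough RMS samples in the signal window exceed nfac times the
# noise-window maximum; nfac = 2 and minpercent = 60 are assumed here.
import numpy as np
rms_demo = np.array([1.0, 1.2, 0.9, 5.0, 4.8, 4.5, 0.8])
inoise_demo, isignal_demo = np.arange(0, 3), np.arange(3, 7)
minsiglevel_demo = max(rms_demo[inoise_demo]) * 2
minnum_demo = len(isignal_demo) * 60 / 100.
numoverthr_demo = len(np.where(rms_demo[isignal_demo] >= minsiglevel_demo)[0])
print("signal accepted: %s" % (numoverthr_demo >= minnum_demo))  # True here (3 >= 2.4)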
if iplot == 2:
plt.figure(iplot)
p1, = plt.plot(t,rms, 'k')
p1, = plt.plot(t, rms, 'k')
p2, = plt.plot(t[inoise], rms[inoise], 'c')
p3, = plt.plot(t[isignal],rms[isignal], 'r')
p4, = plt.plot([t[isignal[0]], t[isignal[len(isignal)-1]]],
p3, = plt.plot(t[isignal], rms[isignal], 'r')
p4, = plt.plot([t[isignal[0]], t[isignal[len(isignal) - 1]]],
[minsiglevel, minsiglevel], 'g', linewidth=2)
p5, = plt.plot([pick, pick], [min(rms), max(rms)], 'b', linewidth=2)
plt.legend([p1, p2, p3, p4, p5], ['RMS Data', 'RMS Noise Window',
@ -698,22 +698,22 @@ def checkPonsets(pickdic, dttolerance, iplot):
stations = []
for key in pickdic:
if pickdic[key]['P']['weight'] < 4:
# add P onsets to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
Ppicks.append(UTCPpick.timestamp)
stations.append(key)
# add P onsets to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
Ppicks.append(UTCPpick.timestamp)
stations.append(key)
# apply jackknife bootstrapping on variance of P onsets
print ("###############################################")
print ("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
[xjack,PHI_pseudo,PHI_sub] = jackknife(Ppicks, 'VAR', 1)
print("###############################################")
print("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
[xjack, PHI_pseudo, PHI_sub] = jackknife(Ppicks, 'VAR', 1)
# get pseudo variances smaller than average variances
# (times safety factor), these picks passed jackknife test
ij = np.where(PHI_pseudo <= 2 * xjack)
# these picks did not pass jackknife test
badjk = np.where(PHI_pseudo > 2 * xjack)
badjkstations = np.array(stations)[badjk]
print ("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))
print("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))
# calculate median from these picks
pmedian = np.median(np.array(Ppicks)[ij])
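# Editor's sketch of the assumed acceptance step (the exact lines sit outside this hunk):
# picks are kept when they deviate from the median by no more than dttolerance.
# Values below are hypothetical; dttolerance = 1.0 s is assumed.
import numpy as np
Ppicks_demo = np.array([100.0, 100.2, 99.9, 103.5])
pmedian_demo = np.median(Ppicks_demo)
igood_demo = np.where(abs(Ppicks_demo - pmedian_demo) <= 1.0)[0]
ibad_demo = np.where(abs(Ppicks_demo - pmedian_demo) > 1.0)[0]
print("good picks: %s, bad picks: %s" % (igood_demo, ibad_demo))  # [0 1 2] vs [3]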
@ -725,9 +725,9 @@ def checkPonsets(pickdic, dttolerance, iplot):
goodstations = np.array(stations)[igood]
badstations = np.array(stations)[ibad]
print ("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print ("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))
print("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))
goodmarker = 'goodPonsetcheck'
badmarker = 'badPonsetcheck'
@ -794,8 +794,8 @@ def jackknife(X, phi, h):
g = len(X) / h
if type(g) is not int:
print ("jackknife: Cannot divide quantity X in equal sized subgroups!")
print ("Choose another size for subgroups!")
print("jackknife: Cannot divide quantity X in equal sized subgroups!")
print("Choose another size for subgroups!")
return PHI_jack, PHI_pseudo, PHI_sub
else:
# estimator of undisturbed spot check
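# Editor's sketch of a common delete-one jackknife (h = 1); the actual implementation
# continues outside this hunk. PHI_sub are leave-one-out estimates, PHI_pseudo the
# pseudo values that checkPonsets() compares against 2 * PHI_jack.
import numpy as np
X_demo = np.array([100.0, 100.2, 99.9, 103.5])
n_demo = len(X_demo)
phi_full_demo = np.var(X_demo)
PHI_sub_demo = np.array([np.var(np.delete(X_demo, i)) for i in range(n_demo)])
PHI_pseudo_demo = n_demo * phi_full_demo - (n_demo - 1) * PHI_sub_demo
PHI_jack_demo = PHI_pseudo_demo.mean()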
@ -863,7 +863,7 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
assert isinstance(X, Stream), "%s is not a stream object" % str(X)
print ("Check for spuriously picked S onset instead of P onset ...")
print("Check for spuriously picked S onset instead of P onset ...")
returnflag = 0
@ -876,10 +876,9 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
if len(ndat) == 0: # check for other components
ndat = X.select(component="1")
z = zdat[0].data
tz = np.arange(0, zdat[0].stats.npts / zdat[0].stats.sampling_rate,
zdat[0].stats.delta)
zdat[0].stats.delta)
# calculate RMS trace from vertical component
absz = np.sqrt(np.power(z, 2))
@ -904,16 +903,16 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
# vertical P-coda level must exceed horizontal P-coda level
# zfac times encodalevel
if zcodalevel < minsiglevel:
print ("checkZ4S: Maybe S onset? Skip this P pick!")
print("checkZ4S: Maybe S onset? Skip this P pick!")
else:
print ("checkZ4S: P onset passes checkZ4S test!")
print("checkZ4S: P onset passes checkZ4S test!")
returnflag = 1
if iplot > 1:
te = np.arange(0, edat[0].stats.npts / edat[0].stats.sampling_rate,
edat[0].stats.delta)
edat[0].stats.delta)
tn = np.arange(0, ndat[0].stats.npts / ndat[0].stats.sampling_rate,
ndat[0].stats.delta)
ndat[0].stats.delta)
plt.plot(tz, z / max(z), 'k')
plt.plot(tz[isignal], z[isignal] / max(z), 'r')
plt.plot(te, edat[0].data / max(edat[0].data) + 1, 'k')
@ -955,9 +954,8 @@ def writephases(arrivals, fformat, filename):
:type: string
'''
if fformat == 'NLLoc':
print ("Writing phases to %s for NLLoc" % filename)
print("Writing phases to %s for NLLoc" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write('# EQEVENT: Label: EQ001 Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n')
@ -969,34 +967,32 @@ def writephases(arrivals, fformat, filename):
onset = arrivals[key]['P']['mpp']
year = onset.year
month = onset.month
day =onset.day
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ss = onset.second
ms = onset.microsecond
ss_ms = ss + (ms / 1E06)
fid.write('%s ? ? ? P %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 1 \n' \
% (key, fm, year, month, day, hh, mm, ss_ms))
% (key, fm, year, month, day, hh, mm, ss_ms))
if arrivals[key]['S']['weight'] < 4:
fm = '?'
onset = arrivals[key]['S']['mpp']
year = onset.year
month = onset.month
day =onset.day
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ss = onset.second
ms = onset.microsecond
ss_ms = ss + (ms / 1E06)
fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 1 \n' \
% (key, fm, year, month, day, hh, mm, ss_ms))
% (key, fm, year, month, day, hh, mm, ss_ms))
fid.close()
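# For reference (editor's note, hypothetical values): with key='STA', fm='?' and an onset of
# 2017-08-03 09:41:54.123, the P-phase format string above produces a line like
# "STA ? ? ? P ? 20170803 0941 54.1230 GAU 0 0 0 0 1".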
if __name__ == '__main__':
import doctest
doctest.testmod()

View File

@ -1,12 +1,11 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import glob
import os
import sys
import numpy as np
from obspy import UTCDateTime, read_inventory, read
from obspy.io.xseed import Parser
from pylot.core.util.utils import key_for_set_value, find_in_list, \
@ -116,7 +115,7 @@ def make_time_line(line, datetime):
return newline
def evt_head_check(root_dir, out_dir = None):
def evt_head_check(root_dir, out_dir=None):
"""
A function to make sure that an arbitrary number of .gse files have correct values in their header.
:param root_dir: a directory leading to the .gse files.
@ -183,7 +182,7 @@ def read_metadata(path_to_inventory):
print("Neither dataless-SEED file, inventory-xml file nor "
"RESP-file found!")
print("!!WRONG CALCULATION OF SOURCE PARAMETERS!!")
robj = None,
robj = None,
elif invtype == 'dless': # prevent multiple read of large dlsv
print("Reading metadata information from dataless-SEED file ...")
if len(inv[invtype]) == 1:
@ -201,7 +200,7 @@ def restitute_trace(input_tuple):
tr, invtype, inobj, unit, force = input_tuple
remove_trace = False
seed_id = tr.get_id()
# check, whether this trace has already been corrected
if 'processing' in tr.stats.keys() \
@ -244,14 +243,14 @@ def restitute_trace(input_tuple):
remove_trace = True
# apply restitution to data
print("Correcting instrument at station %s, channel %s" \
% (tr.stats.station, tr.stats.channel))
% (tr.stats.station, tr.stats.channel))
try:
if invtype in ['resp', 'dless']:
try:
tr.simulate(**kwargs)
tr.simulate(**kwargs)
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
vmsg = '{0}'.format(e)
print(vmsg)
else:
tr.attach_response(inventory)
@ -293,15 +292,15 @@ def restitute_data(data, invtype, inobj, unit='VEL', force=False):
for tr in data:
input_tuples.append((tr, invtype, inobj, unit, force))
data.remove(tr)
pool = gen_Pool()
result = pool.map(restitute_trace, input_tuples)
pool.close()
for tr, remove_trace in result:
if not remove_trace:
data.traces.append(tr)
# check if ALL traces could be restituted, take care of large datasets
# better try restitution for smaller subsets of data (e.g. station by
# station)
@ -343,8 +342,8 @@ def get_prefilt(trace, tlow=(0.5, 0.9), thi=(5., 2.), verbosity=0):
trace.stats.station, trace.stats.channel))
# get corner frequencies for pre-filtering
fny = trace.stats.sampling_rate / 2
fc21 = fny - (fny * thi[0]/100.)
fc22 = fny - (fny * thi[1]/100.)
fc21 = fny - (fny * thi[0] / 100.)
fc22 = fny - (fny * thi[1] / 100.)
return (tlow[0], tlow[1], fc21, fc22)
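# Worked example (editor's note): for a 100 Hz trace and the defaults above, fny = 50 Hz,
# fc21 = 50 - 50 * 5. / 100. = 47.5 Hz and fc22 = 50 - 50 * 2. / 100. = 49.0 Hz, so the
# returned pre-filter corners are (0.5, 0.9, 47.5, 49.0).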

View File

@ -9,17 +9,19 @@ Created on Wed Feb 26 12:31:25 2014
import os
import platform
from pylot.core.loc import nll
from pylot.core.loc import hyposat
from pylot.core.io.inputs import PylotParameter
from pylot.core.loc import hypo71
from pylot.core.loc import hypodd
from pylot.core.loc import hyposat
from pylot.core.loc import nll
from pylot.core.loc import velest
from pylot.core.io.inputs import PylotParameter
def readDefaultFilterInformation(fname):
pparam = PylotParameter(fname)
return readFilterInformation(pparam)
def readFilterInformation(pylot_parameter):
p_filter = {'filtertype': pylot_parameter['filter_type'][0],
'freq': [pylot_parameter['minfreq'][0], pylot_parameter['maxfreq'][0]],
@ -31,6 +33,7 @@ def readFilterInformation(pylot_parameter):
'S': s_filter}
return filter_information
# determine system dependent path separator
system_name = platform.system()
if system_name in ["Linux", "Darwin"]:
@ -59,14 +62,14 @@ LOCTOOLS = dict(nll=nll, hyposat=hyposat, velest=velest, hypo71=hypo71, hypodd=h
class SetChannelComponents(object):
def __init__(self):
self.setDefaultCompPosition()
def setDefaultCompPosition(self):
# default component order
self.compPosition_Map = dict(Z=2, N=1, E=0)
self.compName_Map = {'3': 'Z',
'1': 'N',
'2': 'E'}
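# Editor's note: compPosition_Map gives the default plot position per component
# (E=0, N=1, Z=2), while compName_Map translates the numeric channel codes
# '1', '2' and '3' into N, E and Z respectively.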
def _getCurrentPosition(self, component):
for key, value in self.compName_Map.items():
if value == component:
@ -85,10 +88,10 @@ class SetChannelComponents(object):
def setCompPosition(self, component_alter, component, switch=True):
component_alter = str(component_alter)
if not component_alter in self.compName_Map.keys():
errMsg='setCompPosition: Unrecognized alternative component {}. Expecting one of {}.'
errMsg = 'setCompPosition: Unrecognized alternative component {}. Expecting one of {}.'
raise ValueError(errMsg.format(component_alter, self.compName_Map.keys()))
if not component in self.compPosition_Map.keys():
errMsg='setCompPosition: Unrecognized target component {}. Expecting one of {}.'
errMsg = 'setCompPosition: Unrecognized target component {}. Expecting one of {}.'
raise ValueError(errMsg.format(component, self.compPosition_Map.keys()))
print('setCompPosition: set component {} to {}'.format(component_alter, component))
if switch:
@ -97,7 +100,7 @@ class SetChannelComponents(object):
def getCompPosition(self, component):
return self._getCurrentPosition(component)[0]
def getPlotPosition(self, component):
component = str(component)
if component in self.compPosition_Map.keys():
@ -105,6 +108,5 @@ class SetChannelComponents(object):
elif component in self.compName_Map.keys():
return self.compPosition_Map[self.compName_Map[component]]
else:
errMsg='getCompPosition: Unrecognized component {}. Expecting one of {} or {}.'
errMsg = 'getCompPosition: Unrecognized component {}. Expecting one of {} or {}.'
raise ValueError(errMsg.format(component, self.compPosition_Map.keys(), self.compName_Map.keys()))

View File

@ -25,5 +25,6 @@ class OverwriteError(IOError):
class ParameterError(Exception):
pass
class ProcessingError(RuntimeError):
pass

View File

@ -6,7 +6,6 @@ import os
from obspy import UTCDateTime
from obspy.core.event import Event as ObsPyEvent
from obspy.core.event import Origin, ResourceIdentifier
from pylot.core.io.phases import picks_from_picksdict
@ -14,10 +13,11 @@ class Event(ObsPyEvent):
'''
Pickable class derived from ~obspy.core.event.Event containing information on a single event.
'''
def __init__(self, path):
self.pylot_id = path.split('/')[-1]
# initialize super class
super(Event, self).__init__(resource_id=ResourceIdentifier('smi:local/'+self.pylot_id))
super(Event, self).__init__(resource_id=ResourceIdentifier('smi:local/' + self.pylot_id))
self.path = path
self.database = path.split('/')[-2]
self.datapath = path.split('/')[-3]
@ -32,13 +32,13 @@ class Event(ObsPyEvent):
def get_notes_path(self):
notesfile = os.path.join(self.path, 'notes.txt')
return notesfile
def get_notes(self):
notesfile = self.get_notes_path()
if os.path.isfile(notesfile):
with open(notesfile) as infile:
path = str(infile.readlines()[0].split('\n')[0])
text = '[eventInfo: '+path+']'
text = '[eventInfo: ' + path + ']'
self.addNotes(text)
try:
datetime = UTCDateTime(path.split('/')[-1])
@ -73,13 +73,13 @@ class Event(ObsPyEvent):
'''
for station in picks:
self.pylot_picks[station] = picks[station]
#add ObsPy picks
# add ObsPy picks
self.picks = picks_from_picksdict(self.pylot_picks)
def addAutopicks(self, autopicks):
for station in autopicks:
self.pylot_autopicks[station] = autopicks[station]
def setPick(self, station, pick):
if pick:
self.pylot_picks[station] = pick
@ -89,14 +89,14 @@ class Event(ObsPyEvent):
except Exception as e:
print('Could not remove pick {} from station {}: {}'.format(pick, station, e))
self.picks = picks_from_picksdict(self.pylot_picks)
def setPicks(self, picks):
'''
set pylot picks and delete and overwrite all existing
'''
self.pylot_picks = picks
self.picks = picks_from_picksdict(self.pylot_picks)
def getPick(self, station):
if station in self.pylot_picks.keys():
return self.pylot_picks[station]
@ -110,7 +110,7 @@ class Event(ObsPyEvent):
def setAutopicks(self, autopicks):
self.pylot_autopicks = autopicks
def getAutopick(self, station):
if station in self.pylot_autopicks.keys():
return self.pylot_autopicks[station]

View File

@ -1,16 +1,15 @@
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import numpy as np
import obspy
from matplotlib import cm
from scipy.interpolate import griddata
from PySide import QtGui
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from PySide import QtCore, QtGui
from mpl_toolkits.basemap import Basemap
from pylot.core.util.widgets import PickDlg
from scipy.interpolate import griddata
plt.interactive(False)
class map_projection(QtGui.QWidget):
def __init__(self, parent, figure=None):
'''
@ -29,7 +28,7 @@ class map_projection(QtGui.QWidget):
self.init_stations()
self.init_basemap(resolution='l')
self.init_map()
#self.show()
# self.show()
def init_map(self):
self.init_lat_lon_dimensions()
@ -37,7 +36,7 @@ class map_projection(QtGui.QWidget):
self.init_x_y_dimensions()
self.connectSignals()
self.draw_everything()
def onpick(self, event):
ind = event.ind
button = event.mouseevent.button
@ -45,7 +44,7 @@ class map_projection(QtGui.QWidget):
return
data = self._parent.get_data().getWFData()
for index in ind:
station=str(self.station_names[index].split('.')[-1])
station = str(self.station_names[index].split('.')[-1])
try:
pickDlg = PickDlg(self, parameter=self._parent._inputs,
data=data.select(station=station),
@ -90,7 +89,7 @@ class map_projection(QtGui.QWidget):
else:
self.figure = self._parent.am_figure
self.toolbar = self._parent.am_toolbar
self.main_ax = self.figure.add_subplot(111)
self.canvas = self.figure.canvas
@ -106,29 +105,29 @@ class map_projection(QtGui.QWidget):
self.comboBox_am = QtGui.QComboBox()
self.comboBox_am.insertItem(0, 'auto')
self.comboBox_am.insertItem(1, 'manual')
self.comboBox_am.insertItem(1, 'manual')
self.top_row.addWidget(QtGui.QLabel('Select a phase: '))
self.top_row.addWidget(self.comboBox_phase)
self.top_row.setStretch(1,1) #set stretch of item 1 to 1
self.top_row.setStretch(1, 1) # set stretch of item 1 to 1
self.main_box.addWidget(self.canvas)
self.main_box.addWidget(self.toolbar)
def init_stations(self):
def get_station_names_lat_lon(parser):
station_names=[]
lat=[]
lon=[]
station_names = []
lat = []
lon = []
for station in parser.stations:
station_name=station[0].station_call_letters
network=station[0].network_code
station_name = station[0].station_call_letters
network = station[0].network_code
if not station_name in station_names:
station_names.append(network+'.'+station_name)
station_names.append(network + '.' + station_name)
lat.append(station[0].latitude)
lon.append(station[0].longitude)
return station_names, lat, lon
station_names, lat, lon = get_station_names_lat_lon(self.parser)
self.station_names = station_names
self.lat = lat
@ -136,52 +135,53 @@ class map_projection(QtGui.QWidget):
def init_picks(self):
phase = self.comboBox_phase.currentText()
def get_picks(station_names):
picks=[]
picks = []
for station in station_names:
try:
station=station.split('.')[-1]
station = station.split('.')[-1]
picks.append(self.picks_dict[station][phase]['mpp'])
except:
picks.append(np.nan)
return picks
def get_picks_rel(picks):
picks_rel=[]
picks_rel = []
picks_utc = []
for pick in picks:
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
picks_utc.append(pick)
minp = min(picks_utc)
for pick in picks:
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
pick -= minp
picks_rel.append(pick)
return picks_rel
self.picks = get_picks(self.station_names)
self.picks_rel = get_picks_rel(self.picks)
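# Editor's note: picks_rel stores each pick as seconds after the earliest UTCDateTime
# pick (pick -= minp above); non-UTCDateTime entries such as the np.nan placeholders
# pass through unchanged and are dropped later in init_picks_active().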
def init_picks_active(self):
def remove_nan_picks(picks):
picks_no_nan=[]
picks_no_nan = []
for pick in picks:
if not np.isnan(pick):
picks_no_nan.append(pick)
return picks_no_nan
self.picks_no_nan = remove_nan_picks(self.picks_rel)
def init_stations_active(self):
def remove_nan_lat_lon(picks, lat, lon):
lat_no_nan=[]
lon_no_nan=[]
lat_no_nan = []
lon_no_nan = []
for index, pick in enumerate(picks):
if not np.isnan(pick):
lat_no_nan.append(lat[index])
lon_no_nan.append(lon[index])
return lat_no_nan, lon_no_nan
self.lat_no_nan, self.lon_no_nan = remove_nan_lat_lon(self.picks_rel, self.lat, self.lon)
def init_lat_lon_dimensions(self):
@ -189,7 +189,7 @@ class map_projection(QtGui.QWidget):
londim = max(lon) - min(lon)
latdim = max(lat) - min(lat)
return londim, latdim
self.londim, self.latdim = get_lon_lat_dim(self.lon, self.lat)
def init_x_y_dimensions(self):
@ -197,30 +197,30 @@ class map_projection(QtGui.QWidget):
xdim = max(x) - min(x)
ydim = max(y) - min(y)
return xdim, ydim
self.x, self.y = self.basemap(self.lon, self.lat)
self.xdim, self.ydim = get_x_y_dim(self.x, self.y)
def init_basemap(self, resolution='l'):
#basemap = Basemap(projection=projection, resolution = resolution, ax=self.main_ax)
basemap = Basemap(projection='lcc', resolution = resolution, ax=self.main_ax,
# basemap = Basemap(projection=projection, resolution = resolution, ax=self.main_ax)
basemap = Basemap(projection='lcc', resolution=resolution, ax=self.main_ax,
width=5e6, height=2e6,
lat_0=(min(self.lat)+max(self.lat))/2.,
lon_0=(min(self.lon)+max(self.lon))/2.)
#basemap.fillcontinents(color=None, lake_color='aqua',zorder=1)
basemap.drawmapboundary(zorder=2)#fill_color='darkblue')
lat_0=(min(self.lat) + max(self.lat)) / 2.,
lon_0=(min(self.lon) + max(self.lon)) / 2.)
# basemap.fillcontinents(color=None, lake_color='aqua',zorder=1)
basemap.drawmapboundary(zorder=2) # fill_color='darkblue')
basemap.shadedrelief(zorder=3)
basemap.drawcountries(zorder=4)
basemap.drawstates(zorder=5)
basemap.drawcoastlines(zorder=6)
self.basemap = basemap
self.figure.tight_layout()
def init_lat_lon_grid(self):
def get_lat_lon_axis(lat, lon):
steplat = (max(lat)-min(lat))/250
steplon = (max(lon)-min(lon))/250
steplat = (max(lat) - min(lat)) / 250
steplon = (max(lon) - min(lon)) / 250
lataxis = np.arange(min(lat), max(lat), steplat)
lonaxis = np.arange(min(lon), max(lon), steplon)
@ -235,7 +235,8 @@ class map_projection(QtGui.QWidget):
def init_picksgrid(self):
self.picksgrid_no_nan = griddata((self.lat_no_nan, self.lon_no_nan),
self.picks_no_nan, (self.latgrid, self.longrid), method='linear') ##################
self.picks_no_nan, (self.latgrid, self.longrid),
method='linear') ##################
def draw_contour_filled(self, nlevel='50'):
levels = np.linspace(min(self.picks_no_nan), max(self.picks_no_nan), nlevel)
@ -244,7 +245,7 @@ class map_projection(QtGui.QWidget):
def scatter_all_stations(self):
self.sc = self.basemap.scatter(self.lon, self.lat, s=50, facecolor='none', latlon=True,
zorder=10, picker=True, edgecolor='m', label='Not Picked')
zorder=10, picker=True, edgecolor='m', label='Not Picked')
self.cid = self.canvas.mpl_connect('pick_event', self.onpick)
if self.eventLoc:
lat, lon = self.eventLoc
@ -255,11 +256,11 @@ class map_projection(QtGui.QWidget):
lon = self.lon_no_nan
lat = self.lat_no_nan
#workaround because of an issue with latlon transformation of arrays with len <3
# workaround because of an issue with latlon transformation of arrays with len <3
if len(lon) <= 2 and len(lat) <= 2:
self.sc_picked = self.basemap.scatter(lon[0], lat[0], s=50, facecolor='white',
c=self.picks_no_nan[0], latlon=True, zorder=11, label='Picked')
if len(lon) == 2 and len(lat) == 2:
if len(lon) == 2 and len(lat) == 2:
self.sc_picked = self.basemap.scatter(lon[1], lat[1], s=50, facecolor='white',
c=self.picks_no_nan[1], latlon=True, zorder=11)
else:
@ -267,11 +268,11 @@ class map_projection(QtGui.QWidget):
c=self.picks_no_nan, latlon=True, zorder=11, label='Picked')
def annotate_ax(self):
self.annotations=[]
self.annotations = []
for index, name in enumerate(self.station_names):
self.annotations.append(self.main_ax.annotate(' %s' % name, xy=(self.x[index], self.y[index]),
fontsize='x-small', color='white', zorder=12))
self.legend=self.main_ax.legend()
self.legend = self.main_ax.legend()
def add_cbar(self, label):
cbar = self.main_ax.figure.colorbar(self.sc_picked, fraction=0.025)
@ -307,19 +308,19 @@ class map_projection(QtGui.QWidget):
def remove_drawings(self):
if hasattr(self, 'sc_picked'):
self.sc_picked.remove()
del(self.sc_picked)
del (self.sc_picked)
if hasattr(self, 'sc_event'):
self.sc_event.remove()
del(self.sc_event)
del (self.sc_event)
if hasattr(self, 'cbar'):
self.cbar.remove()
del(self.cbar)
del (self.cbar)
if hasattr(self, 'contourf'):
self.remove_contourf()
del(self.contourf)
del (self.contourf)
if hasattr(self, 'cid'):
self.canvas.mpl_disconnect(self.cid)
del(self.cid)
del (self.cid)
try:
self.sc.remove()
except Exception as e:
@ -343,18 +344,18 @@ class map_projection(QtGui.QWidget):
xlim = map.ax.get_xlim()
ylim = map.ax.get_ylim()
x, y = event.xdata, event.ydata
zoom = {'up': 1./2.,
zoom = {'up': 1. / 2.,
'down': 2.}
if not event.xdata or not event.ydata:
return
if event.button in zoom:
factor = zoom[event.button]
xdiff = (xlim[1]-xlim[0])*factor
xdiff = (xlim[1] - xlim[0]) * factor
xl = x - 0.5 * xdiff
xr = x + 0.5 * xdiff
ydiff = (ylim[1]-ylim[0])*factor
ydiff = (ylim[1] - ylim[0]) * factor
yb = y - 0.5 * ydiff
yt = y + 0.5 * ydiff
@ -364,10 +365,8 @@ class map_projection(QtGui.QWidget):
map.ax.set_xlim(xl, xr)
map.ax.set_ylim(yb, yt)
map.ax.figure.canvas.draw()
def _warn(self, message):
self.qmb = QtGui.QMessageBox(QtGui.QMessageBox.Icon.Warning,
'Warning', message)
self.qmb.show()
self.qmb.show()

View File

@ -2,20 +2,23 @@
# -*- coding: utf-8 -*-
import warnings
import numpy as np
from obspy import UTCDateTime
from pylot.core.util.utils import fit_curve, find_nearest, clims
from pylot.core.util.utils import fit_curve, clims
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
__author__ = 'sebastianw'
def create_axis(x0, incr, npts):
ax = np.zeros(npts)
for i in range(npts):
ax[i] = x0 + incr * i
return ax
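# Editor's note: equivalent to the vectorised one-liner x0 + incr * np.arange(npts).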
def gauss_parameter(te, tm, tl, eta):
'''
takes three onset times and returns the parameters sig1, sig2, a1 and a2
@ -79,9 +82,9 @@ def gauss_branches(k, param_tuple):
:returns fun_vals: list with function values along axes x
'''
#python 3 workaround
# python 3 workaround
mu, sig1, sig2, a1, a2 = param_tuple
def _func(k, mu, sig1, sig2, a1, a2):
if k < mu:
rval = a1 * 1 / (np.sqrt(2 * np.pi) * sig1) * np.exp(-((k - mu) / sig1) ** 2 / 2)
@ -110,9 +113,9 @@ def exp_branches(k, param_tuple):
:returns fun_vals: list with function values along axes x:
'''
#python 3 workaround
# python 3 workaround
mu, sig1, sig2, a = param_tuple
def _func(k, mu, sig1, sig2, a):
mu = float(mu)
if k < mu:
@ -313,14 +316,14 @@ class ProbabilityDensityFunction(object):
:return float: rval
'''
#rval = 0
#for x in self.axis:
# rval = 0
# for x in self.axis:
# rval += x * self.data(x)
rval = self.mu
# Not sure about this! That might not be the barycentre.
# However, for std calculation (next function)
# self.mu is also used!! (LK, 02/2017)
return rval
return rval
def standard_deviation(self):
mu = self.mu
@ -394,7 +397,6 @@ class ProbabilityDensityFunction(object):
qu = self.quantile(1 - prob_value)
return qu - ql
def quantile_dist_frac(self, x):
"""
takes a probability value and returns the fraction of two
@ -411,8 +413,7 @@ class ProbabilityDensityFunction(object):
"""
if x <= 0 or x >= 0.25:
raise ValueError('Value out of range.')
return self.quantile_distance(0.5-x)/self.quantile_distance(x)
return self.quantile_distance(0.5 - x) / self.quantile_distance(x)
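# Editor's note: e.g. quantile_dist_frac(0.1) = (q(0.6) - q(0.4)) / (q(0.9) - q(0.1)),
# i.e. the width of a narrow central interval relative to a wider one; smaller values
# indicate a PDF that is narrow around its median compared to its tails. x must lie
# strictly between 0 and 0.25.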
def plot(self, label=None):
import matplotlib.pyplot as plt
@ -486,4 +487,3 @@ class ProbabilityDensityFunction(object):
x0, npts = self.commonlimits(incr, other)
return x0, incr, npts

View File

@ -3,6 +3,7 @@
import matplotlib.pyplot as plt
def create_bin_list(l_boundary, u_boundary, nbins=100):
"""
takes two boundaries and a number of bins and creates a list of bins for
@ -54,4 +55,4 @@ def histplot(array, binlist, xlab='Values',
if fnout:
plt.savefig(fnout)
else:
plt.show()
plt.show()

View File

@ -1,6 +1,9 @@
# -*- coding: utf-8 -*-
import sys, os, traceback
import multiprocessing
import os
import sys
import traceback
from PySide.QtCore import QThread, Signal, Qt, Slot, QRunnable, QObject
from PySide.QtGui import QDialog, QProgressBar, QLabel, QHBoxLayout, QPushButton
@ -42,7 +45,7 @@ class AutoPickThread(QThread):
class Thread(QThread):
message = Signal(str)
def __init__(self, parent, func, arg=None, progressText=None, pb_widget=None, redirect_stdout=False):
QThread.__init__(self, parent)
self.func = func
@ -55,7 +58,7 @@ class Thread(QThread):
def run(self):
if self.redirect_stdout:
sys.stdout = self
sys.stdout = self
try:
if self.arg:
self.data = self.func(self.arg)
@ -68,7 +71,7 @@ class Thread(QThread):
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print('Exception: {}, file: {}, line: {}'.format(exc_type, fname, exc_tb.tb_lineno))
sys.stdout = sys.__stdout__
sys.stdout = sys.__stdout__
def __del__(self):
self.wait()
@ -79,7 +82,7 @@ class Thread(QThread):
# generate widget if not given in init
if not self.pb_widget:
self.pb_widget = QDialog(self.parent())
self.pb_widget.setWindowFlags(Qt.SplashScreen)
self.pb_widget.setWindowFlags(Qt.SplashScreen)
self.pb_widget.setModal(True)
# add button
@ -109,6 +112,7 @@ class Worker(QRunnable):
'''
'''
def __init__(self, fun, args,
progressText=None,
pb_widget=None,
@ -116,7 +120,7 @@ class Worker(QRunnable):
super(Worker, self).__init__()
self.fun = fun
self.args = args
#self.kwargs = kwargs
# self.kwargs = kwargs
self.signals = WorkerSignals()
self.progressText = progressText
self.pb_widget = pb_widget
@ -131,9 +135,9 @@ class Worker(QRunnable):
result = self.fun(self.args)
except:
traceback.print_exc()
exctype, value = sys.exc_info ()[:2]
exctype, value = sys.exc_info()[:2]
print(exctype, value, traceback.format_exc())
#self.signals.error.emit ((exctype, value, traceback.format_exc ()))
# self.signals.error.emit ((exctype, value, traceback.format_exc ()))
else:
self.signals.result.emit(result)
finally:
@ -157,7 +161,7 @@ class WorkerSignals(QObject):
class MultiThread(QThread):
finished = Signal(str)
message = Signal(str)
message = Signal(str)
def __init__(self, parent, func, args, ncores=1,
progressText=None, pb_widget=None, redirect_stdout=False):
@ -170,16 +174,16 @@ class MultiThread(QThread):
self.redirect_stdout = redirect_stdout
self.finished.connect(self.hideProgressbar)
self.showProgressbar()
def run(self):
if self.redirect_stdout:
sys.stdout = self
sys.stdout = self
try:
if not self.ncores:
self.ncores = multiprocessing.cpu_count()
pool = multiprocessing.Pool(self.ncores)
self.data = pool.map_async(self.func, self.args, callback=self.emitDone)
#self.data = pool.apply_async(self.func, self.shotlist, callback=self.emitDone) #emit each time returned
# self.data = pool.apply_async(self.func, self.shotlist, callback=self.emitDone) #emit each time returned
pool.close()
self._executed = True
except Exception as e:
@ -188,7 +192,7 @@ class MultiThread(QThread):
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print('Exception: {}, file: {}, line: {}'.format(exc_type, fname, exc_tb.tb_lineno))
sys.stdout = sys.__stdout__
sys.stdout = sys.__stdout__
def __del__(self):
self.wait()
@ -197,7 +201,7 @@ class MultiThread(QThread):
if self.progressText:
if not self.pb_widget:
self.pb_widget = QDialog(self.parent())
self.pb_widget.setWindowFlags(Qt.SplashScreen)
self.pb_widget.setWindowFlags(Qt.SplashScreen)
self.pb_widget.setModal(True)
hl = QHBoxLayout()
pb = QProgressBar()

View File

@ -2,26 +2,28 @@
# -*- coding: utf-8 -*-
import hashlib
import numpy as np
from scipy.interpolate import splrep, splev
import os
import platform
import re
import warnings
import subprocess
import numpy as np
from obspy import UTCDateTime, read
from pylot.core.io.inputs import PylotParameter
from scipy.interpolate import splrep, splev
def _pickle_method(m):
if m.im_self is None:
return getattr, (m.im_class, m.im_func.func_name)
else:
return getattr, (m.im_self, m.im_func.func_name)
def fit_curve(x, y):
return splev, splrep(x, y)
def getindexbounds(f, eta):
mi = f.argmax()
m = max(f)
@ -34,9 +36,9 @@ def getindexbounds(f, eta):
def gen_Pool(ncores='max'):
import multiprocessing
if ncores=='max':
ncores=multiprocessing.cpu_count()
if ncores == 'max':
ncores = multiprocessing.cpu_count()
pool = multiprocessing.Pool(ncores)
return pool
@ -106,6 +108,7 @@ def findComboBoxIndex(combo_box, val):
"""
return combo_box.findText(val) if combo_box.findText(val) is not -1 else 0
def find_in_list(list, str):
"""
takes a list of strings and a string and returns the first list item
@ -135,6 +138,7 @@ def find_in_list(list, str):
return rlist[0]
return None
def find_nearest(array, value):
'''
function find_nearest takes an array and a value and returns the
@ -181,12 +185,14 @@ def fnConstructor(s):
fn = '_' + fn
return fn
def real_None(value):
if value == 'None':
return None
else:
return value
def four_digits(year):
"""
takes a two digit year integer and returns the correct four digit equivalent
@ -307,6 +313,7 @@ def getPatternLine(fn, pattern):
return None
def is_executable(fn):
"""
takes a filename and returns True if the file is executable on the system
@ -492,6 +499,7 @@ def runProgram(cmd, parameter=None):
subprocess.check_output('{} | tee /dev/stderr'.format(cmd), shell=True)
def which(program, infile=None):
"""
takes a program name and returns the full path to the executable or None
@ -510,7 +518,7 @@ def which(program, infile=None):
bpath = os.path.join(os.path.expanduser('~'), '.pylot', 'pylot.in')
else:
bpath = os.path.join(os.path.expanduser('~'), '.pylot', infile)
if os.path.exists(bpath):
nllocpath = ":" + PylotParameter(bpath).get('nllocbin')
os.environ['PATH'] += nllocpath
@ -538,6 +546,7 @@ def which(program, infile=None):
return None
if __name__ == "__main__":
import doctest

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,8 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, time
import sys
from PySide.QtGui import QApplication
from pylot.core.util.widgets import HelpForm

View File

@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
import sys
import matplotlib
matplotlib.use('Qt4Agg')
@ -10,7 +11,6 @@ matplotlib.rcParams['backend.qt4'] = 'PySide'
from PySide.QtGui import QApplication
from obspy.core import read
from pylot.core.util.widgets import PickDlg
import icons_rc
app = QApplication(sys.argv)

View File

@ -1,7 +1,8 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, time
import sys
from PySide.QtGui import QApplication
from pylot.core.util.widgets import PropertiesDlg

View File

@ -2,7 +2,9 @@
# -*- coding: utf-8 -*-
import sys, time
import sys
import time
from PySide.QtGui import QApplication
from pylot.core.util.widgets import FilterOptionsDialog, PropertiesDlg, HelpForm

View File

@ -8,20 +8,16 @@ import unittest
class Test(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testName(self):
pass
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()

View File

@ -8,12 +8,10 @@ import unittest
class Test(unittest.TestCase):
def testName(self):
pass
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()

View File

@ -6,40 +6,40 @@
Only for test purposes!
"""
import argparse
import glob
from obspy.core import read
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.charfuns import *
from pylot.core.pick.picker import *
import glob
import argparse
def run_makeCF(project, database, event, iplot, station=None):
#parameters for CF calculation
t2 = 7 #length of moving window for HOS calculation [sec]
p = 4 #order of HOS
cuttimes = [10, 50] #start and end time for CF calculation
bpz = [2, 30] #corner frequencies of bandpass filter, vertical component
bph = [2, 15] #corner frequencies of bandpass filter, horizontal components
tdetz= 1.2 #length of AR-determination window [sec], vertical component
tdeth= 0.8 #length of AR-determination window [sec], horizontal components
tpredz = 0.4 #length of AR-prediction window [sec], vertical component
tpredh = 0.4 #length of AR-prediction window [sec], horizontal components
addnoise = 0.001 #add noise to seismogram for stable AR prediction
arzorder = 2 #chosen order of AR process, vertical component
arhorder = 4 #chosen order of AR process, horizontal components
TSNRhos = [5, 0.5, 1, 0.1] #window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
#from HOS-CF [noise window, safety gap, signal window, slope determination window]
TSNRarz = [5, 0.5, 1, 0.5] #window lengths [s] for calculating SNR for earliest/lates pick and quality assessment
#from ARZ-CF
#get waveform data
# parameters for CF calculation
t2 = 7 # length of moving window for HOS calculation [sec]
p = 4 # order of HOS
cuttimes = [10, 50] # start and end time for CF calculation
bpz = [2, 30] # corner frequencies of bandpass filter, vertical component
bph = [2, 15] # corner frequencies of bandpass filter, horizontal components
tdetz = 1.2 # length of AR-determination window [sec], vertical component
tdeth = 0.8 # length of AR-determination window [sec], horizontal components
tpredz = 0.4 # length of AR-prediction window [sec], vertical component
tpredh = 0.4 # length of AR-prediction window [sec], horizontal components
addnoise = 0.001 # add noise to seismogram for stable AR prediction
arzorder = 2 # chosen order of AR process, vertical component
arhorder = 4 # chosen order of AR process, horizontal components
TSNRhos = [5, 0.5, 1, 0.1] # window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
# from HOS-CF [noise window, safety gap, signal window, slope determination window]
TSNRarz = [5, 0.5, 1, 0.5] # window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
# from ARZ-CF
# get waveform data
if station:
dpz = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HZ.msd' % (project, database, event, station)
dpe = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HE.msd' % (project, database, event, station)
dpn = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HN.msd' % (project, database, event, station)
#dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_z.gse' % (project, database, event, station)
#dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_e.gse' % (project, database, event, station)
#dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_n.gse' % (project, database, event, station)
dpz = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HZ.msd' % (project, database, event, station)
dpe = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HE.msd' % (project, database, event, station)
dpn = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HN.msd' % (project, database, event, station)
# dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_z.gse' % (project, database, event, station)
# dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_e.gse' % (project, database, event, station)
# dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_n.gse' % (project, database, event, station)
else:
# dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*_z.gse' % (project, database, event)
# dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*_e.gse' % (project, database, event)
@ -51,245 +51,253 @@ def run_makeCF(project, database, event, iplot, station=None):
wfefiles = glob.glob(dpe)
wfnfiles = glob.glob(dpn)
if wfzfiles:
for i in range(len(wfzfiles)):
print 'Vertical component data found ...'
print wfzfiles[i]
st = read('%s' % wfzfiles[i])
st_copy = st.copy()
#filter and taper data
tr_filt = st[0].copy()
tr_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
st_copy[0].data = tr_filt.data
##############################################################
#calculate HOS-CF using subclass HOScf of class CharacteristicFunction
hoscf = HOScf(st_copy, cuttimes, t2, p) #instance of HOScf
##############################################################
#calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_aic = tr_filt.copy()
tr_aic.data = hoscf.getCF()
st_copy[0].data = tr_aic.data
aiccf = AICcf(st_copy, cuttimes) #instance of AICcf
##############################################################
#get prelimenary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
aicpick = AICPicker(aiccf, None, TSNRhos, 3, 10, None, 0.1)
##############################################################
#get refined onset time from HOS-CF using class Picker
hospick = PragPicker(hoscf, None, TSNRhos, 2, 10, 0.001, 0.2, aicpick.getpick())
#get earliest and latest possible picks
hosELpick = EarlLatePicker(hoscf, 1.5, TSNRhos, None, 10, None, None, hospick.getpick())
##############################################################
#calculate ARZ-CF using subclass ARZcf of class CharcteristicFunction
#get stream object of filtered data
st_copy[0].data = tr_filt.data
arzcf = ARZcf(st_copy, cuttimes, tpredz, arzorder, tdetz, addnoise) #instance of ARZcf
##############################################################
#calculate AIC-ARZ-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_arzaic = tr_filt.copy()
tr_arzaic.data = arzcf.getCF()
st_copy[0].data = tr_arzaic.data
araiccf = AICcf(st_copy, cuttimes, tpredz, 0, tdetz) #instance of AICcf
##############################################################
#get onset time from AIC-ARZ-CF using subclass AICPicker of class AutoPicking
aicarzpick = AICPicker(araiccf, 1.5, TSNRarz, 2, 10, None, 0.1)
##############################################################
#get refined onset time from ARZ-CF using class Picker
arzpick = PragPicker(arzcf, 1.5, TSNRarz, 2.0, 10, 0.1, 0.05, aicarzpick.getpick())
#get earliest and latest possible picks
arzELpick = EarlLatePicker(arzcf, 1.5, TSNRarz, None, 10, None, None, arzpick.getpick())
for i in range(len(wfzfiles)):
print('Vertical component data found ...')
print(wfzfiles[i])
st = read('%s' % wfzfiles[i])
st_copy = st.copy()
# filter and taper data
tr_filt = st[0].copy()
tr_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
st_copy[0].data = tr_filt.data
##############################################################
# calculate HOS-CF using subclass HOScf of class CharacteristicFunction
hoscf = HOScf(st_copy, cuttimes, t2, p) # instance of HOScf
##############################################################
# calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
# class needs stream object => build it
tr_aic = tr_filt.copy()
tr_aic.data = hoscf.getCF()
st_copy[0].data = tr_aic.data
aiccf = AICcf(st_copy, cuttimes) # instance of AICcf
##############################################################
# get preliminary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
aicpick = AICPicker(aiccf, None, TSNRhos, 3, 10, None, 0.1)
##############################################################
# get refined onset time from HOS-CF using class Picker
hospick = PragPicker(hoscf, None, TSNRhos, 2, 10, 0.001, 0.2, aicpick.getpick())
# get earliest and latest possible picks
hosELpick = EarlLatePicker(hoscf, 1.5, TSNRhos, None, 10, None, None, hospick.getpick())
##############################################################
# calculate ARZ-CF using subclass ARZcf of class CharacteristicFunction
# get stream object of filtered data
st_copy[0].data = tr_filt.data
arzcf = ARZcf(st_copy, cuttimes, tpredz, arzorder, tdetz, addnoise) # instance of ARZcf
##############################################################
# calculate AIC-ARZ-CF using subclass AICcf of class CharacteristicFunction
# class needs stream object => build it
tr_arzaic = tr_filt.copy()
tr_arzaic.data = arzcf.getCF()
st_copy[0].data = tr_arzaic.data
araiccf = AICcf(st_copy, cuttimes, tpredz, 0, tdetz) # instance of AICcf
##############################################################
# get onset time from AIC-ARZ-CF using subclass AICPicker of class AutoPicking
aicarzpick = AICPicker(araiccf, 1.5, TSNRarz, 2, 10, None, 0.1)
##############################################################
# get refined onset time from ARZ-CF using class Picker
arzpick = PragPicker(arzcf, 1.5, TSNRarz, 2.0, 10, 0.1, 0.05, aicarzpick.getpick())
# get earliest and latest possible picks
arzELpick = EarlLatePicker(arzcf, 1.5, TSNRarz, None, 10, None, None, arzpick.getpick())
elif not wfzfiles:
print 'No vertical component data found!'
print('No vertical component data found!')
if wfefiles and wfnfiles:
for i in range(len(wfefiles)):
print 'Horizontal component data found ...'
print wfefiles[i]
print wfnfiles[i]
#merge streams
H = read('%s' % wfefiles[i])
H += read('%s' % wfnfiles[i])
H_copy = H.copy()
#filter and taper data
trH1_filt = H[0].copy()
trH2_filt = H[1].copy()
trH1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
H_copy[0].data = trH1_filt.data
H_copy[1].data = trH2_filt.data
for i in range(len(wfefiles)):
print('Horizontal component data found ...')
print(wfefiles[i])
print(wfnfiles[i])
# merge streams
H = read('%s' % wfefiles[i])
H += read('%s' % wfnfiles[i])
H_copy = H.copy()
# filter and taper data
trH1_filt = H[0].copy()
trH2_filt = H[1].copy()
trH1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
H_copy[0].data = trH1_filt.data
H_copy[1].data = trH2_filt.data
##############################################################
#calculate ARH-CF using subclass ARHcf of class CharcteristicFunction
arhcf = ARHcf(H_copy, cuttimes, tpredh, arhorder, tdeth, addnoise) #instance of ARHcf
##############################################################
#calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_arhaic = trH1_filt.copy()
tr_arhaic.data = arhcf.getCF()
H_copy[0].data = tr_arhaic.data
#calculate ARH-AIC-CF
arhaiccf = AICcf(H_copy, cuttimes, tpredh, 0, tdeth) #instance of AICcf
##############################################################
#get onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
aicarhpick = AICPicker(arhaiccf, 1.5, TSNRarz, 4, 10, None, 0.1)
###############################################################
#get refined onset time from ARH-CF using class Picker
arhpick = PragPicker(arhcf, 1.5, TSNRarz, 2.5, 10, 0.1, 0.05, aicarhpick.getpick())
#get earliest and latest possible picks
arhELpick = EarlLatePicker(arhcf, 1.5, TSNRarz, None, 10, None, None, arhpick.getpick())
##############################################################
# calculate ARH-CF using subclass ARHcf of class CharacteristicFunction
arhcf = ARHcf(H_copy, cuttimes, tpredh, arhorder, tdeth, addnoise) # instance of ARHcf
##############################################################
# calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
# class needs stream object => build it
tr_arhaic = trH1_filt.copy()
tr_arhaic.data = arhcf.getCF()
H_copy[0].data = tr_arhaic.data
# calculate ARH-AIC-CF
arhaiccf = AICcf(H_copy, cuttimes, tpredh, 0, tdeth) # instance of AICcf
##############################################################
# get onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
aicarhpick = AICPicker(arhaiccf, 1.5, TSNRarz, 4, 10, None, 0.1)
###############################################################
# get refined onset time from ARH-CF using class Picker
arhpick = PragPicker(arhcf, 1.5, TSNRarz, 2.5, 10, 0.1, 0.05, aicarhpick.getpick())
# get earliest and latest possible picks
arhELpick = EarlLatePicker(arhcf, 1.5, TSNRarz, None, 10, None, None, arhpick.getpick())
# create stream with 3 traces
# merge streams
AllC = read('%s' % wfefiles[i])
AllC += read('%s' % wfnfiles[i])
AllC += read('%s' % wfzfiles[i])
# filter and taper data
All1_filt = AllC[0].copy()
All2_filt = AllC[1].copy()
All3_filt = AllC[2].copy()
All1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
All2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
All3_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
All1_filt.taper(max_percentage=0.05, type='hann')
All2_filt.taper(max_percentage=0.05, type='hann')
All3_filt.taper(max_percentage=0.05, type='hann')
AllC[0].data = All1_filt.data
AllC[1].data = All2_filt.data
AllC[2].data = All3_filt.data
# calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
ar3ccf = AR3Ccf(AllC, cuttimes, tpredz, arhorder, tdetz, addnoise) # instance of AR3Ccf
# get earliest and latest possible pick from initial ARH-pick
ar3cELpick = EarlLatePicker(ar3ccf, 1.5, TSNRarz, None, 10, None, None, arhpick.getpick())
##############################################################
if iplot:
# plot vertical trace
plt.figure()
tr = st[0]
tdata = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
p1, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
p2, = plt.plot(hoscf.getTimeArray(), hoscf.getCF() / max(hoscf.getCF()), 'r')
p3, = plt.plot(aiccf.getTimeArray(), aiccf.getCF() / max(aiccf.getCF()), 'b')
p4, = plt.plot(arzcf.getTimeArray(), arzcf.getCF() / max(arzcf.getCF()), 'g')
p5, = plt.plot(araiccf.getTimeArray(), araiccf.getCF() / max(araiccf.getCF()), 'y')
plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'b--')
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([hospick.getpick(), hospick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
plt.plot([hospick.getpick() - 0.5, hospick.getpick() + 0.5], [1.3, 1.3], 'r')
plt.plot([hospick.getpick() - 0.5, hospick.getpick() + 0.5], [-1.3, -1.3], 'r')
plt.plot([hosELpick.getLpick(), hosELpick.getLpick()], [-1.1, 1.1], 'r--')
plt.plot([hosELpick.getEpick(), hosELpick.getEpick()], [-1.1, 1.1], 'r--')
plt.plot([aicarzpick.getpick(), aicarzpick.getpick()], [-1.2, 1.2], 'y', linewidth=2)
plt.plot([aicarzpick.getpick() - 0.5, aicarzpick.getpick() + 0.5], [1.2, 1.2], 'y')
plt.plot([aicarzpick.getpick() - 0.5, aicarzpick.getpick() + 0.5], [-1.2, -1.2], 'y')
plt.plot([arzpick.getpick(), arzpick.getpick()], [-1.4, 1.4], 'g', linewidth=2)
plt.plot([arzpick.getpick() - 0.5, arzpick.getpick() + 0.5], [1.4, 1.4], 'g')
plt.plot([arzpick.getpick() - 0.5, arzpick.getpick() + 0.5], [-1.4, -1.4], 'g')
plt.plot([arzELpick.getLpick(), arzELpick.getLpick()], [-1.2, 1.2], 'g--')
plt.plot([arzELpick.getEpick(), arzELpick.getEpick()], [-1.2, 1.2], 'g--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
plt.title('%s, %s, CF-SNR=%7.2f, CF-Slope=%12.2f' % (tr.stats.station, \
tr.stats.channel, aicpick.getSNR(),
aicpick.getSlope()))
plt.suptitle(tr.stats.starttime)
plt.legend([p1, p2, p3, p4, p5], ['Data', 'HOS-CF', 'HOSAIC-CF', 'ARZ-CF', 'ARZAIC-CF'])
# plot horizontal traces
plt.figure(2)
plt.subplot(2, 1, 1)
tsteph = tpredh / 4
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
tarhcf = np.arange(0, len(arhcf.getCF()) * tsteph, tsteph) + cuttimes[0] + tdeth + tpredh
p21, = plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
p22, = plt.plot(arhcf.getTimeArray(), arhcf.getCF() / max(arhcf.getCF()), 'r')
p23, = plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF() / max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'r')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'r')
plt.plot([arhELpick.getLpick(), arhELpick.getLpick()], [-0.8, 0.8], 'r--')
plt.plot([arhELpick.getEpick(), arhELpick.getEpick()], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + arhELpick.getPickError(), arhpick.getpick() + arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - arhELpick.getPickError(), arhpick.getpick() - arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p21, p22, p23], ['Data', 'ARH-CF', 'ARHAIC-CF'])
plt.subplot(2, 1, 2)
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
plt.plot(arhcf.getTimeArray(), arhcf.getCF() / max(arhcf.getCF()), 'r')
plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF() / max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'r')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'r')
plt.plot([arhELpick.getLpick(), arhELpick.getLpick()], [-0.8, 0.8], 'r--')
plt.plot([arhELpick.getEpick(), arhELpick.getEpick()], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + arhELpick.getPickError(), arhpick.getpick() + arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - arhELpick.getPickError(), arhpick.getpick() - arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
# plot 3-component window
plt.figure(3)
plt.subplot(3, 1, 1)
p31, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
p32, = plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([tr.stats.station, tr.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p31, p32], ['Data', 'AR3C-CF'])
plt.subplot(3, 1, 2)
plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.subplot(3, 1, 3)
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.ylabel('Normalized Counts')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.xlabel('Time [s]')
plt.show()
raw_input()
plt.close()
#create stream with 3 traces
#merge streams
AllC = read('%s' % wfefiles[i])
AllC += read('%s' % wfnfiles[i])
AllC += read('%s' % wfzfiles[i])
#filter and taper data
All1_filt = AllC[0].copy()
All2_filt = AllC[1].copy()
All3_filt = AllC[2].copy()
All1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
All2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
All3_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
All1_filt.taper(max_percentage=0.05, type='hann')
All2_filt.taper(max_percentage=0.05, type='hann')
All3_filt.taper(max_percentage=0.05, type='hann')
AllC[0].data = All1_filt.data
AllC[1].data = All2_filt.data
AllC[2].data = All3_filt.data
#calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
ar3ccf = AR3Ccf(AllC, cuttimes, tpredz, arhorder, tdetz, addnoise) #instance of AR3Ccf
#get earliest and latest possible pick from initial ARH-pick
ar3cELpick = EarlLatePicker(ar3ccf, 1.5, TSNRarz, None, 10, None, None, arhpick.getpick())
##############################################################
if iplot:
#plot vertical trace
plt.figure()
tr = st[0]
tdata = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
p1, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
p2, = plt.plot(hoscf.getTimeArray(), hoscf.getCF() / max(hoscf.getCF()), 'r')
p3, = plt.plot(aiccf.getTimeArray(), aiccf.getCF()/max(aiccf.getCF()), 'b')
p4, = plt.plot(arzcf.getTimeArray(), arzcf.getCF()/max(arzcf.getCF()), 'g')
p5, = plt.plot(araiccf.getTimeArray(), araiccf.getCF()/max(araiccf.getCF()), 'y')
plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'b--')
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([hospick.getpick(), hospick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
plt.plot([hospick.getpick()-0.5, hospick.getpick()+0.5], [1.3, 1.3], 'r')
plt.plot([hospick.getpick()-0.5, hospick.getpick()+0.5], [-1.3, -1.3], 'r')
plt.plot([hosELpick.getLpick(), hosELpick.getLpick()], [-1.1, 1.1], 'r--')
plt.plot([hosELpick.getEpick(), hosELpick.getEpick()], [-1.1, 1.1], 'r--')
plt.plot([aicarzpick.getpick(), aicarzpick.getpick()], [-1.2, 1.2], 'y', linewidth=2)
plt.plot([aicarzpick.getpick()-0.5, aicarzpick.getpick()+0.5], [1.2, 1.2], 'y')
plt.plot([aicarzpick.getpick()-0.5, aicarzpick.getpick()+0.5], [-1.2, -1.2], 'y')
plt.plot([arzpick.getpick(), arzpick.getpick()], [-1.4, 1.4], 'g', linewidth=2)
plt.plot([arzpick.getpick()-0.5, arzpick.getpick()+0.5], [1.4, 1.4], 'g')
plt.plot([arzpick.getpick()-0.5, arzpick.getpick()+0.5], [-1.4, -1.4], 'g')
plt.plot([arzELpick.getLpick(), arzELpick.getLpick()], [-1.2, 1.2], 'g--')
plt.plot([arzELpick.getEpick(), arzELpick.getEpick()], [-1.2, 1.2], 'g--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
plt.title('%s, %s, CF-SNR=%7.2f, CF-Slope=%12.2f' % (tr.stats.station, \
tr.stats.channel, aicpick.getSNR(), aicpick.getSlope()))
plt.suptitle(tr.stats.starttime)
plt.legend([p1, p2, p3, p4, p5], ['Data', 'HOS-CF', 'HOSAIC-CF', 'ARZ-CF', 'ARZAIC-CF'])
#plot horizontal traces
plt.figure(2)
plt.subplot(2,1,1)
tsteph = tpredh / 4
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
tarhcf = np.arange(0, len(arhcf.getCF()) * tsteph, tsteph) + cuttimes[0] + tdeth +tpredh
p21, = plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
p22, = plt.plot(arhcf.getTimeArray(), arhcf.getCF()/max(arhcf.getCF()), 'r')
p23, = plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF()/max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'r')
plt.plot([arhELpick.getLpick(), arhELpick.getLpick()], [-0.8, 0.8], 'r--')
plt.plot([arhELpick.getEpick(), arhELpick.getEpick()], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + arhELpick.getPickError(), arhpick.getpick() + arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - arhELpick.getPickError(), arhpick.getpick() - arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p21, p22, p23], ['Data', 'ARH-CF', 'ARHAIC-CF'])
plt.subplot(2,1,2)
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
plt.plot(arhcf.getTimeArray(), arhcf.getCF()/max(arhcf.getCF()), 'r')
plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF()/max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'r')
plt.plot([arhELpick.getLpick(), arhELpick.getLpick()], [-0.8, 0.8], 'r--')
plt.plot([arhELpick.getEpick(), arhELpick.getEpick()], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + arhELpick.getPickError(), arhpick.getpick() + arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - arhELpick.getPickError(), arhpick.getpick() - arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
#plot 3-component window
plt.figure(3)
plt.subplot(3,1,1)
p31, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
p32, = plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([tr.stats.station, tr.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p31, p32], ['Data', 'AR3C-CF'])
plt.subplot(3,1,2)
plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.subplot(3,1,3)
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.ylabel('Normalized Counts')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.xlabel('Time [s]')
plt.show()
raw_input()
plt.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()

View File

@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
from pylot.core.util.pdf import ProbabilityDensityFunction
pdf = ProbabilityDensityFunction.from_pick(0.34, 0.5, 0.54, type='exp')
pdf2 = ProbabilityDensityFunction.from_pick(0.34, 0.5, 0.54, type='exp')
diff = pdf - pdf2
diff = pdf - pdf2

View File

@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
import argparse
import numpy
from pylot.core.pick.utils import getnoisewin

View File

@ -11,6 +11,7 @@
"""
import argparse
import obspy
from pylot.core.pick.utils import earllatepicker

View File

@ -8,6 +8,7 @@
"""
import argparse
import obspy
from pylot.core.pick.utils import fmpicker

View File

@ -3,8 +3,8 @@
import argparse
from pylot.core.util.version import get_git_version as _getVersionString
from pylot.core.io.phases import reassess_pilot_db
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
__author__ = 'S. Wehling-Benatelli'

View File

@ -3,8 +3,8 @@
import argparse
from pylot.core.util.version import get_git_version as _getVersionString
from pylot.core.io.phases import reassess_pilot_event
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
__author__ = 'S. Wehling-Benatelli'

View File

@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
import argparse
import numpy
from pylot.core.pick.utils import getsignalwin

View File

@ -9,6 +9,7 @@
"""
import argparse
import obspy
from pylot.core.pick.utils import getSNR
@ -27,4 +28,5 @@ if __name__ == "__main__":
'are calculated',
dest='time')
args = parser.parse_args()
print getSNR(args.data, args.tsnr, args.time)
print(getSNR(args.data, args.tsnr, args.time))

View File

@ -6,295 +6,302 @@
Only for test purposes!
"""
from obspy.core import read
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.picker import AutoPicker
from pylot.core.pick.utils import *
import glob
import argparse
import glob
from obspy.core import read
from pylot.core.pick.utils import *
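# Illustrative compatibility sketch (not part of the original script): the test
# routines below pause between figures with raw_input(), which no longer exists in
# Python 3; a shim like this keeps that behaviour working under both interpreters.
try:
    raw_input
except NameError:  # Python 3
    raw_input = input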
def run_makeCF(project, database, event, iplot, station=None):
#parameters for CF calculation
t2 = 7 #length of moving window for HOS calculation [sec]
p = 4 #order of HOS
cuttimes = [10, 50] #start and end time for CF calculation
bpz = [2, 30] #corner frequencies of bandpass filter, vertical component
bph = [2, 15] #corner frequencies of bandpass filter, horizontal components
tdetz= 1.2 #length of AR-determination window [sec], vertical component
tdeth= 0.8 #length of AR-determination window [sec], horizontal components
tpredz = 0.4 #length of AR-prediction window [sec], vertical component
tpredh = 0.4 #length of AR-prediction window [sec], horizontal components
addnoise = 0.001 #add noise to seismogram for stable AR prediction
arzorder = 2 #chosen order of AR process, vertical component
arhorder = 4 #chosen order of AR process, horizontal components
TSNRhos = [5, 0.5, 1, .6] #window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
#from HOS-CF [noise window, safety gap, signal window, slope determination window]
TSNRarz = [5, 0.5, 1, 1.0] #window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
#from ARZ-CF
#get waveform data
# parameters for CF calculation
t2 = 7 # length of moving window for HOS calculation [sec]
p = 4 # order of HOS
cuttimes = [10, 50] # start and end time for CF calculation
bpz = [2, 30] # corner frequencies of bandpass filter, vertical component
bph = [2, 15] # corner frequencies of bandpass filter, horizontal components
tdetz = 1.2 # length of AR-determination window [sec], vertical component
tdeth = 0.8 # length of AR-determination window [sec], horizontal components
tpredz = 0.4 # length of AR-prediction window [sec], vertical component
tpredh = 0.4 # length of AR-prediction window [sec], horizontal components
addnoise = 0.001 # add noise to seismogram for stable AR prediction
arzorder = 2 # chosen order of AR process, vertical component
arhorder = 4 # chosen order of AR process, horizontal components
TSNRhos = [5, 0.5, 1, .6] # window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
# from HOS-CF [noise window, safety gap, signal window, slope determination window]
TSNRarz = [5, 0.5, 1, 1.0] # window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
# from ARZ-CF
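# Illustrative sketch (not part of the original script): how the four TSNR window
# lengths are commonly laid out around a tentative onset time; the helper name and
# the example pick time are made up for demonstration only.
def tsnr_windows(pick, tsnr):
    """Map TSNR = [noise window, safety gap, signal window, slope window] to
    absolute (noise_start, noise_end, signal_start, signal_end) times."""
    noise_win, safety_gap, signal_win, _slope_win = tsnr
    noise_end = pick - safety_gap  # keep a safety gap before the onset
    noise_start = noise_end - noise_win
    return noise_start, noise_end, pick, pick + signal_win
# example: tsnr_windows(15.0, TSNRhos) -> (9.5, 14.5, 15.0, 16.0)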
# get waveform data
if station:
dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HZ.msd' % (project, database, event, station)
dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HE.msd' % (project, database, event, station)
dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HN.msd' % (project, database, event, station)
#dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_z.gse' % (project, database, event, station)
#dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_e.gse' % (project, database, event, station)
#dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_n.gse' % (project, database, event, station)
dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HZ.msd' % (project, database, event, station)
dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HE.msd' % (project, database, event, station)
dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HN.msd' % (project, database, event, station)
# dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_z.gse' % (project, database, event, station)
# dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_e.gse' % (project, database, event, station)
# dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_n.gse' % (project, database, event, station)
else:
dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HZ.msd' % (project, database, event)
dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HE.msd' % (project, database, event)
dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HN.msd' % (project, database, event)
dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HZ.msd' % (project, database, event)
dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HE.msd' % (project, database, event)
dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HN.msd' % (project, database, event)
wfzfiles = glob.glob(dpz)
wfefiles = glob.glob(dpe)
wfnfiles = glob.glob(dpn)
if wfzfiles:
for i in range(len(wfzfiles)):
print 'Vertical component data found ...'
print wfzfiles[i]
st = read('%s' % wfzfiles[i])
st_copy = st.copy()
#filter and taper data
tr_filt = st[0].copy()
tr_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
st_copy[0].data = tr_filt.data
##############################################################
#calculate HOS-CF using subclass HOScf of class CharacteristicFunction
hoscf = HOScf(st_copy, cuttimes, t2, p) #instance of HOScf
##############################################################
#calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_aic = tr_filt.copy()
tr_aic.data = hoscf.getCF()
st_copy[0].data = tr_aic.data
aiccf = AICcf(st_copy, cuttimes) #instance of AICcf
##############################################################
#get preliminary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
aicpick = AICPicker(aiccf, TSNRhos, 3, 10, None, 0.1)
##############################################################
#get refined onset time from HOS-CF using class Picker
hospick = PragPicker(hoscf, TSNRhos, 2, 10, 0.001, 0.2, aicpick.getpick())
#############################################################
#get earliest and latest possible picks
st_copy[0].data = tr_filt.data
[lpickhos, epickhos, pickerrhos] = earllatepicker(st_copy, 1.5, TSNRhos, hospick.getpick(), 10)
#############################################################
#get SNR
[SNR, SNRdB] = getSNR(st_copy, TSNRhos, hospick.getpick())
print 'SNR:', SNR, 'SNR[dB]:', SNRdB
##########################################################
#get first motion of onset
hosfm = fmpicker(st, st_copy, 0.2, hospick.getpick(), 11)
##############################################################
#calculate ARZ-CF using subclass ARZcf of class CharacteristicFunction
arzcf = ARZcf(st, cuttimes, tpredz, arzorder, tdetz, addnoise) #instance of ARZcf
##############################################################
#calculate AIC-ARZ-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_arzaic = tr_filt.copy()
tr_arzaic.data = arzcf.getCF()
st_copy[0].data = tr_arzaic.data
araiccf = AICcf(st_copy, cuttimes, tpredz, 0, tdetz) #instance of AICcf
##############################################################
#get onset time from AIC-ARZ-CF using subclass AICPicker of class AutoPicking
aicarzpick = AICPicker(araiccf, TSNRarz, 2, 10, None, 0.1)
##############################################################
#get refined onset time from ARZ-CF using class Picker
arzpick = PragPicker(arzcf, TSNRarz, 2.0, 10, 0.1, 0.05, aicarzpick.getpick())
#get earliest and latest possible picks
st_copy[0].data = tr_filt.data
[lpickarz, epickarz, pickerrarz] = earllatepicker(st_copy, 1.5, TSNRarz, arzpick.getpick(), 10)
for i in range(len(wfzfiles)):
print('Vertical component data found ...')
print(wfzfiles[i])
st = read('%s' % wfzfiles[i])
st_copy = st.copy()
# filter and taper data
tr_filt = st[0].copy()
tr_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
st_copy[0].data = tr_filt.data
##############################################################
# calculate HOS-CF using subclass HOScf of class CharacteristicFunction
hoscf = HOScf(st_copy, cuttimes, t2, p) # instance of HOScf
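# Illustrative sketch (an assumption about the method, not PyLoT's implementation):
# a higher-order-statistics CF is typically a sliding kurtosis of the filtered
# trace; the onset shows up as a sharp increase of this curve.
def sliding_kurtosis(x, nwin):
    x = np.asarray(x, dtype=float)
    cf = np.zeros(len(x))
    for k in range(nwin, len(x)):
        w = x[k - nwin:k]
        m2 = ((w - w.mean()) ** 2).mean()
        if m2 > 0:
            cf[k] = ((w - w.mean()) ** 4).mean() / m2 ** 2
    return cf
# example: kcf = sliding_kurtosis(tr_filt.data, int(t2 * tr_filt.stats.sampling_rate))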
##############################################################
# calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
# class needs stream object => build it
tr_aic = tr_filt.copy()
tr_aic.data = hoscf.getCF()
st_copy[0].data = tr_aic.data
aiccf = AICcf(st_copy, cuttimes) # instance of AICcf
##############################################################
# get preliminary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
aicpick = AICPicker(aiccf, TSNRhos, 3, 10, None, 0.1)
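# Illustrative sketch (standard Maeda-type AIC, not necessarily the exact formula
# used by AICcf/AICPicker): the preliminary onset is taken where an AIC curve
# computed from the CF reaches its minimum, i.e. where the statistics of the
# series change most clearly.
def maeda_aic(x):
    x = np.asarray(x, dtype=float)
    N = len(x)
    aic = np.zeros(N)
    for k in range(1, N - 1):
        v1 = max(np.var(x[:k]), 1e-30)
        v2 = max(np.var(x[k:]), 1e-30)
        aic[k] = k * np.log(v1) + (N - k - 1) * np.log(v2)
    aic[0], aic[-1] = aic[1], aic[-2]
    return aic
# example: preliminary onset index ~ np.argmin(maeda_aic(hoscf.getCF()))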
##############################################################
# get refined onset time from HOS-CF using class Picker
hospick = PragPicker(hoscf, TSNRhos, 2, 10, 0.001, 0.2, aicpick.getpick())
#############################################################
# get earliest and latest possible picks
st_copy[0].data = tr_filt.data
[lpickhos, epickhos, pickerrhos] = earllatepicker(st_copy, 1.5, TSNRhos, hospick.getpick(), 10)
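# Illustrative sketch (an assumed convention, not from the original script): the
# spread between latest and earliest possible pick can be condensed into a single
# symmetric uncertainty; the variable name below is made up.
symerrhos = 0.5 * (lpickhos - epickhos)
print('HOS pick: %.3f s, earliest: %.3f s, latest: %.3f s, symmetric error: +/- %.3f s'
      % (hospick.getpick(), epickhos, lpickhos, symerrhos))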
#############################################################
# get SNR
[SNR, SNRdB] = getSNR(st_copy, TSNRhos, hospick.getpick())
print('SNR:', SNR, 'SNR[dB]:', SNRdB)
##########################################################
# get first motion of onset
hosfm = fmpicker(st, st_copy, 0.2, hospick.getpick(), 11)
##############################################################
# calculate ARZ-CF using subclass ARZcf of class CharacteristicFunction
arzcf = ARZcf(st, cuttimes, tpredz, arzorder, tdetz, addnoise) # instance of ARZcf
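# Illustrative sketch (conceptual only, not PyLoT's ARZcf implementation): an
# AR-based CF fits an autoregressive model to a sliding determination window and
# uses the prediction error over the following prediction window as CF value; a
# clear rise of that error marks the onset.
def ar_prediction_error_cf(x, order, ndet, npred):
    x = np.asarray(x, dtype=float)
    cf = np.zeros(len(x))
    for k in range(ndet, len(x) - npred):
        det = x[k - ndet:k]
        # least-squares AR fit on the determination window
        A = np.array([det[i:i + order] for i in range(ndet - order)])
        b = det[order:]
        a = np.linalg.lstsq(A, b, rcond=None)[0]
        seg = x[k - order:k + npred]
        pred = np.array([np.dot(seg[i:i + order], a) for i in range(npred)])
        cf[k] = np.mean((x[k:k + npred] - pred) ** 2)
    return cf
# example (assumed sampling): fs = tr_filt.stats.sampling_rate
# arcf_toy = ar_prediction_error_cf(tr_filt.data, arzorder, int(tdetz * fs), int(tpredz * fs))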
##############################################################
# calculate AIC-ARZ-CF using subclass AICcf of class CharacteristicFunction
# class needs stream object => build it
tr_arzaic = tr_filt.copy()
tr_arzaic.data = arzcf.getCF()
st_copy[0].data = tr_arzaic.data
araiccf = AICcf(st_copy, cuttimes, tpredz, 0, tdetz) # instance of AICcf
##############################################################
# get onset time from AIC-ARZ-CF using subclass AICPicker of class AutoPicking
aicarzpick = AICPicker(araiccf, TSNRarz, 2, 10, None, 0.1)
##############################################################
# get refined onset time from ARZ-CF using class Picker
arzpick = PragPicker(arzcf, TSNRarz, 2.0, 10, 0.1, 0.05, aicarzpick.getpick())
# get earliest and latest possible picks
st_copy[0].data = tr_filt.data
[lpickarz, epickarz, pickerrarz] = earllatepicker(st_copy, 1.5, TSNRarz, arzpick.getpick(), 10)
elif not wfzfiles:
print 'No vertical component data found!'
print('No vertical component data found!')
if wfefiles and wfnfiles:
for i in range(len(wfefiles)):
print 'Horizontal component data found ...'
print wfefiles[i]
print wfnfiles[i]
#merge streams
H = read('%s' % wfefiles[i])
H += read('%s' % wfnfiles[i])
H_copy = H.copy()
#filter and taper data
trH1_filt = H[0].copy()
trH2_filt = H[1].copy()
trH1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
H_copy[0].data = trH1_filt.data
H_copy[1].data = trH2_filt.data
for i in range(len(wfefiles)):
print('Horizontal component data found ...')
print(wfefiles[i])
print(wfnfiles[i])
# merge streams
H = read('%s' % wfefiles[i])
H += read('%s' % wfnfiles[i])
H_copy = H.copy()
# filter and taper data
trH1_filt = H[0].copy()
trH2_filt = H[1].copy()
trH1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
H_copy[0].data = trH1_filt.data
H_copy[1].data = trH2_filt.data
##############################################################
#calculate ARH-CF using subclass ARHcf of class CharacteristicFunction
arhcf = ARHcf(H_copy, cuttimes, tpredh, arhorder, tdeth, addnoise) #instance of ARHcf
##############################################################
#calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_arhaic = trH1_filt.copy()
tr_arhaic.data = arhcf.getCF()
H_copy[0].data = tr_arhaic.data
#calculate ARH-AIC-CF
arhaiccf = AICcf(H_copy, cuttimes, tpredh, 0, tdeth) #instance of AICcf
##############################################################
#get onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
aicarhpick = AICPicker(arhaiccf, TSNRarz, 4, 10, None, 0.1)
###############################################################
#get refined onset time from ARH-CF using class Picker
arhpick = PragPicker(arhcf, TSNRarz, 2.5, 10, 0.1, 0.05, aicarhpick.getpick())
#get earliest and latest possible picks
H_copy[0].data = trH1_filt.data
[lpickarh1, epickarh1, pickerrarh1] = earllatepicker(H_copy, 1.5, TSNRarz, arhpick.getpick(), 10)
H_copy[0].data = trH2_filt.data
[lpickarh2, epickarh2, pickerrarh2] = earllatepicker(H_copy, 1.5, TSNRarz, arhpick.getpick(), 10)
#get earliest pick of both earliest possible picks
epick = [epickarh1, epickarh2]
lpick = [lpickarh1, lpickarh2]
pickerr = [pickerrarh1, pickerrarh2]
ipick =np.argmin([epickarh1, epickarh2])
epickarh = epick[ipick]
lpickarh = lpick[ipick]
pickerrarh = pickerr[ipick]
##############################################################
# calculate ARH-CF using subclass ARHcf of class CharacteristicFunction
arhcf = ARHcf(H_copy, cuttimes, tpredh, arhorder, tdeth, addnoise) # instance of ARHcf
##############################################################
# calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
# class needs stream object => build it
tr_arhaic = trH1_filt.copy()
tr_arhaic.data = arhcf.getCF()
H_copy[0].data = tr_arhaic.data
# calculate ARH-AIC-CF
arhaiccf = AICcf(H_copy, cuttimes, tpredh, 0, tdeth) # instance of AICcf
##############################################################
# get onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
aicarhpick = AICPicker(arhaiccf, TSNRarz, 4, 10, None, 0.1)
###############################################################
# get refined onset time from ARH-CF using class Picker
arhpick = PragPicker(arhcf, TSNRarz, 2.5, 10, 0.1, 0.05, aicarhpick.getpick())
# get earliest and latest possible picks
H_copy[0].data = trH1_filt.data
[lpickarh1, epickarh1, pickerrarh1] = earllatepicker(H_copy, 1.5, TSNRarz, arhpick.getpick(), 10)
H_copy[0].data = trH2_filt.data
[lpickarh2, epickarh2, pickerrarh2] = earllatepicker(H_copy, 1.5, TSNRarz, arhpick.getpick(), 10)
# get earliest pick of both earliest possible picks
epick = [epickarh1, epickarh2]
lpick = [lpickarh1, lpickarh2]
pickerr = [pickerrarh1, pickerrarh2]
ipick = np.argmin([epickarh1, epickarh2])
epickarh = epick[ipick]
lpickarh = lpick[ipick]
pickerrarh = pickerr[ipick]
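# Illustrative note (not part of the original script): the argmin-based selection
# above keeps earliest pick, latest pick and pick error consistent, i.e. all three
# values come from the same horizontal component instead of being mixed. With
# made-up numbers: np.argmin([12.34, 12.31]) -> 1, so epickarh, lpickarh and
# pickerrarh would all be taken from the second component.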
# create stream with 3 traces
# merge streams
AllC = read('%s' % wfefiles[i])
AllC += read('%s' % wfnfiles[i])
AllC += read('%s' % wfzfiles[i])
# filter and taper data
All1_filt = AllC[0].copy()
All2_filt = AllC[1].copy()
All3_filt = AllC[2].copy()
All1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
All2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
All3_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
All1_filt.taper(max_percentage=0.05, type='hann')
All2_filt.taper(max_percentage=0.05, type='hann')
All3_filt.taper(max_percentage=0.05, type='hann')
AllC[0].data = All1_filt.data
AllC[1].data = All2_filt.data
AllC[2].data = All3_filt.data
# calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
ar3ccf = AR3Ccf(AllC, cuttimes, tpredz, arhorder, tdetz, addnoise) # instance of AR3Ccf
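# Illustrative sketch (not part of the original script): the plots below scale each
# trace and CF by its plain maximum, which breaks down if that maximum is zero or
# negative; a safer normalization divides by the largest absolute value.
def safe_norm(data):
    data = np.asarray(data, dtype=float)
    denom = np.max(np.abs(data))
    return data / denom if denom > 0 else data
# usage (assumption): plt.plot(ar3ccf.getTimeArray(), safe_norm(ar3ccf.getCF()), 'r')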
##############################################################
if iplot:
# plot vertical trace
plt.figure()
tr = st[0]
tdata = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
p1, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
p2, = plt.plot(hoscf.getTimeArray(), hoscf.getCF() / max(hoscf.getCF()), 'r')
p3, = plt.plot(aiccf.getTimeArray(), aiccf.getCF() / max(aiccf.getCF()), 'b')
p4, = plt.plot(arzcf.getTimeArray(), arzcf.getCF() / max(arzcf.getCF()), 'g')
p5, = plt.plot(araiccf.getTimeArray(), araiccf.getCF() / max(araiccf.getCF()), 'y')
plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'b--')
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([hospick.getpick(), hospick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
plt.plot([hospick.getpick() - 0.5, hospick.getpick() + 0.5], [1.3, 1.3], 'r')
plt.plot([hospick.getpick() - 0.5, hospick.getpick() + 0.5], [-1.3, -1.3], 'r')
plt.plot([lpickhos, lpickhos], [-1.1, 1.1], 'r--')
plt.plot([epickhos, epickhos], [-1.1, 1.1], 'r--')
plt.plot([aicarzpick.getpick(), aicarzpick.getpick()], [-1.2, 1.2], 'y', linewidth=2)
plt.plot([aicarzpick.getpick() - 0.5, aicarzpick.getpick() + 0.5], [1.2, 1.2], 'y')
plt.plot([aicarzpick.getpick() - 0.5, aicarzpick.getpick() + 0.5], [-1.2, -1.2], 'y')
plt.plot([arzpick.getpick(), arzpick.getpick()], [-1.4, 1.4], 'g', linewidth=2)
plt.plot([arzpick.getpick() - 0.5, arzpick.getpick() + 0.5], [1.4, 1.4], 'g')
plt.plot([arzpick.getpick() - 0.5, arzpick.getpick() + 0.5], [-1.4, -1.4], 'g')
plt.plot([lpickarz, lpickarz], [-1.2, 1.2], 'g--')
plt.plot([epickarz, epickarz], [-1.2, 1.2], 'g--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
plt.title('%s, %s, CF-SNR=%7.2f, CF-Slope=%12.2f' % (tr.stats.station,
tr.stats.channel, aicpick.getSNR(),
aicpick.getSlope()))
plt.suptitle(tr.stats.starttime)
plt.legend([p1, p2, p3, p4, p5], ['Data', 'HOS-CF', 'HOSAIC-CF', 'ARZ-CF', 'ARZAIC-CF'])
# plot horizontal traces
plt.figure(2)
plt.subplot(2, 1, 1)
tsteph = tpredh / 4
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
tarhcf = np.arange(0, len(arhcf.getCF()) * tsteph, tsteph) + cuttimes[0] + tdeth + tpredh
p21, = plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
p22, = plt.plot(arhcf.getTimeArray(), arhcf.getCF() / max(arhcf.getCF()), 'r')
p23, = plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF() / max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'r')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'r')
plt.plot([lpickarh, lpickarh], [-0.8, 0.8], 'r--')
plt.plot([epickarh, epickarh], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + pickerrarh, arhpick.getpick() + pickerrarh], [-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - pickerrarh, arhpick.getpick() - pickerrarh], [-0.2, 0.2], 'r--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p21, p22, p23], ['Data', 'ARH-CF', 'ARHAIC-CF'])
plt.subplot(2, 1, 2)
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
plt.plot(arhcf.getTimeArray(), arhcf.getCF() / max(arhcf.getCF()), 'r')
plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF() / max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'r')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'r')
plt.plot([lpickarh, lpickarh], [-0.8, 0.8], 'r--')
plt.plot([epickarh, epickarh], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + pickerrarh, arhpick.getpick() + pickerrarh], [-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - pickerrarh, arhpick.getpick() - pickerrarh], [-0.2, 0.2], 'r--')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
# plot 3-component window
plt.figure(3)
plt.subplot(3, 1, 1)
p31, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
p32, = plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([tr.stats.station, tr.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p31, p32], ['Data', 'AR3C-CF'])
plt.subplot(3, 1, 2)
plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.subplot(3, 1, 3)
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
plt.yticks([])
plt.ylabel('Normalized Counts')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.xlabel('Time [s]')
plt.show()
raw_input()
plt.close()
#create stream with 3 traces
#merge streams
AllC = read('%s' % wfefiles[i])
AllC += read('%s' % wfnfiles[i])
AllC += read('%s' % wfzfiles[i])
#filter and taper data
All1_filt = AllC[0].copy()
All2_filt = AllC[1].copy()
All3_filt = AllC[2].copy()
All1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
All2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
All3_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
All1_filt.taper(max_percentage=0.05, type='hann')
All2_filt.taper(max_percentage=0.05, type='hann')
All3_filt.taper(max_percentage=0.05, type='hann')
AllC[0].data = All1_filt.data
AllC[1].data = All2_filt.data
AllC[2].data = All3_filt.data
#calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
ar3ccf = AR3Ccf(AllC, cuttimes, tpredz, arhorder, tdetz, addnoise) #instance of AR3Ccf
##############################################################
if iplot:
#plot vertical trace
plt.figure()
tr = st[0]
tdata = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
p1, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
p2, = plt.plot(hoscf.getTimeArray(), hoscf.getCF() / max(hoscf.getCF()), 'r')
p3, = plt.plot(aiccf.getTimeArray(), aiccf.getCF()/max(aiccf.getCF()), 'b')
p4, = plt.plot(arzcf.getTimeArray(), arzcf.getCF()/max(arzcf.getCF()), 'g')
p5, = plt.plot(araiccf.getTimeArray(), araiccf.getCF()/max(araiccf.getCF()), 'y')
plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'b--')
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([hospick.getpick(), hospick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
plt.plot([hospick.getpick()-0.5, hospick.getpick()+0.5], [1.3, 1.3], 'r')
plt.plot([hospick.getpick()-0.5, hospick.getpick()+0.5], [-1.3, -1.3], 'r')
plt.plot([lpickhos, lpickhos], [-1.1, 1.1], 'r--')
plt.plot([epickhos, epickhos], [-1.1, 1.1], 'r--')
plt.plot([aicarzpick.getpick(), aicarzpick.getpick()], [-1.2, 1.2], 'y', linewidth=2)
plt.plot([aicarzpick.getpick()-0.5, aicarzpick.getpick()+0.5], [1.2, 1.2], 'y')
plt.plot([aicarzpick.getpick()-0.5, aicarzpick.getpick()+0.5], [-1.2, -1.2], 'y')
plt.plot([arzpick.getpick(), arzpick.getpick()], [-1.4, 1.4], 'g', linewidth=2)
plt.plot([arzpick.getpick()-0.5, arzpick.getpick()+0.5], [1.4, 1.4], 'g')
plt.plot([arzpick.getpick()-0.5, arzpick.getpick()+0.5], [-1.4, -1.4], 'g')
plt.plot([lpickarz, lpickarz], [-1.2, 1.2], 'g--')
plt.plot([epickarz, epickarz], [-1.2, 1.2], 'g--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
plt.title('%s, %s, CF-SNR=%7.2f, CF-Slope=%12.2f' % (tr.stats.station,
tr.stats.channel, aicpick.getSNR(), aicpick.getSlope()))
plt.suptitle(tr.stats.starttime)
plt.legend([p1, p2, p3, p4, p5], ['Data', 'HOS-CF', 'HOSAIC-CF', 'ARZ-CF', 'ARZAIC-CF'])
#plot horizontal traces
plt.figure(2)
plt.subplot(2,1,1)
tsteph = tpredh / 4
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
tarhcf = np.arange(0, len(arhcf.getCF()) * tsteph, tsteph) + cuttimes[0] + tdeth +tpredh
p21, = plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
p22, = plt.plot(arhcf.getTimeArray(), arhcf.getCF()/max(arhcf.getCF()), 'r')
p23, = plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF()/max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'r')
plt.plot([lpickarh, lpickarh], [-0.8, 0.8], 'r--')
plt.plot([epickarh, epickarh], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + pickerrarh, arhpick.getpick() + pickerrarh], [-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - pickerrarh, arhpick.getpick() - pickerrarh], [-0.2, 0.2], 'r--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p21, p22, p23], ['Data', 'ARH-CF', 'ARHAIC-CF'])
plt.subplot(2,1,2)
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
plt.plot(arhcf.getTimeArray(), arhcf.getCF()/max(arhcf.getCF()), 'r')
plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF()/max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'r')
plt.plot([lpickarh, lpickarh], [-0.8, 0.8], 'r--')
plt.plot([epickarh, epickarh], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + pickerrarh, arhpick.getpick() + pickerrarh], [-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - pickerrarh, arhpick.getpick() - pickerrarh], [-0.2, 0.2], 'r--')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
#plot 3-component window
plt.figure(3)
plt.subplot(3,1,1)
p31, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
p32, = plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([tr.stats.station, tr.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p31, p32], ['Data', 'AR3C-CF'])
plt.subplot(3,1,2)
plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.subplot(3,1,3)
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.yticks([])
plt.ylabel('Normalized Counts')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.xlabel('Time [s]')
plt.show()
raw_input()
plt.close()
parser = argparse.ArgumentParser()
parser.add_argument('--project', type=str, help='project name (e.g. Insheim)')