[reformat] code reformatting with PyCharm
This commit is contained in:
parent 4107f0249d
commit 20b31a1c5c

23	QtPyLoT.py
@@ -23,10 +23,11 @@ https://www.iconfinder.com/iconsets/flavour
(http://www.gnu.org/copyleft/lesser.html)
"""

import os
import sys
import platform
import argparse
import os
import platform
import sys

import matplotlib

matplotlib.use('Qt4Agg')
@@ -66,10 +67,10 @@ from pylot.core.pick.compare import Comparison
from pylot.core.pick.utils import symmetrize_error
from pylot.core.io.phases import picksdict_from_picks
import pylot.core.loc.nll as nll
from pylot.core.util.defaults import FILTERDEFAULTS, OUTPUTFORMATS, SetChannelComponents, \
from pylot.core.util.defaults import FILTERDEFAULTS, SetChannelComponents, \
readFilterInformation
from pylot.core.util.errors import FormatError, DatastructureError, \
OverwriteError, ProcessingError
OverwriteError
from pylot.core.util.connection import checkurl
from pylot.core.util.dataprocessing import read_metadata, restitute_data
from pylot.core.util.utils import fnConstructor, getLogin, \
@@ -81,7 +82,7 @@ from pylot.core.util.widgets import FilterOptionsDialog, NewEventDlg, \
getDataType, ComparisonDialog, TuneAutopicker, PylotParaBox
from pylot.core.util.map_projection import map_projection
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.thread import AutoPickThread, Thread, Worker
from pylot.core.util.thread import Thread, Worker
from pylot.core.util.version import get_git_version as _getVersionString

if sys.version_info.major == 3:
@@ -1149,8 +1150,8 @@ class MainWindow(QMainWindow):
# return False

# export to given path
#self.get_data().exportEvent(fbasename, exform, upperErrors=[uppererrorP[3], uppererrorS[3]])
#try:
# self.get_data().exportEvent(fbasename, exform, upperErrors=[uppererrorP[3], uppererrorS[3]])
# try:
self.get_data().exportEvent(fbasename, exform[0], fcheck=fcheck,
upperErrors=[uppererrorP[3], uppererrorS[3]])
self.get_data().exportEvent(fbasename, exform[1], fcheck=fcheck,
@@ -1159,7 +1160,7 @@ class MainWindow(QMainWindow):
# QMessageBox.warning(self, "PyLoT Warning",
# "Could not save event: {}".format(e))
# return
#self.get_data().exportEvent(fbasename, exform[2], upperErrors=[uppererrorP[3], uppererrorS[3]])
# self.get_data().exportEvent(fbasename, exform[2], upperErrors=[uppererrorP[3], uppererrorS[3]])
# all files save (ui clean)
self.update_status('Picks saved as %s, %s, and %s' % (fbasename + exform[0], fbasename + exform[1],
fbasename + exform[2]))
@@ -1888,7 +1889,7 @@ class MainWindow(QMainWindow):
args = {'parameter': self._inputs,
'station': 'all',
'fnames': 'None',
'eventid': self.get_current_event_path (),
'eventid': self.get_current_event_path(),
'iplot': 0,
'fig_dict': None,
'locflag': 0}
@@ -1900,7 +1901,7 @@ class MainWindow(QMainWindow):
self.addListItem(str(self._inputs))

self.mp_worker.signals.message.connect(self.addListItem)
#self.mp_thread.finished.connect(self.finalizeAutoPick)
# self.mp_thread.finished.connect(self.finalizeAutoPick)

def finalizeAutoPick(self):
self.drawPicks(picktype='auto')
45	autoPyLoT.py
@@ -4,35 +4,37 @@
from __future__ import print_function

import argparse
import datetime
import glob
import os
import datetime
from obspy import read_events
from obspy.core.event import ResourceIdentifier
import pylot.core.loc.hyposat as hyposat
import pylot.core.loc.hypo71 as hypo71
import pylot.core.loc.velest as velest
import pylot.core.loc.hypodd as hypodd

import pylot.core.loc.focmec as focmec
import pylot.core.loc.hash as hash
import pylot.core.loc.hypo71 as hypo71
import pylot.core.loc.hypodd as hypodd
import pylot.core.loc.hyposat as hyposat
import pylot.core.loc.nll as nll
#from PySide.QtGui import QWidget, QInputDialog
import pylot.core.loc.velest as velest
from obspy import read_events
from obspy.core.event import ResourceIdentifier
# from PySide.QtGui import QWidget, QInputDialog
from pylot.core.analysis.magnitude import MomentMagnitude, LocalMagnitude
from pylot.core.io.data import Data
from pylot.core.io.inputs import PylotParameter
from pylot.core.pick.autopick import autopickevent, iteratepicker
from pylot.core.util.dataprocessing import restitute_data, read_metadata, \
remove_underscores
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.version import get_git_version as _getVersionString
from pylot.core.util.event import Event
from pylot.core.util.utils import real_None
from pylot.core.util.defaults import SEPARATOR
from pylot.core.util.event import Event
from pylot.core.util.structure import DATASTRUCTURE
from pylot.core.util.utils import real_None
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, eventid=None, savepath=None, station='all', iplot=0):
def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, eventid=None, savepath=None, station='all',
iplot=0):
"""
Determine phase onsets automatically utilizing the automatic picking
algorithms by Kueperkoch et al. 2010/2012.
@@ -176,7 +178,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
events[index] = event

for event in events:
pylot_event = Event(event) #event should be path to event directory
pylot_event = Event(event)  # event should be path to event directory
data.setEvtData(pylot_event)
if fnames == 'None':
data.setWFData(glob.glob(os.path.join(datapath, event, '*')))
@@ -198,8 +200,8 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
data.setWFData(fnames)

event = events[0]
#now = datetime.datetime.now()
#evID = '%d%02d%02d%02d%02d' % (now.year,
# now = datetime.datetime.now()
# evID = '%d%02d%02d%02d%02d' % (now.year,
# now.month,
# now.day,
# now.hour,
@@ -212,7 +214,7 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
print('Could not find station {}. STOP!'.format(station))
return
wfdat = remove_underscores(wfdat)
metadata = read_metadata(parameter.get('invdir'))
metadata = read_metadata(parameter.get('invdir'))
print("Restitute data ...")
corr_dat = restitute_data(wfdat.copy(), *metadata)
if not corr_dat and locflag:
@@ -310,7 +312,8 @@ def autoPyLoT(input_dict=None, parameter=None, inputfile=None, fnames=None, even
if input_dict:
if 'fig_dict' in input_dict:
fig_dict = input_dict['fig_dict']
picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter, fig_dict=fig_dict)
picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter,
fig_dict=fig_dict)
else:
picks = iteratepicker(wfdat, nllocfile, picks, badpicks, parameter)
# write phases to NLLoc-phase file
@@ -420,10 +423,10 @@ if __name__ == "__main__":
autoregressive prediction and AIC followed by locating the seismic events using
NLLoc''')

#parser.add_argument('-d', '-D', '--input_dict', type=str,
# parser.add_argument('-d', '-D', '--input_dict', type=str,
# action='store',
# help='''optional, dictionary containing processing parameters''')
#parser.add_argument('-p', '-P', '--parameter', type=str,
# parser.add_argument('-p', '-P', '--parameter', type=str,
# action='store',
# help='''parameter file, default=None''')
parser.add_argument('-i', '-I', '--inputfile', type=str,
@@ -439,7 +442,7 @@ if __name__ == "__main__":
parser.add_argument('-s', '-S', '--spath', type=str,
action='store',
help='''optional, save path for autoPyLoT output''')
#parser.add_argument('-v', '-V', '--version', action='version',
# parser.add_argument('-v', '-V', '--version', action='version',
# version='autoPyLoT ' + __version__,
# help='show version information and exit')
@@ -1,17 +1,19 @@
<html><head><title>PyLoT - the Python picking and Localisation Tool</title></head>
<html>
<head><title>PyLoT - the Python picking and Localisation Tool</title></head>
<body>
<p><b>PyLoT</b> is a program which is capable of picking seismic phases,
exporting these as numerous standard phase format and localize the corresponding
seismic event with external software as, e.g.:</p>
exporting these as numerous standard phase format and localize the corresponding
seismic event with external software as, e.g.:</p>
<ul type="circle">
<li><a href="http://alomax.free.fr/nlloc/index.html">NonLinLoc</a></li>
<li>HypoInvers</li>
<li>HypoSat</li>
<li>whatever you want ...</li>
<li><a href="http://alomax.free.fr/nlloc/index.html">NonLinLoc</a></li>
<li>HypoInvers</li>
<li>HypoSat</li>
<li>whatever you want ...</li>
</ul>
<p>Read more on the
<a href="https://ariadne.geophysik.rub.de/trac/PyLoT/wiki/">PyLoT WikiPage</a>.</p>
<a href="https://ariadne.geophysik.rub.de/trac/PyLoT/wiki/">PyLoT WikiPage</a>.</p>
<p>Bug reports are very much appreciated and can also be delivered on our
<a href="https://ariadne.geophysik.rub.de/trac/PyLoT">PyLoT TracPage</a> after
successful registration.</p>
</body></html>
<a href="https://ariadne.geophysik.rub.de/trac/PyLoT">PyLoT TracPage</a> after
successful registration.</p>
</body>
</html>
12	makePyLoT.py
@@ -158,12 +158,12 @@ def buildPyLoT(verbosity=None):


def installPyLoT(verbosity=None):
files_to_copy = {'autoPyLoT_local.in':['~', '.pylot'],
'autoPyLoT_regional.in':['~', '.pylot']}
files_to_copy = {'autoPyLoT_local.in': ['~', '.pylot'],
'autoPyLoT_regional.in': ['~', '.pylot']}
if verbosity > 0:
print ('starting installation of PyLoT ...')
print('starting installation of PyLoT ...')
if verbosity > 1:
print ('copying input files into destination folder ...')
print('copying input files into destination folder ...')
ans = input('please specify scope of interest '
'([0]=local, 1=regional) :') or 0
if not isinstance(ans, int):
@@ -182,7 +182,7 @@ def installPyLoT(verbosity=None):
assert not os.path.isabs(srcfile), 'source files seem to be ' \
'corrupted ...'
if verbosity > 1:
print ('copying file {file} to folder {dest}'.format(file=file, dest=destination))
print('copying file {file} to folder {dest}'.format(file=file, dest=destination))
shutil.copyfile(srcfile, destination)
if link_file:
if verbosity:
@@ -190,8 +190,6 @@ def installPyLoT(verbosity=None):
os.symlink(destination, link_dest)


def cleanUp(verbosity=None):
if verbosity >= 1:
print('cleaning up build files...')
@@ -6,27 +6,27 @@ Revised/extended summer 2017.

:author: Ludger Küperkoch / MAGS2 EP3 working group
"""
import os
import matplotlib.pyplot as plt
import numpy as np
import obspy.core.event as ope
from obspy.geodetics import degrees2kilometers
from scipy import integrate, signal
from scipy.optimize import curve_fit
from pylot.core.pick.utils import getsignalwin, crossings_nonzero_all, \
select_for_phase
from pylot.core.util.utils import common_range, fit_curve
from scipy import integrate, signal
from scipy.optimize import curve_fit


def richter_magnitude_scaling(delta):
distance = np.array([0, 10, 20, 25, 30, 35,40, 45, 50, 60, 70, 75, 85, 90, 100, 110,
distance = np.array([0, 10, 20, 25, 30, 35, 40, 45, 50, 60, 70, 75, 85, 90, 100, 110,
120, 130, 140, 150, 160, 170, 180, 190, 200, 210, 230, 240, 250,
260, 270, 280, 290, 300, 310, 320, 330, 340, 350, 360, 370, 380,
390, 400, 430, 470, 510, 560, 600, 700, 800, 900, 1000])
richter_scaling = np.array([1.4, 1.5, 1.7, 1.9, 2.1, 2.3, 2.4, 2.5, 2.6, 2.8, 2.8, 2.9,
2.9, 3.0, 3.1, 3.1, 3.2, 3.2, 3.3, 3.3, 3.4, 3.4, 3.5, 3.5,
3.6, 3.7, 3.7, 3.8, 3.8, 3.9, 3.9, 4.0, 4.0, 4.1, 4.2, 4.2,
4.2, 4.2, 4.3, 4.3, 4.3, 4.4, 4.4, 4.5, 4.6, 4.7, 4.8, 4.9,
5.1, 5.2, 5.4, 5.5, 5.7])
2.9, 3.0, 3.1, 3.1, 3.2, 3.2, 3.3, 3.3, 3.4, 3.4, 3.5, 3.5,
3.6, 3.7, 3.7, 3.8, 3.8, 3.9, 3.9, 4.0, 4.0, 4.1, 4.2, 4.2,
4.2, 4.2, 4.3, 4.3, 4.3, 4.4, 4.4, 4.5, 4.6, 4.7, 4.8, 4.9,
5.1, 5.2, 5.4, 5.5, 5.7])
# prepare spline interpolation to calculate return value
func, params = fit_curve(distance, richter_scaling)
return func(delta, params)
@ -47,7 +47,7 @@ class Magnitude(object):
|
||||
|
||||
def __str__(self):
|
||||
print(
|
||||
'number of stations used: {0}\n'.format(len(self.magnitudes.values())))
|
||||
'number of stations used: {0}\n'.format(len(self.magnitudes.values())))
|
||||
print('\tstation\tmagnitude')
|
||||
for s, m in self.magnitudes.items(): print('\t{0}\t{1}'.format(s, m))
|
||||
|
||||
@ -126,8 +126,8 @@ class Magnitude(object):
|
||||
# scaling necessary
|
||||
print("Scaling network magnitude ...")
|
||||
mag = ope.Magnitude(
|
||||
mag=np.median([M.mag for M in self.magnitudes.values()]) *\
|
||||
magscaling[0] + magscaling[1],
|
||||
mag=np.median([M.mag for M in self.magnitudes.values()]) * \
|
||||
magscaling[0] + magscaling[1],
|
||||
magnitude_type=self.type,
|
||||
origin_id=self.origin_id,
|
||||
station_count=len(self.magnitudes),
|
||||
@ -215,7 +215,7 @@ class LocalMagnitude(Magnitude):
|
||||
th = np.arange(0, len(sqH) * dt, dt)
|
||||
# get maximum peak within pick window
|
||||
iwin = getsignalwin(th, t0 - stime, self.calc_win)
|
||||
ii = min([iwin[len(iwin)-1], len(th)])
|
||||
ii = min([iwin[len(iwin) - 1], len(th)])
|
||||
iwin = iwin[0:ii]
|
||||
wapp = np.max(sqH[iwin])
|
||||
if self.verbose:
|
||||
@ -250,8 +250,8 @@ class LocalMagnitude(Magnitude):
|
||||
if not wf:
|
||||
if self.verbose:
|
||||
print(
|
||||
'WARNING: no waveform data found for station {0}'.format(
|
||||
station))
|
||||
'WARNING: no waveform data found for station {0}'.format(
|
||||
station))
|
||||
continue
|
||||
delta = degrees2kilometers(a.distance)
|
||||
onset = pick.time
|
||||
@ -270,13 +270,14 @@ class LocalMagnitude(Magnitude):
|
||||
if str(self.wascaling) == '[0.0, 0.0, 0.0]':
|
||||
print("Calculating original Richter magnitude ...")
|
||||
magnitude = ope.StationMagnitude(mag=np.log10(a0) \
|
||||
+ richter_magnitude_scaling(delta))
|
||||
+ richter_magnitude_scaling(delta))
|
||||
else:
|
||||
print("Calculating scaled local magnitude ...")
|
||||
a0 = a0 * 1e03 # mm to nm (see Havskov & Ottemöller, 2010)
|
||||
a0 = a0 * 1e03 # mm to nm (see Havskov & Ottemöller, 2010)
|
||||
magnitude = ope.StationMagnitude(mag=np.log10(a0) \
|
||||
+ self.wascaling[0] * np.log10(delta) + self.wascaling[1]
|
||||
* delta + self.wascaling[2])
|
||||
+ self.wascaling[0] * np.log10(delta) + self.wascaling[1]
|
||||
* delta + self.wascaling[
|
||||
2])
|
||||
magnitude.origin_id = self.origin_id
|
||||
magnitude.waveform_id = pick.waveform_id
|
||||
magnitude.amplitude_id = amplitude.resource_id
|
||||
@ -397,8 +398,8 @@ def calcMoMw(wfstream, w0, rho, vp, delta, verbosity=False):
|
||||
|
||||
if verbosity:
|
||||
print(
|
||||
"calcMoMw: Calculating seismic moment Mo and moment magnitude Mw for station {0} ...".format(
|
||||
tr.stats.station))
|
||||
"calcMoMw: Calculating seismic moment Mo and moment magnitude Mw for station {0} ...".format(
|
||||
tr.stats.station))
|
||||
|
||||
# additional common parameters for calculating Mo
|
||||
rP = 2 / np.sqrt(
|
||||
@ -412,8 +413,8 @@ def calcMoMw(wfstream, w0, rho, vp, delta, verbosity=False):
|
||||
|
||||
if verbosity:
|
||||
print(
|
||||
"calcMoMw: Calculated seismic moment Mo = {0} Nm => Mw = {1:3.1f} ".format(
|
||||
Mo, Mw))
|
||||
"calcMoMw: Calculated seismic moment Mo = {0} Nm => Mw = {1:3.1f} ".format(
|
||||
Mo, Mw))
|
||||
|
||||
return Mo, Mw
|
||||
|
||||
@ -452,7 +453,7 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
|
||||
:type: integer
|
||||
'''
|
||||
if verbosity:
|
||||
print ("Calculating source spectrum for station %s ...." % wfstream[0].stats.station)
|
||||
print("Calculating source spectrum for station %s ...." % wfstream[0].stats.station)
|
||||
|
||||
# get Q value
|
||||
Q, A = qp
|
||||
@ -509,9 +510,9 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
|
||||
zc = crossings_nonzero_all(wfzc)
|
||||
if np.size(zc) == 0 or len(zc) <= 3:
|
||||
if verbosity:
|
||||
print ("calcsourcespec: Something is wrong with the waveform, "
|
||||
"no zero crossings derived!\n")
|
||||
print ("No calculation of source spectrum possible!")
|
||||
print("calcsourcespec: Something is wrong with the waveform, "
|
||||
"no zero crossings derived!\n")
|
||||
print("No calculation of source spectrum possible!")
|
||||
plotflag = 0
|
||||
else:
|
||||
plotflag = 1
|
||||
@ -558,22 +559,22 @@ def calcsourcespec(wfstream, onset, vp, delta, azimuth, incidence,
|
||||
[optspecfit, _] = curve_fit(synthsourcespec, F, YYcor, [w0in, Fcin])
|
||||
w0 = optspecfit[0]
|
||||
fc = optspecfit[1]
|
||||
#w01 = optspecfit[0]
|
||||
#fc1 = optspecfit[1]
|
||||
# w01 = optspecfit[0]
|
||||
# fc1 = optspecfit[1]
|
||||
if verbosity:
|
||||
print ("calcsourcespec: Determined w0-value: %e m/Hz, \n"
|
||||
"calcsourcespec: Determined corner frequency: %f Hz" % (w0, fc))
|
||||
print("calcsourcespec: Determined w0-value: %e m/Hz, \n"
|
||||
"calcsourcespec: Determined corner frequency: %f Hz" % (w0, fc))
|
||||
|
||||
# use of conventional fitting
|
||||
# [w02, fc2] = fitSourceModel(F, YYcor, Fcin, iplot, verbosity)
|
||||
# use of conventional fitting
|
||||
# [w02, fc2] = fitSourceModel(F, YYcor, Fcin, iplot, verbosity)
|
||||
|
||||
# get w0 and fc as median of both
|
||||
# source spectrum fits
|
||||
#w0 = np.median([w01, w02])
|
||||
#fc = np.median([fc1, fc2])
|
||||
#if verbosity:
|
||||
# print("calcsourcespec: Using w0-value = %e m/Hz and fc = %f Hz" % (
|
||||
# w0, fc))
|
||||
# get w0 and fc as median of both
|
||||
# source spectrum fits
|
||||
# w0 = np.median([w01, w02])
|
||||
# fc = np.median([fc1, fc2])
|
||||
# if verbosity:
|
||||
# print("calcsourcespec: Using w0-value = %e m/Hz and fc = %f Hz" % (
|
||||
# w0, fc))
|
||||
|
||||
if iplot > 1:
|
||||
f1 = plt.figure()
|
||||
@ -672,16 +673,16 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):
|
||||
|
||||
# vary corner frequency around initial point
|
||||
print("fitSourceModel: Varying corner frequency "
|
||||
"around initial corner frequency ...")
|
||||
"around initial corner frequency ...")
|
||||
# check difference of il and ir in order to
|
||||
# keep calculation time acceptable
|
||||
idiff = ir - il
|
||||
if idiff > 10000:
|
||||
increment = 100
|
||||
increment = 100
|
||||
elif idiff <= 20:
|
||||
increment = 1
|
||||
increment = 1
|
||||
else:
|
||||
increment = 10
|
||||
increment = 10
|
||||
|
||||
for i in range(il, ir, increment):
|
||||
FC = f[i]
|
||||
@ -707,10 +708,10 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):
|
||||
w0 = max(S)
|
||||
if verbosity:
|
||||
print(
|
||||
"fitSourceModel: best fc: {0} Hz, best w0: {1} m/Hz".format(fc, w0))
|
||||
"fitSourceModel: best fc: {0} Hz, best w0: {1} m/Hz".format(fc, w0))
|
||||
|
||||
if iplot > 1:
|
||||
plt.figure()#iplot)
|
||||
plt.figure() # iplot)
|
||||
plt.loglog(f, S, 'k')
|
||||
plt.loglog([f[0], fc], [w0, w0], 'g')
|
||||
plt.loglog([fc, fc], [w0 / 100, w0], 'g')
|
||||
@ -719,7 +720,7 @@ def fitSourceModel(f, S, fc0, iplot, verbosity=False):
|
||||
plt.xlabel('Frequency [Hz]')
|
||||
plt.ylabel('Amplitude [m/Hz]')
|
||||
plt.grid()
|
||||
plt.figure()#iplot + 1)
|
||||
plt.figure() # iplot + 1)
|
||||
plt.subplot(311)
|
||||
plt.plot(f[il:ir], STD, '*')
|
||||
plt.title('Common Standard Deviations')
|
||||
|
@ -12,6 +12,7 @@ from obspy.core import Stream
|
||||
from pylot.core.pick.utils import getsignalwin
|
||||
from scipy.optimize import curve_fit
|
||||
|
||||
|
||||
class Magnitude(object):
|
||||
'''
|
||||
Superclass for calculating Wood-Anderson peak-to-peak
|
||||
@ -45,7 +46,6 @@ class Magnitude(object):
|
||||
self.calcwapp()
|
||||
self.calcsourcespec()
|
||||
|
||||
|
||||
def getwfstream(self):
|
||||
return self.wfstream
|
||||
|
||||
@ -85,6 +85,7 @@ class Magnitude(object):
|
||||
def calcsourcespec(self):
|
||||
self.sourcespek = None
|
||||
|
||||
|
||||
class WApp(Magnitude):
|
||||
'''
|
||||
Method to derive peak-to-peak amplitude as seen on a Wood-Anderson-
|
||||
@ -92,8 +93,8 @@ class WApp(Magnitude):
|
||||
'''
|
||||
|
||||
def calcwapp(self):
|
||||
print ("Getting Wood-Anderson peak-to-peak amplitude ...")
|
||||
print ("Simulating Wood-Anderson seismograph ...")
|
||||
print("Getting Wood-Anderson peak-to-peak amplitude ...")
|
||||
print("Simulating Wood-Anderson seismograph ...")
|
||||
|
||||
self.wapp = None
|
||||
stream = self.getwfstream()
|
||||
@ -118,7 +119,7 @@ class WApp(Magnitude):
|
||||
# get maximum peak within pick window
|
||||
iwin = getsignalwin(th, self.getTo(), self.getpwin())
|
||||
self.wapp = np.max(sqH[iwin])
|
||||
print ("Determined Wood-Anderson peak-to-peak amplitude: %f mm") % self.wapp
|
||||
print("Determined Wood-Anderson peak-to-peak amplitude: %f mm") % self.wapp
|
||||
|
||||
if self.getiplot() > 1:
|
||||
stream.plot()
|
||||
@ -143,10 +144,10 @@ class DCfc(Magnitude):
|
||||
'''
|
||||
|
||||
def calcsourcespec(self):
|
||||
print ("Calculating source spectrum ....")
|
||||
print("Calculating source spectrum ....")
|
||||
|
||||
self.w0 = None # DC-value
|
||||
self.fc = None # corner frequency
|
||||
self.w0 = None # DC-value
|
||||
self.fc = None # corner frequency
|
||||
|
||||
stream = self.getwfstream()
|
||||
tr = stream[0]
|
||||
@ -159,14 +160,14 @@ class DCfc(Magnitude):
|
||||
# fft
|
||||
fny = tr.stats.sampling_rate / 2
|
||||
l = len(xdat) / tr.stats.sampling_rate
|
||||
n = tr.stats.sampling_rate * l # number of fft bins after Bath
|
||||
n = tr.stats.sampling_rate * l # number of fft bins after Bath
|
||||
# find next power of 2 of data length
|
||||
m = pow(2, np.ceil(np.log(len(xdat)) / np.log(2)))
|
||||
N = int(np.power(m, 2))
|
||||
y = tr.stats.delta * np.fft.fft(xdat, N)
|
||||
Y = abs(y[: N/2])
|
||||
Y = abs(y[: N / 2])
|
||||
L = (N - 1) / tr.stats.sampling_rate
|
||||
f = np.arange(0, fny, 1/L)
|
||||
f = np.arange(0, fny, 1 / L)
|
||||
|
||||
# remove zero-frequency and frequencies above
|
||||
# corner frequency of seismometer (assumed
|
||||
@ -185,20 +186,18 @@ class DCfc(Magnitude):
|
||||
[optspecfit, pcov] = curve_fit(synthsourcespec, F, YY.real, [DCin, Fcin])
|
||||
self.w0 = optspecfit[0]
|
||||
self.fc = optspecfit[1]
|
||||
print ("DCfc: Determined DC-value: %e m/Hz, \n" \
|
||||
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
|
||||
print("DCfc: Determined DC-value: %e m/Hz, \n" \
|
||||
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
|
||||
|
||||
|
||||
#if self.getiplot() > 1:
|
||||
iplot=2
|
||||
# if self.getiplot() > 1:
|
||||
iplot = 2
|
||||
if iplot > 1:
|
||||
print ("DCfc: Determined DC-value: %e m/Hz, \n"
|
||||
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
|
||||
|
||||
print("DCfc: Determined DC-value: %e m/Hz, \n"
|
||||
"Determined corner frequency: %f Hz" % (self.w0, self.fc))
|
||||
|
||||
if self.getiplot() > 1:
|
||||
f1 = plt.figure()
|
||||
plt.subplot(2,1,1)
|
||||
plt.subplot(2, 1, 1)
|
||||
# show displacement in mm
|
||||
plt.plot(t, np.multiply(tr, 1000), 'k')
|
||||
plt.plot(t[iwin], np.multiply(xdat, 1000), 'g')
|
||||
@ -206,12 +205,12 @@ class DCfc(Magnitude):
|
||||
plt.xlabel('Time since %s' % tr.stats.starttime)
|
||||
plt.ylabel('Displacement [mm]')
|
||||
|
||||
plt.subplot(2,1,2)
|
||||
plt.subplot(2, 1, 2)
|
||||
plt.loglog(f, Y.real, 'k')
|
||||
plt.loglog(F, YY.real)
|
||||
plt.loglog(F, fit, 'g')
|
||||
plt.title('Source Spectrum from P Pulse, DC=%e m/Hz, fc=%4.1f Hz' \
|
||||
% (self.w0, self.fc))
|
||||
% (self.w0, self.fc))
|
||||
plt.xlabel('Frequency [Hz]')
|
||||
plt.ylabel('Amplitude [m/Hz]')
|
||||
plt.grid()
|
||||
@ -235,8 +234,7 @@ def synthsourcespec(f, omega0, fcorner):
|
||||
:type: float
|
||||
'''
|
||||
|
||||
#ssp = omega0 / (pow(2, (1 + f / fcorner)))
|
||||
# ssp = omega0 / (pow(2, (1 + f / fcorner)))
|
||||
ssp = omega0 / (1 + pow(2, (f / fcorner)))
|
||||
|
||||
return ssp
|
||||
|
||||
|
@@ -3,15 +3,17 @@

import copy
import os

from obspy import read_events
from obspy.core import read, Stream, UTCDateTime
from obspy.io.sac import SacIOError
from obspy.core.event import Event as ObsPyEvent
from obspy.io.sac import SacIOError
from pylot.core.io.phases import readPILOTEvent, picks_from_picksdict, \
picksdict_from_pilot, merge_picks
from pylot.core.util.errors import FormatError, OverwriteError
from pylot.core.util.utils import fnConstructor, full_range
from pylot.core.util.event import Event
from pylot.core.util.utils import fnConstructor, full_range


class Data(object):
|
||||
"""
|
||||
@ -160,7 +162,7 @@ class Data(object):
|
||||
def replaceOrigin(self, event, forceOverwrite=False):
|
||||
if self.get_evt_data().origins or forceOverwrite:
|
||||
if event.origins:
|
||||
print("Found origin, replace it by new origin." )
|
||||
print("Found origin, replace it by new origin.")
|
||||
event.origins = self.get_evt_data().origins
|
||||
|
||||
def replaceMagnitude(self, event, forceOverwrite=False):
|
||||
@ -172,7 +174,7 @@ class Data(object):
|
||||
def replacePicks(self, event, picktype):
|
||||
checkflag = 0
|
||||
picks = event.picks
|
||||
#remove existing picks
|
||||
# remove existing picks
|
||||
for j, pick in reversed(list(enumerate(picks))):
|
||||
if picktype in str(pick.method_id.id):
|
||||
picks.pop(j)
|
||||
@ -180,7 +182,7 @@ class Data(object):
|
||||
if checkflag:
|
||||
print("Found %s pick(s), remove them and append new picks to catalog." % picktype)
|
||||
|
||||
#append new picks
|
||||
# append new picks
|
||||
for pick in self.get_evt_data().picks:
|
||||
if picktype in str(pick.method_id.id):
|
||||
picks.append(pick)
|
||||
@ -195,8 +197,8 @@ class Data(object):
|
||||
"""
|
||||
from pylot.core.util.defaults import OUTPUTFORMATS
|
||||
|
||||
if not type(fcheck)==list:
|
||||
fcheck=[fcheck]
|
||||
if not type(fcheck) == list:
|
||||
fcheck = [fcheck]
|
||||
|
||||
try:
|
||||
evtformat = OUTPUTFORMATS[fnext]
|
||||
@ -231,73 +233,71 @@ class Data(object):
|
||||
# Prefer manual picks!
|
||||
for i in range(len(evtdata_org.picks)):
|
||||
if evtdata_org.picks[i].method_id == 'manual':
|
||||
mstation = evtdata_org.picks[i].waveform_id.station_code
|
||||
mstation_ext = mstation + '_'
|
||||
for k in range(len(evtdata_copy.picks)):
|
||||
if ((evtdata_copy.picks[k].waveform_id.station_code == mstation) or \
|
||||
(evtdata_copy.picks[k].waveform_id.station_code == mstation_ext)) and \
|
||||
(evtdata_copy.picks[k].method_id == 'auto'):
|
||||
del evtdata_copy.picks[k]
|
||||
break
|
||||
mstation = evtdata_org.picks[i].waveform_id.station_code
|
||||
mstation_ext = mstation + '_'
|
||||
for k in range(len(evtdata_copy.picks)):
|
||||
if ((evtdata_copy.picks[k].waveform_id.station_code == mstation) or \
|
||||
(evtdata_copy.picks[k].waveform_id.station_code == mstation_ext)) and \
|
||||
(evtdata_copy.picks[k].method_id == 'auto'):
|
||||
del evtdata_copy.picks[k]
|
||||
break
|
||||
lendiff = len(evtdata_org.picks) - len(evtdata_copy.picks)
|
||||
if lendiff is not 0:
|
||||
print("Manual as well as automatic picks available. Prefered the {} manual ones!".format(lendiff))
|
||||
print("Manual as well as automatic picks available. Prefered the {} manual ones!".format(lendiff))
|
||||
|
||||
if upperErrors:
|
||||
# check for pick uncertainties exceeding adjusted upper errors
|
||||
# Picks with larger uncertainties will not be saved in output file!
|
||||
for j in range(len(evtdata_org.picks)):
|
||||
for i in range(len(evtdata_copy.picks)):
|
||||
if evtdata_copy.picks[i].phase_hint[0] == 'P':
|
||||
if (evtdata_copy.picks[i].time_errors['upper_uncertainty'] >= upperErrors[0]) or \
|
||||
(evtdata_copy.picks[i].time_errors['uncertainty'] == None):
|
||||
print("Uncertainty exceeds or equal adjusted upper time error!")
|
||||
print("Adjusted uncertainty: {}".format(upperErrors[0]))
|
||||
print("Pick uncertainty: {}".format(evtdata_copy.picks[i].time_errors['uncertainty']))
|
||||
print("{1} P-Pick of station {0} will not be saved in outputfile".format(
|
||||
evtdata_copy.picks[i].waveform_id.station_code,
|
||||
evtdata_copy.picks[i].method_id))
|
||||
print("#")
|
||||
del evtdata_copy.picks[i]
|
||||
break
|
||||
if evtdata_copy.picks[i].phase_hint[0] == 'S':
|
||||
if (evtdata_copy.picks[i].time_errors['upper_uncertainty'] >= upperErrors[1]) or \
|
||||
(evtdata_copy.picks[i].time_errors['uncertainty'] == None):
|
||||
print("Uncertainty exceeds or equal adjusted upper time error!")
|
||||
print("Adjusted uncertainty: {}".format(upperErrors[1]))
|
||||
print("Pick uncertainty: {}".format(evtdata_copy.picks[i].time_errors['uncertainty']))
|
||||
print("{1} S-Pick of station {0} will not be saved in outputfile".format(
|
||||
evtdata_copy.picks[i].waveform_id.station_code,
|
||||
evtdata_copy.picks[i].method_id))
|
||||
print("#")
|
||||
del evtdata_copy.picks[i]
|
||||
break
|
||||
|
||||
# check for pick uncertainties exceeding adjusted upper errors
|
||||
# Picks with larger uncertainties will not be saved in output file!
|
||||
for j in range(len(evtdata_org.picks)):
|
||||
for i in range(len(evtdata_copy.picks)):
|
||||
if evtdata_copy.picks[i].phase_hint[0] == 'P':
|
||||
if (evtdata_copy.picks[i].time_errors['upper_uncertainty'] >= upperErrors[0]) or \
|
||||
(evtdata_copy.picks[i].time_errors['uncertainty'] == None):
|
||||
print("Uncertainty exceeds or equal adjusted upper time error!")
|
||||
print("Adjusted uncertainty: {}".format(upperErrors[0]))
|
||||
print("Pick uncertainty: {}".format(evtdata_copy.picks[i].time_errors['uncertainty']))
|
||||
print("{1} P-Pick of station {0} will not be saved in outputfile".format(
|
||||
evtdata_copy.picks[i].waveform_id.station_code,
|
||||
evtdata_copy.picks[i].method_id))
|
||||
print("#")
|
||||
del evtdata_copy.picks[i]
|
||||
break
|
||||
if evtdata_copy.picks[i].phase_hint[0] == 'S':
|
||||
if (evtdata_copy.picks[i].time_errors['upper_uncertainty'] >= upperErrors[1]) or \
|
||||
(evtdata_copy.picks[i].time_errors['uncertainty'] == None):
|
||||
print("Uncertainty exceeds or equal adjusted upper time error!")
|
||||
print("Adjusted uncertainty: {}".format(upperErrors[1]))
|
||||
print("Pick uncertainty: {}".format(evtdata_copy.picks[i].time_errors['uncertainty']))
|
||||
print("{1} S-Pick of station {0} will not be saved in outputfile".format(
|
||||
evtdata_copy.picks[i].waveform_id.station_code,
|
||||
evtdata_copy.picks[i].method_id))
|
||||
print("#")
|
||||
del evtdata_copy.picks[i]
|
||||
break
|
||||
|
||||
if fnext == '.obs':
|
||||
try:
|
||||
evtdata_copy.write(fnout + fnext, format=evtformat)
|
||||
# write header afterwards
|
||||
evid = str(evtdata_org.resource_id).split('/')[1]
|
||||
header = '# EQEVENT: Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' % evid
|
||||
nllocfile = open(fnout + fnext)
|
||||
l = nllocfile.readlines()
|
||||
nllocfile.close()
|
||||
l.insert(0, header)
|
||||
nllocfile = open(fnout + fnext, 'w')
|
||||
nllocfile.write("".join(l))
|
||||
nllocfile.close()
|
||||
except KeyError as e:
|
||||
raise KeyError('''{0} export format
|
||||
try:
|
||||
evtdata_copy.write(fnout + fnext, format=evtformat)
|
||||
# write header afterwards
|
||||
evid = str(evtdata_org.resource_id).split('/')[1]
|
||||
header = '# EQEVENT: Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' % evid
|
||||
nllocfile = open(fnout + fnext)
|
||||
l = nllocfile.readlines()
|
||||
nllocfile.close()
|
||||
l.insert(0, header)
|
||||
nllocfile = open(fnout + fnext, 'w')
|
||||
nllocfile.write("".join(l))
|
||||
nllocfile.close()
|
||||
except KeyError as e:
|
||||
raise KeyError('''{0} export format
|
||||
not implemented: {1}'''.format(evtformat, e))
|
||||
if fnext == '.cnv':
|
||||
try:
|
||||
evtdata_org.write(fnout + fnext, format=evtformat)
|
||||
except KeyError as e:
|
||||
raise KeyError('''{0} export format
|
||||
try:
|
||||
evtdata_org.write(fnout + fnext, format=evtformat)
|
||||
except KeyError as e:
|
||||
raise KeyError('''{0} export format
|
||||
not implemented: {1}'''.format(evtformat, e))
|
||||
|
||||
|
||||
def getComp(self):
|
||||
"""
|
||||
|
||||
@ -362,7 +362,7 @@ class Data(object):
|
||||
except Exception as e:
|
||||
warnmsg += '{0}\n{1}\n'.format(fname, e)
|
||||
except SacIOError as se:
|
||||
warnmsg += '{0}\n{1}\n'.format(fname, se)
|
||||
warnmsg += '{0}\n{1}\n'.format(fname, se)
|
||||
if warnmsg:
|
||||
warnmsg = 'WARNING: unable to read\n' + warnmsg
|
||||
print(warnmsg)
|
||||
@ -427,21 +427,21 @@ class Data(object):
|
||||
:raise OverwriteError: raises an OverwriteError if the picks list is
|
||||
not empty. The GUI will then ask for a decision.
|
||||
"""
|
||||
#firstonset = find_firstonset(picks)
|
||||
# firstonset = find_firstonset(picks)
|
||||
# check for automatic picks
|
||||
print("Writing phases to ObsPy-quakeml file")
|
||||
for key in picks:
|
||||
if picks[key]['P']['picker'] == 'auto':
|
||||
print("Existing picks will be overwritten!")
|
||||
picks = picks_from_picksdict(picks)
|
||||
break
|
||||
print("Existing picks will be overwritten!")
|
||||
picks = picks_from_picksdict(picks)
|
||||
break
|
||||
else:
|
||||
if self.get_evt_data().picks:
|
||||
raise OverwriteError('Existing picks would be overwritten!')
|
||||
break
|
||||
else:
|
||||
picks = picks_from_picksdict(picks)
|
||||
break
|
||||
if self.get_evt_data().picks:
|
||||
raise OverwriteError('Existing picks would be overwritten!')
|
||||
break
|
||||
else:
|
||||
picks = picks_from_picksdict(picks)
|
||||
break
|
||||
self.get_evt_data().picks = picks
|
||||
# if 'smi:local' in self.getID() and firstonset:
|
||||
# fonset_str = firstonset.strftime('%Y_%m_%d_%H_%M_%S')
|
||||
@ -449,7 +449,6 @@ class Data(object):
|
||||
# ID.convertIDToQuakeMLURI(authority_id=authority_id)
|
||||
# self.get_evt_data().resource_id = ID
|
||||
|
||||
|
||||
def applyEvent(event):
|
||||
"""
|
||||
takes an `obspy.core.event.Event` object and applies all new
|
||||
@ -478,7 +477,6 @@ class Data(object):
|
||||
self._new = False
|
||||
|
||||
|
||||
|
||||
class GenericDataStructure(object):
|
||||
"""
|
||||
GenericDataBase type holds all information about the current data-
|
||||
|
@@ -38,7 +38,7 @@ defaults = {'rootpath': {'type': str,

'apverbose': {'type': bool,
'tooltip': "choose 'True' or 'False' for terminal output",
'value': True,
'value': True,
'namestring': 'App. verbosity'},

'nllocbin': {'type': str,
@ -375,12 +375,12 @@ defaults = {'rootpath': {'type': str,
|
||||
|
||||
'filter_type': {'type': (str, str),
|
||||
'tooltip': 'filter type (bandpass, bandstop, lowpass, highpass) [P, S]',
|
||||
'value': ('bandpass' , 'bandpass'),
|
||||
'value': ('bandpass', 'bandpass'),
|
||||
'namestring': ('Type', 'P', 'S')}
|
||||
}
|
||||
}
|
||||
|
||||
settings_main={
|
||||
'dirs':[
|
||||
settings_main = {
|
||||
'dirs': [
|
||||
'rootpath',
|
||||
'datapath',
|
||||
'database',
|
||||
@ -388,26 +388,26 @@ settings_main={
|
||||
'invdir',
|
||||
'datastructure',
|
||||
'apverbose'],
|
||||
'nlloc':[
|
||||
'nlloc': [
|
||||
'nllocbin',
|
||||
'nllocroot',
|
||||
'phasefile',
|
||||
'ctrfile',
|
||||
'ttpatter',
|
||||
'outpatter'],
|
||||
'smoment':[
|
||||
'smoment': [
|
||||
'vp',
|
||||
'rho',
|
||||
'Qp'],
|
||||
'localmag':[
|
||||
'localmag': [
|
||||
'WAscaling',
|
||||
'magscaling'],
|
||||
'filter':[
|
||||
'filter': [
|
||||
'minfreq',
|
||||
'maxfreq',
|
||||
'filter_order',
|
||||
'filter_type'],
|
||||
'pick':[
|
||||
'pick': [
|
||||
'extent',
|
||||
'pstart',
|
||||
'pstop',
|
||||
@ -419,8 +419,8 @@ settings_main={
|
||||
'bph2']
|
||||
}
|
||||
|
||||
settings_special_pick={
|
||||
'z':[
|
||||
settings_special_pick = {
|
||||
'z': [
|
||||
'algoP',
|
||||
'tlta',
|
||||
'hosorder',
|
||||
@ -437,7 +437,7 @@ settings_special_pick={
|
||||
'tsmoothP',
|
||||
'ausP',
|
||||
'nfacP'],
|
||||
'h':[
|
||||
'h': [
|
||||
'algoS',
|
||||
'tdet1h',
|
||||
'tpred1h',
|
||||
@ -451,11 +451,11 @@ settings_special_pick={
|
||||
'tsmoothS',
|
||||
'ausS',
|
||||
'nfacS'],
|
||||
'fm':[
|
||||
'fm': [
|
||||
'minfmweight',
|
||||
'minFMSNR',
|
||||
'fmpickwin'],
|
||||
'quality':[
|
||||
'quality': [
|
||||
'timeerrorsP',
|
||||
'timeerrorsS',
|
||||
'minAICPslope',
|
||||
|
@@ -1,8 +1,9 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from pylot.core.util.errors import ParameterError
from pylot.core.io import default_parameters
from pylot.core.util.errors import ParameterError


class PylotParameter(object):
'''
@ -69,12 +70,12 @@ class PylotParameter(object):
|
||||
|
||||
# Set default values of parameter names
|
||||
def __init_default_paras(self):
|
||||
parameters=default_parameters.defaults
|
||||
parameters = default_parameters.defaults
|
||||
self.__defaults = parameters
|
||||
|
||||
def __init_subsettings(self):
|
||||
self._settings_main=default_parameters.settings_main
|
||||
self._settings_special_pick=default_parameters.settings_special_pick
|
||||
self._settings_main = default_parameters.settings_main
|
||||
self._settings_special_pick = default_parameters.settings_special_pick
|
||||
|
||||
# String representation of the object
|
||||
def __repr__(self):
|
||||
@ -136,7 +137,7 @@ class PylotParameter(object):
|
||||
return self._settings_special_pick
|
||||
|
||||
def get_all_para_names(self):
|
||||
all_names=[]
|
||||
all_names = []
|
||||
all_names += self.get_main_para_names()['dirs']
|
||||
all_names += self.get_main_para_names()['nlloc']
|
||||
all_names += self.get_main_para_names()['smoment']
|
||||
@ -225,9 +226,9 @@ class PylotParameter(object):
|
||||
# for key, value in self.iteritems():
|
||||
# lines.append('{key}\t{value}\n'.format(key=key, value=value))
|
||||
# fid_out.writelines(lines)
|
||||
header = ('%This is a parameter input file for PyLoT/autoPyLoT.\n'+
|
||||
'%All main and special settings regarding data handling\n'+
|
||||
'%and picking are to be set here!\n'+
|
||||
header = ('%This is a parameter input file for PyLoT/autoPyLoT.\n' +
|
||||
'%All main and special settings regarding data handling\n' +
|
||||
'%and picking are to be set here!\n' +
|
||||
'%Parameters are optimized for %{} data sets!\n'.format(self.get_main_para_names()['pick'][0]))
|
||||
separator = '%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%\n'
|
||||
|
||||
@ -244,7 +245,7 @@ class PylotParameter(object):
|
||||
'filter settings', separator)
|
||||
self.write_section(fid_out, self.get_main_para_names()['pick'],
|
||||
'common settings picker', separator)
|
||||
fid_out.write(('#special settings for calculating CF#\n'+
|
||||
fid_out.write(('#special settings for calculating CF#\n' +
|
||||
'%!!Edit the following only if you know what you are doing!!%\n'))
|
||||
self.write_section(fid_out, self.get_special_para_names()['z'],
|
||||
'Z-component', None)
|
||||
@ -267,7 +268,7 @@ class PylotParameter(object):
|
||||
if type(value) == list or type(value) == tuple:
|
||||
value_tmp = ''
|
||||
for vl in value:
|
||||
value_tmp+= '{} '.format(vl)
|
||||
value_tmp += '{} '.format(vl)
|
||||
value = value_tmp
|
||||
tooltip = self.get_defaults()[name]['tooltip']
|
||||
if not len(str(value)) > l_val:
|
||||
@ -283,7 +284,7 @@ class PylotParameter(object):
|
||||
ttip = '%{:<{}}\n'.format(tooltip, l_ttip)
|
||||
else:
|
||||
ttip = '%{}\n'.format(tooltip)
|
||||
line = value+name+ttip
|
||||
line = value + name + ttip
|
||||
fid.write(line)
|
||||
|
||||
|
||||
|
@@ -2,22 +2,23 @@
# -*- coding: utf-8 -*-

import glob
import obspy.core.event as ope
from obspy.core.event import read_events
import os
import scipy.io as sio
import warnings

import matplotlib.pyplot as plt
import numpy as np
import warnings
import obspy.core.event as ope
import scipy.io as sio
from obspy.core import UTCDateTime
from obspy.core.event import read_events
from obspy.core.util import AttribDict

from pylot.core.io.inputs import PylotParameter
from pylot.core.io.location import create_arrival, create_event, \
create_magnitude, create_origin, create_pick
from pylot.core.io.location import create_event, \
create_magnitude
from pylot.core.pick.utils import select_for_phase
from pylot.core.util.utils import getOwner, full_range, four_digits


def add_amplitudes(event, amplitudes):
|
||||
amplitude_list = []
|
||||
for pick in event.picks:
|
||||
@ -36,6 +37,7 @@ def add_amplitudes(event, amplitudes):
|
||||
event.amplitudes = amplitude_list
|
||||
return event
|
||||
|
||||
|
||||
def readPILOTEvent(phasfn=None, locfn=None, authority_id='RUB', **kwargs):
|
||||
"""
|
||||
readPILOTEvent - function
|
||||
@ -203,7 +205,7 @@ def picksdict_from_picks(evt):
|
||||
try:
|
||||
onsets = picks[station]
|
||||
except KeyError as e:
|
||||
#print(e)
|
||||
# print(e)
|
||||
onsets = {}
|
||||
mpp = pick.time
|
||||
spe = pick.time_errors.uncertainty
|
||||
@ -233,6 +235,7 @@ def picksdict_from_picks(evt):
|
||||
picks[station] = onsets.copy()
|
||||
return picks
|
||||
|
||||
|
||||
def picks_from_picksdict(picks, creation_info=None):
|
||||
picks_list = list()
|
||||
for station, onsets in picks.items():
|
||||
@ -266,8 +269,8 @@ def picks_from_picksdict(picks, creation_info=None):
|
||||
pick.phase_hint = label
|
||||
pick.method_id = ope.ResourceIdentifier(id=picker)
|
||||
pick.waveform_id = ope.WaveformStreamID(station_code=station,
|
||||
channel_code=ccode,
|
||||
network_code=ncode)
|
||||
channel_code=ccode,
|
||||
network_code=ncode)
|
||||
try:
|
||||
polarity = phase['fm']
|
||||
if polarity == 'U' or '+':
|
||||
@ -277,7 +280,7 @@ def picks_from_picksdict(picks, creation_info=None):
|
||||
else:
|
||||
pick.polarity = 'undecidable'
|
||||
except KeyError as e:
|
||||
if 'fm' in str(e): # no polarity information found for this phase
|
||||
if 'fm' in str(e): # no polarity information found for this phase
|
||||
pass
|
||||
else:
|
||||
raise e
|
||||
@ -289,7 +292,7 @@ def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0
|
||||
import glob
|
||||
|
||||
db_root = os.path.join(root_dir, db_dir)
|
||||
evt_list = glob.glob1(db_root,'e????.???.??')
|
||||
evt_list = glob.glob1(db_root, 'e????.???.??')
|
||||
|
||||
for evt in evt_list:
|
||||
if verbosity > 0:
|
||||
@ -297,7 +300,6 @@ def reassess_pilot_db(root_dir, db_dir, out_dir=None, fn_param=None, verbosity=0
|
||||
reassess_pilot_event(root_dir, db_dir, evt, out_dir, fn_param, verbosity)
|
||||
|
||||
|
||||
|
||||
def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None, verbosity=0):
|
||||
from obspy import read
|
||||
|
||||
@ -305,7 +307,6 @@ def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None
|
||||
from pylot.core.pick.utils import earllatepicker
|
||||
|
||||
if fn_param is None:
|
||||
import pylot.core.util.defaults as defaults
|
||||
fn_param = defaults.AUTOMATIC_DEFAULTS
|
||||
|
||||
default = PylotParameter(fn_param, verbosity)
|
||||
@ -339,7 +340,8 @@ def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None
|
||||
except Exception as e:
|
||||
if 'No file matching file pattern:' in e.message:
|
||||
if verbosity > 0:
|
||||
warnings.warn('no waveform data found for station {station}'.format(station=station), RuntimeWarning)
|
||||
warnings.warn('no waveform data found for station {station}'.format(station=station),
|
||||
RuntimeWarning)
|
||||
datacheck.append(fn_pattern + ' (no data)\n')
|
||||
continue
|
||||
else:
|
||||
@ -395,7 +397,7 @@ def reassess_pilot_event(root_dir, db_dir, event_id, out_dir=None, fn_param=None
|
||||
os.makedirs(out_dir)
|
||||
fnout_prefix = os.path.join(out_dir, 'PyLoT_{0}.'.format(event_id))
|
||||
evt.write(fnout_prefix + 'xml', format='QUAKEML')
|
||||
#evt.write(fnout_prefix + 'cnv', format='VELEST')
|
||||
# evt.write(fnout_prefix + 'cnv', format='VELEST')
|
||||
|
||||
|
||||
def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
@ -427,7 +429,7 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
"""
|
||||
|
||||
if fformat == 'NLLoc':
|
||||
print ("Writing phases to %s for NLLoc" % filename)
|
||||
print("Writing phases to %s for NLLoc" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# write header
|
||||
fid.write('# EQEVENT: %s Label: EQ%s Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n' %
|
||||
@ -451,7 +453,7 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
ss = onset.second
|
||||
ms = onset.microsecond
|
||||
ss_ms = ss + ms / 1000000.0
|
||||
pweight = 1 # use pick
|
||||
pweight = 1 # use pick
|
||||
try:
|
||||
if arrivals[key]['P']['weight'] >= 4:
|
||||
pweight = 0 # do not use pick
|
||||
@ -478,7 +480,7 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
ss = onset.second
|
||||
ms = onset.microsecond
|
||||
ss_ms = ss + ms / 1000000.0
|
||||
sweight = 1 # use pick
|
||||
sweight = 1 # use pick
|
||||
try:
|
||||
if arrivals[key]['S']['weight'] >= 4:
|
||||
sweight = 0 # do not use pick
|
||||
@ -496,15 +498,15 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
|
||||
fid.close()
|
||||
elif fformat == 'HYPO71':
|
||||
print ("Writing phases to %s for HYPO71" % filename)
|
||||
print("Writing phases to %s for HYPO71" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# write header
|
||||
fid.write(' %s\n' %
|
||||
parameter.get('eventID'))
|
||||
parameter.get('eventID'))
|
||||
for key in arrivals:
|
||||
if arrivals[key]['P']['weight'] < 4:
|
||||
stat = key
|
||||
if len(stat) > 4: # HYPO71 handles only 4-string station IDs
|
||||
if len(stat) > 4: # HYPO71 handles only 4-string station IDs
|
||||
stat = stat[1:5]
|
||||
Ponset = arrivals[key]['P']['mpp']
|
||||
Sonset = arrivals[key]['S']['mpp']
|
||||
@ -544,36 +546,36 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
elif sweight >= 2:
|
||||
sstr = 'E'
|
||||
fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s%sS %d %s\n' % (stat,
|
||||
pstr,
|
||||
fm,
|
||||
pweight,
|
||||
year,
|
||||
month,
|
||||
day,
|
||||
hh,
|
||||
mm,
|
||||
ss_ms,
|
||||
Sss_ms,
|
||||
sstr,
|
||||
sweight,
|
||||
Ao))
|
||||
pstr,
|
||||
fm,
|
||||
pweight,
|
||||
year,
|
||||
month,
|
||||
day,
|
||||
hh,
|
||||
mm,
|
||||
ss_ms,
|
||||
Sss_ms,
|
||||
sstr,
|
||||
sweight,
|
||||
Ao))
|
||||
else:
|
||||
fid.write('%-4s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s\n' % (stat,
|
||||
pstr,
|
||||
fm,
|
||||
pweight,
|
||||
year,
|
||||
month,
|
||||
day,
|
||||
hh,
|
||||
mm,
|
||||
ss_ms,
|
||||
Ao))
|
||||
pstr,
|
||||
fm,
|
||||
pweight,
|
||||
year,
|
||||
month,
|
||||
day,
|
||||
hh,
|
||||
mm,
|
||||
ss_ms,
|
||||
Ao))
|
||||
|
||||
fid.close()
|
||||
|
||||
elif fformat == 'HYPOSAT':
|
||||
print ("Writing phases to %s for HYPOSAT" % filename)
|
||||
print("Writing phases to %s for HYPOSAT" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# write header
|
||||
fid.write('%s, event %s \n' % (parameter.get('database'), parameter.get('eventID')))
|
||||
@ -615,7 +617,7 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
fid.close()
|
||||
|
||||
elif fformat == 'VELEST':
|
||||
print ("Writing phases to %s for VELEST" % filename)
|
||||
print("Writing phases to %s for VELEST" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# get informations needed in cnv-file
|
||||
# check, whether latitude is N or S and longitude is E or W
|
||||
@ -631,14 +633,14 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
# get last two integers of origin year
|
||||
stime = eventsource['time']
|
||||
if stime.year - 2000 >= 0:
|
||||
syear = stime.year - 2000
|
||||
syear = stime.year - 2000
|
||||
else:
|
||||
syear = stime.year - 1900
|
||||
ifx = 0 # default value, see VELEST manual, pp. 22-23
|
||||
syear = stime.year - 1900
|
||||
ifx = 0 # default value, see VELEST manual, pp. 22-23
|
||||
# write header
|
||||
fid.write('%s%02d%02d %02d%02d %05.2f %7.4f%c %8.4f%c %7.2f %6.2f %02.0f 0.0 0.03 1.0 1.0\n' % (
|
||||
syear, stime.month, stime.day, stime.hour, stime.minute, stime.second, eventsource['latitude'],
|
||||
cns, eventsource['longitude'], cew, eventsource['depth'],eventinfo.magnitudes[0]['mag'], ifx))
|
||||
syear, stime.month, stime.day, stime.hour, stime.minute, stime.second, eventsource['latitude'],
|
||||
cns, eventsource['longitude'], cew, eventsource['depth'], eventinfo.magnitudes[0]['mag'], ifx))
|
||||
n = 0
|
||||
for key in arrivals:
|
||||
# P onsets
|
||||
@ -646,25 +648,25 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
if arrivals[key]['P']['weight'] < 4:
|
||||
n += 1
|
||||
stat = key
|
||||
if len(stat) > 4: # VELEST handles only 4-string station IDs
|
||||
if len(stat) > 4: # VELEST handles only 4-string station IDs
|
||||
stat = stat[1:5]
|
||||
Ponset = arrivals[key]['P']['mpp']
|
||||
Pweight = arrivals[key]['P']['weight']
|
||||
Prt = Ponset - stime # onset time relative to source time
|
||||
Prt = Ponset - stime # onset time relative to source time
|
||||
if n % 6 is not 0:
|
||||
fid.write('%-4sP%d%6.2f' % (stat, Pweight, Prt))
|
||||
else:
|
||||
fid.write('%-4sP%d%6.2f\n' % (stat, Pweight, Prt))
|
||||
# S onsets
|
||||
# S onsets
|
||||
if arrivals[key].has_key('S'):
|
||||
if arrivals[key]['S']['weight'] < 4:
|
||||
n += 1
|
||||
stat = key
|
||||
if len(stat) > 4: # VELEST handles only 4-string station IDs
|
||||
if len(stat) > 4: # VELEST handles only 4-string station IDs
|
||||
stat = stat[1:5]
|
||||
Sonset = arrivals[key]['S']['mpp']
|
||||
Sweight = arrivals[key]['S']['weight']
|
||||
Srt = Ponset - stime # onset time relative to source time
|
||||
Srt = Ponset - stime # onset time relative to source time
|
||||
if n % 6 is not 0:
|
||||
fid.write('%-4sS%d%6.2f' % (stat, Sweight, Srt))
|
||||
else:
|
||||
@ -672,7 +674,7 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
fid.close()
|
||||
|
||||
elif fformat == 'hypoDD':
|
||||
print ("Writing phases to %s for hypoDD" % filename)
|
||||
print("Writing phases to %s for hypoDD" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# get event information needed for hypoDD-phase file
|
||||
eventsource = eventinfo.origins[0]
|
||||
@ -681,59 +683,62 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
|
||||
hddID = event.split('.')[0][1:5]
|
||||
# write header
|
||||
fid.write('# %d %d %d %d %d %5.2f %7.4f +%6.4f %7.4f %4.2f 0.1 0.5 %4.2f %s\n' % (
|
||||
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
|
||||
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
|
||||
eventinfo.magnitudes[0]['mag'], eventsource['quality']['standard_error'], hddID))
|
||||
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
|
||||
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
|
||||
eventinfo.magnitudes[0]['mag'], eventsource['quality']['standard_error'], hddID))
|
||||
for key in arrivals:
|
||||
if arrivals[key].has_key('P'):
|
||||
# P onsets
|
||||
if arrivals[key]['P']['weight'] < 4:
|
||||
Ponset = arrivals[key]['P']['mpp']
|
||||
Prt = Ponset - stime # onset time relative to source time
|
||||
Prt = Ponset - stime # onset time relative to source time
|
||||
fid.write('%s %6.3f 1 P\n' % (key, Prt))
|
||||
# S onsets
|
||||
# S onsets
|
||||
if arrivals[key]['S']['weight'] < 4:
|
||||
Sonset = arrivals[key]['S']['mpp']
|
||||
Srt = Sonset - stime # onset time relative to source time
|
||||
Srt = Sonset - stime # onset time relative to source time
|
||||
fid.write('%-5s %6.3f 1 S\n' % (key, Srt))
|
||||
|
||||
fid.close()
|
||||
|
||||
elif fformat == 'FOCMEC':
|
||||
print ("Writing phases to %s for FOCMEC" % filename)
|
||||
print("Writing phases to %s for FOCMEC" % filename)
|
||||
fid = open("%s" % filename, 'w')
|
||||
# get event information needed for FOCMEC-input file
|
||||
eventsource = eventinfo.origins[0]
|
||||
stime = eventsource['time']
|
||||
# write header line including event information
|
||||
fid.write('%s %d%02d%02d%02d%02d%02.0f %7.4f %6.4f %3.1f %3.1f\n' % (parameter.get('eventID'),
|
||||
stime.year, stime.month, stime.day, stime.hour, stime.minute, stime.second,
|
||||
eventsource['latitude'], eventsource['longitude'], eventsource['depth'] / 1000,
|
||||
eventinfo.magnitudes[0]['mag']))
|
||||
stime.year, stime.month, stime.day,
|
||||
stime.hour, stime.minute, stime.second,
|
||||
eventsource['latitude'],
|
||||
eventsource['longitude'],
|
||||
eventsource['depth'] / 1000,
|
||||
eventinfo.magnitudes[0]['mag']))
|
||||
picks = eventinfo.picks
|
||||
for key in arrivals:
|
||||
if arrivals[key].has_key('P'):
|
||||
if arrivals[key]['P']['weight'] < 4 and arrivals[key]['P']['fm'] is not None:
|
||||
stat = key
|
||||
for i in range(len(picks)):
|
||||
station = picks[i].waveform_id.station_code
|
||||
if station == stat:
|
||||
# get resource ID
|
||||
resid_picks = picks[i].get('resource_id')
|
||||
# find same ID in eventinfo
|
||||
# there it is the pick_id!!
|
||||
for j in range(len(eventinfo.origins[0].arrivals)):
|
||||
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
|
||||
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
|
||||
if len(stat) > 4: # FOCMEC handles only 4-string station IDs
|
||||
stat = stat[1:5]
|
||||
az = eventinfo.origins[0].arrivals[j].get('azimuth')
|
||||
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
|
||||
fid.write('%-4s %6.2f %6.2f%s \n' % (stat,
|
||||
az,
|
||||
inz,
|
||||
arrivals[key]['P']['fm']))
|
||||
break
|
||||
station = picks[i].waveform_id.station_code
|
||||
if station == stat:
|
||||
# get resource ID
|
||||
resid_picks = picks[i].get('resource_id')
|
||||
# find same ID in eventinfo
|
||||
# there it is the pick_id!!
|
||||
for j in range(len(eventinfo.origins[0].arrivals)):
|
||||
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
|
||||
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
|
||||
if len(stat) > 4: # FOCMEC handles only 4-string station IDs
|
||||
stat = stat[1:5]
|
||||
az = eventinfo.origins[0].arrivals[j].get('azimuth')
|
||||
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
|
||||
fid.write('%-4s %6.2f %6.2f%s \n' % (stat,
|
||||
az,
|
||||
inz,
|
||||
arrivals[key]['P']['fm']))
|
||||
break
|
||||
|
||||
fid.close()
|
||||
|
||||
@ -742,9 +747,9 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
# HASH-driver 1 and 2 (see HASH manual!)
filename1 = filename + 'drv1' + '.phase'
filename2 = filename + 'drv2' + '.phase'
print ("Writing phases to %s for HASH for HASH-driver 1" % filename1)
print("Writing phases to %s for HASH for HASH-driver 1" % filename1)
fid1 = open("%s" % filename1, 'w')
print ("Writing phases to %s for HASH for HASH-driver 2" % filename2)
print("Writing phases to %s for HASH for HASH-driver 2" % filename2)
fid2 = open("%s" % filename2, 'w')
# get event information needed for HASH-input file
eventsource = eventinfo.origins[0]
@ -759,26 +764,32 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
erz = eventsource.depth_errors['uncertainty']
stime = eventsource['time']
if stime.year - 2000 >= 0:
syear = stime.year - 2000
syear = stime.year - 2000
else:
syear = stime.year - 1900
syear = stime.year - 1900
picks = eventinfo.picks
# write header line including event information
# for HASH-driver 1
fid1.write('%s%02d%02d%02d%02d%5.2f%2dN%5.2f%3dE%5.2f%6.3f%4.2f%5.2f%5.2f%s\n' % (syear,
stime.month, stime.day, stime.hour, stime.minute, stime.second,
latdeg, latmin, londeg, lonmin, eventsource['depth'],
eventinfo.magnitudes[0]['mag'], erh, erz,
hashID))
stime.month, stime.day,
stime.hour, stime.minute,
stime.second,
latdeg, latmin, londeg,
lonmin, eventsource['depth'],
eventinfo.magnitudes[0][
'mag'], erh, erz,
hashID))
# write header line including event information
# for HASH-driver 2
fid2.write('%d%02d%02d%02d%02d%5.2f%dN%5.2f%3dE%6.2f%5.2f %d %5.2f %5.2f %4.2f %s \n' % (syear, stime.month, stime.day,
stime.hour, stime.minute, stime.second,
latdeg,latmin,londeg, lonmin,
eventsource['depth'],
eventsource['quality']['used_phase_count'],
erh, erz, eventinfo.magnitudes[0]['mag'],
hashID))
fid2.write(
'%d%02d%02d%02d%02d%5.2f%dN%5.2f%3dE%6.2f%5.2f %d %5.2f %5.2f %4.2f %s \n' % (
syear, stime.month, stime.day,
stime.hour, stime.minute, stime.second,
latdeg, latmin, londeg, lonmin,
eventsource['depth'],
eventsource['quality']['used_phase_count'],
erh, erz, eventinfo.magnitudes[0]['mag'],
hashID))

# write phase lines
for key in arrivals:
@ -789,36 +800,38 @@ def writephases(arrivals, fformat, filename, parameter, eventinfo=None):
ncode = arrivals[key]['P']['network']

if arrivals[key]['P']['weight'] < 2:
Pqual='I'
Pqual = 'I'
else:
Pqual='E'
Pqual = 'E'

for i in range(len(picks)):
station = picks[i].waveform_id.station_code
if station == stat:
# get resource ID
resid_picks = picks[i].get('resource_id')
# find same ID in eventinfo
# there it is the pick_id!!
for j in range(len(eventinfo.origins[0].arrivals)):
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
if len(stat) > 4: # HASH handles only 4-string station IDs
stat = stat[1:5]
az = eventinfo.origins[0].arrivals[j].get('azimuth')
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
dist = eventinfo.origins[0].arrivals[j].get('distance')
# write phase line for HASH-driver 1
fid1.write('%-4s%sP%s%d 0 %3.1f %03d %03d 2 1 %s\n' % (stat, Pqual, arrivals[key]['P']['fm'], arrivals[key]['P']['weight'],
dist, inz, az, ccode))
# write phase line for HASH-driver 2
fid2.write('%-4s %s %s %s %s \n' % (
stat,
ncode,
ccode,
Pqual,
arrivals[key]['P']['fm']))
break
station = picks[i].waveform_id.station_code
if station == stat:
# get resource ID
resid_picks = picks[i].get('resource_id')
# find same ID in eventinfo
# there it is the pick_id!!
for j in range(len(eventinfo.origins[0].arrivals)):
resid_eventinfo = eventinfo.origins[0].arrivals[j].get('pick_id')
if resid_eventinfo == resid_picks and eventinfo.origins[0].arrivals[j].phase == 'P':
if len(stat) > 4: # HASH handles only 4-string station IDs
stat = stat[1:5]
az = eventinfo.origins[0].arrivals[j].get('azimuth')
inz = eventinfo.origins[0].arrivals[j].get('takeoff_angle')
dist = eventinfo.origins[0].arrivals[j].get('distance')
# write phase line for HASH-driver 1
fid1.write(
'%-4s%sP%s%d 0 %3.1f %03d %03d 2 1 %s\n' % (
stat, Pqual, arrivals[key]['P']['fm'], arrivals[key]['P']['weight'],
dist, inz, az, ccode))
# write phase line for HASH-driver 2
fid2.write('%-4s %s %s %s %s \n' % (
stat,
ncode,
ccode,
Pqual,
arrivals[key]['P']['fm']))
break

fid1.write(' %s' % hashID)
fid1.close()
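# Illustrative sketch (assumed example value): both the FOCMEC and the HASH
# writers above truncate station codes longer than four characters with
# stat = stat[1:5], i.e. they keep characters 2-5 rather than the first four.
stat = 'ABCDE'          # assumed 5-character station code
if len(stat) > 4:
    stat = stat[1:5]    # -> 'BCDE'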
@ -849,6 +862,7 @@ def merge_picks(event, picks):
del time, err, phase, station, network, method
return event


def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
"""
Script to get onset uncertainties from Quakeml.xml files created by PyLoT.
@ -881,18 +895,18 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
for mpick in arrivals_copy:
if mpick.phase_hint[0] == 'P':
if ((mpick.waveform_id.station_code == mstation) or \
(mpick.waveform_id.station_code == mstation_ext)) and \
((mpick.method_id).split('/')[1] == 'auto') and \
(mpick.time_errors['uncertainty'] <= ErrorsP[3]):
del mpick
break
(mpick.waveform_id.station_code == mstation_ext)) and \
((mpick.method_id).split('/')[1] == 'auto') and \
(mpick.time_errors['uncertainty'] <= ErrorsP[3]):
del mpick
break
elif mpick.phase_hint[0] == 'S':
if ((mpick.waveform_id.station_code == mstation) or \
(mpick.waveform_id.station_code == mstation_ext)) and \
((mpick.method_id).split('/')[1] == 'auto') and \
(mpick.time_errors['uncertainty'] <= ErrorsS[3]):
del mpick
break
(mpick.waveform_id.station_code == mstation_ext)) and \
((mpick.method_id).split('/')[1] == 'auto') and \
(mpick.time_errors['uncertainty'] <= ErrorsS[3]):
del mpick
break
lendiff = len(arrivals) - len(arrivals_copy)
if lendiff is not 0:
print("Found manual as well as automatic picks, prefered the {} manual ones!".format(lendiff))
@ -902,13 +916,13 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
if Pick.time_errors.uncertainty <= ErrorsP[0]:
Pw0.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsP[0]) and \
(Pick.time_errors.uncertainty <= ErrorsP[1]):
(Pick.time_errors.uncertainty <= ErrorsP[1]):
Pw1.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsP[1]) and \
(Pick.time_errors.uncertainty <= ErrorsP[2]):
(Pick.time_errors.uncertainty <= ErrorsP[2]):
Pw2.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsP[2]) and \
(Pick.time_errors.uncertainty <= ErrorsP[3]):
(Pick.time_errors.uncertainty <= ErrorsP[3]):
Pw3.append(Pick.time_errors.uncertainty)
elif Pick.time_errors.uncertainty > ErrorsP[3]:
Pw4.append(Pick.time_errors.uncertainty)
@ -918,13 +932,13 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
if Pick.time_errors.uncertainty <= ErrorsS[0]:
Sw0.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsS[0]) and \
(Pick.time_errors.uncertainty <= ErrorsS[1]):
(Pick.time_errors.uncertainty <= ErrorsS[1]):
Sw1.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsS[1]) and \
(Pick.time_errors.uncertainty <= ErrorsS[2]):
(Pick.time_errors.uncertainty <= ErrorsS[2]):
Sw2.append(Pick.time_errors.uncertainty)
elif (Pick.time_errors.uncertainty > ErrorsS[2]) and \
(Pick.time_errors.uncertainty <= ErrorsS[3]):
(Pick.time_errors.uncertainty <= ErrorsS[3]):
Sw3.append(Pick.time_errors.uncertainty)
elif Pick.time_errors.uncertainty > ErrorsS[3]:
Sw4.append(Pick.time_errors.uncertainty)
@ -994,4 +1008,3 @@ def getQualitiesfromxml(xmlnames, ErrorsP, ErrorsS, plotflag=1):
plt.xlabel('Qualities')
plt.title('{0} P-Qualities, {1} S-Qualities'.format(numPweights, numSweights))
plt.show()
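# Illustrative sketch of the quality binning used in getQualitiesfromxml above:
# an onset uncertainty is mapped to class 0-4 by comparing it against the four
# thresholds in ErrorsP (or ErrorsS). The threshold values here are assumed.
ErrorsP = [0.02, 0.04, 0.08, 0.16]  # assumed example thresholds in seconds

def quality_class(uncertainty, errors):
    # first threshold the uncertainty does not exceed gives the class, else 4
    for klass, limit in enumerate(errors):
        if uncertainty <= limit:
            return klass
    return 4

# quality_class(0.03, ErrorsP) -> 1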
@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def export(picks, fnout, parameter, eventinfo):
'''
Take <picks> dictionary and exports picking data to a focmec

@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def export(picks, fnout, parameter, eventinfo):
'''
Take <picks> dictionary and exports picking data to a HASH

@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def export(picks, fnout, parameter):
'''
Take <picks> dictionary and exports picking data to a HYPO71

@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def export(picks, fnout, parameter, eventinfo):
'''
Take <picks> dictionary and exports picking data to a hypoDD

@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def export(picks, fnout, parameter):
'''
Take <picks> dictionary and exports picking data to a HYPOSAT

@ -1,9 +1,10 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import subprocess
import os
import glob
import os
import subprocess

from obspy import read_events
from pylot.core.io.phases import writephases
from pylot.core.util.utils import getPatternLine, runProgram, which
@ -11,9 +12,11 @@ from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


class NLLocError(EnvironmentError):
pass


def export(picks, fnout, parameter):
'''
Take <picks> dictionary and exports picking data to a NLLOC-obs
@ -58,7 +61,7 @@ def modify_inputs(ctrfn, root, nllocoutn, phasefn, tttn):
locfiles = 'LOCFILES %s NLLOC_OBS %s %s 0\n' % (phasefile, tttable, nllocout)

# modification of NLLoc-control file
print ("Modifying NLLoc-control file %s ..." % ctrfile)
print("Modifying NLLoc-control file %s ..." % ctrfile)
curlocfiles = getPatternLine(ctrfile, 'LOCFILES')
nllfile = open(ctrfile, 'r')
filedata = nllfile.read()
@ -94,7 +97,7 @@ def locate(fnin, infile=None):

def read_location(fn):
path, file = os.path.split(fn)
file = glob.glob1(path, file + '.[0-9]*.grid0.loc.hyp')
file = glob.glob1(path, file + '.[0-9]*.grid0.loc.hyp')
if len(file) > 1:
raise IOError('ambiguous location name {0}'.format(file))
fn = os.path.join(path, file[0])

@ -6,6 +6,7 @@ from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()


def export(picks, fnout, parameter, eventinfo):
'''
Take <picks> dictionary and exports picking data to a VELEST-cnv

@ -11,14 +11,14 @@ function conglomerate utils.

import matplotlib.pyplot as plt
import numpy as np
from pylot.core.io.data import Data
from pylot.core.io.inputs import PylotParameter
from pylot.core.pick.picker import AICPicker, PragPicker
from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.charfuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
from pylot.core.pick.picker import AICPicker, PragPicker
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
getSNR, fmpicker, checkPonsets, wadaticheck
from pylot.core.util.utils import getPatternLine, gen_Pool
from pylot.core.io.data import Data


def autopickevent(data, param, iplot=0, fig_dict=None):
@ -43,10 +43,10 @@ def autopickevent(data, param, iplot=0, fig_dict=None):

if not iplot:
input_tuples.append((topick, param, apverbose))
if iplot>0:
if iplot > 0:
all_onsets[station] = autopickstation(topick, param, verbose=apverbose, iplot=iplot, fig_dict=fig_dict)

if iplot>0:
if iplot > 0:
print('iPlot Flag active: NO MULTIPROCESSING possible.')
return all_onsets

@ -70,7 +70,7 @@ def autopickevent(data, param, iplot=0, fig_dict=None):

def call_autopickstation(input_tuple):
wfstream, pickparam, verbose = input_tuple
#multiprocessing not possible with interactive plotting
# multiprocessing not possible with interactive plotting
return autopickstation(wfstream, pickparam, verbose, iplot=0)


@ -291,7 +291,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
'Skipping control function checkZ4S.'
if verbose: print(msg)
else:
if iplot>1:
if iplot > 1:
if fig_dict:
fig = fig_dict['checkZ4s']
else:
@ -364,7 +364,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
mpickP, iplot, fig=fig)
else:
epickP, lpickP, Perror = earllatepicker(z_copy, nfacP, tsnrz,
mpickP, iplot)
mpickP, iplot)

# get SNR
[SNRP, SNRPdB, Pnoiselevel] = getSNR(z_copy, tsnrz, mpickP)
@ -567,10 +567,10 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
fig = fig_dict['el_S1pick']
else:
fig = None
epickS1, lpickS1, Serror1 = earllatepicker(h_copy, nfacS,
tsnrh,
mpickS, iplot,
fig=fig)
epickS1, lpickS1, Serror1 = earllatepicker(h_copy, nfacS,
tsnrh,
mpickS, iplot,
fig=fig)
else:
epickS1, lpickS1, Serror1 = earllatepicker(h_copy, nfacS,
tsnrh,
@ -706,7 +706,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
ax1.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5],
[-1, -1], 'r')
ax1.plot([refPpick.getpick(), refPpick.getpick()],
[-1.3, 1.3], 'r', linewidth=2, label='Final P Pick')
[-1.3, 1.3], 'r', linewidth=2, label='Final P Pick')
ax1.plot([refPpick.getpick() - 0.5, refPpick.getpick() + 0.5],
[1.3, 1.3], 'r', linewidth=2)
ax1.plot([refPpick.getpick() - 0.5, refPpick.getpick() + 0.5],
@ -714,28 +714,28 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
ax1.plot([lpickP, lpickP], [-1.1, 1.1], 'r--', label='lpp')
ax1.plot([epickP, epickP], [-1.1, 1.1], 'r--', label='epp')
ax1.set_title('%s, %s, P Weight=%d, SNR=%7.2f, SNR[dB]=%7.2f '
'Polarity: %s' % (tr_filt.stats.station,
tr_filt.stats.channel,
Pweight,
SNRP,
SNRPdB,
FM))
'Polarity: %s' % (tr_filt.stats.station,
tr_filt.stats.channel,
Pweight,
SNRP,
SNRPdB,
FM))
else:
ax1.set_title('%s, P Weight=%d, SNR=None, '
'SNRdB=None' % (tr_filt.stats.channel, Pweight))
'SNRdB=None' % (tr_filt.stats.channel, Pweight))
else:
ax1.set_title('%s, %s, P Weight=%d' % (tr_filt.stats.station,
tr_filt.stats.channel,
Pweight))
tr_filt.stats.channel,
Pweight))
ax1.legend()
ax1.set_yticks([])
ax1.set_ylim([-1.5, 1.5])
ax1.set_ylabel('Normalized Counts')
#fig.suptitle(tr_filt.stats.starttime)
# fig.suptitle(tr_filt.stats.starttime)

if len(edat[0]) > 1 and len(ndat[0]) > 1 and Sflag == 1:
# plot horizontal traces
ax2 = fig.add_subplot(3,1,2,sharex=ax1)
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
th1data = np.arange(0,
trH1_filt.stats.npts /
trH1_filt.stats.sampling_rate,
@ -750,7 +750,7 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
arhcf1.getCF() / max(arhcf1.getCF()), 'b', label='CF1')
if aicSflag == 1:
ax2.plot(arhcf2.getTimeArray(),
arhcf2.getCF() / max(arhcf2.getCF()), 'm', label='CF2')
arhcf2.getCF() / max(arhcf2.getCF()), 'm', label='CF2')
ax2.plot(
[aicarhpick.getpick(), aicarhpick.getpick()],
[-1, 1], 'g', label='Initial S Onset')
@ -782,9 +782,9 @@ def autopickstation(wfstream, pickparam, verbose=False, iplot=0, fig_dict=None):
ax2.set_yticks([])
ax2.set_ylim([-1.5, 1.5])
ax2.set_ylabel('Normalized Counts')
#fig.suptitle(trH1_filt.stats.starttime)
# fig.suptitle(trH1_filt.stats.starttime)

ax3 = fig.add_subplot(3,1,3, sharex=ax1)
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
th2data = np.arange(0,
trH2_filt.stats.npts /
trH2_filt.stats.sampling_rate,
@ -17,7 +17,6 @@ autoregressive prediction: application ot local and regional distances, Geophys.
:author: MAGS2 EP3 working group
"""

import matplotlib.pyplot as plt
import numpy as np
from obspy.core import Stream

@ -466,7 +465,7 @@ class ARHcf(CharacteristicFunction):
# prediction error = CF
cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2) \
+ np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2)) / (
2 * lpred))
2 * lpred))
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
@ -608,7 +607,7 @@ class AR3Ccf(CharacteristicFunction):
cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2) \
+ np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2) \
+ np.power(self.xpred[2][i:i + lpred] - xnp[2][i:i + lpred], 2)) / (
3 * lpred))
3 * lpred))
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0

@ -4,11 +4,10 @@
import copy
import operator
import os
import numpy as np
import glob
import matplotlib.pyplot as plt
from obspy import read_events

import matplotlib.pyplot as plt
import numpy as np
from obspy import read_events
from pylot.core.io.phases import picksdict_from_picks
from pylot.core.util.pdf import ProbabilityDensityFunction
from pylot.core.util.utils import find_in_list
@ -334,7 +333,7 @@ class PDFDictionary(object):
axarr[l].set_title(phase)
if l is 0:
axann = axarr[l].annotate(station, xy=(.05, .5),
xycoords='axes fraction')
xycoords='axes fraction')
bbox_props = dict(boxstyle='round', facecolor='lightgrey',
alpha=.7)
axann.set_bbox(bbox_props)
@ -352,7 +351,6 @@ class PDFstatistics(object):
Takes a path as argument.
"""


def __init__(self, directory):
"""Initiates some values needed when dealing with pdfs later"""
self._rootdir = directory
@ -449,7 +447,7 @@ class PDFstatistics(object):
else:
raise ValueError("for call to method {0} value has to be "
"defined but is 'None' ".format(method_options[
property.upper()]))
property.upper()]))

for pdf_dict in self:
# create worklist
@ -459,7 +457,7 @@ class PDFstatistics(object):

return rlist

def writeThetaToFile(self,array,out_dir):
def writeThetaToFile(self, array, out_dir):
"""
Method to write array like data to file. Useful since acquiring can take
serious amount of time when dealing with large databases.
@ -471,12 +469,12 @@ class PDFstatistics(object):
"""
fid = open(os.path.join(out_dir), 'w')
for val in array:
fid.write(str(val)+'\n')
fid.write(str(val) + '\n')
fid.close()


def main():
root_dir ='/home/sebastianp/Codetesting/xmls/'
root_dir = '/home/sebastianp/Codetesting/xmls/'
Insheim = PDFstatistics(root_dir)
Insheim.curphase = 'p'
qdlist = Insheim.get('qdf', 0.2)

@ -19,12 +19,13 @@ calculated after Diehl & Kissling (2009).
:author: MAGS2 EP3 working group / Ludger Kueperkoch
"""

import numpy as np
import matplotlib.pyplot as plt
from pylot.core.pick.utils import getnoisewin, getsignalwin
from pylot.core.pick.charfuns import CharacteristicFunction
import warnings

import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.utils import getnoisewin, getsignalwin


class AutoPicker(object):
'''
@ -212,14 +213,14 @@ class AICPicker(AutoPicker):
self.Data[0].data = self.Data[0].data * 1000000
# get signal window
isignal = getsignalwin(self.Tcf, self.Pick, self.TSNR[2])
ii = min([isignal[len(isignal)-1], len(self.Tcf)])
ii = min([isignal[len(isignal) - 1], len(self.Tcf)])
isignal = isignal[0:ii]
try:
aic[isignal]
aic[isignal]
except IndexError as e:
msg = "Time series out of bounds! {}".format(e)
print(msg)
return
msg = "Time series out of bounds! {}".format(e)
print(msg)
return
# calculate SNR from CF
self.SNR = max(abs(aic[isignal] - np.mean(aic[isignal]))) / \
max(abs(aic[inoise] - np.mean(aic[inoise])))
@ -242,7 +243,7 @@ class AICPicker(AutoPicker):
print("Choose longer slope determination window!")
if self.iplot > 1:
if not self.fig:
fig = plt.figure() #self.iplot) ### WHY? MP MP
fig = plt.figure() # self.iplot) ### WHY? MP MP
else:
fig = self.fig
ax = fig.add_subplot(111)
@ -271,7 +272,7 @@ class AICPicker(AutoPicker):

if self.iplot > 1:
if not self.fig:
fig = plt.figure()#self.iplot)
fig = plt.figure() # self.iplot)
else:
fig = self.fig
ax1 = fig.add_subplot(211)
@ -285,19 +286,23 @@ class AICPicker(AutoPicker):
ax1.legend()

if self.Pick is not None:
ax2 = fig.add_subplot(2,1,2, sharex=ax1)
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
ax2.plot(self.Tcf, x, 'k', label='Data')
ax1.axvspan(self.Tcf[inoise[0]],self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax1.axvspan(self.Tcf[isignal[0]],self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
ax1.axvspan(self.Tcf[iislope[0]],self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0, label='Slope Window')
ax1.axvspan(self.Tcf[inoise[0]], self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax1.axvspan(self.Tcf[isignal[0]], self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0,
label='Signal Window')
ax1.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
label='Slope Window')

ax2.axvspan(self.Tcf[inoise[0]],self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax2.axvspan(self.Tcf[isignal[0]],self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0, label='Signal Window')
ax2.axvspan(self.Tcf[iislope[0]],self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0, label='Slope Window')
ax2.axvspan(self.Tcf[inoise[0]], self.Tcf[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
ax2.axvspan(self.Tcf[isignal[0]], self.Tcf[isignal[-1]], color='b', alpha=0.2, lw=0,
label='Signal Window')
ax2.axvspan(self.Tcf[iislope[0]], self.Tcf[iislope[-1]], color='g', alpha=0.2, lw=0,
label='Slope Window')
ax2.plot(self.Tcf[iislope], datafit, 'g', linewidth=2, label='Slope')

ax1.set_title('Station %s, SNR=%7.2f, Slope= %12.2f counts/s' % (self.Data[0].stats.station,
self.SNR, self.slope))
self.SNR, self.slope))
ax2.set_xlabel('Time [s] since %s' % self.Data[0].stats.starttime)
ax2.set_ylabel('Counts')
ax2.set_yticks([])
@ -402,7 +407,7 @@ class PragPicker(AutoPicker):

if self.getiplot() > 1:
if not self.fig:
fig = plt.figure()#self.getiplot())
fig = plt.figure() # self.getiplot())
else:
fig = self.fig
ax = fig.add_subplot(111)
@ -9,6 +9,7 @@
"""

import warnings

import matplotlib.pyplot as plt
import numpy as np
from obspy.core import Stream, UTCDateTime
@ -75,8 +76,8 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None):
ildown, = np.where(x[isignal] < -nlevel)
if not ilup.size and not ildown.size:
if verbosity:
print ("earllatepicker: Signal lower than noise level!\n"
"Skip this trace!")
print("earllatepicker: Signal lower than noise level!\n"
"Skip this trace!")
return LPick, EPick, PickError
il = min(np.min(ilup) if ilup.size else float('inf'),
np.min(ildown) if ildown.size else float('inf'))
@ -118,7 +119,7 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None):

if iplot > 1:
if not fig:
fig = plt.figure()#iplot)
fig = plt.figure() # iplot)
ax = fig.add_subplot(111)
ax.plot(t, x, 'k', label='Data')
ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
@ -131,9 +132,9 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, verbosity=1, fig=None):
ax.plot([LPick, LPick], [max(x) / 2, -max(x) / 2], '--k', label='lpp')
ax.plot([EPick, EPick], [max(x) / 2, -max(x) / 2], '--k', label='epp')
ax.plot([Pick1 + PickError, Pick1 + PickError],
[max(x) / 2, -max(x) / 2], 'r--', label='spe')
[max(x) / 2, -max(x) / 2], 'r--', label='spe')
ax.plot([Pick1 - PickError, Pick1 - PickError],
[max(x) / 2, -max(x) / 2], 'r--')
[max(x) / 2, -max(x) / 2], 'r--')
ax.set_xlabel('Time [s] since %s' % X[0].stats.starttime)
ax.set_yticks([])
ax.set_title(
@ -173,7 +174,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):

FM = None
if Pick is not None:
print ("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")
print("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")

xraw = Xraw[0].data
xfilt = Xfilt[0].data
@ -212,15 +213,15 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
else:
li1 = index1[0]
if np.size(xraw[ipick[0][1]:ipick[0][li1]]) == 0:
print ("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
print("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
P1 = None
else:
imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][li1]]))
if imax1 == 0:
imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][index1[1]]]))
if imax1 == 0:
print ("fmpicker: Zero crossings too close!")
print ("Skip first motion determination!")
print("fmpicker: Zero crossings too close!")
print("Skip first motion determination!")
return FM

islope1 = np.where((t >= Pick) & (t <= Pick + t[imax1]))
@ -254,15 +255,15 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
else:
li2 = index2[0]
if np.size(xfilt[ipick[0][1]:ipick[0][li2]]) == 0:
print ("fmpicker: Onset on filtered trace too emergent for first motion determination!")
print("fmpicker: Onset on filtered trace too emergent for first motion determination!")
P2 = None
else:
imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][li2]]))
if imax2 == 0:
imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][index2[1]]]))
if imax2 == 0:
print ("fmpicker: Zero crossings too close!")
print ("Skip first motion determination!")
print("fmpicker: Zero crossings too close!")
print("Skip first motion determination!")
return FM

islope2 = np.where((t >= Pick) & (t <= Pick + t[imax2]))
@ -286,11 +287,11 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
elif P1[0] > 0 >= P2[0]:
FM = '+'

print ("fmpicker: Found polarity %s" % FM)
print("fmpicker: Found polarity %s" % FM)

if iplot > 1:
if not fig:
fig = plt.figure()#iplot)
fig = plt.figure() # iplot)
ax1 = fig.add_subplot(211)
ax1.plot(t, xraw, 'k')
ax1.plot([Pick, Pick], [max(xraw), -max(xraw)], 'b', linewidth=2, label='Pick')
@ -304,11 +305,11 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0, fig=None):
ax1.set_title('First-Motion Determination, %s, Unfiltered Data' % Xraw[
0].stats.station)

ax2=fig.add_subplot(2,1,2, sharex=ax1)
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
ax2.set_title('First-Motion Determination, Filtered Data')
ax2.plot(t, xfilt, 'k')
ax2.plot([Pick, Pick], [max(xfilt), -max(xfilt)], 'b',
linewidth=2)
linewidth=2)
if P2 is not None:
ax2.plot(t[islope2], xfilt[islope2])
ax2.plot(zc2, np.zeros(len(zc2)), '*g', markersize=14)
@ -372,7 +373,7 @@ def getSNR(X, TSNR, t1, tracenum=0):
# get signal window
isignal = getsignalwin(t, t1, TSNR[2])
if np.size(inoise) < 1:
print ("getSNR: Empty array inoise, check noise window!")
print("getSNR: Empty array inoise, check noise window!")
return SNR, SNRdB, noiselevel

# demean over entire waveform
@ -380,13 +381,13 @@ def getSNR(X, TSNR, t1, tracenum=0):

# calculate ratios
noiselevel = np.sqrt(np.mean(np.square(x[inoise])))
#signallevel = np.sqrt(np.mean(np.square(x[isignal])))
# signallevel = np.sqrt(np.mean(np.square(x[isignal])))

if np.size(isignal) < 1:
print ("getSNR: Empty array isignal, check signal window!")
print("getSNR: Empty array isignal, check signal window!")
return SNR, SNRdB, noiselevel

#noiselevel = np.abs(x[inoise]).max()
# noiselevel = np.abs(x[inoise]).max()
signallevel = np.abs(x[isignal]).max()

SNR = signallevel / noiselevel
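# Illustrative sketch of the SNR computed in getSNR above: the noise level is
# the RMS of the noise window and the signal level the maximum absolute
# amplitude in the signal window. The sample values and the dB conversion
# below are assumptions, not quoted code.
import numpy as np
noise = np.array([0.1, -0.2, 0.15])      # assumed noise-window samples
signal = np.array([1.0, -2.5, 1.8])      # assumed signal-window samples
noiselevel = np.sqrt(np.mean(np.square(noise)))
signallevel = np.abs(signal).max()
SNR = signallevel / noiselevel
SNRdB = 10. * np.log10(SNR)              # assumed dB definition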
@ -418,9 +419,9 @@ def getnoisewin(t, t1, tnoise, tgap):
inoise, = np.where((t <= max([t1 - tgap, 0])) \
& (t >= max([t1 - tnoise - tgap, 0])))
if np.size(inoise) < 1:
inoise, = np.where((t>=t[0]) & (t<=t1))
inoise, = np.where((t >= t[0]) & (t <= t1))
if np.size(inoise) < 1:
print ("getnoisewin: Empty array inoise, check noise window!")
print("getnoisewin: Empty array inoise, check noise window!")

return inoise

@ -444,7 +445,7 @@ def getsignalwin(t, t1, tsignal):
isignal, = np.where((t <= min([t1 + tsignal, len(t)])) \
& (t >= t1))
if np.size(isignal) < 1:
print ("getsignalwin: Empty array isignal, check signal window!")
print("getsignalwin: Empty array isignal, check signal window!")

return isignal

@ -487,7 +488,7 @@ def getResolutionWindow(snr, extent):
time_resolution = res_wins[extent]['LRW']
elif snr < 3.:
time_resolution = res_wins[extent]['MRW']
elif snr >3.:
elif snr > 3.:
time_resolution = res_wins[extent]['HRW']
else:
time_resolution = res_wins[extent]['VLRW']
@ -573,8 +574,8 @@ def wadaticheck(pickdic, dttolerance, iplot):

# calculate vp/vs ratio before check
vpvsr = p1[0] + 1
print ("###############################################")
print ("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)
print("###############################################")
print("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)

checkedPpicks = []
checkedSpicks = []
@ -611,23 +612,23 @@ def wadaticheck(pickdic, dttolerance, iplot):

# calculate vp/vs ratio after check
cvpvsr = p2[0] + 1
print ("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print ("wadatacheck: Skipped %d S pick(s)" % ibad)
print("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print("wadatacheck: Skipped %d S pick(s)" % ibad)
else:
print ("###############################################")
print ("wadatacheck: Not enough checked S-P times available!")
print ("Skip Wadati check!")
print("###############################################")
print("wadatacheck: Not enough checked S-P times available!")
print("Skip Wadati check!")

checkedonsets = pickdic

else:
print ("wadaticheck: Not enough S-P times available for reliable regression!")
print ("Skip wadati check!")
print("wadaticheck: Not enough S-P times available for reliable regression!")
print("Skip wadati check!")
wfitflag = 1

# plot results
if iplot > 0:
plt.figure()#iplot)
plt.figure() # iplot)
f1, = plt.plot(Ppicks, SPtimes, 'ro')
if wfitflag == 0:
f2, = plt.plot(Ppicks, wdfit, 'k')
@ -645,11 +646,13 @@ def wadaticheck(pickdic, dttolerance, iplot):

return checkedonsets

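# Illustrative sketch of the Wadati relation used in wadaticheck above: S-P
# times grow linearly with the P onset time, and the fitted slope plus one
# gives the Vp/Vs ratio. The sample values below are assumed.
import numpy as np
Ppicks = np.array([10.0, 12.0, 15.0, 20.0])   # assumed P onsets [s]
SPtimes = np.array([7.3, 8.8, 11.0, 14.6])    # assumed S-P times [s]
p1 = np.polyfit(Ppicks, SPtimes, 1)           # linear regression (slope, intercept)
vpvsr = p1[0] + 1                             # Vp/Vs ratio before check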
def RMS(X):
'''
Function returns root mean square of a given array X
'''
return np.sqrt(np.sum(np.power(X, 2))/len(X))
return np.sqrt(np.sum(np.power(X, 2)) / len(X))


def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fig=None):
'''
@ -684,7 +687,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fi

assert isinstance(X, Stream), "%s is not a stream object" % str(X)

print ("Checking signal length ...")
print("Checking signal length ...")

if len(X) > 1:
# all three components available
@ -714,17 +717,17 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot=0, fi
numoverthr = len(np.where(rms[isignal] >= minsiglevel)[0])

if numoverthr >= minnum:
print ("checksignallength: Signal reached required length.")
print("checksignallength: Signal reached required length.")
returnflag = 1
else:
print ("checksignallength: Signal shorter than required minimum signal length!")
print ("Presumably picked noise peak, pick is rejected!")
print ("(min. signal length required: %s s)" % minsiglength)
print("checksignallength: Signal shorter than required minimum signal length!")
print("Presumably picked noise peak, pick is rejected!")
print("(min. signal length required: %s s)" % minsiglength)
returnflag = 0

if iplot == 2:
if not fig:
fig = plt.figure()#iplot)
fig = plt.figure() # iplot)
ax = fig.add_subplot(111)
ax.plot(t, rms, 'k', label='RMS Data')
ax.axvspan(t[inoise[0]], t[inoise[-1]], color='y', alpha=0.2, lw=0, label='Noise Window')
@ -771,8 +774,8 @@ def checkPonsets(pickdic, dttolerance, iplot):
stations.append(key)

# apply jackknife bootstrapping on variance of P onsets
print ("###############################################")
print ("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
print("###############################################")
print("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
[xjack, PHI_pseudo, PHI_sub] = jackknife(Ppicks, 'VAR', 1)
# get pseudo variances smaller than average variances
# (times safety factor), these picks passed jackknife test
@ -780,7 +783,7 @@ def checkPonsets(pickdic, dttolerance, iplot):
# these picks did not pass jackknife test
badjk = np.where(PHI_pseudo > 5 * xjack)
badjkstations = np.array(stations)[badjk]
print ("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))
print("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))
print(badjkstations)

# calculate median from these picks
@ -793,9 +796,9 @@ def checkPonsets(pickdic, dttolerance, iplot):
goodstations = np.array(stations)[igood]
badstations = np.array(stations)[ibad]

print ("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print ("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))
print("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))

goodmarker = 'goodPonsetcheck'
badmarker = 'badPonsetcheck'
@ -863,8 +866,8 @@ def jackknife(X, phi, h):
g = len(X) / h

if type(g) is not int:
print ("jackknife: Cannot divide quantity X in equal sized subgroups!")
print ("Choose another size for subgroups!")
print("jackknife: Cannot divide quantity X in equal sized subgroups!")
print("Choose another size for subgroups!")
return PHI_jack, PHI_pseudo, PHI_sub
else:
# estimator of undisturbed spot check
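# Illustrative sketch of the delete-one jackknife idea behind the call
# jackknife(Ppicks, 'VAR', 1) in checkPonsets above: pseudo-values are built
# from the full-sample variance and the leave-one-out variances. This
# standalone variant is an assumption about the intent, not a copy of the
# PyLoT implementation.
import numpy as np

def jackknife_var_pseudovalues(x):
    x = np.asarray(x, dtype=float)
    n = len(x)
    phi_full = np.var(x)                                        # full-sample estimator
    phi_sub = np.array([np.var(np.delete(x, i)) for i in range(n)])
    phi_pseudo = n * phi_full - (n - 1) * phi_sub               # pseudo-values
    return phi_pseudo.mean(), phi_pseudo, phi_sub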
@ -932,7 +935,7 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):

assert isinstance(X, Stream), "%s is not a stream object" % str(X)

print ("Check for spuriously picked S onset instead of P onset ...")
print("Check for spuriously picked S onset instead of P onset ...")

returnflag = 0

@ -963,9 +966,9 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
ediff = (edat[0].stats.starttime - min_t)

# get signal windows
isignalz = getsignalwin(tz, pick-zdiff, checkwin)
isignaln = getsignalwin(tn, pick-ndiff, checkwin)
isignale = getsignalwin(te, pick-ediff, checkwin)
isignalz = getsignalwin(tz, pick - zdiff, checkwin)
isignaln = getsignalwin(tn, pick - ndiff, checkwin)
isignale = getsignalwin(te, pick - ediff, checkwin)

# calculate RMS of traces
rmsz = RMS(zdat[0].data[isignalz])
@ -978,9 +981,9 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
# vertical P-coda level must exceed horizontal P-coda level
# zfac times encodalevel
if rmsz < minsiglevel:
print ("checkZ4S: Maybe S onset? Skip this P pick!")
print("checkZ4S: Maybe S onset? Skip this P pick!")
else:
print ("checkZ4S: P onset passes checkZ4S test!")
print("checkZ4S: P onset passes checkZ4S test!")
returnflag = 1

if iplot > 1:
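# Illustrative sketch of the checkZ4S criterion above: the vertical-component
# RMS in the P-coda window has to exceed a horizontal reference level scaled
# by zfac, otherwise the onset is treated as a probable S pick. The values and
# the way the N and E levels are combined here are assumptions.
zfac = 1.5                                  # assumed factor
rmsz, rmsn, rmse = 0.8, 0.7, 0.6            # assumed per-component RMS values
minsiglevel = (rmsn + rmse) / 2.0 * zfac    # assumed horizontal reference level
probably_s_onset = rmsz < minsiglevel       # True -> skip this P pick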
@ -996,28 +999,28 @@ def checkZ4S(X, pick, zfac, checkwin, iplot, fig=None):
'N': ndiff,
'E': ediff}

signal_dict = {'Z': isignalz,
'N': isignaln,
'E': isignale}
signal_dict = {'Z': isignalz,
'N': isignaln,
'E': isignale}

for i, key in enumerate(['Z', 'N', 'E']):
rms = rms_dict[key]
trace = traces_dict[key]
t = np.arange(diff_dict[key], trace.stats.npts / trace.stats.sampling_rate+diff_dict[key],
t = np.arange(diff_dict[key], trace.stats.npts / trace.stats.sampling_rate + diff_dict[key],
trace.stats.delta)
if i == 0:
ax1 = fig.add_subplot(3, 1, i+1)
ax1 = fig.add_subplot(3, 1, i + 1)
ax = ax1
ax.set_title('CheckZ4S, Station %s' % zdat[0].stats.station)
else:
ax = fig.add_subplot(3,1,i+1, sharex=ax1)
ax = fig.add_subplot(3, 1, i + 1, sharex=ax1)
ax.plot(t, abs(trace.data), color='b', label='abs')
ax.plot(t, trace.data, color='k')
name = str(trace.stats.channel) + ': {}'.format(rms)
ax.plot([pick, pick+checkwin], [rms, rms], 'r', label='RMS {}'.format(name))
ax.plot([pick, pick + checkwin], [rms, rms], 'r', label='RMS {}'.format(name))
ax.plot([pick, pick], ax.get_ylim(), 'm', label='Pick')
ax.set_ylabel('Normalized Counts')
ax.axvspan(pick, pick+checkwin, color='c', alpha=0.2,
ax.axvspan(pick, pick + checkwin, color='c', alpha=0.2,
lw=0)
ax.legend()
ax.set_xlabel('Time [s] since %s' % zdat[0].stats.starttime)

@ -8,14 +8,14 @@

:author: Ludger Kueperkoch / MAGS2 EP3 working group
"""
import pdb
import numpy as np
import matplotlib.pyplot as plt
from obspy.core import Stream, UTCDateTime
import warnings

import matplotlib.pyplot as plt
import numpy as np
from obspy.core import Stream, UTCDateTime


def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):

def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode=False):
'''
Function to derive earliest and latest possible pick after Diehl & Kissling (2009)
as reasonable uncertainties. Latest possible pick is based on noise level,
@ -45,7 +45,8 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):
EPick = None
PickError = None
if stealthMode is False:
print 'earllatepicker: Get earliest and latest possible pick relative to most likely pick ...'
print
'earllatepicker: Get earliest and latest possible pick relative to most likely pick ...'

x = X[0].data
t = np.arange(0, X[0].stats.npts / X[0].stats.sampling_rate,
@ -61,8 +62,8 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):
ilup, = np.where(x[isignal] > nlevel)
ildown, = np.where(x[isignal] < -nlevel)
if not ilup.size and not ildown.size:
print ("earllatepicker: Signal lower than noise level!")
print ("Skip this trace!")
print("earllatepicker: Signal lower than noise level!")
print("Skip this trace!")
return LPick, EPick, PickError
il = min(np.min(ilup) if ilup.size else float('inf'),
np.min(ildown) if ildown.size else float('inf'))
@ -70,20 +71,21 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):

# get earliest possible pick

EPick = np.nan; count = 0
EPick = np.nan;
count = 0
pis = isignal

# if EPick stays NaN the signal window size will be doubled
while np.isnan(EPick):
if count > 0:
print("earllatepicker: Doubled signal window size %s time(s) "
"because of NaN for earliest pick." %count)
"because of NaN for earliest pick." % count)
if stealthMode is False:
print("\nearllatepicker: Doubled signal window size %s time(s) "
"because of NaN for earliest pick." %count)
"because of NaN for earliest pick." % count)
isigDoubleWinStart = pis[-1] + 1
isignalDoubleWin = np.arange(isigDoubleWinStart,
isigDoubleWinStart + len(pis))
isigDoubleWinStart + len(pis))
if (isigDoubleWinStart + len(pis)) < X[0].data.size:
pis = np.concatenate((pis, isignalDoubleWin))
else:
@ -97,7 +99,6 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=0, stealthMode = False):
# T0/4 is assumed as time difference between most likely and earliest possible pick!
EPick = Pick1 - T0 / 2

# get symmetric pick error as mean from earliest and latest possible pick
# by weighting latest possible pick two times earliest possible pick
diffti_tl = LPick - Pick1
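# Illustrative sketch of the symmetric pick error described in the comment
# above: the latest-minus-most-likely difference is weighted twice against the
# earliest one. The exact weighting and sample values below are assumptions
# based on that comment, not quoted code.
LPick, EPick, Pick1 = 12.6, 12.1, 12.3    # assumed latest / earliest / most likely picks [s]
diffti_tl = LPick - Pick1
diffti_te = Pick1 - EPick
PickError = (diffti_te + 2 * diffti_tl) / 3.0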
@ -165,7 +166,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0):

FM = None
if Pick is not None:
print ("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")
print("fmpicker: Get first motion (polarity) of onset using unfiltered seismogram...")

xraw = Xraw[0].data
xfilt = Xfilt[0].data
@ -204,15 +205,15 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0):
else:
li1 = index1[0]
if np.size(xraw[ipick[0][1]:ipick[0][li1]]) == 0:
print ("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
print("fmpicker: Onset on unfiltered trace too emergent for first motion determination!")
P1 = None
else:
imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][li1]]))
if imax1 == 0:
imax1 = np.argmax(abs(xraw[ipick[0][1]:ipick[0][index1[1]]]))
if imax1 == 0:
print ("fmpicker: Zero crossings too close!")
print ("Skip first motion determination!")
print("fmpicker: Zero crossings too close!")
print("Skip first motion determination!")
return FM

islope1 = np.where((t >= Pick) & (t <= Pick + t[imax1]))
@ -246,15 +247,15 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0):
else:
li2 = index2[0]
if np.size(xfilt[ipick[0][1]:ipick[0][li2]]) == 0:
print ("fmpicker: Onset on filtered trace too emergent for first motion determination!")
print("fmpicker: Onset on filtered trace too emergent for first motion determination!")
P2 = None
else:
imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][li2]]))
if imax2 == 0:
imax2 = np.argmax(abs(xfilt[ipick[0][1]:ipick[0][index2[1]]]))
if imax2 == 0:
print ("fmpicker: Zero crossings too close!")
print ("Skip first motion determination!")
print("fmpicker: Zero crossings too close!")
print("Skip first motion determination!")
return FM

islope2 = np.where((t >= Pick) & (t <= Pick + t[imax2]))
@ -278,7 +279,7 @@ def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=0):
elif P1[0] > 0 >= P2[0]:
FM = '+'

print ("fmpicker: Found polarity %s" % FM)
print("fmpicker: Found polarity %s" % FM)

if iplot > 1:
plt.figure(iplot)
@ -353,10 +354,10 @@ def getSNR(X, TSNR, t1):
# get signal window
isignal = getsignalwin(t, t1, TSNR[2])
if np.size(inoise) < 1:
print ("getSNR: Empty array inoise, check noise window!")
print("getSNR: Empty array inoise, check noise window!")
return
elif np.size(isignal) < 1:
print ("getSNR: Empty array isignal, check signal window!")
print("getSNR: Empty array isignal, check signal window!")
return

# demean over entire waveform
@ -392,9 +393,9 @@ def getnoisewin(t, t1, tnoise, tgap):

# get noise window
inoise, = np.where((t <= max([t1 - tgap, 0])) \
& (t >= max([t1 - tnoise - tgap, 0])))
& (t >= max([t1 - tnoise - tgap, 0])))
if np.size(inoise) < 1:
print ("getnoisewin: Empty array inoise, check noise window!")
print("getnoisewin: Empty array inoise, check noise window!")

return inoise

@ -416,9 +417,9 @@ def getsignalwin(t, t1, tsignal):

# get signal window
isignal, = np.where((t <= min([t1 + tsignal, len(t)])) \
& (t >= t1))
& (t >= t1))
if np.size(isignal) < 1:
print ("getsignalwin: Empty array isignal, check signal window!")
print("getsignalwin: Empty array isignal, check signal window!")

return isignal

@ -457,7 +458,7 @@ def getResolutionWindow(snr):
else:
time_resolution = res_wins['HRW']

return time_resolution/2
return time_resolution / 2


def wadaticheck(pickdic, dttolerance, iplot):
@ -485,17 +486,16 @@ def wadaticheck(pickdic, dttolerance, iplot):
SPtimes = []
for key in pickdic:
if pickdic[key]['P']['weight'] < 4 and pickdic[key]['S']['weight'] < 4:
# calculate S-P time
spt = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
# add S-P time to dictionary
pickdic[key]['SPt'] = spt
# add P onsets and corresponding S-P times to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
UTCSpick = UTCDateTime(pickdic[key]['S']['mpp'])
Ppicks.append(UTCPpick.timestamp)
Spicks.append(UTCSpick.timestamp)
SPtimes.append(spt)

# calculate S-P time
spt = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
# add S-P time to dictionary
pickdic[key]['SPt'] = spt
# add P onsets and corresponding S-P times to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
UTCSpick = UTCDateTime(pickdic[key]['S']['mpp'])
Ppicks.append(UTCPpick.timestamp)
Spicks.append(UTCSpick.timestamp)
SPtimes.append(spt)

if len(SPtimes) >= 3:
# calculate slope
@ -505,8 +505,8 @@ def wadaticheck(pickdic, dttolerance, iplot):

# calculate vp/vs ratio before check
vpvsr = p1[0] + 1
print ("###############################################")
print ("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)
print("###############################################")
print("wadaticheck: Average Vp/Vs ratio before check: %f" % vpvsr)

checkedPpicks = []
checkedSpicks = []
@ -527,7 +527,7 @@ def wadaticheck(pickdic, dttolerance, iplot):
ibad += 1
else:
marker = 'goodWadatiCheck'
checkedPpick = UTCDateTime(pickdic[key]['P']['mpp'])
checkedPpick = UTCDateTime(pickdic[key]['P']['mpp'])
checkedPpicks.append(checkedPpick.timestamp)
checkedSpick = UTCDateTime(pickdic[key]['S']['mpp'])
checkedSpicks.append(checkedSpick.timestamp)
@ -543,18 +543,18 @@ def wadaticheck(pickdic, dttolerance, iplot):

# calculate vp/vs ratio after check
cvpvsr = p2[0] + 1
print ("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print ("wadatacheck: Skipped %d S pick(s)" % ibad)
print("wadaticheck: Average Vp/Vs ratio after check: %f" % cvpvsr)
print("wadatacheck: Skipped %d S pick(s)" % ibad)
else:
print ("###############################################")
print ("wadatacheck: Not enough checked S-P times available!")
print ("Skip Wadati check!")
print("###############################################")
print("wadatacheck: Not enough checked S-P times available!")
print("Skip Wadati check!")

checkedonsets = pickdic

else:
print ("wadaticheck: Not enough S-P times available for reliable regression!")
print ("Skip wadati check!")
print("wadaticheck: Not enough S-P times available for reliable regression!")
print("Skip wadati check!")
wfitflag = 1

# plot results
@ -614,7 +614,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):

assert isinstance(X, Stream), "%s is not a stream object" % str(X)

print ("Checking signal length ...")
print("Checking signal length ...")

if len(X) > 1:
# all three components available
@ -639,25 +639,25 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
# calculate minimum adjusted signal level
minsiglevel = max(rms[inoise]) * nfac
# minimum adjusted number of samples over minimum signal level
minnum = len(isignal) * minpercent/100
minnum = len(isignal) * minpercent / 100
# get number of samples above minimum adjusted signal level
numoverthr = len(np.where(rms[isignal] >= minsiglevel)[0])

if numoverthr >= minnum:
print ("checksignallength: Signal reached required length.")
print("checksignallength: Signal reached required length.")
returnflag = 1
else:
print ("checksignallength: Signal shorter than required minimum signal length!")
print ("Presumably picked noise peak, pick is rejected!")
print ("(min. signal length required: %s s)" % minsiglength)
print("checksignallength: Signal shorter than required minimum signal length!")
print("Presumably picked noise peak, pick is rejected!")
print("(min. signal length required: %s s)" % minsiglength)
returnflag = 0

if iplot == 2:
plt.figure(iplot)
p1, = plt.plot(t,rms, 'k')
p1, = plt.plot(t, rms, 'k')
p2, = plt.plot(t[inoise], rms[inoise], 'c')
p3, = plt.plot(t[isignal],rms[isignal], 'r')
p4, = plt.plot([t[isignal[0]], t[isignal[len(isignal)-1]]],
p3, = plt.plot(t[isignal], rms[isignal], 'r')
p4, = plt.plot([t[isignal[0]], t[isignal[len(isignal) - 1]]],
[minsiglevel, minsiglevel], 'g', linewidth=2)
p5, = plt.plot([pick, pick], [min(rms), max(rms)], 'b', linewidth=2)
plt.legend([p1, p2, p3, p4, p5], ['RMS Data', 'RMS Noise Window',
@ -698,22 +698,22 @@ def checkPonsets(pickdic, dttolerance, iplot):
stations = []
for key in pickdic:
if pickdic[key]['P']['weight'] < 4:
# add P onsets to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
Ppicks.append(UTCPpick.timestamp)
stations.append(key)
# add P onsets to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
Ppicks.append(UTCPpick.timestamp)
stations.append(key)

# apply jackknife bootstrapping on variance of P onsets
print ("###############################################")
print ("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
[xjack,PHI_pseudo,PHI_sub] = jackknife(Ppicks, 'VAR', 1)
print("###############################################")
print("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
[xjack, PHI_pseudo, PHI_sub] = jackknife(Ppicks, 'VAR', 1)
# get pseudo variances smaller than average variances
# (times safety factor), these picks passed jackknife test
ij = np.where(PHI_pseudo <= 2 * xjack)
# these picks did not pass jackknife test
badjk = np.where(PHI_pseudo > 2 * xjack)
badjkstations = np.array(stations)[badjk]
print ("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))
print("checkPonsets: %d pick(s) did not pass jackknife test!" % len(badjkstations))

# calculate median from these picks
pmedian = np.median(np.array(Ppicks)[ij])
@ -725,9 +725,9 @@ def checkPonsets(pickdic, dttolerance, iplot):
goodstations = np.array(stations)[igood]
badstations = np.array(stations)[ibad]

print ("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print ("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))
print("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))

goodmarker = 'goodPonsetcheck'
badmarker = 'badPonsetcheck'
@ -794,8 +794,8 @@ def jackknife(X, phi, h):
g = len(X) / h

if type(g) is not int:
print ("jackknife: Cannot divide quantity X in equal sized subgroups!")
print ("Choose another size for subgroups!")
print("jackknife: Cannot divide quantity X in equal sized subgroups!")
print("Choose another size for subgroups!")
return PHI_jack, PHI_pseudo, PHI_sub
else:
# estimator of undisturbed spot check
@ -863,7 +863,7 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):

assert isinstance(X, Stream), "%s is not a stream object" % str(X)

print ("Check for spuriously picked S onset instead of P onset ...")
print("Check for spuriously picked S onset instead of P onset ...")

returnflag = 0

@ -876,10 +876,9 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
if len(ndat) == 0: # check for other components
ndat = X.select(component="1")

z = zdat[0].data
tz = np.arange(0, zdat[0].stats.npts / zdat[0].stats.sampling_rate,
zdat[0].stats.delta)
zdat[0].stats.delta)

# calculate RMS trace from vertical component
absz = np.sqrt(np.power(z, 2))
@ -904,16 +903,16 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
# vertical P-coda level must exceed horizontal P-coda level
# zfac times encodalevel
if zcodalevel < minsiglevel:
print ("checkZ4S: Maybe S onset? Skip this P pick!")
print("checkZ4S: Maybe S onset? Skip this P pick!")
else:
print ("checkZ4S: P onset passes checkZ4S test!")
print("checkZ4S: P onset passes checkZ4S test!")
returnflag = 1

if iplot > 1:
te = np.arange(0, edat[0].stats.npts / edat[0].stats.sampling_rate,
edat[0].stats.delta)
edat[0].stats.delta)
tn = np.arange(0, ndat[0].stats.npts / ndat[0].stats.sampling_rate,
ndat[0].stats.delta)
ndat[0].stats.delta)
plt.plot(tz, z / max(z), 'k')
plt.plot(tz[isignal], z[isignal] / max(z), 'r')
plt.plot(te, edat[0].data / max(edat[0].data) + 1, 'k')
@ -955,9 +954,8 @@ def writephases(arrivals, fformat, filename):
:type: string
'''

if fformat == 'NLLoc':
print ("Writing phases to %s for NLLoc" % filename)
print("Writing phases to %s for NLLoc" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write('# EQEVENT: Label: EQ001 Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n')
@ -969,34 +967,32 @@ def writephases(arrivals, fformat, filename):
onset = arrivals[key]['P']['mpp']
year = onset.year
month = onset.month
day =onset.day
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ms = onset.microsecond
ss_ms = ss + (ms / 1E06)
fid.write('%s ? ? ? P %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 1 \n' \
% (key, fm, year, month, day, hh, mm, ss_ms))
% (key, fm, year, month, day, hh, mm, ss_ms))
if arrivals[key]['S']['weight'] < 4:
fm = '?'
onset = arrivals[key]['S']['mpp']
year = onset.year
month = onset.month
day =onset.day
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ms = onset.microsecond
ss_ms = ss + (ms / 1E06)
fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 1 \n' \
% (key, fm, year, month, day, hh, mm, ss_ms))
% (key, fm, year, month, day, hh, mm, ss_ms))

fid.close()
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
import doctest
|
||||
|
||||
doctest.testmod()
|
||||
|
@ -1,12 +1,11 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import glob
import os
import sys

import numpy as np

from obspy import UTCDateTime, read_inventory, read
from obspy.io.xseed import Parser
from pylot.core.util.utils import key_for_set_value, find_in_list, \
@ -116,7 +115,7 @@ def make_time_line(line, datetime):
return newline

def evt_head_check(root_dir, out_dir = None):
def evt_head_check(root_dir, out_dir=None):
"""
A function to make sure that an arbitrary number of .gse files have correct values in their header.
:param root_dir: a directory leading to the .gse files.
@ -244,14 +243,14 @@ def restitute_trace(input_tuple):
remove_trace = True
# apply restitution to data
print("Correcting instrument at station %s, channel %s" \
% (tr.stats.station, tr.stats.channel))
% (tr.stats.station, tr.stats.channel))
try:
if invtype in ['resp', 'dless']:
try:
tr.simulate(**kwargs)
tr.simulate(**kwargs)
except ValueError as e:
vmsg = '{0}'.format(e)
print(vmsg)
vmsg = '{0}'.format(e)
print(vmsg)

else:
tr.attach_response(inventory)
@ -343,8 +342,8 @@ def get_prefilt(trace, tlow=(0.5, 0.9), thi=(5., 2.), verbosity=0):
trace.stats.station, trace.stats.channel))
# get corner frequencies for pre-filtering
fny = trace.stats.sampling_rate / 2
fc21 = fny - (fny * thi[0]/100.)
fc22 = fny - (fny * thi[1]/100.)
fc21 = fny - (fny * thi[0] / 100.)
fc22 = fny - (fny * thi[1] / 100.)
return (tlow[0], tlow[1], fc21, fc22)
@ -9,17 +9,19 @@ Created on Wed Feb 26 12:31:25 2014
import os
import platform

from pylot.core.loc import nll
from pylot.core.loc import hyposat
from pylot.core.io.inputs import PylotParameter
from pylot.core.loc import hypo71
from pylot.core.loc import hypodd
from pylot.core.loc import hyposat
from pylot.core.loc import nll
from pylot.core.loc import velest
from pylot.core.io.inputs import PylotParameter

def readDefaultFilterInformation(fname):
pparam = PylotParameter(fname)
return readFilterInformation(pparam)

def readFilterInformation(pylot_parameter):
p_filter = {'filtertype': pylot_parameter['filter_type'][0],
'freq': [pylot_parameter['minfreq'][0], pylot_parameter['maxfreq'][0]],
@ -31,6 +33,7 @@ def readFilterInformation(pylot_parameter):
'S': s_filter}
return filter_information

# determine system dependent path separator
system_name = platform.system()
if system_name in ["Linux", "Darwin"]:
@ -85,10 +88,10 @@ class SetChannelComponents(object):
def setCompPosition(self, component_alter, component, switch=True):
component_alter = str(component_alter)
if not component_alter in self.compName_Map.keys():
errMsg='setCompPosition: Unrecognized alternative component {}. Expecting one of {}.'
errMsg = 'setCompPosition: Unrecognized alternative component {}. Expecting one of {}.'
raise ValueError(errMsg.format(component_alter, self.compName_Map.keys()))
if not component in self.compPosition_Map.keys():
errMsg='setCompPosition: Unrecognized target component {}. Expecting one of {}.'
errMsg = 'setCompPosition: Unrecognized target component {}. Expecting one of {}.'
raise ValueError(errMsg.format(component, self.compPosition_Map.keys()))
print('setCompPosition: set component {} to {}'.format(component_alter, component))
if switch:
@ -105,6 +108,5 @@ class SetChannelComponents(object):
elif component in self.compName_Map.keys():
return self.compPosition_Map[self.compName_Map[component]]
else:
errMsg='getCompPosition: Unrecognized component {}. Expecting one of {} or {}.'
errMsg = 'getCompPosition: Unrecognized component {}. Expecting one of {} or {}.'
raise ValueError(errMsg.format(component, self.compPosition_Map.keys(), self.compName_Map.keys()))
@ -25,5 +25,6 @@ class OverwriteError(IOError):
class ParameterError(Exception):
pass

class ProcessingError(RuntimeError):
pass
@ -6,7 +6,6 @@ import os
from obspy import UTCDateTime
from obspy.core.event import Event as ObsPyEvent
from obspy.core.event import Origin, ResourceIdentifier

from pylot.core.io.phases import picks_from_picksdict

@ -14,10 +13,11 @@ class Event(ObsPyEvent):
'''
Pickable class derived from ~obspy.core.event.Event containing information on a single event.
'''

def __init__(self, path):
self.pylot_id = path.split('/')[-1]
# initialize super class
super(Event, self).__init__(resource_id=ResourceIdentifier('smi:local/'+self.pylot_id))
super(Event, self).__init__(resource_id=ResourceIdentifier('smi:local/' + self.pylot_id))
self.path = path
self.database = path.split('/')[-2]
self.datapath = path.split('/')[-3]
@ -38,7 +38,7 @@ class Event(ObsPyEvent):
if os.path.isfile(notesfile):
with open(notesfile) as infile:
path = str(infile.readlines()[0].split('\n')[0])
text = '[eventInfo: '+path+']'
text = '[eventInfo: ' + path + ']'
self.addNotes(text)
try:
datetime = UTCDateTime(path.split('/')[-1])
@ -73,7 +73,7 @@ class Event(ObsPyEvent):
'''
for station in picks:
self.pylot_picks[station] = picks[station]
#add ObsPy picks
# add ObsPy picks
self.picks = picks_from_picksdict(self.pylot_picks)

def addAutopicks(self, autopicks):
@ -1,16 +1,15 @@
|
||||
from mpl_toolkits.basemap import Basemap
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
import obspy
|
||||
from matplotlib import cm
|
||||
from scipy.interpolate import griddata
|
||||
from PySide import QtGui
|
||||
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
|
||||
from PySide import QtCore, QtGui
|
||||
|
||||
from mpl_toolkits.basemap import Basemap
|
||||
from pylot.core.util.widgets import PickDlg
|
||||
from scipy.interpolate import griddata
|
||||
|
||||
plt.interactive(False)
|
||||
|
||||
|
||||
class map_projection(QtGui.QWidget):
|
||||
def __init__(self, parent, figure=None):
|
||||
'''
|
||||
@ -29,7 +28,7 @@ class map_projection(QtGui.QWidget):
|
||||
self.init_stations()
|
||||
self.init_basemap(resolution='l')
|
||||
self.init_map()
|
||||
#self.show()
|
||||
# self.show()
|
||||
|
||||
def init_map(self):
|
||||
self.init_lat_lon_dimensions()
|
||||
@ -45,7 +44,7 @@ class map_projection(QtGui.QWidget):
|
||||
return
|
||||
data = self._parent.get_data().getWFData()
|
||||
for index in ind:
|
||||
station=str(self.station_names[index].split('.')[-1])
|
||||
station = str(self.station_names[index].split('.')[-1])
|
||||
try:
|
||||
pickDlg = PickDlg(self, parameter=self._parent._inputs,
|
||||
data=data.select(station=station),
|
||||
@ -110,21 +109,21 @@ class map_projection(QtGui.QWidget):
|
||||
|
||||
self.top_row.addWidget(QtGui.QLabel('Select a phase: '))
|
||||
self.top_row.addWidget(self.comboBox_phase)
|
||||
self.top_row.setStretch(1,1) #set stretch of item 1 to 1
|
||||
self.top_row.setStretch(1, 1) # set stretch of item 1 to 1
|
||||
|
||||
self.main_box.addWidget(self.canvas)
|
||||
self.main_box.addWidget(self.toolbar)
|
||||
|
||||
def init_stations(self):
|
||||
def get_station_names_lat_lon(parser):
|
||||
station_names=[]
|
||||
lat=[]
|
||||
lon=[]
|
||||
station_names = []
|
||||
lat = []
|
||||
lon = []
|
||||
for station in parser.stations:
|
||||
station_name=station[0].station_call_letters
|
||||
network=station[0].network_code
|
||||
station_name = station[0].station_call_letters
|
||||
network = station[0].network_code
|
||||
if not station_name in station_names:
|
||||
station_names.append(network+'.'+station_name)
|
||||
station_names.append(network + '.' + station_name)
|
||||
lat.append(station[0].latitude)
|
||||
lon.append(station[0].longitude)
|
||||
return station_names, lat, lon
|
||||
@ -136,18 +135,19 @@ class map_projection(QtGui.QWidget):
|
||||
|
||||
def init_picks(self):
|
||||
phase = self.comboBox_phase.currentText()
|
||||
|
||||
def get_picks(station_names):
|
||||
picks=[]
|
||||
picks = []
|
||||
for station in station_names:
|
||||
try:
|
||||
station=station.split('.')[-1]
|
||||
station = station.split('.')[-1]
|
||||
picks.append(self.picks_dict[station][phase]['mpp'])
|
||||
except:
|
||||
picks.append(np.nan)
|
||||
return picks
|
||||
|
||||
def get_picks_rel(picks):
|
||||
picks_rel=[]
|
||||
picks_rel = []
|
||||
picks_utc = []
|
||||
for pick in picks:
|
||||
if type(pick) is obspy.core.utcdatetime.UTCDateTime:
|
||||
@ -164,7 +164,7 @@ class map_projection(QtGui.QWidget):
|
||||
|
||||
def init_picks_active(self):
|
||||
def remove_nan_picks(picks):
|
||||
picks_no_nan=[]
|
||||
picks_no_nan = []
|
||||
for pick in picks:
|
||||
if not np.isnan(pick):
|
||||
picks_no_nan.append(pick)
|
||||
@ -174,8 +174,8 @@ class map_projection(QtGui.QWidget):
|
||||
|
||||
def init_stations_active(self):
|
||||
def remove_nan_lat_lon(picks, lat, lon):
|
||||
lat_no_nan=[]
|
||||
lon_no_nan=[]
|
||||
lat_no_nan = []
|
||||
lon_no_nan = []
|
||||
for index, pick in enumerate(picks):
|
||||
if not np.isnan(pick):
|
||||
lat_no_nan.append(lat[index])
|
||||
@ -202,14 +202,14 @@ class map_projection(QtGui.QWidget):
|
||||
self.xdim, self.ydim = get_x_y_dim(self.x, self.y)
|
||||
|
||||
def init_basemap(self, resolution='l'):
|
||||
#basemap = Basemap(projection=projection, resolution = resolution, ax=self.main_ax)
|
||||
basemap = Basemap(projection='lcc', resolution = resolution, ax=self.main_ax,
|
||||
# basemap = Basemap(projection=projection, resolution = resolution, ax=self.main_ax)
|
||||
basemap = Basemap(projection='lcc', resolution=resolution, ax=self.main_ax,
|
||||
width=5e6, height=2e6,
|
||||
lat_0=(min(self.lat)+max(self.lat))/2.,
|
||||
lon_0=(min(self.lon)+max(self.lon))/2.)
|
||||
lat_0=(min(self.lat) + max(self.lat)) / 2.,
|
||||
lon_0=(min(self.lon) + max(self.lon)) / 2.)
|
||||
|
||||
#basemap.fillcontinents(color=None, lake_color='aqua',zorder=1)
|
||||
basemap.drawmapboundary(zorder=2)#fill_color='darkblue')
|
||||
# basemap.fillcontinents(color=None, lake_color='aqua',zorder=1)
|
||||
basemap.drawmapboundary(zorder=2) # fill_color='darkblue')
|
||||
basemap.shadedrelief(zorder=3)
|
||||
basemap.drawcountries(zorder=4)
|
||||
basemap.drawstates(zorder=5)
|
||||
@ -219,8 +219,8 @@ class map_projection(QtGui.QWidget):
|
||||
|
||||
def init_lat_lon_grid(self):
|
||||
def get_lat_lon_axis(lat, lon):
|
||||
steplat = (max(lat)-min(lat))/250
|
||||
steplon = (max(lon)-min(lon))/250
|
||||
steplat = (max(lat) - min(lat)) / 250
|
||||
steplon = (max(lon) - min(lon)) / 250
|
||||
|
||||
lataxis = np.arange(min(lat), max(lat), steplat)
|
||||
lonaxis = np.arange(min(lon), max(lon), steplon)
|
||||
@ -235,7 +235,8 @@ class map_projection(QtGui.QWidget):
|
||||
|
||||
def init_picksgrid(self):
|
||||
self.picksgrid_no_nan = griddata((self.lat_no_nan, self.lon_no_nan),
|
||||
self.picks_no_nan, (self.latgrid, self.longrid), method='linear') ##################
|
||||
self.picks_no_nan, (self.latgrid, self.longrid),
|
||||
method='linear') ##################
|
||||
|
||||
def draw_contour_filled(self, nlevel='50'):
|
||||
levels = np.linspace(min(self.picks_no_nan), max(self.picks_no_nan), nlevel)
|
||||
@ -244,7 +245,7 @@ class map_projection(QtGui.QWidget):
|
||||
|
||||
def scatter_all_stations(self):
|
||||
self.sc = self.basemap.scatter(self.lon, self.lat, s=50, facecolor='none', latlon=True,
|
||||
zorder=10, picker=True, edgecolor='m', label='Not Picked')
|
||||
zorder=10, picker=True, edgecolor='m', label='Not Picked')
|
||||
self.cid = self.canvas.mpl_connect('pick_event', self.onpick)
|
||||
if self.eventLoc:
|
||||
lat, lon = self.eventLoc
|
||||
@ -255,7 +256,7 @@ class map_projection(QtGui.QWidget):
|
||||
lon = self.lon_no_nan
|
||||
lat = self.lat_no_nan
|
||||
|
||||
#workaround because of an issue with latlon transformation of arrays with len <3
|
||||
# workaround because of an issue with latlon transformation of arrays with len <3
|
||||
if len(lon) <= 2 and len(lat) <= 2:
|
||||
self.sc_picked = self.basemap.scatter(lon[0], lat[0], s=50, facecolor='white',
|
||||
c=self.picks_no_nan[0], latlon=True, zorder=11, label='Picked')
|
||||
@ -267,11 +268,11 @@ class map_projection(QtGui.QWidget):
|
||||
c=self.picks_no_nan, latlon=True, zorder=11, label='Picked')
|
||||
|
||||
def annotate_ax(self):
|
||||
self.annotations=[]
|
||||
self.annotations = []
|
||||
for index, name in enumerate(self.station_names):
|
||||
self.annotations.append(self.main_ax.annotate(' %s' % name, xy=(self.x[index], self.y[index]),
|
||||
fontsize='x-small', color='white', zorder=12))
|
||||
self.legend=self.main_ax.legend()
|
||||
self.legend = self.main_ax.legend()
|
||||
|
||||
def add_cbar(self, label):
|
||||
cbar = self.main_ax.figure.colorbar(self.sc_picked, fraction=0.025)
|
||||
@ -307,19 +308,19 @@ class map_projection(QtGui.QWidget):
|
||||
def remove_drawings(self):
|
||||
if hasattr(self, 'sc_picked'):
|
||||
self.sc_picked.remove()
|
||||
del(self.sc_picked)
|
||||
del (self.sc_picked)
|
||||
if hasattr(self, 'sc_event'):
|
||||
self.sc_event.remove()
|
||||
del(self.sc_event)
|
||||
del (self.sc_event)
|
||||
if hasattr(self, 'cbar'):
|
||||
self.cbar.remove()
|
||||
del(self.cbar)
|
||||
del (self.cbar)
|
||||
if hasattr(self, 'contourf'):
|
||||
self.remove_contourf()
|
||||
del(self.contourf)
|
||||
del (self.contourf)
|
||||
if hasattr(self, 'cid'):
|
||||
self.canvas.mpl_disconnect(self.cid)
|
||||
del(self.cid)
|
||||
del (self.cid)
|
||||
try:
|
||||
self.sc.remove()
|
||||
except Exception as e:
|
||||
@ -343,7 +344,7 @@ class map_projection(QtGui.QWidget):
|
||||
xlim = map.ax.get_xlim()
|
||||
ylim = map.ax.get_ylim()
|
||||
x, y = event.xdata, event.ydata
|
||||
zoom = {'up': 1./2.,
|
||||
zoom = {'up': 1. / 2.,
|
||||
'down': 2.}
|
||||
|
||||
if not event.xdata or not event.ydata:
|
||||
@ -351,10 +352,10 @@ class map_projection(QtGui.QWidget):
|
||||
|
||||
if event.button in zoom:
|
||||
factor = zoom[event.button]
|
||||
xdiff = (xlim[1]-xlim[0])*factor
|
||||
xdiff = (xlim[1] - xlim[0]) * factor
|
||||
xl = x - 0.5 * xdiff
|
||||
xr = x + 0.5 * xdiff
|
||||
ydiff = (ylim[1]-ylim[0])*factor
|
||||
ydiff = (ylim[1] - ylim[0]) * factor
|
||||
yb = y - 0.5 * ydiff
|
||||
yt = y + 0.5 * ydiff
|
||||
|
||||
@ -369,5 +370,3 @@ class map_projection(QtGui.QWidget):
|
||||
self.qmb = QtGui.QMessageBox(QtGui.QMessageBox.Icon.Warning,
|
||||
'Warning', message)
|
||||
self.qmb.show()
|
||||
|
||||
|
||||
|
@ -2,20 +2,23 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import warnings
|
||||
|
||||
import numpy as np
|
||||
from obspy import UTCDateTime
|
||||
from pylot.core.util.utils import fit_curve, find_nearest, clims
|
||||
from pylot.core.util.utils import fit_curve, clims
|
||||
from pylot.core.util.version import get_git_version as _getVersionString
|
||||
|
||||
__version__ = _getVersionString()
|
||||
__author__ = 'sebastianw'
|
||||
|
||||
|
||||
def create_axis(x0, incr, npts):
|
||||
ax = np.zeros(npts)
|
||||
for i in range(npts):
|
||||
ax[i] = x0 + incr * i
|
||||
return ax
|
||||
|
||||
|
||||
def gauss_parameter(te, tm, tl, eta):
|
||||
'''
|
||||
takes three onset times and returns the parameters sig1, sig2, a1 and a2
|
||||
@ -79,7 +82,7 @@ def gauss_branches(k, param_tuple):
|
||||
:returns fun_vals: list with function values along axes x
|
||||
'''
|
||||
|
||||
#python 3 workaround
|
||||
# python 3 workaround
|
||||
mu, sig1, sig2, a1, a2 = param_tuple
|
||||
|
||||
def _func(k, mu, sig1, sig2, a1, a2):
|
||||
@ -110,7 +113,7 @@ def exp_branches(k, param_tuple):
|
||||
:returns fun_vals: list with function values along axes x:
|
||||
'''
|
||||
|
||||
#python 3 workaround
|
||||
# python 3 workaround
|
||||
mu, sig1, sig2, a = param_tuple
|
||||
|
||||
def _func(k, mu, sig1, sig2, a):
|
||||
@ -313,8 +316,8 @@ class ProbabilityDensityFunction(object):
|
||||
:return float: rval
|
||||
'''
|
||||
|
||||
#rval = 0
|
||||
#for x in self.axis:
|
||||
# rval = 0
|
||||
# for x in self.axis:
|
||||
# rval += x * self.data(x)
|
||||
rval = self.mu
|
||||
# Not sure about this! That might not be the barycentre.
|
||||
@ -394,7 +397,6 @@ class ProbabilityDensityFunction(object):
|
||||
qu = self.quantile(1 - prob_value)
|
||||
return qu - ql
|
||||
|
||||
|
||||
def quantile_dist_frac(self, x):
|
||||
"""
|
||||
takes a probability value and returns the fraction of two
|
||||
@ -411,8 +413,7 @@ class ProbabilityDensityFunction(object):
|
||||
"""
|
||||
if x <= 0 or x >= 0.25:
|
||||
raise ValueError('Value out of range.')
|
||||
return self.quantile_distance(0.5-x)/self.quantile_distance(x)
|
||||
|
||||
return self.quantile_distance(0.5 - x) / self.quantile_distance(x)
|
||||
|
||||
def plot(self, label=None):
|
||||
import matplotlib.pyplot as plt
|
||||
@ -486,4 +487,3 @@ class ProbabilityDensityFunction(object):
|
||||
x0, npts = self.commonlimits(incr, other)
|
||||
|
||||
return x0, incr, npts
|
||||
|
||||
|
@ -3,6 +3,7 @@

import matplotlib.pyplot as plt

def create_bin_list(l_boundary, u_boundary, nbins=100):
"""
takes two boundaries and a number of bins and creates a list of bins for
@ -1,6 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import sys, os, traceback
|
||||
import multiprocessing
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from PySide.QtCore import QThread, Signal, Qt, Slot, QRunnable, QObject
|
||||
from PySide.QtGui import QDialog, QProgressBar, QLabel, QHBoxLayout, QPushButton
|
||||
|
||||
@ -109,6 +112,7 @@ class Worker(QRunnable):
|
||||
'''
|
||||
|
||||
'''
|
||||
|
||||
def __init__(self, fun, args,
|
||||
progressText=None,
|
||||
pb_widget=None,
|
||||
@ -116,7 +120,7 @@ class Worker(QRunnable):
|
||||
super(Worker, self).__init__()
|
||||
self.fun = fun
|
||||
self.args = args
|
||||
#self.kwargs = kwargs
|
||||
# self.kwargs = kwargs
|
||||
self.signals = WorkerSignals()
|
||||
self.progressText = progressText
|
||||
self.pb_widget = pb_widget
|
||||
@ -131,9 +135,9 @@ class Worker(QRunnable):
|
||||
result = self.fun(self.args)
|
||||
except:
|
||||
traceback.print_exc()
|
||||
exctype, value = sys.exc_info ()[:2]
|
||||
exctype, value = sys.exc_info()[:2]
|
||||
print(exctype, value, traceback.format_exc())
|
||||
#self.signals.error.emit ((exctype, value, traceback.format_exc ()))
|
||||
# self.signals.error.emit ((exctype, value, traceback.format_exc ()))
|
||||
else:
|
||||
self.signals.result.emit(result)
|
||||
finally:
|
||||
@ -173,13 +177,13 @@ class MultiThread(QThread):
|
||||
|
||||
def run(self):
|
||||
if self.redirect_stdout:
|
||||
sys.stdout = self
|
||||
sys.stdout = self
|
||||
try:
|
||||
if not self.ncores:
|
||||
self.ncores = multiprocessing.cpu_count()
|
||||
pool = multiprocessing.Pool(self.ncores)
|
||||
self.data = pool.map_async(self.func, self.args, callback=self.emitDone)
|
||||
#self.data = pool.apply_async(self.func, self.shotlist, callback=self.emitDone) #emit each time returned
|
||||
# self.data = pool.apply_async(self.func, self.shotlist, callback=self.emitDone) #emit each time returned
|
||||
pool.close()
|
||||
self._executed = True
|
||||
except Exception as e:
|
||||
|
@ -2,15 +2,15 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import hashlib
|
||||
import numpy as np
|
||||
from scipy.interpolate import splrep, splev
|
||||
import os
|
||||
import platform
|
||||
import re
|
||||
import warnings
|
||||
import subprocess
|
||||
|
||||
import numpy as np
|
||||
from obspy import UTCDateTime, read
|
||||
from pylot.core.io.inputs import PylotParameter
|
||||
from scipy.interpolate import splrep, splev
|
||||
|
||||
|
||||
def _pickle_method(m):
|
||||
@ -19,9 +19,11 @@ def _pickle_method(m):
|
||||
else:
|
||||
return getattr, (m.im_self, m.im_func.func_name)
|
||||
|
||||
|
||||
def fit_curve(x, y):
|
||||
return splev, splrep(x, y)
|
||||
|
||||
|
||||
def getindexbounds(f, eta):
|
||||
mi = f.argmax()
|
||||
m = max(f)
|
||||
@ -34,8 +36,8 @@ def getindexbounds(f, eta):
|
||||
def gen_Pool(ncores='max'):
|
||||
import multiprocessing
|
||||
|
||||
if ncores=='max':
|
||||
ncores=multiprocessing.cpu_count()
|
||||
if ncores == 'max':
|
||||
ncores = multiprocessing.cpu_count()
|
||||
|
||||
pool = multiprocessing.Pool(ncores)
|
||||
return pool
|
||||
@ -106,6 +108,7 @@ def findComboBoxIndex(combo_box, val):
|
||||
"""
|
||||
return combo_box.findText(val) if combo_box.findText(val) is not -1 else 0
|
||||
|
||||
|
||||
def find_in_list(list, str):
|
||||
"""
|
||||
takes a list of strings and a string and returns the first list item
|
||||
@ -135,6 +138,7 @@ def find_in_list(list, str):
|
||||
return rlist[0]
|
||||
return None
|
||||
|
||||
|
||||
def find_nearest(array, value):
|
||||
'''
|
||||
function find_nearest takes an array and a value and returns the
|
||||
@ -181,12 +185,14 @@ def fnConstructor(s):
|
||||
fn = '_' + fn
|
||||
return fn
|
||||
|
||||
|
||||
def real_None(value):
|
||||
if value == 'None':
|
||||
return None
|
||||
else:
|
||||
return value
|
||||
|
||||
|
||||
def four_digits(year):
|
||||
"""
|
||||
takes a two digit year integer and returns the correct four digit equivalent
|
||||
@ -307,6 +313,7 @@ def getPatternLine(fn, pattern):
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def is_executable(fn):
|
||||
"""
|
||||
takes a filename and returns True if the file is executable on the system
|
||||
@ -492,6 +499,7 @@ def runProgram(cmd, parameter=None):
|
||||
|
||||
subprocess.check_output('{} | tee /dev/stderr'.format(cmd), shell=True)
|
||||
|
||||
|
||||
def which(program, infile=None):
|
||||
"""
|
||||
takes a program name and returns the full path to the executable or None
|
||||
@ -538,6 +546,7 @@ def which(program, infile=None):
|
||||
|
||||
return None
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest
|
||||
|
||||
|
File diff suppressed because it is too large
@ -1,7 +1,8 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys, time
|
||||
import sys
|
||||
|
||||
from PySide.QtGui import QApplication
|
||||
from pylot.core.util.widgets import HelpForm
|
||||
|
||||
|
@ -2,6 +2,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
|
||||
import matplotlib
|
||||
|
||||
matplotlib.use('Qt4Agg')
|
||||
@ -10,7 +11,6 @@ matplotlib.rcParams['backend.qt4'] = 'PySide'
|
||||
from PySide.QtGui import QApplication
|
||||
from obspy.core import read
|
||||
from pylot.core.util.widgets import PickDlg
|
||||
import icons_rc
|
||||
|
||||
app = QApplication(sys.argv)
|
||||
|
||||
|
@ -1,7 +1,8 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys, time
|
||||
import sys
|
||||
|
||||
from PySide.QtGui import QApplication
|
||||
from pylot.core.util.widgets import PropertiesDlg
|
||||
|
||||
|
@ -2,7 +2,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
import sys, time
|
||||
import sys
|
||||
import time
|
||||
|
||||
from PySide.QtGui import QApplication
|
||||
from pylot.core.util.widgets import FilterOptionsDialog, PropertiesDlg, HelpForm
|
||||
|
||||
|
@ -8,20 +8,16 @@ import unittest
|
||||
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
|
||||
|
||||
def setUp(self):
|
||||
pass
|
||||
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
|
||||
def testName(self):
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
#import sys;sys.argv = ['', 'Test.testName']
|
||||
# import sys;sys.argv = ['', 'Test.testName']
|
||||
unittest.main()
|
||||
|
@ -8,12 +8,10 @@ import unittest
|
||||
|
||||
|
||||
class Test(unittest.TestCase):
|
||||
|
||||
|
||||
def testName(self):
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
#import sys;sys.argv = ['', 'Test.testName']
|
||||
# import sys;sys.argv = ['', 'Test.testName']
|
||||
unittest.main()
|
||||
|
@ -6,40 +6,40 @@
|
||||
Only for test purposes!
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import glob
|
||||
|
||||
from obspy.core import read
|
||||
import matplotlib.pyplot as plt
|
||||
import numpy as np
|
||||
from pylot.core.pick.charfuns import *
|
||||
from pylot.core.pick.picker import *
|
||||
import glob
|
||||
import argparse
|
||||
|
||||
|
||||
def run_makeCF(project, database, event, iplot, station=None):
|
||||
#parameters for CF calculation
|
||||
t2 = 7 #length of moving window for HOS calculation [sec]
|
||||
p = 4 #order of HOS
|
||||
cuttimes = [10, 50] #start and end time for CF calculation
|
||||
bpz = [2, 30] #corner frequencies of bandpass filter, vertical component
|
||||
bph = [2, 15] #corner frequencies of bandpass filter, horizontal components
|
||||
tdetz= 1.2 #length of AR-determination window [sec], vertical component
|
||||
tdeth= 0.8 #length of AR-determination window [sec], horizontal components
|
||||
tpredz = 0.4 #length of AR-prediction window [sec], vertical component
|
||||
tpredh = 0.4 #length of AR-prediction window [sec], horizontal components
|
||||
addnoise = 0.001 #add noise to seismogram for stable AR prediction
|
||||
arzorder = 2 #chosen order of AR process, vertical component
|
||||
arhorder = 4 #chosen order of AR process, horizontal components
|
||||
TSNRhos = [5, 0.5, 1, 0.1] #window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
|
||||
#from HOS-CF [noise window, safety gap, signal window, slope determination window]
|
||||
TSNRarz = [5, 0.5, 1, 0.5] #window lengths [s] for calculating SNR for earliest/lates pick and quality assessment
|
||||
#from ARZ-CF
|
||||
#get waveform data
|
||||
# parameters for CF calculation
|
||||
t2 = 7 # length of moving window for HOS calculation [sec]
|
||||
p = 4 # order of HOS
|
||||
cuttimes = [10, 50] # start and end time for CF calculation
|
||||
bpz = [2, 30] # corner frequencies of bandpass filter, vertical component
|
||||
bph = [2, 15] # corner frequencies of bandpass filter, horizontal components
|
||||
tdetz = 1.2 # length of AR-determination window [sec], vertical component
|
||||
tdeth = 0.8 # length of AR-determination window [sec], horizontal components
|
||||
tpredz = 0.4 # length of AR-prediction window [sec], vertical component
|
||||
tpredh = 0.4 # length of AR-prediction window [sec], horizontal components
|
||||
addnoise = 0.001 # add noise to seismogram for stable AR prediction
|
||||
arzorder = 2 # chosen order of AR process, vertical component
|
||||
arhorder = 4 # chosen order of AR process, horizontal components
|
||||
TSNRhos = [5, 0.5, 1, 0.1] # window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
|
||||
# from HOS-CF [noise window, safety gap, signal window, slope determination window]
|
||||
TSNRarz = [5, 0.5, 1, 0.5] # window lengths [s] for calculating SNR for earliest/lates pick and quality assessment
|
||||
# from ARZ-CF
|
||||
# get waveform data
|
||||
if station:
|
||||
dpz = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HZ.msd' % (project, database, event, station)
|
||||
dpe = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HE.msd' % (project, database, event, station)
|
||||
dpn = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HN.msd' % (project, database, event, station)
|
||||
#dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_z.gse' % (project, database, event, station)
|
||||
#dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_e.gse' % (project, database, event, station)
|
||||
#dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_n.gse' % (project, database, event, station)
|
||||
dpz = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HZ.msd' % (project, database, event, station)
|
||||
dpe = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HE.msd' % (project, database, event, station)
|
||||
dpn = '/data/%s/EVENT_DATA/LOCAL/%s/%s/%s*HN.msd' % (project, database, event, station)
|
||||
# dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_z.gse' % (project, database, event, station)
|
||||
# dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_e.gse' % (project, database, event, station)
|
||||
# dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_n.gse' % (project, database, event, station)
|
||||
else:
|
||||
# dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*_z.gse' % (project, database, event)
|
||||
# dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*_e.gse' % (project, database, event)
|
||||
@ -51,245 +51,253 @@ def run_makeCF(project, database, event, iplot, station=None):
|
||||
wfefiles = glob.glob(dpe)
|
||||
wfnfiles = glob.glob(dpn)
|
||||
if wfzfiles:
|
||||
for i in range(len(wfzfiles)):
|
||||
print 'Vertical component data found ...'
|
||||
print wfzfiles[i]
|
||||
st = read('%s' % wfzfiles[i])
|
||||
st_copy = st.copy()
|
||||
#filter and taper data
|
||||
tr_filt = st[0].copy()
|
||||
tr_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
|
||||
tr_filt.taper(max_percentage=0.05, type='hann')
|
||||
st_copy[0].data = tr_filt.data
|
||||
##############################################################
|
||||
#calculate HOS-CF using subclass HOScf of class CharacteristicFunction
|
||||
hoscf = HOScf(st_copy, cuttimes, t2, p) #instance of HOScf
|
||||
##############################################################
|
||||
#calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
|
||||
#class needs stream object => build it
|
||||
tr_aic = tr_filt.copy()
|
||||
tr_aic.data = hoscf.getCF()
|
||||
st_copy[0].data = tr_aic.data
|
||||
aiccf = AICcf(st_copy, cuttimes) #instance of AICcf
|
||||
##############################################################
|
||||
#get prelimenary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
|
||||
aicpick = AICPicker(aiccf, None, TSNRhos, 3, 10, None, 0.1)
|
||||
##############################################################
|
||||
#get refined onset time from HOS-CF using class Picker
|
||||
hospick = PragPicker(hoscf, None, TSNRhos, 2, 10, 0.001, 0.2, aicpick.getpick())
|
||||
#get earliest and latest possible picks
|
||||
hosELpick = EarlLatePicker(hoscf, 1.5, TSNRhos, None, 10, None, None, hospick.getpick())
|
||||
##############################################################
|
||||
#calculate ARZ-CF using subclass ARZcf of class CharcteristicFunction
|
||||
#get stream object of filtered data
|
||||
st_copy[0].data = tr_filt.data
|
||||
arzcf = ARZcf(st_copy, cuttimes, tpredz, arzorder, tdetz, addnoise) #instance of ARZcf
|
||||
##############################################################
|
||||
#calculate AIC-ARZ-CF using subclass AICcf of class CharacteristicFunction
|
||||
#class needs stream object => build it
|
||||
tr_arzaic = tr_filt.copy()
|
||||
tr_arzaic.data = arzcf.getCF()
|
||||
st_copy[0].data = tr_arzaic.data
|
||||
araiccf = AICcf(st_copy, cuttimes, tpredz, 0, tdetz) #instance of AICcf
|
||||
##############################################################
|
||||
#get onset time from AIC-ARZ-CF using subclass AICPicker of class AutoPicking
|
||||
aicarzpick = AICPicker(araiccf, 1.5, TSNRarz, 2, 10, None, 0.1)
|
||||
##############################################################
|
||||
#get refined onset time from ARZ-CF using class Picker
|
||||
arzpick = PragPicker(arzcf, 1.5, TSNRarz, 2.0, 10, 0.1, 0.05, aicarzpick.getpick())
|
||||
#get earliest and latest possible picks
|
||||
arzELpick = EarlLatePicker(arzcf, 1.5, TSNRarz, None, 10, None, None, arzpick.getpick())
|
||||
for i in range(len(wfzfiles)):
|
||||
print
|
||||
'Vertical component data found ...'
|
||||
print
|
||||
wfzfiles[i]
|
||||
st = read('%s' % wfzfiles[i])
|
||||
st_copy = st.copy()
|
||||
# filter and taper data
|
||||
tr_filt = st[0].copy()
|
||||
tr_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
|
||||
tr_filt.taper(max_percentage=0.05, type='hann')
|
||||
st_copy[0].data = tr_filt.data
|
||||
##############################################################
|
||||
# calculate HOS-CF using subclass HOScf of class CharacteristicFunction
|
||||
hoscf = HOScf(st_copy, cuttimes, t2, p) # instance of HOScf
|
||||
##############################################################
|
||||
# calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
|
||||
# class needs stream object => build it
|
||||
tr_aic = tr_filt.copy()
|
||||
tr_aic.data = hoscf.getCF()
|
||||
st_copy[0].data = tr_aic.data
|
||||
aiccf = AICcf(st_copy, cuttimes) # instance of AICcf
|
||||
##############################################################
|
||||
# get prelimenary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
|
||||
aicpick = AICPicker(aiccf, None, TSNRhos, 3, 10, None, 0.1)
|
||||
##############################################################
|
||||
# get refined onset time from HOS-CF using class Picker
|
||||
hospick = PragPicker(hoscf, None, TSNRhos, 2, 10, 0.001, 0.2, aicpick.getpick())
|
||||
# get earliest and latest possible picks
|
||||
hosELpick = EarlLatePicker(hoscf, 1.5, TSNRhos, None, 10, None, None, hospick.getpick())
|
||||
##############################################################
|
||||
# calculate ARZ-CF using subclass ARZcf of class CharcteristicFunction
|
||||
# get stream object of filtered data
|
||||
st_copy[0].data = tr_filt.data
|
||||
arzcf = ARZcf(st_copy, cuttimes, tpredz, arzorder, tdetz, addnoise) # instance of ARZcf
|
||||
##############################################################
|
||||
# calculate AIC-ARZ-CF using subclass AICcf of class CharacteristicFunction
|
||||
# class needs stream object => build it
|
||||
tr_arzaic = tr_filt.copy()
|
||||
tr_arzaic.data = arzcf.getCF()
|
||||
st_copy[0].data = tr_arzaic.data
|
||||
araiccf = AICcf(st_copy, cuttimes, tpredz, 0, tdetz) # instance of AICcf
|
||||
##############################################################
|
||||
# get onset time from AIC-ARZ-CF using subclass AICPicker of class AutoPicking
|
||||
aicarzpick = AICPicker(araiccf, 1.5, TSNRarz, 2, 10, None, 0.1)
|
||||
##############################################################
|
||||
# get refined onset time from ARZ-CF using class Picker
|
||||
arzpick = PragPicker(arzcf, 1.5, TSNRarz, 2.0, 10, 0.1, 0.05, aicarzpick.getpick())
|
||||
# get earliest and latest possible picks
|
||||
arzELpick = EarlLatePicker(arzcf, 1.5, TSNRarz, None, 10, None, None, arzpick.getpick())
|
||||
elif not wfzfiles:
|
||||
print 'No vertical component data found!'
|
||||
print
|
||||
'No vertical component data found!'
|
||||
|
||||
if wfefiles and wfnfiles:
|
||||
for i in range(len(wfefiles)):
|
||||
print 'Horizontal component data found ...'
|
||||
print wfefiles[i]
|
||||
print wfnfiles[i]
|
||||
#merge streams
|
||||
H = read('%s' % wfefiles[i])
|
||||
H += read('%s' % wfnfiles[i])
|
||||
H_copy = H.copy()
|
||||
#filter and taper data
|
||||
trH1_filt = H[0].copy()
|
||||
trH2_filt = H[1].copy()
|
||||
trH1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
trH2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
trH1_filt.taper(max_percentage=0.05, type='hann')
|
||||
trH2_filt.taper(max_percentage=0.05, type='hann')
|
||||
H_copy[0].data = trH1_filt.data
|
||||
H_copy[1].data = trH2_filt.data
|
||||
for i in range(len(wfefiles)):
|
||||
print
|
||||
'Horizontal component data found ...'
|
||||
print
|
||||
wfefiles[i]
|
||||
print
|
||||
wfnfiles[i]
|
||||
# merge streams
|
||||
H = read('%s' % wfefiles[i])
|
||||
H += read('%s' % wfnfiles[i])
|
||||
H_copy = H.copy()
|
||||
# filter and taper data
|
||||
trH1_filt = H[0].copy()
|
||||
trH2_filt = H[1].copy()
|
||||
trH1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
trH2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
trH1_filt.taper(max_percentage=0.05, type='hann')
|
||||
trH2_filt.taper(max_percentage=0.05, type='hann')
|
||||
H_copy[0].data = trH1_filt.data
|
||||
H_copy[1].data = trH2_filt.data
|
||||
|
||||
##############################################################
|
||||
#calculate ARH-CF using subclass ARHcf of class CharcteristicFunction
|
||||
arhcf = ARHcf(H_copy, cuttimes, tpredh, arhorder, tdeth, addnoise) #instance of ARHcf
|
||||
##############################################################
|
||||
#calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
|
||||
#class needs stream object => build it
|
||||
tr_arhaic = trH1_filt.copy()
|
||||
tr_arhaic.data = arhcf.getCF()
|
||||
H_copy[0].data = tr_arhaic.data
|
||||
#calculate ARH-AIC-CF
|
||||
arhaiccf = AICcf(H_copy, cuttimes, tpredh, 0, tdeth) #instance of AICcf
|
||||
##############################################################
|
||||
#get onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
|
||||
aicarhpick = AICPicker(arhaiccf, 1.5, TSNRarz, 4, 10, None, 0.1)
|
||||
###############################################################
|
||||
#get refined onset time from ARH-CF using class Picker
|
||||
arhpick = PragPicker(arhcf, 1.5, TSNRarz, 2.5, 10, 0.1, 0.05, aicarhpick.getpick())
|
||||
#get earliest and latest possible picks
|
||||
arhELpick = EarlLatePicker(arhcf, 1.5, TSNRarz, None, 10, None, None, arhpick.getpick())
|
||||
##############################################################
|
||||
# calculate ARH-CF using subclass ARHcf of class CharcteristicFunction
|
||||
arhcf = ARHcf(H_copy, cuttimes, tpredh, arhorder, tdeth, addnoise) # instance of ARHcf
|
||||
##############################################################
|
||||
# calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
|
||||
# class needs stream object => build it
|
||||
tr_arhaic = trH1_filt.copy()
|
||||
tr_arhaic.data = arhcf.getCF()
|
||||
H_copy[0].data = tr_arhaic.data
|
||||
# calculate ARH-AIC-CF
|
||||
arhaiccf = AICcf(H_copy, cuttimes, tpredh, 0, tdeth) # instance of AICcf
|
||||
##############################################################
|
||||
# get onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
|
||||
aicarhpick = AICPicker(arhaiccf, 1.5, TSNRarz, 4, 10, None, 0.1)
|
||||
###############################################################
|
||||
# get refined onset time from ARH-CF using class Picker
|
||||
arhpick = PragPicker(arhcf, 1.5, TSNRarz, 2.5, 10, 0.1, 0.05, aicarhpick.getpick())
|
||||
# get earliest and latest possible picks
|
||||
arhELpick = EarlLatePicker(arhcf, 1.5, TSNRarz, None, 10, None, None, arhpick.getpick())
|
||||
|
||||
# create stream with 3 traces
|
||||
# merge streams
|
||||
AllC = read('%s' % wfefiles[i])
|
||||
AllC += read('%s' % wfnfiles[i])
|
||||
AllC += read('%s' % wfzfiles[i])
|
||||
# filter and taper data
|
||||
All1_filt = AllC[0].copy()
|
||||
All2_filt = AllC[1].copy()
|
||||
All3_filt = AllC[2].copy()
|
||||
All1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
All2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
All3_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
|
||||
All1_filt.taper(max_percentage=0.05, type='hann')
|
||||
All2_filt.taper(max_percentage=0.05, type='hann')
|
||||
All3_filt.taper(max_percentage=0.05, type='hann')
|
||||
AllC[0].data = All1_filt.data
|
||||
AllC[1].data = All2_filt.data
|
||||
AllC[2].data = All3_filt.data
|
||||
# calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
|
||||
ar3ccf = AR3Ccf(AllC, cuttimes, tpredz, arhorder, tdetz, addnoise) # instance of AR3Ccf
|
||||
# get earliest and latest possible pick from initial ARH-pick
|
||||
ar3cELpick = EarlLatePicker(ar3ccf, 1.5, TSNRarz, None, 10, None, None, arhpick.getpick())
|
||||
##############################################################
|
||||
if iplot:
|
||||
# plot vertical trace
|
||||
plt.figure()
|
||||
tr = st[0]
|
||||
tdata = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
|
||||
p1, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
|
||||
p2, = plt.plot(hoscf.getTimeArray(), hoscf.getCF() / max(hoscf.getCF()), 'r')
|
||||
p3, = plt.plot(aiccf.getTimeArray(), aiccf.getCF() / max(aiccf.getCF()), 'b')
|
||||
p4, = plt.plot(arzcf.getTimeArray(), arzcf.getCF() / max(arzcf.getCF()), 'g')
|
||||
p5, = plt.plot(araiccf.getTimeArray(), araiccf.getCF() / max(araiccf.getCF()), 'y')
|
||||
plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'b--')
|
||||
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([hospick.getpick(), hospick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
|
||||
plt.plot([hospick.getpick() - 0.5, hospick.getpick() + 0.5], [1.3, 1.3], 'r')
|
||||
plt.plot([hospick.getpick() - 0.5, hospick.getpick() + 0.5], [-1.3, -1.3], 'r')
|
||||
plt.plot([hosELpick.getLpick(), hosELpick.getLpick()], [-1.1, 1.1], 'r--')
|
||||
plt.plot([hosELpick.getEpick(), hosELpick.getEpick()], [-1.1, 1.1], 'r--')
|
||||
plt.plot([aicarzpick.getpick(), aicarzpick.getpick()], [-1.2, 1.2], 'y', linewidth=2)
|
||||
plt.plot([aicarzpick.getpick() - 0.5, aicarzpick.getpick() + 0.5], [1.2, 1.2], 'y')
|
||||
plt.plot([aicarzpick.getpick() - 0.5, aicarzpick.getpick() + 0.5], [-1.2, -1.2], 'y')
|
||||
plt.plot([arzpick.getpick(), arzpick.getpick()], [-1.4, 1.4], 'g', linewidth=2)
|
||||
plt.plot([arzpick.getpick() - 0.5, arzpick.getpick() + 0.5], [1.4, 1.4], 'g')
|
||||
plt.plot([arzpick.getpick() - 0.5, arzpick.getpick() + 0.5], [-1.4, -1.4], 'g')
|
||||
plt.plot([arzELpick.getLpick(), arzELpick.getLpick()], [-1.2, 1.2], 'g--')
|
||||
plt.plot([arzELpick.getEpick(), arzELpick.getEpick()], [-1.2, 1.2], 'g--')
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title('%s, %s, CF-SNR=%7.2f, CF-Slope=%12.2f' % (tr.stats.station, \
|
||||
tr.stats.channel, aicpick.getSNR(),
|
||||
aicpick.getSlope()))
|
||||
plt.suptitle(tr.stats.starttime)
|
||||
plt.legend([p1, p2, p3, p4, p5], ['Data', 'HOS-CF', 'HOSAIC-CF', 'ARZ-CF', 'ARZAIC-CF'])
|
||||
# plot horizontal traces
|
||||
plt.figure(2)
|
||||
plt.subplot(2, 1, 1)
|
||||
tsteph = tpredh / 4
|
||||
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
|
||||
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
|
||||
tarhcf = np.arange(0, len(arhcf.getCF()) * tsteph, tsteph) + cuttimes[0] + tdeth + tpredh
|
||||
p21, = plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
|
||||
p22, = plt.plot(arhcf.getTimeArray(), arhcf.getCF() / max(arhcf.getCF()), 'r')
|
||||
p23, = plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF() / max(arhaiccf.getCF()))
|
||||
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'r')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'r')
|
||||
plt.plot([arhELpick.getLpick(), arhELpick.getLpick()], [-0.8, 0.8], 'r--')
|
||||
plt.plot([arhELpick.getEpick(), arhELpick.getEpick()], [-0.8, 0.8], 'r--')
|
||||
plt.plot([arhpick.getpick() + arhELpick.getPickError(), arhpick.getpick() + arhELpick.getPickError()], \
|
||||
[-0.2, 0.2], 'r--')
|
||||
plt.plot([arhpick.getpick() - arhELpick.getPickError(), arhpick.getpick() - arhELpick.getPickError()], \
|
||||
[-0.2, 0.2], 'r--')
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
|
||||
plt.suptitle(trH1_filt.stats.starttime)
|
||||
plt.legend([p21, p22, p23], ['Data', 'ARH-CF', 'ARHAIC-CF'])
|
||||
plt.subplot(2, 1, 2)
|
||||
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
|
||||
plt.plot(arhcf.getTimeArray(), arhcf.getCF() / max(arhcf.getCF()), 'r')
|
||||
plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF() / max(arhaiccf.getCF()))
|
||||
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'r')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'r')
|
||||
plt.plot([arhELpick.getLpick(), arhELpick.getLpick()], [-0.8, 0.8], 'r--')
|
||||
plt.plot([arhELpick.getEpick(), arhELpick.getEpick()], [-0.8, 0.8], 'r--')
|
||||
plt.plot([arhpick.getpick() + arhELpick.getPickError(), arhpick.getpick() + arhELpick.getPickError()], \
|
||||
[-0.2, 0.2], 'r--')
|
||||
plt.plot([arhpick.getpick() - arhELpick.getPickError(), arhpick.getpick() - arhELpick.getPickError()], \
|
||||
[-0.2, 0.2], 'r--')
|
||||
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.ylabel('Normalized Counts')
|
||||
# plot 3-component window
|
||||
plt.figure(3)
|
||||
plt.subplot(3, 1, 1)
|
||||
p31, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
|
||||
p32, = plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
|
||||
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
|
||||
plt.yticks([])
|
||||
plt.xticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([tr.stats.station, tr.stats.channel])
|
||||
plt.suptitle(trH1_filt.stats.starttime)
|
||||
plt.legend([p31, p32], ['Data', 'AR3C-CF'])
|
||||
plt.subplot(3, 1, 2)
|
||||
plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
|
||||
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
|
||||
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
|
||||
plt.yticks([])
|
||||
plt.xticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
|
||||
plt.subplot(3, 1, 3)
|
||||
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
|
||||
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
|
||||
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
|
||||
plt.yticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.show()
|
||||
raw_input()
|
||||
plt.close()
|
||||
|
||||
#create stream with 3 traces
|
||||
#merge streams
|
||||
AllC = read('%s' % wfefiles[i])
|
||||
AllC += read('%s' % wfnfiles[i])
|
||||
AllC += read('%s' % wfzfiles[i])
|
||||
#filter and taper data
|
||||
All1_filt = AllC[0].copy()
|
||||
All2_filt = AllC[1].copy()
|
||||
All3_filt = AllC[2].copy()
|
||||
All1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
All2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
All3_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
|
||||
All1_filt.taper(max_percentage=0.05, type='hann')
|
||||
All2_filt.taper(max_percentage=0.05, type='hann')
|
||||
All3_filt.taper(max_percentage=0.05, type='hann')
|
||||
AllC[0].data = All1_filt.data
|
||||
AllC[1].data = All2_filt.data
|
||||
AllC[2].data = All3_filt.data
|
||||
#calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
|
||||
ar3ccf = AR3Ccf(AllC, cuttimes, tpredz, arhorder, tdetz, addnoise) #instance of AR3Ccf
|
||||
#get earliest and latest possible pick from initial ARH-pick
|
||||
ar3cELpick = EarlLatePicker(ar3ccf, 1.5, TSNRarz, None, 10, None, None, arhpick.getpick())
|
||||
##############################################################
|
||||
if iplot:
|
||||
#plot vertical trace
|
||||
plt.figure()
|
||||
tr = st[0]
|
||||
tdata = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
|
||||
p1, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
|
||||
p2, = plt.plot(hoscf.getTimeArray(), hoscf.getCF() / max(hoscf.getCF()), 'r')
p3, = plt.plot(aiccf.getTimeArray(), aiccf.getCF()/max(aiccf.getCF()), 'b')
p4, = plt.plot(arzcf.getTimeArray(), arzcf.getCF()/max(arzcf.getCF()), 'g')
p5, = plt.plot(araiccf.getTimeArray(), araiccf.getCF()/max(araiccf.getCF()), 'y')
plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'b--')
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([hospick.getpick(), hospick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
plt.plot([hospick.getpick()-0.5, hospick.getpick()+0.5], [1.3, 1.3], 'r')
plt.plot([hospick.getpick()-0.5, hospick.getpick()+0.5], [-1.3, -1.3], 'r')
plt.plot([hosELpick.getLpick(), hosELpick.getLpick()], [-1.1, 1.1], 'r--')
plt.plot([hosELpick.getEpick(), hosELpick.getEpick()], [-1.1, 1.1], 'r--')
plt.plot([aicarzpick.getpick(), aicarzpick.getpick()], [-1.2, 1.2], 'y', linewidth=2)
plt.plot([aicarzpick.getpick()-0.5, aicarzpick.getpick()+0.5], [1.2, 1.2], 'y')
plt.plot([aicarzpick.getpick()-0.5, aicarzpick.getpick()+0.5], [-1.2, -1.2], 'y')
plt.plot([arzpick.getpick(), arzpick.getpick()], [-1.4, 1.4], 'g', linewidth=2)
plt.plot([arzpick.getpick()-0.5, arzpick.getpick()+0.5], [1.4, 1.4], 'g')
plt.plot([arzpick.getpick()-0.5, arzpick.getpick()+0.5], [-1.4, -1.4], 'g')
plt.plot([arzELpick.getLpick(), arzELpick.getLpick()], [-1.2, 1.2], 'g--')
plt.plot([arzELpick.getEpick(), arzELpick.getEpick()], [-1.2, 1.2], 'g--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
plt.title('%s, %s, CF-SNR=%7.2f, CF-Slope=%12.2f' % (tr.stats.station, \
tr.stats.channel, aicpick.getSNR(), aicpick.getSlope()))
plt.suptitle(tr.stats.starttime)
plt.legend([p1, p2, p3, p4, p5], ['Data', 'HOS-CF', 'HOSAIC-CF', 'ARZ-CF', 'ARZAIC-CF'])
#plot horizontal traces
plt.figure(2)
plt.subplot(2,1,1)
tsteph = tpredh / 4
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
tarhcf = np.arange(0, len(arhcf.getCF()) * tsteph, tsteph) + cuttimes[0] + tdeth +tpredh
p21, = plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
p22, = plt.plot(arhcf.getTimeArray(), arhcf.getCF()/max(arhcf.getCF()), 'r')
p23, = plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF()/max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'r')
plt.plot([arhELpick.getLpick(), arhELpick.getLpick()], [-0.8, 0.8], 'r--')
plt.plot([arhELpick.getEpick(), arhELpick.getEpick()], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + arhELpick.getPickError(), arhpick.getpick() + arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - arhELpick.getPickError(), arhpick.getpick() - arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p21, p22, p23], ['Data', 'ARH-CF', 'ARHAIC-CF'])
plt.subplot(2,1,2)
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
plt.plot(arhcf.getTimeArray(), arhcf.getCF()/max(arhcf.getCF()), 'r')
plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF()/max(arhaiccf.getCF()))
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'r')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'r')
plt.plot([arhELpick.getLpick(), arhELpick.getLpick()], [-0.8, 0.8], 'r--')
plt.plot([arhELpick.getEpick(), arhELpick.getEpick()], [-0.8, 0.8], 'r--')
plt.plot([arhpick.getpick() + arhELpick.getPickError(), arhpick.getpick() + arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.plot([arhpick.getpick() - arhELpick.getPickError(), arhpick.getpick() - arhELpick.getPickError()], \
[-0.2, 0.2], 'r--')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.xlabel('Time [s]')
plt.ylabel('Normalized Counts')
#plot 3-component window
plt.figure(3)
plt.subplot(3,1,1)
p31, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
p32, = plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([tr.stats.station, tr.stats.channel])
plt.suptitle(trH1_filt.stats.starttime)
plt.legend([p31, p32], ['Data', 'AR3C-CF'])
plt.subplot(3,1,2)
plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.xticks([])
plt.ylabel('Normalized Counts')
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
plt.subplot(3,1,3)
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
plt.plot([ar3cELpick.getLpick(), ar3cELpick.getLpick()], [-0.8, 0.8], 'b--')
plt.plot([ar3cELpick.getEpick(), ar3cELpick.getEpick()], [-0.8, 0.8], 'b--')
plt.yticks([])
plt.ylabel('Normalized Counts')
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
plt.xlabel('Time [s]')
plt.show()
raw_input()
plt.close()

if __name__ == '__main__':
parser = argparse.ArgumentParser()
@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-

from pylot.core.util.pdf import ProbabilityDensityFunction

pdf = ProbabilityDensityFunction.from_pick(0.34, 0.5, 0.54, type='exp')
pdf2 = ProbabilityDensityFunction.from_pick(0.34, 0.5, 0.54, type='exp')
diff = pdf - pdf2
@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-

import argparse

import numpy
from pylot.core.pick.utils import getnoisewin

@ -11,6 +11,7 @@
"""

import argparse

import obspy
from pylot.core.pick.utils import earllatepicker

@ -8,6 +8,7 @@
"""

import argparse

import obspy
from pylot.core.pick.utils import fmpicker

@ -3,8 +3,8 @@

import argparse

from pylot.core.util.version import get_git_version as _getVersionString
from pylot.core.io.phases import reassess_pilot_db
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
__author__ = 'S. Wehling-Benatelli'
@ -3,8 +3,8 @@

import argparse

from pylot.core.util.version import get_git_version as _getVersionString
from pylot.core.io.phases import reassess_pilot_event
from pylot.core.util.version import get_git_version as _getVersionString

__version__ = _getVersionString()
__author__ = 'S. Wehling-Benatelli'
@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-

import argparse

import numpy
from pylot.core.pick.utils import getsignalwin

@ -9,6 +9,7 @@
"""

import argparse

import obspy
from pylot.core.pick.utils import getSNR

@ -27,4 +28,5 @@ if __name__ == "__main__":
'are calculated',
dest='time')
args = parser.parse_args()
print getSNR(args.data, args.tsnr, args.time)
print
getSNR(args.data, args.tsnr, args.time)
@ -6,295 +6,302 @@
Only for test purposes!
"""

from obspy.core import read
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.picker import AutoPicker
from pylot.core.pick.utils import *
import glob
import argparse
import glob

from obspy.core import read
from pylot.core.pick.utils import *


def run_makeCF(project, database, event, iplot, station=None):
#parameters for CF calculation
t2 = 7 #length of moving window for HOS calculation [sec]
p = 4 #order of HOS
cuttimes = [10, 50] #start and end time for CF calculation
bpz = [2, 30] #corner frequencies of bandpass filter, vertical component
bph = [2, 15] #corner frequencies of bandpass filter, horizontal components
tdetz= 1.2 #length of AR-determination window [sec], vertical component
tdeth= 0.8 #length of AR-determination window [sec], horizontal components
tpredz = 0.4 #length of AR-prediction window [sec], vertical component
tpredh = 0.4 #length of AR-prediction window [sec], horizontal components
addnoise = 0.001 #add noise to seismogram for stable AR prediction
arzorder = 2 #chosen order of AR process, vertical component
arhorder = 4 #chosen order of AR process, horizontal components
TSNRhos = [5, 0.5, 1, .6] #window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
#from HOS-CF [noise window, safety gap, signal window, slope determination window]
TSNRarz = [5, 0.5, 1, 1.0] #window lengths [s] for calculating SNR for earliest/lates pick and quality assessment
#from ARZ-CF
#get waveform data
# parameters for CF calculation
t2 = 7 # length of moving window for HOS calculation [sec]
p = 4 # order of HOS
cuttimes = [10, 50] # start and end time for CF calculation
bpz = [2, 30] # corner frequencies of bandpass filter, vertical component
bph = [2, 15] # corner frequencies of bandpass filter, horizontal components
tdetz = 1.2 # length of AR-determination window [sec], vertical component
tdeth = 0.8 # length of AR-determination window [sec], horizontal components
tpredz = 0.4 # length of AR-prediction window [sec], vertical component
tpredh = 0.4 # length of AR-prediction window [sec], horizontal components
addnoise = 0.001 # add noise to seismogram for stable AR prediction
arzorder = 2 # chosen order of AR process, vertical component
arhorder = 4 # chosen order of AR process, horizontal components
TSNRhos = [5, 0.5, 1, .6] # window lengths [s] for calculating SNR for earliest/latest pick and quality assessment
# from HOS-CF [noise window, safety gap, signal window, slope determination window]
TSNRarz = [5, 0.5, 1, 1.0] # window lengths [s] for calculating SNR for earliest/lates pick and quality assessment
# from ARZ-CF
# get waveform data
if station:
dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HZ.msd' % (project, database, event, station)
dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HE.msd' % (project, database, event, station)
dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HN.msd' % (project, database, event, station)
#dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_z.gse' % (project, database, event, station)
#dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_e.gse' % (project, database, event, station)
#dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_n.gse' % (project, database, event, station)
dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HZ.msd' % (project, database, event, station)
dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HE.msd' % (project, database, event, station)
dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*HN.msd' % (project, database, event, station)
# dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_z.gse' % (project, database, event, station)
# dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_e.gse' % (project, database, event, station)
# dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/%s*_n.gse' % (project, database, event, station)
else:
dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HZ.msd' % (project, database, event)
dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HE.msd' % (project, database, event)
dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HN.msd' % (project, database, event)
dpz = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HZ.msd' % (project, database, event)
dpe = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HE.msd' % (project, database, event)
dpn = '/DATA/%s/EVENT_DATA/LOCAL/%s/%s/*HN.msd' % (project, database, event)
wfzfiles = glob.glob(dpz)
wfefiles = glob.glob(dpe)
wfnfiles = glob.glob(dpn)
if wfzfiles:
for i in range(len(wfzfiles)):
print 'Vertical component data found ...'
print wfzfiles[i]
st = read('%s' % wfzfiles[i])
st_copy = st.copy()
#filter and taper data
tr_filt = st[0].copy()
tr_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
tr_filt.taper(max_percentage=0.05, type='hann')
st_copy[0].data = tr_filt.data
##############################################################
#calculate HOS-CF using subclass HOScf of class CharacteristicFunction
hoscf = HOScf(st_copy, cuttimes, t2, p) #instance of HOScf
##############################################################
#calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_aic = tr_filt.copy()
tr_aic.data = hoscf.getCF()
st_copy[0].data = tr_aic.data
aiccf = AICcf(st_copy, cuttimes) #instance of AICcf
##############################################################
#get prelimenary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
aicpick = AICPicker(aiccf, TSNRhos, 3, 10, None, 0.1)
##############################################################
#get refined onset time from HOS-CF using class Picker
hospick = PragPicker(hoscf, TSNRhos, 2, 10, 0.001, 0.2, aicpick.getpick())
#############################################################
#get earliest and latest possible picks
st_copy[0].data = tr_filt.data
[lpickhos, epickhos, pickerrhos] = earllatepicker(st_copy, 1.5, TSNRhos, hospick.getpick(), 10)
#############################################################
#get SNR
[SNR, SNRdB] = getSNR(st_copy, TSNRhos, hospick.getpick())
print 'SNR:', SNR, 'SNR[dB]:', SNRdB
##########################################################
#get first motion of onset
hosfm = fmpicker(st, st_copy, 0.2, hospick.getpick(), 11)
##############################################################
#calculate ARZ-CF using subclass ARZcf of class CharcteristicFunction
arzcf = ARZcf(st, cuttimes, tpredz, arzorder, tdetz, addnoise) #instance of ARZcf
##############################################################
#calculate AIC-ARZ-CF using subclass AICcf of class CharacteristicFunction
#class needs stream object => build it
tr_arzaic = tr_filt.copy()
tr_arzaic.data = arzcf.getCF()
st_copy[0].data = tr_arzaic.data
araiccf = AICcf(st_copy, cuttimes, tpredz, 0, tdetz) #instance of AICcf
##############################################################
#get onset time from AIC-ARZ-CF using subclass AICPicker of class AutoPicking
aicarzpick = AICPicker(araiccf, TSNRarz, 2, 10, None, 0.1)
##############################################################
#get refined onset time from ARZ-CF using class Picker
arzpick = PragPicker(arzcf, TSNRarz, 2.0, 10, 0.1, 0.05, aicarzpick.getpick())
#get earliest and latest possible picks
st_copy[0].data = tr_filt.data
[lpickarz, epickarz, pickerrarz] = earllatepicker(st_copy, 1.5, TSNRarz, arzpick.getpick(), 10)
for i in range(len(wfzfiles)):
|
||||
print
|
||||
'Vertical component data found ...'
|
||||
print
|
||||
wfzfiles[i]
|
||||
st = read('%s' % wfzfiles[i])
|
||||
st_copy = st.copy()
|
||||
# filter and taper data
|
||||
tr_filt = st[0].copy()
|
||||
tr_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
|
||||
tr_filt.taper(max_percentage=0.05, type='hann')
|
||||
st_copy[0].data = tr_filt.data
|
||||
##############################################################
|
||||
# calculate HOS-CF using subclass HOScf of class CharacteristicFunction
|
||||
hoscf = HOScf(st_copy, cuttimes, t2, p) # instance of HOScf
|
||||
##############################################################
|
||||
# calculate AIC-HOS-CF using subclass AICcf of class CharacteristicFunction
|
||||
# class needs stream object => build it
|
||||
tr_aic = tr_filt.copy()
|
||||
tr_aic.data = hoscf.getCF()
|
||||
st_copy[0].data = tr_aic.data
|
||||
aiccf = AICcf(st_copy, cuttimes) # instance of AICcf
|
||||
##############################################################
|
||||
# get prelimenary onset time from AIC-HOS-CF using subclass AICPicker of class AutoPicking
|
||||
aicpick = AICPicker(aiccf, TSNRhos, 3, 10, None, 0.1)
|
||||
##############################################################
|
||||
# get refined onset time from HOS-CF using class Picker
|
||||
hospick = PragPicker(hoscf, TSNRhos, 2, 10, 0.001, 0.2, aicpick.getpick())
|
||||
#############################################################
|
||||
# get earliest and latest possible picks
|
||||
st_copy[0].data = tr_filt.data
|
||||
[lpickhos, epickhos, pickerrhos] = earllatepicker(st_copy, 1.5, TSNRhos, hospick.getpick(), 10)
|
||||
#############################################################
|
||||
# get SNR
|
||||
[SNR, SNRdB] = getSNR(st_copy, TSNRhos, hospick.getpick())
|
||||
print
|
||||
'SNR:', SNR, 'SNR[dB]:', SNRdB
|
||||
##########################################################
|
||||
# get first motion of onset
|
||||
hosfm = fmpicker(st, st_copy, 0.2, hospick.getpick(), 11)
|
||||
##############################################################
|
||||
# calculate ARZ-CF using subclass ARZcf of class CharcteristicFunction
|
||||
arzcf = ARZcf(st, cuttimes, tpredz, arzorder, tdetz, addnoise) # instance of ARZcf
|
||||
##############################################################
|
||||
# calculate AIC-ARZ-CF using subclass AICcf of class CharacteristicFunction
|
||||
# class needs stream object => build it
|
||||
tr_arzaic = tr_filt.copy()
|
||||
tr_arzaic.data = arzcf.getCF()
|
||||
st_copy[0].data = tr_arzaic.data
|
||||
araiccf = AICcf(st_copy, cuttimes, tpredz, 0, tdetz) # instance of AICcf
|
||||
##############################################################
|
||||
# get onset time from AIC-ARZ-CF using subclass AICPicker of class AutoPicking
|
||||
aicarzpick = AICPicker(araiccf, TSNRarz, 2, 10, None, 0.1)
|
||||
##############################################################
|
||||
# get refined onset time from ARZ-CF using class Picker
|
||||
arzpick = PragPicker(arzcf, TSNRarz, 2.0, 10, 0.1, 0.05, aicarzpick.getpick())
|
||||
# get earliest and latest possible picks
|
||||
st_copy[0].data = tr_filt.data
|
||||
[lpickarz, epickarz, pickerrarz] = earllatepicker(st_copy, 1.5, TSNRarz, arzpick.getpick(), 10)
|
||||
elif not wfzfiles:
|
||||
print 'No vertical component data found!'
|
||||
print
|
||||
'No vertical component data found!'
|
||||
|
||||
if wfefiles and wfnfiles:
|
||||
for i in range(len(wfefiles)):
|
||||
print 'Horizontal component data found ...'
|
||||
print wfefiles[i]
|
||||
print wfnfiles[i]
|
||||
#merge streams
|
||||
H = read('%s' % wfefiles[i])
|
||||
H += read('%s' % wfnfiles[i])
|
||||
H_copy = H.copy()
|
||||
#filter and taper data
|
||||
trH1_filt = H[0].copy()
|
||||
trH2_filt = H[1].copy()
|
||||
trH1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
trH2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
trH1_filt.taper(max_percentage=0.05, type='hann')
|
||||
trH2_filt.taper(max_percentage=0.05, type='hann')
|
||||
H_copy[0].data = trH1_filt.data
|
||||
H_copy[1].data = trH2_filt.data
|
||||
for i in range(len(wfefiles)):
|
||||
print
|
||||
'Horizontal component data found ...'
|
||||
print
|
||||
wfefiles[i]
|
||||
print
|
||||
wfnfiles[i]
|
||||
# merge streams
|
||||
H = read('%s' % wfefiles[i])
|
||||
H += read('%s' % wfnfiles[i])
|
||||
H_copy = H.copy()
|
||||
# filter and taper data
|
||||
trH1_filt = H[0].copy()
|
||||
trH2_filt = H[1].copy()
|
||||
trH1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
trH2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
trH1_filt.taper(max_percentage=0.05, type='hann')
|
||||
trH2_filt.taper(max_percentage=0.05, type='hann')
|
||||
H_copy[0].data = trH1_filt.data
|
||||
H_copy[1].data = trH2_filt.data
|
||||
|
||||
##############################################################
|
||||
#calculate ARH-CF using subclass ARHcf of class CharcteristicFunction
|
||||
arhcf = ARHcf(H_copy, cuttimes, tpredh, arhorder, tdeth, addnoise) #instance of ARHcf
|
||||
##############################################################
|
||||
#calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
|
||||
#class needs stream object => build it
|
||||
tr_arhaic = trH1_filt.copy()
|
||||
tr_arhaic.data = arhcf.getCF()
|
||||
H_copy[0].data = tr_arhaic.data
|
||||
#calculate ARH-AIC-CF
|
||||
arhaiccf = AICcf(H_copy, cuttimes, tpredh, 0, tdeth) #instance of AICcf
|
||||
##############################################################
|
||||
#get onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
|
||||
aicarhpick = AICPicker(arhaiccf, TSNRarz, 4, 10, None, 0.1)
|
||||
###############################################################
|
||||
#get refined onset time from ARH-CF using class Picker
|
||||
arhpick = PragPicker(arhcf, TSNRarz, 2.5, 10, 0.1, 0.05, aicarhpick.getpick())
|
||||
#get earliest and latest possible picks
|
||||
H_copy[0].data = trH1_filt.data
|
||||
[lpickarh1, epickarh1, pickerrarh1] = earllatepicker(H_copy, 1.5, TSNRarz, arhpick.getpick(), 10)
|
||||
H_copy[0].data = trH2_filt.data
|
||||
[lpickarh2, epickarh2, pickerrarh2] = earllatepicker(H_copy, 1.5, TSNRarz, arhpick.getpick(), 10)
|
||||
#get earliest pick of both earliest possible picks
|
||||
epick = [epickarh1, epickarh2]
|
||||
lpick = [lpickarh1, lpickarh2]
|
||||
pickerr = [pickerrarh1, pickerrarh2]
|
||||
ipick =np.argmin([epickarh1, epickarh2])
|
||||
epickarh = epick[ipick]
|
||||
lpickarh = lpick[ipick]
|
||||
pickerrarh = pickerr[ipick]
|
||||
##############################################################
|
||||
# calculate ARH-CF using subclass ARHcf of class CharcteristicFunction
|
||||
arhcf = ARHcf(H_copy, cuttimes, tpredh, arhorder, tdeth, addnoise) # instance of ARHcf
|
||||
##############################################################
|
||||
# calculate AIC-ARH-CF using subclass AICcf of class CharacteristicFunction
|
||||
# class needs stream object => build it
|
||||
tr_arhaic = trH1_filt.copy()
|
||||
tr_arhaic.data = arhcf.getCF()
|
||||
H_copy[0].data = tr_arhaic.data
|
||||
# calculate ARH-AIC-CF
|
||||
arhaiccf = AICcf(H_copy, cuttimes, tpredh, 0, tdeth) # instance of AICcf
|
||||
##############################################################
|
||||
# get onset time from AIC-ARH-CF using subclass AICPicker of class AutoPicking
|
||||
aicarhpick = AICPicker(arhaiccf, TSNRarz, 4, 10, None, 0.1)
|
||||
###############################################################
|
||||
# get refined onset time from ARH-CF using class Picker
|
||||
arhpick = PragPicker(arhcf, TSNRarz, 2.5, 10, 0.1, 0.05, aicarhpick.getpick())
|
||||
# get earliest and latest possible picks
|
||||
H_copy[0].data = trH1_filt.data
|
||||
[lpickarh1, epickarh1, pickerrarh1] = earllatepicker(H_copy, 1.5, TSNRarz, arhpick.getpick(), 10)
|
||||
H_copy[0].data = trH2_filt.data
|
||||
[lpickarh2, epickarh2, pickerrarh2] = earllatepicker(H_copy, 1.5, TSNRarz, arhpick.getpick(), 10)
|
||||
# get earliest pick of both earliest possible picks
|
||||
epick = [epickarh1, epickarh2]
|
||||
lpick = [lpickarh1, lpickarh2]
|
||||
pickerr = [pickerrarh1, pickerrarh2]
|
||||
ipick = np.argmin([epickarh1, epickarh2])
|
||||
epickarh = epick[ipick]
|
||||
lpickarh = lpick[ipick]
|
||||
pickerrarh = pickerr[ipick]
|
||||
|
||||
# create stream with 3 traces
|
||||
# merge streams
|
||||
AllC = read('%s' % wfefiles[i])
|
||||
AllC += read('%s' % wfnfiles[i])
|
||||
AllC += read('%s' % wfzfiles[i])
|
||||
# filter and taper data
|
||||
All1_filt = AllC[0].copy()
|
||||
All2_filt = AllC[1].copy()
|
||||
All3_filt = AllC[2].copy()
|
||||
All1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
All2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
All3_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
|
||||
All1_filt.taper(max_percentage=0.05, type='hann')
|
||||
All2_filt.taper(max_percentage=0.05, type='hann')
|
||||
All3_filt.taper(max_percentage=0.05, type='hann')
|
||||
AllC[0].data = All1_filt.data
|
||||
AllC[1].data = All2_filt.data
|
||||
AllC[2].data = All3_filt.data
|
||||
# calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
|
||||
ar3ccf = AR3Ccf(AllC, cuttimes, tpredz, arhorder, tdetz, addnoise) # instance of AR3Ccf
|
||||
##############################################################
|
||||
if iplot:
|
||||
# plot vertical trace
|
||||
plt.figure()
|
||||
tr = st[0]
|
||||
tdata = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
|
||||
p1, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
|
||||
p2, = plt.plot(hoscf.getTimeArray(), hoscf.getCF() / max(hoscf.getCF()), 'r')
|
||||
p3, = plt.plot(aiccf.getTimeArray(), aiccf.getCF() / max(aiccf.getCF()), 'b')
|
||||
p4, = plt.plot(arzcf.getTimeArray(), arzcf.getCF() / max(arzcf.getCF()), 'g')
|
||||
p5, = plt.plot(araiccf.getTimeArray(), araiccf.getCF() / max(araiccf.getCF()), 'y')
|
||||
plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'b--')
|
||||
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([aicpick.getpick() - 0.5, aicpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([hospick.getpick(), hospick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
|
||||
plt.plot([hospick.getpick() - 0.5, hospick.getpick() + 0.5], [1.3, 1.3], 'r')
|
||||
plt.plot([hospick.getpick() - 0.5, hospick.getpick() + 0.5], [-1.3, -1.3], 'r')
|
||||
plt.plot([lpickhos, lpickhos], [-1.1, 1.1], 'r--')
|
||||
plt.plot([epickhos, epickhos], [-1.1, 1.1], 'r--')
|
||||
plt.plot([aicarzpick.getpick(), aicarzpick.getpick()], [-1.2, 1.2], 'y', linewidth=2)
|
||||
plt.plot([aicarzpick.getpick() - 0.5, aicarzpick.getpick() + 0.5], [1.2, 1.2], 'y')
|
||||
plt.plot([aicarzpick.getpick() - 0.5, aicarzpick.getpick() + 0.5], [-1.2, -1.2], 'y')
|
||||
plt.plot([arzpick.getpick(), arzpick.getpick()], [-1.4, 1.4], 'g', linewidth=2)
|
||||
plt.plot([arzpick.getpick() - 0.5, arzpick.getpick() + 0.5], [1.4, 1.4], 'g')
|
||||
plt.plot([arzpick.getpick() - 0.5, arzpick.getpick() + 0.5], [-1.4, -1.4], 'g')
|
||||
plt.plot([lpickarz, lpickarz], [-1.2, 1.2], 'g--')
|
||||
plt.plot([epickarz, epickarz], [-1.2, 1.2], 'g--')
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title('%s, %s, CF-SNR=%7.2f, CF-Slope=%12.2f' % (tr.stats.station,
|
||||
tr.stats.channel, aicpick.getSNR(),
|
||||
aicpick.getSlope()))
|
||||
plt.suptitle(tr.stats.starttime)
|
||||
plt.legend([p1, p2, p3, p4, p5], ['Data', 'HOS-CF', 'HOSAIC-CF', 'ARZ-CF', 'ARZAIC-CF'])
|
||||
# plot horizontal traces
|
||||
plt.figure(2)
|
||||
plt.subplot(2, 1, 1)
|
||||
tsteph = tpredh / 4
|
||||
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
|
||||
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
|
||||
tarhcf = np.arange(0, len(arhcf.getCF()) * tsteph, tsteph) + cuttimes[0] + tdeth + tpredh
|
||||
p21, = plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
|
||||
p22, = plt.plot(arhcf.getTimeArray(), arhcf.getCF() / max(arhcf.getCF()), 'r')
|
||||
p23, = plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF() / max(arhaiccf.getCF()))
|
||||
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'r')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'r')
|
||||
plt.plot([lpickarh, lpickarh], [-0.8, 0.8], 'r--')
|
||||
plt.plot([epickarh, epickarh], [-0.8, 0.8], 'r--')
|
||||
plt.plot([arhpick.getpick() + pickerrarh, arhpick.getpick() + pickerrarh], [-0.2, 0.2], 'r--')
|
||||
plt.plot([arhpick.getpick() - pickerrarh, arhpick.getpick() - pickerrarh], [-0.2, 0.2], 'r--')
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
|
||||
plt.suptitle(trH1_filt.stats.starttime)
|
||||
plt.legend([p21, p22, p23], ['Data', 'ARH-CF', 'ARHAIC-CF'])
|
||||
plt.subplot(2, 1, 2)
|
||||
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
|
||||
plt.plot(arhcf.getTimeArray(), arhcf.getCF() / max(arhcf.getCF()), 'r')
|
||||
plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF() / max(arhaiccf.getCF()))
|
||||
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick() - 0.5, aicarhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'r')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'r')
|
||||
plt.plot([lpickarh, lpickarh], [-0.8, 0.8], 'r--')
|
||||
plt.plot([epickarh, epickarh], [-0.8, 0.8], 'r--')
|
||||
plt.plot([arhpick.getpick() + pickerrarh, arhpick.getpick() + pickerrarh], [-0.2, 0.2], 'r--')
|
||||
plt.plot([arhpick.getpick() - pickerrarh, arhpick.getpick() - pickerrarh], [-0.2, 0.2], 'r--')
|
||||
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.ylabel('Normalized Counts')
|
||||
# plot 3-component window
|
||||
plt.figure(3)
|
||||
plt.subplot(3, 1, 1)
|
||||
p31, = plt.plot(tdata, tr_filt.data / max(tr_filt.data), 'k')
|
||||
p32, = plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.yticks([])
|
||||
plt.xticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([tr.stats.station, tr.stats.channel])
|
||||
plt.suptitle(trH1_filt.stats.starttime)
|
||||
plt.legend([p31, p32], ['Data', 'AR3C-CF'])
|
||||
plt.subplot(3, 1, 2)
|
||||
plt.plot(th1data, trH1_filt.data / max(trH1_filt.data), 'k')
|
||||
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.yticks([])
|
||||
plt.xticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
|
||||
plt.subplot(3, 1, 3)
|
||||
plt.plot(th2data, trH2_filt.data / max(trH2_filt.data), 'k')
|
||||
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF() / max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick() - 0.5, arhpick.getpick() + 0.5], [1, 1], 'b')
|
||||
plt.yticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.show()
|
||||
raw_input()
|
||||
plt.close()
|
||||
|
||||
#create stream with 3 traces
|
||||
#merge streams
|
||||
AllC = read('%s' % wfefiles[i])
|
||||
AllC += read('%s' % wfnfiles[i])
|
||||
AllC += read('%s' % wfzfiles[i])
|
||||
#filter and taper data
|
||||
All1_filt = AllC[0].copy()
|
||||
All2_filt = AllC[1].copy()
|
||||
All3_filt = AllC[2].copy()
|
||||
All1_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
All2_filt.filter('bandpass', freqmin=bph[0], freqmax=bph[1], zerophase=False)
|
||||
All3_filt.filter('bandpass', freqmin=bpz[0], freqmax=bpz[1], zerophase=False)
|
||||
All1_filt.taper(max_percentage=0.05, type='hann')
|
||||
All2_filt.taper(max_percentage=0.05, type='hann')
|
||||
All3_filt.taper(max_percentage=0.05, type='hann')
|
||||
AllC[0].data = All1_filt.data
|
||||
AllC[1].data = All2_filt.data
|
||||
AllC[2].data = All3_filt.data
|
||||
#calculate AR3C-CF using subclass AR3Ccf of class CharacteristicFunction
|
||||
ar3ccf = AR3Ccf(AllC, cuttimes, tpredz, arhorder, tdetz, addnoise) #instance of AR3Ccf
|
||||
##############################################################
|
||||
if iplot:
|
||||
#plot vertical trace
|
||||
plt.figure()
|
||||
tr = st[0]
|
||||
tdata = np.arange(0, tr.stats.npts / tr.stats.sampling_rate, tr.stats.delta)
|
||||
p1, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
|
||||
p2, = plt.plot(hoscf.getTimeArray(), hoscf.getCF() / max(hoscf.getCF()), 'r')
|
||||
p3, = plt.plot(aiccf.getTimeArray(), aiccf.getCF()/max(aiccf.getCF()), 'b')
|
||||
p4, = plt.plot(arzcf.getTimeArray(), arzcf.getCF()/max(arzcf.getCF()), 'g')
|
||||
p5, = plt.plot(araiccf.getTimeArray(), araiccf.getCF()/max(araiccf.getCF()), 'y')
|
||||
plt.plot([aicpick.getpick(), aicpick.getpick()], [-1, 1], 'b--')
|
||||
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [1, 1], 'b')
|
||||
plt.plot([aicpick.getpick()-0.5, aicpick.getpick()+0.5], [-1, -1], 'b')
|
||||
plt.plot([hospick.getpick(), hospick.getpick()], [-1.3, 1.3], 'r', linewidth=2)
|
||||
plt.plot([hospick.getpick()-0.5, hospick.getpick()+0.5], [1.3, 1.3], 'r')
|
||||
plt.plot([hospick.getpick()-0.5, hospick.getpick()+0.5], [-1.3, -1.3], 'r')
|
||||
plt.plot([lpickhos, lpickhos], [-1.1, 1.1], 'r--')
|
||||
plt.plot([epickhos, epickhos], [-1.1, 1.1], 'r--')
|
||||
plt.plot([aicarzpick.getpick(), aicarzpick.getpick()], [-1.2, 1.2], 'y', linewidth=2)
|
||||
plt.plot([aicarzpick.getpick()-0.5, aicarzpick.getpick()+0.5], [1.2, 1.2], 'y')
|
||||
plt.plot([aicarzpick.getpick()-0.5, aicarzpick.getpick()+0.5], [-1.2, -1.2], 'y')
|
||||
plt.plot([arzpick.getpick(), arzpick.getpick()], [-1.4, 1.4], 'g', linewidth=2)
|
||||
plt.plot([arzpick.getpick()-0.5, arzpick.getpick()+0.5], [1.4, 1.4], 'g')
|
||||
plt.plot([arzpick.getpick()-0.5, arzpick.getpick()+0.5], [-1.4, -1.4], 'g')
|
||||
plt.plot([lpickarz, lpickarz], [-1.2, 1.2], 'g--')
|
||||
plt.plot([epickarz, epickarz], [-1.2, 1.2], 'g--')
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title('%s, %s, CF-SNR=%7.2f, CF-Slope=%12.2f' % (tr.stats.station,
|
||||
tr.stats.channel, aicpick.getSNR(), aicpick.getSlope()))
|
||||
plt.suptitle(tr.stats.starttime)
|
||||
plt.legend([p1, p2, p3, p4, p5], ['Data', 'HOS-CF', 'HOSAIC-CF', 'ARZ-CF', 'ARZAIC-CF'])
|
||||
#plot horizontal traces
|
||||
plt.figure(2)
|
||||
plt.subplot(2,1,1)
|
||||
tsteph = tpredh / 4
|
||||
th1data = np.arange(0, trH1_filt.stats.npts / trH1_filt.stats.sampling_rate, trH1_filt.stats.delta)
|
||||
th2data = np.arange(0, trH2_filt.stats.npts / trH2_filt.stats.sampling_rate, trH2_filt.stats.delta)
|
||||
tarhcf = np.arange(0, len(arhcf.getCF()) * tsteph, tsteph) + cuttimes[0] + tdeth +tpredh
|
||||
p21, = plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
|
||||
p22, = plt.plot(arhcf.getTimeArray(), arhcf.getCF()/max(arhcf.getCF()), 'r')
|
||||
p23, = plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF()/max(arhaiccf.getCF()))
|
||||
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'r')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'r')
|
||||
plt.plot([lpickarh, lpickarh], [-0.8, 0.8], 'r--')
|
||||
plt.plot([epickarh, epickarh], [-0.8, 0.8], 'r--')
|
||||
plt.plot([arhpick.getpick() + pickerrarh, arhpick.getpick() + pickerrarh], [-0.2, 0.2], 'r--')
|
||||
plt.plot([arhpick.getpick() - pickerrarh, arhpick.getpick() - pickerrarh], [-0.2, 0.2], 'r--')
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
|
||||
plt.suptitle(trH1_filt.stats.starttime)
|
||||
plt.legend([p21, p22, p23], ['Data', 'ARH-CF', 'ARHAIC-CF'])
|
||||
plt.subplot(2,1,2)
|
||||
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
|
||||
plt.plot(arhcf.getTimeArray(), arhcf.getCF()/max(arhcf.getCF()), 'r')
|
||||
plt.plot(arhaiccf.getTimeArray(), arhaiccf.getCF()/max(arhaiccf.getCF()))
|
||||
plt.plot([aicarhpick.getpick(), aicarhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [1, 1], 'b')
|
||||
plt.plot([aicarhpick.getpick()-0.5, aicarhpick.getpick()+0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'r')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'r')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'r')
|
||||
plt.plot([lpickarh, lpickarh], [-0.8, 0.8], 'r--')
|
||||
plt.plot([epickarh, epickarh], [-0.8, 0.8], 'r--')
|
||||
plt.plot([arhpick.getpick() + pickerrarh, arhpick.getpick() + pickerrarh], [-0.2, 0.2], 'r--')
|
||||
plt.plot([arhpick.getpick() - pickerrarh, arhpick.getpick() - pickerrarh], [-0.2, 0.2], 'r--')
|
||||
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
|
||||
plt.yticks([])
|
||||
plt.ylim([-1.5, 1.5])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.ylabel('Normalized Counts')
|
||||
#plot 3-component window
|
||||
plt.figure(3)
|
||||
plt.subplot(3,1,1)
|
||||
p31, = plt.plot(tdata, tr_filt.data/max(tr_filt.data), 'k')
|
||||
p32, = plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
|
||||
plt.yticks([])
|
||||
plt.xticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([tr.stats.station, tr.stats.channel])
|
||||
plt.suptitle(trH1_filt.stats.starttime)
|
||||
plt.legend([p31, p32], ['Data', 'AR3C-CF'])
|
||||
plt.subplot(3,1,2)
|
||||
plt.plot(th1data, trH1_filt.data/max(trH1_filt.data), 'k')
|
||||
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
|
||||
plt.yticks([])
|
||||
plt.xticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH1_filt.stats.station, trH1_filt.stats.channel])
|
||||
plt.subplot(3,1,3)
|
||||
plt.plot(th2data, trH2_filt.data/max(trH2_filt.data), 'k')
|
||||
plt.plot(ar3ccf.getTimeArray(), ar3ccf.getCF()/max(ar3ccf.getCF()), 'r')
|
||||
plt.plot([arhpick.getpick(), arhpick.getpick()], [-1, 1], 'b')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [-1, -1], 'b')
|
||||
plt.plot([arhpick.getpick()-0.5, arhpick.getpick()+0.5], [1, 1], 'b')
|
||||
plt.yticks([])
|
||||
plt.ylabel('Normalized Counts')
|
||||
plt.title([trH2_filt.stats.station, trH2_filt.stats.channel])
|
||||
plt.xlabel('Time [s]')
|
||||
plt.show()
|
||||
raw_input()
|
||||
plt.close()

parser = argparse.ArgumentParser()
parser.add_argument('--project', type=str, help='project name (e.g. Insheim)')