Resolved conflicts fmtomoUtils

Marcel Paffrath 2016-05-02 11:28:56 +02:00
commit ddb1ad4a15
38 changed files with 2332 additions and 1398 deletions

View File

@ -43,8 +43,10 @@ from obspy import UTCDateTime
from pylot.core.read.data import Data
from pylot.core.read.inputs import FilterOptions, AutoPickParameter
from pylot.core.pick.autopick import autopickevent
from pylot.core.read.io import picks_from_evt
from pylot.core.loc.nll import locate as locateNll
from pylot.core.util.defaults import FILTERDEFAULTS
from pylot.core.util.defaults import FILTERDEFAULTS, COMPNAME_MAP,\
AUTOMATIC_DEFAULTS
from pylot.core.util.errors import FormatError, DatastructureError, \
OverwriteError
from pylot.core.util.connection import checkurl
@ -87,7 +89,7 @@ class MainWindow(QMainWindow):
self.dispComponent = str(settings.value("plotting/dispComponent", "Z"))
if settings.value("data/dataRoot", None) is None:
dirname = QFileDialog().getExistingDirectory(
caption='Choose data root ...')
caption='Choose data root ...')
settings.setValue("data/dataRoot", dirname)
settings.sync()
@ -110,14 +112,16 @@ class MainWindow(QMainWindow):
# load and display waveform data
self.dirty = False
self.loadData()
self.loadWaveformData()
self.updateFilterOptions()
if self.loadWaveformData():
self.updateFilterOptions()
else:
sys.exit(0)
def setupUi(self):
try:
self.startTime = min(
[tr.stats.starttime for tr in self.data.wfdata])
[tr.stats.starttime for tr in self.data.wfdata])
except:
self.startTime = UTCDateTime()
@ -354,7 +358,10 @@ class MainWindow(QMainWindow):
settings = QSettings()
return settings.value("data/dataRoot")
def loadData(self, fname=None):
def loadAutoPicks(self):
self.loadData(type='auto')
def loadData(self, fname=None, type='manual'):
if not self.okToContinue():
return
if fname is None:
@ -368,10 +375,10 @@ class MainWindow(QMainWindow):
filter=filt)
fname = fname[0]
else:
fname = unicode(action.data().toString())
fname = str(action.data().toString())
self.setFileName(fname)
self.data += Data(self, evtdata=self.getFileName())
self.updatePicks()
self.updatePicks(type=type)
self.drawPicks()
def getLastEvent(self):
@ -403,6 +410,8 @@ class MainWindow(QMainWindow):
else:
raise DatastructureError('not specified')
if not self.fnames:
return None
return self.fnames
except DatastructureError as e:
print(e)
@ -431,13 +440,14 @@ class MainWindow(QMainWindow):
print('warning: {0}'.format(e))
directory = os.path.join(self.getRoot(), self.getEventFileName())
file_filter = "QuakeML file (*.xml);;VELEST observation file format (*.cnv);;NonLinLoc observation file (*.obs)"
fname = QFileDialog.getSaveFileName(self, 'Save event data ...',
directory, file_filter)
fname, selected_filter = QFileDialog.getSaveFileName(self, 'Save event data ...',
directory, file_filter)
fbasename, exform = os.path.splitext(fname[0])
fbasename, exform = os.path.splitext(fname)
if not exform and selected_filter:
exform = selected_filter.split('*')[1][:-1]
if not exform:
exform = file_filter[0].split('*')[1][:-1]
return fbasename, exform
settings = QSettings()
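For reference, a minimal sketch of how an export extension can be derived from a Qt file-dialog filter string as in the hunk above. The helper name pick_extension and the fallback parsing of the full filter string are illustrative assumptions, not part of PyLoT:

import os

file_filter = ("QuakeML file (*.xml);;VELEST observation file format (*.cnv);;"
               "NonLinLoc observation file (*.obs)")

def pick_extension(fname, selected_filter, default_filter=file_filter):
    # split off whatever extension the user typed herself
    fbasename, exform = os.path.splitext(fname)
    if not exform and selected_filter:
        # 'QuakeML file (*.xml)' -> '.xml'
        exform = selected_filter.split('*')[1][:-1]
    if not exform:
        # fall back to the first entry of the complete filter string
        exform = default_filter.split(';;')[0].split('*')[1][:-1]
    return fbasename, exform

print(pick_extension('/tmp/event1', 'QuakeML file (*.xml)'))  # ('/tmp/event1', '.xml')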
@ -455,7 +465,7 @@ class MainWindow(QMainWindow):
ret = msgBox.exec_()
if ret == QMessageBox.Save:
self.getData().resetPicks()
self.saveData()
return self.saveData()
elif ret == QMessageBox.Cancel:
return False
try:
@ -524,17 +534,23 @@ class MainWindow(QMainWindow):
def loadWaveformData(self):
if self.fnames and self.okToContinue():
self.setDirty(True)
self.data.setWFData(self.fnames)
ans = self.data.setWFData(self.fnames)
elif self.fnames is None and self.okToContinue():
self.data.setWFData(self.getWFFnames())
self.plotWaveformData()
ans = self.data.setWFData(self.getWFFnames())
if ans:
self.plotWaveformData()
return ans
else:
return ans
def plotWaveformData(self):
zne_text = {'Z': 'vertical', 'N': 'north-south', 'E': 'east-west'}
comp = self.getComponent()
title = 'section: {0} components'.format(zne_text[comp])
alter_comp = COMPNAME_MAP[comp]
wfst = self.getData().getWFData().select(component=comp)
self.getPlotWidget().plotWFData(wfdata=wfst, title=title)
wfst += self.getData().getWFData().select(component=alter_comp)
self.getPlotWidget().plotWFData(wfdata=wfst, title=title, mapping=False)
self.draw()
plotDict = self.getPlotWidget().getPlotDict()
pos = plotDict.keys()
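A minimal sketch of selecting one component under two naming conventions, as done above. Assuming COMPNAME_MAP translates the letter codes 'Z'/'N'/'E' to numeric channel codes such as '3'/'1'/'2' (the exact mapping is an assumption, not taken from this commit):

from obspy import read

# hypothetical mapping between letter and numeric component codes
COMPNAME_MAP = {'Z': '3', 'N': '1', 'E': '2'}

st = read()                                      # ObsPy example stream with Z/N/E traces
comp = 'Z'
wfst = st.select(component=comp)                 # traces named with letter codes
wfst += st.select(component=COMPNAME_MAP[comp])  # also catch e.g. '??3' channels
print(wfst)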
@ -569,6 +585,7 @@ class MainWindow(QMainWindow):
else:
self.getData().resetWFData()
self.plotWaveformData()
self.drawPicks()
def adjustFilterOptions(self):
fstring = "Filter Options ({0})".format(self.getSeismicPhase())
@ -615,8 +632,8 @@ class MainWindow(QMainWindow):
else:
self.updateStatus('Filter loaded ... '
'[{0}: {1} Hz]'.format(
self.getFilterOptions().getFilterType(),
self.getFilterOptions().getFreq()))
self.getFilterOptions().getFilterType(),
self.getFilterOptions().getFreq()))
if self.filterAction.isChecked():
self.filterWaveformData()
@ -673,8 +690,7 @@ class MainWindow(QMainWindow):
self.logDockWidget.setWidget(self.listWidget)
self.addDockWidget(Qt.LeftDockWidgetArea, self.logDockWidget)
self.addListItem('loading default values for local data ...')
home = os.path.expanduser("~")
autopick_parameter = AutoPickParameter('%s/.pylot/autoPyLoT_local.in' % home)
autopick_parameter = AutoPickParameter(AUTOMATIC_DEFAULTS)
self.addListItem(str(autopick_parameter))
# Create the worker thread and run it
@ -718,29 +734,12 @@ class MainWindow(QMainWindow):
self.getPicks(type=type)[station] = stat_picks
return rval
def updatePicks(self):
evt = self.getData().getEvtData()
picks = {}
for pick in evt.picks:
phase = {}
station = pick.waveform_id.station_code
try:
onsets = picks[station]
except KeyError as e:
print(e)
onsets = {}
mpp = pick.time
lpp = mpp + pick.time_errors.upper_uncertainty
epp = mpp - pick.time_errors.lower_uncertainty
spe = pick.time_errors.uncertainty
phase['mpp'] = mpp
phase['epp'] = epp
phase['lpp'] = lpp
phase['spe'] = spe
onsets[pick.phase_hint] = phase.copy()
picks[station] = onsets.copy()
self.picks.update(picks)
def updatePicks(self, type='manual'):
picks = picks_from_evt(evt=self.getData().getEvtData())
if type == 'manual':
self.picks.update(picks)
elif type == 'auto':
self.autopicks.update(picks)
def drawPicks(self, station=None, picktype='manual'):
# if picks to draw not specified, draw all picks available
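The removed inline loop above corresponds to a converter along the following lines; this is only a sketch of the idea behind picks_from_evt, assuming ObsPy Pick objects with time_errors set (the exact function body in pylot.core.read.io may differ):

def picks_from_evt(evt):
    # collect ObsPy picks into {station: {phase: {'mpp', 'epp', 'lpp', 'spe'}}}
    picks = {}
    for pick in evt.picks:
        station = pick.waveform_id.station_code
        onsets = picks.setdefault(station, {})
        mpp = pick.time
        onsets[pick.phase_hint] = dict(
            mpp=mpp,
            epp=mpp - pick.time_errors.lower_uncertainty,   # earliest possible pick
            lpp=mpp + pick.time_errors.upper_uncertainty,   # latest possible pick
            spe=pick.time_errors.uncertainty)               # symmetric pick error
    return picks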
@ -811,12 +810,12 @@ class MainWindow(QMainWindow):
if self.getData() is not None:
if not self.getData().isNew():
self.setWindowTitle(
"PyLoT - processing event %s[*]" % self.getData().getID())
"PyLoT - processing event %s[*]" % self.getData().getID())
elif self.getData().isNew():
self.setWindowTitle("PyLoT - New event [*]")
else:
self.setWindowTitle(
"PyLoT - seismic processing the python way[*]")
"PyLoT - seismic processing the python way[*]")
self.setWindowModified(self.dirty)
def tutorUser(self):
@ -854,7 +853,7 @@ class MainWindow(QMainWindow):
def helpHelp(self):
if checkurl():
form = HelpForm(
'https://ariadne.geophysik.ruhr-uni-bochum.de/trac/PyLoT/wiki')
'https://ariadne.geophysik.ruhr-uni-bochum.de/trac/PyLoT/wiki')
else:
form = HelpForm(':/help.html')
form.show()

View File

@ -1,6 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import argparse
import glob
@ -55,9 +56,9 @@ def autoPyLoT(inputfile):
if parameter.hasParam('datastructure'):
datastructure = DATASTRUCTURE[parameter.getParam('datastructure')]()
dsfields = {'root' :parameter.getParam('rootpath'),
'dpath' :parameter.getParam('datapath'),
'dbase' :parameter.getParam('database')}
dsfields = {'root': parameter.getParam('rootpath'),
'dpath': parameter.getParam('datapath'),
'dbase': parameter.getParam('database')}
exf = ['root', 'dpath', 'dbase']
@ -86,7 +87,7 @@ def autoPyLoT(inputfile):
ttpat = parameter.getParam('ttpatter')
# pattern of NLLoc-output file
nllocoutpatter = parameter.getParam('outpatter')
maxnumit = 3 # maximum number of iterations for re-picking
maxnumit = 3 # maximum number of iterations for re-picking
else:
locflag = 0
print(" !!! ")
@ -94,7 +95,6 @@ def autoPyLoT(inputfile):
print("!!No source parameter estimation possible!!")
print(" !!! ")
# multiple event processing
# read each event in database
datapath = datastructure.expandDataPath()
@ -115,7 +115,7 @@ def autoPyLoT(inputfile):
picksExport(picks, 'NLLoc', phasefile)
# For locating the event the NLLoc-control file has to be modified!
evID = event[string.rfind(event, "/") + 1 : len(events) - 1]
evID = event[string.rfind(event, "/") + 1: len(events) - 1]
nllocout = '%s_%s' % (evID, nllocoutpatter)
# create comment line for NLLoc-control file
modifyInputFile(ctrf, nllocroot, nllocout, phasef, ttpat)
@ -129,21 +129,21 @@ def autoPyLoT(inputfile):
# get stations with bad onsets
badpicks = []
for key in picks:
if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
badpicks.append([key, picks[key]['P']['mpp']])
if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
badpicks.append([key, picks[key]['P']['mpp']])
if len(badpicks) == 0:
print("autoPyLoT: No bad onsets found, thus no iterative picking necessary!")
print("autoPyLoT: No bad onsets found, thus no iterative picking necessary!")
# get NLLoc-location file
locsearch = '%s/loc/%s.????????.??????.grid?.loc.hyp' % (nllocroot, nllocout)
if len(glob.glob(locsearch)) > 0:
# get latest NLLoc-location file if several are available
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
# calculating seismic moment Mo and moment magnitude Mw
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
else:
print("autoPyLoT: No NLLoc-location file available!")
print("No source parameter estimation possible!")
@ -152,9 +152,9 @@ def autoPyLoT(inputfile):
locsearch = '%s/loc/%s.????????.??????.grid?.loc.hyp' % (nllocroot, nllocout)
if len(glob.glob(locsearch)) > 0:
# get latest file if several are available
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
nlloccounter = 0
while len(badpicks) > 0 and nlloccounter <= maxnumit:
while len(badpicks) > 0 and nlloccounter <= maxnumit:
nlloccounter += 1
if nlloccounter > maxnumit:
print("autoPyLoT: Number of maximum iterations reached, stop iterative picking!")
@ -169,28 +169,28 @@ def autoPyLoT(inputfile):
locate(nlloccall, ctrfile)
print("autoPyLoT: Iteration No. %d finished." % nlloccounter)
# get updated NLLoc-location file
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
# check for bad picks
badpicks = []
for key in picks:
if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
badpicks.append([key, picks[key]['P']['mpp']])
if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
badpicks.append([key, picks[key]['P']['mpp']])
print("autoPyLoT: After iteration No. %d: %d bad onsets found ..." % (nlloccounter, \
len(badpicks)))
len(badpicks)))
if len(badpicks) == 0:
print("autoPyLoT: No more bad onsets found, stop iterative picking!")
nlloccounter = maxnumit
# calculating seismic moment Mo and moment magnitude Mw
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
# get network moment magntiude
netMw = []
for key in finalpicks.getpicdic():
if finalpicks.getpicdic()[key]['P']['Mw'] is not None:
netMw.append(finalpicks.getpicdic()[key]['P']['Mw'])
for key in finalpicks.getpicdic():
if finalpicks.getpicdic()[key]['P']['Mw'] is not None:
netMw.append(finalpicks.getpicdic()[key]['P']['Mw'])
netMw = np.median(netMw)
print("Network moment magnitude: %4.1f" % netMw)
else:
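The network magnitude block above boils down to taking the median of all available station Mw values; a small numpy sketch, with picdic shaped like the pick dictionaries used here (values are made up for illustration):

import numpy as np

picdic = {'STA1': {'P': {'Mw': 2.1}},
          'STA2': {'P': {'Mw': 2.3}},
          'STA3': {'P': {'Mw': None}}}   # station without a magnitude estimate

netMw = np.median([phases['P']['Mw'] for phases in picdic.values()
                   if phases['P']['Mw'] is not None])
print("Network moment magnitude: %4.1f" % netMw)   # -> 2.2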
@ -202,13 +202,18 @@ def autoPyLoT(inputfile):
if hasattr(finalpicks, 'getpicdic'):
if finalpicks.getpicdic() is not None:
writephases(finalpicks.getpicdic(), 'HYPO71', hypo71file)
data.applyEVTData(finalpicks.getpicdic())
else:
writephases(picks, 'HYPO71', hypo71file)
data.applyEVTData(picks)
else:
writephases(picks, 'HYPO71', hypo71file)
data.applyEVTData(picks)
fnqml = '%s/autoPyLoT' % event
data.exportEvent(fnqml)
endsplash = '''------------------------------------------\n'
-----Finished event %s!-----\n'
-----Finished event %s!-----\n'
------------------------------------------'''.format \
(version=_getVersionString()) % evID
print(endsplash)
@ -218,8 +223,8 @@ def autoPyLoT(inputfile):
# single event processing
else:
data.setWFData(glob.glob(os.path.join(datapath, parameter.getParam('eventID'), '*')))
print("Working on event "), parameter.getParam('eventID')
print data
print("Working on event {0}".format(parameter.getParam('eventID')))
print(data)
wfdat = data.getWFData() # all available streams
##########################################################
@ -245,21 +250,21 @@ def autoPyLoT(inputfile):
# get stations with bad onsets
badpicks = []
for key in picks:
if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
badpicks.append([key, picks[key]['P']['mpp']])
if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
badpicks.append([key, picks[key]['P']['mpp']])
if len(badpicks) == 0:
print("autoPyLoT: No bad onsets found, thus no iterative picking necessary!")
print("autoPyLoT: No bad onsets found, thus no iterative picking necessary!")
# get NLLoc-location file
locsearch = '%s/loc/%s.????????.??????.grid?.loc.hyp' % (nllocroot, nllocout)
if len(glob.glob(locsearch)) > 0:
# get latest NLLOc-location file if several are available
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
# calculating seismic moment Mo and moment magnitude Mw
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
else:
print("autoPyLoT: No NLLoc-location file available!")
print("No source parameter estimation possible!")
@ -268,9 +273,9 @@ def autoPyLoT(inputfile):
locsearch = '%s/loc/%s.????????.??????.grid?.loc.hyp' % (nllocroot, nllocout)
if len(glob.glob(locsearch)) > 0:
# get latest file if several are available
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
nlloccounter = 0
while len(badpicks) > 0 and nlloccounter <= maxnumit:
while len(badpicks) > 0 and nlloccounter <= maxnumit:
nlloccounter += 1
if nlloccounter > maxnumit:
print("autoPyLoT: Number of maximum iterations reached, stop iterative picking!")
@ -285,28 +290,28 @@ def autoPyLoT(inputfile):
locate(nlloccall, ctrfile)
print("autoPyLoT: Iteration No. %d finished." % nlloccounter)
# get updated NLLoc-location file
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
nllocfile = max(glob.glob(locsearch), key=os.path.getctime)
# check for bad picks
badpicks = []
for key in picks:
if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
badpicks.append([key, picks[key]['P']['mpp']])
if picks[key]['P']['weight'] >= 4 or picks[key]['S']['weight'] >= 4:
badpicks.append([key, picks[key]['P']['mpp']])
print("autoPyLoT: After iteration No. %d: %d bad onsets found ..." % (nlloccounter, \
len(badpicks)))
len(badpicks)))
if len(badpicks) == 0:
print("autoPyLoT: No more bad onsets found, stop iterative picking!")
nlloccounter = maxnumit
# calculating seismic moment Mo and moment magnitude Mw
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
finalpicks = M0Mw(wfdat, None, None, parameter.getParam('iplot'), \
nllocfile, picks, parameter.getParam('rho'), \
parameter.getParam('vp'), parameter.getParam('Qp'), \
parameter.getParam('invdir'))
# get network moment magntiude
netMw = []
for key in finalpicks.getpicdic():
if finalpicks.getpicdic()[key]['P']['Mw'] is not None:
netMw.append(finalpicks.getpicdic()[key]['P']['Mw'])
for key in finalpicks.getpicdic():
if finalpicks.getpicdic()[key]['P']['Mw'] is not None:
netMw.append(finalpicks.getpicdic()[key]['P']['Mw'])
netMw = np.median(netMw)
print("Network moment magnitude: %4.1f" % netMw)
else:
@ -318,19 +323,24 @@ def autoPyLoT(inputfile):
if hasattr(finalpicks, 'getpicdic'):
if finalpicks.getpicdic() is not None:
writephases(finalpicks.getpicdic(), 'HYPO71', hypo71file)
data.applyEVTData(finalpicks.getpicdic())
else:
writephases(picks, 'HYPO71', hypo71file)
data.applyEVTData(picks)
else:
writephases(picks, 'HYPO71', hypo71file)
data.applyEVTData(picks)
fnqml = '%s/%s/autoPyLoT' % (datapath, parameter.getParam('eventID'))
data.exportEvent(fnqml)
endsplash = '''------------------------------------------\n'
-----Finished event %s!-----\n'
-----Finished event %s!-----\n'
------------------------------------------'''.format \
(version=_getVersionString()) % parameter.getParam('eventID')
print(endsplash)
if locflag == 0:
print("autoPyLoT was running in non-location mode!")
endsp = '''####################################\n
************************************\n
*********autoPyLoT terminates*******\n
@ -338,7 +348,9 @@ def autoPyLoT(inputfile):
************************************'''.format(version=_getVersionString())
print(endsp)
if __name__ == "__main__":
from pylot.core.util.defaults import AUTOMATIC_DEFAULTS
# parse arguments
parser = argparse.ArgumentParser(
description='''autoPyLoT automatically picks phase onset times using higher order statistics,
@ -348,8 +360,7 @@ if __name__ == "__main__":
action='store',
help='''full path to the file containing the input
parameters for autoPyLoT''',
default=os.path.join(os.path.expanduser('~'), '.pylot',
'autoPyLoT.in')
default=AUTOMATIC_DEFAULTS
)
parser.add_argument('-v', '-V', '--version', action='version',
version='autoPyLoT ' + __version__,

View File

@ -1,2 +1,2 @@
P
S bandpass 4 0.5 5.0
P bandpass 4 2.0 20.0
S bandpass 4 2.0 15.0
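Reading these defaults as phase, filter type, number of corners, lower and upper corner frequency is an assumption about the column layout; under that assumption the new P line would translate to an ObsPy call like this sketch:

from obspy import read

st = read()  # ObsPy example stream
# 'P bandpass 4 2.0 20.0' -> bandpass, 4 corners, 2-20 Hz
st.filter('bandpass', corners=4, freqmin=2.0, freqmax=20.0)
print(st)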

View File

@ -1,5 +1,7 @@
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
"""
makePyLoT -- build and install PyLoT
@ -123,7 +125,7 @@ USAGE
except KeyboardInterrupt:
cleanUp(verbose)
return 0
except Exception, e:
except Exception as e:
if DEBUG or TESTRUN:
raise e
indent = len(program_name) * " "
@ -139,7 +141,7 @@ def buildPyLoT(verbosity=None):
"\n"
" Current working directory: {1}\n"
).format(system, os.getcwd())
print msg
print(msg)
if system.startswith(('win', 'microsoft')):
raise CLIError(
"building on Windows system not tested yet; implementation pending")

View File

@ -1 +1 @@
a31e-dirty
1508-dirty

View File

@ -4,8 +4,9 @@ import numpy as np
from pylot.core.active import seismicshot
from pylot.core.active.surveyUtils import cleanUp
class Survey(object):
def __init__(self, path, sourcefile, receiverfile, useDefaultParas = False):
def __init__(self, path, sourcefile, receiverfile, useDefaultParas=False):
'''
The Survey Class contains all shots [type: seismicshot] of a survey
as well as the aquisition geometry and the topography.
@ -37,7 +38,7 @@ class Survey(object):
shot_dict = {}
shotlist = self.getShotlist()
for shotnumber in shotlist: # loop over data files
for shotnumber in shotlist: # loop over data files
# generate filenames and read manual picks to a list
obsfile = self._obsdir + str(shotnumber) + '_pickle.dat'
if obsfile not in shot_dict.keys():
@ -47,7 +48,7 @@ class Survey(object):
self.data = shot_dict
print ("Generated Survey object for %d shots" % len(shotlist))
print ("Total number of traces: %d \n" %self.countAllTraces())
print ("Total number of traces: %d \n" % self.countAllTraces())
def _removeAllEmptyTraces(self):
filename = 'removeEmptyTraces.out'
@ -58,11 +59,11 @@ class Survey(object):
if count == 0: outfile = open(filename, 'w')
count += 1
outfile.writelines('shot: %s, removed empty traces: %s\n'
%(shot.getShotnumber(), removed))
print ("\nremoveEmptyTraces: Finished! Removed %d traces" %count)
% (shot.getShotnumber(), removed))
print ("\nremoveEmptyTraces: Finished! Removed %d traces" % count)
if count > 0:
print ("See %s for more information "
"on removed traces."%(filename))
"on removed traces." % (filename))
outfile.close()
def _updateShots(self):
@ -70,7 +71,8 @@ class Survey(object):
Removes traces that do not exist in the dataset for any reason.
'''
filename = 'updateShots.out'
count = 0; countTraces = 0
count = 0;
countTraces = 0
for shot in self.data.values():
del_traceIDs = shot.updateTraceList()
if len(del_traceIDs) > 0:
@ -79,13 +81,13 @@ class Survey(object):
countTraces += len(del_traceIDs)
outfile.writelines("shot: %s, removed traceID(s) %s because "
"they were not found in the corresponding stream\n"
%(shot.getShotnumber(), del_traceIDs))
% (shot.getShotnumber(), del_traceIDs))
print ("\nupdateShots: Finished! Updated %d shots and removed "
"%d traces" %(count, countTraces))
"%d traces" % (count, countTraces))
if count > 0:
print ("See %s for more information "
"on removed traces."%(filename))
"on removed traces." % (filename))
outfile.close()
def setArtificialPick(self, traceID, pick):
@ -96,9 +98,9 @@ class Survey(object):
for shot in self.data.values():
shot.setPick(traceID, pick)
def setParametersForShots(self, cutwindow = (0, 0.2), tmovwind = 0.3, tsignal = 0.03, tgap = 0.0007):
def setParametersForShots(self, cutwindow=(0, 0.2), tmovwind=0.3, tsignal=0.03, tgap=0.0007):
if (cutwindow == (0, 0.2) and tmovwind == 0.3 and
tsignal == 0.03 and tgap == 0.0007):
tsignal == 0.03 and tgap == 0.0007):
print ("Warning: Standard values used for "
"setParamters. This might not be clever.")
# CHANGE this later. Parameters only needed for survey, not for each shot.
@ -107,12 +109,12 @@ class Survey(object):
shot.setTmovwind(tmovwind)
shot.setTsignal(tsignal)
shot.setTgap(tgap)
shot.setOrder(order = 4)
shot.setOrder(order=4)
print ("setParametersForShots: Parameters set to:\n"
"cutwindow = %s, tMovingWindow = %f, tsignal = %f, tgap = %f"
%(cutwindow, tmovwind, tsignal, tgap))
% (cutwindow, tmovwind, tsignal, tgap))
def setManualPicksFromFiles(self, directory = 'picks'):
def setManualPicksFromFiles(self, directory='picks'):
'''
Read manual picks from *.pck files in a directory.
The * must be identical with the shotnumber.
@ -135,7 +137,10 @@ class Survey(object):
def plotDiffs(self):
import matplotlib.pyplot as plt
diffs = []; dists = []; mpicks = []; picks = []
diffs = [];
dists = [];
mpicks = [];
picks = []
diffsDic = self.getDiffsFromManual()
for shot in self.data.values():
for traceID in shot.getTraceIDlist():
@ -144,22 +149,22 @@ class Survey(object):
mpicks.append(shot.getManualPick(traceID))
picks.append(shot.getPick(traceID))
diffs.append(diffsDic[shot][traceID])
labelm = 'manual picks'
labela = 'automatic picks'
fig = plt.figure()
ax = fig.add_subplot(111)
sc_a = ax.scatter(dists, picks, c = '0.5', s=10, edgecolors='none', label = labela, alpha = 0.3)
sc = ax.scatter(dists, mpicks, c = diffs, s=5, edgecolors='none', label = labelm)
sc_a = ax.scatter(dists, picks, c='0.5', s=10, edgecolors='none', label=labela, alpha=0.3)
sc = ax.scatter(dists, mpicks, c=diffs, s=5, edgecolors='none', label=labelm)
cbar = plt.colorbar(sc, fraction=0.05)
cbar.set_label(labelm)
ax.set_xlabel('Distance [m]')
ax.set_ylabel('Time [s]')
ax.text(0.5, 0.95, 'Plot of all MANUAL picks', transform=ax.transAxes, horizontalalignment='center')
def plotHist(self, nbins = 20, ax = None):
def plotHist(self, nbins=20, ax=None):
import matplotlib.pyplot as plt
plt.interactive(True)
diffs = []
@ -170,48 +175,51 @@ class Survey(object):
for traceID in shot.getTraceIDlist():
if shot.getPickFlag(traceID) == 1 and shot.getManualPickFlag(traceID) == 1:
diffs.append(self.getDiffsFromManual()[shot][traceID])
hist = plt.hist(diffs, nbins, histtype = 'step', normed = True, stacked = True)
hist = plt.hist(diffs, nbins, histtype='step', normed=True, stacked=True)
plt.title('Histogram of the differences between automatic and manual pick')
plt.xlabel('Difference in time (auto - manual) [s]')
return diffs
def pickAllShots(self, windowsize, HosAic = 'hos', vmin = 333, vmax = 5500, folm = 0.6):
def pickAllShots(self, windowsize, HosAic='hos', vmin=333, vmax=5500, folm=0.6):
'''
Automatically pick all traces of all shots of the survey.
'''
from datetime import datetime
starttime = datetime.now()
count = 0; tpicksum = starttime - starttime
count = 0;
tpicksum = starttime - starttime
for shot in self.data.values():
tstartpick = datetime.now(); count += 1
tstartpick = datetime.now();
count += 1
for traceID in shot.getTraceIDlist():
distance = shot.getDistance(traceID) # receive distance
distance = shot.getDistance(traceID) # receive distance
pickwin_used = shot.getCut()
cutwindow = shot.getCut()
# for higher distances use a linear vmin/vmax to cut out late/early regions with high noise
if distance > 5.:
pwleft = distance/vmax ################## TEST
pwright = distance/vmin
pwleft = distance / vmax ################## TEST
pwright = distance / vmin
if pwright > cutwindow[1]:
pwright = cutwindow[1]
pickwin_used = (pwleft, pwright)
shot.setPickwindow(traceID, pickwin_used)
shot.pickTraces(traceID, windowsize, folm, HosAic) # picker
shot.pickTraces(traceID, windowsize, folm, HosAic) # picker
shot.setSNR(traceID)
#if shot.getSNR(traceID)[0] < snrthreshold:
# if shot.getSNR(traceID)[0] < snrthreshold:
if shot.getSNR(traceID)[0] < shot.getSNRthreshold(traceID):
shot.removePick(traceID)
shot.removePick(traceID)
# set epp and lpp if SNR > 1 (else earllatepicker cant set values)
if shot.getSNR(traceID)[0] > 1:
shot.setEarllatepick(traceID)
tpicksum += (datetime.now() - tstartpick); tpick = tpicksum/count
tpicksum += (datetime.now() - tstartpick);
tpick = tpicksum / count
tremain = (tpick * (len(self.getShotDict()) - count))
tend = datetime.now() + tremain
progress = float(count) / float(len(self.getShotDict())) * 100
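A minimal sketch of the distance-dependent pick window used above, with vmin/vmax as apparent velocities and the same defaults as in the code (numbers in the call are illustrative):

def pick_window(distance, cutwindow=(0, 0.2), vmin=333., vmax=5500.):
    # default to the full cut window; narrow it for larger offsets
    pickwin = cutwindow
    if distance > 5.:
        pwleft = distance / vmax            # earliest plausible onset
        pwright = min(distance / vmin,      # latest plausible onset ...
                      cutwindow[1])         # ... but never beyond the cut window
        pickwin = (pwleft, pwright)
    return pickwin

print(pick_window(50.))   # -> (0.0090..., 0.1501...)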
@ -220,7 +228,7 @@ class Survey(object):
ntraces = self.countAllTraces()
pickedtraces = self.countAllPickedTraces()
print('Picked %s / %s traces (%d %%)\n'
%(pickedtraces, ntraces, float(pickedtraces)/float(ntraces)*100.))
% (pickedtraces, ntraces, float(pickedtraces) / float(ntraces) * 100.))
def cleanBySPE(self, maxSPE):
for shot in self.data.values():
@ -237,7 +245,7 @@ class Survey(object):
if shot.getPickFlag(traceID) == 1:
spe.append(shot.getSymmetricPickError(traceID))
spe.sort()
plt.plot(spe, label = 'SPE')
plt.plot(spe, label='SPE')
plt.ylabel('Symmetric Pickerror')
plt.legend()
@ -255,7 +263,7 @@ class Survey(object):
shot.removePick(traceID)
else:
numpicks += 1
print('Recovered %d picks'%numpicks)
print('Recovered %d picks' % numpicks)
def setArtificialPick(self, traceID, pick):
for shot in self.data.values():
@ -265,13 +273,13 @@ class Survey(object):
def countAllTraces(self):
numtraces = 0
for shot in self.getShotlist():
for rec in self.getReceiverlist(): ### shot.getReceiverlist etc.
for rec in self.getReceiverlist(): ### shot.getReceiverlist etc.
numtraces += 1
return numtraces
def getShotlist(self):
filename = self.getPath() + self.getSourcefile()
srcfile = open(filename,'r')
srcfile = open(filename, 'r')
shotlist = []
for line in srcfile.readlines():
line = line.split()
@ -281,7 +289,7 @@ class Survey(object):
def getReceiverlist(self):
filename = self.getPath() + self.getReceiverfile()
recfile = open(filename,'r')
recfile = open(filename, 'r')
reclist = []
for line in recfile.readlines():
line = line.split()
@ -318,8 +326,8 @@ class Survey(object):
pickedTraces += 1
info_dict[shot.getShotnumber()] = {'numtraces': numtraces,
'picked traces': [pickedTraces,
'%2.2f %%'%(float(pickedTraces) /
float(numtraces) * 100)],
'%2.2f %%' % (float(pickedTraces) /
float(numtraces) * 100)],
'mean SNR': np.mean(snrlist),
'mean distance': np.mean(dist)}
@ -330,7 +338,7 @@ class Survey(object):
if shot.getShotnumber() == shotnumber:
return shot
def exportFMTOMO(self, directory = 'FMTOMO_export', sourcefile = 'input_sf.in', ttFileExtension = '.tt'):
def exportFMTOMO(self, directory='FMTOMO_export', sourcefile='input_sf.in', ttFileExtension='.tt'):
def getAngle(distance):
PI = np.pi
R = 6371.
@ -338,18 +346,22 @@ class Survey(object):
return angle
count = 0
fmtomo_factor = 1000 # transforming [m/s] -> [km/s]
LatAll = []; LonAll = []; DepthAll = []
fmtomo_factor = 1000 # transforming [m/s] -> [km/s]
LatAll = [];
LonAll = [];
DepthAll = []
srcfile = open(directory + '/' + sourcefile, 'w')
srcfile.writelines('%10s\n' %len(self.data)) # number of sources
srcfile.writelines('%10s\n' % len(self.data)) # number of sources
for shotnumber in self.getShotlist():
shot = self.getShotForShotnumber(shotnumber)
ttfilename = str(shotnumber) + ttFileExtension
(x, y, z) = shot.getSrcLoc() # getSrcLoc returns (x, y, z)
srcfile.writelines('%10s %10s %10s\n' %(getAngle(y), getAngle(x), (-1)*z)) # lat, lon, depth
LatAll.append(getAngle(y)); LonAll.append(getAngle(x)); DepthAll.append((-1)*z)
srcfile.writelines('%10s\n' %1) #
srcfile.writelines('%10s %10s %10s\n' %(1, 1, ttfilename))
(x, y, z) = shot.getSrcLoc() # getSrcLoc returns (x, y, z)
srcfile.writelines('%10s %10s %10s\n' % (getAngle(y), getAngle(x), (-1) * z)) # lat, lon, depth
LatAll.append(getAngle(y));
LonAll.append(getAngle(x));
DepthAll.append((-1) * z)
srcfile.writelines('%10s\n' % 1) #
srcfile.writelines('%10s %10s %10s\n' % (1, 1, ttfilename))
ttfile = open(directory + '/' + ttfilename, 'w')
traceIDlist = shot.getTraceIDlist()
traceIDlist.sort()
@ -359,8 +371,10 @@ class Survey(object):
pick = shot.getPick(traceID) * fmtomo_factor
delta = shot.getSymmetricPickError(traceID) * fmtomo_factor
(x, y, z) = shot.getRecLoc(traceID)
ttfile.writelines('%20s %20s %20s %10s %10s\n' %(getAngle(y), getAngle(x), (-1)*z, pick, delta))
LatAll.append(getAngle(y)); LonAll.append(getAngle(x)); DepthAll.append((-1)*z)
ttfile.writelines('%20s %20s %20s %10s %10s\n' % (getAngle(y), getAngle(x), (-1) * z, pick, delta))
LatAll.append(getAngle(y));
LonAll.append(getAngle(x));
DepthAll.append((-1) * z)
count += 1
ttfile.close()
srcfile.close()
@ -393,7 +407,7 @@ class Survey(object):
count += 1
return count
def plotAllShots(self, rows = 3, columns = 4, mode = '3d'):
def plotAllShots(self, rows=3, columns=4, mode='3d'):
'''
Plots all shots as Matrices with the color corresponding to the traveltime for each receiver.
IMPORTANT NOTE: Topography (z - coordinate) is not considered in the diagrams!
@ -408,8 +422,8 @@ class Survey(object):
figPerSubplot = columns * rows
index = 1
#shotnames = []
#shotnumbers = []
# shotnames = []
# shotnumbers = []
# for shot in self.data.values():
# shotnames.append(shot.getShotname())
@ -419,24 +433,24 @@ class Survey(object):
for shotnumber in self.getShotlist():
if index <= figPerSubplot:
#ax = fig.add_subplot(3,3,i, projection = '3d', title = 'shot:'
#+str(shot_dict[shotnumber].getShotnumber()), xlabel = 'X', ylabel = 'Y', zlabel = 'traveltime')
#shot_dict[shotnumber].plot3dttc(ax = ax, plotpicks = True)
# ax = fig.add_subplot(3,3,i, projection = '3d', title = 'shot:'
# +str(shot_dict[shotnumber].getShotnumber()), xlabel = 'X', ylabel = 'Y', zlabel = 'traveltime')
# shot_dict[shotnumber].plot3dttc(ax = ax, plotpicks = True)
ax = fig.add_subplot(rows, columns, index)
if mode == '3d':
self.getShot(shotnumber).matshow(ax = ax, colorbar = False, annotations = True, legend = False)
self.getShot(shotnumber).matshow(ax=ax, colorbar=False, annotations=True, legend=False)
elif mode == '2d':
self.getShot(shotnumber).plot2dttc(ax)
self.getShot(shotnumber).plotmanual2dttc(ax)
index += 1
if index > figPerSubplot:
fig.subplots_adjust(left = 0, bottom = 0, right = 1, top = 1, wspace = 0, hspace = 0)
fig.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)
fig = plt.figure()
index = 1
fig.subplots_adjust(left = 0, bottom = 0, right = 1, top = 1, wspace = 0, hspace = 0)
fig.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)
def plotAllPicks(self, plotRemoved = False, colorByVal = 'log10SNR', ax = None, cbar = None, refreshPlot = False):
def plotAllPicks(self, plotRemoved=False, colorByVal='log10SNR', ax=None, cbar=None, refreshPlot=False):
'''
Plots all picks over the distance between source and receiver. Returns (ax, region).
Picks can be checked and removed by using region class (pylot.core.active.surveyPlotTools.regions)
@ -488,8 +502,8 @@ class Survey(object):
spe.append(shot.getSymmetricPickError(traceID))
color = {'log10SNR': snrlog,
'pickerror': pickerror,
'spe': spe}
'pickerror': pickerror,
'spe': spe}
self.color = color
if refreshPlot is False:
ax, cbar = self.createPlot(dist, pick, color[colorByVal], label='%s' % colorByVal)
@ -501,7 +515,7 @@ class Survey(object):
ax.legend()
return ax
def createPlot(self, dist, pick, inkByVal, label, ax = None, cbar = None):
def createPlot(self, dist, pick, inkByVal, label, ax=None, cbar=None):
import matplotlib.pyplot as plt
plt.interactive(True)
cm = plt.cm.jet
@ -526,19 +540,19 @@ class Survey(object):
def _update_progress(self, shotname, tend, progress):
sys.stdout.write('Working on shot %s. ETC is %02d:%02d:%02d [%2.2f %%]\r' % (shotname,
tend.hour,
tend.minute,
tend.second,
progress))
tend.hour,
tend.minute,
tend.second,
progress))
sys.stdout.flush()
def saveSurvey(self, filename = 'survey.pickle'):
def saveSurvey(self, filename='survey.pickle'):
import cPickle
cleanUp(self)
outfile = open(filename, 'wb')
cPickle.dump(self, outfile, -1)
print('saved Survey to file %s'%(filename))
print('saved Survey to file %s' % (filename))
@staticmethod
def from_pickle(filename):

View File

@ -2,11 +2,12 @@
import sys
import numpy as np
def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk', absOrRel = 'abs', inputfileref = 'vgridsref.in'):
def vgrids2VTK(inputfile='vgrids.in', outputfile='vgrids.vtk', absOrRel='abs', inputfileref='vgridsref.in'):
'''
Generate a vtk-file readable by e.g. paraview from FMTOMO output vgrids.in
'''
R = 6371. # earth radius
R = 6371. # earth radius
outfile = open(outputfile, 'w')
number, delta, start, vel = _readVgrid(inputfile)
@ -14,12 +15,14 @@ def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk', absOrRel = 'a
nR, nTheta, nPhi = number
dR, dTheta, dPhi = delta
sR, sTheta, sPhi = start
thetaGrid, phiGrid, rGrid = _generateGrids(number, delta, start)
nPoints = nR * nTheta * nPhi
nX = nPhi; nY = nTheta; nZ = nR
nX = nPhi;
nY = nTheta;
nZ = nR
sZ = sR - R
sX = _getDistance(sPhi)
@ -36,17 +39,21 @@ def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk', absOrRel = 'a
outfile.writelines('ASCII\n')
outfile.writelines('DATASET STRUCTURED_POINTS\n')
outfile.writelines('DIMENSIONS %d %d %d\n' %(nX, nY, nZ))
outfile.writelines('ORIGIN %f %f %f\n' %(sX, sY, sZ))
outfile.writelines('SPACING %f %f %f\n' %(dX, dY, dZ))
outfile.writelines('DIMENSIONS %d %d %d\n' % (nX, nY, nZ))
outfile.writelines('ORIGIN %f %f %f\n' % (sX, sY, sZ))
outfile.writelines('SPACING %f %f %f\n' % (dX, dY, dZ))
outfile.writelines('POINT_DATA %15d\n' %(nPoints))
outfile.writelines('POINT_DATA %15d\n' % (nPoints))
if absOrRel == 'abs':
<<<<<<< HEAD
outfile.writelines('SCALARS velocity float %d\n' %(1))
if absOrRel == 'relDepth':
outfile.writelines('SCALARS velocity2depthMean float %d\n' %(1))
=======
outfile.writelines('SCALARS velocity float %d\n' % (1))
>>>>>>> 37f9292c39246b327d3630995ca2521725c6cdd7
elif absOrRel == 'rel':
outfile.writelines('SCALARS velChangePercent float %d\n' %(1))
outfile.writelines('SCALARS velChangePercent float %d\n' % (1))
outfile.writelines('LOOKUP_TABLE default\n')
pointsPerR = nTheta * nPhi
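The header written above follows the legacy ASCII VTK STRUCTURED_POINTS format; a standalone sketch of the same layout, with dummy grid dimensions and velocities (file name and values are illustrative):

import numpy as np

nX, nY, nZ = 4, 3, 2
velocities = np.full(nX * nY * nZ, 5.0)   # dummy velocity values [km/s]

with open('vgrids_demo.vtk', 'w') as out:
    out.write('# vtk DataFile Version 3.1\n')
    out.write('Velocity on FMTOMO vgrids.in points\n')
    out.write('ASCII\n')
    out.write('DATASET STRUCTURED_POINTS\n')
    out.write('DIMENSIONS %d %d %d\n' % (nX, nY, nZ))
    out.write('ORIGIN %f %f %f\n' % (0., 0., -10.))
    out.write('SPACING %f %f %f\n' % (1., 1., 1.))
    out.write('POINT_DATA %15d\n' % velocities.size)
    out.write('SCALARS velocity float 1\n')
    out.write('LOOKUP_TABLE default\n')
    for v in velocities:
        out.write('%10f\n' % v)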
@ -55,6 +62,7 @@ def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk', absOrRel = 'a
if absOrRel == 'abs':
print("Writing velocity values to VTK file...")
for velocity in vel:
<<<<<<< HEAD
outfile.writelines('%10f\n' %velocity)
elif absOrRel == 'relDepth':
print("Writing velocity values to VTK file relative to mean of each depth...")
@ -69,34 +77,38 @@ def vgrids2VTK(inputfile = 'vgrids.in', outputfile = 'vgrids.vtk', absOrRel = 'a
for vel in veldepth:
outfile.writelines('%10f\n' %(vel - velmean))
veldepth = []
=======
outfile.writelines('%10f\n' % velocity)
>>>>>>> 37f9292c39246b327d3630995ca2521725c6cdd7
elif absOrRel == 'rel':
nref, dref, sref, velref = _readVgrid(inputfileref)
nR_ref, nTheta_ref, nPhi_ref = nref
if not len(velref) == len(vel):
print('ERROR: Number of gridpoints mismatch for %s and %s'%(inputfile, inputfileref))
print('ERROR: Number of gridpoints mismatch for %s and %s' % (inputfile, inputfileref))
return
#velrel = [((vel - velref) / velref * 100) for vel, velref in zip(vel, velref)]
# velrel = [((vel - velref) / velref * 100) for vel, velref in zip(vel, velref)]
velrel = []
for velocities in zip(vel, velref):
v, vref = velocities
if not vref == 0:
velrel.append((v - vref) / vref * 100)
else:
velrel.append(0)
velrel.append(0)
if not nR_ref == nR and nTheta_ref == nTheta and nPhi_ref == nPhi:
print('ERROR: Dimension mismatch of grids %s and %s'%(inputfile, inputfileref))
print('ERROR: Dimension mismatch of grids %s and %s' % (inputfile, inputfileref))
return
print("Writing velocity values to VTK file...")
for velocity in velrel:
outfile.writelines('%10f\n' %velocity)
print('Pertubations: min: %s %%, max: %s %%'%(min(velrel), max(velrel)))
outfile.writelines('%10f\n' % velocity)
print('Pertubations: min: %s %%, max: %s %%' % (min(velrel), max(velrel)))
outfile.close()
print("Wrote velocity grid for %d points to file: %s" %(nPoints, outputfile))
print("Wrote velocity grid for %d points to file: %s" % (nPoints, outputfile))
return
def rays2VTK(fnin, fdirout = './vtk_files/', nthPoint = 50):
def rays2VTK(fnin, fdirout='./vtk_files/', nthPoint=50):
'''
Writes VTK file(s) for FMTOMO rays from rays.dat
@ -113,12 +125,12 @@ def rays2VTK(fnin, fdirout = './vtk_files/', nthPoint = 50):
while True:
raynumber += 1
firstline = infile.readline()
if firstline == '': break # break at EOF
if firstline == '': break # break at EOF
raynumber = int(firstline.split()[0])
shotnumber = int(firstline.split()[1])
rayValid = int(firstline.split()[4]) # is zero if the ray is invalid
rayValid = int(firstline.split()[4]) # is zero if the ray is invalid
if rayValid == 0:
print('Invalid ray number %d for shot number %d'%(raynumber, shotnumber))
print('Invalid ray number %d for shot number %d' % (raynumber, shotnumber))
continue
nRayPoints = int(infile.readline().split()[0])
if not shotnumber in rays.keys():
@ -127,14 +139,15 @@ def rays2VTK(fnin, fdirout = './vtk_files/', nthPoint = 50):
for index in range(nRayPoints):
if index % nthPoint is 0 or index == (nRayPoints - 1):
rad, lat, lon = infile.readline().split()
rays[shotnumber][raynumber].append([_getDistance(np.rad2deg(float(lon))), _getDistance(np.rad2deg(float(lat))), float(rad) - R])
rays[shotnumber][raynumber].append(
[_getDistance(np.rad2deg(float(lon))), _getDistance(np.rad2deg(float(lat))), float(rad) - R])
else:
dummy = infile.readline()
infile.close()
for shotnumber in rays.keys():
fnameout = fdirout + 'rays%03d.vtk'%(shotnumber)
fnameout = fdirout + 'rays%03d.vtk' % (shotnumber)
outfile = open(fnameout, 'w')
nPoints = 0
@ -143,32 +156,33 @@ def rays2VTK(fnin, fdirout = './vtk_files/', nthPoint = 50):
nPoints += 1
# write header
#print("Writing header for VTK file...")
print("Writing shot %d to file %s" %(shotnumber, fnameout))
# print("Writing header for VTK file...")
print("Writing shot %d to file %s" % (shotnumber, fnameout))
outfile.writelines('# vtk DataFile Version 3.1\n')
outfile.writelines('FMTOMO rays\n')
outfile.writelines('ASCII\n')
outfile.writelines('DATASET POLYDATA\n')
outfile.writelines('POINTS %15d float\n' %(nPoints))
outfile.writelines('POINTS %15d float\n' % (nPoints))
# write coordinates
#print("Writing coordinates to VTK file...")
# print("Writing coordinates to VTK file...")
for raynumber in rays[shotnumber].keys():
for raypoint in rays[shotnumber][raynumber]:
outfile.writelines('%10f %10f %10f \n' %(raypoint[0], raypoint[1], raypoint[2]))
outfile.writelines('%10f %10f %10f \n' % (raypoint[0], raypoint[1], raypoint[2]))
outfile.writelines('LINES %15d %15d\n' %(len(rays[shotnumber]), len(rays[shotnumber]) + nPoints))
outfile.writelines('LINES %15d %15d\n' % (len(rays[shotnumber]), len(rays[shotnumber]) + nPoints))
# write indices
#print("Writing indices to VTK file...")
# print("Writing indices to VTK file...")
count = 0
for raynumber in rays[shotnumber].keys():
outfile.writelines('%d ' %(len(rays[shotnumber][raynumber])))
outfile.writelines('%d ' % (len(rays[shotnumber][raynumber])))
for index in range(len(rays[shotnumber][raynumber])):
outfile.writelines('%d ' %(count))
outfile.writelines('%d ' % (count))
count += 1
outfile.writelines('\n')
def _readVgrid(filename):
def readNumberOfPoints(filename):
fin = open(filename, 'r')
@ -179,7 +193,7 @@ def _readVgrid(filename):
nPhi = int(vglines[1].split()[2])
print('readNumberOf Points: Awaiting %d grid points in %s'
%(nR*nTheta*nPhi, filename))
% (nR * nTheta * nPhi, filename))
fin.close()
return nR, nTheta, nPhi
@ -206,10 +220,11 @@ def _readVgrid(filename):
return sR, sTheta, sPhi
def readVelocity(filename):
'''
'''
Reads in velocity from vgrids file and returns a list containing all values in the same order
'''
vel = []; count = 0
vel = [];
count = 0
fin = open(filename, 'r')
vglines = fin.readlines()
@ -218,7 +233,7 @@ def _readVgrid(filename):
if count > 4:
vel.append(float(line.split()[0]))
print("Read %d points out of file: %s" %(count - 4, filename))
print("Read %d points out of file: %s" % (count - 4, filename))
return vel
# Theta, Phi in radians, R in km
@ -235,23 +250,25 @@ def _readVgrid(filename):
start = (sR, sTheta, sPhi)
return number, delta, start, vel
def _generateGrids(number, delta, start):
nR, nTheta, nPhi = number
dR, dTheta, dPhi = delta
sR, sTheta, sPhi = start
eR = sR + (nR - 1) * dR
ePhi = sPhi + (nPhi - 1) * dPhi
eTheta = sTheta + (nTheta - 1) * dTheta
thetaGrid = np.linspace(sTheta, eTheta, num = nTheta)
phiGrid = np.linspace(sPhi, ePhi, num = nPhi)
rGrid = np.linspace(sR, eR, num = nR)
thetaGrid = np.linspace(sTheta, eTheta, num=nTheta)
phiGrid = np.linspace(sPhi, ePhi, num=nPhi)
rGrid = np.linspace(sR, eR, num=nR)
return (thetaGrid, phiGrid, rGrid)
def addCheckerboard(spacing = 10., pertubation = 0.1, inputfile = 'vgrids.in',
outputfile = 'vgrids_cb.in', ampmethod = 'linear', rect = (None, None)):
def addCheckerboard(spacing=10., pertubation=0.1, inputfile='vgrids.in',
outputfile='vgrids_cb.in', ampmethod='linear', rect=(None, None)):
'''
Add a checkerboard to an existing vgrids.in velocity model.
@ -261,13 +278,14 @@ def addCheckerboard(spacing = 10., pertubation = 0.1, inputfile = 'vgrids.in',
:param: pertubation, pertubation (default: 0.1 = 10%)
type: float
'''
def correctSpacing(spacing, delta, disttype = None):
def correctSpacing(spacing, delta, disttype=None):
if spacing > delta:
spacing_corr = round(spacing / delta) * delta
elif spacing < delta:
spacing_corr = delta
print('The spacing of the checkerboard of %s (%s) was corrected to '
'a value of %s to fit the grid spacing of %s.' %(spacing, disttype, spacing_corr, delta))
'a value of %s to fit the grid spacing of %s.' % (spacing, disttype, spacing_corr, delta))
return spacing_corr
def linearAmp(InCell):
@ -282,7 +300,7 @@ def addCheckerboard(spacing = 10., pertubation = 0.1, inputfile = 'vgrids.in',
else:
return 0
def ampFunc(InCell, method = 'linear', rect = None):
def ampFunc(InCell, method='linear', rect=None):
if method == 'linear':
return linearAmp(InCell)
if method == 'rect' and rect is not None:
@ -290,7 +308,7 @@ def addCheckerboard(spacing = 10., pertubation = 0.1, inputfile = 'vgrids.in',
else:
print('ampFunc: Could not amplify cb pattern')
decm = 0.3 # diagonal elements of the covariance matrix (grid3dg's default value is 0.3)
decm = 0.3 # diagonal elements of the covariance matrix (grid3dg's default value is 0.3)
outfile = open(outputfile, 'w')
number, delta, start, vel = _readVgrid(inputfile)
@ -298,16 +316,16 @@ def addCheckerboard(spacing = 10., pertubation = 0.1, inputfile = 'vgrids.in',
nR, nTheta, nPhi = number
dR, dTheta, dPhi = delta
sR, sTheta, sPhi = start
thetaGrid, phiGrid, rGrid = _generateGrids(number, delta, start)
nPoints = nR * nTheta * nPhi
# write header for velocity grid file (in RADIANS)
outfile.writelines('%10s %10s \n' %(1, 1))
outfile.writelines('%10s %10s %10s\n' %(nR, nTheta, nPhi))
outfile.writelines('%10s %10s %10s\n' %(dR, np.deg2rad(dTheta), np.deg2rad(dPhi)))
outfile.writelines('%10s %10s %10s\n' %(sR, np.deg2rad(sTheta), np.deg2rad(sPhi)))
outfile.writelines('%10s %10s \n' % (1, 1))
outfile.writelines('%10s %10s %10s\n' % (nR, nTheta, nPhi))
outfile.writelines('%10s %10s %10s\n' % (dR, np.deg2rad(dTheta), np.deg2rad(dPhi)))
outfile.writelines('%10s %10s %10s\n' % (sR, np.deg2rad(sTheta), np.deg2rad(sPhi)))
spacR = correctSpacing(spacing, dR, '[meter], R')
spacTheta = correctSpacing(_getAngle(spacing), dTheta, '[degree], Theta')
@ -315,7 +333,8 @@ def addCheckerboard(spacing = 10., pertubation = 0.1, inputfile = 'vgrids.in',
count = 0
evenOdd = 1
even = 0; odd = 0
even = 0;
odd = 0
# In the following loop it is checked whether the positive distance from the border of the model
# for a point on the grid divided by the spacing is even or odd and then pertubated.
@ -326,21 +345,21 @@ def addCheckerboard(spacing = 10., pertubation = 0.1, inputfile = 'vgrids.in',
# The amplification factor ampFactor comes from a linear relationship and ranges between 0 (cell border)
# and 1 (cell middle)
for radius in rGrid:
rInCell = (radius - sR - dR/2) / spacR
rInCell = (radius - sR - dR / 2) / spacR
ampR = ampFunc(rInCell, ampmethod, rect)
if np.floor(rInCell) % 2:
evenOddR = 1
else:
evenOddR = -1
for theta in thetaGrid:
thetaInCell = (theta - sTheta - dTheta/2) / spacTheta
thetaInCell = (theta - sTheta - dTheta / 2) / spacTheta
ampTheta = ampFunc(thetaInCell, ampmethod, rect)
if np.floor(thetaInCell) % 2:
evenOddT = 1
else:
evenOddT = -1
for phi in phiGrid:
phiInCell = (phi - sPhi - dPhi/2) / spacPhi
phiInCell = (phi - sPhi - dPhi / 2) / spacPhi
ampPhi = ampFunc(phiInCell, ampmethod, rect)
if np.floor(phiInCell) % 2:
evenOddP = 1
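The checkerboard logic above reduces to three ingredients per axis: the position of a grid node inside its checker cell, an alternating sign from the cell index, and an amplitude taper. A one-dimensional sketch; the exact taper is an assumption based only on the comment that it runs from 0 at the cell border to 1 in the cell middle:

import numpy as np

def checker_sign_and_amp(coord, start, delta, spacing):
    # normalised position of the grid node inside its checker cell
    in_cell = (coord - start - delta / 2.) / spacing
    sign = 1 if np.floor(in_cell) % 2 else -1
    # assumed linear taper: 0 at the cell border, 1 in the cell middle
    amp = 1. - 2. * abs(in_cell - np.floor(in_cell) - 0.5)
    return sign, amp

v, pert = 5.0, 0.1   # background velocity [km/s], 10 % perturbation
sign, amp = checker_sign_and_amp(coord=3.2, start=0., delta=0.1, spacing=1.)
print(v + sign * pert * amp * v)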
@ -351,19 +370,20 @@ def addCheckerboard(spacing = 10., pertubation = 0.1, inputfile = 'vgrids.in',
evenOdd = evenOddR * evenOddT * evenOddP * ampFactor
velocity += evenOdd * pertubation * velocity
outfile.writelines('%10s %10s\n'%(velocity, decm))
outfile.writelines('%10s %10s\n' % (velocity, decm))
count += 1
progress = float(count) / float(nPoints) * 100
_update_progress(progress)
print('Added checkerboard to the grid in file %s with a spacing of %s and a pertubation of %s %%. '
'Outputfile: %s.'%(inputfile, spacing, pertubation*100, outputfile))
'Outputfile: %s.' % (inputfile, spacing, pertubation * 100, outputfile))
outfile.close()
def addBox(x = (None, None), y = (None, None), z = (None, None),
boxvelocity = 1.0, inputfile = 'vgrids.in',
outputfile = 'vgrids_box.in'):
def addBox(x=(None, None), y=(None, None), z=(None, None),
boxvelocity=1.0, inputfile='vgrids.in',
outputfile='vgrids_box.in'):
'''
Add a box with constant velocity to an existing vgrids.in velocity model.
@ -380,7 +400,7 @@ def addBox(x = (None, None), y = (None, None), z = (None, None),
type: float
'''
R = 6371.
decm = 0.3 # diagonal elements of the covariance matrix (grid3dg's default value is 0.3)
decm = 0.3 # diagonal elements of the covariance matrix (grid3dg's default value is 0.3)
outfile = open(outputfile, 'w')
theta1 = _getAngle(y[0])
@ -392,23 +412,23 @@ def addBox(x = (None, None), y = (None, None), z = (None, None),
print('Adding box to grid with theta = (%s, %s), phi = (%s, %s), '
'r = (%s, %s), velocity = %s [km/s]'
%(theta1, theta2, phi1, phi2, r1, r2, boxvelocity))
% (theta1, theta2, phi1, phi2, r1, r2, boxvelocity))
number, delta, start, vel = _readVgrid(inputfile)
nR, nTheta, nPhi = number
dR, dTheta, dPhi = delta
sR, sTheta, sPhi = start
thetaGrid, phiGrid, rGrid = _generateGrids(number, delta, start)
nPoints = nR * nTheta * nPhi
# write header for velocity grid file (in RADIANS)
outfile.writelines('%10s %10s \n' %(1, 1))
outfile.writelines('%10s %10s %10s\n' %(nR, nTheta, nPhi))
outfile.writelines('%10s %10s %10s\n' %(dR, np.deg2rad(dTheta), np.deg2rad(dPhi)))
outfile.writelines('%10s %10s %10s\n' %(sR, np.deg2rad(sTheta), np.deg2rad(sPhi)))
outfile.writelines('%10s %10s \n' % (1, 1))
outfile.writelines('%10s %10s %10s\n' % (nR, nTheta, nPhi))
outfile.writelines('%10s %10s %10s\n' % (dR, np.deg2rad(dTheta), np.deg2rad(dPhi)))
outfile.writelines('%10s %10s %10s\n' % (sR, np.deg2rad(sTheta), np.deg2rad(sPhi)))
count = 0
for radius in rGrid:
@ -430,20 +450,22 @@ def addBox(x = (None, None), y = (None, None), z = (None, None),
if rFlag * thetaFlag * phiFlag is not 0:
velocity = boxvelocity
outfile.writelines('%10s %10s\n'%(velocity, decm))
outfile.writelines('%10s %10s\n' % (velocity, decm))
count += 1
progress = float(count) / float(nPoints) * 100
_update_progress(progress)
print('Added box to the grid in file %s. '
'Outputfile: %s.'%(inputfile, outputfile))
'Outputfile: %s.' % (inputfile, outputfile))
outfile.close()
def _update_progress(progress):
sys.stdout.write("%d%% done \r" % (progress) )
sys.stdout.write("%d%% done \r" % (progress))
sys.stdout.flush()
def _getAngle(distance):
'''
Function returns the angle on a Sphere of the radius R = 6371 [km] for a distance [km].
@ -453,9 +475,9 @@ def _getAngle(distance):
angle = distance * 180. / (PI * R)
return angle
def _getDistance(angle):
PI = np.pi
R = 6371.
distance = angle / 180 * (PI * R)
return distance
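Both helpers implement the arc-length relation on a sphere of radius R = 6371 km, distance = angle * pi * R / 180, and its inverse. A worked check:

import numpy as np

R = 6371.                                   # earth radius [km]

def angle_from_distance(distance):
    return distance * 180. / (np.pi * R)    # [km] -> [deg]

def distance_from_angle(angle):
    return angle / 180. * (np.pi * R)       # [deg] -> [km]

print(distance_from_angle(1.))                         # ~111.19 km per degree
print(angle_from_distance(distance_from_angle(1.)))    # round trip -> 1.0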

View File

@ -3,6 +3,7 @@ import sys
import numpy as np
from scipy.interpolate import griddata
class SeisArray(object):
'''
Can be used to interpolate missing values of a receiver grid, if only support points were measured.
@ -15,6 +16,7 @@ class SeisArray(object):
Supports vtk output for sources and receivers.
Note: Source and Receiver files for FMTOMO will be generated by the Survey object (because traveltimes will be added directly).
'''
def __init__(self, recfile):
self.recfile = recfile
self._receiverlines = {}
@ -35,7 +37,7 @@ class SeisArray(object):
'''
for receiver in self._receiverlist:
traceID = int(receiver.split()[0])
lineID = int(receiver.split()[1])
lineID = int(receiver.split()[1])
if not lineID in self._receiverlines.keys():
self._receiverlines[lineID] = []
self._receiverlines[lineID].append(traceID)
@ -132,7 +134,7 @@ class SeisArray(object):
if traceID2 < traceID1:
direction = -1
return direction
print "Error: Same Value for traceID1 = %s and traceID2 = %s" %(traceID1, traceID2)
print "Error: Same Value for traceID1 = %s and traceID2 = %s" % (traceID1, traceID2)
def _checkCoordDirection(self, traceID1, traceID2, coordinate):
'''
@ -144,14 +146,15 @@ class SeisArray(object):
if self._getReceiverValue(traceID1, coordinate) > self._getReceiverValue(traceID2, coordinate):
direction = -1
return direction
print "Error: Same Value for traceID1 = %s and traceID2 = %s" %(traceID1, traceID2)
print "Error: Same Value for traceID1 = %s and traceID2 = %s" % (traceID1, traceID2)
def _interpolateMeanDistances(self, traceID1, traceID2, coordinate):
'''
Returns the mean distance between two traceID's depending on the number of geophones in between
'''
num_spaces = abs(self._getGeophoneNumber(traceID1) - self._getGeophoneNumber(traceID2))
mean_distance = abs(self._getReceiverValue(traceID1, coordinate) - self._getReceiverValue(traceID2, coordinate))/num_spaces
mean_distance = abs(
self._getReceiverValue(traceID1, coordinate) - self._getReceiverValue(traceID2, coordinate)) / num_spaces
return mean_distance
def interpolateValues(self, coordinate):
@ -159,22 +162,22 @@ class SeisArray(object):
Interpolates and sets all values (linear) for coordinate = 'X', 'Y' or 'Z'
'''
for lineID in self._getReceiverlines().keys():
number_measured = len(self._getReceiverlines()[lineID])
for index, traceID1 in enumerate(self._getReceiverlines()[lineID]):
if index + 1 < number_measured:
traceID2 = self._getReceiverlines()[lineID][index + 1]
number_measured = len(self._getReceiverlines()[lineID])
for index, traceID1 in enumerate(self._getReceiverlines()[lineID]):
if index + 1 < number_measured:
traceID2 = self._getReceiverlines()[lineID][index + 1]
traceID_dir = self._checkTraceIDdirection(traceID1, traceID2)
traceID_interp = traceID1 + traceID_dir
traceID_dir = self._checkTraceIDdirection(traceID1, traceID2)
traceID_interp = traceID1 + traceID_dir
coord_dir = self._checkCoordDirection(traceID1, traceID2, coordinate)
mean_distance = self._interpolateMeanDistances(traceID1, traceID2, coordinate)
coord_dir = self._checkCoordDirection(traceID1, traceID2, coordinate)
mean_distance = self._interpolateMeanDistances(traceID1, traceID2, coordinate)
while (traceID_dir * traceID_interp) < (traceID_dir * traceID2):
self._setValue(traceID_interp, coordinate,
(self._getReceiverValue(traceID_interp - traceID_dir, coordinate)
+ coord_dir * (mean_distance)))
traceID_interp += traceID_dir
while (traceID_dir * traceID_interp) < (traceID_dir * traceID2):
self._setValue(traceID_interp, coordinate,
(self._getReceiverValue(traceID_interp - traceID_dir, coordinate)
+ coord_dir * (mean_distance)))
traceID_interp += traceID_dir
def addMeasuredTopographyPoints(self, filename):
'''
@ -206,7 +209,7 @@ class SeisArray(object):
z = float(line[3])
self._sourceLocs[pointID] = (x, y, z)
def interpZcoords4rec(self, method = 'linear'):
def interpZcoords4rec(self, method='linear'):
'''
Interpolates z values for all receivers.
'''
@ -214,7 +217,8 @@ class SeisArray(object):
for traceID in self.getReceiverCoordinates().keys():
if type(self.getReceiverCoordinates()[traceID]) is not tuple:
z = griddata((measured_x, measured_y), measured_z, (self._getXreceiver(traceID), self._getYreceiver(traceID)), method = method)
z = griddata((measured_x, measured_y), measured_z,
(self._getXreceiver(traceID), self._getYreceiver(traceID)), method=method)
self._setZvalue(traceID, float(z))
def _getAngle(self, distance):
@ -239,7 +243,9 @@ class SeisArray(object):
'''
Returns a list of all measured receivers known to SeisArray.
'''
x = []; y = []; z = []
x = [];
y = [];
z = []
for traceID in self.getMeasuredReceivers().keys():
x.append(self.getMeasuredReceivers()[traceID][0])
y.append(self.getMeasuredReceivers()[traceID][1])
@ -250,7 +256,9 @@ class SeisArray(object):
'''
Returns a list of all measured topography points known to the SeisArray.
'''
x = []; y = []; z = []
x = [];
y = [];
z = []
for pointID in self.getMeasuredTopo().keys():
x.append(self.getMeasuredTopo()[pointID][0])
y.append(self.getMeasuredTopo()[pointID][1])
@ -261,7 +269,9 @@ class SeisArray(object):
'''
Returns a list of all measured source locations known to SeisArray.
'''
x = []; y = []; z = []
x = [];
y = [];
z = []
for pointID in self.getSourceLocations().keys():
x.append(self.getSourceLocations()[pointID][0])
y.append(self.getSourceLocations()[pointID][1])
@ -285,7 +295,9 @@ class SeisArray(object):
'''
Returns a list of all receivers (measured and interpolated).
'''
x = []; y =[]; z = []
x = [];
y = [];
z = []
for traceID in self.getReceiverCoordinates().keys():
x.append(self.getReceiverCoordinates()[traceID][0])
y.append(self.getReceiverCoordinates()[traceID][1])
@ -303,7 +315,7 @@ class SeisArray(object):
self._interpolateXY4rec()
self.interpZcoords4rec()
def interpolateTopography(self, nTheta, nPhi, thetaSN, phiWE, elevation = 0.25, method = 'linear'):
def interpolateTopography(self, nTheta, nPhi, thetaSN, phiWE, elevation=0.25, method='linear'):
'''
Interpolate Z values on a regular grid with cushion nodes e.g. to use it as FMTOMO topography interface.
Returns a surface in form of a list of points [[x1, y1, z1], [x2, y2, y2], ...] (cartesian).
@ -325,7 +337,7 @@ class SeisArray(object):
'''
return self.interpolateOnRegularGrid(nTheta, nPhi, thetaSN, phiWE, elevation, method)
def interpolateOnRegularGrid(self, nTheta, nPhi, thetaSN, phiWE, elevation, method = 'linear'):
def interpolateOnRegularGrid(self, nTheta, nPhi, thetaSN, phiWE, elevation, method='linear'):
'''
Interpolate Z values on a regular grid with cushion nodes e.g. to use it as FMTOMO topography interface.
Returns a surface in form of a list of points [[x1, y1, z1], [x2, y2, y2], ...] (cartesian).
@ -349,8 +361,8 @@ class SeisArray(object):
surface = []
print "Interpolating interface on regular grid with the dimensions:"
print "nTheta = %s, nPhi = %s, thetaSN = %s, phiWE = %s"%(nTheta, nPhi, thetaSN, phiWE)
print "method = %s, elevation = %s" %(method, elevation)
print "nTheta = %s, nPhi = %s, thetaSN = %s, phiWE = %s" % (nTheta, nPhi, thetaSN, phiWE)
print "method = %s, elevation = %s" % (method, elevation)
thetaS, thetaN = thetaSN
phiW, phiE = phiWE
@ -361,18 +373,19 @@ class SeisArray(object):
deltaTheta = (thetaN - thetaS) / (nTheta - 1)
deltaPhi = (phiE - phiW) / (nPhi - 1)
thetaGrid = np.linspace(thetaS - deltaTheta, thetaN + deltaTheta, num = nTheta + 2) # +2 cushion nodes
phiGrid = np.linspace(phiW - deltaPhi, phiE + deltaPhi, num = nPhi + 2) # +2 cushion nodes
thetaGrid = np.linspace(thetaS - deltaTheta, thetaN + deltaTheta, num=nTheta + 2) # +2 cushion nodes
phiGrid = np.linspace(phiW - deltaPhi, phiE + deltaPhi, num=nPhi + 2) # +2 cushion nodes
nTotal = len(thetaGrid) * len(phiGrid); count = 0
nTotal = len(thetaGrid) * len(phiGrid);
count = 0
for theta in thetaGrid:
for phi in phiGrid:
xval = self._getDistance(phi)
yval = self._getDistance(theta)
z = griddata((measured_x, measured_y), measured_z, (xval, yval), method = method)
z = griddata((measured_x, measured_y), measured_z, (xval, yval), method=method)
# in case the point lies outside, nan will be returned. Find nearest:
if np.isnan(z) == True:
z = griddata((measured_x, measured_y), measured_z, (xval, yval), method = 'nearest')
z = griddata((measured_x, measured_y), measured_z, (xval, yval), method='nearest')
z = float(z) + elevation
surface.append((xval, yval, z))
count += 1
@ -382,8 +395,8 @@ class SeisArray(object):
return surface
def generateFMTOMOinputFromArray(self, nPointsPropgrid, nPointsInvgrid,
zBotTop, cushionfactor, interpolationMethod = 'linear',
customgrid = 'mygrid.in', writeVTK = True):
zBotTop, cushionfactor, interpolationMethod='linear',
customgrid='mygrid.in', writeVTK=True):
'''
Generate FMTOMO input files from the SeisArray dimensions.
Generates: vgrids.in, interfaces.in, propgrid.in
@ -401,15 +414,15 @@ class SeisArray(object):
:type: float
'''
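# Illustrative call (editor's sketch; the values below are hypothetical and only
# demonstrate the expected argument structure):
#   array.generateFMTOMOinputFromArray(nPointsPropgrid=(60, 80, 80),
#                                      nPointsInvgrid=(20, 40, 40),
#                                      zBotTop=(-25.0, 0.5), cushionfactor=0.1,
#                                      interpolationMethod='linear',
#                                      customgrid='mygrid.in', writeVTK=True)
# This writes vgrids.in, interfaces.in and propgrid.in and, if writeVTK is True,
# additional VTK files for visual inspection.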
nRP, nThetaP, nPhiP = nPointsPropgrid
nRP, nThetaP, nPhiP = nPointsPropgrid
nRI, nThetaI, nPhiI = nPointsInvgrid
print('\n------------------------------------------------------------')
print('Automatically generating input for FMTOMO from array size.')
print('Propgrid: nR = %s, nTheta = %s, nPhi = %s'%(nRP, nThetaP, nPhiP))
print('Interpolation Grid and Interfaces Grid: nR = %s, nTheta = %s, nPhi = %s'%(nRI, nThetaI, nPhiI))
print('Bottom and Top of model: (%s, %s)'%(zBotTop[0], zBotTop[1]))
print('Method: %s, customgrid = %s'%(interpolationMethod, customgrid))
print('Propgrid: nR = %s, nTheta = %s, nPhi = %s' % (nRP, nThetaP, nPhiP))
print('Interpolation Grid and Interfaces Grid: nR = %s, nTheta = %s, nPhi = %s' % (nRI, nThetaI, nPhiI))
print('Bottom and Top of model: (%s, %s)' % (zBotTop[0], zBotTop[1]))
print('Method: %s, customgrid = %s' % (interpolationMethod, customgrid))
print('------------------------------------------------------------')
def getZmin(surface):
@ -418,31 +431,31 @@ class SeisArray(object):
z.append(point[2])
return min(z)
self.generatePropgrid(nThetaP, nPhiP, nRP, zBotTop, cushionfactor = cushionfactor,
cushionpropgrid = 0.05)
surface = self.generateVgrid(nThetaI, nPhiI, nRI, zBotTop, method = interpolationMethod,
cushionfactor = cushionfactor, infilename = customgrid,
returnTopo = True)
self.generatePropgrid(nThetaP, nPhiP, nRP, zBotTop, cushionfactor=cushionfactor,
cushionpropgrid=0.05)
surface = self.generateVgrid(nThetaI, nPhiI, nRI, zBotTop, method=interpolationMethod,
cushionfactor=cushionfactor, infilename=customgrid,
returnTopo=True)
depthmax = abs(zBotTop[0] - getZmin(surface)) - 1.0 # cushioning for the bottom interface
depthmax = abs(zBotTop[0] - getZmin(surface)) - 1.0 # cushioning for the bottom interface
interf1, interf2 = self.generateInterfaces(nThetaI, nPhiI, depthmax, cushionfactor = cushionfactor,
returnInterfaces = True, method = interpolationMethod)
interf1, interf2 = self.generateInterfaces(nThetaI, nPhiI, depthmax, cushionfactor=cushionfactor,
returnInterfaces=True, method=interpolationMethod)
if writeVTK == True:
from pylot.core.active import fmtomoUtils
self.surface2VTK(interf1, filename = 'interface1.vtk')
self.surface2VTK(interf2, filename = 'interface2.vtk')
self.surface2VTK(interf1, filename='interface1.vtk')
self.surface2VTK(interf2, filename='interface2.vtk')
self.receivers2VTK()
self.sources2VTK()
fmtomoUtils.vgrids2VTK()
def generateReceiversIn(self, outfilename = 'receivers.in'):
def generateReceiversIn(self, outfilename='receivers.in'):
outfile = open(outfilename, 'w')
recx, recy, recz = self.getReceiverLists()
nsrc = len(self.getSourceLocations())
outfile.writelines('%s\n'%(len(zip(recx, recy, recz)) * nsrc))
outfile.writelines('%s\n' % (len(zip(recx, recy, recz)) * nsrc))
for index in range(nsrc):
for point in zip(recx, recy, recz):
@ -450,17 +463,16 @@ class SeisArray(object):
rad = - rz
lat = self._getAngle(ry)
lon = self._getAngle(rx)
outfile.writelines('%15s %15s %15s\n'%(rad, lat, lon))
outfile.writelines('%15s\n'%(1))
outfile.writelines('%15s\n'%(index + 1))
outfile.writelines('%15s\n'%(1))
outfile.writelines('%15s %15s %15s\n' % (rad, lat, lon))
outfile.writelines('%15s\n' % (1))
outfile.writelines('%15s\n' % (index + 1))
outfile.writelines('%15s\n' % (1))
outfile.close()
def generateInterfaces(self, nTheta, nPhi, depthmax, cushionfactor = 0.1,
outfilename = 'interfaces.in', method = 'linear',
returnInterfaces = False):
def generateInterfaces(self, nTheta, nPhi, depthmax, cushionfactor=0.1,
outfilename='interfaces.in', method='linear',
returnInterfaces=False):
'''
Create an interfaces.in file for FMTOMO from the SeisArray boundaries.
:param: nTheta, number of points in Theta
@ -470,7 +482,7 @@ class SeisArray(object):
type: int
:param: depthmax, maximum depth of the model (below topography)
type: float
type: float
:param: cushionfactor, add some extra space to the model (default: 0.1 = 10%)
type: float
@ -478,7 +490,7 @@ class SeisArray(object):
print('\n------------------------------------------------------------')
print('Generating interfaces...')
nInterfaces = 2
nInterfaces = 2
# generate dimensions of the grid from array
thetaSN, phiWE = self.getThetaPhiFromArray(cushionfactor)
@ -494,22 +506,22 @@ class SeisArray(object):
deltaPhi = abs(phiE - phiW) / float((nPhi - 1))
# write header for interfaces grid file (in RADIANS)
outfile.writelines('%10s\n' %(nInterfaces))
outfile.writelines('%10s %10s\n' %(nTheta + 2, nPhi + 2)) # +2 cushion nodes
outfile.writelines('%10s %10s\n' %(np.deg2rad(deltaTheta), np.deg2rad(deltaPhi)))
outfile.writelines('%10s %10s\n' %(np.deg2rad(thetaS - deltaTheta), np.deg2rad(phiW - deltaPhi)))
outfile.writelines('%10s\n' % (nInterfaces))
outfile.writelines('%10s %10s\n' % (nTheta + 2, nPhi + 2)) # +2 cushion nodes
outfile.writelines('%10s %10s\n' % (np.deg2rad(deltaTheta), np.deg2rad(deltaPhi)))
outfile.writelines('%10s %10s\n' % (np.deg2rad(thetaS - deltaTheta), np.deg2rad(phiW - deltaPhi)))
interface1 = self.interpolateTopography(nTheta, nPhi, thetaSN, phiWE, method = method)
interface2 = self.interpolateOnRegularGrid(nTheta, nPhi, thetaSN, phiWE, -depthmax, method = method)
interface1 = self.interpolateTopography(nTheta, nPhi, thetaSN, phiWE, method=method)
interface2 = self.interpolateOnRegularGrid(nTheta, nPhi, thetaSN, phiWE, -depthmax, method=method)
for point in interface1:
z = point[2]
outfile.writelines('%10s\n'%(z + R))
outfile.writelines('%10s\n' % (z + R))
outfile.writelines('\n')
for point in interface2:
z = point[2]
outfile.writelines('%10s\n'%(z + R))
outfile.writelines('%10s\n' % (z + R))
outfile.close()
@ -519,10 +531,10 @@ class SeisArray(object):
print('Finished generating interfaces.')
print('------------------------------------------------------------')
def getThetaPhiFromArray(self, cushionfactor = 0.1):
def getThetaPhiFromArray(self, cushionfactor=0.1):
'''
Determines and returns phiWE (tuple: (West, East)) and thetaSN (tuple: (South, North)) from the SeisArray boundaries.
:param: cushionfactor, add some extra space to the model (default: 0.1 = 10%)
type: float
'''
@ -535,8 +547,8 @@ class SeisArray(object):
thetaSN = (theta_min - cushionTheta, theta_max + cushionTheta)
return thetaSN, phiWE
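# (editor's note) With the default cushionfactor of 0.1 the theta and phi ranges
# are widened on both sides; presumably about 10% of the array extent is added to
# each boundary before the grids are built (the cushion computation itself is in
# lines not shown in this hunk).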
def generatePropgrid(self, nTheta, nPhi, nR, Rbt, cushionfactor, cushionpropgrid = 0.05,
refinement = (5, 5), outfilename = 'propgrid.in'):
def generatePropgrid(self, nTheta, nPhi, nR, Rbt, cushionfactor, cushionpropgrid=0.05,
refinement=(5, 5), outfilename='propgrid.in'):
'''
Create a propagation grid file for FMTOMO using the SeisArray boundaries
@ -566,10 +578,10 @@ class SeisArray(object):
print('\n------------------------------------------------------------')
print('Generating Propagation Grid for nTheta = %s, nPhi'
' = %s, nR = %s and a cushioning of %s'
%(nTheta, nPhi, nR, cushionpropgrid))
print('Bottom of the grid: %s, top of the grid %s'
%(Rbt[0], Rbt[1]))
' = %s, nR = %s and a cushioning of %s'
% (nTheta, nPhi, nR, cushionpropgrid))
print('Bottom of the grid: %s, top of the grid %s'
% (Rbt[0], Rbt[1]))
thetaSN, phiWE = self.getThetaPhiFromArray(cushionfactor)
@ -584,20 +596,20 @@ class SeisArray(object):
deltaPhi = abs(phiE - phiW) / float(nPhi - 1)
deltaR = abs(rbot - rtop) / float(nR - 1)
outfile.writelines('%10s %10s %10s\n' %(nR, nTheta, nPhi))
outfile.writelines('%10s %10s %10s\n' %(deltaR, deltaTheta, deltaPhi))
outfile.writelines('%10s %10s %10s\n' %(rtop, thetaS, phiW))
outfile.writelines('%10s %10s\n' %refinement)
outfile.writelines('%10s %10s %10s\n' % (nR, nTheta, nPhi))
outfile.writelines('%10s %10s %10s\n' % (deltaR, deltaTheta, deltaPhi))
outfile.writelines('%10s %10s %10s\n' % (rtop, thetaS, phiW))
outfile.writelines('%10s %10s\n' % refinement)
outfile.close()
print('Created Propagation Grid and saved it to %s' %outfilename)
print('Created Propagation Grid and saved it to %s' % outfilename)
print('------------------------------------------------------------')
def generateVgrid(self, nTheta, nPhi, nR, Rbt, thetaSN = None,
phiWE = None, cushionfactor = 0.1,
outfilename = 'vgrids.in', method = 'linear',
infilename = 'mygrid.in', returnTopo = False):
def generateVgrid(self, nTheta, nPhi, nR, Rbt, thetaSN=None,
phiWE=None, cushionfactor=0.1,
outfilename='vgrids.in', method='linear',
infilename='mygrid.in', returnTopo=False):
'''
Generate a velocity grid for FMTOMO that accounts for topography, with a linear velocity gradient starting at the topography at 0.34 [km/s].
@ -641,11 +653,14 @@ class SeisArray(object):
return nlayers
def readMygrid(filename):
ztop = []; zbot = []; vtop = []; vbot = []
ztop = [];
zbot = [];
vtop = [];
vbot = []
infile = open(filename, 'r')
nlayers = readMygridNlayers(filename)
print('\nreadMygrid: Reading file %s.'%filename)
print('\nreadMygrid: Reading file %s.' % filename)
for index in range(nlayers):
line1 = infile.readline()
line2 = infile.readline()
@ -655,11 +670,11 @@ class SeisArray(object):
vbot.append(float(line2.split()[1]))
print('Layer %s:\n[Top: v = %s [km/s], z = %s [m]]'
'\n[Bot: v = %s [km/s], z = %s [m]]'
%(index + 1, vtop[index], ztop[index],
vbot[index], zbot[index]))
% (index + 1, vtop[index], ztop[index],
vbot[index], zbot[index]))
if not ztop[0] == 0:
print('ERROR: there must be a velocity set for z = 0 in the file %s'%filename)
print('ERROR: there must be a velocity set for z = 0 in the file %s' % filename)
print('e.g.:\n0 0.33\n-5 1.0\netc.')
infile.close()
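# Example layout of a 'mygrid.in' velocity file as read by readMygrid (editor's
# sketch; the second layer is hypothetical). Each layer presumably takes two lines,
# giving depth [m] and velocity [km/s] at its top and bottom, and the first layer
# must start at z = 0, e.g.:
#   0    0.33
#   -5   1.0
#   -5   1.0
#   -15  2.5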
@ -667,7 +682,7 @@ class SeisArray(object):
R = 6371.
vmin = 0.34
decm = 0.3 # diagonal elements of the covariance matrix (grid3dg's default value is 0.3)
decm = 0.3 # diagonal elements of the covariance matrix (grid3dg's default value is 0.3)
outfile = open(outfilename, 'w')
# generate dimensions of the grid from array
@ -685,28 +700,29 @@ class SeisArray(object):
deltaR = abs(rbot - rtop) / float((nR - 1))
# create a regular grid including +2 cushion nodes in every direction
thetaGrid = np.linspace(thetaS - deltaTheta, thetaN + deltaTheta, num = nTheta + 2) # +2 cushion nodes
phiGrid = np.linspace(phiW - deltaPhi, phiE + deltaPhi, num = nPhi + 2) # +2 cushion nodes
rGrid = np.linspace(rbot - deltaR, rtop + deltaR, num = nR + 2) # +2 cushion nodes
thetaGrid = np.linspace(thetaS - deltaTheta, thetaN + deltaTheta, num=nTheta + 2) # +2 cushion nodes
phiGrid = np.linspace(phiW - deltaPhi, phiE + deltaPhi, num=nPhi + 2) # +2 cushion nodes
rGrid = np.linspace(rbot - deltaR, rtop + deltaR, num=nR + 2) # +2 cushion nodes
nTotal = len(rGrid) * len(thetaGrid) * len(phiGrid)
print("Total number of grid nodes: %s"%nTotal)
print("Total number of grid nodes: %s" % nTotal)
# write header for velocity grid file (in RADIANS)
outfile.writelines('%10s %10s \n' %(1, 1))
outfile.writelines('%10s %10s %10s\n' %(nR + 2, nTheta + 2, nPhi + 2))
outfile.writelines('%10s %10s %10s\n' %(deltaR, np.deg2rad(deltaTheta), np.deg2rad(deltaPhi)))
outfile.writelines('%10s %10s %10s\n' %(rbot - deltaR, np.deg2rad(thetaS - deltaTheta), np.deg2rad(phiW - deltaPhi)))
outfile.writelines('%10s %10s \n' % (1, 1))
outfile.writelines('%10s %10s %10s\n' % (nR + 2, nTheta + 2, nPhi + 2))
outfile.writelines('%10s %10s %10s\n' % (deltaR, np.deg2rad(deltaTheta), np.deg2rad(deltaPhi)))
outfile.writelines(
'%10s %10s %10s\n' % (rbot - deltaR, np.deg2rad(thetaS - deltaTheta), np.deg2rad(phiW - deltaPhi)))
surface = self.interpolateTopography(nTheta, nPhi, thetaSN, phiWE, method = method)
surface = self.interpolateTopography(nTheta, nPhi, thetaSN, phiWE, method=method)
nlayers = readMygridNlayers(infilename)
ztop, zbot, vtop, vbot = readMygrid(infilename)
print("\nGenerating velocity grid for FMTOMO. "
"Output filename = %s, interpolation method = %s"%(outfilename, method))
"Output filename = %s, interpolation method = %s" % (outfilename, method))
print("nTheta = %s, nPhi = %s, nR = %s, "
"thetaSN = %s, phiWE = %s, Rbt = %s"%(nTheta, nPhi, nR, thetaSN, phiWE, Rbt))
"thetaSN = %s, phiWE = %s, Rbt = %s" % (nTheta, nPhi, nR, thetaSN, phiWE, Rbt))
count = 0
for radius in rGrid:
@ -721,32 +737,36 @@ class SeisArray(object):
depth = -(R + topo - radius)
if depth > 1:
vel = 0.0
elif 1 >= depth > 0: # cushioning around topography
elif 1 >= depth > 0: # cushioning around topography
vel = vtop[0]
else:
for index in range(nlayers):
if (ztop[index]) >= depth > (zbot[index]):
vel = (depth - ztop[index]) / (zbot[index] - ztop[index]) * (vbot[index] - vtop[index]) + vtop[index]
vel = (depth - ztop[index]) / (zbot[index] - ztop[index]) * (
vbot[index] - vtop[index]) + vtop[index]
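# (editor's comment) linear interpolation of the velocity between the top and
# bottom of the layer containing the current depth:
#   vel = vtop + (depth - ztop) / (zbot - ztop) * (vbot - vtop)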
break
if not (ztop[index]) >= depth > (zbot[index]):
print('ERROR in grid inputfile, could not find velocity for a z-value of %s in the inputfile'%(depth-topo))
print(
'ERROR in grid inputfile, could not find velocity for a z-value of %s in the inputfile' % (
depth - topo))
return
count += 1
if vel < 0:
print('ERROR, vel <0; z, topo, zbot, vbot, vtop:', depth, topo, zbot[index], vbot[index], vtop[index])
outfile.writelines('%10s %10s\n'%(vel, decm))
print(
'ERROR, vel <0; z, topo, zbot, vbot, vtop:', depth, topo, zbot[index], vbot[index], vtop[index])
outfile.writelines('%10s %10s\n' % (vel, decm))
progress = float(count) / float(nTotal) * 100
self._update_progress(progress)
print('\nWrote %d points to file %s for %d layers'%(count, outfilename, nlayers))
print('\nWrote %d points to file %s for %d layers' % (count, outfilename, nlayers))
print('------------------------------------------------------------')
outfile.close()
if returnTopo == True:
return surface
def exportAll(self, filename = 'interpolated_receivers.out'):
def exportAll(self, filename='interpolated_receivers.out'):
'''
Exports all receivers to an input file for ActiveSeismoPick3D.
'''
@ -755,11 +775,11 @@ class SeisArray(object):
for traceID in self.getReceiverCoordinates().keys():
count += 1
x, y, z = self.getReceiverCoordinates()[traceID]
recfile_out.writelines('%5s %15s %15s %15s\n' %(traceID, x, y, z))
print "Exported coordinates for %s traces to file > %s" %(count, filename)
recfile_out.writelines('%5s %15s %15s %15s\n' % (traceID, x, y, z))
print "Exported coordinates for %s traces to file > %s" % (count, filename)
recfile_out.close()
def plotArray2D(self, plot_topo = False, highlight_measured = False, annotations = True, pointsize = 10):
def plotArray2D(self, plot_topo=False, highlight_measured=False, annotations=True, pointsize=10):
import matplotlib.pyplot as plt
plt.interactive(True)
fig = plt.figure()
@ -770,36 +790,36 @@ class SeisArray(object):
xrc, yrc, zrc = self.getReceiverLists()
if len(xrc) > 0:
ax.plot(xrc, yrc, 'k.', markersize = pointsize, label = 'all receivers')
ax.plot(xrc, yrc, 'k.', markersize=pointsize, label='all receivers')
if len(xsc) > 0:
ax.plot(xsc, ysc, 'b*', markersize = pointsize, label = 'shot locations')
ax.plot(xsc, ysc, 'b*', markersize=pointsize, label='shot locations')
if plot_topo == True:
ax.plot(xmt, ymt, 'b.', markersize = pointsize, label = 'measured topo points')
ax.plot(xmt, ymt, 'b.', markersize=pointsize, label='measured topo points')
if highlight_measured == True:
ax.plot(xmr, ymr, 'r.', markersize = pointsize, label = 'measured receivers')
ax.plot(xmr, ymr, 'r.', markersize=pointsize, label='measured receivers')
plt.title('2D plot of seismic array %s'%self.recfile)
plt.title('2D plot of seismic array %s' % self.recfile)
ax.set_xlabel('X [m]')
ax.set_ylabel('Y [m]')
ax.set_aspect('equal')
plt.legend()
if annotations == True:
for traceID in self.getReceiverCoordinates().keys():
ax.annotate((' ' + str(traceID)), xy = (self._getXreceiver(traceID), self._getYreceiver(traceID)), fontsize = 'x-small', color = 'k')
ax.annotate((' ' + str(traceID)), xy=(self._getXreceiver(traceID), self._getYreceiver(traceID)),
fontsize='x-small', color='k')
for shotnumber in self.getSourceLocations().keys():
ax.annotate((' ' + str(shotnumber)), xy = (self._getXshot(shotnumber), self._getYshot(shotnumber)), fontsize = 'x-small', color = 'b')
ax.annotate((' ' + str(shotnumber)), xy=(self._getXshot(shotnumber), self._getYshot(shotnumber)),
fontsize='x-small', color='b')
def plotArray3D(self, ax = None):
def plotArray3D(self, ax=None):
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
plt.interactive(True)
if ax == None:
fig = plt.figure()
ax = plt.axes(projection = '3d')
ax = plt.axes(projection='3d')
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
@ -808,20 +828,21 @@ class SeisArray(object):
plt.title('3D plot of seismic array.')
if len(xmt) > 0:
ax.plot(xmt, ymt, zmt, 'b.', markersize = 10, label = 'measured topo points')
ax.plot(xmt, ymt, zmt, 'b.', markersize=10, label='measured topo points')
if len(xrc) > 0:
ax.plot(xrc, yrc, zrc, 'k.', markersize = 10, label = 'all receivers')
ax.plot(xrc, yrc, zrc, 'k.', markersize=10, label='all receivers')
if len(xmr) > 0:
ax.plot(xmr, ymr, zmr, 'ro', label = 'measured receivers')
ax.plot(xmr, ymr, zmr, 'ro', label='measured receivers')
if len(xsc) > 0:
ax.plot(xsc, ysc, zsc, 'b*', label = 'shot locations')
ax.set_xlabel('X [m]'); ax.set_ylabel('Y [m]'); ax.set_zlabel('Z [m]')
ax.plot(xsc, ysc, zsc, 'b*', label='shot locations')
ax.set_xlabel('X [m]');
ax.set_ylabel('Y [m]');
ax.set_zlabel('Z [m]')
ax.legend()
return ax
def plotSurface3D(self, ax = None, step = 0.5, method = 'linear', exag = False):
def plotSurface3D(self, ax=None, step=0.5, method='linear', exag=False):
from matplotlib import cm
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
@ -829,7 +850,7 @@ class SeisArray(object):
if ax == None:
fig = plt.figure()
ax = plt.axes(projection = '3d')
ax = plt.axes(projection='3d')
xmt, ymt, zmt = self.getMeasuredTopoLists()
xmr, ymr, zmr = self.getMeasuredReceiverLists()
@ -838,31 +859,33 @@ class SeisArray(object):
y = ymt + ymr
z = zmt + zmr
xaxis = np.arange(min(x)+1, max(x), step)
yaxis = np.arange(min(y)+1, max(y), step)
xaxis = np.arange(min(x) + 1, max(x), step)
yaxis = np.arange(min(y) + 1, max(y), step)
xgrid, ygrid = np.meshgrid(xaxis, yaxis)
zgrid = griddata((x, y), z, (xgrid, ygrid), method = method)
zgrid = griddata((x, y), z, (xgrid, ygrid), method=method)
surf = ax.plot_surface(xgrid, ygrid, zgrid, linewidth = 0, cmap = cm.jet, vmin = min(z), vmax = max(z))
surf = ax.plot_surface(xgrid, ygrid, zgrid, linewidth=0, cmap=cm.jet, vmin=min(z), vmax=max(z))
cbar = plt.colorbar(surf)
cbar.set_label('Elevation [m]')
if exag == False:
ax.set_zlim(-(max(x) - min(x)/2),(max(x) - min(x)/2))
ax.set_zlim(-(max(x) - min(x) / 2), (max(x) - min(x) / 2))
ax.set_aspect('equal')
ax.set_xlabel('X [m]'); ax.set_ylabel('Y [m]'); ax.set_zlabel('Z [m]')
ax.set_xlabel('X [m]');
ax.set_ylabel('Y [m]');
ax.set_zlabel('Z [m]')
ax.legend()
return ax
def _update_progress(self, progress):
sys.stdout.write("%d%% done \r" % (progress) )
sys.stdout.write("%d%% done \r" % (progress))
sys.stdout.flush()
def surface2VTK(self, surface, filename = 'surface.vtk'):
def surface2VTK(self, surface, filename='surface.vtk'):
'''
Generates a vtk file from all points of a surface as generated by interpolateTopography.
'''
@ -876,7 +899,7 @@ class SeisArray(object):
outfile.writelines('Surface Points\n')
outfile.writelines('ASCII\n')
outfile.writelines('DATASET POLYDATA\n')
outfile.writelines('POINTS %15d float\n' %(nPoints))
outfile.writelines('POINTS %15d float\n' % (nPoints))
# write coordinates
print("Writing coordinates to VTK file...")
@ -885,14 +908,14 @@ class SeisArray(object):
y = point[1]
z = point[2]
outfile.writelines('%10f %10f %10f \n' %(x, y, z))
outfile.writelines('%10f %10f %10f \n' % (x, y, z))
outfile.writelines('VERTICES %15d %15d\n' %(nPoints, 2 * nPoints))
outfile.writelines('VERTICES %15d %15d\n' % (nPoints, 2 * nPoints))
# write indices
print("Writing indices to VTK file...")
for index in range(nPoints):
outfile.writelines('%10d %10d\n' %(1, index))
outfile.writelines('%10d %10d\n' % (1, index))
# outfile.writelines('POINT_DATA %15d\n' %(nPoints))
# outfile.writelines('SCALARS traceIDs int %d\n' %(1))
@ -904,10 +927,10 @@ class SeisArray(object):
# outfile.writelines('%10d\n' %traceID)
outfile.close()
print("Wrote %d points to file: %s" %(nPoints, filename))
print("Wrote %d points to file: %s" % (nPoints, filename))
return
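# (editor's note) The resulting file follows the legacy ASCII VTK POLYDATA layout
# sketched below; the "# vtk DataFile Version ..." header line is written by code
# outside this excerpt and is only assumed here:
#   Surface Points
#   ASCII
#   DATASET POLYDATA
#   POINTS <nPoints> float
#   <x> <y> <z>              (one line per point)
#   VERTICES <nPoints> <2*nPoints>
#   1 <index>                (one line per point)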
def receivers2VTK(self, filename = 'receivers.vtk'):
def receivers2VTK(self, filename='receivers.vtk'):
'''
Generates a vtk file from all receivers of the SeisArray object.
'''
@ -925,7 +948,7 @@ class SeisArray(object):
outfile.writelines('Receivers with traceIDs\n')
outfile.writelines('ASCII\n')
outfile.writelines('DATASET POLYDATA\n')
outfile.writelines('POINTS %15d float\n' %(nPoints))
outfile.writelines('POINTS %15d float\n' % (nPoints))
# write coordinates
print("Writing coordinates to VTK file...")
@ -934,29 +957,29 @@ class SeisArray(object):
y = self._getYreceiver(traceID)
z = self._getZreceiver(traceID)
outfile.writelines('%10f %10f %10f \n' %(x, y, z))
outfile.writelines('%10f %10f %10f \n' % (x, y, z))
outfile.writelines('VERTICES %15d %15d\n' %(nPoints, 2 * nPoints))
outfile.writelines('VERTICES %15d %15d\n' % (nPoints, 2 * nPoints))
# write indices
print("Writing indices to VTK file...")
for index in range(nPoints):
outfile.writelines('%10d %10d\n' %(1, index))
outfile.writelines('%10d %10d\n' % (1, index))
outfile.writelines('POINT_DATA %15d\n' %(nPoints))
outfile.writelines('SCALARS traceIDs int %d\n' %(1))
outfile.writelines('POINT_DATA %15d\n' % (nPoints))
outfile.writelines('SCALARS traceIDs int %d\n' % (1))
outfile.writelines('LOOKUP_TABLE default\n')
# write traceIDs
print("Writing traceIDs to VTK file...")
for traceID in traceIDs:
outfile.writelines('%10d\n' %traceID)
outfile.writelines('%10d\n' % traceID)
outfile.close()
print("Wrote %d receiver for to file: %s" %(nPoints, filename))
print("Wrote %d receiver for to file: %s" % (nPoints, filename))
return
def sources2VTK(self, filename = 'sources.vtk'):
def sources2VTK(self, filename='sources.vtk'):
'''
Generates a vtk-file for all source locations in the SeisArray object.
'''
@ -974,7 +997,7 @@ class SeisArray(object):
outfile.writelines('Shots with shotnumbers\n')
outfile.writelines('ASCII\n')
outfile.writelines('DATASET POLYDATA\n')
outfile.writelines('POINTS %15d float\n' %(nPoints))
outfile.writelines('POINTS %15d float\n' % (nPoints))
# write coordinates
print("Writing coordinates to VTK file...")
@ -983,35 +1006,34 @@ class SeisArray(object):
y = self._getYshot(shotnumber)
z = self._getZshot(shotnumber)
outfile.writelines('%10f %10f %10f \n' %(x, y, z))
outfile.writelines('%10f %10f %10f \n' % (x, y, z))
outfile.writelines('VERTICES %15d %15d\n' %(nPoints, 2 * nPoints))
outfile.writelines('VERTICES %15d %15d\n' % (nPoints, 2 * nPoints))
# write indices
print("Writing indices to VTK file...")
for index in range(nPoints):
outfile.writelines('%10d %10d\n' %(1, index))
outfile.writelines('%10d %10d\n' % (1, index))
outfile.writelines('POINT_DATA %15d\n' %(nPoints))
outfile.writelines('SCALARS shotnumbers int %d\n' %(1))
outfile.writelines('POINT_DATA %15d\n' % (nPoints))
outfile.writelines('SCALARS shotnumbers int %d\n' % (1))
outfile.writelines('LOOKUP_TABLE default\n')
# write shotnumber
print("Writing shotnumbers to VTK file...")
for shotnumber in shotnumbers:
outfile.writelines('%10d\n' %shotnumber)
outfile.writelines('%10d\n' % shotnumber)
outfile.close()
print("Wrote %d sources to file: %s" %(nPoints, filename))
print("Wrote %d sources to file: %s" % (nPoints, filename))
return
def saveSeisArray(self, filename = 'seisArray.pickle'):
def saveSeisArray(self, filename='seisArray.pickle'):
import cPickle
outfile = open(filename, 'wb')
cPickle.dump(self, outfile, -1)
print('saved SeisArray to file %s'%(filename))
print('saved SeisArray to file %s' % (filename))
@staticmethod
def from_pickle(filename):


@ -6,17 +6,20 @@ import numpy as np
from obspy.core import read
from obspy import Stream
from obspy import Trace
from pylot.core.pick.CharFuns import HOScf
from pylot.core.pick.CharFuns import AICcf
from pylot.core.pick.charfuns import HOScf
from pylot.core.pick.charfuns import AICcf
from pylot.core.pick.utils import getSNR
from pylot.core.pick.utils import earllatepicker
import matplotlib.pyplot as plt
plt.interactive('True')
class SeismicShot(object):
'''
SuperClass for a seismic shot object.
'''
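# Illustrative construction (editor's sketch; the file name is hypothetical):
#   shot = SeismicShot('2013_shot_001.dat')
# Given the obspy imports above, the input file is presumably read with obspy's
# read(), so any obspy-supported format (e.g. SEG2/SEGY) should work.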
def __init__(self, obsfile):
'''
Initialize a seismic shot object from an input file.
@ -29,8 +32,8 @@ class SeismicShot(object):
self.srcCoordlist = None
self.traceIDs = None
self.picks = {}
self.pwindow= {}
self.manualpicks= {}
self.pwindow = {}
self.manualpicks = {}
self.snr = {}
self.snrthreshold = {}
self.timeArray = {}
@ -61,10 +64,10 @@ class SeismicShot(object):
if traceID == trace.stats.channel:
self.traces.remove(trace)
# for traceID in TraceIDs:
# traces = [trace for trace in self.traces if int(trace.stats.channel) == traceID]
# if len(traces) is not 1:
# self.traces.remove(trace)
# for traceID in TraceIDs:
# traces = [trace for trace in self.traces if int(trace.stats.channel) == traceID]
# if len(traces) is not 1:
# self.traces.remove(trace)
def updateTraceList(self):
'''
@ -87,22 +90,22 @@ class SeismicShot(object):
self.setParameters('tmovwind', tmovwind)
def setOrder(self, order):
self.setParameters('order', order)
self.setParameters('order', order)
def setTsignal(self, tsignal):
self.setParameters('tsignal', tsignal)
self.setParameters('tsignal', tsignal)
def setTgap(self, tgap):
self.setParameters('tgap', tgap)
self.setParameters('tgap', tgap)
def setShotnumber(self, shotname):
self.setParameters('shotname', shotname)
self.setParameters('shotname', shotname)
def setRecfile(self, recfile):
self.setParameters('recfile', recfile)
self.setParameters('recfile', recfile)
def setSourcefile(self, sourcefile):
self.setParameters('sourcefile', sourcefile)
self.setParameters('sourcefile', sourcefile)
def getParas(self):
return self.paras
@ -144,15 +147,15 @@ class SeismicShot(object):
def getManualLatest(self, traceID):
return self.manualpicks[traceID]['lpp']
def getPick(self, traceID, returnRemoved = False):
def getPick(self, traceID, returnRemoved=False):
if not self.getPickFlag(traceID) == 0:
return self.picks[traceID]['mpp']
if returnRemoved == True:
#print('getPick: Returned removed pick for shot %d, traceID %d' %(self.getShotnumber(), traceID))
# print('getPick: Returned removed pick for shot %d, traceID %d' %(self.getShotnumber(), traceID))
return self.picks[traceID]['mpp']
def getPickIncludeRemoved(self, traceID):
return self.getPick(traceID, returnRemoved = True)
return self.getPick(traceID, returnRemoved=True)
def getEarliest(self, traceID):
return self.picks[traceID]['epp']
@ -163,13 +166,13 @@ class SeismicShot(object):
def getSymmetricPickError(self, traceID):
pickerror = self.picks[traceID]['spe']
if np.isnan(pickerror) == True:
print "SPE is NaN for shot %s, traceID %s"%(self.getShotnumber(), traceID)
print "SPE is NaN for shot %s, traceID %s" % (self.getShotnumber(), traceID)
return pickerror
def getPickError(self, traceID):
pickerror = abs(self.getEarliest(traceID) - self.getLatest(traceID)) / 2
if np.isnan(pickerror) == True:
print("PE is NaN for shot %s, traceID %s"%(self.getShotnumber(), traceID))
print("PE is NaN for shot %s, traceID %s" % (self.getShotnumber(), traceID))
return pickerror
def getStreamTraceIDs(self):
@ -207,15 +210,15 @@ class SeismicShot(object):
def getRecCoordlist(self):
if self.recCoordlist is None:
coordlist = open(self.getRecfile(),'r').readlines()
#print 'Reading receiver coordinates from %s' %(self.getRecfile())
coordlist = open(self.getRecfile(), 'r').readlines()
# print 'Reading receiver coordinates from %s' %(self.getRecfile())
self.recCoordlist = coordlist
return self.recCoordlist
def getSrcCoordlist(self):
if self.srcCoordlist is None:
coordlist = open(self.getSourcefile(),'r').readlines()
#print 'Reading shot coordinates from %s' %(self.getSourcefile())
coordlist = open(self.getSourcefile(), 'r').readlines()
# print 'Reading shot coordinates from %s' %(self.getSourcefile())
self.srcCoordlist = coordlist
return self.srcCoordlist
@ -239,7 +242,7 @@ class SeismicShot(object):
:type: int
'''
return HOScf(self.getSingleStream(traceID), self.getCut(),
self.getTmovwind(), self.getOrder(), stealthMode = True)
self.getTmovwind(), self.getOrder(), stealthMode=True)
def getAICcf(self, traceID):
'''
@ -262,7 +265,7 @@ class SeismicShot(object):
tr_cf = Trace()
tr_cf.data = self.getHOScf(traceID).getCF()
st_cf += tr_cf
return AICcf(st_cf, self.getCut(), self.getTmovwind(), stealthMode = True)
return AICcf(st_cf, self.getCut(), self.getTmovwind(), stealthMode=True)
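# (editor's comment) Note that the AIC characteristic function is computed on the
# HOS characteristic function of the trace (wrapped into a temporary Stream above),
# not on the raw waveform itself.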
def getSingleStream(self, traceID): ########## SEG2 / SEGY ? ##########
'''
@ -271,16 +274,16 @@ class SeismicShot(object):
:param: traceID
:type: int
'''
#traces = [trace for trace in self.traces if int(trace.stats.seg2['CHANNEL_NUMBER']) == traceID]
# traces = [trace for trace in self.traces if int(trace.stats.seg2['CHANNEL_NUMBER']) == traceID]
traces = [trace for trace in self.traces if int(trace.stats.channel) == traceID]
if len(traces) == 1:
return Stream(traces)
self.setPick(traceID, None)
print 'Warning: ambiguous or empty traceID: %s' % traceID
#raise ValueError('ambiguous or empty traceID: %s' % traceID)
# raise ValueError('ambiguous or empty traceID: %s' % traceID)
def pickTraces(self, traceID, windowsize, folm, HosAic = 'hos'): ########## input variables ##########
def pickTraces(self, traceID, windowsize, folm, HosAic='hos'): ########## input variables ##########
# LOCALMAX NOT IMPLEMENTED!
'''
Initiate picking for a trace.
@ -306,7 +309,7 @@ class SeismicShot(object):
:param: HosAic, get hos or aic pick (can be 'hos'(default) or 'aic')
:type: 'string'
'''
hoscf = self.getHOScf(traceID) ### determination of both, HOS and AIC (need to change threshold-picker) ###
hoscf = self.getHOScf(traceID) ### determination of both, HOS and AIC (need to change threshold-picker) ###
aiccf = self.getAICcf(traceID)
self.folm = folm
@ -318,7 +321,7 @@ class SeismicShot(object):
self.setPick(traceID, setHosAic[HosAic])
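# Illustrative call (editor's sketch; parameter values are hypothetical):
#   shot.pickTraces(traceID=12, windowsize=(10, 2), folm=0.6, HosAic='hos')
# folm scales the threshold on the HOS characteristic function, and HosAic selects
# whether the HOS or the AIC pick is stored via setPick.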
def setEarllatepick(self, traceID, nfac = 1.5):
def setEarllatepick(self, traceID, nfac=1.5):
tgap = self.getTgap()
tsignal = self.getTsignal()
tnoise = self.getPickIncludeRemoved(traceID) - tgap
@ -326,17 +329,17 @@ class SeismicShot(object):
(self.picks[traceID]['epp'],
self.picks[traceID]['lpp'],
self.picks[traceID]['spe']) = earllatepicker(self.getSingleStream(traceID),
nfac, (tnoise, tgap, tsignal),
self.getPickIncludeRemoved(traceID),
stealthMode = True)
nfac, (tnoise, tgap, tsignal),
self.getPickIncludeRemoved(traceID),
stealthMode=True)
if self.picks[traceID]['epp'] < 0:
self.picks[traceID]['epp'] = 0
#print('setEarllatepick: Set epp to 0 because it was < 0')
# print('setEarllatepick: Set epp to 0 because it was < 0')
# TEST OF 1/2 PICKERROR
# self.picks[traceID]['spe'] *= 0.5
# TEST OF 1/2 PICKERROR
# TEST OF 1/2 PICKERROR
# self.picks[traceID]['spe'] *= 0.5
# TEST OF 1/2 PICKERROR
def threshold(self, hoscf, aiccf, windowsize, pickwindow, folm):
'''
@ -365,10 +368,11 @@ class SeismicShot(object):
leftb = int(pickwindow[0] / self.getCut()[1] * len(hoscflist))
rightb = int(pickwindow[1] / self.getCut()[1] * len(hoscflist))
#threshold = folm * max(hoscflist[leftb : rightb]) # combination of local maximum and threshold
# threshold = folm * max(hoscflist[leftb : rightb]) # combination of local maximum and threshold
### TEST TEST
threshold = folm * (max(hoscflist[leftb : rightb]) - min(hoscflist[leftb : rightb])) + min(hoscflist[leftb : rightb]) # combination of local maximum and threshold
threshold = folm * (max(hoscflist[leftb: rightb]) - min(hoscflist[leftb: rightb])) + min(
hoscflist[leftb: rightb]) # combination of local maximum and threshold
### TEST TEST
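# (editor's comment) Worked example of the threshold above with hypothetical
# numbers: for folm = 0.6 and a HOS CF ranging from 2 to 10 inside the pick window,
# threshold = 0.6 * (10 - 2) + 2 = 6.8, i.e. folm sets the crossing level relative
# to the local CF range rather than to its absolute maximum.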
m = leftb
@ -378,8 +382,8 @@ class SeismicShot(object):
hoscftime = list(hoscf.getTimeArray())[m]
lb = max(0, m - windowsize[0]) # if window exceeds t = 0
aiccfcut = list(aiccf.getCF())[lb : m + windowsize[1]]
lb = max(0, m - windowsize[0]) # if window exceeds t = 0
aiccfcut = list(aiccf.getCF())[lb: m + windowsize[1]]
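# (editor's comment) the AIC pick is then refined as the minimum of the AIC CF
# within a window of 'windowsize' samples around the HOS threshold crossing m.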
if len(aiccfcut) > 0:
n = aiccfcut.index(min(aiccfcut))
else:
@ -401,13 +405,13 @@ class SeismicShot(object):
'''
shotX, shotY, shotZ = self.getSrcLoc()
recX, recY, recZ = self.getRecLoc(traceID)
dist = np.sqrt((shotX-recX)**2 + (shotY-recY)**2 + (shotZ-recZ)**2)
dist = np.sqrt((shotX - recX) ** 2 + (shotY - recY) ** 2 + (shotZ - recZ) ** 2)
if np.isnan(dist) == True:
raise ValueError("Distance is NaN for traceID %s" %traceID)
raise ValueError("Distance is NaN for traceID %s" % traceID)
return dist
#return abs(float(self.getSrcLoc(traceID))-float(self.getRecLoc(traceID)))
# return abs(float(self.getSrcLoc(traceID))-float(self.getRecLoc(traceID)))
def getRecLoc(self, traceID): ########## input FILENAME ##########
'''
@ -417,7 +421,7 @@ class SeismicShot(object):
:param: traceID
:type: int
'''
if traceID == 0: # artificial traceID 0 with pick at t = 0
if traceID == 0: # artificial traceID 0 with pick at t = 0
return self.getSrcLoc()
coordlist = self.getRecCoordlist()
@ -428,9 +432,9 @@ class SeismicShot(object):
z = coordlist[i].split()[3]
return float(x), float(y), float(z)
#print "WARNING: traceID %s not found" % traceID
# print "WARNING: traceID %s not found" % traceID
raise ValueError("traceID %s not found" % traceID)
#return float(self.getSingleStream(traceID)[0].stats.seg2['RECEIVER_LOCATION'])
# return float(self.getSingleStream(traceID)[0].stats.seg2['RECEIVER_LOCATION'])
def getSrcLoc(self): ########## input FILENAME ##########
'''
@ -444,9 +448,10 @@ class SeismicShot(object):
y = coordlist[i].split()[2]
z = coordlist[i].split()[3]
return float(x), float(y), float(z)
#return float(self.getSingleStream(traceID)[0].stats.seg2['SOURCE_LOCATION'])
# return float(self.getSingleStream(traceID)[0].stats.seg2['SOURCE_LOCATION'])
def getTraceIDs4Dist(self, distance = 0, distancebin = (0, 0)): ########## only used for 2D, 'distance bins' ##########
def getTraceIDs4Dist(self, distance=0,
distancebin=(0, 0)): ########## only used for 2D, 'distance bins' ##########
'''
Returns the traceID(s) for a certain distance between source and receiver.
Used for 2D Tomography. TO BE IMPROVED.
@ -460,39 +465,39 @@ class SeismicShot(object):
traceID_list = []
for trace in self.traces:
#traceID = int(trace.stats.seg2['CHANNEL_NUMBER'])
traceID = int(trace.stats.channel)
if distance != 0:
if self.getDistance(traceID) == distance:
traceID_list.append(traceID)
if distancebin[0] >= 0 and distancebin[1] > 0:
if distancebin[0] < self.getDistance(traceID) < distancebin[1]:
traceID_list.append(traceID)
# traceID = int(trace.stats.seg2['CHANNEL_NUMBER'])
traceID = int(trace.stats.channel)
if distance != 0:
if self.getDistance(traceID) == distance:
traceID_list.append(traceID)
if distancebin[0] >= 0 and distancebin[1] > 0:
if distancebin[0] < self.getDistance(traceID) < distancebin[1]:
traceID_list.append(traceID)
if len(traceID_list) > 0:
return traceID_list
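# Illustrative call (editor's sketch; the bin limits are hypothetical):
#   ids = shot.getTraceIDs4Dist(distancebin=(10.0, 20.0))
# returns all traceIDs whose source-receiver offset lies between 10 m and 20 m.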
# def setManualPicks(self, traceID, picklist): ########## picklist currently not general, only used for testing ##########
# '''
# Sets the manual picks for a receiver with the ID == traceID for comparison.
# def setManualPicks(self, traceID, picklist): ########## picklist currently not general, only used for testing ##########
# '''
# Sets the manual picks for a receiver with the ID == traceID for comparison.
# :param: traceID
# :type: int
# :param: traceID
# :type: int
# :param: picklist, list containing the manual picks (mostlikely, earliest, latest).
# :type: list
# '''
# picks = picklist[traceID - 1].split()
# mostlikely = float(picks[1])
# earliest = float(picks[2])
# latest = float(picks[3])
# :param: picklist, list containing the manual picks (mostlikely, earliest, latest).
# :type: list
# '''
# picks = picklist[traceID - 1].split()
# mostlikely = float(picks[1])
# earliest = float(picks[2])
# latest = float(picks[3])
# if not self.manualpicks.has_key(traceID):
# self.manualpicks[traceID] = (mostlikely, earliest, latest)
#else:
# raise KeyError('MANUAL pick to be set more than once for traceID %s' % traceID)
# if not self.manualpicks.has_key(traceID):
# self.manualpicks[traceID] = (mostlikely, earliest, latest)
# else:
# raise KeyError('MANUAL pick to be set more than once for traceID %s' % traceID)
def setManualPicksFromFile(self, directory = 'picks'):
def setManualPicksFromFile(self, directory='picks'):
'''
Read manual picks from *.pck file.
The * must be identical with the shotnumber.
@ -517,8 +522,7 @@ class SeismicShot(object):
else:
self.setManualPickFlag(traceID, 1)
def setPick(self, traceID, pick): ########## see comment ##########
def setPick(self, traceID, pick): ########## see comment ##########
if not traceID in self.picks.keys():
self.picks[traceID] = {}
self.picks[traceID]['mpp'] = pick
@ -568,7 +572,7 @@ class SeismicShot(object):
tsignal = self.getTsignal()
tnoise = self.getPick(traceID) - tgap
self.snr[traceID] = getSNR(self.getSingleStream(traceID), (tnoise,tgap,tsignal), self.getPick(traceID))
self.snr[traceID] = getSNR(self.getSingleStream(traceID), (tnoise, tgap, tsignal), self.getPick(traceID))
def setSNRthreshold(self, traceID, snrthreshold):
self.snrthreshold[traceID] = snrthreshold
@ -583,12 +587,11 @@ class SeismicShot(object):
if self.getRecLoc(traceID)[0] > self.getSrcLoc()[0]:
distancearray.append(self.getDistance(traceID))
elif self.getRecLoc(traceID)[0] <= self.getSrcLoc()[0]:
distancearray.append((-1)*self.getDistance(traceID))
distancearray.append((-1) * self.getDistance(traceID))
return distancearray
def plot2dttc(self, ax = None): ########## 2D ##########
def plot2dttc(self, ax=None): ########## 2D ##########
'''
Function to plot the traveltime curve for automatic picks of a shot. 2D only! Currently restricted to the X direction.
'''
@ -605,15 +608,16 @@ class SeismicShot(object):
# shotnumbers = [shotnumbers for (shotnumbers, shotnames) in sorted(zip(shotnumbers, shotnames))]
plotarray = sorted(zip(self.getDistArray4ttcPlot(), picks))
x = []; y = []
x = [];
y = []
for point in plotarray:
x.append(point[0])
y.append(point[1])
ax.plot(x, y,'r', label = "Automatic Picks")
ax.text(0.5, 0.9, 'shot: %s' %self.getShotnumber(), transform = ax.transAxes
, horizontalalignment = 'center')
ax.plot(x, y, 'r', label="Automatic Picks")
ax.text(0.5, 0.9, 'shot: %s' % self.getShotnumber(), transform=ax.transAxes
, horizontalalignment='center')
def plotmanual2dttc(self, ax = None): ########## 2D ##########
def plotmanual2dttc(self, ax=None): ########## 2D ##########
'''
Function to plot the traveltime curve for manual picks of a shot. 2D only!
'''
@ -632,11 +636,12 @@ class SeismicShot(object):
ax = fig.add_subplot(111)
plotarray = sorted(zip(self.getDistArray4ttcPlot(), manualpicktimesarray))
x = []; y = []
x = [];
y = []
for point in plotarray:
x.append(point[0])
y.append(point[1])
ax.plot(x, y, 'b', label = "Manual Picks")
ax.plot(x, y, 'b', label="Manual Picks")
# def plotpickwindow(self): ########## 2D ##########
# '''
@ -656,10 +661,10 @@ class SeismicShot(object):
# plt.plot(self.getDistArray4ttcPlot(), pickwindowarray_lowerb, ':k')
# plt.plot(self.getDistArray4ttcPlot(), pickwindowarray_upperb, ':k')
def plotTrace(self, traceID, plotSNR = True, lw = 1):
def plotTrace(self, traceID, plotSNR=True, lw=1):
fig = plt.figure()
ax = fig.add_subplot(111)
ax = self._drawStream(traceID, ax = ax)
ax = self._drawStream(traceID, ax=ax)
tgap = self.getTgap()
tsignal = self.getTsignal()
@ -667,31 +672,32 @@ class SeismicShot(object):
tnoise = pick - tgap
snr, snrdb, noiselevel = self.getSNR(traceID)
ax.plot([0, tnoise], [noiselevel, noiselevel], 'm', linewidth = lw, label = 'noise level')
ax.plot([tnoise, pick], [noiselevel, noiselevel], 'g:', linewidth = lw, label = 'gap')
ax.plot([tnoise + tgap, pick + tsignal], [noiselevel * snr, noiselevel * snr], 'b', linewidth = lw, label = 'signal level')
ax.plot([0, tnoise], [noiselevel, noiselevel], 'm', linewidth=lw, label='noise level')
ax.plot([tnoise, pick], [noiselevel, noiselevel], 'g:', linewidth=lw, label='gap')
ax.plot([tnoise + tgap, pick + tsignal], [noiselevel * snr, noiselevel * snr], 'b', linewidth=lw,
label='signal level')
ax.legend()
ax.text(0.05, 0.9, 'SNR: %s' %snr, transform = ax.transAxes)
ax.text(0.05, 0.9, 'SNR: %s' % snr, transform=ax.transAxes)
def plot_traces(self, traceID): ########## 2D, still needs further improvement ##########
def plot_traces(self, traceID): ########## 2D, still needs further improvement ##########
from matplotlib.widgets import Button
def onclick(event):
self.setPick(traceID, event.xdata)
if self.getSNR(traceID)[0] > 1:
self.setEarllatepick(traceID)
self._drawStream(traceID, refresh = True)
self._drawCFs(traceID, folm, refresh = True)
self._drawStream(traceID, refresh=True)
self._drawCFs(traceID, folm, refresh=True)
fig.canvas.mpl_disconnect(self.traces4plot[traceID]['cid'])
plt.draw()
def rmPick(event = None):
def rmPick(event=None):
self.removePick(traceID)
self._drawStream(traceID, refresh = True)
self._drawCFs(traceID, folm, refresh = True)
self._drawStream(traceID, refresh=True)
self._drawCFs(traceID, folm, refresh=True)
plt.draw()
def connectButton(event = None):
def connectButton(event=None):
cid = fig.canvas.mpl_connect('button_press_event', onclick)
self.traces4plot[traceID]['cid'] = cid
@ -701,13 +707,13 @@ class SeismicShot(object):
folm = self.folm
fig = plt.figure()
ax1 = fig.add_subplot(2,1,1)
ax2 = fig.add_subplot(2,1,2, sharex = ax1)
ax1 = fig.add_subplot(2, 1, 1)
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
axb1 = fig.add_axes([0.15, 0.91, 0.05, 0.03])
axb2 = fig.add_axes([0.22, 0.91, 0.05, 0.03])
button1 = Button(axb1, 'repick', color = 'red', hovercolor = 'grey')
button1 = Button(axb1, 'repick', color='red', hovercolor='grey')
button1.on_clicked(connectButton)
button2 = Button(axb2, 'delete', color = 'green', hovercolor = 'grey')
button2 = Button(axb2, 'delete', color='green', hovercolor='grey')
button2.on_clicked(rmPick)
fig.canvas.mpl_connect('close_event', cleanup)
@ -717,7 +723,7 @@ class SeismicShot(object):
self._drawStream(traceID)
self._drawCFs(traceID, folm)
def _drawStream(self, traceID, refresh = False, ax = None):
def _drawStream(self, traceID, refresh=False, ax=None):
from pylot.core.util.utils import getGlobalTimes
from pylot.core.util.utils import prepTimeAxis
@ -737,27 +743,27 @@ class SeismicShot(object):
ax.set_ylim(ylim)
ax.set_title('Shot: %s, traceID: %s, pick: %s'
%(self.getShotnumber(), traceID, self.getPick(traceID)))
ax.plot(timeaxis, stream[0].data, 'k', label = 'trace')
% (self.getShotnumber(), traceID, self.getPick(traceID)))
ax.plot(timeaxis, stream[0].data, 'k', label='trace')
ax.plot([self.getPick(traceID), self.getPick(traceID)],
[ax.get_ylim()[0],
ax.get_ylim()[1]],
'r', label = 'most likely')
'r', label='most likely')
if self.getEarliest(traceID) is not None:
ax.plot([self.getEarliest(traceID), self.getEarliest(traceID)],
[ax.get_ylim()[0],
ax.get_ylim()[1]],
'g:', label = 'earliest')
'g:', label='earliest')
if self.getLatest(traceID) is not None:
ax.plot([self.getLatest(traceID), self.getLatest(traceID)],
[ax.get_ylim()[0],
ax.get_ylim()[1]],
'b:', label = 'latest')
'b:', label='latest')
ax.legend()
return ax
def _drawCFs(self, traceID, folm = None, refresh = False):
def _drawCFs(self, traceID, folm=None, refresh=False):
hoscf = self.getHOScf(traceID)
aiccf = self.getAICcf(traceID)
ax = self.traces4plot[traceID]['ax2']
@ -769,30 +775,30 @@ class SeismicShot(object):
ax.set_xlim(xlim)
ax.set_ylim(ylim)
ax.plot(hoscf.getTimeArray(), hoscf.getCF(), 'b', label = 'HOS')
ax.plot(hoscf.getTimeArray(), aiccf.getCF(), 'g', label = 'AIC')
ax.plot(hoscf.getTimeArray(), hoscf.getCF(), 'b', label='HOS')
ax.plot(hoscf.getTimeArray(), aiccf.getCF(), 'g', label='AIC')
ax.plot([self.getPick(traceID), self.getPick(traceID)],
[ax.get_ylim()[0],
ax.get_ylim()[1]],
'r', label = 'most likely')
'r', label='most likely')
if self.getEarliest(traceID) is not None:
ax.plot([self.getEarliest(traceID), self.getEarliest(traceID)],
[ax.get_ylim()[0],
ax.get_ylim()[1]],
'g:', label = 'earliest')
'g:', label='earliest')
if self.getLatest(traceID) is not None:
ax.plot([self.getLatest(traceID), self.getLatest(traceID)],
[ax.get_ylim()[0],
ax.get_ylim()[1]],
'b:', label = 'latest')
'b:', label='latest')
if folm is not None:
ax.plot([0, self.getPick(traceID)],
[folm * max(hoscf.getCF()), folm * max(hoscf.getCF())],
'm:', label = 'folm = %s' %folm)
'm:', label='folm = %s' % folm)
ax.set_xlabel('Time [s]')
ax.legend()
def plot3dttc(self, step = 0.5, contour = False, plotpicks = False, method = 'linear', ax = None):
def plot3dttc(self, step=0.5, contour=False, plotpicks=False, method='linear', ax=None):
'''
Plots a 3D 'traveltime cone' as a surface plot by interpolating the traveltimes on a regular grid, not yet accounting for the vertical offset of the receivers.
@ -824,20 +830,20 @@ class SeismicShot(object):
xaxis = np.arange(min(x) + step, max(x), step)
yaxis = np.arange(min(y) + step, max(y), step)
xgrid, ygrid = np.meshgrid(xaxis, yaxis)
zgrid = griddata((x, y), z, (xgrid, ygrid), method = method)
zgrid = griddata((x, y), z, (xgrid, ygrid), method=method)
if ax == None:
fig = plt.figure()
ax = plt.axes(projection = '3d')
ax = plt.axes(projection='3d')
xsrc, ysrc, zsrc = self.getSrcLoc()
if contour == True:
ax.contour3D(xgrid,ygrid,zgrid,20)
ax.contour3D(xgrid, ygrid, zgrid, 20)
else:
ax.plot_surface(xgrid, ygrid, zgrid, linewidth = 0, cmap = cm.jet, vmin = min(z), vmax = max(z))
ax.plot([xsrc], [ysrc], [self.getPick(0)], 'k*', markersize = 20) # plot source location
ax.plot([xsrc], [ysrc], [self.getPick(0)], 'r*', markersize = 15) # plot source location
ax.plot_surface(xgrid, ygrid, zgrid, linewidth=0, cmap=cm.jet, vmin=min(z), vmax=max(z))
ax.plot([xsrc], [ysrc], [self.getPick(0)], 'k*', markersize=20) # plot source location
ax.plot([xsrc], [ysrc], [self.getPick(0)], 'r*', markersize=15) # plot source location
if plotpicks == True:
ax.plot(x, y, z, 'k.')
@ -847,7 +853,7 @@ class SeismicShot(object):
plotmethod[method](*args)
def matshow(self, ax = None, step = 0.5, method = 'linear', plotRec = True, annotations = True, colorbar = True, legend = True):
def matshow(self, ax=None, step=0.5, method='linear', plotRec=True, annotations=True, colorbar=True, legend=True):
'''
Plots a 2D matrix of the interpolated traveltimes. This is computationally cheaper than plot3dttc
@ -868,9 +874,12 @@ class SeismicShot(object):
from matplotlib import cm
cmap = cm.jet
x = []; xcut = []
y = []; ycut = []
z = []; zcut = []
x = [];
xcut = []
y = [];
ycut = []
z = [];
zcut = []
for traceID in self.picks.keys():
if self.getPickFlag(traceID) != 0:
@ -882,7 +891,7 @@ class SeismicShot(object):
ycut.append(self.getRecLoc(traceID)[1])
zcut.append(self.getPickIncludeRemoved(traceID))
tmin = 0.8 * min(z) # 20% cushion for colorbar
tmin = 0.8 * min(z) # 20% cushion for colorbar
tmax = 1.2 * max(z)
xaxis = np.arange(min(x), max(x), step)
@ -895,10 +904,11 @@ class SeismicShot(object):
ax = plt.axes()
count = 0
ax.imshow(zgrid, extent = [min(x), max(x), min(y), max(y)], vmin = tmin, vmax = tmax, cmap = cmap, origin = 'lower', alpha = 0.85)
ax.text(0.5, 0.95, 'shot: %s' %self.getShotnumber(), transform = ax.transAxes
, horizontalalignment = 'center')
sc = ax.scatter(x, y, c = z, s = 30, label = 'picked shots', vmin = tmin, vmax = tmax, cmap = cmap, linewidths = 1.5)
ax.imshow(zgrid, extent=[min(x), max(x), min(y), max(y)], vmin=tmin, vmax=tmax, cmap=cmap, origin='lower',
alpha=0.85)
ax.text(0.5, 0.95, 'shot: %s' % self.getShotnumber(), transform=ax.transAxes
, horizontalalignment='center')
sc = ax.scatter(x, y, c=z, s=30, label='picked shots', vmin=tmin, vmax=tmax, cmap=cmap, linewidths=1.5)
label = None
for xyz in zip(xcut, ycut, zcut):
x, y, z = xyz
@ -907,7 +917,7 @@ class SeismicShot(object):
z = 'w'
if count == 1:
label = 'cut out shots'
ax.scatter(x, y, c = z, s = 30, edgecolor = 'm', label = label, vmin = tmin, vmax = tmax, cmap = cmap, linewidths = 1.5)
ax.scatter(x, y, c=z, s=30, edgecolor='m', label=label, vmin=tmin, vmax=tmax, cmap=cmap, linewidths=1.5)
if colorbar == True:
cbar = plt.colorbar(sc)
cbar.set_label('Time [s]')
@ -916,17 +926,15 @@ class SeismicShot(object):
ax.legend()
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.plot(self.getSrcLoc()[0], self.getSrcLoc()[1],'*k', markersize = 15) # plot source location
ax.plot(self.getSrcLoc()[0], self.getSrcLoc()[1], '*k', markersize=15) # plot source location
if annotations == True:
for traceID in self.getTraceIDlist():
if self.getPickFlag(traceID) is not 0:
ax.annotate(' %s' %traceID , xy = (self.getRecLoc(traceID)[0], self.getRecLoc(traceID)[1]),
fontsize = 'x-small', color = 'k')
ax.annotate(' %s' % traceID, xy=(self.getRecLoc(traceID)[0], self.getRecLoc(traceID)[1]),
fontsize='x-small', color='k')
else:
ax.annotate(' %s' %traceID , xy = (self.getRecLoc(traceID)[0], self.getRecLoc(traceID)[1]),
fontsize = 'x-small', color = 'r')
ax.annotate(' %s' % traceID, xy=(self.getRecLoc(traceID)[0], self.getRecLoc(traceID)[1]),
fontsize='x-small', color='r')
plt.show()


@ -2,8 +2,10 @@
import matplotlib.pyplot as plt
import math
import numpy as np
plt.interactive(True)
class regions(object):
'''
A class used for manual inspection and processing of all picks for the user.
@ -12,19 +14,19 @@ class regions(object):
regions.chooseRectangles():
- lets the user choose several rectangular regions in the plot
regions.plotTracesInActiveRegions():
- creates plots (shot.plot_traces) for all traces in the active regions (i.e. chosen by e.g. chooseRectangles)
regions.setAllActiveRegionsForDeletion():
- highlights all shots in the active regions for deletion
regions.deleteAllMarkedPicks():
- deletes the picks (pick flag set to 0) for all shots set for deletion
regions.deselectSelection(number):
- deselects the region with the given number
'''
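# Typical interactive workflow (editor's sketch; 'ax', 'cbar' and 'survey' are
# assumed to come from the survey plotting routine and are not defined here):
#   reg = regions(ax, cbar, survey)
#   reg.chooseRectangles()                 # draw one or more regions in the plot
#   reg.plotTracesInActiveRegions()        # inspect the traces inside them
#   reg.setAllActiveRegionsForDeletion()
#   reg.deleteAllMarkedPicks()             # pick flags of marked picks set to 0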
def __init__(self, ax, cbar, survey):
@ -57,10 +59,10 @@ class regions(object):
for shot in self.shot_dict.values():
for traceID in shot.getTraceIDlist():
allpicks.append((shot.getDistance(traceID),
shot.getPickIncludeRemoved(traceID),
shot.getShotnumber(),
traceID,
shot.getPickFlag(traceID)))
shot.getPickIncludeRemoved(traceID),
shot.getShotnumber(),
traceID,
shot.getPickFlag(traceID)))
allpicks.sort()
self._allpicks = allpicks
@ -74,9 +76,9 @@ class regions(object):
def _onselect_clicks(self, eclick, erelease):
'''eclick and erelease are matplotlib events at press and release'''
print 'region selected x0, y0 = (%3s, %3s), x1, y1 = (%3s, %3s)' % (eclick.xdata,
eclick.ydata,
erelease.xdata,
erelease.ydata)
eclick.ydata,
erelease.xdata,
erelease.ydata)
x0 = min(eclick.xdata, erelease.xdata)
x1 = max(eclick.xdata, erelease.xdata)
y0 = min(eclick.ydata, erelease.ydata)
@ -105,18 +107,18 @@ class regions(object):
self.disconnectPoly()
self.printOutput('Disconnected polygon selection')
def addTextfield(self, xpos = 0, ypos = 0.95, width = 1, height = 0.03):
def addTextfield(self, xpos=0, ypos=0.95, width=1, height=0.03):
'''
Adds an ax for text output to the plot.
'''
self.axtext = self.ax.figure.add_axes([xpos,
ypos,
width,
height])
ypos,
width,
height])
self.axtext.xaxis.set_visible(False)
self.axtext.yaxis.set_visible(False)
def writeInTextfield(self, text = None):
def writeInTextfield(self, text=None):
self.setXYlim(self.ax.get_xlim(), self.ax.get_ylim())
self.axtext.clear()
self.axtext.text(0.01, 0.5, text, verticalalignment='center', horizontalalignment='left')
@ -136,16 +138,16 @@ class regions(object):
self.addButton('SelAll', self.setAllActiveRegionsForDeletion, xpos=xpos2 + 2 * dx)
self.addButton('DelAll', self.deleteAllMarkedPicks, xpos=xpos2 + 3 * dx, color='red')
def addButton(self, name, action, xpos, ypos = 0.91, color = None):
def addButton(self, name, action, xpos, ypos=0.91, color=None):
from matplotlib.widgets import Button
self.buttons[name] = {'ax': None,
'button': None,
'action': action,
'xpos': xpos}
'button': None,
'action': action,
'xpos': xpos}
ax = self.ax.figure.add_axes([xpos,
ypos,
0.05,
0.03])
ypos,
0.05,
0.03])
button = Button(ax, name, color=color, hovercolor='grey')
button.on_clicked(action)
self.buttons[name]['ax'] = ax
@ -179,23 +181,24 @@ class regions(object):
self.drawLastPolyLine()
x = self._polyx
y = self._polyy
self._polyx = []; self._polyy = []
self._polyx = [];
self._polyy = []
key = self.getKey()
self.markPolygon(x, y, key = key)
self.markPolygon(x, y, key=key)
shots, numtraces = self.findTracesInPoly(x, y)
self.shots_found[key] = {'shots': shots,
'selection': 'poly',
'xvalues': x,
'yvalues': y}
'selection': 'poly',
'xvalues': x,
'yvalues': y}
self.printOutput('Found %d traces in polygon: %s' % (numtraces, shots))
def printOutput(self, text):
print text
self.writeInTextfield(text)
def chooseRectangles(self, event = None):
def chooseRectangles(self, event=None):
'''
Activates matplotlib widget RectangleSelector.
'''
@ -208,7 +211,7 @@ class regions(object):
self._rectangle = RectangleSelector(self.ax, self._onselect_clicks)
return self._rectangle
def choosePolygon(self, event = None):
def choosePolygon(self, event=None):
'''
Activates matplotlib widget LassoSelector.
'''
@ -221,7 +224,7 @@ class regions(object):
self._lasso = LassoSelector(self.ax, self._onselect_verts)
return self._lasso
def disconnectPoly(self, event = None):
def disconnectPoly(self, event=None):
if not hasattr(self, '_cidPoly'):
self.printOutput('no poly selection found')
return
@ -231,7 +234,7 @@ class regions(object):
self._lasso.disconnect_events()
print 'disconnected poly selection\n'
def disconnectRect(self, event = None):
def disconnectRect(self, event=None):
if not hasattr(self, '_cidRect'):
self.printOutput('no rectangle selection found')
return
@ -240,14 +243,14 @@ class regions(object):
self._rectangle.disconnect_events()
print 'disconnected rectangle selection\n'
def deselectLastSelection(self, event = None):
def deselectLastSelection(self, event=None):
if self.shots_found.keys() == []:
self.printOutput('No selection found.')
return
key = max(self.shots_found.keys())
self.deselectSelection(key)
def deselectSelection(self, key, color = 'green', alpha = 0.1):
def deselectSelection(self, key, color='green', alpha=0.1):
if key not in self.shots_found.keys():
self.printOutput('No selection found.')
return
@ -255,17 +258,17 @@ class regions(object):
if self.shots_found[key]['selection'] == 'rect':
self.markRectangle(self.shots_found[key]['xvalues'],
self.shots_found[key]['yvalues'],
key = key, color = color, alpha = alpha,
linewidth = 1)
key=key, color=color, alpha=alpha,
linewidth=1)
elif self.shots_found[key]['selection'] == 'poly':
self.markPolygon(self.shots_found[key]['xvalues'],
self.shots_found[key]['yvalues'],
key = key, color = color, alpha = alpha,
linewidth = 1)
key=key, color=color, alpha=alpha,
linewidth=1)
value = self.shots_found.pop(key)
self.printOutput('Deselected selection number %d' % key)
def findTracesInPoly(self, x, y, picks = 'normal', highlight = True):
def findTracesInPoly(self, x, y, picks='normal', highlight=True):
def dotproduct(v1, v2):
return sum((a * b for a, b in zip(v1, v2)))
@ -279,21 +282,26 @@ class regions(object):
angle = 0
epsilon = 1e-07
for index in range(len(x)):
xval1 = x[index - 1]; yval1 = y[index - 1]
xval2 = x[index]; yval2 = y[index]
xval1 = x[index - 1];
yval1 = y[index - 1]
xval2 = x[index];
yval2 = y[index]
angle += getangle([xval1 - pickX, yval1 - pickY], [xval2 - pickX, yval2 - pickY])
if 360 - epsilon <= angle <= 360 + epsilon: ### IMPROVE THAT??
if 360 - epsilon <= angle <= 360 + epsilon: ### IMPROVE THAT??
return True
if len(x) == 0 or len(y) == 0:
self.printOutput('No polygon defined.')
return
shots_found = {}; numtraces = 0
x0 = min(x); x1 = max(x)
y0 = min(y); y1 = max(y)
shots_found = {};
numtraces = 0
x0 = min(x);
x1 = max(x)
y0 = min(y);
y1 = max(y)
shots, numtracesrect = self.findTracesInShotDict((x0, x1), (y0, y1), highlight = False)
shots, numtracesrect = self.findTracesInShotDict((x0, x1), (y0, y1), highlight=False)
for shotnumber in shots.keys():
shot = self.shot_dict[shotnumber]
for traceID in shots[shotnumber]:
@ -310,18 +318,21 @@ class regions(object):
self.drawFigure()
return shots_found, numtraces
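# The polygon test in findTracesInPoly sums, for every pick, the angles subtended
# by consecutive polygon vertices as seen from the pick location; a total of about
# 360 degrees means the point lies inside. A minimal standalone sketch of that idea
# (hypothetical helper; same limitation for non-convex polygons as the code above):
import numpy as np

def inside_polygon(px, py, xverts, yverts, eps=1e-7):
    angle = 0.
    for i in range(len(xverts)):
        v1 = (xverts[i - 1] - px, yverts[i - 1] - py)
        v2 = (xverts[i] - px, yverts[i] - py)
        cosang = np.dot(v1, v2) / (np.hypot(*v1) * np.hypot(*v2))
        angle += np.degrees(np.arccos(np.clip(cosang, -1., 1.)))
    return 360 - eps <= angle <= 360 + eps

# inside_polygon(0.5, 0.5, [0, 1, 1, 0], [0, 0, 1, 1]) -> True (unit square)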
def findTracesInShotDict(self, (x0, x1), (y0, y1), picks = 'normal', highlight = True):
def findTracesInShotDict(self, (x0, x1), (y0, y1), picks='normal', highlight=True):
'''
Returns all traces whose picks fall inside a given rectangular region of the distance-versus-traveltime plot.
'''
shots_found = {}; numtraces = 0
if picks == 'normal': pickflag = 0
elif picks == 'includeCutOut': pickflag = None
shots_found = {};
numtraces = 0
if picks == 'normal':
pickflag = 0
elif picks == 'includeCutOut':
pickflag = None
for line in self._allpicks:
dist, pick, shotnumber, traceID, flag = line
if flag == pickflag: continue ### IMPROVE THAT
if flag == pickflag: continue ### IMPROVE THAT
if (x0 <= dist <= x1 and y0 <= pick <= y1):
if shotnumber not in shots_found.keys():
shots_found[shotnumber] = []
@ -333,7 +344,7 @@ class regions(object):
self.drawFigure()
return shots_found, numtraces
def highlightPick(self, shot, traceID, annotations = True):
def highlightPick(self, shot, traceID, annotations=True):
'''
Highlights a single pick for a shot(object)/shotnumber and traceID.
If annotations == True: Displays shotnumber and traceID in the plot.
@ -344,9 +355,11 @@ class regions(object):
if shot.getPickFlag(traceID) is 0:
return
self.ax.scatter(shot.getDistance(traceID), shot.getPick(traceID), s = 50, marker = 'o', facecolors = 'none', edgecolors = 'm', alpha = 1)
self.ax.scatter(shot.getDistance(traceID), shot.getPick(traceID), s=50, marker='o', facecolors='none',
edgecolors='m', alpha=1)
if annotations == True:
self.ax.annotate(s='s%s|t%s' % (shot.getShotnumber(), traceID), xy=(shot.getDistance(traceID), shot.getPick(traceID)), fontsize='xx-small')
self.ax.annotate(s='s%s|t%s' % (shot.getShotnumber(), traceID),
xy=(shot.getDistance(traceID), shot.getPick(traceID)), fontsize='xx-small')
def highlightAllActiveRegions(self):
'''
@ -358,7 +371,7 @@ class regions(object):
self.highlightPick(self.shot_dict[shotnumber], traceID)
self.drawFigure()
def plotTracesInActiveRegions(self, event = None, keys = 'all', maxfigures = 20):
def plotTracesInActiveRegions(self, event=None, keys='all', maxfigures=20):
'''
Plots all traces in the active region or for all specified keys.
@ -382,13 +395,14 @@ class regions(object):
for traceID in self.shots_found[key]['shots'][shotnumber]:
count += 1
if count > maxfigures:
print 'Maximum number of figures (%s) reached. %sth figure was not opened.' %(maxfigures, count)
print 'Maximum number of figures (%s) reached. %sth figure was not opened.' % (
maxfigures, count)
break
shot.plot_traces(traceID)
else:
self.printOutput('No picks defined in that region(s)')
def setAllActiveRegionsForDeletion(self, event = None):
def setAllActiveRegionsForDeletion(self, event=None):
keys = []
for key in self.shots_found.keys():
keys.append(key)
@ -405,7 +419,7 @@ class regions(object):
for traceID in self.shots_found[key]['shots'][shotnumber]:
if traceID not in self.shots_for_deletion[shotnumber]:
self.shots_for_deletion[shotnumber].append(traceID)
self.deselectSelection(key, color = 'red', alpha = 0.2)
self.deselectSelection(key, color='red', alpha=0.2)
self.deselectSelection(key, color='red', alpha=0.2)
@ -415,13 +429,12 @@ class regions(object):
for key in self.shots_found.keys():
if self.shots_found[key]['selection'] == 'rect':
self.markRectangle(self.shots_found[key]['xvalues'],
self.shots_found[key]['yvalues'], key = key)
self.shots_found[key]['yvalues'], key=key)
if self.shots_found[key]['selection'] == 'poly':
self.markPolygon(self.shots_found[key]['xvalues'],
self.shots_found[key]['yvalues'], key = key)
self.shots_found[key]['yvalues'], key=key)
def markRectangle(self, (x0, x1), (y0, y1), key = None, color = 'grey', alpha = 0.1, linewidth = 1):
def markRectangle(self, (x0, x1), (y0, y1), key=None, color='grey', alpha=0.1, linewidth=1):
'''
Mark a rectangular region on the axes.
'''
@ -431,7 +444,7 @@ class regions(object):
self.ax.text(x0 + (x1 - x0) / 2, y0 + (y1 - y0) / 2, str(key))
self.drawFigure()
def markPolygon(self, x, y, key = None, color = 'grey', alpha = 0.1, linewidth = 1):
def markPolygon(self, x, y, key=None, color='grey', alpha=0.1, linewidth=1):
from matplotlib.patches import Polygon
poly = Polygon(np.array(zip(x, y)), color=color, alpha=alpha, lw=linewidth)
self.ax.add_patch(poly)
@ -449,7 +462,7 @@ class regions(object):
def getShotsForDeletion(self):
return self.shots_for_deletion
def deleteAllMarkedPicks(self, event = None):
def deleteAllMarkedPicks(self, event=None):
'''
Deletes all shots set for deletion.
'''
@ -462,11 +475,11 @@ class regions(object):
if shot.getShotnumber() == shotnumber:
for traceID in self.getShotsForDeletion()[shotnumber]:
shot.removePick(traceID)
print "Deleted the pick for traceID %s on shot number %s" %(traceID, shotnumber)
print "Deleted the pick for traceID %s on shot number %s" % (traceID, shotnumber)
self.clearShotsForDeletion()
self.refreshFigure()
def highlightPicksForShot(self, shot, annotations = False):
def highlightPicksForShot(self, shot, annotations=False):
'''
Highlight all picks for a given shot.
'''
@ -482,19 +495,19 @@ class regions(object):
def setXYlim(self, xlim, ylim):
self._xlim, self._ylim = xlim, ylim
def refreshLog10SNR(self, event = None):
def refreshLog10SNR(self, event=None):
cbv = 'log10SNR'
self.refreshFigure(self, colorByVal=cbv)
def refreshPickerror(self, event = None):
def refreshPickerror(self, event=None):
cbv = 'pickerror'
self.refreshFigure(self, colorByVal=cbv)
def refreshSPE(self, event = None):
def refreshSPE(self, event=None):
cbv = 'spe'
self.refreshFigure(self, colorByVal=cbv)
def refreshFigure(self, event = None, colorByVal = None):
def refreshFigure(self, event=None, colorByVal=None):
if colorByVal == None:
colorByVal = self.cbv
else:
@ -508,7 +521,7 @@ class regions(object):
self.drawFigure()
self.printOutput('Done!')
def drawFigure(self, resetAxes = True):
def drawFigure(self, resetAxes=True):
if resetAxes == True:
self.ax.set_xlim(self._xlim)
self.ax.set_ylim(self._ylim)

View File

@ -1,6 +1,13 @@
import numpy as np
from __future__ import print_function
def readParameters(parfile, parameter):
"""
:param parfile:
:param parameter:
:return:
"""
from ConfigParser import ConfigParser
parameterConfig = ConfigParser()
parameterConfig.read(parfile)
@ -9,14 +16,29 @@ def readParameters(parfile, parameter):
return value
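# readParameters pulls a single option out of an INI-style parameter file via
# ConfigParser. A minimal sketch of the underlying mechanism; file, section and
# option names here are made up for illustration:
from ConfigParser import ConfigParser   # 'configparser' in Python 3

cfg = ConfigParser()
cfg.read('survey_parameters.in')         # hypothetical parameter file
# value = cfg.get('vmodel', 'nlayers')   # fetch option 'nlayers' from section 'vmodel'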
def setArtificialPick(shot_dict, traceID, pick):
"""
:param shot_dict:
:param traceID:
:param pick:
:return:
"""
for shot in shot_dict.values():
shot.setPick(traceID, pick)
shot.setPickwindow(traceID, shot.getCut())
def fitSNR4dist(shot_dict, shiftdist = 30, shiftSNR = 100):
def fitSNR4dist(shot_dict, shiftdist=30, shiftSNR=100):
"""
:param shot_dict:
:param shiftdist:
:param shiftSNR:
:return:
"""
import numpy as np
import matplotlib.pyplot as plt
dists = []
picks = []
snrs = []
@ -29,54 +51,84 @@ def fitSNR4dist(shot_dict, shiftdist = 30, shiftSNR = 100):
dists.append(shot.getDistance(traceID))
picks.append(shot.getPickIncludeRemoved(traceID))
snrs.append(shot.getSNR(traceID)[0])
snr_sqrt_inv.append(1/np.sqrt(shot.getSNR(traceID)[0]))
snr_sqrt_inv.append(1 / np.sqrt(shot.getSNR(traceID)[0]))
fit = np.polyfit(dists, snr_sqrt_inv, 1)
fit_fn = np.poly1d(fit)
for dist in dists:
snrBestFit.append((1/(fit_fn(dist)**2)))
snrBestFit.append((1 / (fit_fn(dist) ** 2)))
dist += shiftdist
snrthresholds.append((1/(fit_fn(dist)**2)) - shiftSNR * np.exp(-0.05 * dist))
snrthresholds.append((1 / (fit_fn(dist) ** 2)) - shiftSNR * np.exp(-0.05 * dist))
plotFittedSNR(dists, snrthresholds, snrs, snrBestFit)
return fit_fn #### TO BE IMPROVED: should return a ready-to-use function
return fit_fn  #### TO BE IMPROVED: should return a ready-to-use function
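# fitSNR4dist models 1/sqrt(SNR) as a linear function of offset; inverting and
# squaring that line gives a smooth SNR-versus-distance curve, and the picking
# threshold is this curve evaluated at a shifted offset minus an exponentially
# decaying term. A small numeric sketch with the default coefficients used in
# setDynamicFittedSNR (values are purely illustrative):
import numpy as np

fit_fn = np.poly1d([0.004, -0.0007])
for dist in (50., 100., 200.):                              # offsets in metres
    d = dist + 30                                           # shiftdist = 30
    thresh = 1 / fit_fn(d) ** 2 - 100 * np.exp(-0.05 * d)   # shiftSNR = 100
    print('offset %5.0f m -> SNR threshold %.2f' % (dist, max(thresh, 2.5)))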
def plotFittedSNR(dists, snrthresholds, snrs, snrBestFit):
"""
:param dists:
:param snrthresholds:
:param snrs:
:param snrBestFit:
:return:
"""
import matplotlib.pyplot as plt
plt.interactive(True)
fig = plt.figure()
plt.plot(dists, snrs, 'b.', markersize = 2.0, label = 'SNR values')
plt.plot(dists, snrs, 'b.', markersize=2.0, label='SNR values')
dists.sort()
snrthresholds.sort(reverse = True)
snrBestFit.sort(reverse = True)
plt.plot(dists, snrthresholds, 'r', markersize = 1, label = 'Fitted threshold')
plt.plot(dists, snrBestFit, 'k', markersize = 1, label = 'Best fitted curve')
snrthresholds.sort(reverse=True)
snrBestFit.sort(reverse=True)
plt.plot(dists, snrthresholds, 'r', markersize=1, label='Fitted threshold')
plt.plot(dists, snrBestFit, 'k', markersize=1, label='Best fitted curve')
plt.xlabel('Distance[m]')
plt.ylabel('SNR')
plt.legend()
def setDynamicFittedSNR(shot_dict, shiftdist = 30, shiftSNR = 100, p1 = 0.004, p2 = -0.0007):
def setDynamicFittedSNR(shot_dict, shiftdist=30, shiftSNR=100, p1=0.004, p2=-0.0007):
"""
:param shot_dict:
:type shot_dict: dict
:param shiftdist:
:type shiftdist: int
:param shiftSNR:
:type shiftSNR: int
:param p1:
:type p1: float
:param p2:
:type p2: float
:return:
"""
import numpy as np
minSNR = 2.5
#fit_fn = fitSNR4dist(shot_dict)
# fit_fn = fitSNR4dist(shot_dict)
fit_fn = np.poly1d([p1, p2])
for shot in shot_dict.values():
for traceID in shot.getTraceIDlist(): ### IMPROVE
for traceID in shot.getTraceIDlist(): ### IMPROVE
dist = shot.getDistance(traceID) + shiftdist
snrthreshold = (1/(fit_fn(dist)**2)) - shiftSNR * np.exp(-0.05 * dist)
snrthreshold = (1 / (fit_fn(dist) ** 2)) - shiftSNR * np.exp(-0.05 * dist)
if snrthreshold < minSNR:
print('WARNING: SNR threshold %s lower than %s. Set SNR threshold to %s.'
%(snrthreshold, minSNR, minSNR))
% (snrthreshold, minSNR, minSNR))
shot.setSNRthreshold(traceID, minSNR)
else:
shot.setSNRthreshold(traceID, snrthreshold)
print "setDynamicFittedSNR: Finished setting of fitted SNR-threshold"
print("setDynamicFittedSNR: Finished setting of fitted SNR-threshold")
def setConstantSNR(shot_dict, snrthreshold = 2.5):
import numpy as np
def setConstantSNR(shot_dict, snrthreshold=2.5):
"""
:param shot_dict:
:param snrthreshold:
:return:
"""
for shot in shot_dict.values():
for traceID in shot.getTraceIDlist():
shot.setSNRthreshold(traceID, snrthreshold)
print "setConstantSNR: Finished setting of SNR threshold to a constant value of %s"%snrthreshold
print("setConstantSNR: Finished setting of SNR threshold to a constant value of %s" % snrthreshold)
def findTracesInRanges(shot_dict, distancebin, pickbin):
'''
@ -94,8 +146,8 @@ def findTracesInRanges(shot_dict, distancebin, pickbin):
'''
shots_found = {}
for shot in shot_dict.values():
if shot.getTraceIDs4Dist(distancebin = distancebin) is not None:
for traceID in shot.getTraceIDs4Dist(distancebin = distancebin):
if shot.getTraceIDs4Dist(distancebin=distancebin) is not None:
for traceID in shot.getTraceIDs4Dist(distancebin=distancebin):
if pickbin[0] < shot.getPick(traceID) < pickbin[1]:
if shot.getShotnumber() not in shots_found.keys():
shots_found[shot.getShotnumber()] = []
@ -103,11 +155,17 @@ def findTracesInRanges(shot_dict, distancebin, pickbin):
return shots_found
def cleanUp(survey):
def cleanUp(survey):
"""
:param survey:
:return:
"""
for shot in survey.data.values():
shot.traces4plot = {}
# def plotScatterStats(survey, key, ax = None):
# import matplotlib.pyplot as plt
# x = []; y = []; value = []
@ -119,7 +177,7 @@ def cleanUp(survey):
# value.append(stats[shotnumber][key])
# x.append(survey.data[shotnumber].getSrcLoc()[0])
# y.append(survey.data[shotnumber].getSrcLoc()[1])
# if ax == None:
# fig = plt.figure()
# ax = fig.add_subplot(111)
@ -131,14 +189,19 @@ def cleanUp(survey):
# cbar.set_label(key)
def plotScatterStats4Shots(survey, key):
'''
"""
Statistics, scatter plot.
key can be 'mean SNR', 'median SNR', 'mean SPE', 'median SPE', or 'picked traces'
'''
:param survey:
:param key:
:return:
"""
import matplotlib.pyplot as plt
import numpy as np
statsShot = {}
x = []; y = []; value = []
x = []
y = []
value = []
for shot in survey.data.values():
for traceID in shot.getTraceIDlist():
if not shot in statsShot.keys():
@ -147,7 +210,7 @@ def plotScatterStats4Shots(survey, key):
'SNR': [],
'SPE': [],
'picked traces': 0}
statsShot[shot]['SNR'].append(shot.getSNR(traceID)[0])
if shot.getPickFlag(traceID) == 1:
statsShot[shot]['picked traces'] += 1
@ -171,7 +234,7 @@ def plotScatterStats4Shots(survey, key):
for val in value:
size.append(100 * val / max(value))
sc = ax.scatter(x, y, s = size, c = value)
sc = ax.scatter(x, y, s=size, c=value)
plt.title('Plot of all shots')
plt.xlabel('X')
plt.ylabel('Y')
@ -179,18 +242,24 @@ def plotScatterStats4Shots(survey, key):
cbar.set_label(key)
for shot in statsShot.keys():
ax.annotate(' %s' %shot.getShotnumber() , xy = (shot.getSrcLoc()[0], shot.getSrcLoc()[1]),
fontsize = 'x-small', color = 'k')
ax.annotate(' %s' % shot.getShotnumber(), xy=(shot.getSrcLoc()[0], shot.getSrcLoc()[1]),
fontsize='x-small', color='k')
def plotScatterStats4Receivers(survey, key):
'''
"""
Statistics, scatter plot.
key can be 'mean SNR', 'median SNR', 'mean SPE', 'median SPE', or 'picked traces'
'''
:param survey:
:param key:
:return:
"""
import matplotlib.pyplot as plt
import numpy as np
statsRec = {}
x = []; y = []; value = []
x = []
y = []
value = []
for shot in survey.data.values():
for traceID in shot.getTraceIDlist():
if not traceID in statsRec.keys():
@ -199,13 +268,12 @@ def plotScatterStats4Receivers(survey, key):
'SNR': [],
'SPE': [],
'picked traces': 0}
statsRec[traceID]['SNR'].append(shot.getSNR(traceID)[0])
if shot.getPickFlag(traceID) == 1:
statsRec[traceID]['picked traces'] += 1
statsRec[traceID]['SPE'].append(shot.getSymmetricPickError(traceID))
for traceID in statsRec.keys():
statsRec[traceID]['mean SNR'] = np.mean(statsRec[traceID]['SNR'])
statsRec[traceID]['median SNR'] = np.median(statsRec[traceID]['SNR'])
@ -224,14 +292,14 @@ def plotScatterStats4Receivers(survey, key):
for val in value:
size.append(100 * val / max(value))
sc = ax.scatter(x, y, s = size, c = value)
sc = ax.scatter(x, y, s=size, c=value)
plt.title('Plot of all receivers')
plt.xlabel('X')
plt.ylabel('Y')
cbar = plt.colorbar(sc)
cbar.set_label(key)
shot = survey.data.values()[0]
for traceID in shot.getTraceIDlist():
ax.annotate(' %s' %traceID , xy = (shot.getRecLoc(traceID)[0], shot.getRecLoc(traceID)[1]),
fontsize = 'x-small', color = 'k')
ax.annotate(' %s' % traceID, xy=(shot.getRecLoc(traceID)[0], shot.getRecLoc(traceID)[1]),
fontsize='x-small', color='k')

View File

@ -5,9 +5,7 @@ from obspy.core import read
from obspy.signal.trigger import coincidenceTrigger
class CoincidenceTimes(object):
def __init__(self, st, comp='Z', coinum=4, sta=1., lta=10., on=5., off=1.):
_type = 'recstalta'
self.coinclist = self.createCoincTriggerlist(data=st, trigcomp=comp,

View File

@ -6,11 +6,15 @@ import numpy as np
def crosscorrsingle(wf1, wf2, taumax):
'''
:param Wx:
:param Wy:
:param taumax:
:return:
Calculates the cross-correlation between two waveforms for a defined maximum time difference.
:param wf1: first waveform data
:type wf1: list
:param wf2: second waveform data
:type wf2: list
:param taumax: maximum time difference between waveforms
:type taumax: positive integer
:return: returns the cross-correlation function 'c' and the lag vector 'l'
:rtype: c and l are lists
'''
N = len(wf1)
c = np.zeros(2 * taumax - 1)
@ -83,10 +87,13 @@ def wfscrosscorr(weights, wfs, taumax):
SWB 26.01.2010 as arranged with Thomas Meier and Monika Bischoff
:param weights:
:param wfs:
:param taumax:
:return:
:param weights: weighting factors for the single components
:type weights: tuple
:param wfs: tuple of `~numpy.array` objects containing waveform data
:type wfs: tuple
:param taumax: maximum time difference
:type taumax: positive integer
:return: returns the cross-correlation function normalized by the waveform energy
'''
ccnorm = 0.
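# A minimal numpy sketch of a single-trace cross-correlation restricted to lags
# |tau| < taumax and normalised by the joint signal energy; this mirrors the idea
# of crosscorrsingle/wfscrosscorr but is not the exact loop used above (assumes
# wf1 and wf2 have equal length):
import numpy as np

def xcorr_maxlag(wf1, wf2, taumax):
    full = np.correlate(wf1, wf2, mode='full')      # all lags -(N-1) .. N-1
    mid = len(wf2) - 1                              # index of zero lag
    lags = np.arange(-(taumax - 1), taumax)         # 2 * taumax - 1 lags
    c = full[mid + lags]
    energy = np.sqrt(np.sum(np.square(wf1)) * np.sum(np.square(wf2)))
    return c / energy, lags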

View File

@ -15,6 +15,7 @@ from scipy.optimize import curve_fit
from scipy import integrate, signal
from pylot.core.read.data import Data
class Magnitude(object):
'''
Superclass for calculating Wood-Anderson peak-to-peak
@ -72,7 +73,6 @@ class Magnitude(object):
self.calcsourcespec()
self.run_calcMoMw()
def getwfstream(self):
return self.wfstream
@ -108,7 +108,7 @@ class Magnitude(object):
def getrho(self):
return self.rho
def setvp(self, vp):
self.vp = vp
@ -117,7 +117,7 @@ class Magnitude(object):
def setQp(self, Qp):
self.Qp = Qp
def getQp(self):
return self.Qp
@ -154,6 +154,7 @@ class Magnitude(object):
def run_calcMoMw(self):
self.pickdic = None
class WApp(Magnitude):
'''
Method to derive peak-to-peak amplitude as seen on a Wood-Anderson-
@ -207,10 +208,10 @@ class WApp(Magnitude):
class M0Mw(Magnitude):
'''
Method to calculate seismic moment Mo and moment magnitude Mw.
Requires results of class calcsourcespec for calculating plateau w0
and corner frequency fc of source spectrum, respectively. Uses
subfunction calcMoMw.py. Returns modified dictionary of picks including
Dc-value, corner frequency fc, seismic moment Mo and
Requires results of class calcsourcespec for calculating plateau w0
and corner frequency fc of source spectrum, respectively. Uses
subfunction calcMoMw.py. Returns modified dictionary of picks including
Dc-value, corner frequency fc, seismic moment Mo and
corresponding moment magnitude Mw.
'''
@ -222,44 +223,45 @@ class M0Mw(Magnitude):
self.picdic = None
for key in picks:
if picks[key]['P']['weight'] < 4:
# select waveform
selwf = wfdat.select(station=key)
if len(key) > 4:
Ppattern = '%s ? ? ? P' % key
elif len(key) == 4:
Ppattern = '%s ? ? ? P' % key
elif len(key) < 4:
Ppattern = '%s ? ? ? P' % key
nllocline = getPatternLine(nllocfile, Ppattern)
# get hypocentral distance, station azimuth and
# angle of incidence from NLLoc-location file
delta = float(nllocline.split(None)[21])
az = float(nllocline.split(None)[22])
inc = float(nllocline.split(None)[24])
# call subfunction to estimate source spectrum
# and to derive w0 and fc
[w0, fc] = calcsourcespec(selwf, picks[key]['P']['mpp'], \
self.getinvdir(), self.getvp(), delta, az, \
inc, self.getQp(), self.getiplot())
if picks[key]['P']['weight'] < 4:
# select waveform
selwf = wfdat.select(station=key)
if len(key) > 4:
Ppattern = '%s ? ? ? P' % key
elif len(key) == 4:
Ppattern = '%s ? ? ? P' % key
elif len(key) < 4:
Ppattern = '%s ? ? ? P' % key
nllocline = getPatternLine(nllocfile, Ppattern)
# get hypocentral distance, station azimuth and
# angle of incidence from NLLoc-location file
delta = float(nllocline.split(None)[21])
az = float(nllocline.split(None)[22])
inc = float(nllocline.split(None)[24])
# call subfunction to estimate source spectrum
# and to derive w0 and fc
[w0, fc] = calcsourcespec(selwf, picks[key]['P']['mpp'], \
self.getinvdir(), self.getvp(), delta, az, \
inc, self.getQp(), self.getiplot())
if w0 is not None:
# call subfunction to calculate Mo and Mw
zdat = selwf.select(component="Z")
if len(zdat) == 0: # check for other components
zdat = selwf.select(component="3")
[Mo, Mw] = calcMoMw(zdat, w0, self.getrho(), self.getvp(), \
delta, self.getinvdir())
else:
Mo = None
Mw = None
if w0 is not None:
# call subfunction to calculate Mo and Mw
zdat = selwf.select(component="Z")
if len(zdat) == 0: # check for other components
zdat = selwf.select(component="3")
[Mo, Mw] = calcMoMw(zdat, w0, self.getrho(), self.getvp(), \
delta, self.getinvdir())
else:
Mo = None
Mw = None
# add w0, fc, Mo and Mw to dictionary
picks[key]['P']['w0'] = w0
picks[key]['P']['fc'] = fc
picks[key]['P']['Mo'] = Mo
picks[key]['P']['Mw'] = Mw
self.picdic = picks
# add w0, fc, Mo and Mw to dictionary
picks[key]['P']['w0'] = w0
picks[key]['P']['fc'] = fc
picks[key]['P']['Mo'] = Mo
picks[key]['P']['Mw'] = Mw
self.picdic = picks
def calcMoMw(wfstream, w0, rho, vp, delta, inv):
'''
@ -271,7 +273,7 @@ def calcMoMw(wfstream, w0, rho, vp, delta, inv):
:param: w0, height of plateau of source spectrum
:type: float
:param: rho, rock density [kg/m³]
:type: integer
@ -283,25 +285,24 @@ def calcMoMw(wfstream, w0, rho, vp, delta, inv):
'''
tr = wfstream[0]
delta = delta * 1000 # hypocentral distance in [m]
delta = delta * 1000 # hypocentral distance in [m]
print("calcMoMw: Calculating seismic moment Mo and moment magnitude Mw for station %s ..." \
% tr.stats.station)
% tr.stats.station)
# additional common parameters for calculating Mo
rP = 2 / np.sqrt(15) # average radiation pattern of P waves (Aki & Richards, 1980)
freesurf = 2.0 # free surface correction, assuming vertical incidence
rP = 2 / np.sqrt(15) # average radiation pattern of P waves (Aki & Richards, 1980)
freesurf = 2.0 # free surface correction, assuming vertical incidence
Mo = w0 * 4 * np.pi * rho * np.power(vp, 3) * delta / (rP * freesurf)
Mo = w0 * 4 * np.pi * rho * np.power(vp, 3) * delta / (rP * freesurf)
#Mw = np.log10(Mo * 1e07) * 2 / 3 - 10.7 # after Hanks & Kanamori (1979), defined for [dyn*cm]!
Mw = np.log10(Mo) * 2 / 3 - 6.7 # for metric units
# Mw = np.log10(Mo * 1e07) * 2 / 3 - 10.7 # after Hanks & Kanamori (1979), defined for [dyn*cm]!
Mw = np.log10(Mo) * 2 / 3 - 6.7 # for metric units
print("calcMoMw: Calculated seismic moment Mo = %e Nm => Mw = %3.1f " % (Mo, Mw))
return Mo, Mw
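# Worked example of the relations above with plausible, made-up values:
# w0 = 1e-6 m/Hz, rho = 2700 kg/m^3, vp = 6000 m/s, hypocentral distance 10 km.
import numpy as np

w0, rho, vp, delta = 1e-6, 2700., 6000., 10e3
rP, freesurf = 2 / np.sqrt(15), 2.0
Mo = w0 * 4 * np.pi * rho * np.power(vp, 3) * delta / (rP * freesurf)
Mw = np.log10(Mo) * 2 / 3 - 6.7
# gives Mo ~ 7.1e13 Nm and Mw ~ 2.5 for these numbers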
def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp, iplot):
'''
@ -310,7 +311,7 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
source model. Has to be derived from instrument corrected displacement traces,
thus restitution and integration necessary! Integrated traces are rotated
into ray-coordinate system ZNE => LQT using Obspy's rotate module!
:param: wfstream
:type: `~obspy.core.stream.Stream`
@ -346,7 +347,7 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
Q = int(qu[0])
# A, i.e. power of frequency
A = float(qu[1])
delta = delta * 1000 # hypocentral distance in [m]
delta = delta * 1000 # hypocentral distance in [m]
fc = None
w0 = None
@ -385,11 +386,11 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
# L: P-wave direction
# Q: SV-wave direction
# T: SH-wave direction
LQT=cordat_copy.rotate('ZNE->LQT',azimuth, incidence)
LQT = cordat_copy.rotate('ZNE->LQT', azimuth, incidence)
ldat = LQT.select(component="L")
if len(ldat) == 0:
# if horizontal channels are 2 and 3
# no azimuth information is available and thus no
# no azimuth information is available and thus no
# rotation is possible!
print("calcsourcespec: Azimuth information is missing, "
"no rotation of components possible!")
@ -398,30 +399,30 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
# integrate to displacement
# unrotated vertical component (for comparison)
inttrz = signal.detrend(integrate.cumtrapz(zdat[0].data, None, \
zdat[0].stats.delta))
zdat[0].stats.delta))
# rotated component Z => L
Ldat = signal.detrend(integrate.cumtrapz(ldat[0].data, None, \
ldat[0].stats.delta))
ldat[0].stats.delta))
# get window after P pulse for
# get window after P pulse for
# calculating source spectrum
if zdat[0].stats.sampling_rate <= 100:
winzc = zdat[0].stats.sampling_rate
elif zdat[0].stats.sampling_rate > 100 and \
zdat[0].stats.sampling_rate <= 200:
winzc = 0.5 * zdat[0].stats.sampling_rate
zdat[0].stats.sampling_rate <= 200:
winzc = 0.5 * zdat[0].stats.sampling_rate
elif zdat[0].stats.sampling_rate > 200 and \
zdat[0].stats.sampling_rate <= 400:
winzc = 0.2 * zdat[0].stats.sampling_rate
zdat[0].stats.sampling_rate <= 400:
winzc = 0.2 * zdat[0].stats.sampling_rate
elif zdat[0].stats.sampling_rate > 400:
winzc = zdat[0].stats.sampling_rate
winzc = zdat[0].stats.sampling_rate
tstart = UTCDateTime(zdat[0].stats.starttime)
tonset = onset.timestamp -tstart.timestamp
tonset = onset.timestamp - tstart.timestamp
impickP = tonset * zdat[0].stats.sampling_rate
wfzc = Ldat[impickP : impickP + winzc]
wfzc = Ldat[impickP: impickP + winzc]
# get time array
t = np.arange(0, len(inttrz) * zdat[0].stats.delta, \
zdat[0].stats.delta)
zdat[0].stats.delta)
# calculate spectrum using only first cycles of
# waveform after P onset!
zc = crossings_nonzero_all(wfzc)
@ -441,14 +442,14 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
fny = zdat[0].stats.sampling_rate / 2
l = len(xdat) / zdat[0].stats.sampling_rate
# number of fft bins after Bath
n = zdat[0].stats.sampling_rate * l
n = zdat[0].stats.sampling_rate * l
# find next power of 2 of data length
m = pow(2, np.ceil(np.log(len(xdat)) / np.log(2)))
N = int(np.power(m, 2))
y = zdat[0].stats.delta * np.fft.fft(xdat, N)
Y = abs(y[: N/2])
Y = abs(y[: N / 2])
L = (N - 1) / zdat[0].stats.sampling_rate
f = np.arange(0, fny, 1/L)
f = np.arange(0, fny, 1 / L)
# remove zero-frequency and frequencies above
# corner frequency of seismometer (assumed
@ -457,10 +458,10 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
F = f[fi]
YY = Y[fi]
# correction for attenuation
wa = 2 * np.pi * F #angular frequency
D = np.exp((wa * delta) / (2 * vp * Q*F**A))
YYcor = YY.real*D
# correction for attenuation
wa = 2 * np.pi * F # angular frequency
D = np.exp((wa * delta) / (2 * vp * Q * F ** A))
YYcor = YY.real * D
# get plateau (DC value) and corner frequency
# initial guess of plateau
@ -477,24 +478,24 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
fc1 = optspecfit[1]
print ("calcsourcespec: Determined w0-value: %e m/Hz, \n"
"Determined corner frequency: %f Hz" % (w01, fc1))
# use of conventional fitting
# use of conventional fitting
[w02, fc2] = fitSourceModel(F, YYcor, Fcin, iplot)
# get w0 and fc as median of both
# source spectrum fits
# get w0 and fc as median of both
# source spectrum fits
w0 = np.median([w01, w02])
fc = np.median([fc1, fc2])
print("calcsourcespec: Using w0-value = %e m/Hz and fc = %f Hz" % (w0, fc))
except TypeError as er:
raise TypeError('''{0}'''.format(er))
if iplot > 1:
f1 = plt.figure()
tLdat = np.arange(0, len(Ldat) * zdat[0].stats.delta, \
zdat[0].stats.delta)
plt.subplot(2,1,1)
zdat[0].stats.delta)
plt.subplot(2, 1, 1)
# show displacement in mm
p1, = plt.plot(t, np.multiply(inttrz, 1000), 'k')
p2, = plt.plot(tLdat, np.multiply(Ldat, 1000))
@ -502,26 +503,26 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
if plotflag == 1:
plt.plot(t[iwin], np.multiply(xdat, 1000), 'g')
plt.title('Seismogram and P Pulse, Station %s-%s' \
% (zdat[0].stats.station, zdat[0].stats.channel))
% (zdat[0].stats.station, zdat[0].stats.channel))
else:
plt.title('Seismogram, Station %s-%s' \
% (zdat[0].stats.station, zdat[0].stats.channel))
% (zdat[0].stats.station, zdat[0].stats.channel))
plt.xlabel('Time since %s' % zdat[0].stats.starttime)
plt.ylabel('Displacement [mm]')
if plotflag == 1:
plt.subplot(2,1,2)
plt.subplot(2, 1, 2)
p1, = plt.loglog(f, Y.real, 'k')
p2, = plt.loglog(F, YY.real)
p3, = plt.loglog(F, YYcor, 'r')
p4, = plt.loglog(F, fit, 'g')
plt.loglog([fc, fc], [w0/100, w0], 'g')
plt.loglog([fc, fc], [w0 / 100, w0], 'g')
plt.legend([p1, p2, p3, p4], ['Raw Spectrum', \
'Used Raw Spectrum', \
'Q-Corrected Spectrum', \
'Fit to Spectrum'])
plt.title('Source Spectrum from P Pulse, w0=%e m/Hz, fc=%6.2f Hz' \
% (w0, fc))
% (w0, fc))
plt.xlabel('Frequency [Hz]')
plt.ylabel('Amplitude [m/Hz]')
plt.grid()
@ -530,7 +531,7 @@ def calcsourcespec(wfstream, onset, inventory, vp, delta, azimuth, incidence, Qp
plt.close(f1)
return w0, fc
def synthsourcespec(f, omega0, fcorner):
'''
@ -547,7 +548,7 @@ def synthsourcespec(f, omega0, fcorner):
:type: float
'''
#ssp = omega0 / (pow(2, (1 + f / fcorner)))
# ssp = omega0 / (pow(2, (1 + f / fcorner)))
ssp = omega0 / (1 + pow(2, (f / fcorner)))
return ssp
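# Quick numeric check of the spectral model above: the level is w0/2 at very low
# frequencies, drops to w0/3 at f = fcorner and roughly halves for every further
# fcorner step (values below are purely illustrative):
omega0, fcorner = 1e-6, 5.0
for f in (0.01, fcorner, 2 * fcorner, 4 * fcorner):
    print('f = %5.2f Hz -> %e m/Hz' % (f, synthsourcespec(f, omega0, fcorner)))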
@ -556,8 +557,8 @@ def synthsourcespec(f, omega0, fcorner):
def fitSourceModel(f, S, fc0, iplot):
'''
Calculates synthetic source spectrum by varying corner frequency fc.
Returns best approximated plateau omega0 and corner frequency, i.e. with least
common standard deviations.
Returns the best approximated plateau omega0 and corner frequency, i.e. the pair
with the smallest combined standard deviation.
:param: f, frequencies
:type: array
@ -569,7 +570,7 @@ def fitSourceModel(f, S, fc0, iplot):
:type: float
'''
w0 = []
w0 = []
stdw0 = []
fc = []
stdfc = []
@ -577,17 +578,17 @@ def fitSourceModel(f, S, fc0, iplot):
# get window around initial corner frequency for trials
fcstopl = fc0 - max(1, len(f) / 10)
il = np.argmin(abs(f-fcstopl))
il = np.argmin(abs(f - fcstopl))
fcstopl = f[il]
fcstopr = fc0 + min(len(f), len(f) /10)
ir = np.argmin(abs(f-fcstopr))
fcstopr = fc0 + min(len(f), len(f) / 10)
ir = np.argmin(abs(f - fcstopr))
fcstopr = f[ir]
iF = np.where((f >= fcstopl) & (f <= fcstopr))
# vary corner frequency around initial point
for i in range(il, ir):
for i in range(il, ir):
FC = f[i]
indexdc = np.where((f > 0 ) & (f <= FC))
indexdc = np.where((f > 0) & (f <= FC))
dc = np.mean(S[indexdc])
stddc = np.std(dc - S[indexdc])
w0.append(dc)
@ -595,7 +596,7 @@ def fitSourceModel(f, S, fc0, iplot):
fc.append(FC)
# slope
indexfc = np.where((f >= FC) & (f <= fcstopr))
yi = dc/(1+(f[indexfc]/FC)**2)
yi = dc / (1 + (f[indexfc] / FC) ** 2)
stdFC = np.std(yi - S[indexfc])
stdfc.append(stdFC)
STD.append(stddc + stdFC)
@ -607,31 +608,31 @@ def fitSourceModel(f, S, fc0, iplot):
elif len(STD) == 0:
fc = fc0
w0 = max(S)
print("fitSourceModel: best fc: %fHz, best w0: %e m/Hz" \
% (fc, w0))
% (fc, w0))
if iplot > 1:
plt.figure(iplot)
plt.loglog(f, S, 'k')
plt.loglog([f[0], fc], [w0, w0], 'g')
plt.loglog([fc, fc], [w0/100, w0], 'g')
plt.loglog([fc, fc], [w0 / 100, w0], 'g')
plt.title('Calculated Source Spectrum, Omega0=%e m/Hz, fc=%6.2f Hz' \
% (w0, fc))
% (w0, fc))
plt.xlabel('Frequency [Hz]')
plt.ylabel('Amplitude [m/Hz]')
plt.grid()
plt.figure(iplot+1)
plt.figure(iplot + 1)
plt.subplot(311)
plt.plot(f[il:ir], STD,'*')
plt.plot(f[il:ir], STD, '*')
plt.title('Common Standard Deviations')
plt.xticks([])
plt.subplot(312)
plt.plot(f[il:ir], stdw0,'*')
plt.plot(f[il:ir], stdw0, '*')
plt.title('Standard Deviations of w0-Values')
plt.xticks([])
plt.subplot(313)
plt.plot(f[il:ir],stdfc,'*')
plt.plot(f[il:ir], stdfc, '*')
plt.title('Standard Deviations of Corner Frequencies')
plt.xlabel('Corner Frequencies [Hz]')
plt.show()
@ -639,10 +640,3 @@ def fitSourceModel(f, S, fc0, iplot):
plt.close()
return w0, fc
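# A minimal smoke test of fitSourceModel on a noise-free synthetic spectrum built
# with synthsourcespec (iplot = 0 suppresses plotting). Note that the omega-square
# model fitted here differs from the synthetic spectral shape, so the returned
# plateau and corner frequency are only indicative:
import numpy as np

f = np.linspace(0.1, 30., 300)
S = synthsourcespec(f, 1e-6, 5.0)
w0_est, fc_est = fitSourceModel(f, S, 5.0, 0)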

View File

@ -1,25 +1,31 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from obspy.signal.trigger import recSTALTA, triggerOnset
from obspy.signal.trigger import recursive_sta_lta, trigger_onset
def createSingleTriggerlist(st, station='ZV01', trigcomp='Z', stalta=(1, 10),
trigonoff=(6, 1)):
'''
Uses a single-station trigger to create a trigger list for this station.
:param st:
:param station:
:param trigcomp:
:param stalta:
:param trigonoff:
:return:
:param st: obspy stream
:type st: obspy.core.stream.Stream
:param station: station name to get triggers for (optional, default = ZV01)
:type station: str
:param trigcomp: (optional, default = Z)
:type trigcomp: str
:param stalta: (optional, default = (1,10))
:type stalta: tuple
:param trigonoff: (optional, default = (6,1))
:type trigonoff: tuple
:return: list of trigger times
:rtype: list
'''
tr = st.copy().select(component=trigcomp, station=station)[0]
df = tr.stats.sampling_rate
cft = recSTALTA(tr.data, int(stalta[0] * df), int(stalta[1] * df))
triggers = triggerOnset(cft, trigonoff[0], trigonoff[1])
cft = recursive_sta_lta(tr.data, int(stalta[0] * df), int(stalta[1] * df))
triggers = trigger_onset(cft, trigonoff[0], trigonoff[1])
trigg = []
for time in triggers:
trigg.append(tr.stats.starttime + time[0] / df)
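# A minimal usage sketch (file name is made up; station and parameter values are
# the documented defaults):
from obspy.core import read

st = read('/data/traces/shot_0042.mseed')      # hypothetical waveform file
trigtimes = createSingleTriggerlist(st, station='ZV01', trigcomp='Z',
                                    stalta=(1, 10), trigonoff=(6, 1))
# trigtimes should be a list of UTCDateTime trigger onsets for station ZV01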
@ -32,7 +38,7 @@ def createSubCoincTriggerlist(trig, station='ZV01'):
coincidence trigger and are seen at the requested station
:param trig: list containing triggers from coincidence trigger
:type trig: list
:param station: station name to get triggers for
:param station: station name to get triggers for (optional, default = ZV01)
:type station: str
:return: list of triggertimes
:rtype: list

View File

@ -3,16 +3,16 @@
import subprocess
import os
from obspy.core.event import readEvents
from pylot.core.pick.utils import writephases
from pylot.core.util.utils import getPatternLine
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
def picksExport(picks, locrt, phasefile):
'''
Take <picks> dictionary and exports picking data to a NLLOC-obs
Takes a <picks> dictionary and exports picking data to a NLLoc-obs
<phasefile> without creating an ObsPy event object.
:param picks: picking data dictionary
@ -27,6 +27,7 @@ def picksExport(picks, locrt, phasefile):
# write phases to NLLoc-phase file
writephases(picks, locrt, phasefile)
def modifyInputFile(ctrfn, root, nllocoutn, phasefn, tttn):
'''
:param ctrfn: name of NLLoc-control file
@ -36,18 +37,18 @@ def modifyInputFile(ctrfn, root, nllocoutn, phasefn, tttn):
:type: str
:param nllocoutn: name of NLLoc-location output file
:type: str
:type: str
:param phasefn: name of NLLoc-input phase file
:type: str
:param tttn: pattern of precalculated NLLoc traveltime tables
:type: str
:type: str
'''
# For locating the event the NLLoc-control file has to be modified!
# create LOCFILES line (phase file, traveltime tables, output file) for the NLLoc-control file
ctrfile = os.path.join(root, 'run', ctrfn)
nllocout = os.path.join(root,'loc', nllocoutn)
nllocout = os.path.join(root, 'loc', nllocoutn)
phasefile = os.path.join(root, 'obs', phasefn)
tttable = os.path.join(root, 'time', tttn)
locfiles = 'LOCFILES %s NLLOC_OBS %s %s 0\n' % (phasefile, tttable, nllocout)
@ -64,6 +65,7 @@ def modifyInputFile(ctrfn, root, nllocoutn, phasefn, tttn):
nllfile.write(filedata)
nllfile.close()
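# For illustration: with root='/data/nlloc', nllocoutn='loc_e0001',
# phasefn='e0001.obs' and tttn='ak135.P' (all names made up), the line written
# into the control file by the code above would read
#
#   LOCFILES /data/nlloc/obs/e0001.obs NLLOC_OBS /data/nlloc/time/ak135.P /data/nlloc/loc/loc_e0001 0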
def locate(call, fnin):
'''
Takes paths to NLLoc executable <call> and input parameter file <fnin>
@ -79,8 +81,10 @@ def locate(call, fnin):
# locate the event
subprocess.call([call, fnin])
def readLocation(fn):
pass
if __name__=='__main__':
if __name__ == '__main__':
pass

View File

@ -11,15 +11,17 @@ function conglomerate utils.
import matplotlib.pyplot as plt
import numpy as np
from scipy import integrate
from pylot.core.pick.Picker import AICPicker, PragPicker
from pylot.core.pick.CharFuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker,\
from pylot.core.read.inputs import AutoPickParameter
from pylot.core.pick.picker import AICPicker, PragPicker
from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.charfuns import HOScf, AICcf, ARZcf, ARHcf, AR3Ccf
from pylot.core.pick.utils import checksignallength, checkZ4S, earllatepicker, \
getSNR, fmpicker, checkPonsets, wadaticheck
from pylot.core.util.utils import getPatternLine
from pylot.core.read.data import Data
from pylot.core.analysis.magnitude import WApp
def autopickevent(data, param):
stations = []
all_onsets = {}
@ -46,14 +48,18 @@ def autopickevent(data, param):
# check S-P times (Wadati)
return wadaticheck(jk_checked_onsets, wdttolerance, iplot)
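# A minimal calling sketch for the event picker (file names are made up and the
# AutoPickParameter constructor is assumed to accept the path of an autoPyLoT
# input file):
from obspy.core import read
from pylot.core.read.inputs import AutoPickParameter

param = AutoPickParameter('autoPyLoT.in')       # assumed constructor signature
wfdata = read('/data/e0001/*.mseed')            # hypothetical waveform files
picks = autopickevent(wfdata, param)            # dict: station -> {'P': {...}, 'S': {...}}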
def autopickstation(wfstream, pickparam, verbose=False):
"""
:param: wfstream
:type: `~obspy.core.stream.Stream`
:param wfstream: `~obspy.core.stream.Stream` containing waveform data
:type wfstream: obspy.core.stream.Stream
:param: pickparam
:type: container of picking parameters from input file,
:param pickparam: container of picking parameters from input file,
usually autoPyLoT.in
:type pickparam: AutoPickParameter
:param verbose: flag for verbose output
:type verbose: bool
"""
# declaring pickparam variables (only for convenience)
@ -137,7 +143,8 @@ def autopickstation(wfstream, pickparam, verbose=False):
Pflag = 0
Sflag = 0
Pmarker = []
Ao = None # Wood-Anderson peak-to-peak amplitude
Ao = None # Wood-Anderson peak-to-peak amplitude
picker = 'autoPyLoT' # name of the picking program
# split components
zdat = wfstream.select(component="Z")
@ -152,9 +159,9 @@ def autopickstation(wfstream, pickparam, verbose=False):
if (algoP == 'HOS' or algoP == 'ARZ') and zdat is not None:
msg = '##########################################\nautopickstation:' \
' Working on P onset of station {station}\nFiltering vertical ' \
'trace ...\n{data}'.format(station=zdat[0].stats.station,
data=str(zdat))
' Working on P onset of station {station}\nFiltering vertical ' \
'trace ...\n{data}'.format(station=zdat[0].stats.station,
data=str(zdat))
if verbose: print(msg)
z_copy = zdat.copy()
# filter and taper data
@ -169,12 +176,13 @@ def autopickstation(wfstream, pickparam, verbose=False):
Lwf = zdat[0].stats.endtime - zdat[0].stats.starttime
Ldiff = Lwf - Lc
if Ldiff < 0:
msg = 'autopickstation: Cutting times are too large for actual ' \
'waveform!\nUsing entire waveform instead!'
msg = 'autopickstation: Cutting times are too large for actual ' \
'waveform!\nUsing entire waveform instead!'
if verbose: print(msg)
pstart = 0
pstop = len(zdat[0].data) * zdat[0].stats.delta
cuttimes = [pstart, pstop]
cf1 = None
if algoP == 'HOS':
# calculate HOS-CF using subclass HOScf of class
# CharacteristicFunction
@ -188,6 +196,10 @@ def autopickstation(wfstream, pickparam, verbose=False):
# calculate AIC-HOS-CF using subclass AICcf of class
# CharacteristicFunction
# class needs stream object => build it
assert isinstance(cf1, CharacteristicFunction), 'cf1 is not set ' \
'correctly: maybe the algorithm name ({algoP}) is ' \
'corrupted'.format(
algoP=algoP)
tr_aic = tr_filt.copy()
tr_aic.data = cf1.getCF()
z_copy[0].data = tr_aic.data
@ -217,10 +229,10 @@ def autopickstation(wfstream, pickparam, verbose=False):
trH1_filt = edat.copy()
trH2_filt = ndat.copy()
trH1_filt.filter('bandpass', freqmin=bph1[0],
freqmax=bph1[1],
freqmax=bph1[1],
zerophase=False)
trH2_filt.filter('bandpass', freqmin=bph1[0],
freqmax=bph1[1],
freqmax=bph1[1],
zerophase=False)
trH1_filt.taper(max_percentage=0.05, type='hann')
trH2_filt.taper(max_percentage=0.05, type='hann')
@ -249,8 +261,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
##############################################################
# go on with processing if AIC onset passes quality control
if (aicpick.getSlope() >= minAICPslope and
aicpick.getSNR() >= minAICPSNR and
Pflag == 1):
aicpick.getSNR() >= minAICPSNR and Pflag == 1):
aicPflag = 1
msg = 'AIC P-pick passes quality control: Slope: {0} counts/s, ' \
'SNR: {1}\nGo on with refined picking ...\n' \
@ -270,6 +281,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
cuttimes2 = [round(max([aicpick.getpick() - Precalcwin, 0])),
round(min([len(zdat[0].data) * zdat[0].stats.delta,
aicpick.getpick() + Precalcwin]))]
cf2 = None
if algoP == 'HOS':
# calculate HOS-CF using subclass HOScf of class
# CharacteristicFunction
@ -282,14 +294,18 @@ def autopickstation(wfstream, pickparam, verbose=False):
addnoise) # instance of ARZcf
##############################################################
# get refined onset time from CF2 using class Picker
assert isinstance(cf2, CharacteristicFunction), 'cf2 is not set ' \
'correctly: maybe the algorithm name ({algoP}) is ' \
'corrupted'.format(
algoP=algoP)
refPpick = PragPicker(cf2, tsnrz, pickwinP, iplot, ausP, tsmoothP,
aicpick.getpick())
mpickP = refPpick.getpick()
#############################################################
if mpickP is not None:
# quality assessment
# get earliest and latest possible pick and symmetrized uncertainty
[lpickP, epickP, Perror] = earllatepicker(z_copy, nfacP, tsnrz,
# get earliest/latest possible pick and symmetrized uncertainty
[epickP, lpickP, Perror] = earllatepicker(z_copy, nfacP, tsnrz,
mpickP, iplot)
# get SNR
@ -473,13 +489,14 @@ def autopickstation(wfstream, pickparam, verbose=False):
#############################################################
if mpickS is not None:
# quality assessment
# get earliest and latest possible pick and symmetrized uncertainty
# get earliest/latest possible pick and symmetrized uncertainty
h_copy[0].data = trH1_filt.data
[lpickS1, epickS1, Serror1] = earllatepicker(h_copy, nfacS,
[epickS1, lpickS1, Serror1] = earllatepicker(h_copy, nfacS,
tsnrh,
mpickS, iplot)
h_copy[0].data = trH2_filt.data
[lpickS2, epickS2, Serror2] = earllatepicker(h_copy, nfacS,
[epickS2, lpickS2, Serror2] = earllatepicker(h_copy, nfacS,
tsnrh,
mpickS, iplot)
if epickS1 is not None and epickS2 is not None:
@ -488,28 +505,30 @@ def autopickstation(wfstream, pickparam, verbose=False):
epick = [epickS1, epickS2]
lpick = [lpickS1, lpickS2]
pickerr = [Serror1, Serror2]
if epickS1 == None and epickS2 is not None:
if epickS1 is None and epickS2 is not None:
ipick = 1
elif epickS1 is not None and epickS2 == None:
elif epickS1 is not None and epickS2 is None:
ipick = 0
elif epickS1 is not None and epickS2 is not None:
ipick = np.argmin([epickS1, epickS2])
elif algoS == 'AR3':
[lpickS3, epickS3, Serror3] = earllatepicker(h_copy, nfacS,
[epickS3, lpickS3, Serror3] = earllatepicker(h_copy,
nfacS,
tsnrh,
mpickS, iplot)
mpickS,
iplot)
# get earliest pick of all three picks
epick = [epickS1, epickS2, epickS3]
lpick = [lpickS1, lpickS2, lpickS3]
pickerr = [Serror1, Serror2, Serror3]
if epickS1 == None and epickS2 is not None \
if epickS1 is None and epickS2 is not None \
and epickS3 is not None:
ipick = np.argmin([epickS2, epickS3])
elif epickS1 is not None and epickS2 == None \
elif epickS1 is not None and epickS2 is None \
and epickS3 is not None:
ipick = np.argmin([epickS2, epickS3])
elif epickS1 is not None and epickS2 is not None \
and epickS3 == None:
and epickS3 is None:
ipick = np.argmin([epickS1, epickS2])
elif epickS1 is not None and epickS2 is not None \
and epickS3 is not None:
@ -538,7 +557,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
'SNR[dB]: {2}\n'
'################################################'
''.format(Sweight, SNRS, SNRSdB))
##################################################################
################################################################
# get Wood-Anderson peak-to-peak amplitude
# initialize Data object
data = Data()
@ -555,8 +574,8 @@ def autopickstation(wfstream, pickparam, verbose=False):
else:
# use larger window for getting peak-to-peak amplitude
# as the S pick is quite unsure
wapp = WApp(cordat, mpickP, mpickP + sstop + \
(0.5 * (mpickP + sstop)), iplot)
wapp = WApp(cordat, mpickP, mpickP + sstop +
(0.5 * (mpickP + sstop)), iplot)
Ao = wapp.getwapp()
@ -585,7 +604,8 @@ def autopickstation(wfstream, pickparam, verbose=False):
# calculate WA-peak-to-peak amplitude
# using subclass WApp of superclass Magnitude
wapp = WApp(cordat, mpickP, mpickP + sstop + (0.5 * (mpickP
+ sstop)), iplot)
+ sstop)),
iplot)
Ao = wapp.getwapp()
else:
@ -642,7 +662,7 @@ def autopickstation(wfstream, pickparam, verbose=False):
plt.title('%s, %s, P Weight=%d' % (tr_filt.stats.station,
tr_filt.stats.channel,
Pweight))
plt.yticks([])
plt.ylim([-1.5, 1.5])
plt.ylabel('Normalized Counts')
@ -754,46 +774,46 @@ def autopickstation(wfstream, pickparam, verbose=False):
plt.close()
##########################################################################
# calculate "real" onset times
if mpickP is not None and epickP is not None and mpickP is not None:
if lpickP is not None and lpickP == mpickP:
lpickP += timeerrorsP[0]
if epickP is not None and epickP == mpickP:
epickP -= timeerrorsP[0]
if mpickP is not None and epickP is not None and lpickP is not None:
lpickP = zdat[0].stats.starttime + lpickP
epickP = zdat[0].stats.starttime + epickP
mpickP = zdat[0].stats.starttime + mpickP
else:
# dummy values (start of seismic trace) in order to derive
# theoretical onset times for iterative picking
lpickP = zdat[0].stats.starttime
epickP = zdat[0].stats.starttime
lpickP = zdat[0].stats.starttime + timeerrorsP[3]
epickP = zdat[0].stats.starttime - timeerrorsP[3]
mpickP = zdat[0].stats.starttime
if mpickS is not None and epickS is not None and mpickS is not None:
if lpickS is not None and lpickS == mpickS:
lpickS += timeerrorsS[0]
if epickS is not None and epickS == mpickS:
epickS -= timeerrorsS[0]
if mpickS is not None and epickS is not None and lpickS is not None:
lpickS = edat[0].stats.starttime + lpickS
epickS = edat[0].stats.starttime + epickS
mpickS = edat[0].stats.starttime + mpickS
else:
# dummy values (start of seismic trace) in order to derive
# theoretical onset times for iterative picking
lpickS = edat[0].stats.starttime
epickS = edat[0].stats.starttime
lpickS = edat[0].stats.starttime + timeerrorsS[3]
epickS = edat[0].stats.starttime - timeerrorsS[3]
mpickS = edat[0].stats.starttime
# create dictionary
# for P phase
phase = 'P'
phasepick = {'lpp': lpickP, 'epp': epickP, 'mpp': mpickP, 'spe': Perror,
'snr': SNRP, 'snrdb': SNRPdB, 'weight': Pweight, 'fm': FM,
'w0': None, 'fc': None, 'Mo': None, 'Mw': None}
picks = {phase: phasepick}
# add P marker
picks[phase]['marked'] = Pmarker
ppick = dict(lpp=lpickP, epp=epickP, mpp=mpickP, spe=Perror, snr=SNRP,
snrdb=SNRPdB, weight=Pweight, fm=FM, w0=None, fc=None, Mo=None,
Mw=None, picker=picker, marked=Pmarker)
# add S phase
phase = 'S'
phasepick = {'lpp': lpickS, 'epp': epickS, 'mpp': mpickS, 'spe': Serror,
'snr': SNRS, 'snrdb': SNRSdB, 'weight': Sweight, 'fm': None}
picks[phase] = phasepick
# add Wood-Anderson amplitude
picks[phase]['Ao'] = Ao
spick = dict(lpp=lpickS, epp=epickS, mpp=mpickS, spe=Serror, snr=SNRS,
snrdb=SNRSdB, weight=Sweight, fm=None, picker=picker, Ao=Ao)
# merge picks into returning dictionary
picks = dict(P=ppick, S=spick)
return picks
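# Sketch of how the returned dictionary is typically accessed (keys as defined
# above; the weight convention 0 = best, 4 = rejected follows the usage
# 'weight < 4' elsewhere in this module):
# picks['P']['mpp']                    -> most probable P onset (UTCDateTime)
# picks['P']['epp'], picks['P']['lpp'] -> earliest / latest possible onset
# picks['P']['spe']                    -> symmetrized pick error [s]
# picks['S']['weight']                 -> pick weight
# picks['S']['Ao']                     -> Wood-Anderson peak-to-peak amplitude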
@ -819,70 +839,72 @@ def iteratepicker(wf, NLLocfile, picks, badpicks, pickparameter):
newpicks = {}
for i in range(0, len(badpicks)):
if len(badpicks[i][0]) > 4:
Ppattern = '%s ? ? ? P' % badpicks[i][0]
elif len(badpicks[i][0]) == 4:
Ppattern = '%s ? ? ? P' % badpicks[i][0]
elif len(badpicks[i][0]) < 4:
Ppattern = '%s ? ? ? P' % badpicks[i][0]
nllocline = getPatternLine(NLLocfile, Ppattern)
res = nllocline.split(None)[16]
# get theoretical P-onset time from residuum
badpicks[i][1] = picks[badpicks[i][0]]['P']['mpp'] - float(res)
if len(badpicks[i][0]) > 4:
Ppattern = '%s ? ? ? P' % badpicks[i][0]
elif len(badpicks[i][0]) == 4:
Ppattern = '%s ? ? ? P' % badpicks[i][0]
elif len(badpicks[i][0]) < 4:
Ppattern = '%s ? ? ? P' % badpicks[i][0]
nllocline = getPatternLine(NLLocfile, Ppattern)
res = nllocline.split(None)[16]
# get theoretical P-onset time from residuum
badpicks[i][1] = picks[badpicks[i][0]]['P']['mpp'] - float(res)
# get corresponding waveform stream
msg = '#######################################################\n' \
'iteratepicker: Re-picking station {0}'.format(badpicks[i][0])
print(msg)
wf2pick = wf.select(station=badpicks[i][0])
# get corresponding waveform stream
msg = '#######################################################\n' \
'iteratepicker: Re-picking station {0}'.format(badpicks[i][0])
print(msg)
wf2pick = wf.select(station=badpicks[i][0])
# modify some picking parameters
pstart_old = pickparameter.getParam('pstart')
pstop_old = pickparameter.getParam('pstop')
sstop_old = pickparameter.getParam('sstop')
pickwinP_old = pickparameter.getParam('pickwinP')
Precalcwin_old = pickparameter.getParam('Precalcwin')
noisefactor_old = pickparameter.getParam('noisefactor')
zfac_old = pickparameter.getParam('zfac')
pickparameter.setParam(pstart=max([0, badpicks[i][1] - wf2pick[0].stats.starttime \
- pickparameter.getParam('tlta')]))
pickparameter.setParam(pstop=pickparameter.getParam('pstart') + \
(3 * pickparameter.getParam('tlta')))
pickparameter.setParam(sstop=pickparameter.getParam('sstop') / 2)
pickparameter.setParam(pickwinP=pickparameter.getParam('pickwinP') / 2)
pickparameter.setParam(Precalcwin=pickparameter.getParam('Precalcwin') / 2)
pickparameter.setParam(noisefactor=1.0)
pickparameter.setParam(zfac=1.0)
print("iteratepicker: The following picking parameters have been modified for iterative picking:")
print("pstart: %fs => %fs" % (pstart_old, pickparameter.getParam('pstart')))
print("pstop: %fs => %fs" % (pstop_old, pickparameter.getParam('pstop')))
print("sstop: %fs => %fs" % (sstop_old, pickparameter.getParam('sstop')))
print("pickwinP: %fs => %fs" % (pickwinP_old, pickparameter.getParam('pickwinP')))
print("Precalcwin: %fs => %fs" % (Precalcwin_old, pickparameter.getParam('Precalcwin')))
print("noisefactor: %f => %f" % (noisefactor_old, pickparameter.getParam('noisefactor')))
print("zfac: %f => %f" % (zfac_old, pickparameter.getParam('zfac')))
# modify some picking parameters
pstart_old = pickparameter.getParam('pstart')
pstop_old = pickparameter.getParam('pstop')
sstop_old = pickparameter.getParam('sstop')
pickwinP_old = pickparameter.getParam('pickwinP')
Precalcwin_old = pickparameter.getParam('Precalcwin')
noisefactor_old = pickparameter.getParam('noisefactor')
zfac_old = pickparameter.getParam('zfac')
pickparameter.setParam(
pstart=max([0, badpicks[i][1] - wf2pick[0].stats.starttime \
- pickparameter.getParam('tlta')]))
pickparameter.setParam(pstop=pickparameter.getParam('pstart') + \
(3 * pickparameter.getParam('tlta')))
pickparameter.setParam(sstop=pickparameter.getParam('sstop') / 2)
pickparameter.setParam(pickwinP=pickparameter.getParam('pickwinP') / 2)
pickparameter.setParam(
Precalcwin=pickparameter.getParam('Precalcwin') / 2)
pickparameter.setParam(noisefactor=1.0)
pickparameter.setParam(zfac=1.0)
print(
"iteratepicker: The following picking parameters have been modified for iterative picking:")
print(
"pstart: %fs => %fs" % (pstart_old, pickparameter.getParam('pstart')))
print(
"pstop: %fs => %fs" % (pstop_old, pickparameter.getParam('pstop')))
print(
"sstop: %fs => %fs" % (sstop_old, pickparameter.getParam('sstop')))
print("pickwinP: %fs => %fs" % (
pickwinP_old, pickparameter.getParam('pickwinP')))
print("Precalcwin: %fs => %fs" % (
Precalcwin_old, pickparameter.getParam('Precalcwin')))
print("noisefactor: %f => %f" % (
noisefactor_old, pickparameter.getParam('noisefactor')))
print("zfac: %f => %f" % (zfac_old, pickparameter.getParam('zfac')))
# repick station
newpicks = autopickstation(wf2pick, pickparameter)
# repick station
newpicks = autopickstation(wf2pick, pickparameter)
# replace old dictionary with new one
picks[badpicks[i][0]] = newpicks
# replace old dictionary with new one
picks[badpicks[i][0]] = newpicks
# reset temporary change of picking parameters
print("iteratepicker: Resetting picking parameters ...")
pickparameter.setParam(pstart=pstart_old)
pickparameter.setParam(pstop=pstop_old)
pickparameter.setParam(sstop=sstop_old)
pickparameter.setParam(pickwinP=pickwinP_old)
pickparameter.setParam(Precalcwin=Precalcwin_old)
pickparameter.setParam(noisefactor=noisefactor_old)
pickparameter.setParam(zfac=zfac_old)
# reset temporary change of picking parameters
print("iteratepicker: Resetting picking parameters ...")
pickparameter.setParam(pstart=pstart_old)
pickparameter.setParam(pstop=pstop_old)
pickparameter.setParam(sstop=sstop_old)
pickparameter.setParam(pickwinP=pickwinP_old)
pickparameter.setParam(Precalcwin=Precalcwin_old)
pickparameter.setParam(noisefactor=noisefactor_old)
pickparameter.setParam(zfac=zfac_old)
return picks

View File

@ -21,10 +21,12 @@ import matplotlib.pyplot as plt
import numpy as np
from obspy.core import Stream
class CharacteristicFunction(object):
'''
SuperClass for different types of characteristic functions.
'''
def __init__(self, data, cut, t2=None, order=None, t1=None, fnoise=None, stealthMode=False):
'''
Initialize data type object with information from the original
@ -103,9 +105,9 @@ class CharacteristicFunction(object):
def setARdetStep(self, t1):
if t1:
self.ARdetStep = []
self.ARdetStep.append(t1 / 4)
self.ARdetStep.append(int(np.ceil(self.getTime2() / self.getIncrement()) / 4))
self.ARdetStep = []
self.ARdetStep.append(t1 / 4)
self.ARdetStep.append(int(np.ceil(self.getTime2() / self.getIncrement()) / 4))
def getOrder(self):
return self.order
@ -150,14 +152,14 @@ class CharacteristicFunction(object):
if cut is not None:
if len(self.orig_data) == 1:
if self.cut[0] == 0 and self.cut[1] == 0:
start = 0
stop = len(self.orig_data[0])
start = 0
stop = len(self.orig_data[0])
elif self.cut[0] == 0 and self.cut[1] is not 0:
start = 0
stop = self.cut[1] / self.dt
start = 0
stop = self.cut[1] / self.dt
else:
start = self.cut[0] / self.dt
stop = self.cut[1] / self.dt
start = self.cut[0] / self.dt
stop = self.cut[1] / self.dt
zz = self.orig_data.copy()
z1 = zz[0].copy()
zz[0].data = z1.data[int(start):int(stop)]
@ -165,16 +167,16 @@ class CharacteristicFunction(object):
return data
elif len(self.orig_data) == 2:
if self.cut[0] == 0 and self.cut[1] == 0:
start = 0
stop = min([len(self.orig_data[0]), len(self.orig_data[1])])
start = 0
stop = min([len(self.orig_data[0]), len(self.orig_data[1])])
elif self.cut[0] == 0 and self.cut[1] is not 0:
start = 0
stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
len(self.orig_data[1])])
start = 0
stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
len(self.orig_data[1])])
else:
start = max([0, self.cut[0] / self.dt])
stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
len(self.orig_data[1])])
start = max([0, self.cut[0] / self.dt])
stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
len(self.orig_data[1])])
hh = self.orig_data.copy()
h1 = hh[0].copy()
h2 = hh[1].copy()
@ -184,16 +186,16 @@ class CharacteristicFunction(object):
return data
elif len(self.orig_data) == 3:
if self.cut[0] == 0 and self.cut[1] == 0:
start = 0
stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
len(self.orig_data[1]), len(self.orig_data[2])])
start = 0
stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
len(self.orig_data[1]), len(self.orig_data[2])])
elif self.cut[0] == 0 and self.cut[1] is not 0:
start = 0
stop = self.cut[1] / self.dt
start = 0
stop = self.cut[1] / self.dt
else:
start = max([0, self.cut[0] / self.dt])
stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
len(self.orig_data[1]), len(self.orig_data[2])])
start = max([0, self.cut[0] / self.dt])
stop = min([self.cut[1] / self.dt, len(self.orig_data[0]),
len(self.orig_data[1]), len(self.orig_data[2])])
hh = self.orig_data.copy()
h1 = hh[0].copy()
h2 = hh[1].copy()
@ -223,13 +225,13 @@ class AICcf(CharacteristicFunction):
def calcCF(self, data):
#if self._getStealthMode() is False:
# if self._getStealthMode() is False:
# print 'Calculating AIC ...'
x = self.getDataArray()
xnp = x[0].data
nn = np.isnan(xnp)
if len(nn) > 1:
xnp[nn] = 0
xnp[nn] = 0
datlen = len(xnp)
k = np.arange(1, datlen)
cf = np.zeros(datlen)
@ -247,6 +249,7 @@ class AICcf(CharacteristicFunction):
self.cf = cf - np.mean(cf)
self.xcf = x
class HOScf(CharacteristicFunction):
'''
Function to calculate skewness (statistics of order 3) or kurtosis
@ -257,38 +260,38 @@ class HOScf(CharacteristicFunction):
def calcCF(self, data):
x = self.getDataArray(self.getCut())
xnp =x[0].data
xnp = x[0].data
nn = np.isnan(xnp)
if len(nn) > 1:
xnp[nn] = 0
xnp[nn] = 0
if self.getOrder() == 3: # this is skewness
#if self._getStealthMode() is False:
# if self._getStealthMode() is False:
# print 'Calculating skewness ...'
y = np.power(xnp, 3)
y1 = np.power(xnp, 2)
elif self.getOrder() == 4: # this is kurtosis
#if self._getStealthMode() is False:
# if self._getStealthMode() is False:
# print 'Calculating kurtosis ...'
y = np.power(xnp, 4)
y1 = np.power(xnp, 2)
#Initialisation
#t2: long term moving window
# Initialisation
# t2: long term moving window
ilta = int(round(self.getTime2() / self.getIncrement()))
lta = y[0]
lta1 = y1[0]
#moving windows
# moving windows
LTA = np.zeros(len(xnp))
for j in range(0, len(xnp)):
if j < 4:
LTA[j] = 0
elif j <= ilta:
lta = (y[j] + lta * (j-1)) / j
lta1 = (y1[j] + lta1 * (j-1)) / j
lta = (y[j] + lta * (j - 1)) / j
lta1 = (y1[j] + lta1 * (j - 1)) / j
else:
lta = (y[j] - y[j - ilta]) / ilta + lta
lta1 = (y1[j] - y1[j - ilta]) / ilta + lta1
#define LTA
# define LTA
if self.getOrder() == 3:
LTA[j] = lta / np.power(lta1, 1.5)
elif self.getOrder() == 4:
@ -296,13 +299,12 @@ class HOScf(CharacteristicFunction):
nn = np.isnan(LTA)
if len(nn) > 1:
LTA[nn] = 0
LTA[nn] = 0
self.cf = LTA
self.xcf = x
class ARZcf(CharacteristicFunction):
def calcCF(self, data):
print 'Calculating AR-prediction error from single trace ...'
@ -310,33 +312,33 @@ class ARZcf(CharacteristicFunction):
xnp = x[0].data
nn = np.isnan(xnp)
if len(nn) > 1:
xnp[nn] = 0
#some parameters needed
#add noise to time series
xnp[nn] = 0
# some parameters needed
# add noise to time series
xnoise = xnp + np.random.normal(0.0, 1.0, len(xnp)) * self.getFnoise() * max(abs(xnp))
tend = len(xnp)
#Time1: length of AR-determination window [sec]
#Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) #length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) #length of AR-prediction window [samples]
# Time1: length of AR-determination window [sec]
# Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) # length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) # length of AR-prediction window [samples]
cf = np.zeros(len(xnp))
loopstep = self.getARdetStep()
arcalci = ldet + self.getOrder() #AR-calculation index
arcalci = ldet + self.getOrder() # AR-calculation index
for i in range(ldet + self.getOrder(), tend - lpred - 1):
if i == arcalci:
#determination of AR coefficients
#to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
self.arDetZ(xnoise, self.getOrder(), i-ldet, i)
# determination of AR coefficients
# to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
self.arDetZ(xnoise, self.getOrder(), i - ldet, i)
arcalci = arcalci + loopstep[1]
#AR prediction of waveform using calculated AR coefficients
# AR prediction of waveform using calculated AR coefficients
self.arPredZ(xnp, self.arpara, i + 1, lpred)
#prediction error = CF
cf[i + lpred-1] = np.sqrt(np.sum(np.power(self.xpred[i:i + lpred-1] - xnp[i:i + lpred-1], 2)) / lpred)
# prediction error = CF
cf[i + lpred - 1] = np.sqrt(np.sum(np.power(self.xpred[i:i + lpred - 1] - xnp[i:i + lpred - 1], 2)) / lpred)
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
#remove zeros and artefacts
cf[nn] = 0
# remove zeros and artefacts
tap = np.hanning(len(cf))
cf = tap * cf
io = np.where(cf == 0)
@ -366,25 +368,25 @@ class ARZcf(CharacteristicFunction):
Output: AR parameters arpara
'''
#recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
# recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
rhs = np.zeros(self.getOrder())
for k in range(0, self.getOrder()):
for i in range(rind, ldet+1):
for i in range(rind, ldet + 1):
ki = k + 1
rhs[k] = rhs[k] + data[i] * data[i - ki]
#recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
A = np.zeros((self.getOrder(),self.getOrder()))
# recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
A = np.zeros((self.getOrder(), self.getOrder()))
for k in range(1, self.getOrder() + 1):
for j in range(1, k + 1):
for i in range(rind, ldet+1):
for i in range(rind, ldet + 1):
ki = k - 1
ji = j - 1
A[ki,ji] = A[ki,ji] + data[i - j] * data[i - k]
A[ki, ji] = A[ki, ji] + data[i - j] * data[i - k]
A[ji,ki] = A[ki,ji]
A[ji, ki] = A[ki, ji]
#apply Moore-Penrose inverse for SVD yielding the AR-parameters
# apply Moore-Penrose inverse for SVD yielding the AR-parameters
self.arpara = np.dot(np.linalg.pinv(A), rhs)
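Written out, the linear system assembled above and solved with the Moore-Penrose pseudoinverse is the least-squares normal equation system for the AR coefficients a_1, ..., a_p:
\sum_{j=1}^{p} a_j \sum_{i} x_{i-j}\, x_{i-k} \;=\; \sum_{i} x_i\, x_{i-k}, \qquad k = 1, \dots, p
with A_{kj} = \sum_i x_{i-j} x_{i-k} and \mathrm{rhs}_k = \sum_i x_i x_{i-k}, the index i running over the AR-determination window.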
def arPredZ(self, data, arpara, rind, lpred):
@ -406,10 +408,10 @@ class ARZcf(CharacteristicFunction):
Output: predicted waveform z
'''
#be sure of the summation indeces
# be sure of the summation indices
if rind < len(arpara):
rind = len(arpara)
if rind > len(data) - lpred :
if rind > len(data) - lpred:
rind = len(data) - lpred
if lpred < 1:
lpred = 1
@ -426,7 +428,6 @@ class ARZcf(CharacteristicFunction):
class ARHcf(CharacteristicFunction):
def calcCF(self, data):
print 'Calculating AR-prediction error from both horizontal traces ...'
@ -434,41 +435,42 @@ class ARHcf(CharacteristicFunction):
xnp = self.getDataArray(self.getCut())
n0 = np.isnan(xnp[0].data)
if len(n0) > 1:
xnp[0].data[n0] = 0
xnp[0].data[n0] = 0
n1 = np.isnan(xnp[1].data)
if len(n1) > 1:
xnp[1].data[n1] = 0
xnp[1].data[n1] = 0
#some parameters needed
#add noise to time series
# some parameters needed
# add noise to time series
xenoise = xnp[0].data + np.random.normal(0.0, 1.0, len(xnp[0].data)) * self.getFnoise() * max(abs(xnp[0].data))
xnnoise = xnp[1].data + np.random.normal(0.0, 1.0, len(xnp[1].data)) * self.getFnoise() * max(abs(xnp[1].data))
Xnoise = np.array( [xenoise.tolist(), xnnoise.tolist()] )
Xnoise = np.array([xenoise.tolist(), xnnoise.tolist()])
tend = len(xnp[0].data)
#Time1: length of AR-determination window [sec]
#Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) #length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) #length of AR-prediction window [samples]
# Time1: length of AR-determination window [sec]
# Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) # length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) # length of AR-prediction window [samples]
cf = np.zeros(len(xenoise))
loopstep = self.getARdetStep()
arcalci = lpred + self.getOrder() - 1 #AR-calculation index
#arcalci = ldet + self.getOrder() - 1 #AR-calculation index
arcalci = lpred + self.getOrder() - 1 # AR-calculation index
# arcalci = ldet + self.getOrder() - 1 #AR-calculation index
for i in range(lpred + self.getOrder() - 1, tend - 2 * lpred + 1):
if i == arcalci:
#determination of AR coefficients
#to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
self.arDetH(Xnoise, self.getOrder(), i-ldet, i)
# determination of AR coefficients
# to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
self.arDetH(Xnoise, self.getOrder(), i - ldet, i)
arcalci = arcalci + loopstep[1]
#AR prediction of waveform using calculated AR coefficients
# AR prediction of waveform using calculated AR coefficients
self.arPredH(xnp, self.arpara, i + 1, lpred)
#prediction error = CF
# prediction error = CF
cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2) \
+ np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2)) / (2 * lpred))
+ np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2)) / (
2 * lpred))
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
#remove zeros and artefacts
cf[nn] = 0
# remove zeros and artefacts
tap = np.hanning(len(cf))
cf = tap * cf
io = np.where(cf == 0)
@ -500,24 +502,24 @@ class ARHcf(CharacteristicFunction):
Output: AR parameters arpara
'''
#recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
# recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
rhs = np.zeros(self.getOrder())
for k in range(0, self.getOrder()):
for i in range(rind, ldet):
rhs[k] = rhs[k] + data[0,i] * data[0,i - k] + data[1,i] * data[1,i - k]
rhs[k] = rhs[k] + data[0, i] * data[0, i - k] + data[1, i] * data[1, i - k]
#recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
A = np.zeros((4,4))
# recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
A = np.zeros((4, 4))
for k in range(1, self.getOrder() + 1):
for j in range(1, k + 1):
for i in range(rind, ldet):
ki = k - 1
ji = j - 1
A[ki,ji] = A[ki,ji] + data[0,i - ji] * data[0,i - ki] + data[1,i - ji] *data[1,i - ki]
A[ki, ji] = A[ki, ji] + data[0, i - ji] * data[0, i - ki] + data[1, i - ji] * data[1, i - ki]
A[ji,ki] = A[ki,ji]
A[ji, ki] = A[ki, ji]
#apply Moore-Penrose inverse for SVD yielding the AR-parameters
# apply Moore-Penrose inverse for SVD yielding the AR-parameters
self.arpara = np.dot(np.linalg.pinv(A), rhs)
def arPredH(self, data, arpara, rind, lpred):
@ -540,7 +542,7 @@ class ARHcf(CharacteristicFunction):
Output: predicted waveform z
:type: structured array
'''
#be sure of the summation indeces
# be sure of the summation indices
if rind < len(arpara) + 1:
rind = len(arpara) + 1
if rind > len(data[0]) - lpred + 1:
@ -558,11 +560,11 @@ class ARHcf(CharacteristicFunction):
z1[i] = z1[i] + arpara[ji] * z1[i - ji]
z2[i] = z2[i] + arpara[ji] * z2[i - ji]
z = np.array( [z1.tolist(), z2.tolist()] )
z = np.array([z1.tolist(), z2.tolist()])
self.xpred = z
class AR3Ccf(CharacteristicFunction):
class AR3Ccf(CharacteristicFunction):
def calcCF(self, data):
print 'Calculating AR-prediction error from all 3 components ...'
@ -570,46 +572,47 @@ class AR3Ccf(CharacteristicFunction):
xnp = self.getDataArray(self.getCut())
n0 = np.isnan(xnp[0].data)
if len(n0) > 1:
xnp[0].data[n0] = 0
xnp[0].data[n0] = 0
n1 = np.isnan(xnp[1].data)
if len(n1) > 1:
xnp[1].data[n1] = 0
xnp[1].data[n1] = 0
n2 = np.isnan(xnp[2].data)
if len(n2) > 1:
xnp[2].data[n2] = 0
xnp[2].data[n2] = 0
#some parameters needed
#add noise to time series
# some parameters needed
# add noise to time series
xenoise = xnp[0].data + np.random.normal(0.0, 1.0, len(xnp[0].data)) * self.getFnoise() * max(abs(xnp[0].data))
xnnoise = xnp[1].data + np.random.normal(0.0, 1.0, len(xnp[1].data)) * self.getFnoise() * max(abs(xnp[1].data))
xznoise = xnp[2].data + np.random.normal(0.0, 1.0, len(xnp[2].data)) * self.getFnoise() * max(abs(xnp[2].data))
Xnoise = np.array( [xenoise.tolist(), xnnoise.tolist(), xznoise.tolist()] )
Xnoise = np.array([xenoise.tolist(), xnnoise.tolist(), xznoise.tolist()])
tend = len(xnp[0].data)
#Time1: length of AR-determination window [sec]
#Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) #length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) #length of AR-prediction window [samples]
# Time1: length of AR-determination window [sec]
# Time2: length of AR-prediction window [sec]
ldet = int(round(self.getTime1() / self.getIncrement())) # length of AR-determination window [samples]
lpred = int(np.ceil(self.getTime2() / self.getIncrement())) # length of AR-prediction window [samples]
cf = np.zeros(len(xenoise))
loopstep = self.getARdetStep()
arcalci = ldet + self.getOrder() - 1 #AR-calculation index
arcalci = ldet + self.getOrder() - 1 # AR-calculation index
for i in range(ldet + self.getOrder() - 1, tend - 2 * lpred + 1):
if i == arcalci:
#determination of AR coefficients
#to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
self.arDet3C(Xnoise, self.getOrder(), i-ldet, i)
# determination of AR coefficients
# to speed up calculation, AR-coefficients are calculated only every i+loopstep[1]!
self.arDet3C(Xnoise, self.getOrder(), i - ldet, i)
arcalci = arcalci + loopstep[1]
#AR prediction of waveform using calculated AR coefficients
# AR prediction of waveform using calculated AR coefficients
self.arPred3C(xnp, self.arpara, i + 1, lpred)
#prediction error = CF
# prediction error = CF
cf[i + lpred] = np.sqrt(np.sum(np.power(self.xpred[0][i:i + lpred] - xnp[0][i:i + lpred], 2) \
+ np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2) \
+ np.power(self.xpred[2][i:i + lpred] - xnp[2][i:i + lpred], 2)) / (3 * lpred))
+ np.power(self.xpred[1][i:i + lpred] - xnp[1][i:i + lpred], 2) \
+ np.power(self.xpred[2][i:i + lpred] - xnp[2][i:i + lpred], 2)) / (
3 * lpred))
nn = np.isnan(cf)
if len(nn) > 1:
cf[nn] = 0
#remove zeros and artefacts
cf[nn] = 0
# remove zeros and artefacts
tap = np.hanning(len(cf))
cf = tap * cf
io = np.where(cf == 0)
@ -641,26 +644,26 @@ class AR3Ccf(CharacteristicFunction):
Output: AR parameters arpara
'''
#recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
# recursive calculation of data vector (right part of eq. 6.5 in Kueperkoch et al. (2012)
rhs = np.zeros(self.getOrder())
for k in range(0, self.getOrder()):
for i in range(rind, ldet):
rhs[k] = rhs[k] + data[0,i] * data[0,i - k] + data[1,i] * data[1,i - k] \
+ data[2,i] * data[2,i - k]
rhs[k] = rhs[k] + data[0, i] * data[0, i - k] + data[1, i] * data[1, i - k] \
+ data[2, i] * data[2, i - k]
#recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
A = np.zeros((4,4))
# recursive calculation of data array (second sum at left part of eq. 6.5 in Kueperkoch et al. 2012)
A = np.zeros((4, 4))
for k in range(1, self.getOrder() + 1):
for j in range(1, k + 1):
for i in range(rind, ldet):
ki = k - 1
ji = j - 1
A[ki,ji] = A[ki,ji] + data[0,i - ji] * data[0,i - ki] + data[1,i - ji] *data[1,i - ki] \
+ data[2,i - ji] *data[2,i - ki]
A[ki, ji] = A[ki, ji] + data[0, i - ji] * data[0, i - ki] + data[1, i - ji] * data[1, i - ki] \
+ data[2, i - ji] * data[2, i - ki]
A[ji,ki] = A[ki,ji]
A[ji, ki] = A[ki, ji]
#apply Moore-Penrose inverse for SVD yielding the AR-parameters
# apply Moore-Penrose inverse for SVD yielding the AR-parameters
self.arpara = np.dot(np.linalg.pinv(A), rhs)
def arPred3C(self, data, arpara, rind, lpred):
@ -683,7 +686,7 @@ class AR3Ccf(CharacteristicFunction):
Output: predicted waveform z
:type: structured array
'''
#be sure of the summation indeces
# be sure of the summation indices
if rind < len(arpara) + 1:
rind = len(arpara) + 1
if rind > len(data[0]) - lpred + 1:
@ -703,5 +706,5 @@ class AR3Ccf(CharacteristicFunction):
z2[i] = z2[i] + arpara[ji] * z2[i - ji]
z3[i] = z3[i] + arpara[ji] * z3[i - ji]
z = np.array( [z1.tolist(), z2.tolist(), z3.tolist()] )
z = np.array([z1.tolist(), z2.tolist(), z3.tolist()])
self.xpred = z

259
pylot/core/pick/compare.py Normal file
View File

@ -0,0 +1,259 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy
import numpy as np
import matplotlib.pyplot as plt
from obspy import read_events
from pylot.core.read.io import picks_from_evt
from pylot.core.util.pdf import ProbabilityDensityFunction
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
__author__ = 'sebastianw'
class Comparison(object):
"""
A Comparison object contains information on the evaluated picks' probability
density functions and compares two pick sets by building the difference of
their picks. The results can be displayed as histograms showing the
properties of the comparison.
"""
def __init__(self, **kwargs):
names = list()
self._pdfs = dict()
for name, fn in kwargs.items():
self._pdfs[name] = PDFDictionary.from_quakeml(fn)
names.append(name)
if len(names) > 2:
raise ValueError('Comparison is only defined for two '
'arguments!')
self._names = names
self._compare = self.compare_picksets()
def __nonzero__(self):
if not len(self.names) == 2 or not self._pdfs:
return False
return True
def get(self, name):
return self._pdfs[name]
@property
def names(self):
return self._names
@names.setter
def names(self, names):
assert isinstance(names, list) and len(names) == 2, 'variable "names"' \
' is either not a' \
' list or its ' \
'length is not 2:' \
'names : {names}'.format(
names=names)
self._names = names
@property
def comparison(self):
return self._compare
@property
def stations(self):
return self.comparison.keys()
@property
def nstations(self):
return len(self.stations)
def compare_picksets(self, type='exp'):
"""
Compare two picksets A and B and return a dictionary compiling the results.
Comparison is carried out with the help of pdf representation of the picks
and a probabilistic approach to the time difference of two onset
measurements.
:param type: type of the pick PDFs used for the comparison ('exp' or 'gauss')
:type type: str
:return: dictionary containing the resulting comparison pdfs for all picks
:rtype: dict
"""
compare_pdfs = dict()
pdf_a = self.get(self.names[0]).pdf_data(type)
pdf_b = self.get(self.names[1]).pdf_data(type)
for station, phases in pdf_a.items():
if station in pdf_b.keys():
compare_pdf = dict()
for phase in phases:
if phase in pdf_b[station].keys():
compare_pdf[phase] = phases[phase] - pdf_b[station][
phase]
if compare_pdf is not None:
compare_pdfs[station] = compare_pdf
return compare_pdfs
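A minimal usage sketch for the comparison result (the QuakeML file names and the station code are made up; Comparison expects exactly two keyword arguments pointing to pick files, as in the commented example at the end of this module):
comp = Comparison(manual='manual_picks.xml', auto='auto_picks.xml')
p_diff = comp.comparison['STA01']['P']    # PDF of the P-onset difference at station STA01
print(p_diff.expectation(), p_diff.standard_deviation())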
def plot(self):
nstations = self.nstations
stations = self.stations
istations = range(nstations)
fig, axarr = plt.subplots(nstations, 2, sharex='col', sharey='row')
for n in istations:
station = stations[n]
compare_pdf = self.comparison[station]
for l, phase in enumerate(compare_pdf.keys()):
axarr[n, l].plot(compare_pdf[phase].axis,
compare_pdf[phase].data)
if n == 0:
axarr[n, l].set_title(phase)
if l == 0:
axann = axarr[n, l].annotate(station, xy=(.05, .5),
xycoords='axes fraction')
bbox_props = dict(boxstyle='round', facecolor='lightgrey',
alpha=.7)
axann.set_bbox(bbox_props)
if n == int(np.median(istations)) and l == 0:
label = 'probability density (qualitative)'
axarr[n, l].set_ylabel(label)
plt.setp([a.get_xticklabels() for a in axarr[0, :]], visible=False)
plt.setp([a.get_yticklabels() for a in axarr[:, 1]], visible=False)
plt.setp([a.get_yticklabels() for a in axarr[:, 0]], visible=False)
plt.show()
def get_expectation_array(self, phase):
pdf_dict = self.comparison
exp_array = list()
for station, phases in pdf_dict.items():
try:
exp_array.append(phases[phase].expectation())
except KeyError as e:
print('{err_msg}; station = {station}, phase = {phase}'.format(
err_msg=str(e), station=station, phase=phase))
continue
return exp_array
def get_std_array(self, phase):
pdf_dict = self.comparison
std_array = list()
for station, phases in pdf_dict.items():
try:
std_array.append(phases[phase].standard_deviation())
except KeyError as e:
print('{err_msg}; station = {station}, phase = {phase}'.format(
err_msg=str(e), station=station, phase=phase))
continue
return std_array
def hist_expectation(self, phases='all', bins=20, normed=False):
phases = phases.strip()
if phases.find('all') == 0:
phases = 'ps'
phases = phases.upper()
nsp = len(phases)
fig, axarray = plt.subplots(1, nsp, sharey=True)
for n, phase in enumerate(phases):
ax = axarray[n]
data = self.get_expectation_array(phase)
xlims = [min(data), max(data)]
ax.hist(data, range=xlims, bins=bins, normed=normed)
title_str = 'phase: {0}, samples: {1}'.format(phase, len(data))
ax.set_title(title_str)
ax.set_xlabel('expectation [s]')
if n == 0:
ax.set_ylabel('abundance [-]')
plt.setp([a.get_yticklabels() for a in axarray[1:]], visible=False)
plt.show()
def hist_standard_deviation(self, phases='all', bins=20, normed=False):
phases = phases.strip()
if phases.find('all') == 0:
phases = 'ps'
phases = phases.upper()
nsp = len(phases)
fig, axarray = plt.subplots(1, nsp, sharey=True)
for n, phase in enumerate(phases):
ax = axarray[n]
data = self.get_std_array(phase)
xlims = [min(data), max(data)]
ax.hist(data, range=xlims, bins=bins, normed=normed)
title_str = 'phase: {0}, samples: {1}'.format(phase, len(data))
ax.set_title(title_str)
ax.set_xlabel('standard deviation [s]')
if n == 0:
ax.set_ylabel('abundance [-]')
plt.setp([a.get_yticklabels() for a in axarray[1:]], visible=False)
plt.show()
def hist(self, type='std'):
pass
class PDFDictionary(object):
"""
A PDFDictionary is a dictionary like object containing structured data on
the probability density function of seismic phase onsets.
"""
def __init__(self, data):
self._pickdata = data
def __nonzero__(self):
if len(self.pick_data) < 1:
return False
else:
return True
@property
def pick_data(self):
return self._pickdata
@pick_data.setter
def pick_data(self, data):
self._pickdata = data
@classmethod
def from_quakeml(self, fn):
cat = read_events(fn)
if len(cat) > 1:
raise NotImplementedError('reading more than one event at the same '
'time is not implemented yet! Sorry!')
return PDFDictionary(picks_from_evt(cat[0]))
def pdf_data(self, type='exp'):
"""
Returns a probability density function dictionary containing the
representation of the actual pick_data.
:param type: type of the returned
`~pylot.core.util.pdf.ProbabilityDensityFunction` object
:type type: str
:return: a dictionary containing the picks represented as pdfs
"""
pdf_picks = copy.deepcopy(self.pick_data)
for station, phases in pdf_picks.items():
for phase, values in phases.items():
phases[phase] = ProbabilityDensityFunction.from_pick(
values['epp'],
values['mpp'],
values['lpp'],
type=type)
return pdf_picks
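A short usage sketch for PDFDictionary (file and station names are hypothetical):
pdfs = PDFDictionary.from_quakeml('picks.xml')
pdf_dict = pdfs.pdf_data(type='exp')
p_pdf = pdf_dict['STA01']['P']    # ProbabilityDensityFunction of the P onset at STA01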
#comp_obj = Comparison(manual='/home/sebastianp/Data/Insheim/e0019.048.13.xml',
# auto='/data/Geothermie/Insheim/EVENT_DATA/LOCAL/RefDB/e0019.048.13/autoPyLoT.xml')
#comp_obj.plot()
#comp_obj.hist_expectation()
#comp_obj.hist_standard_deviation()

View File

@ -22,10 +22,11 @@ calculated after Diehl & Kissling (2009).
import numpy as np
import matplotlib.pyplot as plt
from pylot.core.pick.utils import getnoisewin, getsignalwin
from pylot.core.pick.CharFuns import CharacteristicFunction
from pylot.core.pick.charfuns import CharacteristicFunction
import warnings
class AutoPicking(object):
class AutoPicker(object):
'''
Superclass of different automated picking algorithms applied to a CF determined
using AIC, HOS, or AR prediction.
@ -87,7 +88,6 @@ class AutoPicking(object):
Tsmooth=self.getTsmooth(),
Pick1=self.getpick1())
def getTSNR(self):
return self.TSNR
@ -137,7 +137,7 @@ class AutoPicking(object):
self.Pick = None
class AICPicker(AutoPicking):
class AICPicker(AutoPicker):
'''
Method to derive the onset time of an arriving phase based on CF
derived from AIC. In order to get an impression of the quality of this initial pick,
@ -152,14 +152,14 @@ class AICPicker(AutoPicking):
self.Pick = None
self.slope = None
self.SNR = None
#find NaN's
# find NaN's
nn = np.isnan(self.cf)
if len(nn) > 1:
self.cf[nn] = 0
#taper AIC-CF to get rid off side maxima
# taper AIC-CF to get rid of side maxima
tap = np.hanning(len(self.cf))
aic = tap * self.cf + max(abs(self.cf))
#smooth AIC-CF
# smooth AIC-CF
ismooth = int(round(self.Tsmooth / self.dt))
aicsmooth = np.zeros(len(aic))
if len(aic) < ismooth:
@ -171,32 +171,32 @@ class AICPicker(AutoPicking):
ii1 = i - ismooth
aicsmooth[i] = aicsmooth[i - 1] + (aic[i] - aic[ii1]) / ismooth
else:
aicsmooth[i] = np.mean(aic[1 : i])
#remove offset
aicsmooth[i] = np.mean(aic[1: i])
# remove offset
offset = abs(min(aic) - min(aicsmooth))
aicsmooth = aicsmooth - offset
#get maximum of 1st derivative of AIC-CF (more stable!) as starting point
# get maximum of 1st derivative of AIC-CF (more stable!) as starting point
diffcf = np.diff(aicsmooth)
#find NaN's
# find NaN's
nn = np.isnan(diffcf)
if len(nn) > 1:
diffcf[nn] = 0
#taper CF to get rid off side maxima
# taper CF to get rid of side maxima
tap = np.hanning(len(diffcf))
diffcf = tap * diffcf * max(abs(aicsmooth))
icfmax = np.argmax(diffcf)
#find minimum in AIC-CF front of maximum
# find minimum in AIC-CF front of maximum
lpickwindow = int(round(self.PickWindow / self.dt))
for i in range(icfmax - 1, max([icfmax - lpickwindow, 2]), -1):
if aicsmooth[i - 1] >= aicsmooth[i]:
self.Pick = self.Tcf[i]
break
#if no minimum could be found:
#search in 1st derivative of AIC-CF
# if no minimum could be found:
# search in 1st derivative of AIC-CF
if self.Pick is None:
for i in range(icfmax -1, max([icfmax -lpickwindow, 2]), -1):
if diffcf[i -1] >= diffcf[i]:
for i in range(icfmax - 1, max([icfmax - lpickwindow, 2]), -1):
if diffcf[i - 1] >= diffcf[i]:
self.Pick = self.Tcf[i]
break
@ -215,7 +215,7 @@ class AICPicker(AutoPicking):
max(abs(aic[inoise] - np.mean(aic[inoise])))
# calculate slope from CF after initial pick
# get slope window
tslope = self.TSNR[3] #slope determination window
tslope = self.TSNR[3] # slope determination window
islope = np.where((self.Tcf <= min([self.Pick + tslope, len(self.Data[0].data)])) \
& (self.Tcf >= self.Pick))
# find maximum within slope determination window
@ -237,7 +237,7 @@ class AICPicker(AutoPicking):
raw_input()
plt.close(p)
return
islope = islope[0][0 :imax]
islope = islope[0][0:imax]
dataslope = self.Data[0].data[islope]
# calculate slope as polynomal fit of order 1
xslope = np.arange(0, len(dataslope), 1)
@ -258,7 +258,7 @@ class AICPicker(AutoPicking):
p1, = plt.plot(self.Tcf, x / max(x), 'k')
p2, = plt.plot(self.Tcf, aicsmooth / max(aicsmooth), 'r')
if self.Pick is not None:
p3, = plt.plot([self.Pick, self.Pick], [-0.1 , 0.5], 'b', linewidth=2)
p3, = plt.plot([self.Pick, self.Pick], [-0.1, 0.5], 'b', linewidth=2)
plt.legend([p1, p2, p3], ['(HOS-/AR-) Data', 'Smoothed AIC-CF', 'AIC-Pick'])
else:
plt.legend([p1, p2], ['(HOS-/AR-) Data', 'Smoothed AIC-CF'])
@ -273,7 +273,8 @@ class AICPicker(AutoPicking):
p13, = plt.plot(self.Tcf[isignal], self.Data[0].data[isignal], 'r')
p14, = plt.plot(self.Tcf[islope], dataslope, 'g--')
p15, = plt.plot(self.Tcf[islope], datafit, 'g', linewidth=2)
plt.legend([p11, p12, p13, p14, p15], ['Data', 'Noise Window', 'Signal Window', 'Slope Window', 'Slope'],
plt.legend([p11, p12, p13, p14, p15],
['Data', 'Noise Window', 'Signal Window', 'Slope Window', 'Slope'],
loc='best')
plt.title('Station %s, SNR=%7.2f, Slope= %12.2f counts/s' % (self.Data[0].stats.station,
self.SNR, self.slope))
@ -289,7 +290,7 @@ class AICPicker(AutoPicking):
print('AICPicker: Could not find minimum, picking window too short?')
class PragPicker(AutoPicking):
class PragPicker(AutoPicker):
'''
Method of pragmatic picking exploiting information given by CF.
'''
@ -303,7 +304,7 @@ class PragPicker(AutoPicking):
self.SNR = None
self.slope = None
pickflag = 0
#smooth CF
# smooth CF
ismooth = int(round(self.Tsmooth / self.dt))
cfsmooth = np.zeros(len(self.cf))
if len(self.cf) < ismooth:
@ -315,28 +316,28 @@ class PragPicker(AutoPicking):
ii1 = i - ismooth
cfsmooth[i] = cfsmooth[i - 1] + (self.cf[i] - self.cf[ii1]) / ismooth
else:
cfsmooth[i] = np.mean(self.cf[1 : i])
cfsmooth[i] = np.mean(self.cf[1: i])
#select picking window
#which is centered around tpick1
# select picking window
# which is centered around tpick1
ipick = np.where((self.Tcf >= self.getpick1() - self.PickWindow / 2) \
& (self.Tcf <= self.getpick1() + self.PickWindow / 2))
cfipick = self.cf[ipick] - np.mean(self.cf[ipick])
Tcfpick = self.Tcf[ipick]
cfsmoothipick = cfsmooth[ipick]- np.mean(self.cf[ipick])
cfsmoothipick = cfsmooth[ipick] - np.mean(self.cf[ipick])
ipick1 = np.argmin(abs(self.Tcf - self.getpick1()))
cfpick1 = 2 * self.cf[ipick1]
#check trend of CF, i.e. differences of CF and adjust aus regarding this trend
#prominent trend: decrease aus
#flat: use given aus
# check trend of CF, i.e. differences of CF and adjust aus regarding this trend
# prominent trend: decrease aus
# flat: use given aus
cfdiff = np.diff(cfipick)
i0diff = np.where(cfdiff > 0)
cfdiff = cfdiff[i0diff]
minaus = min(cfdiff * (1 + self.aus))
aus1 = max([minaus, self.aus])
#at first we look to the right until the end of the pick window is reached
# at first we look to the right until the end of the pick window is reached
flagpick_r = 0
flagpick_l = 0
cfpick_r = 0
@ -380,8 +381,8 @@ class PragPicker(AutoPicking):
if self.getiplot() > 1:
p = plt.figure(self.getiplot())
p1, = plt.plot(Tcfpick,cfipick, 'k')
p2, = plt.plot(Tcfpick,cfsmoothipick, 'r')
p1, = plt.plot(Tcfpick, cfipick, 'k')
p2, = plt.plot(Tcfpick, cfsmoothipick, 'r')
if pickflag > 0:
p3, = plt.plot([self.Pick, self.Pick], [min(cfipick), max(cfipick)], 'b', linewidth=2)
plt.legend([p1, p2, p3], ['CF', 'Smoothed CF', 'Pick'])

View File

@ -15,7 +15,7 @@ from obspy.core import Stream, UTCDateTime
import warnings
def earllatepicker(X, nfac, TSNR, Pick1, iplot=None, stealthMode = False):
def earllatepicker(X, nfac, TSNR, Pick1, iplot=None, stealthMode=False):
'''
Function to derive earliest and latest possible pick after Diehl & Kissling (2009)
as reasonable uncertainties. Latest possible pick is based on noise level,
@ -70,7 +70,8 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=None, stealthMode = False):
# get earliest possible pick
EPick = np.nan; count = 0
EPick = np.nan;
count = 0
pis = isignal
# if EPick stays NaN the signal window size will be doubled
@ -78,10 +79,10 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=None, stealthMode = False):
if count > 0:
if stealthMode is False:
print("\nearllatepicker: Doubled signal window size %s time(s) "
"because of NaN for earliest pick." %count)
"because of NaN for earliest pick." % count)
isigDoubleWinStart = pis[-1] + 1
isignalDoubleWin = np.arange(isigDoubleWinStart,
isigDoubleWinStart + len(pis))
isigDoubleWinStart + len(pis))
if (isigDoubleWinStart + len(pis)) < X[0].data.size:
pis = np.concatenate((pis, isignalDoubleWin))
else:
@ -92,8 +93,7 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=None, stealthMode = False):
zc = crossings_nonzero_all(x[pis] - x[pis].mean())
# calculate mean half period T0 of signal as the average of the zero-crossing intervals
T0 = np.mean(np.diff(zc)) * X[0].stats.delta # this is half wave length!
EPick = Pick1 - T0 # half wavelength as suggested by Diehl et al.
EPick = Pick1 - T0 # half wavelength as suggested by Diehl et al.
# get symmetric pick error as mean from earliest and latest possible pick
# by weighting latest possible pick two times earliest possible pick
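Following that comment (latest possible pick weighted twice), the symmetric pick error presumably amounts to the weighted mean
\mathrm{PickError} = \frac{(t_m - t_e) + 2\,(t_l - t_m)}{3}
with t_e, t_m and t_l the earliest, most likely and latest possible pick; the exact expression is elided in this hunk.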
@ -133,117 +133,6 @@ def earllatepicker(X, nfac, TSNR, Pick1, iplot=None, stealthMode = False):
return EPick, LPick, PickError
def gauss_parameter(te, tm, tl, eta):
'''
takes three onset times and returns the parameters sig1, sig2, a1 and a2
to represent the pick as a probability density funtion (PDF) with two
Gauss branches
:param te:
:param tm:
:param tl:
:param eta:
:return:
'''
sig1 = (tm - te) / np.sqrt(2 * np.log(1 / eta))
sig2 = (tl - tm) / np.sqrt(2 * np.log(1 / eta))
a1 = 2 / (1 + sig2 / sig1)
a2 = 2 / (1 + sig1 / sig2)
return sig1, sig2, a1, a2
def exp_parameter(te, tm, tl, eta):
'''
takes three onset times te, tm and tl and returns the parameters sig1,
sig2 and a to represent the pick as a probability density function (PDF)
with two exponential decay branches
:param te:
:param tm:
:param tl:
:param eta:
:return:
'''
sig1 = np.log(eta) / (te - tm)
sig2 = np.log(eta) / (tm - tl)
a = 1 / (1 / sig1 + 1 / sig2)
return sig1, sig2, a
def gauss_branches(x, mu, sig1, sig2, a1, a2):
'''
function gauss_branches takes an axes x, a center value mu, two sigma
values sig1 and sig2 and two scaling factors a1 and a2 and return a
list containing the values of a probability density function (PDF)
consisting of gauss branches
:param x:
:type x:
:param mu:
:type mu:
:param sig1:
:type sig1:
:param sig2:
:type sig2:
:param a1:
:type a1:
:param a2:
:returns fun_vals: list with function values along axes x
'''
fun_vals = []
for k in x:
if k < mu:
fun_vals.append(a1 * 1 / (np.sqrt(2 * np.pi) * sig1) * np.exp(-((k - mu) / sig1)**2 / 2 ))
else:
fun_vals.append(a2 * 1 / (np.sqrt(2 * np.pi) * sig2) * np.exp(-((k - mu) / sig2)**2 / 2))
return fun_vals
def exp_branches(x, mu, sig1, sig2, a):
'''
function exp_branches takes an axes x, a center value mu, two sigma
values sig1 and sig2 and a scaling factor a and return a
list containing the values of a probability density function (PDF)
consisting of exponential decay branches
:param x:
:param mu:
:param sig1:
:param sig2:
:param a:
:returns fun_vals: list with function values along axes x:
'''
fun_vals = []
for k in x:
if k < mu:
fun_vals.append(a * np.exp(sig1 * (k - mu)))
else:
fun_vals.append(a * np.exp(-sig2 * (k - mu)))
return fun_vals
def pick_pdf(t, te, tm, tl, type='gauss', eta=0.01):
'''
:param t:
:param te:
:param tm:
:param tl:
:param type:
:param eta:
:param args:
:return:
'''
parameter = dict(gauss=gauss_parameter, exp=exp_parameter)
branches = dict(gauss=gauss_branches, exp=exp_branches)
params = parameter[type](te, tm, tl, eta)
return branches[type](t, tm, *params)
def fmpicker(Xraw, Xfilt, pickwin, Pick, iplot=None):
'''
Function to derive first motion (polarity) of given phase onset Pick.
@ -432,7 +321,7 @@ def crossings_nonzero_all(data):
return ((pos[:-1] & npos[1:]) | (npos[:-1] & pos[1:])).nonzero()[0]
def getSNR(X, TSNR, t1):
def getSNR(X, TSNR, t1, tracenum=0):
'''
Function to calculate the SNR of a certain part of a seismogram relative to a
given time (onset), using given noise and signal windows. A safety gap
@ -451,9 +340,11 @@ def getSNR(X, TSNR, t1):
assert isinstance(X, Stream), "%s is not a stream object" % str(X)
x = X[0].data
t = np.arange(0, X[0].stats.npts / X[0].stats.sampling_rate,
X[0].stats.delta)
x = X[tracenum].data
npts = X[tracenum].stats.npts
sr = X[tracenum].stats.sampling_rate
dt = X[tracenum].stats.delta
t = np.arange(0, npts / sr, dt)
# get noise window
inoise = getnoisewin(t, t1, TSNR[0], TSNR[1])
@ -471,8 +362,12 @@ def getSNR(X, TSNR, t1):
x = x - np.mean(x[inoise])
# calculate ratios
noiselevel = np.sqrt(np.mean(np.square(x[inoise])))
signallevel = np.sqrt(np.mean(np.square(x[isignal])))
# noiselevel = np.sqrt(np.mean(np.square(x[inoise])))
# signallevel = np.sqrt(np.mean(np.square(x[isignal])))
noiselevel = np.abs(x[inoise]).max()
signallevel = np.abs(x[isignal]).max()
SNR = signallevel / noiselevel
SNRdB = 10 * np.log10(SNR)
@ -500,7 +395,7 @@ def getnoisewin(t, t1, tnoise, tgap):
# get noise window
inoise, = np.where((t <= max([t1 - tgap, 0])) \
& (t >= max([t1 - tnoise - tgap, 0])))
& (t >= max([t1 - tnoise - tgap, 0])))
if np.size(inoise) < 1:
print ("getnoisewin: Empty array inoise, check noise window!")
@ -524,7 +419,7 @@ def getsignalwin(t, t1, tsignal):
# get signal window
isignal, = np.where((t <= min([t1 + tsignal, len(t)])) \
& (t >= t1))
& (t >= t1))
if np.size(isignal) < 1:
print ("getsignalwin: Empty array isignal, check signal window!")
@ -565,7 +460,7 @@ def getResolutionWindow(snr):
else:
time_resolution = res_wins['HRW']
return time_resolution/2
return time_resolution / 2
def wadaticheck(pickdic, dttolerance, iplot):
@ -593,17 +488,16 @@ def wadaticheck(pickdic, dttolerance, iplot):
SPtimes = []
for key in pickdic:
if pickdic[key]['P']['weight'] < 4 and pickdic[key]['S']['weight'] < 4:
# calculate S-P time
spt = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
# add S-P time to dictionary
pickdic[key]['SPt'] = spt
# add P onsets and corresponding S-P times to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
UTCSpick = UTCDateTime(pickdic[key]['S']['mpp'])
Ppicks.append(UTCPpick.timestamp)
Spicks.append(UTCSpick.timestamp)
SPtimes.append(spt)
# calculate S-P time
spt = pickdic[key]['S']['mpp'] - pickdic[key]['P']['mpp']
# add S-P time to dictionary
pickdic[key]['SPt'] = spt
# add P onsets and corresponding S-P times to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
UTCSpick = UTCDateTime(pickdic[key]['S']['mpp'])
Ppicks.append(UTCPpick.timestamp)
Spicks.append(UTCSpick.timestamp)
SPtimes.append(spt)
if len(SPtimes) >= 3:
# calculate slope
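For reference, the Wadati relation underlying this check: assuming a constant velocity ratio, the S-P times grow linearly with the P travel time,
t_S - t_P = \left(\frac{v_P}{v_S} - 1\right)\,(t_P - t_0)
so the slope of the fitted S-P vs. P-onset line yields v_P/v_S - 1 (t_0 being the origin time); the fit itself is elided in this hunk.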
@ -635,7 +529,7 @@ def wadaticheck(pickdic, dttolerance, iplot):
ibad += 1
else:
marker = 'goodWadatiCheck'
checkedPpick = UTCDateTime(pickdic[key]['P']['mpp'])
checkedPpick = UTCDateTime(pickdic[key]['P']['mpp'])
checkedPpicks.append(checkedPpick.timestamp)
checkedSpick = UTCDateTime(pickdic[key]['S']['mpp'])
checkedSpicks.append(checkedSpick.timestamp)
@ -747,7 +641,7 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
# calculate minimum adjusted signal level
minsiglevel = max(rms[inoise]) * nfac
# minimum adjusted number of samples over minimum signal level
minnum = len(isignal) * minpercent/100
minnum = len(isignal) * minpercent / 100
# get number of samples above minimum adjusted signal level
numoverthr = len(np.where(rms[isignal] >= minsiglevel)[0])
@ -762,10 +656,10 @@ def checksignallength(X, pick, TSNR, minsiglength, nfac, minpercent, iplot):
if iplot == 2:
plt.figure(iplot)
p1, = plt.plot(t,rms, 'k')
p1, = plt.plot(t, rms, 'k')
p2, = plt.plot(t[inoise], rms[inoise], 'c')
p3, = plt.plot(t[isignal],rms[isignal], 'r')
p4, = plt.plot([t[isignal[0]], t[isignal[len(isignal)-1]]],
p3, = plt.plot(t[isignal], rms[isignal], 'r')
p4, = plt.plot([t[isignal[0]], t[isignal[len(isignal) - 1]]],
[minsiglevel, minsiglevel], 'g', linewidth=2)
p5, = plt.plot([pick, pick], [min(rms), max(rms)], 'b', linewidth=2)
plt.legend([p1, p2, p3, p4, p5], ['RMS Data', 'RMS Noise Window',
@ -806,15 +700,15 @@ def checkPonsets(pickdic, dttolerance, iplot):
stations = []
for key in pickdic:
if pickdic[key]['P']['weight'] < 4:
# add P onsets to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
Ppicks.append(UTCPpick.timestamp)
stations.append(key)
# add P onsets to list
UTCPpick = UTCDateTime(pickdic[key]['P']['mpp'])
Ppicks.append(UTCPpick.timestamp)
stations.append(key)
# apply jackknife bootstrapping on variance of P onsets
print ("###############################################")
print ("checkPonsets: Apply jackknife bootstrapping on P-onset times ...")
[xjack,PHI_pseudo,PHI_sub] = jackknife(Ppicks, 'VAR', 1)
[xjack, PHI_pseudo, PHI_sub] = jackknife(Ppicks, 'VAR', 1)
# get pseudo variances smaller than average variances
# (times safety factor), these picks passed jackknife test
ij = np.where(PHI_pseudo <= 2 * xjack)
@ -835,7 +729,7 @@ def checkPonsets(pickdic, dttolerance, iplot):
print ("checkPonsets: %d pick(s) deviate too much from median!" % len(ibad))
print ("checkPonsets: Skipped %d P pick(s) out of %d" % (len(badstations) \
+ len(badjkstations), len(stations)))
+ len(badjkstations), len(stations)))
goodmarker = 'goodPonsetcheck'
badmarker = 'badPonsetcheck'
@ -986,10 +880,9 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
if len(ndat) == 0: # check for other components
ndat = X.select(component="1")
z = zdat[0].data
tz = np.arange(0, zdat[0].stats.npts / zdat[0].stats.sampling_rate,
zdat[0].stats.delta)
zdat[0].stats.delta)
# calculate RMS trace from vertical component
absz = np.sqrt(np.power(z, 2))
@ -1021,9 +914,9 @@ def checkZ4S(X, pick, zfac, checkwin, iplot):
if iplot > 1:
te = np.arange(0, edat[0].stats.npts / edat[0].stats.sampling_rate,
edat[0].stats.delta)
edat[0].stats.delta)
tn = np.arange(0, ndat[0].stats.npts / ndat[0].stats.sampling_rate,
ndat[0].stats.delta)
ndat[0].stats.delta)
plt.plot(tz, z / max(z), 'k')
plt.plot(tz[isignal], z[isignal] / max(z), 'r')
plt.plot(te, edat[0].data / max(edat[0].data) + 1, 'k')
@ -1065,65 +958,64 @@ def writephases(arrivals, fformat, filename):
:type: string
'''
if fformat == 'NLLoc':
print ("Writing phases to %s for NLLoc" % filename)
fid = open("%s" % filename, 'w')
# write header
fid.write('# EQEVENT: Label: EQ001 Loc: X 0.00 Y 0.00 Z 10.00 OT 0.00 \n')
for key in arrivals:
# P onsets
if arrivals[key]['P']:
fm = arrivals[key]['P']['fm']
if fm == None:
fm = '?'
onset = arrivals[key]['P']['mpp']
year = onset.year
month = onset.month
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ms = onset.microsecond
ss_ms = ss + ms / 1000000.0
if arrivals[key]['P']['weight'] < 4:
pweight = 1 # use pick
else:
pweight = 0 # do not use pick
fid.write('%s ? ? ? P %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
fm,
year,
month,
day,
hh,
mm,
ss_ms,
pweight))
# S onsets
if arrivals[key]['S']:
fm = '?'
onset = arrivals[key]['S']['mpp']
year = onset.year
month = onset.month
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ms = onset.microsecond
ss_ms = ss + ms / 1000000.0
if arrivals[key]['S']['weight'] < 4:
sweight = 1 # use pick
else:
sweight = 0 # do not use pick
fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
fm,
year,
month,
day,
hh,
mm,
ss_ms,
sweight))
# P onsets
if arrivals[key]['P']:
fm = arrivals[key]['P']['fm']
if fm is None:
fm = '?'
onset = arrivals[key]['P']['mpp']
year = onset.year
month = onset.month
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ms = onset.microsecond
ss_ms = ss + ms / 1000000.0
if arrivals[key]['P']['weight'] < 4:
pweight = 1 # use pick
else:
pweight = 0 # do not use pick
fid.write('%s ? ? ? P %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
fm,
year,
month,
day,
hh,
mm,
ss_ms,
pweight))
# S onsets
if arrivals[key]['S']:
fm = '?'
onset = arrivals[key]['S']['mpp']
year = onset.year
month = onset.month
day = onset.day
hh = onset.hour
mm = onset.minute
ss = onset.second
ms = onset.microsecond
ss_ms = ss + ms / 1000000.0
if arrivals[key]['S']['weight'] < 4:
sweight = 1 # use pick
else:
sweight = 0 # do not use pick
fid.write('%s ? ? ? S %s %d%02d%02d %02d%02d %7.4f GAU 0 0 0 0 %d \n' % (key,
fm,
year,
month,
day,
hh,
mm,
ss_ms,
sweight))
fid.close()
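For illustration, a P-phase line written by the NLLoc branch above might read as follows (station code, polarity, date/time and weight are made-up values):
STA01 ? ? ? P U 20160502 1128 45.1230 GAU 0 0 0 0 1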
@ -1148,9 +1040,9 @@ def writephases(arrivals, fformat, filename):
Ao = str('%7.2f' % Ao)
year = Ponset.year
if year >= 2000:
year = year -2000
year = year - 2000
else:
year = year - 1900
year = year - 1900
month = Ponset.month
day = Ponset.day
hh = Ponset.hour
@ -1159,9 +1051,9 @@ def writephases(arrivals, fformat, filename):
ms = Ponset.microsecond
ss_ms = ss + ms / 1000000.0
if pweight < 2:
pstr = 'I'
pstr = 'I'
elif pweight >= 2:
pstr = 'E'
pstr = 'E'
if arrivals[key]['S']['weight'] < 4:
Sss = Sonset.second
Sms = Sonset.microsecond
@ -1172,35 +1064,36 @@ def writephases(arrivals, fformat, filename):
elif sweight >= 2:
sstr = 'E'
fid.write('%s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s%sS %d %s\n' % (key,
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Sss_ms,
sstr,
sweight,
Ao))
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Sss_ms,
sstr,
sweight,
Ao))
else:
fid.write('%s%sP%s%d %02d%02d%02d%02d%02d%5.2f %s\n' % (key,
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Ao))
pstr,
fm,
pweight,
year,
month,
day,
hh,
mm,
ss_ms,
Ao))
fid.close()
if __name__ == '__main__':
import doctest
doctest.testmod()

View File

@ -3,9 +3,10 @@
import os
import glob
from obspy.xseed import Parser
import warnings
from obspy.io.xseed import Parser
from obspy.core import read, Stream, UTCDateTime
from obspy import readEvents, read_inventory
from obspy import read_events, read_inventory
from obspy.core.event import Event, ResourceIdentifier, Pick, WaveformStreamID
from pylot.core.read.io import readPILOTEvent
@ -37,9 +38,10 @@ class Data(object):
if isinstance(evtdata, Event):
self.evtdata = evtdata
elif isinstance(evtdata, dict):
cat = readPILOTEvent(**evtdata)
evt = readPILOTEvent(**evtdata)
self.evtdata = evt
elif evtdata:
cat = readEvents(evtdata)
cat = read_events(evtdata)
self.evtdata = cat[0]
else: # create an empty Event object
self.setNew()
@ -79,7 +81,6 @@ class Data(object):
picks_str += str(pick) + '\n'
return picks_str
def getParent(self):
"""
@ -186,8 +187,11 @@ class Data(object):
self.wforiginal = None
if fnames is not None:
self.appendWFData(fnames)
else:
return False
self.wforiginal = self.getWFData().copy()
self.dirty = False
return True
def appendWFData(self, fnames):
"""
@ -413,16 +417,24 @@ class Data(object):
for station, onsets in picks.items():
print('Reading picks on station %s' % station)
for label, phase in onsets.items():
if not isinstance(phase, dict):
continue
onset = phase['mpp']
epp = phase['epp']
lpp = phase['lpp']
error = phase['spe']
try:
picker = phase['picker']
except KeyError as e:
warnings.warn(str(e), Warning)
picker = 'Unknown'
pick = Pick()
pick.time = onset
pick.time_errors.lower_uncertainty = onset - epp
pick.time_errors.upper_uncertainty = lpp - onset
pick.time_errors.uncertainty = error
pick.phase_hint = label
pick.method_id = ResourceIdentifier(id=picker)
pick.waveform_id = WaveformStreamID(station_code=station)
self.getEvtData().picks.append(pick)
try:
@ -432,11 +444,13 @@ class Data(object):
if firstonset is None or firstonset > onset:
firstonset = onset
if 'smi:local' in self.getID():
if 'smi:local' in self.getID() and firstonset:
fonset_str = firstonset.strftime('%Y_%m_%d_%H_%M_%S')
ID = ResourceIdentifier('event/' + fonset_str)
ID.convertIDToQuakeMLURI(authority_id=authority_id)
self.getEvtData().resource_id = ID
else:
print('No picks to apply!')
def applyArrivals(arrivals):
"""

View File

@ -1,6 +1,8 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pylot.core.util.errors import ParameterError
class AutoPickParameter(object):
'''
@ -49,7 +51,7 @@ class AutoPickParameter(object):
parFileCont[key] = val
if self.__filename is not None:
inputFile = open(self.__filename, 'r')
inputFile = open(self.__filename, 'r')
else:
return
try:
@ -57,7 +59,7 @@ class AutoPickParameter(object):
for line in lines:
parspl = line.split('\t')[:2]
parFileCont[parspl[0].strip()] = parspl[1]
except Exception as e:
except IndexError as e:
self._printParameterError(e)
inputFile.seek(0)
lines = inputFile.readlines()
@ -136,16 +138,18 @@ class AutoPickParameter(object):
return self.__getitem__(param)
except KeyError as e:
self._printParameterError(e)
raise ParameterError(e)
except TypeError:
try:
return self.__getitem__(args)
except KeyError as e:
self._printParameterError(e)
raise ParameterError(e)
def setParam(self, **kwargs):
for param, value in kwargs.items():
self.__setitem__(param, value)
#print(self)
# print(self)
@staticmethod
def _printParameterError(errmsg):
@ -190,6 +194,7 @@ class FilterOptions(object):
``'highpass'``
Butterworth-Highpass
'''
def __init__(self, filtertype='bandpass', freq=[2., 5.], order=3,
**kwargs):
self._order = order

View File

@ -7,9 +7,10 @@ import scipy.io as sio
import obspy.core.event as ope
from obspy.core import UTCDateTime
from pylot.core.util.utils import getOwner, createPick, createArrival,\
from pylot.core.util.utils import getOwner, createPick, createArrival, \
createEvent, createOrigin, createMagnitude
def readPILOTEvent(phasfn=None, locfn=None, authority_id=None, **kwargs):
"""
readPILOTEvent - function
@ -134,4 +135,57 @@ def readPILOTEvent(phasfn=None, locfn=None, authority_id=None, **kwargs):
raise AttributeError('{0} - Matlab LOC files {1} and {2} contain \
insufficient data!'.format(e, phasfn, locfn))
def picks_from_obs(fn):
picks = dict()
station_name = str()
for line in open(fn, 'r'):
if line.startswith('#'):
continue
else:
phase_line = line.split()
if not station_name == phase_line[0]:
phase = dict()
station_name = phase_line[0]
phase_name = phase_line[4].upper()
pick = UTCDateTime(phase_line[6] + phase_line[7] + phase_line[8])
phase[phase_name] = dict(mpp=pick, fm=phase_line[5])
picks[station_name] = phase
return picks
def picks_from_evt(evt):
'''
Takes an Event object and returns the pick dictionary commonly used within
PyLoT
:param evt: Event object containing all available information
:type evt: `~obspy.core.event.Event`
:return: pick dictionary
'''
picks = {}
for pick in evt.picks:
phase = {}
station = pick.waveform_id.station_code
try:
onsets = picks[station]
except KeyError as e:
print(e)
onsets = {}
mpp = pick.time
lpp = mpp + pick.time_errors.upper_uncertainty
epp = mpp - pick.time_errors.lower_uncertainty
spe = pick.time_errors.uncertainty
phase['mpp'] = mpp
phase['epp'] = epp
phase['lpp'] = lpp
phase['spe'] = spe
try:
picker = str(pick.method_id)
if picker.startswith('smi:local/'):
picker = picker.split('smi:local/')[1]
phase['picker'] = picker
except IndexError:
pass
onsets[pick.phase_hint] = phase.copy()
picks[station] = onsets.copy()
return picks
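A short usage sketch (file and station names are hypothetical):
from obspy import read_events
from pylot.core.read.io import picks_from_evt

cat = read_events('event.xml')
picks = picks_from_evt(cat[0])
print(picks['STA01']['P']['mpp'])    # most probable P onset, assuming station STA01 has a P pick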

View File

@ -7,7 +7,7 @@ from pylot.core.pick.utils import getnoisewin
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--t', type=~numpy.array, help='numpy array of time stamps')
parser.add_argument('--t', type=numpy.array, help='numpy array of time stamps')
parser.add_argument('--t1', type=float, help='time from which relativ to it noise window is extracted')
parser.add_argument('--tnoise', type=float, help='length of time window [s] for noise part extraction')
parser.add_argument('--tgap', type=float, help='safety gap between signal (t1=onset) and noise')

View File

@ -14,11 +14,12 @@ import argparse
import obspy
from pylot.core.pick.utils import earllatepicker
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--X', type=~obspy.core.stream.Stream, help='time series (seismogram) read with obspy module read')
parser.add_argument('--nfac', type=int, help='(noise factor), nfac times noise level to calculate latest possible pick')
parser.add_argument('--X', type=~obspy.core.stream.Stream,
help='time series (seismogram) read with obspy module read')
parser.add_argument('--nfac', type=int,
help='(noise factor), nfac times noise level to calculate latest possible pick')
parser.add_argument('--TSNR', type=tuple, help='length of time windows around pick used to determine SNR \
[s] (Tnoise, Tgap, Tsignal)')
parser.add_argument('--Pick1', type=float, help='Onset time of most likely pick')

View File

@ -13,11 +13,12 @@ from pylot.core.pick.utils import fmpicker
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--Xraw', type=~obspy.core.stream.Stream, help='unfiltered time series (seismogram) read with obspy module read')
parser.add_argument('--Xfilt', type=~obspy.core.stream.Stream, help='filtered time series (seismogram) read with obspy module read')
parser.add_argument('--Xraw', type=obspy.core.stream.Stream,
help='unfiltered time series (seismogram) read with obspy module read')
parser.add_argument('--Xfilt', type=obspy.core.stream.Stream,
help='filtered time series (seismogram) read with obspy module read')
parser.add_argument('--pickwin', type=float, help='length of pick window [s] for first motion determination')
parser.add_argument('--Pick', type=float, help='Onset time of most likely pick')
parser.add_argument('--iplot', type=int, help='if set, figure no. iplot occurs')
args = parser.parse_args()
fmpicker(args.Xraw, args.Xfilt, args.pickwin, args.Pick, args.iplot)

View File

@ -7,7 +7,7 @@ from pylot.core.pick.utils import getsignalwin
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--t', type=~numpy.array, help='numpy array of time stamps')
parser.add_argument('--t', type=numpy.array, help='numpy array of time stamps')
parser.add_argument('--t1', type=float, help='time from which relativ to it signal window is extracted')
parser.add_argument('--tsignal', type=float, help='length of time window [s] for signal part extraction')
args = parser.parse_args()

View File

@ -14,7 +14,7 @@ from pylot.core.pick.utils import getSNR
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--data', '-d', type=~obspy.core.stream.Stream,
parser.add_argument('--data', '-d', type=obspy.core.stream.Stream,
help='time series (seismogram) read with obspy module '
'read',
dest='data')

View File

@ -9,8 +9,8 @@
from obspy.core import read
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.CharFuns import CharacteristicFunction
from pylot.core.pick.Picker import AutoPicking
from pylot.core.pick.charfuns import CharacteristicFunction
from pylot.core.pick.picker import AutoPicker
from pylot.core.pick.utils import *
import glob
import argparse

View File

@ -11,6 +11,7 @@ from pylot.core.loc import nll
from pylot.core.loc import hsat
from pylot.core.loc import velest
def readFilterInformation(fname):
def convert2FreqRange(*args):
if len(args) > 1:
@ -18,6 +19,7 @@ def readFilterInformation(fname):
elif len(args) == 1:
return float(args[0])
return None
filter_file = open(fname, 'r')
filter_information = dict()
for filter_line in filter_file.readlines():
@ -26,14 +28,14 @@ def readFilterInformation(fname):
if pos == '\n':
filter_line[n] = ''
filter_information[filter_line[0]] = {'filtertype': filter_line[1]
if filter_line[1]
else None,
if filter_line[1]
else None,
'order': int(filter_line[2])
if filter_line[1]
else None,
if filter_line[1]
else None,
'freq': convert2FreqRange(*filter_line[3:])
if filter_line[1]
else None}
if filter_line[1]
else None}
return filter_information
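The dictionary built above expects the columns phase name, filter type, filter order and corner frequencies (the exact field delimiter is elided in this hunk); a hypothetical filter.in line such as
P   bandpass   4   2.0   10.0
would end up as {'P': {'filtertype': 'bandpass', 'order': 4, 'freq': ...}}, with 'freq' holding whatever convert2FreqRange makes of the remaining columns (its multi-value branch is also elided here).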
@ -41,8 +43,19 @@ FILTERDEFAULTS = readFilterInformation(os.path.join(os.path.expanduser('~'),
'.pylot',
'filter.in'))
OUTPUTFORMATS = {'.xml':'QUAKEML',
'.cnv':'CNV',
'.obs':'NLLOC_OBS'}
AUTOMATIC_DEFAULTS = os.path.join(os.path.expanduser('~'),
'.pylot',
'autoPyLoT.in')
LOCTOOLS = dict(nll = nll, hsat = hsat, velest = velest)
OUTPUTFORMATS = {'.xml': 'QUAKEML',
'.cnv': 'CNV',
'.obs': 'NLLOC_OBS'}
LOCTOOLS = dict(nll=nll, hsat=hsat, velest=velest)
COMPPOSITION_MAP = dict(Z=2, N=1, E=0)
COMPPOSITION_MAP['1'] = 1
COMPPOSITION_MAP['2'] = 0
COMPPOSITION_MAP['3'] = 2
COMPNAME_MAP = dict(Z='3', N='1', E='2')

View File

@ -20,3 +20,7 @@ class DatastructureError(Exception):
class OverwriteError(IOError):
pass
class ParameterError(Exception):
pass

395
pylot/core/util/pdf.py Normal file
View File

@ -0,0 +1,395 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import warnings
import numpy as np
from obspy import UTCDateTime
from pylot.core.util.utils import find_nearest
from pylot.core.util.version import get_git_version as _getVersionString
__version__ = _getVersionString()
__author__ = 'sebastianw'
def create_axis(x0, incr, npts):
ax = np.zeros(npts)
for i in range(npts):
ax[i] = x0 + incr * i
return ax
def gauss_parameter(te, tm, tl, eta):
'''
takes three onset times and returns the parameters sig1, sig2, a1 and a2
to represent the pick as a probability density function (PDF) with two
Gauss branches
:param te:
:param tm:
:param tl:
:param eta:
:return:
'''
sig1 = (tm - te) / np.sqrt(2 * np.log(1 / eta))
sig2 = (tl - tm) / np.sqrt(2 * np.log(1 / eta))
a1 = 2 / (1 + sig2 / sig1)
a2 = 2 / (1 + sig1 / sig2)
return sig1, sig2, a1, a2
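The branch widths above follow from requiring each Gaussian branch to decay to eta times its peak value at the earliest (te) and latest (tl) onset; for the left branch
\exp\!\left(-\tfrac{1}{2}\left(\tfrac{t_e - t_m}{\sigma_1}\right)^{2}\right) = \eta
\;\Rightarrow\;
\sigma_1 = \frac{t_m - t_e}{\sqrt{2\,\ln(1/\eta)}}
and analogously for sigma_2 with tl.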
def exp_parameter(te, tm, tl, eta):
'''
takes three onset times te, tm and tl and returns the parameters sig1,
sig2 and a to represent the pick as a probability density function (PDF)
with two exponential decay branches
:param te:
:param tm:
:param tl:
:param eta:
:return:
'''
sig1 = np.log(eta) / (te - tm)
sig2 = np.log(eta) / (tm - tl)
a = 1 / (1 / sig1 + 1 / sig2)
return sig1, sig2, a
def gauss_branches(x, mu, sig1, sig2, a1, a2):
'''
function gauss_branches takes an axis x, a center value mu, two sigma
values sig1 and sig2 and two scaling factors a1 and a2 and returns a
list containing the values of a probability density function (PDF)
consisting of Gauss branches
:param x:
:type x:
:param mu:
:type mu:
:param sig1:
:type sig1:
:param sig2:
:type sig2:
:param a1:
:type a1:
:param a2:
:returns fun_vals: list with function values along axis x
'''
fun_vals = []
for k in x:
if k < mu:
fun_vals.append(a1 * 1 / (np.sqrt(2 * np.pi) * sig1) * np.exp(-((k - mu) / sig1) ** 2 / 2))
else:
fun_vals.append(a2 * 1 / (np.sqrt(2 * np.pi) * sig2) * np.exp(-((k - mu) / sig2) ** 2 / 2))
return np.array(fun_vals)
def exp_branches(x, mu, sig1, sig2, a):
'''
function exp_branches takes an axis x, a center value mu, two sigma
values sig1 and sig2 and a scaling factor a and returns a
list containing the values of a probability density function (PDF)
consisting of exponential decay branches
:param x:
:param mu:
:param sig1:
:param sig2:
:param a:
:returns fun_vals: list with function values along axis x
'''
fun_vals = []
for k in x:
if k < mu:
fun_vals.append(a * np.exp(sig1 * (k - mu)))
else:
fun_vals.append(a * np.exp(-sig2 * (k - mu)))
return np.array(fun_vals)
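Put together, the two branch functions implemented above define piecewise densities of the form
f_\mathrm{gauss}(x) = \begin{cases} \dfrac{a_1}{\sqrt{2\pi}\,\sigma_1}\, e^{-\frac{(x-\mu)^2}{2\sigma_1^2}}, & x < \mu \\[4pt] \dfrac{a_2}{\sqrt{2\pi}\,\sigma_2}\, e^{-\frac{(x-\mu)^2}{2\sigma_2^2}}, & x \ge \mu \end{cases}
\qquad
f_\mathrm{exp}(x) = \begin{cases} a\, e^{\sigma_1 (x-\mu)}, & x < \mu \\ a\, e^{-\sigma_2 (x-\mu)}, & x \ge \mu \end{cases}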
# define container dictionaries for different types of pdfs
parameter = dict(gauss=gauss_parameter, exp=exp_parameter)
branches = dict(gauss=gauss_branches, exp=exp_branches)
class ProbabilityDensityFunction(object):
'''
A probability density function toolkit.
'''
version = __version__
def __init__(self, x0, incr, npts, pdf):
self.x0 = x0
self.incr = incr
self.npts = npts
self.axis = create_axis(x0, incr, npts)
self.data = pdf
def __add__(self, other):
assert isinstance(other, ProbabilityDensityFunction), \
'both operands must be of type ProbabilityDensityFunction'
x0, incr, npts, pdf_self, pdf_other = self.rearrange(other)
pdf = np.convolve(pdf_self, pdf_other, 'full') * incr
# shift axis values for correct plotting
npts = pdf.size
x0 *= 2
return ProbabilityDensityFunction(x0, incr, npts, pdf)
def __sub__(self, other):
assert isinstance(other, ProbabilityDensityFunction), \
'both operands must be of type ProbabilityDensityFunction'
x0, incr, npts, pdf_self, pdf_other = self.rearrange(other)
pdf = np.correlate(pdf_self, pdf_other, 'same') * incr
# shift axis values for correct plotting
midpoint = npts / 2
x0 = -incr * midpoint
return ProbabilityDensityFunction(x0, incr, npts, pdf)
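The choice of convolution for __add__ and cross-correlation for __sub__ reflects the densities of sums and differences of independent random variables:
f_{A+B}(t) = \int f_A(t - \tau)\, f_B(\tau)\, d\tau, \qquad
f_{A-B}(t) = \int f_A(t + \tau)\, f_B(\tau)\, d\tau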
def __nonzero__(self):
prec = self.precision(self.incr)
gtzero = np.all(self.data >= 0)
probone = bool(np.round(self.prob_gt_val(self.axis[0]), prec) == 1.)
return bool(gtzero and probone)
def __str__(self):
return str(self.data)
@staticmethod
def precision(incr):
prec = int(np.ceil(np.abs(np.log10(incr)))) - 2
return prec if prec >= 0 else 0
@property
def data(self):
return self._pdf
@data.setter
def data(self, pdf):
self._pdf = np.array(pdf)
@property
def axis(self):
return self._x
@axis.setter
def axis(self, x):
self._x = np.array(x)
@classmethod
def from_pick(self, lbound, barycentre, rbound, incr=0.001, decfact=0.01,
type='gauss'):
'''
Initialize a new ProbabilityDensityFunction object.
Takes incr, lbound, barycentre and rbound to derive x0 and the number
of points npts for the axis vector.
Maximum density
is given at the barycentre and on the boundaries the function has
declined to decfact times the maximum value. Integration of the
function over a particular interval gives the probability for the
variable value to be in that interval.
'''
# derive adequate window of definition
margin = 2. * np.max([barycentre - lbound, rbound - barycentre])
# find midpoint accounting also for `~obspy.UTCDateTime` object usage
try:
midpoint = (rbound + lbound) / 2
except TypeError:
try:
midpoint = (rbound + float(lbound)) / 2
except TypeError:
midpoint = float(rbound + float(lbound)) / 2
# find x0 on a grid point and sufficient npts
was_datetime = None
if isinstance(barycentre, UTCDateTime):
barycentre = float(barycentre)
was_datetime = True
n = int(np.ceil((barycentre - midpoint) / incr))
m = int(np.ceil((margin / incr)))
midpoint = barycentre - n * incr
margin = m * incr
x0 = midpoint - margin
npts = 2 * m
if was_datetime:
barycentre = UTCDateTime(barycentre)
# calculate parameter for pdf representing function
params = parameter[type](lbound, barycentre, rbound, decfact)
# calculate pdf values
try:
pdf = branches[type](create_axis(x0, incr, npts), barycentre, *params)
except TypeError as e:
print('Warning:\n' + str(e) + '\n' + 'trying timestamp instead')
assert isinstance(barycentre, UTCDateTime), 'object not capable of' \
' timestamp representation'
pdf = branches[type](create_axis(x0, incr, npts),
barycentre.timestamp, *params)
# return the object
return cls(x0, incr, npts, pdf)
def broadcast(self, pdf, si, ei, data):
try:
pdf[si:ei] = data
except ValueError as e:
warnings.warn(str(e), Warning)
return self.broadcast(pdf, si, ei, data[:-1])
return pdf
def expectation(self):
'''
Returns the expectation value of the current pdf object
.. math::
    \mu = \int\limits_{-\infty}^{\infty} x \cdot f(x) \mathrm{d}x
:return float: rval
'''
rval = 0
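# Riemann-sum approximation of the expectation integral over the
# discrete axis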
for n, x in enumerate(self.axis):
rval += x * self.data[n]
return rval * self.incr
def standard_deviation(self):
mu = self.expectation()
rval = 0
for n, x in enumerate(self.axis):
rval += (x - mu) ** 2 * self.data[n]
return rval * self.incr
def prob_lt_val(self, value):
if value <= self.axis[0] or value > self.axis[-1]:
raise ValueError('value out of bounds: {0}'.format(value))
return self.prob_limits((self.axis[0], value))
def prob_gt_val(self, value):
if value < self.axis[0] or value >= self.axis[-1]:
raise ValueError('value out of bounds: {0}'.format(value))
return self.prob_limits((value, self.axis[-1]))
def prob_limits(self, limits):
lim_ind = np.logical_and(limits[0] <= self.axis, self.axis <= limits[1])
data = self.data[lim_ind]
min_est, max_est = 0., 0.
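# averaging the lower and upper step sums is equivalent to integrating
# the piecewise linear interpolant (trapezoidal rule)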
for n in range(len(data) - 1):
min_est += min(data[n], data[n + 1])
max_est += max(data[n], data[n + 1])
return (min_est + max_est) / 2. * self.incr
def prob_val(self, value):
if not (self.axis[0] <= value <= self.axis[-1]):
warnings.warn('{0} not on axis'.format(value), Warning)
return None
return self.data[find_nearest(self.axis, value)] * self.incr
def plot(self, label=None):
import matplotlib.pyplot as plt
plt.plot(self.axis, self.data)
plt.xlabel('x')
plt.ylabel('f(x)')
plt.autoscale(axis='x', tight=True)
if self:
title_str = 'Probability density function '
if label:
title_str += label
title_str = title_str.strip()
else:
title_str = 'Function not suitable as probability density function'
plt.title(title_str)
plt.show()
def commonlimits(self, incr, other, max_npts=1e5):
'''
Takes an increment incr and another ProbabilityDensityFunction object
and returns the leftmost axis value and the minimum number of points
needed to cover the definition intervals of both functions.
:param incr: axis increment
:param other: second ProbabilityDensityFunction object
:param max_npts: maximum allowed number of axis points
:return: tuple (x0, npts)
'''
l1 = self.x0
r1 = l1 + self.incr * self.npts
l2 = other.x0
r2 = l2 + other.incr * other.npts
if l1 < l2:
x0 = l1
else:
x0 = l2
# calculate index for rounding
ri = self.precision(incr)
if r1 < r2:
npts = int(round(r2 - x0, ri) // incr)
else:
npts = int(round(r1 - x0, ri) // incr)
if npts > max_npts:
raise ValueError('Maximum number of points exceeded:\n'
'max_npts - %d\n'
'npts - %d\n' % (max_npts, npts))
npts = np.max([npts, self.npts, other.npts])
if npts < self.npts or npts < other.npts:
raise ValueError('new npts is too small')
return x0, npts
def rearrange(self, other):
'''
Method rearrange takes another ProbabilityDensityFunction object and
maps the data of both functions onto a common axis covering the
definition intervals of both, so that they can be combined, e.g. by
convolution or correlation.
:param other: second ProbabilityDensityFunction object
:return: tuple (x0, incr, npts, pdf_self, pdf_other)
'''
assert isinstance(other, ProbabilityDensityFunction), \
'both operands must be of type ProbabilityDensityFunction'
if not self.incr == other.incr:
raise NotImplementedError('Upsampling of the lower sampled PDF not implemented yet!')
else:
incr = self.incr
x0, npts = self.commonlimits(incr, other)
pdf_self = np.zeros(npts)
pdf_other = np.zeros(npts)
x = create_axis(x0, incr, npts)
sstart = find_nearest(x, self.x0)
s_end = sstart + self.data.size
ostart = find_nearest(x, other.x0)
o_end = ostart + other.data.size
pdf_self = self.broadcast(pdf_self, sstart, s_end, self.data)
pdf_other = self.broadcast(pdf_other, ostart, o_end, other.data)
return x0, incr, npts, pdf_self, pdf_other
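# Illustrative usage sketch (not part of the original module; the pick
# bounds and increment below are made-up values). from_pick() relies on
# the gauss_parameter/exp_parameter helpers defined earlier in this file;
# subtracting two PDF objects cross-correlates their densities, e.g. to
# express the uncertainty of the difference between two picks.
def _demo_pick_difference():
    manu = ProbabilityDensityFunction.from_pick(0.3, 0.5, 0.54, incr=0.01)
    auto = ProbabilityDensityFunction.from_pick(0.3, 0.34, 0.54, incr=0.01)
    diff = manu - auto
    return diff.expectation(), diff.standard_deviation()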

View File

@ -2,6 +2,7 @@
import sys
from PySide.QtCore import QThread, Signal
class AutoPickThread(QThread):
message = Signal(str)
finished = Signal()
@ -28,6 +29,5 @@ class AutoPickThread(QThread):
sys.stdout = sys.__stdout__
self.finished.emit()
def write(self, text):
self.message.emit(text)

View File

@ -10,6 +10,7 @@ import numpy as np
from obspy.core import UTCDateTime
import obspy.core.event as ope
def createAmplitude(pickID, amp, unit, category, cinfo):
'''
@ -28,6 +29,7 @@ def createAmplitude(pickID, amp, unit, category, cinfo):
amplitude.pick_id = pickID
return amplitude
def createArrival(pickresID, cinfo, phase, azimuth=None, dist=None):
'''
createArrival - function to create an Obspy Arrival
@ -56,6 +58,7 @@ def createArrival(pickresID, cinfo, phase, azimuth=None, dist=None):
arrival.distance = dist
return arrival
def createCreationInfo(agency_id=None, creation_time=None, author=None):
'''
@ -71,6 +74,7 @@ def createCreationInfo(agency_id=None, creation_time=None, author=None):
return ope.CreationInfo(agency_id=agency_id, author=author,
creation_time=creation_time)
def createEvent(origintime, cinfo, originloc=None, etype=None, resID=None,
authority_id=None):
'''
@ -115,6 +119,7 @@ def createEvent(origintime, cinfo, originloc=None, etype=None, resID=None,
event.origins = [o]
return event
def createMagnitude(originID, cinfo):
'''
createMagnitude - function to create an ObsPy Magnitude object
@ -129,6 +134,7 @@ def createMagnitude(originID, cinfo):
magnitude.origin_id = originID
return magnitude
def createOrigin(origintime, cinfo, latitude, longitude, depth):
'''
createOrigin - function to create an ObsPy Origin
@ -158,6 +164,7 @@ def createOrigin(origintime, cinfo, latitude, longitude, depth):
origin.depth = depth
return origin
def createPick(origintime, picknum, picktime, eventnum, cinfo, phase, station,
wfseedstr, authority_id):
'''
@ -196,6 +203,7 @@ def createPick(origintime, picknum, picktime, eventnum, cinfo, phase, station,
pick.waveform_id = ope.ResourceIdentifier(id=wfseedstr, prefix='file:/')
return pick
def createResourceID(timetohash, restype, authority_id=None, hrstr=None):
'''
@ -220,6 +228,7 @@ def createResourceID(timetohash, restype, authority_id=None, hrstr=None):
resID.convertIDToQuakeMLURI(authority_id=authority_id)
return resID
def demeanTrace(trace, window):
"""
returns the DATA where each trace is demean by the average value within
@ -234,6 +243,7 @@ def demeanTrace(trace, window):
trace.data -= trace.data[window].mean()
return trace
def findComboBoxIndex(combo_box, val):
"""
Function findComboBoxIndex takes a QComboBox object and a string and
@ -246,6 +256,18 @@ def findComboBoxIndex(combo_box, val):
"""
return combo_box.findText(val) if combo_box.findText(val) is not -1 else 0
def find_nearest(array, value):
'''
Function find_nearest takes an array and a value and returns the
index of the nearest value found in the array.
:param array:
:param value:
:return:
'''
return (np.abs(array - value)).argmin()
def fnConstructor(s):
'''
@ -267,6 +289,7 @@ def fnConstructor(s):
fn = '_' + fn
return fn
def getGlobalTimes(stream):
'''
@ -283,6 +306,7 @@ def getGlobalTimes(stream):
max_end = trace.stats.endtime
return min_start, max_end
def getHash(time):
'''
:param time: time object for which a hash should be calculated
@ -293,6 +317,7 @@ def getHash(time):
hg.update(time.strftime('%Y-%m-%d %H:%M:%S.%f'))
return hg.hexdigest()
def getLogin():
'''
@ -300,6 +325,7 @@ def getLogin():
'''
return pwd.getpwuid(os.getuid())[0]
def getOwner(fn):
'''
@ -309,6 +335,7 @@ def getOwner(fn):
'''
return pwd.getpwuid(os.stat(fn).st_uid).pw_name
def getPatternLine(fn, pattern):
"""
Takes a file name and a pattern string to search for in the file and
@ -333,6 +360,7 @@ def getPatternLine(fn, pattern):
return None
def isSorted(iterable):
'''
@ -342,6 +370,7 @@ def isSorted(iterable):
'''
return sorted(iterable) == iterable
def prepTimeAxis(stime, trace):
'''
@ -368,6 +397,7 @@ def prepTimeAxis(stime, trace):
'delta: {2}'.format(nsamp, len(time_ax), tincr))
return time_ax
def scaleWFData(data, factor=None, components='all'):
"""
produce scaled waveforms from given waveform data and a scaling factor,
@ -399,6 +429,7 @@ def scaleWFData(data, factor=None, components='all'):
return data
def runProgram(cmd, parameter=None):
"""
run an external program specified by cmd with parameters input returning the
@ -417,8 +448,10 @@ def runProgram(cmd, parameter=None):
cmd += ' %s 2>&1' % parameter
output = subprocess.check_output('{} | tee /dev/stderr'.format(cmd),
shell = True)
shell=True)
if __name__ == "__main__":
import doctest
doctest.testmod()

View File

@ -31,16 +31,19 @@
#
# include RELEASE-VERSION
from __future__ import print_function
__all__ = "get_git_version"
# NO IMPORTS FROM PYLOT IN THIS FILE! (file gets used at installation time)
import os
import inspect
from subprocess import Popen, PIPE
# NO IMPORTS FROM PYLOT IN THIS FILE! (file gets used at installation time)
script_dir = os.path.abspath(os.path.dirname(inspect.getfile(
inspect.currentframe())))
inspect.currentframe())))
PYLOT_ROOT = os.path.abspath(os.path.join(script_dir, os.pardir,
os.pardir, os.pardir))
VERSION_FILE = os.path.join(PYLOT_ROOT, "pylot", "RELEASE-VERSION")
@ -108,4 +111,4 @@ def get_git_version(abbrev=4):
if __name__ == "__main__":
print get_git_version()
print(get_git_version())

View File

@ -5,10 +5,12 @@ Created on Wed Mar 19 11:27:35 2014
@author: sebastianw
"""
import warnings
import datetime
import numpy as np
from matplotlib.figure import Figure
try:
from matplotlib.backends.backend_qt4agg import FigureCanvas
except ImportError:
@ -23,9 +25,10 @@ from PySide.QtCore import QSettings, Qt, QUrl, Signal, Slot
from PySide.QtWebKit import QWebView
from obspy import Stream, UTCDateTime
from pylot.core.read.inputs import FilterOptions
from pylot.core.pick.utils import getSNR, earllatepicker, getnoisewin,\
from pylot.core.pick.utils import getSNR, earllatepicker, getnoisewin, \
getResolutionWindow
from pylot.core.util.defaults import OUTPUTFORMATS, FILTERDEFAULTS, LOCTOOLS
from pylot.core.util.defaults import OUTPUTFORMATS, FILTERDEFAULTS, LOCTOOLS, \
COMPPOSITION_MAP
from pylot.core.util.utils import prepTimeAxis, getGlobalTimes, scaleWFData, \
demeanTrace, isSorted, findComboBoxIndex
@ -63,7 +66,7 @@ class MPLWidget(FigureCanvas):
# clear axes each time plot is called
self.axes.hold(True)
# initialize super class
FigureCanvas.__init__(self, self.figure)
super(MPLWidget, self).__init__(self.figure)
# add an cursor for station selection
self.multiCursor = MultiCursor(self.figure.canvas, (self.axes,),
horizOn=True,
@ -87,13 +90,19 @@ class MPLWidget(FigureCanvas):
self._parent = parent
def plotWFData(self, wfdata, title=None, zoomx=None, zoomy=None,
noiselevel=None, scaleddata=False):
noiselevel=None, scaleddata=False, mapping=True):
self.getAxes().cla()
self.clearPlotDict()
wfstart, wfend = getGlobalTimes(wfdata)
nmax = 0
for n, trace in enumerate(wfdata):
channel = trace.stats.channel
station = trace.stats.station
if mapping:
comp = channel[-1]
n = COMPPOSITION_MAP[comp]
if n > nmax:
nmax = n
msg = 'plotting %s channel of station %s' % (channel, station)
print(msg)
stime = trace.stats.starttime - wfstart
@ -110,7 +119,7 @@ class MPLWidget(FigureCanvas):
ylabel = ''
self.updateWidget(xlabel, ylabel, title)
self.setXLims([0, wfend - wfstart])
self.setYLims([-0.5, n + 0.5])
self.setYLims([-0.5, nmax + 0.5])
if zoomx is not None:
self.setXLims(zoomx)
if zoomy is not None:
@ -157,9 +166,10 @@ class MPLWidget(FigureCanvas):
def insertLabel(self, pos, text):
pos = pos / max(self.getAxes().ylim)
axann = self.getAxes().annotate(text, xy=(.03, pos),
xycoords='axes fraction')
xycoords='axes fraction')
axann.set_bbox(dict(facecolor='lightgrey', alpha=.6))
class PickDlg(QDialog):
def __init__(self, parent=None, data=None, station=None, picks=None,
rotate=False):
@ -169,11 +179,14 @@ class PickDlg(QDialog):
self.station = station
self.rotate = rotate
self.components = 'ZNE'
settings = QSettings()
self._user = settings.value('user/Login', 'anonymous')
if picks:
self.picks = picks
else:
self.picks = {}
self.filteroptions = FILTERDEFAULTS
self.pick_block = False
# initialize panning attributes
self.press = None
@ -247,15 +260,14 @@ class PickDlg(QDialog):
slot=self.filterWFData,
icon=filter_icon,
tip='Toggle filtered/original'
' waveforms',
checkable=True)
' waveforms')
self.zoomAction = createAction(parent=self, text='Zoom',
slot=self.zoom, icon=zoom_icon,
tip='Zoom into waveform',
checkable=True)
self.resetZoomAction = createAction(parent=self, text='Home',
slot=self.resetZoom, icon=home_icon,
tip='Reset zoom to original limits')
slot=self.resetZoom, icon=home_icon,
tip='Reset zoom to original limits')
self.resetPicksAction = createAction(parent=self, text='Delete Picks',
slot=self.delPicks, icon=del_icon,
tip='Delete current picks.')
@ -337,6 +349,10 @@ class PickDlg(QDialog):
return widget.mpl_connect('button_release_event', slot)
def verifyPhaseSelection(self):
if self.pick_block:
self.pick_block = self.togglePickBlocker()
warnings.warn('Changed selection before phase was set!',
UserWarning)
phase = self.selectPhase.currentText()
self.updateCurrentLimits()
if phase:
@ -348,6 +364,7 @@ class PickDlg(QDialog):
self.disconnectPressEvent()
self.cidpress = self.connectPressEvent(self.setIniPick)
self.filterWFData()
self.pick_block = self.togglePickBlocker()
else:
self.disconnectPressEvent()
self.cidpress = self.connectPressEvent(self.panPress)
@ -383,6 +400,9 @@ class PickDlg(QDialog):
traceIDs.append(traceID)
return traceIDs
def getUser(self):
return self._user
def getFilterOptions(self, phase):
options = self.filteroptions[phase]
return FilterOptions(**options)
@ -422,6 +442,11 @@ class PickDlg(QDialog):
trace = selectTrace(trace, 'NE')
if trace:
wfdata.append(trace)
elif component == '1' or component == '2':
for trace in self.getWFData():
trace = selectTrace(trace, '12')
if trace:
wfdata.append(trace)
elif component == 'Z':
wfdata = self.getWFData().select(component=component)
return wfdata
@ -470,8 +495,22 @@ class PickDlg(QDialog):
noise_win = settings.value('picking/noise_win_P', 5.)
gap_win = settings.value('picking/gap_win_P', .2)
signal_win = settings.value('picking/signal_win_P', 3.)
itrace = int(trace_number)
result = getSNR(wfdata, (noise_win, gap_win, signal_win), ini_pick)
while itrace > len(wfdata) - 1:
itrace -= 1
# copy data for plotting
data = self.getWFData().copy()
# filter data and trace on which is picked prior to determination of SNR
phase = self.selectPhase.currentText()
filteroptions = self.getFilterOptions(phase).parseFilterOptions()
if filteroptions:
data.filter(**filteroptions)
wfdata.filter(**filteroptions)
result = getSNR(wfdata, (noise_win, gap_win, signal_win), ini_pick, itrace)
snr = result[0]
noiselevel = result[2] * nfac
@ -479,8 +518,7 @@ class PickDlg(QDialog):
x_res = getResolutionWindow(snr)
# remove mean noise level from waveforms
wfdata = self.getWFData().copy()
for trace in wfdata:
for trace in data:
t = prepTimeAxis(trace.stats.starttime - self.getStartTime(), trace)
inoise = getnoisewin(t, ini_pick, noise_win, gap_win)
trace = demeanTrace(trace=trace, window=inoise)
@ -488,7 +526,7 @@ class PickDlg(QDialog):
self.setXLims([ini_pick - x_res, ini_pick + x_res])
self.setYLims(np.array([-noiselevel * 2.5, noiselevel * 2.5]) +
trace_number)
self.getPlotWidget().plotWFData(wfdata=wfdata,
self.getPlotWidget().plotWFData(wfdata=data,
title=self.getStation() +
' picking mode',
zoomx=self.getXLims(),
@ -502,30 +540,40 @@ class PickDlg(QDialog):
settings = QSettings()
nfac = settings.value('picking/nfac_P', 1.5)
noise_win = settings.value('picking/noise_win_P', 5.)
gap_win = settings.value('picking/gap_win_P', .2)
signal_win = settings.value('picking/signal_win_P', 3.)
nfac = settings.value('picking/nfac_S', 1.5)
noise_win = settings.value('picking/noise_win_S', 5.)
gap_win = settings.value('picking/gap_win_S', .2)
signal_win = settings.value('picking/signal_win_S', 3.)
# copy data for plotting
data = self.getWFData().copy()
# filter data and trace on which is picked prior to determination of SNR
phase = self.selectPhase.currentText()
filteroptions = self.getFilterOptions(phase).parseFilterOptions()
if filteroptions:
data.filter(**filteroptions)
wfdata.filter(**filteroptions)
# determine SNR and noiselevel
result = getSNR(wfdata, (noise_win, gap_win, signal_win), ini_pick)
snr = result[0]
noiselevel = result[2] * nfac
data = self.getWFData().copy()
phase = self.selectPhase.currentText()
filteroptions = self.getFilterOptions(phase).parseFilterOptions()
data.filter(**filteroptions)
# prepare plotting of data
for trace in data:
t = prepTimeAxis(trace.stats.starttime - self.getStartTime(), trace)
inoise = getnoisewin(t, ini_pick, noise_win, gap_win)
trace = demeanTrace(trace, inoise)
horiz_comp = ('n', 'e')
data = scaleWFData(data, noiselevel * 2.5, horiz_comp)
# account for non-oriented horizontal waveforms
try:
horiz_comp = ('n', 'e')
data = scaleWFData(data, noiselevel * 2.5, horiz_comp)
except IndexError as e:
print('warning: {0}'.format(e))
horiz_comp = ('1', '2')
data = scaleWFData(data, noiselevel * 2.5, horiz_comp)
x_res = getResolutionWindow(snr)
@ -533,8 +581,8 @@ class PickDlg(QDialog):
traces = self.getTraceID(horiz_comp)
traces.sort()
self.setYLims(tuple(np.array([-0.5, +0.5]) +
np.array(traces)))
noiselevels = [trace + 1 / (2.5 * 2) for trace in traces] +\
np.array(traces)))
noiselevels = [trace + 1 / (2.5 * 2) for trace in traces] + \
[trace - 1 / (2.5 * 2) for trace in traces]
self.getPlotWidget().plotWFData(wfdata=data,
@ -554,21 +602,28 @@ class PickDlg(QDialog):
pick = gui_event.xdata # get pick time relative to the traces timeaxis not to the global
channel = self.getChannelID(round(gui_event.ydata))
wfdata = self.getWFData().copy().select(channel=channel)
stime = self.getStartTime()
# get earliest and latest possible pick and symmetric pick error
[epp, lpp, spe] = earllatepicker(wfdata, 1.5, (5., .5, 2.), pick)
# get name of phase actually picked
phase = self.selectPhase.currentText()
# get filter parameter for the phase to be picked
filteroptions = self.getFilterOptions(phase).parseFilterOptions()
# copy and filter data for earliest and latest possible picks
wfdata = self.getWFData().copy().select(channel=channel)
wfdata.filter(**filteroptions)
# get earliest and latest possible pick and symmetric pick error
[epp, lpp, spe] = earllatepicker(wfdata, 1.5, (5., .5, 2.), pick)
# return absolute time values for phases
stime = self.getStartTime()
epp = stime + epp
mpp = stime + pick
lpp = stime + lpp
# save pick times for actual phase
phasepicks = {'epp': epp, 'lpp': lpp, 'mpp': mpp, 'spe': spe}
phasepicks = dict(epp=epp, lpp=lpp, mpp=mpp, spe=spe,
picker=self.getUser())
try:
oldphasepick = self.picks[phase]
@ -601,6 +656,7 @@ class PickDlg(QDialog):
self.drawPicks()
self.disconnectPressEvent()
self.zoomAction.setEnabled(True)
self.pick_block = self.togglePickBlocker()
self.selectPhase.setCurrentIndex(-1)
self.setPlotLabels()
@ -660,7 +716,12 @@ class PickDlg(QDialog):
ax.figure.canvas.draw()
def togglePickBlocker(self):
return not self.pick_block
def filterWFData(self):
if self.pick_block:
return
self.updateCurrentLimits()
data = self.getWFData().copy()
old_title = self.getPlotWidget().getAxes().get_title()
@ -675,13 +736,15 @@ class PickDlg(QDialog):
filtoptions = filtoptions.parseFilterOptions()
if filtoptions is not None:
data.filter(**filtoptions)
if old_title.endswith(')'):
title = old_title[:-1] + ', filtered)'
else:
if not old_title.endswith(')'):
title = old_title + ' (filtered)'
elif not old_title.endswith(' (filtered)') and not old_title.endswith(', filtered)'):
title = old_title[:-1] + ', filtered)'
else:
if old_title.endswith(' (filtered)'):
title = old_title.replace(' (filtered)', '')
elif old_title.endswith(', filtered)'):
title = old_title.replace(', filtered)', ')')
if title is None:
title = old_title
self.getPlotWidget().plotWFData(wfdata=data, title=title,
@ -702,7 +765,6 @@ class PickDlg(QDialog):
self.drawPicks()
self.draw()
def setPlotLabels(self):
# get channel labels
@ -715,7 +777,9 @@ class PickDlg(QDialog):
self.getPlotWidget().setYLims(self.getYLims())
def zoom(self):
if self.zoomAction.isChecked():
if self.zoomAction.isChecked() and self.pick_block:
self.zoomAction.setChecked(False)
elif self.zoomAction.isChecked():
self.disconnectPressEvent()
self.disconnectMotionEvent()
self.disconnectReleaseEvent()
@ -984,7 +1048,7 @@ class LocalisationTab(PropTab):
self.binlabel.setText("{0} bin directory".format(curtool))
def selectDirectory(self, edit):
selected_directory = QFileDialog.getExistingDirectory()
selected_directory = QFileDialog.getExistingDirectory()
edit.setText(selected_directory)
def getValues(self):
@ -995,7 +1059,6 @@ class LocalisationTab(PropTab):
return values
class NewEventDlg(QDialog):
def __init__(self, parent=None, titleString="Create a new event"):
"""
@ -1236,6 +1299,8 @@ class HelpForm(QDialog):
def updatePageTitle(self):
self.pageLabel.setText(self.webBrowser.documentTitle())
if __name__ == '__main__':
import doctest
doctest.testmod()

View File

@ -9,8 +9,8 @@
from obspy.core import read
import matplotlib.pyplot as plt
import numpy as np
from pylot.core.pick.CharFuns import *
from pylot.core.pick.Picker import *
from pylot.core.pick.charfuns import *
from pylot.core.pick.picker import *
import glob
import argparse